summaryrefslogtreecommitdiffstats
path: root/src/tools/rust-analyzer
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--src/tools/rust-analyzer/.cargo/config.toml11
-rw-r--r--src/tools/rust-analyzer/.editorconfig19
-rw-r--r--src/tools/rust-analyzer/.git-blame-ignore-revs8
-rw-r--r--src/tools/rust-analyzer/.vscode/extensions.json9
-rw-r--r--src/tools/rust-analyzer/.vscode/launch.json131
-rw-r--r--src/tools/rust-analyzer/.vscode/tasks.json67
-rw-r--r--src/tools/rust-analyzer/Cargo.lock2101
-rw-r--r--src/tools/rust-analyzer/Cargo.toml33
-rw-r--r--src/tools/rust-analyzer/LICENSE-APACHE201
-rw-r--r--src/tools/rust-analyzer/LICENSE-MIT23
-rw-r--r--src/tools/rust-analyzer/PRIVACY.md1
-rw-r--r--src/tools/rust-analyzer/README.md49
-rw-r--r--src/tools/rust-analyzer/assets/logo-square.svg88
-rw-r--r--src/tools/rust-analyzer/assets/logo-wide.svg142
-rw-r--r--src/tools/rust-analyzer/bench_data/glorious_old_parser8562
-rw-r--r--src/tools/rust-analyzer/bench_data/numerous_macro_rules560
-rw-r--r--src/tools/rust-analyzer/crates/base-db/Cargo.toml22
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/change.rs85
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/fixture.rs494
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/input.rs792
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/lib.rs131
-rw-r--r--src/tools/rust-analyzer/crates/cfg/Cargo.toml26
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs145
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/dnf.rs345
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/lib.rs202
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/tests.rs224
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/Cargo.toml22
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/src/lib.rs396
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/Cargo.toml43
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/adt.rs365
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr.rs1002
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body.rs471
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs1023
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs571
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs127
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs397
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/builtin_attr.rs654
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs158
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs207
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data.rs579
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/db.rs243
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs116
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expr.rs444
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/find_path.rs1134
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/generics.rs522
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/import_map.rs1108
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/intern.rs227
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs464
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs961
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs773
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs754
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs360
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/keys.rs70
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs174
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lib.rs980
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs354
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs95
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs377
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs1632
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs138
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs154
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs911
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs200
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs130
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres.rs545
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs98
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs2202
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs137
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs161
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs448
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs81
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs933
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs338
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs237
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs1187
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs843
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs23
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path.rs222
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs230
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs95
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/resolver.rs912
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/src.rs85
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/test_db.rs245
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/trace.rs51
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs486
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/visibility.rs242
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/Cargo.toml34
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs181
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs130
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs249
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs669
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs509
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs266
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs382
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs256
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs1000
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs276
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/name.rs433
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs81
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/quote.rs284
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/Cargo.toml44
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs145
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/builder.rs311
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs799
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs358
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs469
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs148
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs225
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs701
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs199
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs416
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs508
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs1094
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs56
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs811
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs104
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/display.rs1315
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer.rs1088
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs82
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs673
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs1527
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs354
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs295
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs738
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/interner.rs432
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs525
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs1778
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs148
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs1186
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs62
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs150
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests.rs578
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs755
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs75
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs176
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs51
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs1338
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs1792
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs485
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs991
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs1650
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs3072
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs3782
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tls.rs133
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/traits.rs187
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/utils.rs408
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/walk.rs147
-rw-r--r--src/tools/rust-analyzer/crates/hir/Cargo.toml28
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/attrs.rs177
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/db.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/diagnostics.rs170
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/display.rs530
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/from_id.rs293
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/has_source.rs174
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs3639
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs1540
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs473
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs915
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/symbols.rs348
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/Cargo.toml31
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs347
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs325
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs164
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs229
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs1340
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs1709
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs447
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs400
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs234
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs1292
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs216
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs575
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs395
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs268
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs351
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs556
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs497
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs574
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs840
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs188
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs2147
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs900
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs5333
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs1770
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs1076
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs360
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs1279
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs606
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs139
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs92
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs121
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs255
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs179
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs657
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs334
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs343
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs132
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs1328
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs316
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs342
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs227
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs310
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs1787
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs492
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs177
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs295
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs495
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs184
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs1194
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs954
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs838
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs144
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs338
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs144
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs570
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs822
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs122
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs130
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs997
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs337
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs151
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs183
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs221
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs507
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs548
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs1297
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs509
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs241
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs37
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs409
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs212
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs284
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs1250
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs999
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs100
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs438
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs307
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs150
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs243
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs588
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs82
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs98
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs237
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs257
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs719
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs1020
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs980
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/lib.rs309
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests.rs558
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs2259
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs195
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils.rs703
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs661
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs775
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/Cargo.toml33
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions.rs691
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs380
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs116
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs61
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs74
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs947
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs280
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs108
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs43
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs407
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs196
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs130
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs133
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs1160
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs237
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs341
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs354
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs185
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs616
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs311
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs369
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs189
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs246
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs120
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs41
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/config.rs41
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context.rs639
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs1293
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs413
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/item.rs637
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/lib.rs247
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render.rs1910
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs33
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs671
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs191
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs270
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs193
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs57
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs77
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs96
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs214
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests.rs305
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs1016
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs672
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs1232
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs274
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs154
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs247
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs716
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs131
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs133
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs229
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs895
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs671
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs384
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/visibility.rs90
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/Cargo.toml39
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs78
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs163
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/assists.rs137
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/defs.rs545
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs185
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs7682
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/helpers.rs105
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs674
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs446
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs1084
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs295
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs151
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/label.rs48
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/lib.rs246
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/line_index.rs300
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs287
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rename.rs540
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs34
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/search.rs785
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/source_change.rs99
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs429
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs308
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs136
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs460
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt533
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs284
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/traits.rs273
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs86
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs34
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml34
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs30
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs203
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs144
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs486
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs218
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs37
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs334
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs837
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs1012
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs101
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs283
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs131
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs573
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs336
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs49
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs90
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs76
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs156
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs62
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs148
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs260
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs145
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/tests/sourcegen.rs73
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml26
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/errors.rs29
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs58
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs35
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs358
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs803
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/nester.rs99
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs406
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs242
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs308
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/search.rs289
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs1397
-rw-r--r--src/tools/rust-analyzer/crates/ide/Cargo.toml47
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/annotations.rs789
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs460
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links.rs549
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs77
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs491
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/expand_macro.rs521
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/extend_selection.rs662
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/file_structure.rs579
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/fixture.rs87
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/fn_references.rs94
-rwxr-xr-xsrc/tools/rust-analyzer/crates/ide/src/folding_ranges.rs626
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs112
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_definition.rs1634
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs344
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs296
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/highlight_related.rs1377
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover.rs390
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/render.rs563
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/tests.rs5053
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs2818
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/join_lines.rs1087
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/lib.rs702
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs22
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/markup.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/matching_brace.rs78
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/moniker.rs342
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/move_item.rs890
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/navigation_target.rs623
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/parent_module.rs167
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/prime_caches.rs158
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/prime_caches/topologic_sort.rs98
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/references.rs1636
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs2252
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/runnables.rs2163
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs71
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/signature_help.rs1334
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/ssr.rs255
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/static_index.rs321
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/status.rs164
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs449
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs50
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs690
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs92
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs97
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs279
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs81
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs128
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs340
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html62
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html58
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html66
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html50
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html190
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html47
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html233
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html62
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html58
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html55
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html96
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html51
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html50
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html58
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html56
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html164
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html126
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs1096
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs339
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing.rs1210
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs616
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_hir.rs26
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs16
-rw-r--r--src/tools/rust-analyzer/crates/limit/Cargo.toml11
-rw-r--r--src/tools/rust-analyzer/crates/limit/src/lib.rs69
-rw-r--r--src/tools/rust-analyzer/crates/mbe/Cargo.toml24
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/benchmark.rs222
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander.rs121
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs914
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs272
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/lib.rs352
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/parser.rs261
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs844
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs99
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/token_map.rs113
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs160
-rw-r--r--src/tools/rust-analyzer/crates/parser/Cargo.toml19
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/event.rs133
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar.rs342
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs53
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs625
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs643
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs131
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs242
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items.rs465
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs168
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs37
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs140
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/use_item.rs93
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/params.rs209
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs132
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs440
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/types.rs352
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/input.rs88
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lexed_str.rs300
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lib.rs181
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/output.rs77
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/parser.rs340
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/shortcuts.rs215
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs29
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs390
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests.rs166
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs107
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs123
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs312
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/token_set.rs42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rs22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.txt48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rs17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.txt26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rast4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.txt4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rast9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.txt9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rast9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.txt9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rast6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.txt6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.txt22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.txt16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rast3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.txt3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rast14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.txt14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rast64
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.txt64
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rast8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.txt8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.txt57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.txt2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.txt2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.txt22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rast8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.txt8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.txt77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rast12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.txt12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rast34
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rast18
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rast39
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rast15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast62
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rast74
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast80
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rs13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast56
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0012_broken_lambda.rast387
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast89
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rast44
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rast47
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rast134
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rast107
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rast34
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast171
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast327
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rast209
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rs32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast205
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rs20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rast68
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rast96
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rast55
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rast83
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast75
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast256
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rast15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rast123
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast79
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast23
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast37
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast53
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast60
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast128
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast53
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast98
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast60
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast76
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast81
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast105
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast50
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast87
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast98
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast79
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast148
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast85
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast72
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast97
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast117
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast251
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs18
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast60
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast125
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast65
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast23
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast126
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast152
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast53
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast96
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast40
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast96
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast117
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast139
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast87
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast19
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast136
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast67
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast23
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast34
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast125
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast103
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast175
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast55
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast246
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast39
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast70
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast128
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast46
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast84
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast151
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast62
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast105
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast37
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast64
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast70
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast111
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast66
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast111
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast456
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast123
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast23
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast19
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast56
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast58
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast70
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast79
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast112
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast39
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast11
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast19
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast96
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast72
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast46
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast37
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast44
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast50
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast34
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast95
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast105
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast47
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rs0
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rast39
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rast194
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rast40
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rast61
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast133
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rast95
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rast65
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rast93
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rast274
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rs17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rast155
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rs25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rast283
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rs0
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rast186
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rs14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rast152
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs11
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rast64
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rast61
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rast973
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rs29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast93
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rast223
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rs28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rast2339
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rs154
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rast93
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rast50
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast127
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rs15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rast110
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rast230
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rs24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rast323
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rs27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rast201
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rs17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rast92
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rast548
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rast81
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast37
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rast126
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rast50
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rast65
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rast59
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rast97
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rast100
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rast56
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rast27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast177
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rs15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rast198
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rast134
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rast166
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rast17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast61
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rast222
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rs16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast413
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rs30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rast238
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs18
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rast204
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rast59
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rast72
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rast352
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rs14
-rw-r--r--src/tools/rust-analyzer/crates/paths/Cargo.toml16
-rw-r--r--src/tools/rust-analyzer/crates/paths/src/lib.rs299
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml31
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs181
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs154
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs328
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs107
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs151
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml17
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs19
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml36
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/build.rs25
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs104
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs143
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs485
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs24
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs70
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs429
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs305
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs81
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs352
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs1056
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs140
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs819
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs105
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs156
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs510
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs32
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs89
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs451
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs304
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs81
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs83
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs332
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs1106
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs139
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs834
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs105
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs156
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs529
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs32
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs89
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs493
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs304
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs81
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs84
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs339
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs1125
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs139
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs792
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs102
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs518
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs46
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs179
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs155
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs31
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs199
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs160
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs47
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml15
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/build.rs106
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml17
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs114
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs6
-rw-r--r--src/tools/rust-analyzer/crates/profile/Cargo.toml31
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/google_cpu_profiler.rs44
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/hprof.rs326
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/lib.rs130
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/memory_usage.rs127
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/stop_watch.rs101
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/tree.rs84
-rw-r--r--src/tools/rust-analyzer/crates/project-model/Cargo.toml28
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs238
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs504
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cfg_flag.rs63
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/lib.rs159
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs51
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/project_json.rs198
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs60
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/sysroot.rs232
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/tests.rs1820
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/workspace.rs1032
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/hello-world-metadata.json245
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/hello-world-project.json12
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/is-proc-macro-project.json13
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml92
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/build.rs50
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs155
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs239
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/rustc_wrapper.rs46
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs210
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs228
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs69
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs447
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs86
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs248
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/highlight.rs14
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs164
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs328
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs17
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs122
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs86
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs16
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs1985
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs135
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs109
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/clippy_pass_by_ref.txt301
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/handles_macro_location.txt64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/macro_compiler_error.txt229
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/reasonable_line_numbers_from_empty_file.txt64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_incompatible_type_for_trait.txt64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_mismatched_type.txt64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_range_map_lsp_position.txt184
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable.txt212
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_hint.txt212
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_info.txt212
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_wrong_number_of_parameters.txt184
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/snap_multi_line_fix.txt388
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs1843
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs53
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs266
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs117
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs375
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs1892
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs196
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs80
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/line_index.rs68
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs549
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs407
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs823
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/markdown.rs157
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/mem_docs.rs65
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs44
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs705
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs301
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs52
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs1397
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs57
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs1099
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs80
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs406
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs75
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs473
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/Cargo.toml16
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/src/lib.rs203
-rw-r--r--src/tools/rust-analyzer/crates/stdx/Cargo.toml24
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/lib.rs247
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/macros.rs47
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs39
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/panic_context.rs49
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/process.rs267
-rw-r--r--src/tools/rust-analyzer/crates/syntax/Cargo.toml39
-rw-r--r--src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml27
-rw-r--r--src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs11
-rw-r--r--src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs11
-rw-r--r--src/tools/rust-analyzer/crates/syntax/rust.ungram667
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/algo.rs660
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast.rs367
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs174
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs717
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs410
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs41
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs4806
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs196
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/make.rs901
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs875
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs122
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs472
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs136
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/fuzz.rs75
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/hacks.rs15
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/lib.rs358
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/parsing.rs46
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs441
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ptr.rs104
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs44
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs75
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ted.rs206
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests.rs186
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs252
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs862
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/token_text.rs95
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/utils.rs43
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/validation.rs378
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/validation/block.rs24
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs199
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs106
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast127
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs15
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast105
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast30
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast96
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast29
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast196
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast22
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast216
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs14
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rsbin0 -> 8 bytes
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs7
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/Cargo.toml19
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs112
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs45
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/fixture.rs409
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/lib.rs500
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/minicore.rs669
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/Cargo.toml14
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/src/lib.rs264
-rw-r--r--src/tools/rust-analyzer/crates/toolchain/Cargo.toml13
-rw-r--r--src/tools/rust-analyzer/crates/toolchain/src/lib.rs69
-rw-r--r--src/tools/rust-analyzer/crates/tt/Cargo.toml15
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/buffer.rs231
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/lib.rs322
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml20
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs234
-rw-r--r--src/tools/rust-analyzer/crates/vfs/Cargo.toml17
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/anchored_path.rs49
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/file_set.rs218
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/file_set/tests.rs42
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/lib.rs221
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/loader.rs215
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/path_interner.rs48
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs406
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/vfs_path/tests.rs30
-rw-r--r--src/tools/rust-analyzer/docs/dev/README.md266
-rw-r--r--src/tools/rust-analyzer/docs/dev/architecture.md497
-rw-r--r--src/tools/rust-analyzer/docs/dev/debugging.md99
-rw-r--r--src/tools/rust-analyzer/docs/dev/guide.md573
-rw-r--r--src/tools/rust-analyzer/docs/dev/lsp-extensions.md761
-rw-r--r--src/tools/rust-analyzer/docs/dev/style.md1172
-rw-r--r--src/tools/rust-analyzer/docs/dev/syntax.md534
-rw-r--r--src/tools/rust-analyzer/docs/user/generated_config.adoc620
-rw-r--r--src/tools/rust-analyzer/docs/user/manual.adoc863
-rw-r--r--src/tools/rust-analyzer/lib/README.md2
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/Cargo.toml10
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/src/lib.rs366
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/src/map.rs75
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/Cargo.toml16
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs121
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/error.rs50
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/lib.rs232
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/msg.rs343
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs62
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/socket.rs46
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs71
-rw-r--r--src/tools/rust-analyzer/rustfmt.toml2
-rw-r--r--src/tools/rust-analyzer/triagebot.toml1
-rw-r--r--src/tools/rust-analyzer/xtask/Cargo.toml15
-rw-r--r--src/tools/rust-analyzer/xtask/src/dist.rs170
-rw-r--r--src/tools/rust-analyzer/xtask/src/flags.rs148
-rw-r--r--src/tools/rust-analyzer/xtask/src/install.rs142
-rw-r--r--src/tools/rust-analyzer/xtask/src/main.rs91
-rw-r--r--src/tools/rust-analyzer/xtask/src/metrics.rs200
-rw-r--r--src/tools/rust-analyzer/xtask/src/release.rs96
-rw-r--r--src/tools/rust-analyzer/xtask/src/release/changelog.rs171
1650 files changed, 337186 insertions, 0 deletions
diff --git a/src/tools/rust-analyzer/.cargo/config.toml b/src/tools/rust-analyzer/.cargo/config.toml
new file mode 100644
index 000000000..24745d1c8
--- /dev/null
+++ b/src/tools/rust-analyzer/.cargo/config.toml
@@ -0,0 +1,11 @@
+[alias]
+xtask = "run --package xtask --bin xtask --"
+tq = "test -- -q"
+qt = "tq"
+lint = "clippy --all-targets -- -Aclippy::collapsible_if -Aclippy::needless_pass_by_value -Aclippy::nonminimal_bool -Aclippy::redundant_pattern_matching --cap-lints warn"
+
+[target.x86_64-pc-windows-msvc]
+linker = "rust-lld"
+
+[env]
+CARGO_WORKSPACE_DIR = { value = "", relative = true } \ No newline at end of file
diff --git a/src/tools/rust-analyzer/.editorconfig b/src/tools/rust-analyzer/.editorconfig
new file mode 100644
index 000000000..314f79d3f
--- /dev/null
+++ b/src/tools/rust-analyzer/.editorconfig
@@ -0,0 +1,19 @@
+# https://EditorConfig.org
+root = true
+
+[*]
+charset = utf-8
+trim_trailing_whitespace = true
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+
+[*.{rs,toml}]
+indent_size = 4
+
+[*.ts]
+indent_size = 4
+[*.js]
+indent_size = 4
+[*.json]
+indent_size = 4
diff --git a/src/tools/rust-analyzer/.git-blame-ignore-revs b/src/tools/rust-analyzer/.git-blame-ignore-revs
new file mode 100644
index 000000000..a302e2378
--- /dev/null
+++ b/src/tools/rust-analyzer/.git-blame-ignore-revs
@@ -0,0 +1,8 @@
+# for this file to take effect make sure you use git ^2.23 and
+# add ignoreFile to your git configuration:
+# ```
+# git config --global blame.ignoreRevsFile .git-blame-ignore-revs
+# ```
+
+# prettier format
+f247090558c9ba3c551566eae5882b7ca865225f
diff --git a/src/tools/rust-analyzer/.vscode/extensions.json b/src/tools/rust-analyzer/.vscode/extensions.json
new file mode 100644
index 000000000..027eeabc4
--- /dev/null
+++ b/src/tools/rust-analyzer/.vscode/extensions.json
@@ -0,0 +1,9 @@
+{
+ // See http://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
+ // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
+
+ // List of extensions which should be recommended for users of this workspace.
+ "recommendations": ["vadimcn.vscode-lldb"],
+ // List of extensions recommended by VS Code that should not be recommended for users of this workspace.
+ "unwantedRecommendations": []
+}
diff --git a/src/tools/rust-analyzer/.vscode/launch.json b/src/tools/rust-analyzer/.vscode/launch.json
new file mode 100644
index 000000000..021b8f048
--- /dev/null
+++ b/src/tools/rust-analyzer/.vscode/launch.json
@@ -0,0 +1,131 @@
+{
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+
+ // NOTE: --disable-extensions
+ // Disable all installed extensions to increase performance of the debug instance
+ // and prevent potential conflicts with other installed extensions.
+
+ "version": "0.2.0",
+ "configurations": [
+ {
+ // Used for testing the extension with the installed LSP server.
+ "name": "Run Installed Extension",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ // "--user-data-dir=${workspaceFolder}/target/code",
+ "--disable-extensions",
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
+ ],
+ "outFiles": [
+ "${workspaceFolder}/editors/code/out/**/*.js"
+ ],
+ "preLaunchTask": "Build Extension",
+ "skipFiles": [
+ "<node_internals>/**/*.js"
+ ]
+ },
+ {
+ // Used for testing the extension with a local build of the LSP server (in `target/debug`).
+ "name": "Run Extension (Debug Build)",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--disable-extensions",
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
+ ],
+ "outFiles": [
+ "${workspaceFolder}/editors/code/out/**/*.js"
+ ],
+ "preLaunchTask": "Build Server and Extension",
+ "skipFiles": [
+ "<node_internals>/**/*.js"
+ ],
+ "env": {
+ "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/rust-analyzer"
+ }
+ },
+ {
+ // Used for testing the extension with a local build of the LSP server (in `target/release`).
+ "name": "Run Extension (Release Build)",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--disable-extensions",
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
+ ],
+ "outFiles": [
+ "${workspaceFolder}/editors/code/out/**/*.js"
+ ],
+ "preLaunchTask": "Build Server (Release) and Extension",
+ "skipFiles": [
+ "<node_internals>/**/*.js"
+ ],
+ "env": {
+ "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/release/rust-analyzer"
+ }
+ },
+ {
+ // Used for testing the extension with a local build of the LSP server (in `target/release`)
+            // with all other extensions loaded.
+ "name": "Run With Extensions",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--disable-extension", "matklad.rust-analyzer",
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
+ ],
+ "outFiles": [
+ "${workspaceFolder}/editors/code/out/**/*.js"
+ ],
+ "preLaunchTask": "Build Server (Release) and Extension",
+ "skipFiles": [
+ "<node_internals>/**/*.js"
+ ],
+ "env": {
+ "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/release/rust-analyzer"
+ }
+ },
+ {
+ // Used to attach LLDB to a running LSP server.
+ // NOTE: Might require root permissions. For this run:
+ //
+ // `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`
+ //
+ // Don't forget to set `debug = 2` in `Cargo.toml` before building the server
+
+ "name": "Attach To Server",
+ "type": "lldb",
+ "request": "attach",
+ "program": "${workspaceFolder}/target/debug/rust-analyzer",
+ "pid": "${command:pickMyProcess}",
+ "sourceLanguages": [
+ "rust"
+ ]
+ },
+ {
+ "name": "Run Unit Tests",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code",
+ "--extensionTestsPath=${workspaceFolder}/editors/code/out/tests/unit" ],
+ "sourceMaps": true,
+ "outFiles": [ "${workspaceFolder}/editors/code/out/tests/unit/**/*.js" ],
+ "preLaunchTask": "Pretest"
+ },
+ {
+ "name": "Win Attach to Server",
+ "type": "cppvsdbg",
+ "processId":"${command:pickProcess}",
+ "request": "attach"
+ }
+ ]
+}
diff --git a/src/tools/rust-analyzer/.vscode/tasks.json b/src/tools/rust-analyzer/.vscode/tasks.json
new file mode 100644
index 000000000..a25dff19e
--- /dev/null
+++ b/src/tools/rust-analyzer/.vscode/tasks.json
@@ -0,0 +1,67 @@
+// See https://go.microsoft.com/fwlink/?LinkId=733558
+// for the documentation about the tasks.json format
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "Build Extension in Background",
+ "group": "build",
+ "type": "npm",
+ "script": "watch",
+ "path": "editors/code/",
+ "problemMatcher": {
+ "base": "$tsc-watch",
+ "fileLocation": ["relative", "${workspaceFolder}/editors/code/"]
+ },
+ "isBackground": true,
+ },
+ {
+ "label": "Build Extension",
+ "group": "build",
+ "type": "npm",
+ "script": "build",
+ "path": "editors/code/",
+ "problemMatcher": {
+ "base": "$tsc",
+ "fileLocation": ["relative", "${workspaceFolder}/editors/code/"]
+ },
+ },
+ {
+ "label": "Build Server",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo build --package rust-analyzer",
+ "problemMatcher": "$rustc"
+ },
+ {
+ "label": "Build Server (Release)",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo build --release --package rust-analyzer",
+ "problemMatcher": "$rustc"
+ },
+ {
+ "label": "Pretest",
+ "group": "build",
+ "isBackground": false,
+ "type": "npm",
+ "script": "pretest",
+ "path": "editors/code/",
+ "problemMatcher": {
+ "base": "$tsc",
+ "fileLocation": ["relative", "${workspaceFolder}/editors/code/"]
+ }
+ },
+
+ {
+ "label": "Build Server and Extension",
+ "dependsOn": ["Build Server", "Build Extension"],
+ "problemMatcher": "$rustc"
+ },
+ {
+ "label": "Build Server (Release) and Extension",
+ "dependsOn": ["Build Server (Release)", "Build Extension"],
+ "problemMatcher": "$rustc"
+ }
+ ]
+}
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
new file mode 100644
index 000000000..703f0e5b8
--- /dev/null
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -0,0 +1,2101 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "always-assert"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbf688625d06217d5b1bb0ea9d9c44a1635fd0ee3534466388d18203174f4d11"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.58"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704"
+
+[[package]]
+name = "anymap"
+version = "1.0.0-beta.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72"
+
+[[package]]
+name = "arbitrary"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a7924531f38b1970ff630f03eb20a2fde69db5c590c93b0f3482e95dcc5fd60"
+
+[[package]]
+name = "arrayvec"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "backtrace"
+version = "0.3.65"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object 0.28.4",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "base-db"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "profile",
+ "rustc-hash",
+ "salsa",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tt",
+ "vfs",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "camino"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "869119e97797867fd90f5e22af7d0bd274bd4635ebb9eb68c04f3f513ae6c412"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo-platform"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36"
+dependencies = [
+ "camino",
+ "cargo-platform",
+ "semver",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "cc"
+version = "1.0.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+
+[[package]]
+name = "cfg"
+version = "0.0.0"
+dependencies = [
+ "arbitrary",
+ "derive_arbitrary",
+ "expect-test",
+ "mbe",
+ "oorandom",
+ "rustc-hash",
+ "syntax",
+ "tt",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "chalk-derive"
+version = "0.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83553c2ef7717e58aecdf42dd9e3c876229f5a1f35a16435b5ddc4addef81827"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "chalk-ir"
+version = "0.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dd42107d579d8ec2a5af20a8de62a37524a67bf6a4c0ff08a950068f0bfea91"
+dependencies = [
+ "bitflags",
+ "chalk-derive",
+ "lazy_static",
+]
+
+[[package]]
+name = "chalk-recursive"
+version = "0.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c444031541a76c13c145e76d91f1548e9feb2240e7f0c3e77879ceb694994f2d"
+dependencies = [
+ "chalk-derive",
+ "chalk-ir",
+ "chalk-solve",
+ "rustc-hash",
+ "tracing",
+]
+
+[[package]]
+name = "chalk-solve"
+version = "0.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c76f2db19c5e8a3d42340cf5b4d90b8c218750536fca35e2bb285ab6653c0bc8"
+dependencies = [
+ "chalk-derive",
+ "chalk-ir",
+ "ena",
+ "indexmap",
+ "itertools",
+ "petgraph",
+ "rustc-hash",
+ "tracing",
+]
+
+[[package]]
+name = "countme"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
+dependencies = [
+ "dashmap",
+ "once_cell",
+ "rustc-hash",
+]
+
+[[package]]
+name = "cov-mark"
+version = "2.0.0-pre.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a"
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crossbeam"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845"
+dependencies = [
+ "cfg-if",
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-epoch",
+ "crossbeam-queue",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "crossbeam-utils",
+ "memoffset",
+ "once_cell",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-queue"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+]
+
+[[package]]
+name = "dashmap"
+version = "5.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3495912c9c1ccf2e18976439f4443f3fee0fd61f424ff99fde6a66b15ecb448f"
+dependencies = [
+ "cfg-if",
+ "hashbrown",
+ "lock_api",
+ "parking_lot_core 0.9.3",
+]
+
+[[package]]
+name = "derive_arbitrary"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c9a577516173adb681466d517d39bd468293bc2c2a16439375ef0f35bba45f3d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "dissimilar"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c97b9233581d84b8e1e689cdd3a47b6f69770084fc246e86a7f78b0d9c1d4a5"
+
+[[package]]
+name = "dot"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a74b6c4d4a1cff5f454164363c16b72fa12463ca6b31f4b5f2035a65fa3d5906"
+
+[[package]]
+name = "drop_bomb"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1"
+
+[[package]]
+name = "either"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be"
+
+[[package]]
+name = "ena"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "expect-test"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d4661aca38d826eb7c72fe128e4238220616de4c0cc00db7bfc38e2e1364dd3"
+dependencies = [
+ "dissimilar",
+ "once_cell",
+]
+
+[[package]]
+name = "filetime"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "windows-sys 0.36.1",
+]
+
+[[package]]
+name = "fixedbitset"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
+
+[[package]]
+name = "flate2"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
+dependencies = [
+ "crc32fast",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "flycheck"
+version = "0.0.0"
+dependencies = [
+ "cargo_metadata",
+ "crossbeam-channel",
+ "jod-thread",
+ "paths",
+ "serde",
+ "serde_json",
+ "stdx",
+ "toolchain",
+ "tracing",
+]
+
+[[package]]
+name = "form_urlencoded"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191"
+dependencies = [
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "fs_extra"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394"
+
+[[package]]
+name = "fsevent-sys"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "fst"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
+
+[[package]]
+name = "gimli"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
+
+[[package]]
+name = "hashbrown"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3"
+
+[[package]]
+name = "heck"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hir"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "cfg",
+ "either",
+ "hir-def",
+ "hir-expand",
+ "hir-ty",
+ "itertools",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "tt",
+]
+
+[[package]]
+name = "hir-def"
+version = "0.0.0"
+dependencies = [
+ "anymap",
+ "arrayvec",
+ "base-db",
+ "bitflags",
+ "cfg",
+ "cov-mark",
+ "dashmap",
+ "drop_bomb",
+ "either",
+ "expect-test",
+ "fst",
+ "hashbrown",
+ "hir-expand",
+ "indexmap",
+ "itertools",
+ "la-arena",
+ "limit",
+ "mbe",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "hir-expand"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "cfg",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hashbrown",
+ "itertools",
+ "la-arena",
+ "limit",
+ "mbe",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "hir-ty"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "chalk-ir",
+ "chalk-recursive",
+ "chalk-solve",
+ "cov-mark",
+ "ena",
+ "expect-test",
+ "hir-def",
+ "hir-expand",
+ "itertools",
+ "la-arena",
+ "limit",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "scoped-tls",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tracing-subscriber",
+ "tracing-tree",
+ "typed-arena",
+]
+
+[[package]]
+name = "home"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "ide"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "cov-mark",
+ "crossbeam-channel",
+ "dot",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-assists",
+ "ide-completion",
+ "ide-db",
+ "ide-diagnostics",
+ "ide-ssr",
+ "itertools",
+ "oorandom",
+ "profile",
+ "pulldown-cmark",
+ "pulldown-cmark-to-cmark",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+ "toolchain",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "ide-assists"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "profile",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-completion"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "cov-mark",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "once_cell",
+ "profile",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-db"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "fst",
+ "hir",
+ "indexmap",
+ "itertools",
+ "limit",
+ "once_cell",
+ "parser",
+ "profile",
+ "rayon",
+ "rustc-hash",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+ "tracing",
+ "xshell",
+]
+
+[[package]]
+name = "ide-diagnostics"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "profile",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-ssr"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "parser",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "idna"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
+dependencies = [
+ "matches",
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "inotify"
+version = "0.9.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
+dependencies = [
+ "bitflags",
+ "inotify-sys",
+ "libc",
+]
+
+[[package]]
+name = "inotify-sys"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
+
+[[package]]
+name = "jod-thread"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
+
+[[package]]
+name = "kqueue"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d6112e8f37b59803ac47a42d14f1f3a59bbf72fc6857ffc5be455e28a691f8e"
+dependencies = [
+ "kqueue-sys",
+ "libc",
+]
+
+[[package]]
+name = "kqueue-sys"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587"
+dependencies = [
+ "bitflags",
+ "libc",
+]
+
+[[package]]
+name = "la-arena"
+version = "0.3.0"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.126"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
+
+[[package]]
+name = "libloading"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
+dependencies = [
+ "cfg-if",
+ "winapi",
+]
+
+[[package]]
+name = "libmimalloc-sys"
+version = "0.1.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11ca136052550448f55df7898c6dbe651c6b574fe38a0d9ea687a9f8088a2e2c"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "limit"
+version = "0.0.0"
+
+[[package]]
+name = "lock_api"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "lsp-server"
+version = "0.6.0"
+dependencies = [
+ "crossbeam-channel",
+ "log",
+ "lsp-types",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "lsp-types"
+version = "0.93.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70c74e2173b2b31f8655d33724b4b45ac13f439386f66290f539c22b144c2212"
+dependencies = [
+ "bitflags",
+ "serde",
+ "serde_json",
+ "serde_repr",
+ "url",
+]
+
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
+name = "matches"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
+
+[[package]]
+name = "mbe"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "parser",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "memmap2"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5172b50c23043ff43dd53e51392f36519d9b35a8f3a410d30ece5d1aedd58ae"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "memoffset"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "mimalloc"
+version = "0.1.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f64ad83c969af2e732e907564deb0d0ed393cec4af80776f77dd77a1a427698"
+dependencies = [
+ "libmimalloc-sys",
+]
+
+[[package]]
+name = "miniz_oxide"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "mio"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
+dependencies = [
+ "libc",
+ "log",
+ "wasi",
+ "windows-sys 0.36.1",
+]
+
+[[package]]
+name = "miow"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7377f7792b3afb6a3cba68daa54ca23c032137010460d667fda53a8d66be00e"
+dependencies = [
+ "windows-sys 0.28.0",
+]
+
+[[package]]
+name = "notify"
+version = "5.0.0-pre.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "553f9844ad0b0824605c20fb55a661679782680410abfb1a8144c2e7e437e7a7"
+dependencies = [
+ "bitflags",
+ "crossbeam-channel",
+ "filetime",
+ "fsevent-sys",
+ "inotify",
+ "kqueue",
+ "libc",
+ "mio",
+ "walkdir",
+ "winapi",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "object"
+version = "0.28.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "object"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
+
+[[package]]
+name = "oorandom"
+version = "11.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+
+[[package]]
+name = "parking_lot"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
+dependencies = [
+ "instant",
+ "lock_api",
+ "parking_lot_core 0.8.5",
+]
+
+[[package]]
+name = "parking_lot"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+dependencies = [
+ "lock_api",
+ "parking_lot_core 0.9.3",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
+dependencies = [
+ "cfg-if",
+ "instant",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "winapi",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-sys 0.36.1",
+]
+
+[[package]]
+name = "parser"
+version = "0.0.0"
+dependencies = [
+ "drop_bomb",
+ "expect-test",
+ "limit",
+ "rustc-ap-rustc_lexer",
+ "sourcegen",
+]
+
+[[package]]
+name = "paste"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc"
+
+[[package]]
+name = "paths"
+version = "0.0.0"
+
+[[package]]
+name = "percent-encoding"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
+
+[[package]]
+name = "perf-event"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5396562cd2eaa828445d6d34258ae21ee1eb9d40fe626ca7f51c8dccb4af9d66"
+dependencies = [
+ "libc",
+ "perf-event-open-sys",
+]
+
+[[package]]
+name = "perf-event-open-sys"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce9bedf5da2c234fdf2391ede2b90fabf585355f33100689bc364a3ea558561a"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "petgraph"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
+dependencies = [
+ "fixedbitset",
+ "indexmap",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
+
+[[package]]
+name = "proc-macro-api"
+version = "0.0.0"
+dependencies = [
+ "memmap2",
+ "object 0.29.0",
+ "paths",
+ "profile",
+ "serde",
+ "serde_json",
+ "snap",
+ "stdx",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "proc-macro-srv"
+version = "0.0.0"
+dependencies = [
+ "crossbeam",
+ "expect-test",
+ "libloading",
+ "mbe",
+ "memmap2",
+ "object 0.29.0",
+ "paths",
+ "proc-macro-api",
+ "proc-macro-test",
+ "tt",
+]
+
+[[package]]
+name = "proc-macro-srv-cli"
+version = "0.0.0"
+dependencies = [
+ "proc-macro-srv",
+]
+
+[[package]]
+name = "proc-macro-test"
+version = "0.0.0"
+dependencies = [
+ "cargo_metadata",
+ "proc-macro-test-impl",
+ "toolchain",
+]
+
+[[package]]
+name = "proc-macro-test-impl"
+version = "0.0.0"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "profile"
+version = "0.0.0"
+dependencies = [
+ "cfg-if",
+ "countme",
+ "la-arena",
+ "libc",
+ "once_cell",
+ "perf-event",
+ "tikv-jemalloc-ctl",
+ "winapi",
+]
+
+[[package]]
+name = "project-model"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "base-db",
+ "cargo_metadata",
+ "cfg",
+ "expect-test",
+ "la-arena",
+ "paths",
+ "profile",
+ "rustc-hash",
+ "semver",
+ "serde",
+ "serde_json",
+ "stdx",
+ "toolchain",
+ "tracing",
+]
+
+[[package]]
+name = "pulldown-cmark"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6"
+dependencies = [
+ "bitflags",
+ "memchr",
+ "unicase",
+]
+
+[[package]]
+name = "pulldown-cmark-to-cmark"
+version = "10.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1353ac408192fa925228d3e60ff746167d03f4f7e54835d78ef79e08225d913"
+dependencies = [
+ "pulldown-cmark",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rayon"
+version = "1.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+dependencies = [
+ "autocfg",
+ "crossbeam-deque",
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "num_cpus",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64"
+
+[[package]]
+name = "rowan"
+version = "0.15.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e88acf7b001007e9e8c989fe7449f6601d909e5dd2c56399fc158977ad6c56e8"
+dependencies = [
+ "countme",
+ "hashbrown",
+ "memoffset",
+ "rustc-hash",
+ "text-size",
+]
+
+[[package]]
+name = "rust-analyzer"
+version = "0.0.0"
+dependencies = [
+ "always-assert",
+ "anyhow",
+ "cfg",
+ "crossbeam-channel",
+ "dissimilar",
+ "expect-test",
+ "flycheck",
+ "hir",
+ "hir-def",
+ "hir-ty",
+ "ide",
+ "ide-db",
+ "ide-ssr",
+ "itertools",
+ "jod-thread",
+ "lsp-server",
+ "lsp-types",
+ "mbe",
+ "mimalloc",
+ "num_cpus",
+ "oorandom",
+ "parking_lot 0.12.1",
+ "proc-macro-api",
+ "proc-macro-srv",
+ "profile",
+ "project-model",
+ "rayon",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "threadpool",
+ "tikv-jemallocator",
+ "toolchain",
+ "tracing",
+ "tracing-log",
+ "tracing-subscriber",
+ "tracing-tree",
+ "tt",
+ "vfs",
+ "vfs-notify",
+ "winapi",
+ "xflags",
+ "xshell",
+]
+
+[[package]]
+name = "rustc-ap-rustc_lexer"
+version = "725.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f950742ef8a203aa7661aad3ab880438ddeb7f95d4b837c30d65db1a2c5df68e"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
+
+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
+[[package]]
+name = "ryu"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
+
+[[package]]
+name = "salsa"
+version = "0.17.0-pre.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b223dccb46c32753144d0b51290da7230bb4aedcd8379d6b4c9a474c18bf17a"
+dependencies = [
+ "crossbeam-utils",
+ "indexmap",
+ "lock_api",
+ "log",
+ "oorandom",
+ "parking_lot 0.11.2",
+ "rustc-hash",
+ "salsa-macros",
+ "smallvec",
+]
+
+[[package]]
+name = "salsa-macros"
+version = "0.17.0-pre.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac6c2e352df550bf019da7b16164ed2f7fa107c39653d1311d1bba42d1582ff7"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scoped-tls"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "semver"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.138"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1578c6245786b9d168c5447eeacfb96856573ca56c9d68fdcf394be134882a47"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.138"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "023e9b1467aef8a10fb88f25611870ada9800ef7e22afce356bb0d2387b6f27c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.82"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
+dependencies = [
+ "indexmap",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_repr"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2ad84e47328a31223de7fed7a4f5087f2d6ddfe586cf3ca25b7a165bc0a5aed"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
+
+[[package]]
+name = "smol_str"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "snap"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"
+
+[[package]]
+name = "sourcegen"
+version = "0.0.0"
+dependencies = [
+ "xshell",
+]
+
+[[package]]
+name = "stdx"
+version = "0.0.0"
+dependencies = [
+ "always-assert",
+ "backtrace",
+ "libc",
+ "miow",
+ "winapi",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.98"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
+]
+
+[[package]]
+name = "syntax"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "expect-test",
+ "indexmap",
+ "itertools",
+ "once_cell",
+ "parser",
+ "proc-macro2",
+ "profile",
+ "quote",
+ "rayon",
+ "rowan",
+ "rustc-ap-rustc_lexer",
+ "rustc-hash",
+ "smol_str",
+ "sourcegen",
+ "stdx",
+ "test-utils",
+ "text-edit",
+ "ungrammar",
+]
+
+[[package]]
+name = "test-utils"
+version = "0.0.0"
+dependencies = [
+ "dissimilar",
+ "profile",
+ "rustc-hash",
+ "stdx",
+ "text-size",
+]
+
+[[package]]
+name = "text-edit"
+version = "0.0.0"
+dependencies = [
+ "itertools",
+ "text-size",
+]
+
+[[package]]
+name = "text-size"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
+
+[[package]]
+name = "thread_local"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
+name = "tikv-jemalloc-ctl"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e37706572f4b151dff7a0146e040804e9c26fe3a3118591112f05cf12a4216c1"
+dependencies = [
+ "libc",
+ "paste",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
+name = "tikv-jemalloc-sys"
+version = "0.5.1+5.3.0-patched"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "931e876f91fed0827f863a2d153897790da0b24d882c721a79cb3beb0b903261"
+dependencies = [
+ "cc",
+ "fs_extra",
+ "libc",
+]
+
+[[package]]
+name = "tikv-jemallocator"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "20612db8a13a6c06d57ec83953694185a367e16945f66565e8028d2c0bd76979"
+dependencies = [
+ "libc",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
+
+[[package]]
+name = "toolchain"
+version = "0.0.0"
+dependencies = [
+ "home",
+]
+
+[[package]]
+name = "tracing"
+version = "0.1.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160"
+dependencies = [
+ "cfg-if",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
+dependencies = [
+ "lazy_static",
+ "log",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a713421342a5a666b7577783721d3117f1b69a393df803ee17bb73b1e122a59"
+dependencies = [
+ "matchers",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
+name = "tracing-tree"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d07e90b329c621ade432823988574e820212648aa40e7a2497777d58de0fb453"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "tracing-core",
+ "tracing-log",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "tt"
+version = "0.0.0"
+dependencies = [
+ "smol_str",
+ "stdx",
+]
+
+[[package]]
+name = "typed-arena"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae"
+
+[[package]]
+name = "ungrammar"
+version = "1.16.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
+
+[[package]]
+name = "url"
+version = "2.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "matches",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "vfs"
+version = "0.0.0"
+dependencies = [
+ "fst",
+ "indexmap",
+ "paths",
+ "rustc-hash",
+]
+
+[[package]]
+name = "vfs-notify"
+version = "0.0.0"
+dependencies = [
+ "crossbeam-channel",
+ "jod-thread",
+ "notify",
+ "paths",
+ "tracing",
+ "vfs",
+ "walkdir",
+]
+
+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-sys"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82ca39602d5cbfa692c4b67e3bcbb2751477355141c1ed434c94da4186836ff6"
+dependencies = [
+ "windows_aarch64_msvc 0.28.0",
+ "windows_i686_gnu 0.28.0",
+ "windows_i686_msvc 0.28.0",
+ "windows_x86_64_gnu 0.28.0",
+ "windows_x86_64_msvc 0.28.0",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
+dependencies = [
+ "windows_aarch64_msvc 0.36.1",
+ "windows_i686_gnu 0.36.1",
+ "windows_i686_msvc 0.36.1",
+ "windows_x86_64_gnu 0.36.1",
+ "windows_x86_64_msvc 0.36.1",
+]
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "52695a41e536859d5308cc613b4a022261a274390b25bd29dfff4bf08505f3c2"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f54725ac23affef038fecb177de6c9bf065787c2f432f79e3c373da92f3e1d8a"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51d5158a43cc43623c0729d1ad6647e62fa384a3d135fd15108d37c683461f64"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc31f409f565611535130cfe7ee8e6655d3fa99c1c61013981e491921b5ce954"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f2b8c7cbd3bfdddd9ab98769f9746a7fad1bca236554cd032b78d768bc0e89f"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
+
+[[package]]
+name = "write-json"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06069a848f95fceae3e5e03c0ddc8cb78452b56654ee0c8e68f938cf790fb9e3"
+
+[[package]]
+name = "xflags"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f14fe1ed41a5a2b5ef3f565586c4a8a559ee55d3953faab360a771135bdee00"
+dependencies = [
+ "xflags-macros",
+]
+
+[[package]]
+name = "xflags-macros"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45d11d5fc2a97287eded8b170ca80533b3c42646dd7fa386a5eb045817921022"
+
+[[package]]
+name = "xshell"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d47097dc5c85234b1e41851b3422dd6d19b3befdd35b4ae5ce386724aeca981"
+dependencies = [
+ "xshell-macros",
+]
+
+[[package]]
+name = "xshell-macros"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88301b56c26dd9bf5c43d858538f82d6f3f7764767defbc5d34e59459901c41a"
+
+[[package]]
+name = "xtask"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "flate2",
+ "write-json",
+ "xflags",
+ "xshell",
+]
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
new file mode 100644
index 000000000..6b68ca823
--- /dev/null
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -0,0 +1,33 @@
+[workspace]
+members = ["xtask/", "lib/*", "crates/*"]
+exclude = ["crates/proc-macro-test/imp"]
+
+[profile.dev]
+# Disabling debug info speeds up builds a bunch,
+# and we don't rely on it for debugging that much.
+debug = 0
+
+[profile.dev.package]
+# These speed up local tests.
+rowan.opt-level = 3
+rustc-hash.opt-level = 3
+smol_str.opt-level = 3
+text-size.opt-level = 3
+# This speeds up `cargo xtask dist`.
+miniz_oxide.opt-level = 3
+
+[profile.release]
+incremental = true
+# Set this to 1 or 2 to get more useful backtraces in debugger.
+debug = 0
+
+[patch.'crates-io']
+# rowan = { path = "../rowan" }
+
+# chalk-solve = { path = "../chalk/chalk-solve" }
+# chalk-ir = { path = "../chalk/chalk-ir" }
+# chalk-recursive = { path = "../chalk/chalk-recursive" }
+
+# ungrammar = { path = "../ungrammar" }
+
+# salsa = { path = "../salsa" }
diff --git a/src/tools/rust-analyzer/LICENSE-APACHE b/src/tools/rust-analyzer/LICENSE-APACHE
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/src/tools/rust-analyzer/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/src/tools/rust-analyzer/LICENSE-MIT b/src/tools/rust-analyzer/LICENSE-MIT
new file mode 100644
index 000000000..31aa79387
--- /dev/null
+++ b/src/tools/rust-analyzer/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/src/tools/rust-analyzer/PRIVACY.md b/src/tools/rust-analyzer/PRIVACY.md
new file mode 100644
index 000000000..89e252be7
--- /dev/null
+++ b/src/tools/rust-analyzer/PRIVACY.md
@@ -0,0 +1 @@
+See the [Privacy](https://rust-analyzer.github.io/manual.html#privacy) section of the user manual.
diff --git a/src/tools/rust-analyzer/README.md b/src/tools/rust-analyzer/README.md
new file mode 100644
index 000000000..8bb0517ed
--- /dev/null
+++ b/src/tools/rust-analyzer/README.md
@@ -0,0 +1,49 @@
+<p align="center">
+ <img
+ src="https://raw.githubusercontent.com/rust-analyzer/rust-analyzer/master/assets/logo-wide.svg"
+ alt="rust-analyzer logo">
+</p>
+
+rust-analyzer is a modular compiler frontend for the Rust language.
+It is a part of a larger rls-2.0 effort to create excellent IDE support for Rust.
+
+## Quick Start
+
+https://rust-analyzer.github.io/manual.html#installation
+
+## Documentation
+
+If you want to **contribute** to rust-analyzer or are just curious about how
+things work under the hood, check the [./docs/dev](./docs/dev) folder.
+
+If you want to **use** rust-analyzer's language server with your editor of
+choice, check [the manual](https://rust-analyzer.github.io/manual.html).
+It also contains some tips & tricks to help you be more productive when using rust-analyzer.
+
+## Security and Privacy
+
+See the corresponding sections of [the manual](https://rust-analyzer.github.io/manual.html#security).
+
+## Communication
+
+For usage and troubleshooting requests, please use "IDEs and Editors" category of the Rust forum:
+
+https://users.rust-lang.org/c/ide/14
+
+For questions about development and implementation, join rust-analyzer working group on Zulip:
+
+https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
+
+## Quick Links
+
+* Website: https://rust-analyzer.github.io/
+* Metrics: https://rust-analyzer.github.io/metrics/
+* API docs: https://rust-lang.github.io/rust-analyzer/ide/
+* Changelog: https://rust-analyzer.github.io/thisweek
+
+## License
+
+rust-analyzer is primarily distributed under the terms of both the MIT
+license and the Apache License (Version 2.0).
+
+See LICENSE-APACHE and LICENSE-MIT for details.
diff --git a/src/tools/rust-analyzer/assets/logo-square.svg b/src/tools/rust-analyzer/assets/logo-square.svg
new file mode 100644
index 000000000..fe1c1fa02
--- /dev/null
+++ b/src/tools/rust-analyzer/assets/logo-square.svg
@@ -0,0 +1,88 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="24mm"
+ height="24mm"
+ viewBox="0 0 23.999999 24.000001"
+ version="1.1"
+ id="svg8"
+ inkscape:version="0.92.4 5da689c313, 2019-01-14"
+ sodipodi:docname="ra.svg">
+ <defs
+ id="defs2" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="3.959798"
+ inkscape:cx="-31.307418"
+ inkscape:cy="43.570897"
+ inkscape:document-units="mm"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ inkscape:window-width="1920"
+ inkscape:window-height="1006"
+ inkscape:window-x="0"
+ inkscape:window-y="0"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata5">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Ebene 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-48.088531,-60.285631)">
+ <g
+ aria-label="r."
+ style="font-style:normal;font-weight:normal;font-size:10.58333302px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:#ffffff;stroke-width:0.5px;paint-order:stroke"
+ id="text3715">
+ <path
+ d="m 55.00077,63.442544 c -0.588718,-0.01704 -1.180779,0.251078 -1.524352,0.735633 -0.163942,0.198364 -0.296316,0.49938 -0.394953,0.683311 -0.101099,-0.416482 -0.202199,-0.832964 -0.303298,-1.249445 -0.671966,0 -1.343932,0 -2.015897,0 0,0.370348 0,0.740695 0,1.111043 0.246841,0 0.493682,0 0.740523,0 0,1.128958 0,2.257916 0,3.386874 -0.246841,0 -0.493682,0 -0.740523,0 0,0.373792 0,0.747585 0,1.121378 1.174777,0 2.349555,0 3.524332,0 0,-0.373793 0,-0.747586 0,-1.121378 -0.37052,0 -0.74104,0 -1.11156,0 0,-0.53623 0,-1.072458 0,-1.608688 0.190282,-0.586609 0.512347,-1.195617 1.085749,-1.482555 0.177384,-0.100666 0.369603,-0.139942 0.305897,0.108125 0,0.278138 0,0.556274 0,0.834412 0.349333,0 0.698666,0 1.047998,0 0.104042,-0.783071 0.208084,-1.566141 0.312126,-2.349211 -0.293304,-0.117433 -0.610556,-0.17161 -0.926042,-0.169499 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold'"
+ id="path817" />
+ <circle
+ cx="59.49345"
+ cy="68.231422"
+ r="1.1800417"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold'"
+ id="path819" />
+ </g>
+ <rect
+ style="fill:#30363b;fill-opacity:1;stroke:#20262b;stroke-width:0.39559129;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="rect3721"
+ width="10.604409"
+ height="12.604408"
+ x="61.286327"
+ y="71.483421" />
+ <g
+ aria-label="a"
+ style="font-style:normal;font-weight:normal;font-size:10.58333302px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.26458332"
+ id="text3719">
+ <path
+ d="m 69.065615,79.143583 q 0,0.3175 0.08467,0.460375 0.08996,0.142875 0.28575,0.216958 l -0.343958,1.100667 q -0.497417,-0.04762 -0.841375,-0.216959 -0.338667,-0.174625 -0.534459,-0.523875 -0.322791,0.386292 -0.825499,0.576792 -0.502709,0.185208 -1.026584,0.185208 -0.867833,0 -1.386416,-0.492125 -0.513292,-0.497416 -0.513292,-1.275291 0,-0.915459 0.714375,-1.412875 0.719667,-0.497417 2.021417,-0.497417 h 0.756708 v -0.211667 q 0,-0.439208 -0.28575,-0.650875 -0.280458,-0.211666 -0.8255,-0.211666 -0.269875,0 -0.693208,0.07937 -0.423334,0.07408 -0.846667,0.216958 l -0.386292,-1.11125 q 0.545042,-0.206375 1.132417,-0.312208 0.592667,-0.105834 1.058333,-0.105834 1.254125,0 1.852083,0.513292 0.60325,0.508 0.60325,1.471083 z m -2.624666,0.60325 q 0.269875,0 0.566208,-0.15875 0.296334,-0.164042 0.449792,-0.460375 v -0.910167 h -0.41275 q -0.6985,0 -1.026583,0.216958 -0.328084,0.211667 -0.328084,0.624417 0,0.322792 0.195792,0.508 0.201083,0.179917 0.555625,0.179917 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path822" />
+ </g>
+ </g>
+</svg>
diff --git a/src/tools/rust-analyzer/assets/logo-wide.svg b/src/tools/rust-analyzer/assets/logo-wide.svg
new file mode 100644
index 000000000..c5fb55b36
--- /dev/null
+++ b/src/tools/rust-analyzer/assets/logo-wide.svg
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="85.797134mm"
+ height="24.747536mm"
+ viewBox="0 0 85.797134 24.747536"
+ version="1.1"
+ id="svg8"
+ inkscape:version="0.92.4 5da689c313, 2019-01-14"
+ sodipodi:docname="rust analyzer.svg">
+ <defs
+ id="defs2" />
+ <style>
+ #text3715 {
+ fill: #000000;
+ stroke: #ffffff;
+ stroke-width: 0.5;
+ paint-order: stroke;
+ }
+ @media (prefers-color-scheme: dark) {
+ #text3715 {
+ fill: #ffffff;
+ stroke: none;
+ }
+ }
+ </style>
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="1.979899"
+ inkscape:cx="64.938033"
+ inkscape:cy="-10.231391"
+ inkscape:document-units="mm"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ inkscape:window-width="1920"
+ inkscape:window-height="1006"
+ inkscape:window-x="0"
+ inkscape:window-y="0"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata5">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Ebene 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-29.534624,-59.398722)">
+ <g
+ aria-label="rust."
+ style="font-style:normal;font-weight:normal;font-size:10.58333302px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px"
+ id="text3715">
+ <path
+ d="m 35.95077,62.913236 c -0.588719,-0.01704 -1.180779,0.251078 -1.524352,0.735632 -0.163943,0.198364 -0.296317,0.499384 -0.394954,0.683311 -0.101099,-0.416482 -0.202198,-0.832963 -0.303298,-1.249444 -0.671965,0 -1.343931,0 -2.015897,0 0,0.370348 0,0.740695 0,1.111043 0.246841,0 0.493682,0 0.740523,0 0,1.128958 0,2.257916 0,3.386873 -0.246841,0 -0.493682,0 -0.740523,0 0,0.373965 0,0.747931 0,1.121896 1.174777,0 2.349555,0 3.524332,0 0,-0.373965 0,-0.747931 0,-1.121896 -0.37052,0 -0.74104,0 -1.11156,0 0,-0.536229 0,-1.072458 0,-1.608687 0.190283,-0.586609 0.512347,-1.195617 1.085749,-1.482555 0.177393,-0.100673 0.369604,-0.139934 0.305898,0.108135 0,0.278134 0,0.556268 0,0.834401 0.349332,0 0.698665,0 1.047998,0 0.104041,-0.78307 0.208084,-1.56614 0.312125,-2.34921 -0.293304,-0.117432 -0.610556,-0.17161 -0.926041,-0.169499 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path817" />
+ <path
+ d="m 39.681433,63.082627 v 3.847042 q 0,0.407458 0.148167,0.560917 0.153458,0.153458 0.423333,0.153458 0.259292,0 0.518583,-0.164042 0.259292,-0.164041 0.433917,-0.4445 V 63.082627 H 42.8776 v 5.61975 h -1.4605 l -0.07408,-0.656166 q -0.28575,0.41275 -0.751416,0.624416 -0.465667,0.211667 -0.973667,0.211667 -0.814917,0 -1.211792,-0.470958 -0.396875,-0.47625 -0.396875,-1.275292 v -4.053417 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path819" />
+ <path
+ d="m 46.6135,67.686377 q 0.418042,0 0.672042,-0.132291 0.259291,-0.132292 0.259291,-0.396875 0,-0.179917 -0.100541,-0.301625 -0.100542,-0.121709 -0.386292,-0.232834 -0.28575,-0.111125 -0.846667,-0.264583 -0.513291,-0.137583 -0.910166,-0.34925 -0.391584,-0.211667 -0.613834,-0.545042 -0.216958,-0.333375 -0.216958,-0.830791 0,-0.502709 0.280458,-0.894292 0.280459,-0.391583 0.8255,-0.613833 0.545042,-0.227542 1.3335,-0.227542 0.751417,0 1.307042,0.195792 0.560917,0.1905 0.968375,0.486833 l -0.66675,0.98425 q -0.34925,-0.216958 -0.751417,-0.343958 -0.402166,-0.132292 -0.809625,-0.132292 -0.407458,0 -0.608541,0.111125 -0.195792,0.105833 -0.195792,0.322792 0,0.142875 0.100542,0.248708 0.105833,0.100542 0.391583,0.211667 0.28575,0.105833 0.836083,0.264583 0.545042,0.153458 0.947209,0.354542 0.407458,0.201083 0.629708,0.53975 0.22225,0.333375 0.22225,0.883708 0,0.613833 -0.365125,1.031875 -0.365125,0.41275 -0.968375,0.619125 -0.60325,0.206375 -1.322917,0.206375 -0.814916,0 -1.439333,-0.232833 -0.624417,-0.232834 -1.063625,-0.613834 L 44.9625,67.093711 q 0.328083,0.254 0.740833,0.423333 0.418042,0.169333 0.910167,0.169333 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path821" />
+ <path
+ d="m 55.768067,68.374294 q -0.328083,0.211667 -0.79375,0.359833 -0.465666,0.148167 -1.047749,0.148167 -1.100667,0 -1.635125,-0.560917 -0.534459,-0.566208 -0.534459,-1.534583 v -2.550583 h -1.180041 v -1.153584 h 1.180041 v -1.217083 l 1.672167,-0.201083 v 1.418166 h 1.80975 l -0.164042,1.153584 h -1.645708 v 2.550583 q 0,0.418042 0.1905,0.597958 0.1905,0.179917 0.608541,0.179917 0.296334,0 0.53975,-0.06879 0.248709,-0.07408 0.4445,-0.185208 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path823" />
+ <circle
+ cx="59.493385"
+ cy="67.702255"
+ r="1.1799999"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path825" />
+ </g>
+ <rect
+ style="fill:#30363b;fill-opacity:1;stroke:#20262b;stroke-width:0.39205828;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="rect3721"
+ width="54.407944"
+ height="12.607942"
+ x="60.730652"
+ y="71.342285" />
+ <g
+ aria-label="analyzer"
+ style="font-style:normal;font-weight:normal;font-size:10.58333302px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.26458332"
+ id="text3719">
+ <path
+ d="m 68.007281,79.143583 q 0,0.3175 0.08467,0.460375 0.08996,0.142875 0.28575,0.216958 l -0.343959,1.100667 q -0.497416,-0.04762 -0.841375,-0.216959 -0.338666,-0.174625 -0.534458,-0.523875 -0.322792,0.386292 -0.8255,0.576792 -0.502708,0.185208 -1.026583,0.185208 -0.867833,0 -1.386417,-0.492125 -0.513291,-0.497416 -0.513291,-1.275291 0,-0.915459 0.714375,-1.412875 0.719666,-0.497417 2.021416,-0.497417 h 0.756708 v -0.211667 q 0,-0.439208 -0.28575,-0.650875 -0.280458,-0.211666 -0.825499,-0.211666 -0.269875,0 -0.693209,0.07937 -0.423333,0.07408 -0.846666,0.216958 l -0.386292,-1.11125 q 0.545042,-0.206375 1.132417,-0.312208 0.592666,-0.105834 1.058333,-0.105834 1.254125,0 1.852083,0.513292 0.60325,0.508 0.60325,1.471083 z m -2.624666,0.60325 q 0.269875,0 0.566208,-0.15875 0.296333,-0.164042 0.449791,-0.460375 v -0.910167 h -0.412749 q -0.6985,0 -1.026584,0.216958 -0.328083,0.211667 -0.328083,0.624417 0,0.322792 0.195792,0.508 0.201083,0.179917 0.555625,0.179917 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path828" />
+ <path
+ d="m 69.626515,80.762833 v -5.61975 h 1.4605 l 0.116417,0.650875 q 0.375708,-0.423334 0.79375,-0.629709 0.418041,-0.206375 0.9525,-0.206375 0.719666,0 1.132416,0.439209 0.41275,0.439208 0.41275,1.23825 v 4.1275 h -1.672166 v -3.645959 q 0,-0.343958 -0.04762,-0.545041 -0.04763,-0.206375 -0.169334,-0.291042 -0.116416,-0.08996 -0.322791,-0.08996 -0.174625,0 -0.343959,0.07937 -0.164041,0.07408 -0.322791,0.216958 -0.15875,0.142875 -0.3175,0.343958 v 3.931709 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path830" />
+ <path
+ d="m 80.707248,79.143583 q 0,0.3175 0.08467,0.460375 0.08996,0.142875 0.28575,0.216958 l -0.343958,1.100667 q -0.497417,-0.04762 -0.841375,-0.216959 -0.338667,-0.174625 -0.534458,-0.523875 -0.322792,0.386292 -0.8255,0.576792 -0.502709,0.185208 -1.026584,0.185208 -0.867833,0 -1.386416,-0.492125 -0.513292,-0.497416 -0.513292,-1.275291 0,-0.915459 0.714375,-1.412875 0.719667,-0.497417 2.021417,-0.497417 h 0.756708 v -0.211667 q 0,-0.439208 -0.28575,-0.650875 -0.280458,-0.211666 -0.8255,-0.211666 -0.269875,0 -0.693208,0.07937 -0.423334,0.07408 -0.846667,0.216958 l -0.386292,-1.11125 q 0.545042,-0.206375 1.132417,-0.312208 0.592667,-0.105834 1.058333,-0.105834 1.254125,0 1.852084,0.513292 0.603249,0.508 0.603249,1.471083 z m -2.624666,0.60325 q 0.269875,0 0.566208,-0.15875 0.296334,-0.164042 0.449792,-0.460375 v -0.910167 h -0.41275 q -0.6985,0 -1.026583,0.216958 -0.328084,0.211667 -0.328084,0.624417 0,0.322792 0.195792,0.508 0.201083,0.179917 0.555625,0.179917 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path832" />
+ <path
+ d="m 85.258066,72.91 v 6.085416 q 0,0.338667 0.1905,0.486833 0.195792,0.142875 0.534458,0.142875 0.216959,0 0.418042,-0.04762 0.201083,-0.05292 0.375708,-0.121708 l 0.402167,1.116542 q -0.28575,0.148166 -0.687917,0.259291 -0.402166,0.111125 -0.936625,0.111125 -1.016,0 -1.49225,-0.582083 -0.47625,-0.587375 -0.47625,-1.571625 V 74.053 H 81.929608 V 72.91 Z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path834" />
+ <path
+ d="m 93.989299,75.143083 -1.87325,5.61975 q -0.248708,0.746125 -0.661458,1.248833 -0.407458,0.508 -1.021292,0.783167 -0.608541,0.275166 -1.465791,0.322791 l -0.1905,-1.180041 q 0.555625,-0.06879 0.894291,-0.206375 0.343959,-0.137584 0.550334,-0.375709 0.211666,-0.232833 0.365125,-0.592666 h -0.5715 l -1.783292,-5.61975 h 1.767417 l 1.090083,4.550833 1.185333,-4.550833 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path836" />
+ <path
+ d="m 97.021408,79.498124 h 2.815166 l -0.15875,1.264709 h -4.630208 v -1.180042 l 2.788708,-3.180292 h -2.555875 v -1.259416 h 4.503208 v 1.17475 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path838" />
+ <path
+ d="m 102.82635,78.439791 q 0.0582,0.460375 0.23813,0.746125 0.1852,0.280458 0.47095,0.41275 0.28575,0.127 0.6403,0.127 0.38629,0 0.74612,-0.127 0.35983,-0.127 0.69321,-0.338667 l 0.67204,0.910167 q -0.39687,0.338667 -0.96308,0.555625 -0.56092,0.216958 -1.28059,0.216958 -0.96308,0 -1.61395,-0.381 -0.65088,-0.386291 -0.97896,-1.058333 -0.32809,-0.672042 -0.32809,-1.545167 0,-0.830791 0.3175,-1.508125 0.3175,-0.682625 0.92605,-1.084791 0.61383,-0.407459 1.49754,-0.407459 0.80433,0 1.39171,0.343959 0.59266,0.343958 0.91545,0.989541 0.32809,0.645584 0.32809,1.550459 0,0.142875 -0.0106,0.306916 -0.005,0.164042 -0.0212,0.291042 z m 1.03717,-2.360083 q -0.44979,0 -0.72496,0.322791 -0.27517,0.322792 -0.33338,1.031875 h 2.06375 q -0.005,-0.613833 -0.23812,-0.98425 -0.23283,-0.370416 -0.76729,-0.370416 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path840" />
+ <path
+ d="m 107.77933,80.762833 v -1.121834 h 0.74084 v -3.386666 h -0.74084 v -1.11125 h 2.01613 l 0.30692,1.264708 q 0.30162,-0.724958 0.76729,-1.0795 0.47096,-0.354542 1.14829,-0.354542 0.28575,0 0.508,0.04762 0.22225,0.04233 0.41804,0.121709 l -0.508,1.381125 q -0.15346,-0.04233 -0.30692,-0.0635 -0.15345,-0.02117 -0.33866,-0.02117 -0.55034,0 -0.96838,0.449792 -0.41275,0.449791 -0.62971,1.143 v 1.608666 h 1.11125 v 1.121834 z m 3.80471,-3.27025 v -1.640417 l 0.24871,-0.709083 h 1.11125 l -0.31221,2.3495 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path842" />
+ </g>
+ </g>
+</svg>
diff --git a/src/tools/rust-analyzer/bench_data/glorious_old_parser b/src/tools/rust-analyzer/bench_data/glorious_old_parser
new file mode 100644
index 000000000..7e900dfeb
--- /dev/null
+++ b/src/tools/rust-analyzer/bench_data/glorious_old_parser
@@ -0,0 +1,8562 @@
+use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
+use crate::ast::{GenericBound, TraitBoundModifier};
+use crate::ast::Unsafety;
+use crate::ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
+use crate::ast::Block;
+use crate::ast::{BlockCheckMode, CaptureBy, Movability};
+use crate::ast::{Constness, Crate};
+use crate::ast::Defaultness;
+use crate::ast::EnumDef;
+use crate::ast::{Expr, ExprKind, RangeLimits};
+use crate::ast::{Field, FnDecl, FnHeader};
+use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
+use crate::ast::{GenericParam, GenericParamKind};
+use crate::ast::GenericArg;
+use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
+use crate::ast::{Label, Lifetime, Lit, LitKind};
+use crate::ast::Local;
+use crate::ast::MacStmtStyle;
+use crate::ast::{Mac, Mac_, MacDelimiter};
+use crate::ast::{MutTy, Mutability};
+use crate::ast::{Pat, PatKind, PathSegment};
+use crate::ast::{PolyTraitRef, QSelf};
+use crate::ast::{Stmt, StmtKind};
+use crate::ast::{VariantData, StructField};
+use crate::ast::StrStyle;
+use crate::ast::SelfKind;
+use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
+use crate::ast::{Ty, TyKind, TypeBinding, GenericBounds};
+use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
+use crate::ast::{UseTree, UseTreeKind};
+use crate::ast::{BinOpKind, UnOp};
+use crate::ast::{RangeEnd, RangeSyntax};
+use crate::{ast, attr};
+use crate::ext::base::DummyResult;
+use crate::source_map::{self, SourceMap, Spanned, respan};
+use crate::parse::{self, SeqSep, classify, token};
+use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
+use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use crate::parse::token::DelimToken;
+use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
+use crate::util::parser::{AssocOp, Fixity};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::parse::PResult;
+use crate::ThinVec;
+use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
+use crate::symbol::{Symbol, keywords};
+
+use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
+use rustc_target::spec::abi::{self, Abi};
+use syntax_pos::{Span, MultiSpan, BytePos, FileName};
+use log::{debug, trace};
+
+use std::borrow::Cow;
+use std::cmp;
+use std::mem;
+use std::path::{self, Path, PathBuf};
+use std::slice;
+
+#[derive(Debug)]
+/// Whether the type alias or associated type is a concrete type or an existential type
+pub enum AliasKind {
+ /// Just a new name for the same type
+ Weak(P<Ty>),
+ /// Only trait impls of the type will be usable, not the actual type itself
+ Existential(GenericBounds),
+}
+
+bitflags::bitflags! {
+ struct Restrictions: u8 {
+ const STMT_EXPR = 1 << 0;
+ const NO_STRUCT_LITERAL = 1 << 1;
+ }
+}
+
+type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);
+
+/// Specifies how to parse a path.
+#[derive(Copy, Clone, PartialEq)]
+pub enum PathStyle {
+ /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
+ /// with something else. For example, in expressions `segment < ....` can be interpreted
+ /// as a comparison and `segment ( ....` can be interpreted as a function call.
+ /// In all such contexts the non-path interpretation is preferred by default for practical
+ /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
+ /// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
+ Expr,
+ /// In other contexts, notably in types, no ambiguity exists and paths can be written
+ /// without the disambiguator, e.g., `x<y>` - unambiguously a path.
+ /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
+ Type,
+ /// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
+ /// visibilities or attributes.
+ /// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
+ /// (paths in "mod" contexts have to be checked later for absence of generic arguments
+ /// anyway, due to macros), but it is used to avoid weird suggestions about expected
+ /// tokens when something goes wrong.
+ Mod,
+}
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+enum SemiColonMode {
+ Break,
+ Ignore,
+ Comma,
+}
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+enum BlockMode {
+ Break,
+ Ignore,
+}
+
+/// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression
+/// dropped into the token stream, which happens while parsing the result of
+/// macro expansion). Placement of these is not as complex as I feared it would
+/// be. The important thing is to make sure that lookahead doesn't balk at
+/// `token::Interpolated` tokens.
+macro_rules! maybe_whole_expr {
+ ($p:expr) => {
+ if let token::Interpolated(nt) = $p.token.clone() {
+ match *nt {
+ token::NtExpr(ref e) | token::NtLiteral(ref e) => {
+ $p.bump();
+ return Ok((*e).clone());
+ }
+ token::NtPath(ref path) => {
+ $p.bump();
+ let span = $p.span;
+ let kind = ExprKind::Path(None, (*path).clone());
+ return Ok($p.mk_expr(span, kind, ThinVec::new()));
+ }
+ token::NtBlock(ref block) => {
+ $p.bump();
+ let span = $p.span;
+ let kind = ExprKind::Block((*block).clone(), None);
+ return Ok($p.mk_expr(span, kind, ThinVec::new()));
+ }
+ _ => {},
+ };
+ }
+ }
+}
+
+/// As maybe_whole_expr, but for things other than expressions
+macro_rules! maybe_whole {
+ ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
+ if let token::Interpolated(nt) = $p.token.clone() {
+ if let token::$constructor($x) = (*nt).clone() {
+ $p.bump();
+ return Ok($e);
+ }
+ }
+ };
+}
+
+fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> {
+ if let Some(ref mut rhs) = rhs {
+ lhs.append(rhs);
+ }
+ lhs
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+enum PrevTokenKind {
+ DocComment,
+ Comma,
+ Plus,
+ Interpolated,
+ Eof,
+ Ident,
+ Other,
+}
+
+trait RecoverQPath: Sized {
+ const PATH_STYLE: PathStyle = PathStyle::Expr;
+ fn to_ty(&self) -> Option<P<Ty>>;
+ fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self;
+ fn to_string(&self) -> String;
+}
+
+impl RecoverQPath for Ty {
+ const PATH_STYLE: PathStyle = PathStyle::Type;
+ fn to_ty(&self) -> Option<P<Ty>> {
+ Some(P(self.clone()))
+ }
+ fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
+ Self { span: path.span, node: TyKind::Path(qself, path), id: self.id }
+ }
+ fn to_string(&self) -> String {
+ pprust::ty_to_string(self)
+ }
+}
+
+impl RecoverQPath for Pat {
+ fn to_ty(&self) -> Option<P<Ty>> {
+ self.to_ty()
+ }
+ fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
+ Self { span: path.span, node: PatKind::Path(qself, path), id: self.id }
+ }
+ fn to_string(&self) -> String {
+ pprust::pat_to_string(self)
+ }
+}
+
+impl RecoverQPath for Expr {
+ fn to_ty(&self) -> Option<P<Ty>> {
+ self.to_ty()
+ }
+ fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
+ Self { span: path.span, node: ExprKind::Path(qself, path),
+ id: self.id, attrs: self.attrs.clone() }
+ }
+ fn to_string(&self) -> String {
+ pprust::expr_to_string(self)
+ }
+}
+
+/* ident is handled by common.rs */
+
+#[derive(Clone)]
+pub struct Parser<'a> {
+ pub sess: &'a ParseSess,
+ /// the current token:
+ pub token: token::Token,
+ /// the span of the current token:
+ pub span: Span,
+ /// the span of the previous token:
+ meta_var_span: Option<Span>,
+ pub prev_span: Span,
+ /// the previous token kind
+ prev_token_kind: PrevTokenKind,
+ restrictions: Restrictions,
+ /// Used to determine the path to externally loaded source files
+ crate directory: Directory<'a>,
+ /// Whether to parse sub-modules in other files.
+ pub recurse_into_file_modules: bool,
+ /// Name of the root module this parser originated from. If `None`, then the
+ /// name is not known. This does not change while the parser is descending
+ /// into modules, and sub-parsers have new values for this name.
+ pub root_module_name: Option<String>,
+ crate expected_tokens: Vec<TokenType>,
+ token_cursor: TokenCursor,
+ desugar_doc_comments: bool,
+ /// Whether we should configure out of line modules as we parse.
+ pub cfg_mods: bool,
+ /// This field is used to keep track of how many left angle brackets we have seen. This is
+ /// required in order to detect extra leading left angle brackets (`<` characters) and error
+ /// appropriately.
+ ///
+ /// See the comments in the `parse_path_segment` function for more details.
+ crate unmatched_angle_bracket_count: u32,
+ crate max_angle_bracket_count: u32,
+ /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
+ /// it gets removed from here. Every entry left at the end gets emitted as an independent
+ /// error.
+ crate unclosed_delims: Vec<UnmatchedBrace>,
+}
+
+
+#[derive(Clone)]
+struct TokenCursor {
+ frame: TokenCursorFrame,
+ stack: Vec<TokenCursorFrame>,
+}
+
+#[derive(Clone)]
+struct TokenCursorFrame {
+ delim: token::DelimToken,
+ span: DelimSpan,
+ open_delim: bool,
+ tree_cursor: tokenstream::Cursor,
+ close_delim: bool,
+ last_token: LastToken,
+}
+
+/// This is used in `TokenCursorFrame` above to track tokens that are consumed
+/// by the parser, and then that's transitively used to record the tokens that
+/// each parse AST item is created with.
+///
+/// Right now this has two states, either collecting tokens or not collecting
+/// tokens. If we're collecting tokens we just save everything off into a local
+/// `Vec`. This should eventually though likely save tokens from the original
+/// token stream and just use slicing of token streams to avoid creation of a
+/// whole new vector.
+///
+/// The second state is where we're passively not recording tokens, but the last
+/// token is still tracked for when we want to start recording tokens. This
+/// "last token" means that when we start recording tokens we'll want to ensure
+/// that this, the first token, is included in the output.
+///
+/// You can find some more example usage of this in the `collect_tokens` method
+/// on the parser.
+#[derive(Clone)]
+enum LastToken {
+ Collecting(Vec<TreeAndJoint>),
+ Was(Option<TreeAndJoint>),
+}
+
+impl TokenCursorFrame {
+ fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
+ TokenCursorFrame {
+ delim: delim,
+ span: sp,
+ open_delim: delim == token::NoDelim,
+ tree_cursor: tts.clone().into_trees(),
+ close_delim: delim == token::NoDelim,
+ last_token: LastToken::Was(None),
+ }
+ }
+}
+
+impl TokenCursor {
+ fn next(&mut self) -> TokenAndSpan {
+ loop {
+ let tree = if !self.frame.open_delim {
+ self.frame.open_delim = true;
+ TokenTree::open_tt(self.frame.span.open, self.frame.delim)
+ } else if let Some(tree) = self.frame.tree_cursor.next() {
+ tree
+ } else if !self.frame.close_delim {
+ self.frame.close_delim = true;
+ TokenTree::close_tt(self.frame.span.close, self.frame.delim)
+ } else if let Some(frame) = self.stack.pop() {
+ self.frame = frame;
+ continue
+ } else {
+ return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
+ };
+
+ match self.frame.last_token {
+ LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
+ LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
+ }
+
+ match tree {
+ TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
+ TokenTree::Delimited(sp, delim, tts) => {
+ let frame = TokenCursorFrame::new(sp, delim, &tts);
+ self.stack.push(mem::replace(&mut self.frame, frame));
+ }
+ }
+ }
+ }
+
+ fn next_desugared(&mut self) -> TokenAndSpan {
+ let (sp, name) = match self.next() {
+ TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
+ tok => return tok,
+ };
+
+ let stripped = strip_doc_comment_decoration(&name.as_str());
+
+ // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
+ // required to wrap the text.
+ let mut num_of_hashes = 0;
+ let mut count = 0;
+ for ch in stripped.chars() {
+ count = match ch {
+ '"' => 1,
+ '#' if count > 0 => count + 1,
+ _ => 0,
+ };
+ num_of_hashes = cmp::max(num_of_hashes, count);
+ }
+
+ let delim_span = DelimSpan::from_single(sp);
+ let body = TokenTree::Delimited(
+ delim_span,
+ token::Bracket,
+ [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
+ TokenTree::Token(sp, token::Eq),
+ TokenTree::Token(sp, token::Literal(
+ token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+ ]
+ .iter().cloned().collect::<TokenStream>().into(),
+ );
+
+ self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
+ delim_span,
+ token::NoDelim,
+ &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
+ [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+ .iter().cloned().collect::<TokenStream>().into()
+ } else {
+ [TokenTree::Token(sp, token::Pound), body]
+ .iter().cloned().collect::<TokenStream>().into()
+ },
+ )));
+
+ self.next()
+ }
+}
+
+#[derive(Clone, PartialEq)]
+crate enum TokenType {
+ Token(token::Token),
+ Keyword(keywords::Keyword),
+ Operator,
+ Lifetime,
+ Ident,
+ Path,
+ Type,
+ Const,
+}
+
+impl TokenType {
+ fn to_string(&self) -> String {
+ match *self {
+ TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
+ TokenType::Keyword(kw) => format!("`{}`", kw.name()),
+ TokenType::Operator => "an operator".to_string(),
+ TokenType::Lifetime => "lifetime".to_string(),
+ TokenType::Ident => "identifier".to_string(),
+ TokenType::Path => "path".to_string(),
+ TokenType::Type => "type".to_string(),
+ TokenType::Const => "const".to_string(),
+ }
+ }
+}
+
+/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
+/// `IDENT<<u8 as Trait>::AssocTy>`.
+///
+/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
+/// that `IDENT` is not the ident of a fn trait.
+fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
+ t == &token::ModSep || t == &token::Lt ||
+ t == &token::BinOp(token::Shl)
+}
+
+/// Information about the path to a module.
+pub struct ModulePath {
+    /// The module's name as written in the source.
+    name: String,
+    /// Whether a candidate file for the module was found on disk.
+    path_exists: bool,
+    /// The resolved location, or the error explaining why resolution failed.
+    pub result: Result<ModulePathSuccess, Error>,
+}
+
+/// A successfully resolved module file, plus how its directory is owned.
+pub struct ModulePathSuccess {
+    pub path: PathBuf,
+    pub directory_ownership: DirectoryOwnership,
+    /// Whether a lint/warning should be emitted for this path.
+    /// NOTE(review): exact warning semantics not visible in this chunk.
+    warn: bool,
+}
+
+/// Structured parser errors that carry enough data to build a rich
+/// diagnostic later (see `Error::span_err`), instead of formatting a
+/// message at the point of detection.
+pub enum Error {
+    /// Neither `<mod>.rs` nor `<mod>/mod.rs` was found for an out-of-line module (E0583).
+    FileNotFoundForModule {
+        mod_name: String,
+        default_path: String,
+        secondary_path: String,
+        dir_path: String,
+    },
+    /// Both candidate files exist for the same module (E0584).
+    DuplicatePaths {
+        mod_name: String,
+        default_path: String,
+        secondary_path: String,
+    },
+    /// A doc comment in a position where it documents nothing (E0585).
+    UselessDocComment,
+    /// `a..=` — an inclusive range must have an upper bound (E0586).
+    InclusiveRangeWithNoEnd,
+}
+
+impl Error {
+    /// Converts this structured error into a `DiagnosticBuilder` pointed at
+    /// `sp`, attaching the matching error code (E0583/E0584/E0585/E0586) and
+    /// a `help` note. The builder is returned unemitted so the caller decides
+    /// whether to emit or cancel it.
+    fn span_err<S: Into<MultiSpan>>(self,
+                                        sp: S,
+                                        handler: &errors::Handler) -> DiagnosticBuilder<'_> {
+        match self {
+            Error::FileNotFoundForModule { ref mod_name,
+                                           ref default_path,
+                                           ref secondary_path,
+                                           ref dir_path } => {
+                let mut err = struct_span_err!(handler, sp, E0583,
+                                               "file not found for module `{}`", mod_name);
+                err.help(&format!("name the file either {} or {} inside the directory \"{}\"",
+                                  default_path,
+                                  secondary_path,
+                                  dir_path));
+                err
+            }
+            Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
+                let mut err = struct_span_err!(handler, sp, E0584,
+                                               "file for module `{}` found at both {} and {}",
+                                               mod_name,
+                                               default_path,
+                                               secondary_path);
+                err.help("delete or rename one of them to remove the ambiguity");
+                err
+            }
+            Error::UselessDocComment => {
+                let mut err = struct_span_err!(handler, sp, E0585,
+                                  "found a documentation comment that doesn't document anything");
+                err.help("doc comments must come before what they document, maybe a comment was \
+                          intended with `//`?");
+                err
+            }
+            Error::InclusiveRangeWithNoEnd => {
+                let mut err = struct_span_err!(handler, sp, E0586,
+                                               "inclusive range with no end");
+                err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)");
+                err
+            }
+        }
+    }
+}
+
+/// State of the left-hand side while parsing a binary/assignment expression:
+/// either nothing has been parsed yet, only outer attributes were consumed,
+/// or a complete LHS expression is already in hand.
+#[derive(Debug)]
+enum LhsExpr {
+    NotYetParsed,
+    AttributesParsed(ThinVec<Attribute>),
+    AlreadyParsed(P<Expr>),
+}
+
+impl From<Option<ThinVec<Attribute>>> for LhsExpr {
+    /// `Some(attrs)` becomes `AttributesParsed(attrs)`; `None` means the LHS
+    /// has not been parsed at all yet.
+    fn from(o: Option<ThinVec<Attribute>>) -> Self {
+        match o {
+            Some(attrs) => LhsExpr::AttributesParsed(attrs),
+            None => LhsExpr::NotYetParsed,
+        }
+    }
+}
+
+impl From<P<Expr>> for LhsExpr {
+    /// A fully parsed expression is wrapped directly as the ready LHS.
+    fn from(expr: P<Expr>) -> Self {
+        LhsExpr::AlreadyParsed(expr)
+    }
+}
+
+/// Creates a placeholder argument.
+fn dummy_arg(span: Span) -> Arg {
+ let ident = Ident::new(keywords::Invalid.name(), span);
+ let pat = P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
+ span,
+ });
+ let ty = Ty {
+ node: TyKind::Err,
+ span,
+ id: ast::DUMMY_NODE_ID
+ };
+ Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID }
+}
+
+/// Controls whether sequence-parsing helpers record the closing tokens they
+/// probe for into `expected_tokens` (`Expect`) or compare silently
+/// (`NoExpect`); see `parse_seq_to_before_tokens`.
+#[derive(Copy, Clone, Debug)]
+enum TokenExpectType {
+    Expect,
+    NoExpect,
+}
+
+impl<'a> Parser<'a> {
+    /// Builds a `Parser` over `tokens`, primes it with the first token, and
+    /// resolves its working directory (explicit `directory` wins; otherwise
+    /// derived from the source file containing the first token's span).
+    pub fn new(sess: &'a ParseSess,
+               tokens: TokenStream,
+               directory: Option<Directory<'a>>,
+               recurse_into_file_modules: bool,
+               desugar_doc_comments: bool)
+               -> Self {
+        let mut parser = Parser {
+            sess,
+            // Placeholder token/spans; overwritten by the first `next_tok` below.
+            token: token::Whitespace,
+            span: syntax_pos::DUMMY_SP,
+            prev_span: syntax_pos::DUMMY_SP,
+            meta_var_span: None,
+            prev_token_kind: PrevTokenKind::Other,
+            restrictions: Restrictions::empty(),
+            recurse_into_file_modules,
+            directory: Directory {
+                path: Cow::from(PathBuf::new()),
+                ownership: DirectoryOwnership::Owned { relative: None }
+            },
+            root_module_name: None,
+            expected_tokens: Vec::new(),
+            token_cursor: TokenCursor {
+                frame: TokenCursorFrame::new(
+                    DelimSpan::dummy(),
+                    token::NoDelim,
+                    &tokens.into(),
+                ),
+                stack: Vec::new(),
+            },
+            desugar_doc_comments,
+            cfg_mods: true,
+            unmatched_angle_bracket_count: 0,
+            max_angle_bracket_count: 0,
+            unclosed_delims: Vec::new(),
+        };
+
+        let tok = parser.next_tok();
+        parser.token = tok.tok;
+        parser.span = tok.sp;
+
+        if let Some(directory) = directory {
+            parser.directory = directory;
+        } else if !parser.span.is_dummy() {
+            // No explicit directory: use the parent dir of the real file the
+            // first token came from, if any.
+            if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) {
+                path.pop();
+                parser.directory.path = Cow::from(path);
+            }
+        }
+
+        parser.process_potential_macro_variable();
+        parser
+    }
+
+    /// Pulls the next token from the cursor, desugaring doc comments into
+    /// `#[doc = ...]` attributes when enabled, and patches dummy spans so
+    /// diagnostics point near the previous token.
+    fn next_tok(&mut self) -> TokenAndSpan {
+        let mut next = if self.desugar_doc_comments {
+            self.token_cursor.next_desugared()
+        } else {
+            self.token_cursor.next()
+        };
+        if next.sp.is_dummy() {
+            // Tweak the location for better diagnostics, but keep syntactic context intact.
+            next.sp = self.prev_span.with_ctxt(next.sp.ctxt());
+        }
+        next
+    }
+
+    /// Converts the current token to a string using `self`'s reader.
+    /// Delegates to the pretty-printer; used pervasively in error messages.
+    pub fn this_token_to_string(&self) -> String {
+        pprust::token_to_string(&self.token)
+    }
+
+    /// Returns a short category prefix for the current token ("keyword",
+    /// "doc comment", ...) when one applies, or `None` for ordinary tokens.
+    fn token_descr(&self) -> Option<&'static str> {
+        Some(match &self.token {
+            t if t.is_special_ident() => "reserved identifier",
+            t if t.is_used_keyword() => "keyword",
+            t if t.is_unused_keyword() => "reserved keyword",
+            token::DocComment(..) => "doc comment",
+            _ => return None,
+        })
+    }
+
+    /// Describes the current token for diagnostics: the backtick-quoted
+    /// token text, preceded by its category (e.g. ``keyword `fn` ``) when
+    /// `token_descr` knows one.
+    fn this_token_descr(&self) -> String {
+        match self.token_descr() {
+            Some(prefix) => format!("{} `{}`", prefix, self.this_token_to_string()),
+            None => format!("`{}`", self.this_token_to_string()),
+        }
+    }
+
+    /// Fatal "unexpected token" error pointed at the *previous* span —
+    /// used after a token has already been consumed.
+    fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> {
+        let token_str = pprust::token_to_string(t);
+        Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str)))
+    }
+
+    /// Reports the current token as unexpected, via `expect_one_of` with no
+    /// acceptable tokens (which therefore always errors).
+    crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
+        match self.expect_one_of(&[], &[]) {
+            Err(e) => Err(e),
+            Ok(_) => unreachable!(),
+        }
+    }
+
+    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
+    /// Returns `Ok(true)` when the token was *recovered* (assumed present via
+    /// unclosed-delimiter recovery) rather than actually consumed.
+    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
+        if self.expected_tokens.is_empty() {
+            if self.token == *t {
+                self.bump();
+                Ok(false)
+            } else {
+                let token_str = pprust::token_to_string(t);
+                let this_token_str = self.this_token_descr();
+                let mut err = self.fatal(&format!("expected `{}`, found {}",
+                                                  token_str,
+                                                  this_token_str));
+
+                let sp = if self.token == token::Token::Eof {
+                    // EOF, don't want to point at the following char, but rather the last token
+                    self.prev_span
+                } else {
+                    self.sess.source_map().next_point(self.prev_span)
+                };
+                let label_exp = format!("expected `{}`", token_str);
+                // Try unclosed-delimiter recovery first; on success the error
+                // built above is replaced/emitted inside the helper.
+                match self.recover_closing_delimiter(&[t.clone()], err) {
+                    Err(e) => err = e,
+                    Ok(recovered) => {
+                        return Ok(recovered);
+                    }
+                }
+                let cm = self.sess.source_map();
+                match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
+                    (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+                        // When the spans are in the same line, it means that the only content
+                        // between them is whitespace, point only at the found token.
+                        err.span_label(self.span, label_exp);
+                    }
+                    _ => {
+                        err.span_label(sp, label_exp);
+                        err.span_label(self.span, "unexpected token");
+                    }
+                }
+                Err(err)
+            }
+        } else {
+            // Other expectations are pending: defer to the multi-token path
+            // so the message lists all of them.
+            self.expect_one_of(slice::from_ref(t), &[])
+        }
+    }
+
+    /// If one of `tokens` is the closing delimiter of a recorded unclosed
+    /// delimiter, emit a tailored "unclosed delimiter" diagnostic and return
+    /// `Ok(true)` (acting as if the delimiter were present); otherwise hand
+    /// the caller's error back unchanged as `Err`.
+    fn recover_closing_delimiter(
+        &mut self,
+        tokens: &[token::Token],
+        mut err: DiagnosticBuilder<'a>,
+    ) -> PResult<'a, bool> {
+        let mut pos = None;
+        // we want to use the last closing delim that would apply
+        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
+            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
+                && Some(self.span) > unmatched.unclosed_span
+            {
+                pos = Some(i);
+            }
+        }
+        match pos {
+            Some(pos) => {
+                // Recover and assume that the detected unclosed delimiter was meant for
+                // this location. Emit the diagnostic and act as if the delimiter was
+                // present for the parser's sake.
+
+                // Don't attempt to recover from this unclosed delimiter more than once.
+                let unmatched = self.unclosed_delims.remove(pos);
+                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
+
+                // We want to suggest the inclusion of the closing delimiter where it makes
+                // the most sense, which is immediately after the last token:
+                //
+                //  {foo(bar {}}
+                //      -      ^
+                //      |      |
+                //      |      help: `)` may belong here (FIXME: #58270)
+                //      |
+                //      unclosed delimiter
+                if let Some(sp) = unmatched.unclosed_span {
+                    err.span_label(sp, "unclosed delimiter");
+                }
+                err.span_suggestion_short(
+                    self.sess.source_map().next_point(self.prev_span),
+                    &format!("{} may belong here", delim.to_string()),
+                    delim.to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                self.expected_tokens.clear();  // reduce errors
+                Ok(true)
+            }
+            _ => Err(err),
+        }
+    }
+
+    /// Expect next token to be edible or inedible token.  If edible,
+    /// then consume it; if inedible, then return without consuming
+    /// anything.  Signal a fatal error if next token is unexpected.
+    /// Returns `Ok(true)` when the match came from unclosed-delimiter
+    /// recovery instead of a real token.
+    pub fn expect_one_of(
+        &mut self,
+        edible: &[token::Token],
+        inedible: &[token::Token],
+    ) -> PResult<'a, bool /* recovered */> {
+        /// Joins expectations into English: "`a`, `b`, or `c`" / "`a` or `b`".
+        fn tokens_to_string(tokens: &[TokenType]) -> String {
+            let mut i = tokens.iter();
+            // This might be a sign we need a connect method on Iterator.
+            let b = i.next()
+                     .map_or(String::new(), |t| t.to_string());
+            i.enumerate().fold(b, |mut b, (i, a)| {
+                if tokens.len() > 2 && i == tokens.len() - 2 {
+                    b.push_str(", or ");
+                } else if tokens.len() == 2 && i == tokens.len() - 2 {
+                    b.push_str(" or ");
+                } else {
+                    b.push_str(", ");
+                }
+                b.push_str(&a.to_string());
+                b
+            })
+        }
+        if edible.contains(&self.token) {
+            self.bump();
+            Ok(false)
+        } else if inedible.contains(&self.token) {
+            // leave it in the input
+            Ok(false)
+        } else {
+            // Build the full expectation set: edible + inedible + everything
+            // previously recorded via `check`/`check_keyword`/etc.
+            let mut expected = edible.iter()
+                .map(|x| TokenType::Token(x.clone()))
+                .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
+                .chain(self.expected_tokens.iter().cloned())
+                .collect::<Vec<_>>();
+            expected.sort_by_cached_key(|x| x.to_string());
+            expected.dedup();
+            let expect = tokens_to_string(&expected[..]);
+            let actual = self.this_token_to_string();
+            let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
+                // Large sets are abbreviated to "N possible tokens".
+                let short_expect = if expected.len() > 6 {
+                    format!("{} possible tokens", expected.len())
+                } else {
+                    expect.clone()
+                };
+                (format!("expected one of {}, found `{}`", expect, actual),
+                 (self.sess.source_map().next_point(self.prev_span),
+                  format!("expected one of {} here", short_expect)))
+            } else if expected.is_empty() {
+                (format!("unexpected token: `{}`", actual),
+                 (self.prev_span, "unexpected token after this".to_string()))
+            } else {
+                (format!("expected {}, found `{}`", expect, actual),
+                 (self.sess.source_map().next_point(self.prev_span),
+                  format!("expected {} here", expect)))
+            };
+            let mut err = self.fatal(&msg_exp);
+            // Common user slips from other languages: `and` / `or`.
+            if self.token.is_ident_named("and") {
+                err.span_suggestion_short(
+                    self.span,
+                    "use `&&` instead of `and` for the boolean operator",
+                    "&&".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            if self.token.is_ident_named("or") {
+                err.span_suggestion_short(
+                    self.span,
+                    "use `||` instead of `or` for the boolean operator",
+                    "||".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            let sp = if self.token == token::Token::Eof {
+                // This is EOF, don't want to point at the following char, but rather the last token
+                self.prev_span
+            } else {
+                label_sp
+            };
+            match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
+                TokenType::Token(t) => Some(t.clone()),
+                _ => None,
+            }).collect::<Vec<_>>(), err) {
+                Err(e) => err = e,
+                Ok(recovered) => {
+                    return Ok(recovered);
+                }
+            }
+
+            let cm = self.sess.source_map();
+            match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
+                (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+                    // When the spans are in the same line, it means that the only content between
+                    // them is whitespace, point at the found token in that case:
+                    //
+                    // X |     () => { syntax error };
+                    //   |                    ^^^^^ expected one of 8 possible tokens here
+                    //
+                    // instead of having:
+                    //
+                    // X |     () => { syntax error };
+                    //   |                   -^^^^^ unexpected token
+                    //   |                   |
+                    //   |                   expected one of 8 possible tokens here
+                    err.span_label(self.span, label_exp);
+                }
+                _ if self.prev_span == syntax_pos::DUMMY_SP => {
+                    // Account for macro context where the previous span might not be
+                    // available to avoid incorrect output (#54841).
+                    err.span_label(self.span, "unexpected token");
+                }
+                _ => {
+                    err.span_label(sp, label_exp);
+                    err.span_label(self.span, "unexpected token");
+                }
+            }
+            Err(err)
+        }
+    }
+
+    /// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
+    /// (For an interpolated `$expr` the expression's own span points into the
+    /// macro definition, so the token's span — `prev_span` — is used instead.)
+    fn interpolated_or_expr_span(&self,
+                                 expr: PResult<'a, P<Expr>>)
+                                 -> PResult<'a, (Span, P<Expr>)> {
+        expr.map(|e| {
+            if self.prev_token_kind == PrevTokenKind::Interpolated {
+                (self.prev_span, e)
+            } else {
+                (e.span, e)
+            }
+        })
+    }
+
+    /// Builds (without emitting) an "expected identifier, found ..." error
+    /// for the current token, adding a raw-ident (`r#...`) escape suggestion
+    /// for escapable keywords and a removal suggestion for a stray comma.
+    fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
+        let mut err = self.struct_span_err(self.span,
+                                           &format!("expected identifier, found {}",
+                                                    self.this_token_descr()));
+        if let token::Ident(ident, false) = &self.token {
+            if ident.is_reserved() && !ident.is_path_segment_keyword() &&
+                ident.name != keywords::Underscore.name()
+            {
+                err.span_suggestion(
+                    self.span,
+                    "you can escape reserved keywords to use them as identifiers",
+                    format!("r#{}", ident),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+        }
+        if let Some(token_descr) = self.token_descr() {
+            err.span_label(self.span, format!("expected identifier, found {}", token_descr));
+        } else {
+            err.span_label(self.span, "expected identifier");
+            // `foo, bar` where an ident follows: the comma itself is likely the typo.
+            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
+                err.span_suggestion(
+                    self.span,
+                    "remove this comma",
+                    String::new(),
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+        err
+    }
+
+    /// Parses an identifier, recovering (emit + continue) if it is a
+    /// reserved one.
+    pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
+        self.parse_ident_common(true)
+    }
+
+    /// Shared body for ident parsing. With `recover`, a reserved ident is
+    /// reported but still accepted; without it, the error is returned.
+    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
+        match self.token {
+            token::Ident(ident, _) => {
+                if self.token.is_reserved_ident() {
+                    let mut err = self.expected_ident_found();
+                    if recover {
+                        err.emit();
+                    } else {
+                        return Err(err);
+                    }
+                }
+                let span = self.span;
+                self.bump();
+                Ok(Ident::new(ident.name, span))
+            }
+            _ => {
+                // A doc comment right before a non-ident gets the dedicated
+                // "useless doc comment" error (E0585).
+                Err(if self.prev_token_kind == PrevTokenKind::DocComment {
+                        self.span_fatal_err(self.prev_span, Error::UselessDocComment)
+                    } else {
+                        self.expected_ident_found()
+                    })
+            }
+        }
+    }
+
+    /// Reports whether the current token is exactly `tok`. When it is not,
+    /// `tok` is recorded in `expected_tokens` so a later failure can list it.
+    crate fn check(&mut self, tok: &token::Token) -> bool {
+        if self.token == *tok {
+            true
+        } else {
+            self.expected_tokens.push(TokenType::Token(tok.clone()));
+            false
+        }
+    }
+
+    /// Consumes `tok` if it is the current token; returns whether it was.
+    /// (Via `check`, a miss also records `tok` as expected.)
+    pub fn eat(&mut self, tok: &token::Token) -> bool {
+        if self.check(tok) {
+            self.bump();
+            true
+        } else {
+            false
+        }
+    }
+
+    /// Reports whether the current token is the keyword `kw`; always records
+    /// the keyword as expected (even on a hit) for later diagnostics.
+    fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
+        self.expected_tokens.push(TokenType::Keyword(kw));
+        self.token.is_keyword(kw)
+    }
+
+    /// Consumes the keyword `kw` if present; returns whether it was eaten.
+    /// Records the keyword as expected either way (via `check_keyword`).
+    pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
+        let present = self.check_keyword(kw);
+        if present {
+            self.bump();
+        }
+        present
+    }
+
+    /// Like `eat_keyword`, but without recording `kw` in `expected_tokens` —
+    /// for speculative probes that should not pollute error messages.
+    fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
+        let present = self.token.is_keyword(kw);
+        if present {
+            self.bump();
+        }
+        present
+    }
+
+ /// If the given word is not a keyword, signals an error.
+ /// If the next token is not the given word, signals an error.
+ /// Otherwise, eats it.
+ fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
+ if !self.eat_keyword(kw) {
+ self.unexpected()
+ } else {
+ Ok(())
+ }
+ }
+
+    /// Reports whether the current token is an identifier; records
+    /// `TokenType::Ident` as expected otherwise.
+    fn check_ident(&mut self) -> bool {
+        let present = self.token.is_ident();
+        if !present {
+            self.expected_tokens.push(TokenType::Ident);
+        }
+        present
+    }
+
+    /// Reports whether the current token can start a path; records
+    /// `TokenType::Path` as expected otherwise.
+    fn check_path(&mut self) -> bool {
+        let present = self.token.is_path_start();
+        if !present {
+            self.expected_tokens.push(TokenType::Path);
+        }
+        present
+    }
+
+    /// Reports whether the current token can begin a type; records
+    /// `TokenType::Type` as expected otherwise.
+    fn check_type(&mut self) -> bool {
+        let present = self.token.can_begin_type();
+        if !present {
+            self.expected_tokens.push(TokenType::Type);
+        }
+        present
+    }
+
+    /// Reports whether the current token can begin a const generic argument;
+    /// records `TokenType::Const` as expected otherwise.
+    fn check_const_arg(&mut self) -> bool {
+        let present = self.token.can_begin_const_arg();
+        if !present {
+            self.expected_tokens.push(TokenType::Const);
+        }
+        present
+    }
+
+    /// Expects and consumes a `+`. if `+=` is seen, replaces it with a `=`
+    /// and continues. If a `+` is not seen, returns `false`.
+    ///
+    /// This is used when token-splitting `+=` into `+`.
+    /// See issue #47856 for an example of when this may occur.
+    fn eat_plus(&mut self) -> bool {
+        self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
+        match self.token {
+            token::BinOp(token::Plus) => {
+                self.bump();
+                true
+            }
+            token::BinOpEq(token::Plus) => {
+                // Split `+=`: keep a `=` positioned one byte past the `+`.
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                self.bump_with(token::Eq, span);
+                true
+            }
+            _ => false,
+        }
+    }
+
+
+    /// Reports whether the current token is `+`-like (`+` or `+=`); records
+    /// a plain `+` as expected otherwise.
+    fn check_plus(&mut self) -> bool {
+        let plus_like = self.token.is_like_plus();
+        if !plus_like {
+            self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
+        }
+        plus_like
+    }
+
+    /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
+    /// `&` and continues. If an `&` is not seen, signals an error.
+    fn expect_and(&mut self) -> PResult<'a, ()> {
+        self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
+        match self.token {
+            token::BinOp(token::And) => {
+                self.bump();
+                Ok(())
+            }
+            token::AndAnd => {
+                // Token-split `&&`: consume one `&`, leave a `&` behind.
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Ok(self.bump_with(token::BinOp(token::And), span))
+            }
+            _ => self.unexpected()
+        }
+    }
+
+    /// Expects and consumes an `|`. If `||` is seen, replaces it with a single
+    /// `|` and continues. If an `|` is not seen, signals an error.
+    fn expect_or(&mut self) -> PResult<'a, ()> {
+        self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
+        match self.token {
+            token::BinOp(token::Or) => {
+                self.bump();
+                Ok(())
+            }
+            token::OrOr => {
+                // Token-split `||`: consume one `|`, leave a `|` behind.
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Ok(self.bump_with(token::BinOp(token::Or), span))
+            }
+            _ => self.unexpected()
+        }
+    }
+
+ fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
+ match suffix {
+ None => {/* everything ok */}
+ Some(suf) => {
+ let text = suf.as_str();
+ if text.is_empty() {
+ self.span_bug(sp, "found empty literal suffix in Some")
+ }
+ let msg = format!("{} with a suffix is invalid", kind);
+ self.struct_span_err(sp, &msg)
+ .span_label(sp, msg)
+ .emit();
+ }
+ }
+ }
+
+    /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
+    /// `<` and continue. If `<-` is seen, replaces it with a single `<`
+    /// and continue. If a `<` is not seen, returns false.
+    ///
+    /// This is meant to be used when parsing generics on a path to get the
+    /// starting token.
+    fn eat_lt(&mut self) -> bool {
+        self.expected_tokens.push(TokenType::Token(token::Lt));
+        let ate = match self.token {
+            token::Lt => {
+                self.bump();
+                true
+            }
+            token::BinOp(token::Shl) => {
+                // Token-split `<<` into `<` `<`.
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                self.bump_with(token::Lt, span);
+                true
+            }
+            token::LArrow => {
+                // Token-split `<-` into `<` `-`.
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                self.bump_with(token::BinOp(token::Minus), span);
+                true
+            }
+            _ => false,
+        };
+
+        if ate {
+            // See doc comment for `unmatched_angle_bracket_count`.
+            self.unmatched_angle_bracket_count += 1;
+            self.max_angle_bracket_count += 1;
+            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
+        }
+
+        ate
+    }
+
+    /// Eats a `<` (with `eat_lt`'s token-splitting), or signals an
+    /// "unexpected token" error if none is present.
+    fn expect_lt(&mut self) -> PResult<'a, ()> {
+        if self.eat_lt() {
+            Ok(())
+        } else {
+            self.unexpected()
+        }
+    }
+
+    /// Expects and consumes a single `>` token. if a `>>` is seen, replaces it
+    /// with a single `>` and continues. If a `>` is not seen, signals an error.
+    /// (`>>=` is split leaving `>=`, and `>=` is split leaving `=`.)
+    fn expect_gt(&mut self) -> PResult<'a, ()> {
+        self.expected_tokens.push(TokenType::Token(token::Gt));
+        let ate = match self.token {
+            token::Gt => {
+                self.bump();
+                Some(())
+            }
+            token::BinOp(token::Shr) => {
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Some(self.bump_with(token::Gt, span))
+            }
+            token::BinOpEq(token::Shr) => {
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Some(self.bump_with(token::Ge, span))
+            }
+            token::Ge => {
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                Some(self.bump_with(token::Eq, span))
+            }
+            _ => None,
+        };
+
+        match ate {
+            Some(_) => {
+                // See doc comment for `unmatched_angle_bracket_count`.
+                if self.unmatched_angle_bracket_count > 0 {
+                    self.unmatched_angle_bracket_count -= 1;
+                    debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
+                }
+
+                Ok(())
+            },
+            None => self.unexpected(),
+        }
+    }
+
+    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
+    /// passes through any errors encountered. Used for error recovery.
+    /// Any error raised while skipping is cancelled, not emitted.
+    fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
+        let handler = self.diagnostic();
+
+        if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets,
+                                                                  SeqSep::none(),
+                                                                  TokenExpectType::Expect,
+                                                                  |p| Ok(p.parse_token_tree())) {
+            handler.cancel(err);
+        }
+    }
+
+    /// Parses a sequence, including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
+    pub fn parse_seq_to_end<T, F>(&mut self,
+                                  ket: &token::Token,
+                                  sep: SeqSep,
+                                  f: F)
+                                  -> PResult<'a, Vec<T>> where
+        F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    {
+        let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+        // If the separator was only *recovered* (assumed present), the real
+        // closing token is not there to consume.
+        if !recovered {
+            self.bump();
+        }
+        Ok(val)
+    }
+
+    /// Parses a sequence, not including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
+    /// closing bracket. The returned bool reports whether error recovery
+    /// occurred while parsing a separator.
+    pub fn parse_seq_to_before_end<T, F>(
+        &mut self,
+        ket: &token::Token,
+        sep: SeqSep,
+        f: F,
+    ) -> PResult<'a, (Vec<T>, bool)>
+        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
+    {
+        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
+    }
+
+    /// Core separated-sequence loop: repeatedly runs `f`, expecting `sep.sep`
+    /// between elements, until one of `kets` (or a stray close-delim/EOF) is
+    /// seen. Tolerates a trailing separator when `sep` allows it, and tries
+    /// to keep going past similar or omitted separators. The returned bool is
+    /// `true` when separator recovery fired and the ket may be missing.
+    fn parse_seq_to_before_tokens<T, F>(
+        &mut self,
+        kets: &[&token::Token],
+        sep: SeqSep,
+        expect: TokenExpectType,
+        mut f: F,
+    ) -> PResult<'a, (Vec<T>, bool /* recovered */)>
+        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
+    {
+        let mut first = true;
+        let mut recovered = false;
+        let mut v = vec![];
+        while !kets.iter().any(|k| {
+                match expect {
+                    TokenExpectType::Expect => self.check(k),
+                    TokenExpectType::NoExpect => self.token == **k,
+                }
+            }) {
+            // Any closing delimiter or EOF ends the sequence unconditionally.
+            match self.token {
+                token::CloseDelim(..) | token::Eof => break,
+                _ => {}
+            };
+            if let Some(ref t) = sep.sep {
+                if first {
+                    first = false;
+                } else {
+                    match self.expect(t) {
+                        Ok(false) => {}
+                        Ok(true) => {
+                            recovered = true;
+                            break;
+                        }
+                        Err(mut e) => {
+                            // Attempt to keep parsing if it was a similar separator
+                            if let Some(ref tokens) = t.similar_tokens() {
+                                if tokens.contains(&self.token) {
+                                    self.bump();
+                                }
+                            }
+                            e.emit();
+                            // Attempt to keep parsing if it was an omitted separator
+                            match f(self) {
+                                Ok(t) => {
+                                    v.push(t);
+                                    continue;
+                                },
+                                Err(mut e) => {
+                                    e.cancel();
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+            // Trailing separator directly before a ket: accept and stop.
+            if sep.trailing_sep_allowed && kets.iter().any(|k| {
+                match expect {
+                    TokenExpectType::Expect => self.check(k),
+                    TokenExpectType::NoExpect => self.token == **k,
+                }
+            }) {
+                break;
+            }
+
+            let t = f(self)?;
+            v.push(t);
+        }
+
+        Ok((v, recovered))
+    }
+
+    /// Parses a sequence, including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
+    /// closing bracket. Unlike `parse_seq_to_end`, this also expects and
+    /// consumes the *opening* token `bra`.
+    fn parse_unspanned_seq<T, F>(
+        &mut self,
+        bra: &token::Token,
+        ket: &token::Token,
+        sep: SeqSep,
+        f: F,
+    ) -> PResult<'a, Vec<T>> where
+        F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    {
+        self.expect(bra)?;
+        let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+        // After separator recovery the ket may be absent; `eat` tolerates that.
+        if !recovered {
+            self.eat(ket);
+        }
+        Ok(result)
+    }
+
+    /// Advance the parser by one token: rotate `span` into `prev_span`,
+    /// remember a coarse kind of the outgoing token for recovery heuristics,
+    /// fetch the next token, and reset `expected_tokens`.
+    pub fn bump(&mut self) {
+        if self.prev_token_kind == PrevTokenKind::Eof {
+            // Bumping after EOF is a bad sign, usually an infinite loop.
+            self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
+        }
+
+        // `meta_var_span` (set for macro metavariables) overrides the raw span.
+        self.prev_span = self.meta_var_span.take().unwrap_or(self.span);
+
+        // Record last token kind for possible error recovery.
+        self.prev_token_kind = match self.token {
+            token::DocComment(..) => PrevTokenKind::DocComment,
+            token::Comma => PrevTokenKind::Comma,
+            token::BinOp(token::Plus) => PrevTokenKind::Plus,
+            token::Interpolated(..) => PrevTokenKind::Interpolated,
+            token::Eof => PrevTokenKind::Eof,
+            token::Ident(..) => PrevTokenKind::Ident,
+            _ => PrevTokenKind::Other,
+        };
+
+        let next = self.next_tok();
+        self.span = next.sp;
+        self.token = next.tok;
+        self.expected_tokens.clear();
+        // check after each token
+        self.process_potential_macro_variable();
+    }
+
+    /// Advance the parser using provided token as a next one. Use this when
+    /// consuming a part of a token. For example a single `<` from `<<`.
+    /// `prev_span` is clipped to end where `span` begins, i.e. at the split
+    /// point inside the original multi-character token.
+    fn bump_with(&mut self, next: token::Token, span: Span) {
+        self.prev_span = self.span.with_hi(span.lo());
+        // It would be incorrect to record the kind of the current token, but
+        // fortunately for tokens currently using `bump_with`, the
+        // prev_token_kind will be of no use anyway.
+        self.prev_token_kind = PrevTokenKind::Other;
+        self.span = span;
+        self.token = next;
+        self.expected_tokens.clear();
+    }
+
+    /// Applies `f` to the token `dist` positions ahead without consuming
+    /// anything. Only looks within the current token-tree frame: positions
+    /// past its end all appear as the frame's closing delimiter.
+    pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
+        F: FnOnce(&token::Token) -> R,
+    {
+        if dist == 0 {
+            return f(&self.token)
+        }
+
+        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+            Some(tree) => match tree {
+                TokenTree::Token(_, tok) => tok,
+                // A nested tree reads as its opening delimiter token.
+                TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
+            },
+            None => token::CloseDelim(self.token_cursor.frame.delim),
+        })
+    }
+
+    /// Span of the token `dist` positions ahead; past the end of the current
+    /// frame it recurses to the nearest preceding lookahead position's span.
+    fn look_ahead_span(&self, dist: usize) -> Span {
+        if dist == 0 {
+            return self.span
+        }
+
+        match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Delimited(span, ..)) => span.entire(),
+            None => self.look_ahead_span(dist - 1),
+        }
+    }
+    /// Fatal error builder pointed at the current token's span.
+    pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
+        self.sess.span_diagnostic.struct_span_fatal(self.span, m)
+    }
+    /// Fatal error builder pointed at an explicit span.
+    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
+        self.sess.span_diagnostic.struct_span_fatal(sp, m)
+    }
+    /// Builds the diagnostic for a structured `Error` at `sp` (unemitted).
+    fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
+        err.span_err(sp, self.diagnostic())
+    }
+    /// ICE at the current token's span; never returns.
+    fn bug(&self, m: &str) -> ! {
+        self.sess.span_diagnostic.span_bug(self.span, m)
+    }
+    /// Emits a non-fatal error at `sp`.
+    fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
+        self.sess.span_diagnostic.span_err(sp, m)
+    }
+    /// Non-fatal error builder at `sp` (unemitted).
+    fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
+        self.sess.span_diagnostic.struct_span_err(sp, m)
+    }
+    /// ICE at `sp`; never returns.
+    crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
+        self.sess.span_diagnostic.span_bug(sp, m)
+    }
+
+    /// Cancels a diagnostic so it is never emitted.
+    fn cancel(&self, err: &mut DiagnosticBuilder<'_>) {
+        self.sess.span_diagnostic.cancel(err)
+    }
+
+    /// The session's diagnostic handler.
+    crate fn diagnostic(&self) -> &'a errors::Handler {
+        &self.sess.span_diagnostic
+    }
+
+    /// Is the current token one of the keywords that signals a bare function type?
+    /// (`fn`, `unsafe`, or `extern` — each checked via `check_keyword`, so all
+    /// three are recorded as expected.)
+    fn token_is_bare_fn_keyword(&mut self) -> bool {
+        self.check_keyword(keywords::Fn) ||
+            self.check_keyword(keywords::Unsafe) ||
+            self.check_keyword(keywords::Extern)
+    }
+
+    /// Parses a `TyKind::BareFn` type; `generic_params` are the already-parsed
+    /// `for<...>` lifetimes, if any.
+    fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
+        /*
+
+        [unsafe] [extern "ABI"] fn (S) -> T
+         ^~~~^           ^~~~^     ^~^    ^
+           |               |        |     |
+           |               |        |   Return type
+           |               |     Argument types
+           |               |
+           |              ABI
+        Function Style
+        */
+
+        let unsafety = self.parse_unsafety();
+        // `extern` with no explicit ABI defaults to "C"; no `extern` means Rust ABI.
+        let abi = if self.eat_keyword(keywords::Extern) {
+            self.parse_opt_abi()?.unwrap_or(Abi::C)
+        } else {
+            Abi::Rust
+        };
+
+        self.expect_keyword(keywords::Fn)?;
+        let (inputs, variadic) = self.parse_fn_args(false, true)?;
+        let ret_ty = self.parse_ret_ty(false)?;
+        let decl = P(FnDecl {
+            inputs,
+            output: ret_ty,
+            variadic,
+        });
+        Ok(TyKind::BareFn(P(BareFnTy {
+            abi,
+            unsafety,
+            generic_params,
+            decl,
+        })))
+    }
+
+    /// Parses asyncness: `async` or nothing.
+    /// The dummy node ids are placeholders — presumably assigned for real
+    /// during a later AST numbering pass (not visible in this chunk).
+    fn parse_asyncness(&mut self) -> IsAsync {
+        if self.eat_keyword(keywords::Async) {
+            IsAsync::Async {
+                closure_id: ast::DUMMY_NODE_ID,
+                return_impl_trait_id: ast::DUMMY_NODE_ID,
+            }
+        } else {
+            IsAsync::NotAsync
+        }
+    }
+
+    /// Parses unsafety: `unsafe` or nothing.
+    fn parse_unsafety(&mut self) -> Unsafety {
+        if self.eat_keyword(keywords::Unsafe) {
+            Unsafety::Unsafe
+        } else {
+            Unsafety::Normal
+        }
+    }
+
+    /// Parses the items in a trait declaration.
+    /// Collects the raw token stream for the item; the tokens are only kept
+    /// when the item has no inner attributes (see the note in `parse_item`).
+    pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
+        maybe_whole!(self, NtTraitItem, |x| x);
+        let attrs = self.parse_outer_attributes()?;
+        let (mut item, tokens) = self.collect_tokens(|this| {
+            this.parse_trait_item_(at_end, attrs)
+        })?;
+        // See `parse_item` for why this clause is here.
+        if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+            item.tokens = Some(tokens);
+        }
+        Ok(item)
+    }
+
+    /// Parses one trait item body (after outer attributes): an associated
+    /// type, an associated const, a trait-item macro invocation, or a method
+    /// (required — ending in `;` — or provided — with a block body).
+    /// `at_end` is set once the item is known to be complete.
+    fn parse_trait_item_(&mut self,
+                         at_end: &mut bool,
+                         mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
+        let lo = self.span;
+
+        let (name, node, generics) = if self.eat_keyword(keywords::Type) {
+            self.parse_trait_item_assoc_ty()?
+        } else if self.is_const_item() {
+            // `const NAME: TY [= EXPR];`
+            self.expect_keyword(keywords::Const)?;
+            let ident = self.parse_ident()?;
+            self.expect(&token::Colon)?;
+            let ty = self.parse_ty()?;
+            let default = if self.eat(&token::Eq) {
+                let expr = self.parse_expr()?;
+                self.expect(&token::Semi)?;
+                Some(expr)
+            } else {
+                self.expect(&token::Semi)?;
+                None
+            };
+            (ident, TraitItemKind::Const(ty, default), ast::Generics::default())
+        } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
+            // trait item macro.
+            (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
+        } else {
+            let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
+
+            let ident = self.parse_ident()?;
+            let mut generics = self.parse_generics()?;
+
+            let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>| {
+                // This is somewhat dubious; We don't want to allow
+                // argument names to be left off if there is a
+                // definition...
+
+                // We don't allow argument names to be left off in edition 2018.
+                p.parse_arg_general(p.span.rust_2018(), true)
+            })?;
+            generics.where_clause = self.parse_where_clause()?;
+
+            let sig = ast::MethodSig {
+                header: FnHeader {
+                    unsafety,
+                    constness,
+                    abi,
+                    asyncness,
+                },
+                decl: d,
+            };
+
+            let body = match self.token {
+                token::Semi => {
+                    self.bump();
+                    *at_end = true;
+                    debug!("parse_trait_methods(): parsing required method");
+                    None
+                }
+                token::OpenDelim(token::Brace) => {
+                    debug!("parse_trait_methods(): parsing provided method");
+                    *at_end = true;
+                    let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+                    attrs.extend(inner_attrs.iter().cloned());
+                    Some(body)
+                }
+                token::Interpolated(ref nt) => {
+                    // A `$body` macro metavariable may stand for the block.
+                    match **nt {
+                        token::NtBlock(..) => {
+                            *at_end = true;
+                            let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+                            attrs.extend(inner_attrs.iter().cloned());
+                            Some(body)
+                        }
+                        _ => {
+                            let token_str = self.this_token_descr();
+                            let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
+                                                              token_str));
+                            err.span_label(self.span, "expected `;` or `{`");
+                            return Err(err);
+                        }
+                    }
+                }
+                _ => {
+                    let token_str = self.this_token_descr();
+                    let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
+                                                      token_str));
+                    err.span_label(self.span, "expected `;` or `{`");
+                    return Err(err);
+                }
+            };
+            (ident, ast::TraitItemKind::Method(sig, body), generics)
+        };
+
+        Ok(TraitItem {
+            id: ast::DUMMY_NODE_ID,
+            ident: name,
+            attrs,
+            generics,
+            node,
+            span: lo.to(self.prev_span),
+            tokens: None,
+        })
+    }
+
+ /// Parses an optional return type `[ -> TY ]` in a function declaration.
+ fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
+ if self.eat(&token::RArrow) {
+ // `allow_plus` is forwarded to the type parser; qpath recovery stays on.
+ Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?))
+ } else {
+ // No `->`: default return type, with an empty span just before here.
+ Ok(FunctionRetTy::Default(self.span.shrink_to_lo()))
+ }
+ }
+
+ /// Parses a type.
+ ///
+ /// Both `+` bound lists and bad-qpath recovery are permitted here.
+ pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
+ self.parse_ty_common(true, true)
+ }
+
+ /// Parses a type in restricted contexts where `+` is not permitted.
+ ///
+ /// Example 1: `&'a TYPE`
+ /// `+` is prohibited to maintain operator priority (P(+) < P(&)).
+ /// Example 2: `value1 as TYPE + value2`
+ /// `+` is prohibited to avoid interactions with expression grammar.
+ fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
+ // Only `+` is restricted; qpath recovery remains enabled.
+ self.parse_ty_common(false, true)
+ }
+
+ /// Common type-parsing routine backing `parse_ty` and `parse_ty_no_plus`.
+ ///
+ /// `allow_plus` controls whether a `+`-separated bound list may follow the
+ /// type; `allow_qpath_recovery` enables recovery of `[T]::Assoc`-style paths
+ /// written without angle brackets (see `maybe_recover_from_bad_qpath`).
+ fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool)
+ -> PResult<'a, P<Ty>> {
+ // Short-circuit if the type arrives pre-parsed as an `NtTy` nonterminal.
+ maybe_whole!(self, NtTy, |x| x);
+
+ let lo = self.span;
+ let mut impl_dyn_multi = false;
+ let node = if self.eat(&token::OpenDelim(token::Paren)) {
+ // `(TYPE)` is a parenthesized type.
+ // `(TYPE,)` is a tuple with a single field of type TYPE.
+ let mut ts = vec![];
+ let mut last_comma = false;
+ while self.token != token::CloseDelim(token::Paren) {
+ ts.push(self.parse_ty()?);
+ if self.eat(&token::Comma) {
+ last_comma = true;
+ } else {
+ last_comma = false;
+ break;
+ }
+ }
+ // Remember whether the last element itself ended in `+` before the `)`.
+ let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus;
+ self.expect(&token::CloseDelim(token::Paren))?;
+
+ // Exactly one element and no trailing comma: parenthesized type, not a tuple.
+ if ts.len() == 1 && !last_comma {
+ let ty = ts.into_iter().nth(0).unwrap().into_inner();
+ let maybe_bounds = allow_plus && self.token.is_like_plus();
+ match ty.node {
+ // `(TY_BOUND_NOPAREN) + BOUND + ...`.
+ TyKind::Path(None, ref path) if maybe_bounds => {
+ self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)?
+ }
+ TyKind::TraitObject(ref bounds, TraitObjectSyntax::None)
+ if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
+ let path = match bounds[0] {
+ GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
+ GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"),
+ };
+ self.parse_remaining_bounds(Vec::new(), path, lo, true)?
+ }
+ // `(TYPE)`
+ _ => TyKind::Paren(P(ty))
+ }
+ } else {
+ TyKind::Tup(ts)
+ }
+ } else if self.eat(&token::Not) {
+ // Never type `!`
+ TyKind::Never
+ } else if self.eat(&token::BinOp(token::Star)) {
+ // Raw pointer
+ TyKind::Ptr(self.parse_ptr()?)
+ } else if self.eat(&token::OpenDelim(token::Bracket)) {
+ // Array or slice
+ let t = self.parse_ty()?;
+ // Parse optional `; EXPR` in `[TYPE; EXPR]`
+ let t = match self.maybe_parse_fixed_length_of_vec()? {
+ None => TyKind::Slice(t),
+ Some(length) => TyKind::Array(t, AnonConst {
+ id: ast::DUMMY_NODE_ID,
+ value: length,
+ }),
+ };
+ self.expect(&token::CloseDelim(token::Bracket))?;
+ t
+ } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
+ // Reference
+ self.expect_and()?;
+ self.parse_borrowed_pointee()?
+ } else if self.eat_keyword_noexpect(keywords::Typeof) {
+ // `typeof(EXPR)`
+ // In order to not be ambiguous, the type must be surrounded by parens.
+ self.expect(&token::OpenDelim(token::Paren))?;
+ let e = AnonConst {
+ id: ast::DUMMY_NODE_ID,
+ value: self.parse_expr()?,
+ };
+ self.expect(&token::CloseDelim(token::Paren))?;
+ TyKind::Typeof(e)
+ } else if self.eat_keyword(keywords::Underscore) {
+ // A type to be inferred `_`
+ TyKind::Infer
+ } else if self.token_is_bare_fn_keyword() {
+ // Function pointer type
+ self.parse_ty_bare_fn(Vec::new())?
+ } else if self.check_keyword(keywords::For) {
+ // Function pointer type or bound list (trait object type) starting with a poly-trait.
+ // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
+ // `for<'lt> Trait1<'lt> + Trait2 + 'a`
+ let lo = self.span;
+ let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+ if self.token_is_bare_fn_keyword() {
+ self.parse_ty_bare_fn(lifetime_defs)?
+ } else {
+ let path = self.parse_path(PathStyle::Type)?;
+ let parse_plus = allow_plus && self.check_plus();
+ self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
+ }
+ } else if self.eat_keyword(keywords::Impl) {
+ // Always parse bounds greedily for better error recovery.
+ let bounds = self.parse_generic_bounds(None)?;
+ impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
+ TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
+ } else if self.check_keyword(keywords::Dyn) &&
+ (self.span.rust_2018() ||
+ self.look_ahead(1, |t| t.can_begin_bound() &&
+ !can_continue_type_after_non_fn_ident(t))) {
+ self.bump(); // `dyn`
+ // Always parse bounds greedily for better error recovery.
+ let bounds = self.parse_generic_bounds(None)?;
+ impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
+ TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
+ } else if self.check(&token::Question) ||
+ self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) {
+ // Bound list (trait object type)
+ TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?,
+ TraitObjectSyntax::None)
+ } else if self.eat_lt() {
+ // Qualified path
+ let (qself, path) = self.parse_qpath(PathStyle::Type)?;
+ TyKind::Path(Some(qself), path)
+ } else if self.token.is_path_start() {
+ // Simple path
+ let path = self.parse_path(PathStyle::Type)?;
+ if self.eat(&token::Not) {
+ // Macro invocation in type position
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ let node = Mac_ { path, tts, delim };
+ TyKind::Mac(respan(lo.to(self.prev_span), node))
+ } else {
+ // Just a type path or bound list (trait object type) starting with a trait.
+ // `Type`
+ // `Trait1 + Trait2 + 'a`
+ if allow_plus && self.check_plus() {
+ self.parse_remaining_bounds(Vec::new(), path, lo, true)?
+ } else {
+ TyKind::Path(None, path)
+ }
+ }
+ } else {
+ let msg = format!("expected type, found {}", self.this_token_descr());
+ return Err(self.fatal(&msg));
+ };
+
+ let span = lo.to(self.prev_span);
+ let ty = Ty { node, span, id: ast::DUMMY_NODE_ID };
+
+ // Try to recover from use of `+` with incorrect priority.
+ self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
+ self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?;
+ let ty = self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)?;
+
+ Ok(P(ty))
+ }
+
+ /// Having parsed an initial trait path (with its `for<...>` binder, if any,
+ /// in `generic_params`), parses the rest of a `+`-separated bound list and
+ /// builds a bare (`dyn`-less) trait-object type.
+ fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path,
+ lo: Span, parse_plus: bool) -> PResult<'a, TyKind> {
+ let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span));
+ let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
+ if parse_plus {
+ self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
+ bounds.append(&mut self.parse_generic_bounds(None)?);
+ }
+ Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
+ }
+
+ /// Emits an "ambiguous `+` in a type" error with a machine-applicable
+ /// parenthesization suggestion when an `impl`/`dyn` type with multiple
+ /// bounds (`impl_dyn_multi`) appears where `+` is not permitted.
+ fn maybe_report_ambiguous_plus(&mut self, allow_plus: bool, impl_dyn_multi: bool, ty: &Ty) {
+ if !allow_plus && impl_dyn_multi {
+ let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
+ self.struct_span_err(ty.span, "ambiguous `+` in a type")
+ .span_suggestion(
+ ty.span,
+ "use parentheses to disambiguate",
+ sum_with_parens,
+ Applicability::MachineApplicable
+ ).emit();
+ }
+ }
+
+ /// Recovers from `TYPE + BOUNDS` where TYPE is not a path (error E0178),
+ /// e.g. `&Foo + Send`: consumes the bounds so parsing can continue and
+ /// suggests parenthesization where a fix is known.
+ fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PResult<'a, ()> {
+ // Do not add `+` to expected tokens.
+ if !allow_plus || !self.token.is_like_plus() {
+ return Ok(())
+ }
+
+ self.bump(); // `+`
+ let bounds = self.parse_generic_bounds(None)?;
+ let sum_span = ty.span.to(self.prev_span);
+
+ let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
+ "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));
+
+ match ty.node {
+ // `&T + Bound`: pretty-print `&T(… + Bound)` form as the suggestion.
+ TyKind::Rptr(ref lifetime, ref mut_ty) => {
+ let sum_with_parens = pprust::to_string(|s| {
+ use crate::print::pprust::PrintState;
+
+ s.s.word("&")?;
+ s.print_opt_lifetime(lifetime)?;
+ s.print_mutability(mut_ty.mutbl)?;
+ s.popen()?;
+ s.print_type(&mut_ty.ty)?;
+ s.print_type_bounds(" +", &bounds)?;
+ s.pclose()
+ });
+ err.span_suggestion(
+ sum_span,
+ "try adding parentheses",
+ sum_with_parens,
+ Applicability::MachineApplicable
+ );
+ }
+ // No reliable auto-fix for pointers and fn pointers: only hint.
+ TyKind::Ptr(..) | TyKind::BareFn(..) => {
+ err.span_label(sum_span, "perhaps you forgot parentheses?");
+ }
+ _ => {
+ err.span_label(sum_span, "expected a path");
+ },
+ }
+ err.emit();
+ Ok(())
+ }
+
+ // Try to recover from associated item paths like `[T]::AssocItem`/`(T, U)::AssocItem`.
+ //
+ // If the parsed `base` (any `RecoverQPath` node) is followed by `::`, the
+ // remaining segments are parsed and the whole thing is rebuilt as a
+ // qualified path `<base>::...`, with a best-effort suggestion emitted.
+ fn maybe_recover_from_bad_qpath<T: RecoverQPath>(&mut self, base: T, allow_recovery: bool)
+ -> PResult<'a, T> {
+ // Do not add `::` to expected tokens.
+ if !allow_recovery || self.token != token::ModSep {
+ return Ok(base);
+ }
+ let ty = match base.to_ty() {
+ Some(ty) => ty,
+ None => return Ok(base),
+ };
+
+ self.bump(); // `::`
+ let mut segments = Vec::new();
+ self.parse_path_segments(&mut segments, T::PATH_STYLE, true)?;
+
+ let span = ty.span.to(self.prev_span);
+ let path_span = span.to(span); // use an empty path since `position` == 0
+ let recovered = base.to_recovered(
+ Some(QSelf { ty, path_span, position: 0 }),
+ ast::Path { segments, span },
+ );
+
+ self.diagnostic()
+ .struct_span_err(span, "missing angle brackets in associated item path")
+ .span_suggestion( // this is a best-effort recovery
+ span, "try", recovered.to_string(), Applicability::MaybeIncorrect
+ ).emit();
+
+ Ok(recovered)
+ }
+
+ /// Parses the pointee of a reference type, after `&`/`&&` has been eaten:
+ /// optional lifetime, optional `mut`, then the type itself (no `+` allowed).
+ fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
+ let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
+ let mutbl = self.parse_mutability();
+ let ty = self.parse_ty_no_plus()?;
+ return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl }));
+ }
+
+ /// Parses the `mut`/`const` qualifier and pointee of a raw pointer type,
+ /// after the leading `*` has been eaten. A missing qualifier is reported
+ /// but recovered as `*const` so parsing can continue.
+ fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
+ let mutbl = if self.eat_keyword(keywords::Mut) {
+ Mutability::Mutable
+ } else if self.eat_keyword(keywords::Const) {
+ Mutability::Immutable
+ } else {
+ let span = self.prev_span;
+ let msg = "expected mut or const in raw pointer type";
+ self.struct_span_err(span, msg)
+ .span_label(span, msg)
+ .help("use `*mut T` or `*const T` as appropriate")
+ .emit();
+ Mutability::Immutable
+ };
+ let t = self.parse_ty_no_plus()?;
+ Ok(MutTy { ty: t, mutbl: mutbl })
+ }
+
+ /// Looks ahead to decide whether the upcoming function argument is named,
+ /// i.e. an identifier (possibly behind `&`, `&&`, or `mut`) followed by `:`.
+ fn is_named_argument(&mut self) -> bool {
+ // `offset` skips a leading `&`/`&&`/`mut` before the would-be name.
+ let offset = match self.token {
+ token::Interpolated(ref nt) => match **nt {
+ token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
+ _ => 0,
+ }
+ token::BinOp(token::And) | token::AndAnd => 1,
+ _ if self.token.is_keyword(keywords::Mut) => 1,
+ _ => 0,
+ };
+
+ self.look_ahead(offset, |t| t.is_ident()) &&
+ self.look_ahead(offset + 1, |t| t == &token::Colon)
+ }
+
+ /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+ /// error.
+ fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
+ if let token::DocComment(_) = self.token {
+ // A stray doc comment: report it and consume the single token.
+ let mut err = self.diagnostic().struct_span_err(
+ self.span,
+ &format!("documentation comments cannot be applied to {}", applied_to),
+ );
+ err.span_label(self.span, "doc comments are not allowed here");
+ err.emit();
+ self.bump();
+ } else if self.token == token::Pound && self.look_ahead(1, |t| {
+ *t == token::OpenDelim(token::Bracket)
+ }) {
+ // A stray `#[...]` attribute: skip its whole token span, then report.
+ let lo = self.span;
+ // Skip every token until next possible arg.
+ while self.token != token::CloseDelim(token::Bracket) {
+ self.bump();
+ }
+ let sp = lo.to(self.span);
+ self.bump();
+ let mut err = self.diagnostic().struct_span_err(
+ sp,
+ &format!("attributes cannot be applied to {}", applied_to),
+ );
+ err.span_label(sp, "attributes are not allowed here");
+ err.emit();
+ }
+ }
+
+ /// This version of parse arg doesn't necessarily require identifier names.
+ ///
+ /// `require_name`: when false, a bare type (anonymous parameter) is accepted.
+ /// `is_trait_item`: enables the 2018-edition note/suggestion about anonymous
+ /// parameters in trait methods.
+ fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> {
+ maybe_whole!(self, NtArg, |x| x);
+
+ // `self` is only legal as the first argument of an associated function;
+ // seeing one here is always an error.
+ if let Ok(Some(_)) = self.parse_self_arg() {
+ let mut err = self.struct_span_err(self.prev_span,
+ "unexpected `self` argument in function");
+ err.span_label(self.prev_span,
+ "`self` is only valid as the first argument of an associated function");
+ return Err(err);
+ }
+
+ let (pat, ty) = if require_name || self.is_named_argument() {
+ debug!("parse_arg_general parse_pat (require_name:{})",
+ require_name);
+ self.eat_incorrect_doc_comment("method arguments");
+ let pat = self.parse_pat(Some("argument name"))?;
+
+ if let Err(mut err) = self.expect(&token::Colon) {
+ // If we find a pattern followed by an identifier, it could be an (incorrect)
+ // C-style parameter declaration.
+ if self.check_ident() && self.look_ahead(1, |t| {
+ *t == token::Comma || *t == token::CloseDelim(token::Paren)
+ }) {
+ let ident = self.parse_ident().unwrap();
+ let span = pat.span.with_hi(ident.span.hi());
+
+ err.span_suggestion(
+ span,
+ "declare the type after the parameter binding",
+ String::from("<identifier>: <type>"),
+ Applicability::HasPlaceholders,
+ );
+ } else if require_name && is_trait_item {
+ if let PatKind::Ident(_, ident, _) = pat.node {
+ err.span_suggestion(
+ pat.span,
+ "explicitly ignore parameter",
+ format!("_: {}", ident),
+ Applicability::MachineApplicable,
+ );
+ }
+
+ err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
+ }
+
+ return Err(err);
+ }
+
+ self.eat_incorrect_doc_comment("a method argument's type");
+ (pat, self.parse_ty()?)
+ } else {
+ // No name required: try parsing a bare type first (anonymous parameter),
+ // keeping a snapshot so we can roll back if it was actually a pattern.
+ debug!("parse_arg_general ident_to_pat");
+ let parser_snapshot_before_ty = self.clone();
+ self.eat_incorrect_doc_comment("a method argument's type");
+ let mut ty = self.parse_ty();
+ if ty.is_ok() && self.token != token::Comma &&
+ self.token != token::CloseDelim(token::Paren) {
+ // This wasn't actually a type, but a pattern looking like a type,
+ // so we are going to rollback and re-parse for recovery.
+ ty = self.unexpected();
+ }
+ match ty {
+ Ok(ty) => {
+ // Anonymous parameter: synthesize an invalid-named binding pattern.
+ let ident = Ident::new(keywords::Invalid.name(), self.prev_span);
+ let pat = P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Ident(
+ BindingMode::ByValue(Mutability::Immutable), ident, None),
+ span: ty.span,
+ });
+ (pat, ty)
+ }
+ Err(mut err) => {
+ // Recover from attempting to parse the argument as a type without pattern.
+ err.cancel();
+ mem::replace(self, parser_snapshot_before_ty);
+ let pat = self.parse_pat(Some("argument name"))?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+
+ let mut err = self.diagnostic().struct_span_err_with_code(
+ pat.span,
+ "patterns aren't allowed in methods without bodies",
+ DiagnosticId::Error("E0642".into()),
+ );
+ err.span_suggestion_short(
+ pat.span,
+ "give this argument a name or use an underscore to ignore it",
+ "_".to_owned(),
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+
+ // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
+ let pat = P(Pat {
+ node: PatKind::Wild,
+ span: pat.span,
+ id: ast::DUMMY_NODE_ID
+ });
+ (pat, ty)
+ }
+ }
+ };
+
+ Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID })
+ }
+
+ /// Parses a single function argument.
+ ///
+ /// Names are required and trait-item anonymous-parameter recovery is off.
+ crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
+ self.parse_arg_general(true, false)
+ }
+
+ /// Parses an argument in a lambda header (e.g., `|arg, arg|`).
+ fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
+ let pat = self.parse_pat(Some("argument name"))?;
+ let t = if self.eat(&token::Colon) {
+ self.parse_ty()?
+ } else {
+ // No `: TY` given: the argument's type is inferred; the node is
+ // spanned at the previous token.
+ P(Ty {
+ id: ast::DUMMY_NODE_ID,
+ node: TyKind::Infer,
+ span: self.prev_span,
+ })
+ };
+ Ok(Arg {
+ ty: t,
+ pat,
+ id: ast::DUMMY_NODE_ID
+ })
+ }
+
+ /// Parses the optional `; EXPR` length suffix inside `[TYPE; EXPR]`;
+ /// returns `None` when the brackets denote a slice type instead.
+ fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> {
+ if self.eat(&token::Semi) {
+ Ok(Some(self.parse_expr()?))
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Matches `token_lit = LIT_INTEGER | ...`.
+ fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
+ let out = match self.token {
+ // Interpolated `NtExpr`/`NtLiteral` nonterminals are accepted only
+ // if they are literal expressions.
+ token::Interpolated(ref nt) => match **nt {
+ token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
+ ExprKind::Lit(ref lit) => { lit.node.clone() }
+ _ => { return self.unexpected_last(&self.token); }
+ },
+ _ => { return self.unexpected_last(&self.token); }
+ },
+ token::Literal(lit, suf) => {
+ let diag = Some((self.span, &self.sess.span_diagnostic));
+ let (suffix_illegal, result) = parse::lit_token(lit, suf, diag);
+
+ if suffix_illegal {
+ let sp = self.span;
+ self.expect_no_suffix(sp, lit.literal_name(), suf)
+ }
+
+ result.unwrap()
+ }
+ token::Dot if self.look_ahead(1, |t| match t {
+ token::Literal(parse::token::Lit::Integer(_) , _) => true,
+ _ => false,
+ }) => { // recover from `let x = .4;`
+ let lo = self.span;
+ self.bump();
+ if let token::Literal(
+ parse::token::Lit::Integer(val),
+ suffix,
+ ) = self.token {
+ // Keep a float suffix (`f32`/`f64`) if present; drop anything else.
+ let suffix = suffix.and_then(|s| {
+ let s = s.as_str().get();
+ if ["f32", "f64"].contains(&s) {
+ Some(s)
+ } else {
+ None
+ }
+ }).unwrap_or("");
+ self.bump();
+ let sp = lo.to(self.prev_span);
+ let mut err = self.diagnostic()
+ .struct_span_err(sp, "float literals must have an integer part");
+ err.span_suggestion(
+ sp,
+ "must have an integer part",
+ format!("0.{}{}", val, suffix),
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+ return Ok(match suffix {
+ "f32" => ast::LitKind::Float(val, ast::FloatTy::F32),
+ "f64" => ast::LitKind::Float(val, ast::FloatTy::F64),
+ _ => ast::LitKind::FloatUnsuffixed(val),
+ });
+ } else {
+ // The look_ahead above guarantees an integer literal follows.
+ unreachable!();
+ };
+ }
+ _ => { return self.unexpected_last(&self.token); }
+ };
+
+ self.bump();
+ Ok(out)
+ }
+
+ /// Matches `lit = true | false | token_lit`.
+ crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
+ let lo = self.span;
+ let lit = if self.eat_keyword(keywords::True) {
+ LitKind::Bool(true)
+ } else if self.eat_keyword(keywords::False) {
+ LitKind::Bool(false)
+ } else {
+ // Not a bool keyword: defer to the token-level literal parser.
+ let lit = self.parse_lit_token()?;
+ lit
+ };
+ Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) })
+ }
+
+ /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
+ crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
+ maybe_whole_expr!(self);
+
+ let minus_lo = self.span;
+ let minus_present = self.eat(&token::BinOp(token::Minus));
+ let lo = self.span;
+ let literal = self.parse_lit()?;
+ let hi = self.prev_span;
+ let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());
+
+ if minus_present {
+ // Wrap in a negation node spanning from the `-` through the literal.
+ let minus_hi = self.prev_span;
+ let unary = self.mk_unary(UnOp::Neg, expr);
+ Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new()))
+ } else {
+ Ok(expr)
+ }
+ }
+
+ /// Parses a path-segment identifier, additionally accepting keywords that
+ /// are valid in path-segment position (per `is_path_segment_keyword`),
+ /// which plain `parse_ident` would reject.
+ fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
+ match self.token {
+ token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
+ let span = self.span;
+ self.bump();
+ Ok(Ident::new(ident.name, span))
+ }
+ _ => self.parse_ident(),
+ }
+ }
+
+ /// Parses an identifier, additionally accepting a raw `_` (underscore)
+ /// token where one is permitted.
+ fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
+ match self.token {
+ token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
+ let span = self.span;
+ self.bump();
+ Ok(Ident::new(ident.name, span))
+ }
+ _ => self.parse_ident(),
+ }
+ }
+
+ /// Parses a qualified path.
+ /// Assumes that the leading `<` has been parsed already.
+ ///
+ /// `qualified_path = <type [as trait_ref]>::path`
+ ///
+ /// # Examples
+ /// `<T>::default`
+ /// `<T as U>::a`
+ /// `<T as U>::F::a<S>` (without disambiguator)
+ /// `<T as U>::F::a::<S>` (with disambiguator)
+ fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> {
+ let lo = self.prev_span;
+ let ty = self.parse_ty()?;
+
+ // `path` will contain the prefix of the path up to the `>`,
+ // if any (e.g., `U` in the `<T as U>::*` examples
+ // above). `path_span` has the span of that path, or an empty
+ // span in the case of something like `<T>::Bar`.
+ let (mut path, path_span);
+ if self.eat_keyword(keywords::As) {
+ let path_lo = self.span;
+ path = self.parse_path(PathStyle::Type)?;
+ path_span = path_lo.to(self.prev_span);
+ } else {
+ path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
+ path_span = self.span.to(self.span);
+ }
+
+ // See doc comment for `unmatched_angle_bracket_count`.
+ self.expect(&token::Gt)?;
+ if self.unmatched_angle_bracket_count > 0 {
+ self.unmatched_angle_bracket_count -= 1;
+ debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
+ }
+
+ self.expect(&token::ModSep)?;
+
+ // `position` records where the trait-prefix segments end in `segments`.
+ let qself = QSelf { ty, path_span, position: path.segments.len() };
+ self.parse_path_segments(&mut path.segments, style, true)?;
+
+ Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) }))
+ }
+
+ /// Parses simple paths.
+ ///
+ /// `path = [::] segment+`
+ /// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
+ ///
+ /// # Examples
+ /// `a::b::C<D>` (without disambiguator)
+ /// `a::b::C::<D>` (with disambiguator)
+ /// `Fn(Args)` (without disambiguator)
+ /// `Fn::(Args)` (with disambiguator)
+ pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
+ // Disambiguator warnings are always enabled through this entry point.
+ self.parse_path_common(style, true)
+ }
+
+ /// Backing implementation of `parse_path`; `enable_warning` toggles the
+ /// "unnecessary path disambiguator" warning for `::<` in type paths.
+ crate fn parse_path_common(&mut self, style: PathStyle, enable_warning: bool)
+ -> PResult<'a, ast::Path> {
+ // An interpolated `NtPath` is reused directly; generic args are only
+ // legal if the style permits them.
+ maybe_whole!(self, NtPath, |path| {
+ if style == PathStyle::Mod &&
+ path.segments.iter().any(|segment| segment.args.is_some()) {
+ self.diagnostic().span_err(path.span, "unexpected generic arguments in path");
+ }
+ path
+ });
+
+ let lo = self.meta_var_span.unwrap_or(self.span);
+ let mut segments = Vec::new();
+ let mod_sep_ctxt = self.span.ctxt();
+ // A leading `::` becomes a path-root segment carrying the `::`'s context.
+ if self.eat(&token::ModSep) {
+ segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
+ }
+ self.parse_path_segments(&mut segments, style, enable_warning)?;
+
+ Ok(ast::Path { segments, span: lo.to(self.prev_span) })
+ }
+
+ /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
+ /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
+ /// attributes.
+ pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
+ let meta_ident = match self.token {
+ token::Interpolated(ref nt) => match **nt {
+ token::NtMeta(ref meta) => match meta.node {
+ ast::MetaItemKind::Word => Some(meta.ident.clone()),
+ _ => None,
+ },
+ _ => None,
+ },
+ _ => None,
+ };
+ if let Some(path) = meta_ident {
+ // Consume the interpolated meta item and reuse its ident as the path.
+ self.bump();
+ return Ok(path);
+ }
+ self.parse_path(style)
+ }
+
+ /// Parses `::`-separated path segments into `segments`, stopping when no
+ /// `::` follows or an import coupler (`::*` / `::{`) is next.
+ fn parse_path_segments(&mut self,
+ segments: &mut Vec<PathSegment>,
+ style: PathStyle,
+ enable_warning: bool)
+ -> PResult<'a, ()> {
+ loop {
+ let segment = self.parse_path_segment(style, enable_warning)?;
+ if style == PathStyle::Expr {
+ // In order to check for trailing angle brackets, we must have finished
+ // recursing (`parse_path_segment` can indirectly call this function),
+ // that is, the next token must be the highlighted part of the below example:
+ //
+ // `Foo::<Bar as Baz<T>>::Qux`
+ // ^ here
+ //
+ // As opposed to the below highlight (if we had only finished the first
+ // recursion):
+ //
+ // `Foo::<Bar as Baz<T>>::Qux`
+ // ^ here
+ //
+ // `PathStyle::Expr` is only provided at the root invocation and never in
+ // `parse_path_segment` to recurse and therefore can be checked to maintain
+ // this invariant.
+ self.check_trailing_angle_brackets(&segment, token::ModSep);
+ }
+ segments.push(segment);
+
+ if self.is_import_coupler() || !self.eat(&token::ModSep) {
+ return Ok(());
+ }
+ }
+ }
+
+ /// Parses one path segment: an identifier, optionally followed by generic
+ /// arguments — angle-bracketed `<...>` or parenthesized `(...) [-> TY]` —
+ /// with an optional `::` disambiguator depending on `style`.
+ fn parse_path_segment(&mut self, style: PathStyle, enable_warning: bool)
+ -> PResult<'a, PathSegment> {
+ let ident = self.parse_path_segment_ident()?;
+
+ let is_args_start = |token: &token::Token| match *token {
+ token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) => true,
+ _ => false,
+ };
+ let check_args_start = |this: &mut Self| {
+ this.expected_tokens.extend_from_slice(
+ &[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
+ );
+ is_args_start(&this.token)
+ };
+
+ Ok(if style == PathStyle::Type && check_args_start(self) ||
+ style != PathStyle::Mod && self.check(&token::ModSep)
+ && self.look_ahead(1, |t| is_args_start(t)) {
+ // Generic arguments are found - `<`, `(`, `::<` or `::(`.
+ // In type paths the `::` disambiguator is redundant; warn if asked to.
+ if self.eat(&token::ModSep) && style == PathStyle::Type && enable_warning {
+ self.diagnostic().struct_span_warn(self.prev_span, "unnecessary path disambiguator")
+ .span_label(self.prev_span, "try removing `::`").emit();
+ }
+ let lo = self.span;
+
+ // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
+ // it isn't, then we reset the unmatched angle bracket count as we're about to start
+ // parsing a new path.
+ if style == PathStyle::Expr {
+ self.unmatched_angle_bracket_count = 0;
+ self.max_angle_bracket_count = 0;
+ }
+
+ let args = if self.eat_lt() {
+ // `<'a, T, A = U>`
+ let (args, bindings) =
+ self.parse_generic_args_with_leaning_angle_bracket_recovery(style, lo)?;
+ self.expect_gt()?;
+ let span = lo.to(self.prev_span);
+ AngleBracketedArgs { args, bindings, span }.into()
+ } else {
+ // `(T, U) -> R`
+ self.bump(); // `(`
+ let (inputs, recovered) = self.parse_seq_to_before_tokens(
+ &[&token::CloseDelim(token::Paren)],
+ SeqSep::trailing_allowed(token::Comma),
+ TokenExpectType::Expect,
+ |p| p.parse_ty())?;
+ if !recovered {
+ self.bump(); // `)`
+ }
+ let span = lo.to(self.prev_span);
+ let output = if self.eat(&token::RArrow) {
+ Some(self.parse_ty_common(false, false)?)
+ } else {
+ None
+ };
+ ParenthesizedArgs { inputs, output, span }.into()
+ };
+
+ PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
+ } else {
+ // Generic arguments are not found.
+ PathSegment::from_ident(ident)
+ })
+ }
+
+ /// Returns whether the current token is a lifetime, registering `Lifetime`
+ /// in the expected-token set for diagnostics either way.
+ crate fn check_lifetime(&mut self) -> bool {
+ self.expected_tokens.push(TokenType::Lifetime);
+ self.token.is_lifetime()
+ }
+
+ /// Parses a single lifetime `'a` or panics.
+ ///
+ /// Callers are expected to have verified the token via `check_lifetime`
+ /// first — hence `span_bug` rather than a user-facing error.
+ crate fn expect_lifetime(&mut self) -> Lifetime {
+ if let Some(ident) = self.token.lifetime() {
+ let span = self.span;
+ self.bump();
+ Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
+ } else {
+ self.span_bug(self.span, "not a lifetime")
+ }
+ }
+
+ /// Consumes and returns a label (a lifetime-shaped token such as `'loop_name`)
+ /// if one is present; otherwise leaves the token stream untouched.
+ fn eat_label(&mut self) -> Option<Label> {
+ if let Some(ident) = self.token.lifetime() {
+ let span = self.span;
+ self.bump();
+ Some(Label { ident: Ident::new(ident.name, span) })
+ } else {
+ None
+ }
+ }
+
+ /// Parses mutability (`mut` or nothing).
+ fn parse_mutability(&mut self) -> Mutability {
+ if self.eat_keyword(keywords::Mut) {
+ Mutability::Mutable
+ } else {
+ // Absence of `mut` defaults to immutable.
+ Mutability::Immutable
+ }
+ }
+
+ /// Parses a field name: either a plain identifier, or an unsuffixed
+ /// integer literal (a tuple-struct index such as `0`).
+ fn parse_field_name(&mut self) -> PResult<'a, Ident> {
+ if let token::Literal(token::Integer(name), None) = self.token {
+ self.bump();
+ Ok(Ident::new(name, self.prev_span))
+ } else {
+ self.parse_ident_common(false)
+ }
+ }
+
+ /// Parse ident (COLON expr)?
+ ///
+ /// Parses one struct-literal field: `name: expr`, the shorthand `name`
+ /// (desugared to `name: name`), or recovers the common typo `name = expr`.
+ fn parse_field(&mut self) -> PResult<'a, Field> {
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+
+ // Check if a colon exists one ahead. This means we're parsing a fieldname.
+ let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| {
+ t == &token::Colon || t == &token::Eq
+ }) {
+ let fieldname = self.parse_field_name()?;
+
+ // Check for an equals token. This means the source incorrectly attempts to
+ // initialize a field with an eq rather than a colon.
+ if self.token == token::Eq {
+ self.diagnostic()
+ .struct_span_err(self.span, "expected `:`, found `=`")
+ .span_suggestion(
+ fieldname.span.shrink_to_hi().to(self.span),
+ "replace equals symbol with a colon",
+ ":".to_string(),
+ Applicability::MachineApplicable,
+ )
+ .emit();
+ }
+ self.bump(); // `:`
+ (fieldname, self.parse_expr()?, false)
+ } else {
+ // No `:` or `=` ahead: field shorthand.
+ let fieldname = self.parse_ident_common(false)?;
+
+ // Mimic `x: x` for the `x` field shorthand.
+ let path = ast::Path::from_ident(fieldname);
+ let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new());
+ (fieldname, expr, true)
+ };
+ Ok(ast::Field {
+ ident: fieldname,
+ span: lo.to(expr.span),
+ expr,
+ is_shorthand,
+ attrs: attrs.into(),
+ })
+ }
+
+ /// Builds an expression node with a fresh `DUMMY_NODE_ID`.
+ fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
+ P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
+ }
+
+ /// Builds a unary-operation expression kind (`unop expr`).
+ fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
+ ExprKind::Unary(unop, expr)
+ }
+
+ /// Builds a binary-operation expression kind (`lhs binop rhs`).
+ fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
+ ExprKind::Binary(binop, lhs, rhs)
+ }
+
+ /// Builds a call expression kind (`f(args...)`).
+ fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
+ ExprKind::Call(f, args)
+ }
+
+ /// Builds an indexing expression kind (`expr[idx]`).
+ fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
+ ExprKind::Index(expr, idx)
+ }
+
+ /// Builds a range expression kind. An inclusive range with no end
+ /// (`start..=`) is rejected as a fatal error.
+ fn mk_range(&mut self,
+ start: Option<P<Expr>>,
+ end: Option<P<Expr>>,
+ limits: RangeLimits)
+ -> PResult<'a, ast::ExprKind> {
+ if end.is_none() && limits == RangeLimits::Closed {
+ Err(self.span_fatal_err(self.span, Error::InclusiveRangeWithNoEnd))
+ } else {
+ Ok(ExprKind::Range(start, end, limits))
+ }
+ }
+
+ /// Builds a compound-assignment expression kind (`lhs op= rhs`).
+ fn mk_assign_op(&mut self, binop: ast::BinOp,
+ lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
+ ExprKind::AssignOp(binop, lhs, rhs)
+ }
+
+ /// Builds a macro-invocation expression with the given span and attributes.
+ pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec<Attribute>) -> P<Expr> {
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+ node: ExprKind::Mac(source_map::Spanned {node: m, span: span}),
+ span,
+ attrs,
+ })
+ }
+
+ /// Expects and consumes a delimited token tree (e.g. macro arguments),
+ /// returning the delimiter kind and the inner token stream.
+ fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
+ let delim = match self.token {
+ token::OpenDelim(delim) => delim,
+ _ => {
+ let msg = "expected open delimiter";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ return Err(err)
+ }
+ };
+ // The open delimiter was verified above, so the token tree is delimited.
+ let tts = match self.parse_token_tree() {
+ TokenTree::Delimited(_, _, tts) => tts,
+ _ => unreachable!(),
+ };
+ let delim = match delim {
+ token::Paren => MacDelimiter::Parenthesis,
+ token::Bracket => MacDelimiter::Bracket,
+ token::Brace => MacDelimiter::Brace,
+ token::NoDelim => self.bug("unexpected no delimiter"),
+ };
+ Ok((delim, tts.into()))
+ }
+
+ /// At the bottom (top?) of the precedence hierarchy,
/// Parses things like parenthesized exprs, macros, `return`, etc.
///
/// N.B., this does not parse outer attributes, and is private because it only works
/// correctly if called from `parse_dot_or_call_expr()`.
fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
    maybe_whole_expr!(self);

    // Outer attributes are already parsed and will be
    // added to the return value after the fact.
    //
    // Therefore, prevent sub-parser from parsing
    // attributes by giving them a empty "already parsed" list.
    let mut attrs = ThinVec::new();

    // Span bookkeeping: `lo` marks where the expression starts; `hi` is
    // updated by each production to where the expression ends.
    let lo = self.span;
    let mut hi = self.span;

    let ex: ExprKind;

    // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
    match self.token {
        token::OpenDelim(token::Paren) => {
            self.bump();

            attrs.extend(self.parse_inner_attributes()?);

            // (e) is parenthesized e
            // (e,) is a tuple with only one field, e
            let mut es = vec![];
            let mut trailing_comma = false;
            // `recovered` is set when `expect_one_of` already recovered past
            // the expected token, so we must not `bump` the `)` again below.
            let mut recovered = false;
            while self.token != token::CloseDelim(token::Paren) {
                es.push(self.parse_expr()?);
                recovered = self.expect_one_of(
                    &[],
                    &[token::Comma, token::CloseDelim(token::Paren)],
                )?;
                if self.eat(&token::Comma) {
                    trailing_comma = true;
                } else {
                    trailing_comma = false;
                    break;
                }
            }
            if !recovered {
                self.bump();
            }

            hi = self.prev_span;
            // Exactly one element with no trailing comma is a parenthesized
            // expression; anything else is a tuple.
            ex = if es.len() == 1 && !trailing_comma {
                ExprKind::Paren(es.into_iter().nth(0).unwrap())
            } else {
                ExprKind::Tup(es)
            };
        }
        token::OpenDelim(token::Brace) => {
            return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs);
        }
        token::BinOp(token::Or) | token::OrOr => {
            // `|...| expr` / `|| expr` — a closure.
            return self.parse_lambda_expr(attrs);
        }
        token::OpenDelim(token::Bracket) => {
            self.bump();

            attrs.extend(self.parse_inner_attributes()?);

            if self.eat(&token::CloseDelim(token::Bracket)) {
                // Empty vector.
                ex = ExprKind::Array(Vec::new());
            } else {
                // Nonempty vector.
                let first_expr = self.parse_expr()?;
                if self.eat(&token::Semi) {
                    // Repeating array syntax: [ 0; 512 ]
                    let count = AnonConst {
                        id: ast::DUMMY_NODE_ID,
                        value: self.parse_expr()?,
                    };
                    self.expect(&token::CloseDelim(token::Bracket))?;
                    ex = ExprKind::Repeat(first_expr, count);
                } else if self.eat(&token::Comma) {
                    // Vector with two or more elements.
                    let remaining_exprs = self.parse_seq_to_end(
                        &token::CloseDelim(token::Bracket),
                        SeqSep::trailing_allowed(token::Comma),
                        |p| Ok(p.parse_expr()?)
                    )?;
                    let mut exprs = vec![first_expr];
                    exprs.extend(remaining_exprs);
                    ex = ExprKind::Array(exprs);
                } else {
                    // Vector with one element.
                    self.expect(&token::CloseDelim(token::Bracket))?;
                    ex = ExprKind::Array(vec![first_expr]);
                }
            }
            hi = self.prev_span;
        }
        _ => {
            // Everything else: qualified paths, keyword-introduced
            // expressions, macro invocations, struct literals, plain paths,
            // and (possibly negated) literals — tried in that order.
            if self.eat_lt() {
                // Qualified path: `<T as Trait>::...`.
                let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
                hi = path.span;
                return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
            }
            if self.span.rust_2018() && self.check_keyword(keywords::Async)
            {
                if self.is_async_block() { // check for `async {` and `async move {`
                    return self.parse_async_block(attrs);
                } else {
                    return self.parse_lambda_expr(attrs);
                }
            }
            if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
                return self.parse_lambda_expr(attrs);
            }
            if self.eat_keyword(keywords::If) {
                return self.parse_if_expr(attrs);
            }
            if self.eat_keyword(keywords::For) {
                let lo = self.prev_span;
                return self.parse_for_expr(None, lo, attrs);
            }
            if self.eat_keyword(keywords::While) {
                let lo = self.prev_span;
                return self.parse_while_expr(None, lo, attrs);
            }
            if let Some(label) = self.eat_label() {
                // A label must be followed by one of the loop forms or a block.
                let lo = label.ident.span;
                self.expect(&token::Colon)?;
                if self.eat_keyword(keywords::While) {
                    return self.parse_while_expr(Some(label), lo, attrs)
                }
                if self.eat_keyword(keywords::For) {
                    return self.parse_for_expr(Some(label), lo, attrs)
                }
                if self.eat_keyword(keywords::Loop) {
                    return self.parse_loop_expr(Some(label), lo, attrs)
                }
                if self.token == token::OpenDelim(token::Brace) {
                    return self.parse_block_expr(Some(label),
                                                 lo,
                                                 BlockCheckMode::Default,
                                                 attrs);
                }
                let msg = "expected `while`, `for`, `loop` or `{` after a label";
                let mut err = self.fatal(msg);
                err.span_label(self.span, msg);
                return Err(err);
            }
            if self.eat_keyword(keywords::Loop) {
                let lo = self.prev_span;
                return self.parse_loop_expr(None, lo, attrs);
            }
            if self.eat_keyword(keywords::Continue) {
                let label = self.eat_label();
                let ex = ExprKind::Continue(label);
                let hi = self.prev_span;
                return Ok(self.mk_expr(lo.to(hi), ex, attrs));
            }
            if self.eat_keyword(keywords::Match) {
                let match_sp = self.prev_span;
                return self.parse_match_expr(attrs).map_err(|mut err| {
                    err.span_label(match_sp, "while parsing this match expression");
                    err
                });
            }
            if self.eat_keyword(keywords::Unsafe) {
                return self.parse_block_expr(
                    None,
                    lo,
                    BlockCheckMode::Unsafe(ast::UserProvided),
                    attrs);
            }
            if self.is_do_catch_block() {
                let mut db = self.fatal("found removed `do catch` syntax");
                db.help("Following RFC #2388, the new non-placeholder syntax is `try`");
                return Err(db);
            }
            if self.is_try_block() {
                let lo = self.span;
                assert!(self.eat_keyword(keywords::Try));
                return self.parse_try_block(lo, attrs);
            }
            if self.eat_keyword(keywords::Return) {
                if self.token.can_begin_expr() {
                    let e = self.parse_expr()?;
                    hi = e.span;
                    ex = ExprKind::Ret(Some(e));
                } else {
                    ex = ExprKind::Ret(None);
                }
            } else if self.eat_keyword(keywords::Break) {
                let label = self.eat_label();
                // Only parse a `break` value expression when a `{` here could
                // not be a prohibited struct literal (e.g. in loop conditions).
                let e = if self.token.can_begin_expr()
                           && !(self.token == token::OpenDelim(token::Brace)
                                && self.restrictions.contains(
                                       Restrictions::NO_STRUCT_LITERAL)) {
                    Some(self.parse_expr()?)
                } else {
                    None
                };
                ex = ExprKind::Break(label, e);
                hi = self.prev_span;
            } else if self.eat_keyword(keywords::Yield) {
                if self.token.can_begin_expr() {
                    let e = self.parse_expr()?;
                    hi = e.span;
                    ex = ExprKind::Yield(Some(e));
                } else {
                    ex = ExprKind::Yield(None);
                }
            } else if self.token.is_keyword(keywords::Let) {
                // Catch this syntax error here, instead of in `parse_ident`, so
                // that we can explicitly mention that let is not to be used as an expression
                let mut db = self.fatal("expected expression, found statement (`let`)");
                db.span_label(self.span, "expected expression");
                db.note("variable declaration using `let` is a statement");
                return Err(db);
            } else if self.token.is_path_start() {
                let pth = self.parse_path(PathStyle::Expr)?;

                // `!`, as an operator, is prefix, so we know this isn't that
                if self.eat(&token::Not) {
                    // MACRO INVOCATION expression
                    let (delim, tts) = self.expect_delimited_token_tree()?;
                    let hi = self.prev_span;
                    let node = Mac_ { path: pth, tts, delim };
                    return Ok(self.mk_mac_expr(lo.to(hi), node, attrs))
                }
                if self.check(&token::OpenDelim(token::Brace)) {
                    // This is a struct literal, unless we're prohibited
                    // from parsing struct literals here.
                    let prohibited = self.restrictions.contains(
                        Restrictions::NO_STRUCT_LITERAL
                    );
                    if !prohibited {
                        return self.parse_struct_expr(lo, pth, attrs);
                    }
                }

                hi = pth.span;
                ex = ExprKind::Path(None, pth);
            } else {
                if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
                    // Don't complain about bare semicolons after unclosed braces
                    // recovery in order to keep the error count down. Fixing the
                    // delimiters will possibly also fix the bare semicolon found in
                    // expression context. For example, silence the following error:
                    // ```
                    // error: expected expression, found `;`
                    //  --> file.rs:2:13
                    //   |
                    // 2 |     foo(bar(;
                    //   |             ^ expected expression
                    // ```
                    self.bump();
                    return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
                }
                match self.parse_literal_maybe_minus() {
                    Ok(expr) => {
                        hi = expr.span;
                        ex = expr.node.clone();
                    }
                    Err(mut err) => {
                        // Cancel the literal error and report a generic
                        // "expected expression" instead.
                        self.cancel(&mut err);
                        let msg = format!("expected expression, found {}",
                                          self.this_token_descr());
                        let mut err = self.fatal(&msg);
                        err.span_label(self.span, "expected expression");
                        return Err(err);
                    }
                }
            }
        }
    }

    let expr = Expr { node: ex, span: lo.to(hi), id: ast::DUMMY_NODE_ID, attrs };
    // Attempt recovery for malformed qualified-path-like expressions.
    let expr = self.maybe_recover_from_bad_qpath(expr, true)?;

    return Ok(P(expr));
}
+
/// Parses the body of a struct literal `{ field: expr, .., ..base }`; the
/// path `pth` has already been consumed and `lo` is where it started.
/// Heavy on error recovery so that one bad field doesn't abort the literal.
fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Attribute>)
                     -> PResult<'a, P<Expr>> {
    let struct_sp = lo.to(self.prev_span);
    self.bump();
    let mut fields = Vec::new();
    let mut base = None;

    attrs.extend(self.parse_inner_attributes()?);

    while self.token != token::CloseDelim(token::Brace) {
        if self.eat(&token::DotDot) {
            // Functional-update base: `..base` — must be the last item.
            let exp_span = self.prev_span;
            match self.parse_expr() {
                Ok(e) => {
                    base = Some(e);
                }
                Err(mut e) => {
                    e.emit();
                    self.recover_stmt();
                }
            }
            if self.token == token::Comma {
                // A comma after `..base` is rejected with a machine-applicable
                // removal suggestion.
                let mut err = self.sess.span_diagnostic.mut_span_err(
                    exp_span.to(self.prev_span),
                    "cannot use a comma after the base struct",
                );
                err.span_suggestion_short(
                    self.span,
                    "remove this comma",
                    String::new(),
                    Applicability::MachineApplicable
                );
                err.note("the base struct must always be the last field");
                err.emit();
                self.recover_stmt();
            }
            break;
        }

        let mut recovery_field = None;
        if let token::Ident(ident, _) = self.token {
            if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
                // Use in case of error after field-looking code: `S { foo: () with a }`
                let mut ident = ident.clone();
                ident.span = self.span;
                recovery_field = Some(ast::Field {
                    ident,
                    span: self.span,
                    expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
                    is_shorthand: false,
                    attrs: ThinVec::new(),
                });
            }
        }
        let mut parsed_field = None;
        match self.parse_field() {
            Ok(f) => parsed_field = Some(f),
            Err(mut e) => {
                e.span_label(struct_sp, "while parsing this struct");
                e.emit();

                // If the next token is a comma, then try to parse
                // what comes next as additional fields, rather than
                // bailing out until next `}`.
                if self.token != token::Comma {
                    self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
                    if self.token != token::Comma {
                        break;
                    }
                }
            }
        }

        match self.expect_one_of(&[token::Comma],
                                 &[token::CloseDelim(token::Brace)]) {
            Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
                // only include the field if there's no parse error for the field name
                fields.push(f);
            }
            Err(mut e) => {
                if let Some(f) = recovery_field {
                    fields.push(f);
                }
                e.span_label(struct_sp, "while parsing this struct");
                e.emit();
                self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
                self.eat(&token::Comma);
            }
        }
    }

    let span = lo.to(self.span);
    self.expect(&token::CloseDelim(token::Brace))?;
    return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs));
}
+
/// Returns `already_parsed_attrs` when the caller has them in hand;
/// otherwise parses outer attributes from the current position.
fn parse_or_use_outer_attributes(&mut self,
                                 already_parsed_attrs: Option<ThinVec<Attribute>>)
                                 -> PResult<'a, ThinVec<Attribute>> {
    if let Some(attrs) = already_parsed_attrs {
        Ok(attrs)
    } else {
        self.parse_outer_attributes().map(|a| a.into())
    }
}
+
/// Parses a block or unsafe block.
///
/// `lo` is the start span supplied by the caller (e.g. the label or the
/// `unsafe` keyword); inner attributes found inside the braces are appended
/// to `outer_attrs` on the resulting block expression.
fn parse_block_expr(&mut self, opt_label: Option<Label>,
                    lo: Span, blk_mode: BlockCheckMode,
                    outer_attrs: ThinVec<Attribute>)
                    -> PResult<'a, P<Expr>> {
    self.expect(&token::OpenDelim(token::Brace))?;

    let mut attrs = outer_attrs;
    attrs.extend(self.parse_inner_attributes()?);

    let blk = self.parse_block_tail(lo, blk_mode)?;
    return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
}
+
/// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
fn parse_dot_or_call_expr(&mut self,
                          already_parsed_attrs: Option<ThinVec<Attribute>>)
                          -> PResult<'a, P<Expr>> {
    let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;

    // Parse the base expression, then apply any `.`/`(...)`/`[...]` suffixes.
    let b = self.parse_bottom_expr();
    // Re-derives the span for interpolated expressions before suffixing.
    let (span, b) = self.interpolated_or_expr_span(b)?;
    self.parse_dot_or_call_expr_with(b, span, attrs)
}
+
/// Like `parse_dot_or_call_expr_` but starts from an already-parsed base
/// expression `e0` and re-attaches the caller's outer attributes.
fn parse_dot_or_call_expr_with(&mut self,
                               e0: P<Expr>,
                               lo: Span,
                               mut attrs: ThinVec<Attribute>)
                               -> PResult<'a, P<Expr>> {
    // Stitch the list of outer attributes onto the return value.
    // A little bit ugly, but the best way given the current code
    // structure
    self.parse_dot_or_call_expr_with_(e0, lo)
        .map(|expr|
            expr.map(|mut expr| {
                // Prepend the outer attrs to whatever inner attrs the
                // expression already collected.
                attrs.extend::<Vec<_>>(expr.attrs.into());
                expr.attrs = attrs;
                match expr.node {
                    ExprKind::If(..) | ExprKind::IfLet(..) => {
                        if !expr.attrs.is_empty() {
                            // Just point to the first attribute in there...
                            let span = expr.attrs[0].span;

                            self.span_err(span,
                                "attributes are not yet allowed on `if` \
                                expressions");
                        }
                    }
                    _ => {}
                }
                expr
            })
        )
}
+
// Assuming we have just parsed `.`, continue parsing into an expression.
//
// Produces either a method call (`expr.f(...)`) or a field access
// (`expr.f`), diagnosing stray trailing `>`/`>>` after generic args.
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
    let segment = self.parse_path_segment(PathStyle::Expr, true)?;
    self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));

    Ok(match self.token {
        token::OpenDelim(token::Paren) => {
            // Method call `expr.f()`
            let mut args = self.parse_unspanned_seq(
                &token::OpenDelim(token::Paren),
                &token::CloseDelim(token::Paren),
                SeqSep::trailing_allowed(token::Comma),
                |p| Ok(p.parse_expr()?)
            )?;
            // The receiver becomes the first element of the argument list.
            args.insert(0, self_arg);

            let span = lo.to(self.prev_span);
            self.mk_expr(span, ExprKind::MethodCall(segment, args), ThinVec::new())
        }
        _ => {
            // Field access `expr.f`
            if let Some(args) = segment.args {
                self.span_err(args.span(),
                              "field expressions may not have generic arguments");
            }

            let span = lo.to(self.prev_span);
            self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new())
        }
    })
}
+
/// This function checks if there are trailing angle brackets and produces
/// a diagnostic to suggest removing them.
///
/// ```ignore (diagnostic)
/// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
///                                                        ^^ help: remove extra angle brackets
/// ```
fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) {
    // This function is intended to be invoked after parsing a path segment where there are two
    // cases:
    //
    // 1. A specific token is expected after the path segment.
    //    eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
    //        `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
    // 2. No specific token is expected after the path segment.
    //    eg. `x.foo` (field access)
    //
    // This function is called after parsing `.foo` and before parsing the token `end` (if
    // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
    // `Foo::<Bar>`.

    // We only care about trailing angle brackets if we previously parsed angle bracket
    // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
    // removed in this case:
    //
    // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
    //
    // This case is particularly tricky as we won't notice it just looking at the tokens -
    // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
    // have already been parsed):
    //
    // `x.foo::<u32>>>(3)`
    let parsed_angle_bracket_args = segment.args
        .as_ref()
        .map(|args| args.is_angle_bracketed())
        .unwrap_or(false);

    debug!(
        "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
        parsed_angle_bracket_args,
    );
    if !parsed_angle_bracket_args {
        return;
    }

    // Keep the span at the start so we can highlight the sequence of `>` characters to be
    // removed.
    let lo = self.span;

    // We need to look-ahead to see if we have `>` characters without moving the cursor forward
    // (since we might have the field access case and the characters we're eating are
    // actual operators and not trailing characters - ie `x.foo >> 3`).
    let mut position = 0;

    // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
    // many of each (so we can correctly pluralize our error messages) and continue to
    // advance.
    let mut number_of_shr = 0;
    let mut number_of_gt = 0;
    while self.look_ahead(position, |t| {
        trace!("check_trailing_angle_brackets: t={:?}", t);
        if *t == token::BinOp(token::BinOpToken::Shr) {
            number_of_shr += 1;
            true
        } else if *t == token::Gt {
            number_of_gt += 1;
            true
        } else {
            false
        }
    }) {
        position += 1;
    }

    // If we didn't find any trailing `>` characters, then we have nothing to error about.
    debug!(
        "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
        number_of_gt, number_of_shr,
    );
    if number_of_gt < 1 && number_of_shr < 1 {
        return;
    }

    // Finally, double check that we have our end token as otherwise this is the
    // second case.
    if self.look_ahead(position, |t| {
        trace!("check_trailing_angle_brackets: t={:?}", t);
        *t == end
    }) {
        // Eat from where we started until the end token so that parsing can continue
        // as if we didn't have those extra angle brackets.
        self.eat_to_tokens(&[&end]);
        let span = lo.until(self.span);

        // A single `>>` already means two brackets, hence `>= 1` here.
        let plural = number_of_gt > 1 || number_of_shr >= 1;
        self.diagnostic()
            .struct_span_err(
                span,
                &format!("unmatched angle bracket{}", if plural { "s" } else { "" }),
            )
            .span_suggestion(
                span,
                &format!("remove extra angle bracket{}", if plural { "s" } else { "" }),
                String::new(),
                Applicability::MachineApplicable,
            )
            .emit();
    }
}
+
/// Applies postfix suffixes — `?`, `.field`, `.method(...)`, `(...)` call,
/// `[...]` index — to `e0` in a loop until no further suffix follows.
fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
    let mut e = e0;
    let mut hi;
    loop {
        // expr?
        while self.eat(&token::Question) {
            let hi = self.prev_span;
            e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new());
        }

        // expr.f
        if self.eat(&token::Dot) {
            match self.token {
                token::Ident(..) => {
                    e = self.parse_dot_suffix(e, lo)?;
                }
                token::Literal(token::Integer(name), _) => {
                    // Tuple-field access: `expr.0`.
                    let span = self.span;
                    self.bump();
                    let field = ExprKind::Field(e, Ident::new(name, span));
                    e = self.mk_expr(lo.to(span), field, ThinVec::new());
                }
                token::Literal(token::Float(n), _suf) => {
                    // `expr.0.1` lexes `0.1` as one float token; suggest
                    // parenthesizing the first index: `(expr.0).1`.
                    self.bump();
                    let fstr = n.as_str();
                    let mut err = self.diagnostic()
                        .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n));
                    err.span_label(self.prev_span, "unexpected token");
                    if fstr.chars().all(|x| "0123456789.".contains(x)) {
                        let float = match fstr.parse::<f64>().ok() {
                            Some(f) => f,
                            None => continue,
                        };
                        let sugg = pprust::to_string(|s| {
                            use crate::print::pprust::PrintState;
                            s.popen()?;
                            s.print_expr(&e)?;
                            s.s.word(".")?;
                            s.print_usize(float.trunc() as usize)?;
                            s.pclose()?;
                            s.s.word(".")?;
                            s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
                        });
                        err.span_suggestion(
                            lo.to(self.prev_span),
                            "try parenthesizing the first index",
                            sugg,
                            Applicability::MachineApplicable
                        );
                    }
                    return Err(err);

                }
                _ => {
                    // FIXME Could factor this out into non_fatal_unexpected or something.
                    let actual = self.this_token_to_string();
                    self.span_err(self.span, &format!("unexpected token: `{}`", actual));
                }
            }
            continue;
        }
        if self.expr_is_complete(&e) { break; }
        match self.token {
            // expr(...)
            token::OpenDelim(token::Paren) => {
                let es = self.parse_unspanned_seq(
                    &token::OpenDelim(token::Paren),
                    &token::CloseDelim(token::Paren),
                    SeqSep::trailing_allowed(token::Comma),
                    |p| Ok(p.parse_expr()?)
                )?;
                hi = self.prev_span;

                let nd = self.mk_call(e, es);
                e = self.mk_expr(lo.to(hi), nd, ThinVec::new());
            }

            // expr[...]
            // Could be either an index expression or a slicing expression.
            token::OpenDelim(token::Bracket) => {
                self.bump();
                let ix = self.parse_expr()?;
                hi = self.span;
                self.expect(&token::CloseDelim(token::Bracket))?;
                let index = self.mk_index(e, ix);
                e = self.mk_expr(lo.to(hi), index, ThinVec::new())
            }
            _ => return Ok(e)
        }
    }
    return Ok(e);
}
+
/// Normalizes the current token when it is a macro-expansion artifact:
/// reports a `$ident` that survived expansion as an unknown macro variable,
/// and unwraps interpolated identifier/lifetime tokens into plain tokens.
crate fn process_potential_macro_variable(&mut self) {
    let (token, span) = match self.token {
        // `$` only gets this treatment inside a macro-expansion hygiene
        // context (non-empty `SyntaxContext`) and when followed by an ident.
        token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
                         self.look_ahead(1, |t| t.is_ident()) => {
            self.bump();
            let name = match self.token {
                token::Ident(ident, _) => ident,
                _ => unreachable!()
            };
            let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
            err.span_label(self.span, "unknown macro variable");
            err.emit();
            self.bump();
            return
        }
        token::Interpolated(ref nt) => {
            self.meta_var_span = Some(self.span);
            // Interpolated identifier and lifetime tokens are replaced with usual identifier
            // and lifetime tokens, so the former are never encountered during normal parsing.
            match **nt {
                token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
                token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
                _ => return,
            }
        }
        _ => return,
    };
    self.token = token;
    self.span = span;
}
+
/// Parses a single token tree from the input.
crate fn parse_token_tree(&mut self) -> TokenTree {
    match self.token {
        token::OpenDelim(..) => {
            // Pop the whole delimited frame off the token cursor and return
            // it as one `TokenTree::Delimited` without walking its contents.
            let frame = mem::replace(&mut self.token_cursor.frame,
                                     self.token_cursor.stack.pop().unwrap());
            self.span = frame.span.entire();
            self.bump();
            TokenTree::Delimited(
                frame.span,
                frame.delim,
                frame.tree_cursor.stream.into(),
            )
        },
        token::CloseDelim(_) | token::Eof => unreachable!(),
        _ => {
            // Move the current token out, leaving `Whitespace` behind as a
            // throwaway placeholder until `bump` installs the next token.
            let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
            self.bump();
            TokenTree::Token(span, token)
        }
    }
}
+
// parse a stream of tokens into a list of TokenTree's,
// up to EOF.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
    let mut tts = Vec::new();
    while self.token != token::Eof {
        tts.push(self.parse_token_tree());
    }
    Ok(tts)
}
+
/// Collects token trees into a `TokenStream` until EOF or a closing
/// delimiter (which, unlike `parse_all_token_trees`, is left unconsumed).
pub fn parse_tokens(&mut self) -> TokenStream {
    let mut result = Vec::new();
    loop {
        match self.token {
            token::Eof | token::CloseDelim(..) => break,
            _ => result.push(self.parse_token_tree().into()),
        }
    }
    TokenStream::new(result)
}
+
/// Parse a prefix-unary-operator expr
///
/// Handles `!`, `-`, `*`, `&`/`&&`, `box`, the obsolete `in PLACE { .. }`
/// form, plus recovery for `~expr` and the pseudo-operator `not expr`.
fn parse_prefix_expr(&mut self,
                     already_parsed_attrs: Option<ThinVec<Attribute>>)
                     -> PResult<'a, P<Expr>> {
    let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
    let lo = self.span;
    // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
    let (hi, ex) = match self.token {
        token::Not => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), self.mk_unary(UnOp::Not, e))
        }
        // Suggest `!` for bitwise negation when encountering a `~`
        token::Tilde => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            let span_of_tilde = lo;
            let mut err = self.diagnostic()
                .struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator");
            err.span_suggestion_short(
                span_of_tilde,
                "use `!` to perform bitwise negation",
                "!".to_owned(),
                Applicability::MachineApplicable
            );
            err.emit();
            // Recover by treating `~e` as if it had been written `!e`.
            (lo.to(span), self.mk_unary(UnOp::Not, e))
        }
        token::BinOp(token::Minus) => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), self.mk_unary(UnOp::Neg, e))
        }
        token::BinOp(token::Star) => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), self.mk_unary(UnOp::Deref, e))
        }
        token::BinOp(token::And) | token::AndAnd => {
            // Address-of: `& mut? expr` (`expect_and` also accepts a leading
            // `&&` token here).
            self.expect_and()?;
            let m = self.parse_mutability();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), ExprKind::AddrOf(m, e))
        }
        token::Ident(..) if self.token.is_keyword(keywords::In) => {
            // Obsolete placement-in syntax: `in PLACE { EXPR }`.
            self.bump();
            let place = self.parse_expr_res(
                Restrictions::NO_STRUCT_LITERAL,
                None,
            )?;
            let blk = self.parse_block()?;
            let span = blk.span;
            let blk_expr = self.mk_expr(span, ExprKind::Block(blk, None), ThinVec::new());
            (lo.to(span), ExprKind::ObsoleteInPlace(place, blk_expr))
        }
        token::Ident(..) if self.token.is_keyword(keywords::Box) => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), ExprKind::Box(e))
        }
        token::Ident(..) if self.token.is_ident_named("not") => {
            // `not` is just an ordinary identifier in Rust-the-language,
            // but as `rustc`-the-compiler, we can issue clever diagnostics
            // for confused users who really want to say `!`
            let token_cannot_continue_expr = |t: &token::Token| match *t {
                // These tokens can start an expression after `!`, but
                // can't continue an expression after an ident
                token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
                token::Literal(..) | token::Pound => true,
                token::Interpolated(ref nt) => match **nt {
                    token::NtIdent(..) | token::NtExpr(..) |
                    token::NtBlock(..) | token::NtPath(..) => true,
                    _ => false,
                },
                _ => false
            };
            let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr);
            if cannot_continue_expr {
                self.bump();
                // Emit the error ...
                let mut err = self.diagnostic()
                    .struct_span_err(self.span,
                                     &format!("unexpected {} after identifier",
                                              self.this_token_descr()));
                // span the `not` plus trailing whitespace to avoid
                // trailing whitespace after the `!` in our suggestion
                let to_replace = self.sess.source_map()
                    .span_until_non_whitespace(lo.to(self.span));
                err.span_suggestion_short(
                    to_replace,
                    "use `!` to perform logical negation",
                    "!".to_owned(),
                    Applicability::MachineApplicable
                );
                err.emit();
                // —and recover! (just as if we were in the block
                // for the `token::Not` arm)
                let e = self.parse_prefix_expr(None);
                let (span, e) = self.interpolated_or_expr_span(e)?;
                (lo.to(span), self.mk_unary(UnOp::Not, e))
            } else {
                return self.parse_dot_or_call_expr(Some(attrs));
            }
        }
        _ => { return self.parse_dot_or_call_expr(Some(attrs)); }
    };
    return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
+
/// Parses an associative expression.
///
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
#[inline]
fn parse_assoc_expr(&mut self,
                    already_parsed_attrs: Option<ThinVec<Attribute>>)
                    -> PResult<'a, P<Expr>> {
    // `min_prec == 0` accepts every binary operator.
    self.parse_assoc_expr_with(0, already_parsed_attrs.into())
}
+
/// Parses an associative expression with operators of at least `min_prec` precedence.
///
/// Classic precedence-climbing: parse a prefix operand, then repeatedly fold
/// in binary operators whose precedence is at least `min_prec`, recursing for
/// the right-hand side. `as`, `:` (ascription) and the range operators are
/// special-cased below.
fn parse_assoc_expr_with(&mut self,
                         min_prec: usize,
                         lhs: LhsExpr)
                         -> PResult<'a, P<Expr>> {
    let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
        expr
    } else {
        let attrs = match lhs {
            LhsExpr::AttributesParsed(attrs) => Some(attrs),
            _ => None,
        };
        // A leading `..`/`...`/`..=` is a prefix range expression, not a
        // unary-prefixed operand.
        if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
            return self.parse_prefix_range_expr(attrs);
        } else {
            self.parse_prefix_expr(attrs)?
        }
    };

    if self.expr_is_complete(&lhs) {
        // Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071
        return Ok(lhs);
    }
    self.expected_tokens.push(TokenType::Operator);
    while let Some(op) = AssocOp::from_token(&self.token) {

        // Adjust the span for interpolated LHS to point to the `$lhs` token and not to what
        // it refers to. Interpolated identifiers are unwrapped early and never show up here
        // as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process
        // it as "interpolated", it doesn't change the answer for non-interpolated idents.
        let lhs_span = match (self.prev_token_kind, &lhs.node) {
            (PrevTokenKind::Interpolated, _) => self.prev_span,
            (PrevTokenKind::Ident, &ExprKind::Path(None, ref path))
                if path.segments.len() == 1 => self.prev_span,
            _ => lhs.span,
        };

        let cur_op_span = self.span;
        let restrictions = if op.is_assign_like() {
            // The RHS of an assignment may contain struct literals again.
            self.restrictions & Restrictions::NO_STRUCT_LITERAL
        } else {
            self.restrictions
        };
        if op.precedence() < min_prec {
            break;
        }
        // Check for deprecated `...` syntax
        if self.token == token::DotDotDot && op == AssocOp::DotDotEq {
            self.err_dotdotdot_syntax(self.span);
        }

        self.bump();
        if op.is_comparison() {
            self.check_no_chained_comparison(&lhs, &op);
        }
        // Special cases:
        if op == AssocOp::As {
            lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
            continue
        } else if op == AssocOp::Colon {
            // Type ascription: `expr: Ty`.
            lhs = match self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type) {
                Ok(lhs) => lhs,
                Err(mut err) => {
                    err.span_label(self.span,
                                   "expecting a type here because of type ascription");
                    // If the `:` and the would-be type sit on different lines,
                    // the user probably meant to end a statement: suggest `;`.
                    let cm = self.sess.source_map();
                    let cur_pos = cm.lookup_char_pos(self.span.lo());
                    let op_pos = cm.lookup_char_pos(cur_op_span.hi());
                    if cur_pos.line != op_pos.line {
                        err.span_suggestion(
                            cur_op_span,
                            "try using a semicolon",
                            ";".to_string(),
                            Applicability::MaybeIncorrect // speculative
                        );
                    }
                    return Err(err);
                }
            };
            continue
        } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
            // If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to
            // generalise it to the Fixity::None code.
            //
            // We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=` The other
            // two variants are handled with `parse_prefix_range_expr` call above.
            let rhs = if self.is_at_start_of_range_notation_rhs() {
                Some(self.parse_assoc_expr_with(op.precedence() + 1,
                                                LhsExpr::NotYetParsed)?)
            } else {
                None
            };
            let (lhs_span, rhs_span) = (lhs.span, if let Some(ref x) = rhs {
                x.span
            } else {
                cur_op_span
            });
            let limits = if op == AssocOp::DotDot {
                RangeLimits::HalfOpen
            } else {
                RangeLimits::Closed
            };

            let r = self.mk_range(Some(lhs), rhs, limits)?;
            lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new());
            break
        }

        // Associativity determines the minimum precedence passed to the
        // recursive RHS parse: left-assoc requires strictly higher precedence.
        let rhs = match op.fixity() {
            Fixity::Right => self.with_res(
                restrictions - Restrictions::STMT_EXPR,
                |this| {
                    this.parse_assoc_expr_with(op.precedence(),
                        LhsExpr::NotYetParsed)
                }),
            Fixity::Left => self.with_res(
                restrictions - Restrictions::STMT_EXPR,
                |this| {
                    this.parse_assoc_expr_with(op.precedence() + 1,
                        LhsExpr::NotYetParsed)
                }),
            // We currently have no non-associative operators that are not handled above by
            // the special cases. The code is here only for future convenience.
            Fixity::None => self.with_res(
                restrictions - Restrictions::STMT_EXPR,
                |this| {
                    this.parse_assoc_expr_with(op.precedence() + 1,
                        LhsExpr::NotYetParsed)
                }),
        }?;

        // Make sure that the span of the parent node is larger than the span of lhs and rhs,
        // including the attributes.
        let lhs_span = lhs
            .attrs
            .iter()
            .filter(|a| a.style == AttrStyle::Outer)
            .next()
            .map_or(lhs_span, |a| a.span);
        let span = lhs_span.to(rhs.span);
        lhs = match op {
            AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide |
            AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor |
            AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight |
            AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual |
            AssocOp::Greater | AssocOp::GreaterEqual => {
                let ast_op = op.to_ast_binop().unwrap();
                let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
                self.mk_expr(span, binary, ThinVec::new())
            }
            AssocOp::Assign =>
                self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
            AssocOp::ObsoleteInPlace =>
                self.mk_expr(span, ExprKind::ObsoleteInPlace(lhs, rhs), ThinVec::new()),
            AssocOp::AssignOp(k) => {
                // Compound assignment: map the operator token to its BinOpKind.
                let aop = match k {
                    token::Plus => BinOpKind::Add,
                    token::Minus => BinOpKind::Sub,
                    token::Star => BinOpKind::Mul,
                    token::Slash => BinOpKind::Div,
                    token::Percent => BinOpKind::Rem,
                    token::Caret => BinOpKind::BitXor,
                    token::And => BinOpKind::BitAnd,
                    token::Or => BinOpKind::BitOr,
                    token::Shl => BinOpKind::Shl,
                    token::Shr => BinOpKind::Shr,
                };
                let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
                self.mk_expr(span, aopexpr, ThinVec::new())
            }
            AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
                self.bug("AssocOp should have been handled by special case")
            }
        };

        if op.fixity() == Fixity::None { break }
    }
    Ok(lhs)
}
+
/// Parses the type operand of `as` / `:`; `expr_kind` selects whether the
/// result is `ExprKind::Cast` or `ExprKind::Type`. Recovers from
/// `x as usize < y`-style ambiguities by snapshotting the parser.
fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
                       expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind)
                       -> PResult<'a, P<Expr>> {
    let mk_expr = |this: &mut Self, rhs: P<Ty>| {
        this.mk_expr(lhs_span.to(rhs.span), expr_kind(lhs, rhs), ThinVec::new())
    };

    // Save the state of the parser before parsing type normally, in case there is a
    // LessThan comparison after this cast.
    let parser_snapshot_before_type = self.clone();
    match self.parse_ty_no_plus() {
        Ok(rhs) => {
            Ok(mk_expr(self, rhs))
        }
        Err(mut type_err) => {
            // Rewind to before attempting to parse the type with generics, to recover
            // from situations like `x as usize < y` in which we first tried to parse
            // `usize < y` as a type with generic arguments.
            let parser_snapshot_after_type = self.clone();
            mem::replace(self, parser_snapshot_before_type);

            match self.parse_path(PathStyle::Expr) {
                Ok(path) => {
                    let (op_noun, op_verb) = match self.token {
                        token::Lt => ("comparison", "comparing"),
                        token::BinOp(token::Shl) => ("shift", "shifting"),
                        _ => {
                            // We can end up here even without `<` being the next token, for
                            // example because `parse_ty_no_plus` returns `Err` on keywords,
                            // but `parse_path` returns `Ok` on them due to error recovery.
                            // Return original error and parser state.
                            mem::replace(self, parser_snapshot_after_type);
                            return Err(type_err);
                        }
                    };

                    // Successfully parsed the type path leaving a `<` yet to parse.
                    type_err.cancel();

                    // Report non-fatal diagnostics, keep `x as usize` as an expression
                    // in AST and continue parsing.
                    let msg = format!("`<` is interpreted as a start of generic \
                                       arguments for `{}`, not a {}", path, op_noun);
                    let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
                    err.span_label(self.look_ahead_span(1).to(parser_snapshot_after_type.span),
                                   "interpreted as generic arguments");
                    err.span_label(self.span, format!("not interpreted as {}", op_noun));

                    let expr = mk_expr(self, P(Ty {
                        span: path.span,
                        node: TyKind::Path(None, path),
                        id: ast::DUMMY_NODE_ID
                    }));

                    // Prefer the user's snippet for the suggestion; fall back
                    // to pretty-printing if the span can't be snippeted.
                    let expr_str = self.sess.source_map().span_to_snippet(expr.span)
                        .unwrap_or_else(|_| pprust::expr_to_string(&expr));
                    err.span_suggestion(
                        expr.span,
                        &format!("try {} the cast value", op_verb),
                        format!("({})", expr_str),
                        Applicability::MachineApplicable
                    );
                    err.emit();

                    Ok(expr)
                }
                Err(mut path_err) => {
                    // Couldn't parse as a path, return original error and parser state.
                    path_err.cancel();
                    mem::replace(self, parser_snapshot_after_type);
                    Err(type_err)
                }
            }
        }
    }
}
+
+ /// Produce an error if comparison operators are chained (RFC #558).
+ /// We only need to check lhs, not rhs, because all comparison ops
+ /// have same precedence and are left-associative
+ fn check_no_chained_comparison(&mut self, lhs: &Expr, outer_op: &AssocOp) {
+ debug_assert!(outer_op.is_comparison(),
+ "check_no_chained_comparison: {:?} is not comparison",
+ outer_op);
+ match lhs.node {
+ ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
+ // respan to include both operators
+ let op_span = op.span.to(self.span);
+ let mut err = self.diagnostic().struct_span_err(op_span,
+ "chained comparison operators require parentheses");
+ if op.node == BinOpKind::Lt &&
+ *outer_op == AssocOp::Less || // Include `<` to provide this recommendation
+ *outer_op == AssocOp::Greater // even in a case like the following:
+ { // Foo<Bar<Baz<Qux, ()>>>
+ err.help(
+ "use `::<...>` instead of `<...>` if you meant to specify type arguments");
+ err.help("or use `(...)` if you meant to specify fn arguments");
+ }
+ err.emit();
+ }
+ _ => {}
+ }
+ }
+
+ /// Parse prefix-forms of range notation: `..expr`, `..`, `..=expr`
+ ///
+ /// The deprecated `...expr` prefix form is also accepted, with a
+ /// non-fatal diagnostic.
+ fn parse_prefix_range_expr(&mut self,
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, P<Expr>> {
+ // Check for deprecated `...` syntax
+ if self.token == token::DotDotDot {
+ self.err_dotdotdot_syntax(self.span);
+ }
+
+ // NOTE(review): the assertion accepts `...` as well, but the message
+ // only mentions DotDot/DotDotEq — the message text looks out of date.
+ debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
+ "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
+ self.token);
+ // Remember which range operator we saw before bumping past it; it
+ // decides both the RHS precedence and the range limits below.
+ let tok = self.token.clone();
+ let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
+ let lo = self.span;
+ let mut hi = self.span;
+ self.bump();
+ let opt_end = if self.is_at_start_of_range_notation_rhs() {
+ // RHS must be parsed with more associativity than the dots.
+ let next_prec = AssocOp::from_token(&tok).unwrap().precedence() + 1;
+ // The closure updates `hi` to the RHS span so the whole range
+ // expression spans from the dots to the end of the RHS.
+ Some(self.parse_assoc_expr_with(next_prec,
+ LhsExpr::NotYetParsed)
+ .map(|x|{
+ hi = x.span;
+ x
+ })?)
+ } else {
+ None
+ };
+ // `..` is half-open; `..=` (and deprecated `...`) are closed.
+ let limits = if tok == token::DotDot {
+ RangeLimits::HalfOpen
+ } else {
+ RangeLimits::Closed
+ };
+
+ let r = self.mk_range(None, opt_end, limits)?;
+ Ok(self.mk_expr(lo.to(hi), r, attrs))
+ }
+
+ /// Returns whether the current token can begin the RHS of range notation
+ /// (`..`/`..=`), honouring the no-struct-literal restriction.
+ fn is_at_start_of_range_notation_rhs(&self) -> bool {
+     if !self.token.can_begin_expr() {
+         return false;
+     }
+     // Parse `for i in 1.. { }` as an infinite loop, not as `for i in (1..{})`.
+     if self.token == token::OpenDelim(token::Brace) {
+         return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
+     }
+     true
+ }
+
+ /// Parses an `if` or `if let` expression (`if` token already eaten).
+ fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+     if self.check_keyword(keywords::Let) {
+         return self.parse_if_let_expr(attrs);
+     }
+     let lo = self.prev_span;
+     let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+
+     // Verify that the parsed `if` condition makes sense as a condition. If it is a block, then
+     // verify that the last statement is either an implicit return (no `;`) or an explicit
+     // return. This won't catch blocks with an explicit `return`, but that would be caught by
+     // the dead code lint.
+     if self.eat_keyword(keywords::Else) || !cond.returns() {
+         let sp = self.sess.source_map().next_point(lo);
+         let mut err = self.diagnostic()
+             // BUG FIX: the diagnostic previously misspelled "statement"
+             // as "statemement".
+             .struct_span_err(sp, "missing condition for `if` statement");
+         err.span_label(sp, "expected if condition here");
+         return Err(err)
+     }
+     let not_block = self.token != token::OpenDelim(token::Brace);
+     let thn = self.parse_block().map_err(|mut err| {
+         if not_block {
+             err.span_label(lo, "this `if` statement has a condition, but no block");
+         }
+         err
+     })?;
+     let mut els: Option<P<Expr>> = None;
+     let mut hi = thn.span;
+     if self.eat_keyword(keywords::Else) {
+         let elexpr = self.parse_else_expr()?;
+         hi = elexpr.span;
+         els = Some(elexpr);
+     }
+     Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
+ }
+
+ /// Parses an `if let` expression (`if` token already eaten).
+ fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
+                      -> PResult<'a, P<Expr>> {
+     let lo = self.prev_span;
+     self.expect_keyword(keywords::Let)?;
+     let pats = self.parse_pats()?;
+     self.expect(&token::Eq)?;
+     // Struct literals are forbidden in the scrutinee position.
+     let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+     let then_block = self.parse_block()?;
+     let mut hi = then_block.span;
+     let mut els = None;
+     if self.eat_keyword(keywords::Else) {
+         let else_expr = self.parse_else_expr()?;
+         hi = else_expr.span;
+         els = Some(else_expr);
+     }
+     Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, scrutinee, then_block, els), attrs))
+ }
+
+ /// Parses `move |args| expr`.
+ ///
+ /// Also handles the optional `static` (immovable generator) and, on the
+ /// 2018 edition, `async` prefixes. The keyword order consumed here is
+ /// `static`, then `async`, then `move`.
+ fn parse_lambda_expr(&mut self,
+ attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>>
+ {
+ let lo = self.span;
+ // `static` marks an immovable closure/generator.
+ let movability = if self.eat_keyword(keywords::Static) {
+ Movability::Static
+ } else {
+ Movability::Movable
+ };
+ // `async` closures only exist on the 2018 edition.
+ let asyncness = if self.span.rust_2018() {
+ self.parse_asyncness()
+ } else {
+ IsAsync::NotAsync
+ };
+ let capture_clause = if self.eat_keyword(keywords::Move) {
+ CaptureBy::Value
+ } else {
+ CaptureBy::Ref
+ };
+ let decl = self.parse_fn_block_decl()?;
+ let decl_hi = self.prev_span;
+ let body = match decl.output {
+ FunctionRetTy::Default(_) => {
+ // With an inferred return type the body is a bare expression;
+ // lift the statement-expression restriction for it.
+ let restrictions = self.restrictions - Restrictions::STMT_EXPR;
+ self.parse_expr_res(restrictions, None)?
+ },
+ _ => {
+ // If an explicit return type is given, require a
+ // block to appear (RFC 968).
+ let body_lo = self.span;
+ self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())?
+ }
+ };
+
+ Ok(self.mk_expr(
+ lo.to(body.span),
+ ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)),
+ attrs))
+ }
+
+ /// Parses the expression following `else` (the `else` token is already
+ /// eaten): either a chained `if`/`if let` or a plain block.
+ fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
+     if self.eat_keyword(keywords::If) {
+         self.parse_if_expr(ThinVec::new())
+     } else {
+         let blk = self.parse_block()?;
+         Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), ThinVec::new()))
+     }
+ }
+
+ /// Parse a 'for' .. 'in' expression ('for' token already eaten)
+ ///
+ /// Recovers from a missing `in` (suggesting its insertion) and from the
+ /// common `for _ in in expr` typo (suggesting removal of the duplicate).
+ fn parse_for_expr(&mut self, opt_label: Option<Label>,
+ span_lo: Span,
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+ // Parse: `for <src_pat> in <src_expr> <src_loop_block>`
+
+ let pat = self.parse_top_level_pat()?;
+ if !self.eat_keyword(keywords::In) {
+ // No `in` after the pattern: emit a suggestion but keep parsing the
+ // iterable as if `in` had been present.
+ let in_span = self.prev_span.between(self.span);
+ let mut err = self.sess.span_diagnostic
+ .struct_span_err(in_span, "missing `in` in `for` loop");
+ err.span_suggestion_short(
+ in_span, "try adding `in` here", " in ".into(),
+ // has been misleading, at least in the past (closed Issue #48492)
+ Applicability::MaybeIncorrect
+ );
+ err.emit();
+ }
+ let in_span = self.prev_span;
+ if self.eat_keyword(keywords::In) {
+ // a common typo: `for _ in in bar {}`
+ let mut err = self.sess.span_diagnostic.struct_span_err(
+ self.prev_span,
+ "expected iterable, found keyword `in`",
+ );
+ err.span_suggestion_short(
+ in_span.until(self.prev_span),
+ "remove the duplicated `in`",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ err.note("if you meant to use emplacement syntax, it is obsolete (for now, anyway)");
+ err.note("for more information on the status of emplacement syntax, see <\
+ https://github.com/rust-lang/rust/issues/27779#issuecomment-378416911>");
+ err.emit();
+ }
+ // Struct literals are forbidden in the iterable position.
+ let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+ let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(iattrs);
+
+ let hi = self.prev_span;
+ Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
+ }
+
+ /// Parses a `while` or `while let` expression (`while` token already eaten).
+ fn parse_while_expr(&mut self, opt_label: Option<Label>,
+                     span_lo: Span,
+                     mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+     // Delegate `while let ...` to its dedicated parser.
+     if self.token.is_keyword(keywords::Let) {
+         return self.parse_while_let_expr(opt_label, span_lo, attrs);
+     }
+     let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+     let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+     attrs.extend(inner_attrs);
+     let whole_span = span_lo.to(body.span);
+     Ok(self.mk_expr(whole_span, ExprKind::While(cond, body, opt_label), attrs))
+ }
+
+ /// Parses a `while let` expression (`while` token already eaten).
+ fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
+                         span_lo: Span,
+                         mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+     self.expect_keyword(keywords::Let)?;
+     let pats = self.parse_pats()?;
+     self.expect(&token::Eq)?;
+     // Struct literals are forbidden in the scrutinee position.
+     let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+     let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+     attrs.extend(inner_attrs);
+     let whole_span = span_lo.to(body.span);
+     Ok(self.mk_expr(whole_span, ExprKind::WhileLet(pats, scrutinee, body, opt_label), attrs))
+ }
+
+ /// Parses `loop {...}` (the `loop` token is already eaten).
+ fn parse_loop_expr(&mut self, opt_label: Option<Label>,
+                    span_lo: Span,
+                    mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+     let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+     attrs.extend(inner_attrs);
+     Ok(self.mk_expr(span_lo.to(body.span), ExprKind::Loop(body, opt_label), attrs))
+ }
+
+ /// Parses an `async move {...}` expression.
+ pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
+     -> PResult<'a, P<Expr>>
+ {
+     let lo = self.span;
+     self.expect_keyword(keywords::Async)?;
+     // `async move` captures by value; plain `async` captures by reference.
+     let capture_clause = match self.eat_keyword(keywords::Move) {
+         true => CaptureBy::Value,
+         false => CaptureBy::Ref,
+     };
+     let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+     attrs.extend(inner_attrs);
+     Ok(self.mk_expr(
+         lo.to(body.span),
+         ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
+ }
+
+ /// Parses a `try {...}` expression (`try` token already eaten).
+ fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
+     -> PResult<'a, P<Expr>>
+ {
+     let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+     attrs.extend(inner_attrs);
+     let whole_span = span_lo.to(body.span);
+     Ok(self.mk_expr(whole_span, ExprKind::TryBlock(body), attrs))
+ }
+
+ // `match` token already eaten
+ //
+ // Parses the scrutinee and the brace-delimited arm list. On a failed
+ // arm, emits the error, skips to the end of the block, and returns the
+ // arms parsed so far so parsing can continue past the `match`.
+ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+ let match_span = self.prev_span;
+ let lo = self.prev_span;
+ // Struct literals are forbidden in the scrutinee position.
+ let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
+ None)?;
+ if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
+ // `match expr;` — the user probably meant a plain expression.
+ if self.token == token::Token::Semi {
+ e.span_suggestion_short(
+ match_span,
+ "try removing this `match`",
+ String::new(),
+ Applicability::MaybeIncorrect // speculative
+ );
+ }
+ return Err(e)
+ }
+ attrs.extend(self.parse_inner_attributes()?);
+
+ let mut arms: Vec<Arm> = Vec::new();
+ while self.token != token::CloseDelim(token::Brace) {
+ match self.parse_arm() {
+ Ok(arm) => arms.push(arm),
+ Err(mut e) => {
+ // Recover by skipping to the end of the block.
+ e.emit();
+ self.recover_stmt();
+ let span = lo.to(self.span);
+ if self.token == token::CloseDelim(token::Brace) {
+ self.bump();
+ }
+ return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs));
+ }
+ }
+ }
+ let hi = self.span;
+ self.bump();
+ return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
+ }
+
+ /// Parses a single `match` arm: `attrs pats (if guard)? => body ,?`.
+ ///
+ /// A trailing comma is required when the body is an expression that would
+ /// need a `;` as a statement, unless the arm is the last one in the block.
+ crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
+ maybe_whole!(self, NtArm, |x| x);
+
+ let attrs = self.parse_outer_attributes()?;
+ let pats = self.parse_pats()?;
+ let guard = if self.eat_keyword(keywords::If) {
+ Some(Guard::If(self.parse_expr()?))
+ } else {
+ None
+ };
+ let arrow_span = self.span;
+ self.expect(&token::FatArrow)?;
+ let arm_start_span = self.span;
+
+ let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None)
+ .map_err(|mut err| {
+ err.span_label(arrow_span, "while parsing the `match` arm starting here");
+ err
+ })?;
+
+ let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
+ && self.token != token::CloseDelim(token::Brace);
+
+ if require_comma {
+ let cm = self.sess.source_map();
+ self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
+ .map_err(|mut err| {
+ // Heuristic: if the body ran exactly one line past the arrow and we
+ // stopped on the next arm's `=>`, a comma was almost certainly
+ // forgotten at the end of this arm — suggest inserting it.
+ match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) {
+ (Ok(ref expr_lines), Ok(ref arm_start_lines))
+ if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
+ && expr_lines.lines.len() == 2
+ && self.token == token::FatArrow => {
+ // We check whether there's any trailing code in the parse span,
+ // if there isn't, we very likely have the following:
+ //
+ // X | &Y => "y"
+ // | -- - missing comma
+ // | |
+ // | arrow_span
+ // X | &X => "x"
+ // | - ^^ self.span
+ // | |
+ // | parsed until here as `"y" & X`
+ err.span_suggestion_short(
+ cm.next_point(arm_start_span),
+ "missing a comma here to end this `match` arm",
+ ",".to_owned(),
+ Applicability::MachineApplicable
+ );
+ }
+ _ => {
+ err.span_label(arrow_span,
+ "while parsing the `match` arm starting here");
+ }
+ }
+ err
+ })?;
+ } else {
+ // A comma after the last arm (or after a block body) is optional.
+ self.eat(&token::Comma);
+ }
+
+ Ok(ast::Arm {
+ attrs,
+ pats,
+ guard,
+ body: expr,
+ })
+ }
+
+ /// Parses an expression.
+ ///
+ /// Entry point with no restrictions and no pre-parsed outer attributes.
+ #[inline]
+ pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
+ self.parse_expr_res(Restrictions::empty(), None)
+ }
+
+ /// Evaluates the closure with the given restrictions in place.
+ ///
+ /// After the closure is evaluated, the previous restrictions are
+ /// restored. (Fixed doc typo "Afters"; removed the trailing `return`
+ /// plus stray blank line; renamed the result so it no longer shadows
+ /// the `r` parameter.)
+ fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
+     where F: FnOnce(&mut Self) -> T
+ {
+     let old = self.restrictions;
+     self.restrictions = r;
+     let res = f(self);
+     self.restrictions = old;
+     res
+ }
+
+ /// Parses an expression, subject to the given restrictions.
+ ///
+ /// The restrictions apply only for the duration of this call; the
+ /// previous restrictions are restored afterwards (see `with_res`).
+ #[inline]
+ fn parse_expr_res(&mut self, r: Restrictions,
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, P<Expr>> {
+ self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
+ }
+
+ /// Parses the RHS of a local variable declaration (e.g., '= 14;').
+ ///
+ /// If `skip_eq` is true, the initializer expression is parsed even when
+ /// no `=` is present (the caller has already handled/reported the `=`).
+ fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
+     // The two original branches were identical; merge them. `eat` must be
+     // evaluated first (short-circuit) so a present `=` is still consumed.
+     if self.eat(&token::Eq) || skip_eq {
+         Ok(Some(self.parse_expr()?))
+     } else {
+         Ok(None)
+     }
+ }
+
+ /// Parses one or more patterns separated by `|`.
+ ///
+ /// A leading `|` before the first pattern is accepted (RFC 1925 + RFC 2530),
+ /// and an erroneous `||` between patterns is reported and treated as `|`.
+ fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
+     // Allow a '|' before the pats (RFC 1925 + RFC 2530).
+     self.eat(&token::BinOp(token::Or));
+
+     let mut pats = Vec::new();
+     loop {
+         pats.push(self.parse_top_level_pat()?);
+
+         if self.token == token::OrOr {
+             // `||` between patterns: report it, then recover as if it were `|`.
+             let mut err = self.struct_span_err(self.span,
+                                                "unexpected token `||` after pattern");
+             err.span_suggestion(
+                 self.span,
+                 "use a single `|` to specify multiple patterns",
+                 "|".to_owned(),
+                 Applicability::MachineApplicable
+             );
+             err.emit();
+             self.bump();
+             continue;
+         }
+         if !self.eat(&token::BinOp(token::Or)) {
+             // No separator follows: the alternation is complete.
+             return Ok(pats);
+         }
+         // A single `|` was eaten; keep parsing alternatives.
+     }
+ }
+
+ /// Parses a parenthesized list of patterns such as `()`, `(p)`, `(p,)`,
+ /// `(p, q)`, or `(p, .., q)`.
+ ///
+ /// Returns the parsed patterns, the index of a `..` element (if any), and
+ /// whether a trailing comma was present. Trailing commas are significant
+ /// because `(p)` and `(p,)` are different patterns.
+ fn parse_parenthesized_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
+     self.expect(&token::OpenDelim(token::Paren))?;
+     let parsed = self.parse_pat_list()?;
+     self.expect(&token::CloseDelim(token::Paren))?;
+     Ok(parsed)
+ }
+
+ /// Parses a comma-separated pattern list (without surrounding delimiters),
+ /// tracking the position of an optional `..` element and whether the list
+ /// ended with a trailing comma. Stops at `)` without consuming it.
+ fn parse_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
+ let mut fields = Vec::new();
+ let mut ddpos = None;
+ let mut trailing_comma = false;
+ loop {
+ if self.eat(&token::DotDot) {
+ if ddpos.is_none() {
+ // Record where `..` sits relative to the parsed fields.
+ ddpos = Some(fields.len());
+ } else {
+ // Emit a friendly error, ignore `..` and continue parsing
+ self.struct_span_err(
+ self.prev_span,
+ "`..` can only be used once per tuple or tuple struct pattern",
+ )
+ .span_label(self.prev_span, "can only be used once per pattern")
+ .emit();
+ }
+ } else if !self.check(&token::CloseDelim(token::Paren)) {
+ fields.push(self.parse_pat(None)?);
+ } else {
+ break
+ }
+
+ // A missing comma ends the list (the caller expects `)` next).
+ trailing_comma = self.eat(&token::Comma);
+ if !trailing_comma {
+ break
+ }
+ }
+
+ if ddpos == Some(fields.len()) && trailing_comma {
+ // `..` needs to be followed by `)` or `, pat`, `..,)` is disallowed.
+ let msg = "trailing comma is not permitted after `..`";
+ self.struct_span_err(self.prev_span, msg)
+ .span_label(self.prev_span, msg)
+ .emit();
+ }
+
+ Ok((fields, ddpos, trailing_comma))
+ }
+
+ /// Parses the elements of a slice pattern `[before.., slice?, after..]`.
+ ///
+ /// Returns the patterns before the `..` slice marker, the optional slice
+ /// subpattern itself (a wildcard when `..` appears bare), and the patterns
+ /// after it. Stops at `]` without consuming it.
+ fn parse_pat_vec_elements(
+ &mut self,
+ ) -> PResult<'a, (Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>)> {
+ let mut before = Vec::new();
+ let mut slice = None;
+ let mut after = Vec::new();
+ let mut first = true;
+ // Tracks whether we are still before the `..` slice marker.
+ let mut before_slice = true;
+
+ while self.token != token::CloseDelim(token::Bracket) {
+ if first {
+ first = false;
+ } else {
+ self.expect(&token::Comma)?;
+
+ // Permit a trailing comma before the closing bracket.
+ if self.token == token::CloseDelim(token::Bracket)
+ && (before_slice || !after.is_empty()) {
+ break
+ }
+ }
+
+ if before_slice {
+ if self.eat(&token::DotDot) {
+
+ // A bare `..` (followed by `,` or `]`) acts as a wildcard
+ // slice subpattern.
+ if self.check(&token::Comma) ||
+ self.check(&token::CloseDelim(token::Bracket)) {
+ slice = Some(P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Wild,
+ span: self.prev_span,
+ }));
+ before_slice = false;
+ }
+ continue
+ }
+ }
+
+ let subpat = self.parse_pat(None)?;
+ if before_slice && self.eat(&token::DotDot) {
+ // `pat..` binds the slice itself to `pat`.
+ slice = Some(subpat);
+ before_slice = false;
+ } else if before_slice {
+ before.push(subpat);
+ } else {
+ after.push(subpat);
+ }
+ }
+
+ Ok((before, slice, after))
+ }
+
+ /// Parses a single field of a struct pattern: either the explicit form
+ /// `fieldname: pat` or the shorthand form `(box)? (ref)? (mut)? fieldname`.
+ ///
+ /// `lo` is the start of the field (including any outer attributes), and
+ /// `attrs` are those already-parsed outer attributes.
+ fn parse_pat_field(
+ &mut self,
+ lo: Span,
+ attrs: Vec<Attribute>
+ ) -> PResult<'a, source_map::Spanned<ast::FieldPat>> {
+ // Check if a colon exists one ahead. This means we're parsing a fieldname.
+ let hi;
+ let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
+ // Parsing a pattern of the form "fieldname: pat"
+ let fieldname = self.parse_field_name()?;
+ self.bump();
+ let pat = self.parse_pat(None)?;
+ hi = pat.span;
+ (pat, fieldname, false)
+ } else {
+ // Parsing a pattern of the form "(box) (ref) (mut) fieldname"
+ let is_box = self.eat_keyword(keywords::Box);
+ let boxed_span = self.span;
+ let is_ref = self.eat_keyword(keywords::Ref);
+ let is_mut = self.eat_keyword(keywords::Mut);
+ let fieldname = self.parse_ident()?;
+ hi = self.prev_span;
+
+ // Combine `ref`/`mut` into the binding mode of the shorthand binding.
+ let bind_type = match (is_ref, is_mut) {
+ (true, true) => BindingMode::ByRef(Mutability::Mutable),
+ (true, false) => BindingMode::ByRef(Mutability::Immutable),
+ (false, true) => BindingMode::ByValue(Mutability::Mutable),
+ (false, false) => BindingMode::ByValue(Mutability::Immutable),
+ };
+ let fieldpat = P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Ident(bind_type, fieldname, None),
+ span: boxed_span.to(hi),
+ });
+
+ // `box name` wraps the binding pattern in a box pattern.
+ let subpat = if is_box {
+ P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Box(fieldpat),
+ span: lo.to(hi),
+ })
+ } else {
+ fieldpat
+ };
+ (subpat, fieldname, true)
+ };
+
+ Ok(source_map::Spanned {
+ span: lo.to(hi),
+ node: ast::FieldPat {
+ ident: fieldname,
+ pat: subpat,
+ is_shorthand,
+ attrs: attrs.into(),
+ }
+ })
+ }
+
+ /// Parses the fields of a struct-like pattern.
+ ///
+ /// Returns the parsed fields plus a flag indicating whether `..` (etc)
+ /// was present. Recovers from `...`, from `..` not being last, and from
+ /// a trailing comma after `..`; recoverable errors about a misplaced
+ /// `..` are delayed (in `delayed_err`) so the final suggestion can span
+ /// the whole field list.
+ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
+ let mut fields = Vec::new();
+ let mut etc = false;
+ let mut ate_comma = true;
+ let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
+ let mut etc_span = None;
+
+ while self.token != token::CloseDelim(token::Brace) {
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+
+ // check that a comma comes after every field
+ if !ate_comma {
+ let err = self.struct_span_err(self.prev_span, "expected `,`");
+ if let Some(mut delayed) = delayed_err {
+ delayed.emit();
+ }
+ return Err(err);
+ }
+ ate_comma = false;
+
+ if self.check(&token::DotDot) || self.token == token::DotDotDot {
+ etc = true;
+ let mut etc_sp = self.span;
+
+ if self.token == token::DotDotDot { // Issue #46718
+ // Accept `...` as if it were `..` to avoid further errors
+ let mut err = self.struct_span_err(self.span,
+ "expected field pattern, found `...`");
+ err.span_suggestion(
+ self.span,
+ "to omit remaining fields, use one fewer `.`",
+ "..".to_owned(),
+ Applicability::MachineApplicable
+ );
+ err.emit();
+ }
+ self.bump(); // `..` || `...`
+
+ // `..` directly before `}`: the well-formed case.
+ if self.token == token::CloseDelim(token::Brace) {
+ etc_span = Some(etc_sp);
+ break;
+ }
+ // Anything else after `..` is an error; build it now, but it may
+ // be delayed (see below) to improve the eventual suggestion.
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));
+
+ err.span_label(self.span, "expected `}`");
+ let mut comma_sp = None;
+ if self.token == token::Comma { // Issue #49257
+ etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span));
+ err.span_label(etc_sp,
+ "`..` must be at the end and cannot have a trailing comma");
+ comma_sp = Some(self.span);
+ self.bump();
+ ate_comma = true;
+ }
+
+ etc_span = Some(etc_sp.until(self.span));
+ if self.token == token::CloseDelim(token::Brace) {
+ // If the struct looks otherwise well formed, recover and continue.
+ if let Some(sp) = comma_sp {
+ err.span_suggestion_short(
+ sp,
+ "remove this comma",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ err.emit();
+ break;
+ } else if self.token.is_ident() && ate_comma {
+ // Accept fields coming after `..,`.
+ // This way we avoid "pattern missing fields" errors afterwards.
+ // We delay this error until the end in order to have a span for a
+ // suggested fix.
+ if let Some(mut delayed_err) = delayed_err {
+ delayed_err.emit();
+ return Err(err);
+ } else {
+ delayed_err = Some(err);
+ }
+ } else {
+ if let Some(mut err) = delayed_err {
+ err.emit();
+ }
+ return Err(err);
+ }
+ }
+
+ fields.push(match self.parse_pat_field(lo, attrs) {
+ Ok(field) => field,
+ Err(err) => {
+ // Flush any delayed `..` diagnostic before bailing out.
+ if let Some(mut delayed_err) = delayed_err {
+ delayed_err.emit();
+ }
+ return Err(err);
+ }
+ });
+ ate_comma = self.eat(&token::Comma);
+ }
+
+ if let Some(mut err) = delayed_err {
+ // Emit the delayed misplaced-`..` error, now with a whole-list
+ // suggestion to move `..` to the end.
+ if let Some(etc_span) = etc_span {
+ err.multipart_suggestion(
+ "move the `..` to the end of the field list",
+ vec![
+ (etc_span, String::new()),
+ (self.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
+ ],
+ Applicability::MachineApplicable,
+ );
+ }
+ err.emit();
+ }
+ return Ok((fields, etc));
+ }
+
+ /// Parses the end expression of a range pattern: a (possibly qualified)
+ /// path, or otherwise a literal with an optional leading minus.
+ fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
+     if !self.token.is_path_start() {
+         // Not a path — fall back to a (possibly negated) literal.
+         return self.parse_literal_maybe_minus();
+     }
+     let lo = self.span;
+     let (qself, path) = if self.eat_lt() {
+         // Parse a qualified path
+         let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+         (Some(qself), path)
+     } else {
+         // Parse an unqualified path
+         (None, self.parse_path(PathStyle::Expr)?)
+     };
+     let hi = self.prev_span;
+     Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new()))
+ }
+
+ // helper function to decide whether to parse as ident binding or to try to do
+ // something more complex like range patterns
+ //
+ // One token of lookahead usually decides; `Some(false)` means "not a plain
+ // ident binding" (call/struct/range/path/macro follows), `Some(true)` means
+ // it is. `..` is ambiguous (slice vs range), so `None` defers to a second
+ // token of lookahead: `[a, b.., c]`-style slice positions count as idents.
+ fn parse_as_ident(&mut self) -> bool {
+ self.look_ahead(1, |t| match *t {
+ token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
+ token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false),
+ // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the
+ // range pattern branch
+ token::DotDot => None,
+ _ => Some(true),
+ }).unwrap_or_else(|| self.look_ahead(2, |t| match *t {
+ token::Comma | token::CloseDelim(token::Bracket) => true,
+ _ => false,
+ }))
+ }
+
+ /// A wrapper around `parse_pat` with some special error handling for the
+ /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
+ /// to subpatterns within such).
+ ///
+ /// On an unexpected `,` after the pattern, consumes the rest of the
+ /// comma-sequence (to get a good span) and suggests either parenthesizing
+ /// as a tuple or using `|` for alternatives.
+ fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
+ let pat = self.parse_pat(None)?;
+ if self.token == token::Comma {
+ // An unexpected comma after a top-level pattern is a clue that the
+ // user (perhaps more accustomed to some other language) forgot the
+ // parentheses in what should have been a tuple pattern; return a
+ // suggestion-enhanced error here rather than choking on the comma
+ // later.
+ let comma_span = self.span;
+ self.bump();
+ if let Err(mut err) = self.parse_pat_list() {
+ // We didn't expect this to work anyway; we just wanted
+ // to advance to the end of the comma-sequence so we know
+ // the span to suggest parenthesizing
+ err.cancel();
+ }
+ let seq_span = pat.span.to(self.prev_span);
+ let mut err = self.struct_span_err(comma_span,
+ "unexpected `,` in pattern");
+ // Only offer the rewrites when we can quote the original source.
+ if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) {
+ err.span_suggestion(
+ seq_span,
+ "try adding parentheses to match on a tuple..",
+ format!("({})", seq_snippet),
+ Applicability::MachineApplicable
+ ).span_suggestion(
+ seq_span,
+ "..or a vertical bar to match on multiple alternatives",
+ format!("{}", seq_snippet.replace(",", " |")),
+ Applicability::MachineApplicable
+ );
+ }
+ return Err(err);
+ }
+ Ok(pat)
+ }
+
+ /// Parses a pattern.
+ ///
+ /// `expected` optionally names what was expected, for error messages.
+ /// Range patterns (`a..=b`, `a..b`) are permitted at this level.
+ pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
+ self.parse_pat_with_range_pat(true, expected)
+ }
+
+ /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are
+ /// allowed).
+ ///
+ /// Dispatches on the leading token: `&`/`&&` references, `(` tuples,
+ /// `[` slices; then keyword forms (`_`, `mut`, `ref`, `box`), plain
+ /// ident bindings, path-led forms (macro calls, ranges, struct/tuple
+ /// struct patterns, bare paths), and finally literals with an optional
+ /// minus. When `allow_range_pat` is false, a parsed modern range
+ /// pattern is rejected with a parenthesization suggestion.
+ fn parse_pat_with_range_pat(
+ &mut self,
+ allow_range_pat: bool,
+ expected: Option<&'static str>,
+ ) -> PResult<'a, P<Pat>> {
+ maybe_whole!(self, NtPat, |x| x);
+
+ let lo = self.span;
+ let pat;
+ match self.token {
+ token::BinOp(token::And) | token::AndAnd => {
+ // Parse &pat / &mut pat
+ self.expect_and()?;
+ let mutbl = self.parse_mutability();
+ if let token::Lifetime(ident) = self.token {
+ let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern",
+ ident));
+ err.span_label(self.span, "unexpected lifetime");
+ return Err(err);
+ }
+ // Reference subpatterns may not themselves be bare range patterns.
+ let subpat = self.parse_pat_with_range_pat(false, expected)?;
+ pat = PatKind::Ref(subpat, mutbl);
+ }
+ token::OpenDelim(token::Paren) => {
+ // Parse (pat,pat,pat,...) as tuple pattern
+ let (fields, ddpos, trailing_comma) = self.parse_parenthesized_pat_list()?;
+ // `(p)` with no `..` and no trailing comma is a parenthesized
+ // pattern, not a one-element tuple.
+ pat = if fields.len() == 1 && ddpos.is_none() && !trailing_comma {
+ PatKind::Paren(fields.into_iter().nth(0).unwrap())
+ } else {
+ PatKind::Tuple(fields, ddpos)
+ };
+ }
+ token::OpenDelim(token::Bracket) => {
+ // Parse [pat,pat,...] as slice pattern
+ self.bump();
+ let (before, slice, after) = self.parse_pat_vec_elements()?;
+ self.expect(&token::CloseDelim(token::Bracket))?;
+ pat = PatKind::Slice(before, slice, after);
+ }
+ // At this point, token != &, &&, (, [
+ _ => if self.eat_keyword(keywords::Underscore) {
+ // Parse _
+ pat = PatKind::Wild;
+ } else if self.eat_keyword(keywords::Mut) {
+ // Parse mut ident @ pat / mut ref ident @ pat
+ let mutref_span = self.prev_span.to(self.span);
+ let binding_mode = if self.eat_keyword(keywords::Ref) {
+ // `mut ref` is the wrong order; recover as `ref mut`.
+ self.diagnostic()
+ .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
+ .span_suggestion(
+ mutref_span,
+ "try switching the order",
+ "ref mut".into(),
+ Applicability::MachineApplicable
+ ).emit();
+ BindingMode::ByRef(Mutability::Mutable)
+ } else {
+ BindingMode::ByValue(Mutability::Mutable)
+ };
+ pat = self.parse_pat_ident(binding_mode)?;
+ } else if self.eat_keyword(keywords::Ref) {
+ // Parse ref ident @ pat / ref mut ident @ pat
+ let mutbl = self.parse_mutability();
+ pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
+ } else if self.eat_keyword(keywords::Box) {
+ // Parse box pat
+ let subpat = self.parse_pat_with_range_pat(false, None)?;
+ pat = PatKind::Box(subpat);
+ } else if self.token.is_ident() && !self.token.is_reserved_ident() &&
+ self.parse_as_ident() {
+ // Parse ident @ pat
+ // This can give false positives and parse nullary enums,
+ // they are dealt with later in resolve
+ let binding_mode = BindingMode::ByValue(Mutability::Immutable);
+ pat = self.parse_pat_ident(binding_mode)?;
+ } else if self.token.is_path_start() {
+ // Parse pattern starting with a path
+ let (qself, path) = if self.eat_lt() {
+ // Parse a qualified path
+ let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+ (Some(qself), path)
+ } else {
+ // Parse an unqualified path
+ (None, self.parse_path(PathStyle::Expr)?)
+ };
+ // What follows the path decides the pattern form.
+ match self.token {
+ token::Not if qself.is_none() => {
+ // Parse macro invocation
+ self.bump();
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ let mac = respan(lo.to(self.prev_span), Mac_ { path, tts, delim });
+ pat = PatKind::Mac(mac);
+ }
+ token::DotDotDot | token::DotDotEq | token::DotDot => {
+ let end_kind = match self.token {
+ token::DotDot => RangeEnd::Excluded,
+ token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot),
+ token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq),
+ _ => panic!("can only parse `..`/`...`/`..=` for ranges \
+ (checked above)"),
+ };
+ let op_span = self.span;
+ // Parse range
+ let span = lo.to(self.prev_span);
+ let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
+ self.bump();
+ let end = self.parse_pat_range_end()?;
+ let op = Spanned { span: op_span, node: end_kind };
+ pat = PatKind::Range(begin, end, op);
+ }
+ token::OpenDelim(token::Brace) => {
+ if qself.is_some() {
+ let msg = "unexpected `{` after qualified path";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ return Err(err);
+ }
+ // Parse struct pattern
+ self.bump();
+ // On a malformed field list, emit and recover with an
+ // empty field set so parsing can continue.
+ let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
+ e.emit();
+ self.recover_stmt();
+ (vec![], false)
+ });
+ self.bump();
+ pat = PatKind::Struct(path, fields, etc);
+ }
+ token::OpenDelim(token::Paren) => {
+ if qself.is_some() {
+ let msg = "unexpected `(` after qualified path";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ return Err(err);
+ }
+ // Parse tuple struct or enum pattern
+ let (fields, ddpos, _) = self.parse_parenthesized_pat_list()?;
+ pat = PatKind::TupleStruct(path, fields, ddpos)
+ }
+ _ => pat = PatKind::Path(qself, path),
+ }
+ } else {
+ // Try to parse everything else as literal with optional minus
+ match self.parse_literal_maybe_minus() {
+ Ok(begin) => {
+ let op_span = self.span;
+ // A range operator after the literal makes this a
+ // literal-led range pattern.
+ if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
+ self.check(&token::DotDotDot) {
+ let end_kind = if self.eat(&token::DotDotDot) {
+ RangeEnd::Included(RangeSyntax::DotDotDot)
+ } else if self.eat(&token::DotDotEq) {
+ RangeEnd::Included(RangeSyntax::DotDotEq)
+ } else if self.eat(&token::DotDot) {
+ RangeEnd::Excluded
+ } else {
+ panic!("impossible case: we already matched \
+ on a range-operator token")
+ };
+ let end = self.parse_pat_range_end()?;
+ let op = Spanned { span: op_span, node: end_kind };
+ pat = PatKind::Range(begin, end, op);
+ } else {
+ pat = PatKind::Lit(begin);
+ }
+ }
+ Err(mut err) => {
+ self.cancel(&mut err);
+ let expected = expected.unwrap_or("pattern");
+ let msg = format!(
+ "expected {}, found {}",
+ expected,
+ self.this_token_descr(),
+ );
+ let mut err = self.fatal(&msg);
+ err.span_label(self.span, format!("expected {}", expected));
+ return Err(err);
+ }
+ }
+ }
+ }
+
+ let pat = Pat { node: pat, span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID };
+ let pat = self.maybe_recover_from_bad_qpath(pat, true)?;
+
+ if !allow_range_pat {
+ // Modern range patterns are ambiguous in this position; only the
+ // legacy `...` form is allowed through.
+ match pat.node {
+ PatKind::Range(
+ _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
+ ) => {},
+ PatKind::Range(..) => {
+ let mut err = self.struct_span_err(
+ pat.span,
+ "the range pattern here has ambiguous interpretation",
+ );
+ err.span_suggestion(
+ pat.span,
+ "add parentheses to clarify the precedence",
+ format!("({})", pprust::pat_to_string(&pat)),
+ // "ambiguous interpretation" implies that we have to be guessing
+ Applicability::MaybeIncorrect
+ );
+ return Err(err);
+ }
+ _ => {}
+ }
+ }
+
+ Ok(P(pat))
+ }
+
+ /// Parses `ident` or `ident @ pat`.
+ /// Used by the `copy foo` and `ref foo` patterns to give a good
+ /// error message when parsing mistakes like `ref foo(a, b)`.
+ fn parse_pat_ident(&mut self,
+ binding_mode: ast::BindingMode)
+ -> PResult<'a, PatKind> {
+ let ident = self.parse_ident()?;
+ // An `@` after the identifier introduces a sub-pattern, e.g. `x @ 1..=5`.
+ let sub = if self.eat(&token::At) {
+ Some(self.parse_pat(Some("binding pattern"))?)
+ } else {
+ None
+ };
+
+ // just to be friendly, if they write something like
+ // ref Some(i)
+ // we end up here with ( as the current token. This shortly
+ // leads to a parse error. Note that if there is no explicit
+ // binding mode then we do not end up here, because the lookahead
+ // will direct us over to parse_enum_variant()
+ if self.token == token::OpenDelim(token::Paren) {
+ return Err(self.span_fatal(
+ self.prev_span,
+ "expected identifier, found enum pattern"))
+ }
+
+ Ok(PatKind::Ident(binding_mode, ident, sub))
+ }
+
+ /// Parses a local variable declaration (`pat[: ty][= init]`).
+ ///
+ /// Assumes the caller has already consumed the `let` keyword, whose span
+ /// (`self.prev_span`) is used as the start of the `Local`'s span.
+ fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
+ let lo = self.prev_span;
+ let pat = self.parse_top_level_pat()?;
+
+ let (err, ty) = if self.eat(&token::Colon) {
+ // Save the state of the parser before parsing type normally, in case there is a `:`
+ // instead of an `=` typo.
+ let parser_snapshot_before_type = self.clone();
+ let colon_sp = self.prev_span;
+ match self.parse_ty() {
+ Ok(ty) => (None, Some(ty)),
+ Err(mut err) => {
+ // Rewind to before attempting to parse the type and continue parsing
+ // (the snapshot taken *after* the failed attempt is kept so we can
+ // restore it if the initializer also fails below).
+ let parser_snapshot_after_type = self.clone();
+ mem::replace(self, parser_snapshot_before_type);
+
+ let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap();
+ err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
+ (Some((parser_snapshot_after_type, colon_sp, err)), None)
+ }
+ }
+ } else {
+ (None, None)
+ };
+ // Four cases: (init ok | init err) x (ty ok | deferred ty err). The deferred
+ // type error is only resolved once we know whether the initializer parsed.
+ let init = match (self.parse_initializer(err.is_some()), err) {
+ (Ok(init), None) => { // init parsed, ty parsed
+ init
+ }
+ (Ok(init), Some((_, colon_sp, mut err))) => { // init parsed, ty error
+ // Could parse the type as if it were the initializer, it is likely there was a
+ // typo in the code: `:` instead of `=`. Add suggestion and emit the error.
+ err.span_suggestion_short(
+ colon_sp,
+ "use `=` if you meant to assign",
+ "=".to_string(),
+ Applicability::MachineApplicable
+ );
+ err.emit();
+ // As this was parsed successfully, continue as if the code has been fixed for the
+ // rest of the file. It will still fail due to the emitted error, but we avoid
+ // extra noise.
+ init
+ }
+ (Err(mut init_err), Some((snapshot, _, ty_err))) => { // init error, ty error
+ init_err.cancel();
+ // Couldn't parse the type nor the initializer, only raise the type error and
+ // return to the parser state before parsing the type as the initializer.
+ // let x: <parse_error>;
+ mem::replace(self, snapshot);
+ return Err(ty_err);
+ }
+ (Err(err), None) => { // init error, ty parsed
+ // Couldn't parse the initializer and we're not attempting to recover a failed
+ // parse of the type, return the error.
+ return Err(err);
+ }
+ };
+ // Include a trailing `;` in the span if one is present (not yet consumed).
+ let hi = if self.token == token::Semi {
+ self.span
+ } else {
+ self.prev_span
+ };
+ Ok(P(ast::Local {
+ ty,
+ pat,
+ init,
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ attrs,
+ }))
+ }
+
+ /// Parses a structure field: `ident: Ty`.
+ ///
+ /// `lo`, `vis` and `attrs` are supplied by the caller, which has already
+ /// parsed the field's attributes and visibility.
+ fn parse_name_and_ty(&mut self,
+ lo: Span,
+ vis: Visibility,
+ attrs: Vec<Attribute>)
+ -> PResult<'a, StructField> {
+ let name = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+ Ok(StructField {
+ span: lo.to(self.prev_span),
+ ident: Some(name),
+ vis,
+ id: ast::DUMMY_NODE_ID,
+ ty,
+ attrs,
+ })
+ }
+
+ /// Emits an expected-item-after-attributes error.
+ ///
+ /// NOTE(review): `attrs.last().unwrap()` below assumes `attrs` is non-empty;
+ /// callers must only invoke this after actually seeing attributes.
+ fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> {
+ let message = match attrs.last() {
+ Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
+ _ => "expected item after attributes",
+ };
+
+ let mut err = self.diagnostic().struct_span_err(self.prev_span, message);
+ if attrs.last().unwrap().is_sugared_doc {
+ err.span_label(self.prev_span, "this doc comment doesn't document anything");
+ }
+ Err(err)
+ }
+
+ /// Parse a statement. This stops just before trailing semicolons on everything but items.
+ /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
+ ///
+ /// Always returns `Ok`: errors are emitted and recovered from inside `parse_stmt_`.
+ pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
+ Ok(self.parse_stmt_(true))
+ }
+
+ // Eat tokens until we can be relatively sure we reached the end of the
+ // statement. This is something of a best-effort heuristic.
+ //
+ // We terminate when we find an unmatched `}` (without consuming it).
+ // Convenience wrapper over `recover_stmt_` with both modes set to `Ignore`.
+ fn recover_stmt(&mut self) {
+ self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
+ }
+
+ // If `break_on_semi` is `Break`, then we will stop consuming tokens after
+ // finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
+ // approximate - it can mean we break too early due to macros, but that
+ // should only lead to sub-optimal recovery, not inaccurate parsing).
+ //
+ // If `break_on_block` is `Break`, then we will stop consuming tokens
+ // after finding (and consuming) a brace-delimited block.
+ fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
+ // Track delimiter nesting so `;`/`,` inside braces/brackets don't stop us.
+ let mut brace_depth = 0;
+ let mut bracket_depth = 0;
+ let mut in_block = false;
+ debug!("recover_stmt_ enter loop (semi={:?}, block={:?})",
+ break_on_semi, break_on_block);
+ loop {
+ debug!("recover_stmt_ loop {:?}", self.token);
+ match self.token {
+ token::OpenDelim(token::DelimToken::Brace) => {
+ brace_depth += 1;
+ self.bump();
+ if break_on_block == BlockMode::Break &&
+ brace_depth == 1 &&
+ bracket_depth == 0 {
+ in_block = true;
+ }
+ }
+ token::OpenDelim(token::DelimToken::Bracket) => {
+ bracket_depth += 1;
+ self.bump();
+ }
+ token::CloseDelim(token::DelimToken::Brace) => {
+ if brace_depth == 0 {
+ // Unmatched `}`: leave it for the caller to consume.
+ debug!("recover_stmt_ return - close delim {:?}", self.token);
+ break;
+ }
+ brace_depth -= 1;
+ self.bump();
+ if in_block && bracket_depth == 0 && brace_depth == 0 {
+ debug!("recover_stmt_ return - block end {:?}", self.token);
+ break;
+ }
+ }
+ token::CloseDelim(token::DelimToken::Bracket) => {
+ bracket_depth -= 1;
+ // Clamp at zero: an unmatched `]` shouldn't poison later tracking.
+ if bracket_depth < 0 {
+ bracket_depth = 0;
+ }
+ self.bump();
+ }
+ token::Eof => {
+ debug!("recover_stmt_ return - Eof");
+ break;
+ }
+ token::Semi => {
+ self.bump();
+ if break_on_semi == SemiColonMode::Break &&
+ brace_depth == 0 &&
+ bracket_depth == 0 {
+ debug!("recover_stmt_ return - Semi");
+ break;
+ }
+ }
+ token::Comma => {
+ if break_on_semi == SemiColonMode::Comma &&
+ brace_depth == 0 &&
+ bracket_depth == 0 {
+ // NOTE(review): this message says "Semi" but this is the comma
+ // branch — looks like a copy-paste of the arm above.
+ debug!("recover_stmt_ return - Semi");
+ break;
+ } else {
+ self.bump();
+ }
+ }
+ _ => {
+ self.bump()
+ }
+ }
+ }
+ }
+
+ // Parses a statement, emitting any error and recovering (eating tokens up
+ // to a statement boundary) instead of propagating it; returns `None` on error.
+ fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
+ self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
+ e.emit();
+ self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
+ None
+ })
+ }
+
+ // Returns `true` if the upcoming tokens start an async block:
+ // `async {` or `async move {`.
+ fn is_async_block(&mut self) -> bool {
+ self.token.is_keyword(keywords::Async) &&
+ (
+ ( // `async move {`
+ self.look_ahead(1, |t| t.is_keyword(keywords::Move)) &&
+ self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
+ ) || ( // `async {`
+ self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
+ )
+ )
+ }
+
+ // Returns `true` if the upcoming tokens are the obsolete `do catch {`
+ // form (outside a no-struct-literal restriction context).
+ fn is_do_catch_block(&mut self) -> bool {
+ self.token.is_keyword(keywords::Do) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
+ self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
+ !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
+ }
+
+ // Returns `true` if the upcoming tokens start a `try {` block
+ // (2018 edition only).
+ fn is_try_block(&mut self) -> bool {
+ self.token.is_keyword(keywords::Try) &&
+ self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
+ self.span.rust_2018() &&
+ // prevent `while try {} {}`, `if try {} {} else {}`, etc.
+ !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
+ }
+
+ // `union` is a contextual keyword: this is a union *item* only when
+ // followed by a non-reserved identifier (`union U { .. }`, not `union::x`).
+ fn is_union_item(&self) -> bool {
+ self.token.is_keyword(keywords::Union) &&
+ self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
+ }
+
+ // `crate` starts a visibility modifier unless followed by `::`
+ // (in which case it is the path `crate::...`).
+ fn is_crate_vis(&self) -> bool {
+ self.token.is_keyword(keywords::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
+ }
+
+ // Returns `true` for the (pre-`impl Trait` alias) `existential type` declaration.
+ fn is_existential_type_decl(&self) -> bool {
+ self.token.is_keyword(keywords::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Type))
+ }
+
+ // Returns `true` for `auto trait ...` or `unsafe auto trait ...`
+ // (`auto` is a contextual keyword).
+ fn is_auto_trait_item(&mut self) -> bool {
+ // auto trait
+ (self.token.is_keyword(keywords::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ || // unsafe auto trait
+ (self.token.is_keyword(keywords::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Auto)) &&
+ self.look_ahead(2, |t| t.is_keyword(keywords::Trait)))
+ }
+
+ /// Tries to eat a macro definition item: either a 2.0-style
+ /// `macro name { .. }` / `macro name(..) { .. }` or a legacy
+ /// `macro_rules! name { .. }`. Returns `Ok(None)` if the current
+ /// token does not start a macro definition.
+ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
+ -> PResult<'a, Option<P<Item>>> {
+ let token_lo = self.span;
+ let (ident, def) = match self.token {
+ token::Ident(ident, false) if ident.name == keywords::Macro.name() => {
+ self.bump();
+ let ident = self.parse_ident()?;
+ let tokens = if self.check(&token::OpenDelim(token::Brace)) {
+ match self.parse_token_tree() {
+ TokenTree::Delimited(_, _, tts) => tts,
+ _ => unreachable!(),
+ }
+ } else if self.check(&token::OpenDelim(token::Paren)) {
+ let args = self.parse_token_tree();
+ let body = if self.check(&token::OpenDelim(token::Brace)) {
+ self.parse_token_tree()
+ } else {
+ self.unexpected()?;
+ unreachable!()
+ };
+ // Desugar `macro m(args) { body }` into the token stream
+ // `(args) => { body }` shared with the legacy representation.
+ TokenStream::new(vec![
+ args.into(),
+ TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+ body.into(),
+ ])
+ } else {
+ self.unexpected()?;
+ unreachable!()
+ };
+
+ (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
+ }
+ token::Ident(ident, _) if ident.name == "macro_rules" &&
+ self.look_ahead(1, |t| *t == token::Not) => {
+ let prev_span = self.prev_span;
+ // `pub macro_rules!` is an error; complain before consuming tokens.
+ self.complain_if_pub_macro(&vis.node, prev_span);
+ self.bump();
+ self.bump();
+
+ let ident = self.parse_ident()?;
+ let (delim, tokens) = self.expect_delimited_token_tree()?;
+ if delim != MacDelimiter::Brace {
+ if !self.eat(&token::Semi) {
+ let msg = "macros that expand to items must either \
+ be surrounded with braces or followed by a semicolon";
+ self.span_err(self.prev_span, msg);
+ }
+ }
+
+ (ident, ast::MacroDef { tokens: tokens, legacy: true })
+ }
+ _ => return Ok(None),
+ };
+
+ let span = lo.to(self.prev_span);
+ Ok(Some(self.mk_item(span, ident, ItemKind::MacroDef(def), vis.clone(), attrs.to_vec())))
+ }
+
+ /// Parses a statement, propagating errors to the caller instead of
+ /// recovering. Returns `Ok(None)` when only `;` or `}` remains.
+ /// Dispatch order: `let`, macro definitions, path-started expression or
+ /// macro-invocation statements, then items / line expressions.
+ fn parse_stmt_without_recovery(&mut self,
+ macro_legacy_warnings: bool)
+ -> PResult<'a, Option<Stmt>> {
+ maybe_whole!(self, NtStmt, |x| Some(x));
+
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+
+ Ok(Some(if self.eat_keyword(keywords::Let) {
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Local(self.parse_local(attrs.into())?),
+ span: lo.to(self.prev_span),
+ }
+ } else if let Some(macro_def) = self.eat_macro_def(
+ &attrs,
+ &source_map::respan(lo, VisibilityKind::Inherited),
+ lo,
+ )? {
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Item(macro_def),
+ span: lo.to(self.prev_span),
+ }
+ // Starts like a simple path, being careful to avoid contextual keywords
+ // such as a union items, item with `crate` visibility or auto trait items.
+ // Our goal here is to parse an arbitrary path `a::b::c` but not something that starts
+ // like a path (1 token), but it fact not a path.
+ // `union::b::c` - path, `union U { ... }` - not a path.
+ // `crate::b::c` - path, `crate struct S;` - not a path.
+ } else if self.token.is_path_start() &&
+ !self.token.is_qpath_start() &&
+ !self.is_union_item() &&
+ !self.is_crate_vis() &&
+ !self.is_existential_type_decl() &&
+ !self.is_auto_trait_item() {
+ let pth = self.parse_path(PathStyle::Expr)?;
+
+ // No `!` after the path: this is an expression statement, not a macro.
+ if !self.eat(&token::Not) {
+ let expr = if self.check(&token::OpenDelim(token::Brace)) {
+ self.parse_struct_expr(lo, pth, ThinVec::new())?
+ } else {
+ let hi = self.prev_span;
+ self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new())
+ };
+
+ let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
+ let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
+ this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
+ })?;
+
+ return Ok(Some(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Expr(expr),
+ span: lo.to(self.prev_span),
+ }));
+ }
+
+ // it's a macro invocation
+ let id = match self.token {
+ token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier
+ _ => self.parse_ident()?,
+ };
+
+ // check that we're pointing at delimiters (need to check
+ // again after the `if`, because of `parse_ident`
+ // consuming more tokens).
+ match self.token {
+ token::OpenDelim(_) => {}
+ _ => {
+ // we only expect an ident if we didn't parse one
+ // above.
+ let ident_str = if id.name == keywords::Invalid.name() {
+ "identifier, "
+ } else {
+ ""
+ };
+ let tok_str = self.this_token_descr();
+ let mut err = self.fatal(&format!("expected {}`(` or `{{`, found {}",
+ ident_str,
+ tok_str));
+ err.span_label(self.span, format!("expected {}`(` or `{{`", ident_str));
+ return Err(err)
+ },
+ }
+
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ let hi = self.prev_span;
+
+ let style = if delim == MacDelimiter::Brace {
+ MacStmtStyle::Braces
+ } else {
+ MacStmtStyle::NoBraces
+ };
+
+ // `Invalid` ident means a plain `path!(...)` invocation: a macro
+ // *statement or expression*. Otherwise (`path! ident (...)`) it is
+ // definitely a macro-defined *item*.
+ if id.name == keywords::Invalid.name() {
+ let mac = respan(lo.to(hi), Mac_ { path: pth, tts, delim });
+ let node = if delim == MacDelimiter::Brace ||
+ self.token == token::Semi || self.token == token::Eof {
+ StmtKind::Mac(P((mac, style, attrs.into())))
+ }
+ // We used to incorrectly stop parsing macro-expanded statements here.
+ // If the next token will be an error anyway but could have parsed with the
+ // earlier behavior, stop parsing here and emit a warning to avoid breakage.
+ else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
+ // These can continue an expression, so we can't stop parsing and warn.
+ token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
+ token::BinOp(token::Minus) | token::BinOp(token::Star) |
+ token::BinOp(token::And) | token::BinOp(token::Or) |
+ token::AndAnd | token::OrOr |
+ token::DotDot | token::DotDotDot | token::DotDotEq => false,
+ _ => true,
+ } {
+ self.warn_missing_semicolon();
+ StmtKind::Mac(P((mac, style, attrs.into())))
+ } else {
+ let e = self.mk_mac_expr(lo.to(hi), mac.node, ThinVec::new());
+ let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
+ let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
+ StmtKind::Expr(e)
+ };
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ node,
+ }
+ } else {
+ // if it has a special ident, it's definitely an item
+ //
+ // Require a semicolon or braces.
+ if style != MacStmtStyle::Braces {
+ if !self.eat(&token::Semi) {
+ self.span_err(self.prev_span,
+ "macros that expand to items must \
+ either be surrounded with braces or \
+ followed by a semicolon");
+ }
+ }
+ let span = lo.to(hi);
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span,
+ node: StmtKind::Item({
+ self.mk_item(
+ span, id /*id is good here*/,
+ ItemKind::Mac(respan(span, Mac_ { path: pth, tts, delim })),
+ respan(lo, VisibilityKind::Inherited),
+ attrs)
+ }),
+ }
+ }
+ } else {
+ // FIXME: Bad copy of attrs
+ let old_directory_ownership =
+ mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock);
+ let item = self.parse_item_(attrs.clone(), false, true)?;
+ self.directory.ownership = old_directory_ownership;
+
+ match item {
+ Some(i) => Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(i.span),
+ node: StmtKind::Item(i),
+ },
+ None => {
+ // Warn about attributes that cannot attach to anything
+ // (statement ended, or only a doc comment was seen).
+ let unused_attrs = |attrs: &[Attribute], s: &mut Self| {
+ if !attrs.is_empty() {
+ if s.prev_token_kind == PrevTokenKind::DocComment {
+ s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
+ } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
+ s.span_err(s.span, "expected statement after outer attribute");
+ }
+ }
+ };
+
+ // Do not attempt to parse an expression if we're done here.
+ if self.token == token::Semi {
+ unused_attrs(&attrs, self);
+ self.bump();
+ return Ok(None);
+ }
+
+ if self.token == token::CloseDelim(token::Brace) {
+ unused_attrs(&attrs, self);
+ return Ok(None);
+ }
+
+ // Remainder are line-expr stmts.
+ let e = self.parse_expr_res(
+ Restrictions::STMT_EXPR, Some(attrs.into()))?;
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(e.span),
+ node: StmtKind::Expr(e),
+ }
+ }
+ }
+ }))
+ }
+
+ /// Checks if this expression is a successfully parsed statement.
+ ///
+ /// True only in statement position (`STMT_EXPR` restriction) for
+ /// expressions that do not need a `;` to be a statement (e.g. blocks).
+ fn expr_is_complete(&mut self, e: &Expr) -> bool {
+ self.restrictions.contains(Restrictions::STMT_EXPR) &&
+ !classify::expr_requires_semi_to_be_stmt(e)
+ }
+
+ /// Parses a block. No inner attributes are allowed.
+ ///
+ /// On a missing `{`, attempts several recoveries before erroring:
+ /// `and`/`or` keyword suggestions and a "place this statement in a
+ /// block" suggestion for C-style `if (cond) stmt;` code.
+ pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
+ maybe_whole!(self, NtBlock, |x| x);
+
+ let lo = self.span;
+
+ if !self.eat(&token::OpenDelim(token::Brace)) {
+ let sp = self.span;
+ let tok = self.this_token_descr();
+ let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
+ let do_not_suggest_help =
+ self.token.is_keyword(keywords::In) || self.token == token::Colon;
+
+ if self.token.is_ident_named("and") {
+ e.span_suggestion_short(
+ self.span,
+ "use `&&` instead of `and` for the boolean operator",
+ "&&".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ if self.token.is_ident_named("or") {
+ e.span_suggestion_short(
+ self.span,
+ "use `||` instead of `or` for the boolean operator",
+ "||".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+
+ // Check to see if the user has written something like
+ //
+ // if (cond)
+ // bar;
+ //
+ // Which is valid in other languages, but not Rust.
+ match self.parse_stmt_without_recovery(false) {
+ Ok(Some(stmt)) => {
+ if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
+ || do_not_suggest_help {
+ // if the next token is an open brace (e.g., `if a b {`), the place-
+ // inside-a-block suggestion would be more likely wrong than right
+ e.span_label(sp, "expected `{`");
+ return Err(e);
+ }
+ let mut stmt_span = stmt.span;
+ // expand the span to include the semicolon, if it exists
+ if self.eat(&token::Semi) {
+ stmt_span = stmt_span.with_hi(self.prev_span.hi());
+ }
+ // Pretty-print the statement wrapped in `{ ... }` for the suggestion.
+ let sugg = pprust::to_string(|s| {
+ use crate::print::pprust::{PrintState, INDENT_UNIT};
+ s.ibox(INDENT_UNIT)?;
+ s.bopen()?;
+ s.print_stmt(&stmt)?;
+ s.bclose_maybe_open(stmt.span, INDENT_UNIT, false)
+ });
+ e.span_suggestion(
+ stmt_span,
+ "try placing this code inside a block",
+ sugg,
+ // speculative, has been misleading in the past (closed Issue #46836)
+ Applicability::MaybeIncorrect
+ );
+ }
+ Err(mut e) => {
+ self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
+ self.cancel(&mut e);
+ }
+ _ => ()
+ }
+ e.span_label(sp, "expected `{`");
+ return Err(e);
+ }
+
+ self.parse_block_tail(lo, BlockCheckMode::Default)
+ }
+
+ /// Parses a block. Inner attributes are allowed.
+ ///
+ /// Returns the inner attributes together with the parsed block; an
+ /// interpolated `NtBlock` yields no attributes.
+ fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
+ maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
+
+ let lo = self.span;
+ self.expect(&token::OpenDelim(token::Brace))?;
+ Ok((self.parse_inner_attributes()?,
+ self.parse_block_tail(lo, BlockCheckMode::Default)?))
+ }
+
+ /// Parses the rest of a block expression or function body.
+ /// Precondition: already parsed the '{'.
+ fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
+ let mut stmts = vec![];
+ while !self.eat(&token::CloseDelim(token::Brace)) {
+ let stmt = match self.parse_full_stmt(false) {
+ Err(mut err) => {
+ // Emit, skip to a statement boundary, and substitute a dummy
+ // expression statement so parsing of the block can continue.
+ err.emit();
+ self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
+ Some(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Expr(DummyResult::raw_expr(self.span, true)),
+ span: self.span,
+ })
+ }
+ Ok(stmt) => stmt,
+ };
+ if let Some(stmt) = stmt {
+ stmts.push(stmt);
+ } else if self.token == token::Eof {
+ break;
+ } else {
+ // Found only `;` or `}`.
+ continue;
+ };
+ }
+ Ok(P(ast::Block {
+ stmts,
+ id: ast::DUMMY_NODE_ID,
+ rules: s,
+ span: lo.to(self.prev_span),
+ }))
+ }
+
+ /// Parses a statement, including the trailing semicolon.
+ ///
+ /// Enforces per-kind semicolon rules: trailing expressions must be
+ /// followed by `;` or `}`, and `let` statements require a `;` (modulo
+ /// the legacy macro warning).
+ crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+ // skip looking for a trailing semicolon when we have an interpolated statement
+ maybe_whole!(self, NtStmt, |x| Some(x));
+
+ let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? {
+ Some(stmt) => stmt,
+ None => return Ok(None),
+ };
+
+ match stmt.node {
+ StmtKind::Expr(ref expr) if self.token != token::Eof => {
+ // expression without semicolon
+ if classify::expr_requires_semi_to_be_stmt(expr) {
+ // Just check for errors and recover; do not eat semicolon yet.
+ if let Err(mut e) =
+ self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
+ {
+ e.emit();
+ self.recover_stmt();
+ }
+ }
+ }
+ StmtKind::Local(..) => {
+ // We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
+ if macro_legacy_warnings && self.token != token::Semi {
+ self.warn_missing_semicolon();
+ } else {
+ self.expect_one_of(&[], &[token::Semi])?;
+ }
+ }
+ _ => {}
+ }
+
+ if self.eat(&token::Semi) {
+ stmt = stmt.add_trailing_semicolon();
+ }
+
+ // Extend the statement's span over the consumed `;`, if any.
+ stmt.span = stmt.span.with_hi(self.prev_span.hi());
+ Ok(Some(stmt))
+ }
+
+ // Warns (rather than errors) about a missing `;` for legacy
+ // macro-expanded statements that were historically accepted.
+ fn warn_missing_semicolon(&self) {
+ self.diagnostic().struct_span_warn(self.span, {
+ &format!("expected `;`, found {}", self.this_token_descr())
+ }).note({
+ "This was erroneously allowed and will become a hard error in a future release"
+ }).emit();
+ }
+
+ // Errors on `...` in expression position, suggesting `..` (exclusive)
+ // or `..=` (inclusive) instead.
+ fn err_dotdotdot_syntax(&self, span: Span) {
+ self.diagnostic().struct_span_err(span, {
+ "unexpected token: `...`"
+ }).span_suggestion(
+ span, "use `..` for an exclusive range", "..".to_owned(),
+ Applicability::MaybeIncorrect
+ ).span_suggestion(
+ span, "or `..=` for an inclusive range", "..=".to_owned(),
+ Applicability::MaybeIncorrect
+ ).emit();
+ }
+
+ /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
+ ///
+ /// ```
+ /// BOUND = TY_BOUND | LT_BOUND
+ /// LT_BOUND = LIFETIME (e.g., `'a`)
+ /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
+ /// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
+ /// ```
+ ///
+ /// `colon_span` is the span of the `:` that introduced the bound list, if
+ /// any; it is used to build removal suggestions for negative bounds.
+ fn parse_generic_bounds_common(&mut self,
+ allow_plus: bool,
+ colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
+ let mut bounds = Vec::new();
+ // Spans of erroneous `!Trait` bounds, collected so a single diagnostic
+ // (with a fix-it removing them) can be emitted after the loop.
+ let mut negative_bounds = Vec::new();
+ let mut last_plus_span = None;
+ loop {
+ // This needs to be synchronized with `Token::can_begin_bound`.
+ let is_bound_start = self.check_path() || self.check_lifetime() ||
+ self.check(&token::Not) || // used for error reporting only
+ self.check(&token::Question) ||
+ self.check_keyword(keywords::For) ||
+ self.check(&token::OpenDelim(token::Paren));
+ if is_bound_start {
+ let lo = self.span;
+ let has_parens = self.eat(&token::OpenDelim(token::Paren));
+ let inner_lo = self.span;
+ let is_negative = self.eat(&token::Not);
+ let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None };
+ if self.token.is_lifetime() {
+ if let Some(question_span) = question {
+ self.span_err(question_span,
+ "`?` may only modify trait bounds, not lifetime bounds");
+ }
+ bounds.push(GenericBound::Outlives(self.expect_lifetime()));
+ if has_parens {
+ // `('a)` parses but is rejected with a remove-parens suggestion.
+ let inner_span = inner_lo.to(self.prev_span);
+ self.expect(&token::CloseDelim(token::Paren))?;
+ let mut err = self.struct_span_err(
+ lo.to(self.prev_span),
+ "parenthesized lifetime bounds are not supported"
+ );
+ if let Ok(snippet) = self.sess.source_map().span_to_snippet(inner_span) {
+ err.span_suggestion_short(
+ lo.to(self.prev_span),
+ "remove the parentheses",
+ snippet.to_owned(),
+ Applicability::MachineApplicable
+ );
+ }
+ err.emit();
+ }
+ } else {
+ let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+ let path = self.parse_path(PathStyle::Type)?;
+ if has_parens {
+ self.expect(&token::CloseDelim(token::Paren))?;
+ }
+ let poly_span = lo.to(self.prev_span);
+ if is_negative {
+ negative_bounds.push(
+ last_plus_span.or(colon_span).unwrap()
+ .to(poly_span));
+ } else {
+ let poly_trait = PolyTraitRef::new(lifetime_defs, path, poly_span);
+ let modifier = if question.is_some() {
+ TraitBoundModifier::Maybe
+ } else {
+ TraitBoundModifier::None
+ };
+ bounds.push(GenericBound::Trait(poly_trait, modifier));
+ }
+ }
+ } else {
+ break
+ }
+
+ if !allow_plus || !self.eat_plus() {
+ break
+ } else {
+ last_plus_span = Some(self.prev_span);
+ }
+ }
+
+ if !negative_bounds.is_empty() {
+ let plural = negative_bounds.len() > 1;
+ let mut err = self.struct_span_err(negative_bounds,
+ "negative trait bounds are not supported");
+ let bound_list = colon_span.unwrap().to(self.prev_span);
+ // Rebuild the bound list from the surviving (non-negative) bounds
+ // so the suggestion replaces the whole `: ...` region.
+ let mut new_bound_list = String::new();
+ if !bounds.is_empty() {
+ let mut snippets = bounds.iter().map(|bound| bound.span())
+ .map(|span| self.sess.source_map().span_to_snippet(span));
+ while let Some(Ok(snippet)) = snippets.next() {
+ new_bound_list.push_str(" + ");
+ new_bound_list.push_str(&snippet);
+ }
+ // Turn the leading ` +` back into the `:` that opens the list.
+ new_bound_list = new_bound_list.replacen(" +", ":", 1);
+ }
+ err.span_suggestion_short(bound_list,
+ &format!("remove the trait bound{}",
+ if plural { "s" } else { "" }),
+ new_bound_list,
+ Applicability::MachineApplicable);
+ err.emit();
+ }
+
+ return Ok(bounds);
+ }
+
+ // Convenience wrapper: parse bounds with `+`-separated lists allowed.
+ fn parse_generic_bounds(&mut self, colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
+ self.parse_generic_bounds_common(true, colon_span)
+ }
+
+ /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
+ ///
+ /// ```
+ /// BOUND = LT_BOUND (e.g., `'a`)
+ /// ```
+ ///
+ /// Infallible: stops at the first token that is not a lifetime.
+ fn parse_lt_param_bounds(&mut self) -> GenericBounds {
+ let mut lifetimes = Vec::new();
+ while self.check_lifetime() {
+ lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime()));
+
+ if !self.eat_plus() {
+ break
+ }
+ }
+ lifetimes
+ }
+
+ /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
+ ///
+ /// `preceding_attrs` are the outer attributes already parsed by the caller.
+ fn parse_ty_param(&mut self,
+ preceding_attrs: Vec<Attribute>)
+ -> PResult<'a, GenericParam> {
+ let ident = self.parse_ident()?;
+
+ // Parse optional colon and param bounds.
+ let bounds = if self.eat(&token::Colon) {
+ self.parse_generic_bounds(None)?
+ } else {
+ Vec::new()
+ };
+
+ // Optional default type: `T = Ty`.
+ let default = if self.eat(&token::Eq) {
+ Some(self.parse_ty()?)
+ } else {
+ None
+ };
+
+ Ok(GenericParam {
+ ident,
+ id: ast::DUMMY_NODE_ID,
+ attrs: preceding_attrs.into(),
+ bounds,
+ kind: GenericParamKind::Type {
+ default,
+ }
+ })
+ }
+
+ /// Parses the following grammar:
+ ///
+ /// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
+ ///
+ /// Consumes the terminating `;`.
+ fn parse_trait_item_assoc_ty(&mut self)
+ -> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
+ let ident = self.parse_ident()?;
+ let mut generics = self.parse_generics()?;
+
+ // Parse optional colon and param bounds.
+ let bounds = if self.eat(&token::Colon) {
+ self.parse_generic_bounds(None)?
+ } else {
+ Vec::new()
+ };
+ generics.where_clause = self.parse_where_clause()?;
+
+ // Optional default: `type Foo = Ty;`.
+ let default = if self.eat(&token::Eq) {
+ Some(self.parse_ty()?)
+ } else {
+ None
+ };
+ self.expect(&token::Semi)?;
+
+ Ok((ident, TraitItemKind::Type(bounds, default), generics))
+ }
+
+ /// Parses a const generic parameter: `const IDENT: Ty`.
+ /// The `const` keyword must be the current token.
+ fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
+ self.expect_keyword(keywords::Const)?;
+ let ident = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+
+ Ok(GenericParam {
+ ident,
+ id: ast::DUMMY_NODE_ID,
+ attrs: preceding_attrs.into(),
+ bounds: Vec::new(),
+ kind: GenericParamKind::Const {
+ ty,
+ }
+ })
+ }
+
+ /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
+ /// a trailing comma and erroneous trailing attributes.
+ crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
+ let mut params = Vec::new();
+ loop {
+ let attrs = self.parse_outer_attributes()?;
+ if self.check_lifetime() {
+ let lifetime = self.expect_lifetime();
+ // Parse lifetime parameter.
+ let bounds = if self.eat(&token::Colon) {
+ self.parse_lt_param_bounds()
+ } else {
+ Vec::new()
+ };
+ params.push(ast::GenericParam {
+ ident: lifetime.ident,
+ id: lifetime.id,
+ attrs: attrs.into(),
+ bounds,
+ kind: ast::GenericParamKind::Lifetime,
+ });
+ } else if self.check_keyword(keywords::Const) {
+ // Parse const parameter.
+ params.push(self.parse_const_param(attrs)?);
+ } else if self.check_ident() {
+ // Parse type parameter.
+ params.push(self.parse_ty_param(attrs)?);
+ } else {
+ // Check for trailing attributes and stop parsing.
+ // NOTE(review): both `format!` calls below have no formatting
+ // arguments — plain string literals would suffice.
+ if !attrs.is_empty() {
+ if !params.is_empty() {
+ self.struct_span_err(
+ attrs[0].span,
+ &format!("trailing attribute after generic parameter"),
+ )
+ .span_label(attrs[0].span, "attributes must go before parameters")
+ .emit();
+ } else {
+ self.struct_span_err(
+ attrs[0].span,
+ &format!("attribute without generic parameters"),
+ )
+ .span_label(
+ attrs[0].span,
+ "attributes are only permitted when preceding parameters",
+ )
+ .emit();
+ }
+ }
+ break
+ }
+
+ if !self.eat(&token::Comma) {
+ break
+ }
+ }
+ Ok(params)
+ }
+
+ /// Parses a set of optional generic type parameter declarations. Where
+ /// clauses are not parsed here, and must be added later via
+ /// `parse_where_clause()`.
+ ///
+ /// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
+ /// | ( < lifetimes , typaramseq ( , )? > )
+ /// where typaramseq = ( typaram ) | ( typaram , typaramseq )
+ fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
+ maybe_whole!(self, NtGenerics, |x| x);
+
+ let span_lo = self.span;
+ if self.eat_lt() {
+ let params = self.parse_generic_params()?;
+ self.expect_gt()?;
+ Ok(ast::Generics {
+ params,
+ where_clause: WhereClause {
+ id: ast::DUMMY_NODE_ID,
+ predicates: Vec::new(),
+ span: syntax_pos::DUMMY_SP,
+ },
+ span: span_lo.to(self.prev_span),
+ })
+ } else {
+ // No `<`: an absent generics list is represented by the default.
+ Ok(ast::Generics::default())
+ }
+ }
+
+ /// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
+ /// For the purposes of understanding the parsing logic of generic arguments, this function
+ /// can be thought of being the same as just calling `self.parse_generic_args()` if the source
+ /// had the correct amount of leading angle brackets.
+ ///
+ /// ```ignore (diagnostics)
+ /// bar::<<<<T as Foo>::Output>();
+ ///      ^^ help: remove extra angle brackets
+ /// ```
+ ///
+ /// NOTE(review): the identifier says "leaning" where the `debug!` messages below say
+ /// "leading" — the function name itself carries a typo ("leaning" -> "leading").
+ /// Renaming would require updating every caller, so it is only flagged here.
+ fn parse_generic_args_with_leaning_angle_bracket_recovery(
+     &mut self,
+     style: PathStyle,
+     lo: Span,
+ ) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> {
+     // We need to detect whether there are extra leading left angle brackets and produce an
+     // appropriate error and suggestion. This cannot be implemented by looking ahead at
+     // upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
+     // then there won't be matching `>` tokens to find.
+     //
+     // To explain how this detection works, consider the following example:
+     //
+     // ```ignore (diagnostics)
+     // bar::<<<<T as Foo>::Output>();
+     //      ^^ help: remove extra angle brackets
+     // ```
+     //
+     // Parsing of the left angle brackets starts in this function. We start by parsing the
+     // `<` token (incrementing the counter of unmatched angle brackets on `Parser` via
+     // `eat_lt`):
+     //
+     // *Upcoming tokens:* `<<<<T as Foo>::Output>;`
+     // *Unmatched count:* 1
+     // *`parse_path_segment` calls deep:* 0
+     //
+     // This has the effect of recursing as this function is called if a `<` character
+     // is found within the expected generic arguments:
+     //
+     // *Upcoming tokens:* `<<<T as Foo>::Output>;`
+     // *Unmatched count:* 2
+     // *`parse_path_segment` calls deep:* 1
+     //
+     // Eventually we will have recursed until having consumed all of the `<` tokens and
+     // this will be reflected in the count:
+     //
+     // *Upcoming tokens:* `T as Foo>::Output>;`
+     // *Unmatched count:* 4
+     // `parse_path_segment` calls deep:* 3
+     //
+     // The parser will continue until reaching the first `>` - this will decrement the
+     // unmatched angle bracket count and return to the parent invocation of this function
+     // having succeeded in parsing:
+     //
+     // *Upcoming tokens:* `::Output>;`
+     // *Unmatched count:* 3
+     // *`parse_path_segment` calls deep:* 2
+     //
+     // This will continue until the next `>` character which will also return successfully
+     // to the parent invocation of this function and decrement the count:
+     //
+     // *Upcoming tokens:* `;`
+     // *Unmatched count:* 2
+     // *`parse_path_segment` calls deep:* 1
+     //
+     // At this point, this function will expect to find another matching `>` character but
+     // won't be able to and will return an error. This will continue all the way up the
+     // call stack until the first invocation:
+     //
+     // *Upcoming tokens:* `;`
+     // *Unmatched count:* 2
+     // *`parse_path_segment` calls deep:* 0
+     //
+     // In doing this, we have managed to work out how many unmatched leading left angle
+     // brackets there are, but we cannot recover as the unmatched angle brackets have
+     // already been consumed. To remedy this, we keep a snapshot of the parser state
+     // before we do the above. We can then inspect whether we ended up with a parsing error
+     // and unmatched left angle brackets and if so, restore the parser state before we
+     // consumed any `<` characters to emit an error and consume the erroneous tokens to
+     // recover by attempting to parse again.
+     //
+     // In practice, the recursion of this function is indirect and there will be other
+     // locations that consume some `<` characters - as long as we update the count when
+     // this happens, it isn't an issue.
+
+     // Only the outermost (expression-style) invocation snapshots and recovers; the inner
+     // recursive invocations just propagate their errors upward.
+     let is_first_invocation = style == PathStyle::Expr;
+     // Take a snapshot before attempting to parse - we can restore this later.
+     let snapshot = if is_first_invocation {
+         Some(self.clone())
+     } else {
+         None
+     };
+
+     debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
+     match self.parse_generic_args() {
+         Ok(value) => Ok(value),
+         Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
+             // Cancel error from being unable to find `>`. We know the error
+             // must have been this due to a non-zero unmatched angle bracket
+             // count.
+             e.cancel();
+
+             // Swap `self` with our backup of the parser state before attempting to parse
+             // generic arguments.
+             // After this swap, `snapshot` holds the *failed* parse state (with the final
+             // unmatched-bracket count) and `self` is rewound to before the first `<`.
+             let snapshot = mem::replace(self, snapshot.unwrap());
+
+             debug!(
+                 "parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
+                  snapshot.count={:?}",
+                 snapshot.unmatched_angle_bracket_count,
+             );
+
+             // Eat the unmatched angle brackets.
+             for _ in 0..snapshot.unmatched_angle_bracket_count {
+                 self.eat_lt();
+             }
+
+             // Make a span over ${unmatched angle bracket count} characters.
+             // NOTE(review): this assumes each `<` is exactly one byte wide, which holds
+             // for the ASCII `<` token.
+             let span = lo.with_hi(
+                 lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count)
+             );
+             let plural = snapshot.unmatched_angle_bracket_count > 1;
+             self.diagnostic()
+                 .struct_span_err(
+                     span,
+                     &format!(
+                         "unmatched angle bracket{}",
+                         if plural { "s" } else { "" }
+                     ),
+                 )
+                 .span_suggestion(
+                     span,
+                     &format!(
+                         "remove extra angle bracket{}",
+                         if plural { "s" } else { "" }
+                     ),
+                     String::new(),
+                     Applicability::MachineApplicable,
+                 )
+                 .emit();
+
+             // Try again without unmatched angle bracket characters.
+             self.parse_generic_args()
+         },
+         Err(e) => Err(e),
+     }
+ }
+
+ /// Parses (possibly empty) list of lifetime and type arguments and associated type bindings,
+ /// possibly including trailing comma.
+ ///
+ /// Returns the generic arguments in source order together with the associated type
+ /// bindings (`Ident = Ty`). Bindings that appear *before* a later plain argument are
+ /// collected and reported as misplaced after the loop.
+ fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> {
+     let mut args = Vec::new();
+     let mut bindings = Vec::new();
+     // Spans of bindings that turned out to precede a plain generic argument.
+     let mut misplaced_assoc_ty_bindings: Vec<Span> = Vec::new();
+     // Spans of bindings seen so far; drained into `misplaced_assoc_ty_bindings`
+     // whenever a plain generic argument follows them.
+     let mut assoc_ty_bindings: Vec<Span> = Vec::new();
+
+     let args_lo = self.span;
+
+     loop {
+         if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
+             // Parse lifetime argument.
+             args.push(GenericArg::Lifetime(self.expect_lifetime()));
+             misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
+         } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) {
+             // Parse associated type binding.
+             let lo = self.span;
+             let ident = self.parse_ident()?;
+             self.bump(); // consume the `=`
+             let ty = self.parse_ty()?;
+             let span = lo.to(self.prev_span);
+             bindings.push(TypeBinding {
+                 id: ast::DUMMY_NODE_ID,
+                 ident,
+                 ty,
+                 span,
+             });
+             assoc_ty_bindings.push(span);
+         } else if self.check_const_arg() {
+             // Parse const argument.
+             let expr = if let token::OpenDelim(token::Brace) = self.token {
+                 self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
+             } else if self.token.is_ident() {
+                 // FIXME(const_generics): to distinguish between idents for types and consts,
+                 // we should introduce a GenericArg::Ident in the AST and distinguish when
+                 // lowering to the HIR. For now, idents for const args are not permitted.
+                 return Err(
+                     self.fatal("identifiers may currently not be used for const generics")
+                 );
+             } else {
+                 // FIXME(const_generics): this currently conflicts with emplacement syntax
+                 // with negative integer literals.
+                 self.parse_literal_maybe_minus()?
+             };
+             let value = AnonConst {
+                 id: ast::DUMMY_NODE_ID,
+                 value: expr,
+             };
+             args.push(GenericArg::Const(value));
+             misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
+         } else if self.check_type() {
+             // Parse type argument.
+             args.push(GenericArg::Type(self.parse_ty()?));
+             misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
+         } else {
+             break
+         }
+
+         if !self.eat(&token::Comma) {
+             break
+         }
+     }
+
+     // FIXME: we would like to report this in ast_validation instead, but we currently do not
+     // preserve ordering of generic parameters with respect to associated type binding, so we
+     // lose that information after parsing.
+     if !misplaced_assoc_ty_bindings.is_empty() {
+         let mut err = self.struct_span_err(
+             args_lo.to(self.prev_span),
+             "associated type bindings must be declared after generic parameters",
+         );
+         for span in misplaced_assoc_ty_bindings {
+             err.span_label(
+                 span,
+                 "this associated type binding should be moved after the generic parameters",
+             );
+         }
+         err.emit();
+     }
+
+     Ok((args, bindings))
+ }
+
+ /// Parses an optional where-clause and places it in `generics`.
+ ///
+ /// ```ignore (only-for-syntax-highlight)
+ /// where T : Trait<U, V> + 'b, 'a : 'b
+ /// ```
+ ///
+ /// Returns an empty `WhereClause` (with `DUMMY_SP`) when no `where` keyword is present.
+ fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
+     maybe_whole!(self, NtWhereClause, |x| x);
+
+     let mut where_clause = WhereClause {
+         id: ast::DUMMY_NODE_ID,
+         predicates: Vec::new(),
+         span: syntax_pos::DUMMY_SP,
+     };
+
+     if !self.eat_keyword(keywords::Where) {
+         return Ok(where_clause);
+     }
+     // Span of the `where` keyword itself; used at the end for the whole clause's span.
+     let lo = self.prev_span;
+
+     // We are considering adding generics to the `where` keyword as an alternative higher-rank
+     // parameter syntax (as in `where<'a>` or `where<T>`. To avoid that being a breaking
+     // change we parse those generics now, but report an error.
+     if self.choose_generics_over_qpath() {
+         let generics = self.parse_generics()?;
+         self.struct_span_err(
+             generics.span,
+             "generic parameters on `where` clauses are reserved for future use",
+         )
+         .span_label(generics.span, "currently unsupported")
+         .emit();
+     }
+
+     loop {
+         // Shadows the outer `lo`: span start of the *current* predicate only.
+         let lo = self.span;
+         if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
+             let lifetime = self.expect_lifetime();
+             // Bounds starting with a colon are mandatory, but possibly empty.
+             self.expect(&token::Colon)?;
+             let bounds = self.parse_lt_param_bounds();
+             where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
+                 ast::WhereRegionPredicate {
+                     span: lo.to(self.prev_span),
+                     lifetime,
+                     bounds,
+                 }
+             ));
+         } else if self.check_type() {
+             // Parse optional `for<'a, 'b>`.
+             // This `for` is parsed greedily and applies to the whole predicate,
+             // the bounded type can have its own `for` applying only to it.
+             // Example 1: for<'a> Trait1<'a>: Trait2<'a /*ok*/>
+             // Example 2: (for<'a> Trait1<'a>): Trait2<'a /*not ok*/>
+             // Example 3: for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /*ok*/, 'b /*not ok*/>
+             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+
+             // Parse type with mandatory colon and (possibly empty) bounds,
+             // or with mandatory equality sign and the second type.
+             let ty = self.parse_ty()?;
+             if self.eat(&token::Colon) {
+                 let bounds = self.parse_generic_bounds(None)?;
+                 where_clause.predicates.push(ast::WherePredicate::BoundPredicate(
+                     ast::WhereBoundPredicate {
+                         span: lo.to(self.prev_span),
+                         bound_generic_params: lifetime_defs,
+                         bounded_ty: ty,
+                         bounds,
+                     }
+                 ));
+             // FIXME: Decide what should be used here, `=` or `==`.
+             // FIXME: We are just dropping the binders in lifetime_defs on the floor here.
+             } else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
+                 let rhs_ty = self.parse_ty()?;
+                 where_clause.predicates.push(ast::WherePredicate::EqPredicate(
+                     ast::WhereEqPredicate {
+                         span: lo.to(self.prev_span),
+                         lhs_ty: ty,
+                         rhs_ty,
+                         id: ast::DUMMY_NODE_ID,
+                     }
+                 ));
+             } else {
+                 return self.unexpected();
+             }
+         } else {
+             break
+         }
+
+         if !self.eat(&token::Comma) {
+             break
+         }
+     }
+
+     // Whole-clause span: from the `where` keyword to the last predicate parsed.
+     where_clause.span = lo.to(self.prev_span);
+     Ok(where_clause)
+ }
+
+ /// Parses a parenthesized, comma-separated function argument list.
+ ///
+ /// `named_args` controls whether arguments carry names (item position) and
+ /// `allow_variadic` whether a trailing `...` is accepted (foreign functions).
+ /// Returns the parsed arguments together with a flag telling whether `...` was seen.
+ fn parse_fn_args(&mut self, named_args: bool, allow_variadic: bool)
+                  -> PResult<'a, (Vec<Arg> , bool)> {
+     self.expect(&token::OpenDelim(token::Paren))?;
+
+     let sp = self.span;
+     let mut variadic = false;
+     // Elements are `None` where `...` handling decided to drop an argument;
+     // they are filtered out below.
+     let (args, recovered): (Vec<Option<Arg>>, bool) =
+         self.parse_seq_to_before_end(
+             &token::CloseDelim(token::Paren),
+             SeqSep::trailing_allowed(token::Comma),
+             |p| {
+                 if p.token == token::DotDotDot {
+                     p.bump();
+                     variadic = true;
+                     if allow_variadic {
+                         if p.token != token::CloseDelim(token::Paren) {
+                             let span = p.span;
+                             p.span_err(span,
+                                 "`...` must be last in argument list for variadic function");
+                         }
+                         Ok(None)
+                     } else {
+                         let span = p.prev_span;
+                         if p.token == token::CloseDelim(token::Paren) {
+                             // continue parsing to present any further errors
+                             p.struct_span_err(
+                                 span,
+                                 "only foreign functions are allowed to be variadic"
+                             ).emit();
+                             Ok(Some(dummy_arg(span)))
+                         } else {
+                             // this function definition looks beyond recovery, stop parsing
+                             p.span_err(span,
+                                 "only foreign functions are allowed to be variadic");
+                             Ok(None)
+                         }
+                     }
+                 } else {
+                     match p.parse_arg_general(named_args, false) {
+                         Ok(arg) => Ok(Some(arg)),
+                         Err(mut e) => {
+                             e.emit();
+                             let lo = p.prev_span;
+                             // Skip every token until next possible arg or end.
+                             p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
+                             // Create a placeholder argument for proper arg count (#34264).
+                             let span = lo.to(p.prev_span);
+                             Ok(Some(dummy_arg(span)))
+                         }
+                     }
+                 }
+             }
+         )?;
+
+     if !recovered {
+         self.eat(&token::CloseDelim(token::Paren));
+     }
+
+     // Drop the `None` placeholders left behind by the `...` handling above.
+     let args: Vec<_> = args.into_iter().flatten().collect();
+
+     if variadic && args.is_empty() {
+         self.span_err(sp,
+             "variadic function must be declared with at least one named argument");
+     }
+
+     Ok((args, variadic))
+ }
+
+ /// Parses the argument list and result type of a function declaration.
+ fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> {
+     // Arguments of a free function declaration are always named.
+     let (inputs, variadic) = self.parse_fn_args(true, allow_variadic)?;
+     let output = self.parse_ret_ty(true)?;
+     Ok(P(FnDecl { inputs, output, variadic }))
+ }
+
+ /// Returns the parsed optional self argument and whether a self shortcut was used.
+ fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
+     // Consumes the current token — which the caller has already verified to be an
+     // identifier — and rebuilds it as an `Ident` with its original span.
+     let expect_ident = |this: &mut Self| match this.token {
+         // Preserve hygienic context.
+         token::Ident(ident, _) =>
+             { let span = this.span; this.bump(); Ident::new(ident.name, span) }
+         _ => unreachable!()
+     };
+     // True if the token `n` ahead is the `self` keyword and the token after it is not
+     // `::` (i.e. `self` is not the start of a path like `self::foo`).
+     let isolated_self = |this: &mut Self, n| {
+         this.look_ahead(n, |t| t.is_keyword(keywords::SelfLower)) &&
+         this.look_ahead(n + 1, |t| t != &token::ModSep)
+     };
+
+     // Parse optional self parameter of a method.
+     // Only a limited set of initial token sequences is considered self parameters, anything
+     // else is parsed as a normal function parameter list, so some lookahead is required.
+     let eself_lo = self.span;
+     let (eself, eself_ident, eself_hi) = match self.token {
+         token::BinOp(token::And) => {
+             // &self
+             // &mut self
+             // &'lt self
+             // &'lt mut self
+             // &not_self
+             (if isolated_self(self, 1) {
+                 self.bump();
+                 SelfKind::Region(None, Mutability::Immutable)
+             } else if self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) &&
+                       isolated_self(self, 2) {
+                 self.bump();
+                 self.bump();
+                 SelfKind::Region(None, Mutability::Mutable)
+             } else if self.look_ahead(1, |t| t.is_lifetime()) &&
+                       isolated_self(self, 2) {
+                 self.bump();
+                 let lt = self.expect_lifetime();
+                 SelfKind::Region(Some(lt), Mutability::Immutable)
+             } else if self.look_ahead(1, |t| t.is_lifetime()) &&
+                       self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) &&
+                       isolated_self(self, 3) {
+                 self.bump();
+                 let lt = self.expect_lifetime();
+                 self.bump();
+                 SelfKind::Region(Some(lt), Mutability::Mutable)
+             } else {
+                 // Not a self parameter at all; leave all tokens for the normal arg parser.
+                 return Ok(None);
+             }, expect_ident(self), self.prev_span)
+         }
+         token::BinOp(token::Star) => {
+             // *self
+             // *const self
+             // *mut self
+             // *not_self
+             // Emit special error for `self` cases.
+             // Note these arms still build a `SelfKind::Value` after emitting the error so
+             // parsing can continue past the invalid raw-pointer receiver.
+             let msg = "cannot pass `self` by raw pointer";
+             (if isolated_self(self, 1) {
+                 self.bump();
+                 self.struct_span_err(self.span, msg)
+                     .span_label(self.span, msg)
+                     .emit();
+                 SelfKind::Value(Mutability::Immutable)
+             } else if self.look_ahead(1, |t| t.is_mutability()) &&
+                       isolated_self(self, 2) {
+                 self.bump();
+                 self.bump();
+                 self.struct_span_err(self.span, msg)
+                     .span_label(self.span, msg)
+                     .emit();
+                 SelfKind::Value(Mutability::Immutable)
+             } else {
+                 return Ok(None);
+             }, expect_ident(self), self.prev_span)
+         }
+         token::Ident(..) => {
+             if isolated_self(self, 0) {
+                 // self
+                 // self: TYPE
+                 let eself_ident = expect_ident(self);
+                 let eself_hi = self.prev_span;
+                 (if self.eat(&token::Colon) {
+                     let ty = self.parse_ty()?;
+                     SelfKind::Explicit(ty, Mutability::Immutable)
+                 } else {
+                     SelfKind::Value(Mutability::Immutable)
+                 }, eself_ident, eself_hi)
+             } else if self.token.is_keyword(keywords::Mut) &&
+                       isolated_self(self, 1) {
+                 // mut self
+                 // mut self: TYPE
+                 self.bump();
+                 let eself_ident = expect_ident(self);
+                 let eself_hi = self.prev_span;
+                 (if self.eat(&token::Colon) {
+                     let ty = self.parse_ty()?;
+                     SelfKind::Explicit(ty, Mutability::Mutable)
+                 } else {
+                     SelfKind::Value(Mutability::Mutable)
+                 }, eself_ident, eself_hi)
+             } else {
+                 return Ok(None);
+             }
+         }
+         _ => return Ok(None),
+     };
+
+     let eself = source_map::respan(eself_lo.to(eself_hi), eself);
+     Ok(Some(Arg::from_self(eself, eself_ident)))
+ }
+
+ /// Parses the parameter list and result type of a function that may have a `self` parameter.
+ ///
+ /// `parse_arg_fn` parses each non-`self` parameter; it varies between trait and
+ /// impl method contexts at the call sites.
+ fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>>
+     where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>,
+ {
+     self.expect(&token::OpenDelim(token::Paren))?;
+
+     // Parse optional self argument
+     let self_arg = self.parse_self_arg()?;
+
+     // Parse the rest of the function parameter list.
+     let sep = SeqSep::trailing_allowed(token::Comma);
+     let (fn_inputs, recovered) = if let Some(self_arg) = self_arg {
+         if self.check(&token::CloseDelim(token::Paren)) {
+             // `self` is the only parameter.
+             (vec![self_arg], false)
+         } else if self.eat(&token::Comma) {
+             // `self` followed by further parameters.
+             let mut fn_inputs = vec![self_arg];
+             let (mut input, recovered) = self.parse_seq_to_before_end(
+                 &token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
+             fn_inputs.append(&mut input);
+             (fn_inputs, recovered)
+         } else {
+             // After a self parameter only `,` or `)` is valid.
+             return self.unexpected();
+         }
+     } else {
+         self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
+     };
+
+     if !recovered {
+         // Parse closing paren and return type.
+         self.expect(&token::CloseDelim(token::Paren))?;
+     }
+     Ok(P(FnDecl {
+         inputs: fn_inputs,
+         output: self.parse_ret_ty(true)?,
+         variadic: false
+     }))
+ }
+
+ /// Parses the `|arg, arg|` header of a closure.
+ fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
+     // `||` lexes as a single token, so an empty parameter list is a special case.
+     let inputs = if self.eat(&token::OrOr) {
+         Vec::new()
+     } else {
+         self.expect(&token::BinOp(token::Or))?;
+         let (args, _) = self.parse_seq_to_before_tokens(
+             &[&token::BinOp(token::Or), &token::OrOr],
+             SeqSep::trailing_allowed(token::Comma),
+             TokenExpectType::NoExpect,
+             |p| p.parse_fn_block_arg(),
+         )?;
+         self.expect_or()?;
+         args
+     };
+     // An optional `-> Ty` return annotation follows the parameter list.
+     let output = self.parse_ret_ty(true)?;
+
+     Ok(P(FnDecl {
+         inputs,
+         output,
+         variadic: false,
+     }))
+ }
+
+ /// Parses the name and optional generic types of a function header.
+ fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
+     // Tuple fields evaluate left to right: the identifier is parsed first,
+     // then the generic parameter list.
+     Ok((self.parse_ident()?, self.parse_generics()?))
+ }
+
+ /// Assembles a `P<Item>` from its parts, filling in `DUMMY_NODE_ID` (real node
+ /// ids are assigned in a later pass) and no captured tokens.
+ // NOTE(review): takes `&mut self` even though no parser state is touched here —
+ // presumably for signature uniformity with the other `mk_*` helpers; confirm
+ // before changing, since callers pass a mutable borrow.
+ fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
+            attrs: Vec<Attribute>) -> P<Item> {
+     P(Item {
+         ident,
+         attrs,
+         id: ast::DUMMY_NODE_ID,
+         node,
+         vis,
+         span,
+         tokens: None,
+     })
+ }
+
+ /// Parses an item-position function declaration.
+ ///
+ /// The qualifiers (`unsafe`, `async`, `const`, `extern` ABI) have already been
+ /// parsed by the caller and are bundled into the resulting `FnHeader`.
+ fn parse_item_fn(&mut self,
+                  unsafety: Unsafety,
+                  asyncness: IsAsync,
+                  constness: Spanned<Constness>,
+                  abi: Abi)
+                  -> PResult<'a, ItemInfo> {
+     let (ident, mut generics) = self.parse_fn_header()?;
+     let decl = self.parse_fn_decl(false)?;
+     // The `where` clause sits between the signature and the body.
+     generics.where_clause = self.parse_where_clause()?;
+     let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+     let header = FnHeader { unsafety, asyncness, constness, abi };
+     Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
+ }
+
+ /// Returns `true` if we are looking at `const ID`
+ /// (returns `false` for things like `const fn`, etc.).
+ fn is_const_item(&mut self) -> bool {
+     if !self.token.is_keyword(keywords::Const) {
+         return false;
+     }
+     // `const fn` and `const unsafe fn` are function items, not const items.
+     !self.look_ahead(1, |t| t.is_keyword(keywords::Fn) || t.is_keyword(keywords::Unsafe))
+ }
+
+ /// Parses all the "front matter" for a `fn` declaration, up to
+ /// and including the `fn` keyword:
+ ///
+ /// - `const fn`
+ /// - `unsafe fn`
+ /// - `const unsafe fn`
+ /// - `extern fn`
+ /// - etc.
+ fn parse_fn_front_matter(&mut self)
+     -> PResult<'a, (
+         Spanned<Constness>,
+         Unsafety,
+         IsAsync,
+         Abi
+     )>
+ {
+     let is_const_fn = self.eat_keyword(keywords::Const);
+     let const_span = self.prev_span;
+     let unsafety = self.parse_unsafety();
+     let asyncness = self.parse_asyncness();
+     let (constness, unsafety, abi) = if is_const_fn {
+         // A `const fn` never parses an `extern` ABI here and is always `Abi::Rust`.
+         (respan(const_span, Constness::Const), unsafety, Abi::Rust)
+     } else {
+         // `extern` without an explicit ABI string defaults to the C ABI.
+         let abi = if self.eat_keyword(keywords::Extern) {
+             self.parse_opt_abi()?.unwrap_or(Abi::C)
+         } else {
+             Abi::Rust
+         };
+         (respan(self.prev_span, Constness::NotConst), unsafety, abi)
+     };
+     self.expect_keyword(keywords::Fn)?;
+     Ok((constness, unsafety, asyncness, abi))
+ }
+
+ /// Parses an impl item.
+ ///
+ /// Collects the item's tokens so they can be re-parsed later (e.g. by macro
+ /// expansion); `at_end` is set by the inner parser when recovery should not
+ /// attempt to skip further tokens.
+ pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
+     maybe_whole!(self, NtImplItem, |x| x);
+     let attrs = self.parse_outer_attributes()?;
+     let (mut item, tokens) = self.collect_tokens(|this| {
+         this.parse_impl_item_(at_end, attrs)
+     })?;
+
+     // See `parse_item` for why this clause is here.
+     if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+         item.tokens = Some(tokens);
+     }
+     Ok(item)
+ }
+
+ /// Inner worker for `parse_impl_item`: parses one associated item — a type
+ /// alias / existential type, an associated const, or a method / macro call.
+ fn parse_impl_item_(&mut self,
+                     at_end: &mut bool,
+                     mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
+     let lo = self.span;
+     let vis = self.parse_visibility(false)?;
+     let defaultness = self.parse_defaultness();
+     let (name, node, generics) = if let Some(type_) = self.eat_type() {
+         // Associated `type` item (weak alias or existential type).
+         let (name, alias, generics) = type_?;
+         let kind = match alias {
+             AliasKind::Weak(typ) => ast::ImplItemKind::Type(typ),
+             AliasKind::Existential(bounds) => ast::ImplItemKind::Existential(bounds),
+         };
+         (name, kind, generics)
+     } else if self.is_const_item() {
+         // This parses the grammar:
+         //     ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
+         self.expect_keyword(keywords::Const)?;
+         let name = self.parse_ident()?;
+         self.expect(&token::Colon)?;
+         let typ = self.parse_ty()?;
+         self.expect(&token::Eq)?;
+         let expr = self.parse_expr()?;
+         self.expect(&token::Semi)?;
+         (name, ast::ImplItemKind::Const(typ, expr), ast::Generics::default())
+     } else {
+         // Method or macro invocation.
+         let (name, inner_attrs, generics, node) = self.parse_impl_method(&vis, at_end)?;
+         attrs.extend(inner_attrs);
+         (name, node, generics)
+     };
+
+     Ok(ImplItem {
+         id: ast::DUMMY_NODE_ID,
+         span: lo.to(self.prev_span),
+         ident: name,
+         vis,
+         defaultness,
+         attrs,
+         generics,
+         node,
+         tokens: None,
+     })
+ }
+
+ fn complain_if_pub_macro(&mut self, vis: &VisibilityKind, sp: Span) {
+ match *vis {
+ VisibilityKind::Inherited => {}
+ _ => {
+ let is_macro_rules: bool = match self.token {
+ token::Ident(sid, _) => sid.name == Symbol::intern("macro_rules"),
+ _ => false,
+ };
+ let mut err = if is_macro_rules {
+ let mut err = self.diagnostic()
+ .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
+ err.span_suggestion(
+ sp,
+ "try exporting the macro",
+ "#[macro_export]".to_owned(),
+ Applicability::MaybeIncorrect // speculative
+ );
+ err
+ } else {
+ let mut err = self.diagnostic()
+ .struct_span_err(sp, "can't qualify macro invocation with `pub`");
+ err.help("try adjusting the macro to put `pub` inside the invocation");
+ err
+ };
+ err.emit();
+ }
+ }
+ }
+
+ /// Builds (but does not emit) a diagnostic for an associated item that is
+ /// missing its kind keyword (`fn`, `type`, `const`, or — in extern blocks —
+ /// `static`).
+ fn missing_assoc_item_kind_err(&mut self, item_type: &str, prev_span: Span)
+     -> DiagnosticBuilder<'a>
+ {
+     // Extern blocks admit `static` items; trait/impl blocks admit `const`.
+     let expected_kinds = match item_type {
+         "extern" => "missing `fn`, `type`, or `static`",
+         _ => "missing `fn`, `type`, or `const`",
+     };
+
+     // Given this code `path(`, it seems like this is not
+     // setting the visibility of a macro invocation, but rather
+     // a mistyped method declaration.
+     // Create a diagnostic pointing out that `fn` is missing.
+     //
+     // x |     pub   path(&self) {
+     //   |        ^ missing `fn`, `type`, or `const`
+     //     pub   path(
+     //        ^^ `sp` below will point to this
+     let sp = prev_span.between(self.prev_span);
+     let mut err = self.diagnostic().struct_span_err(
+         sp,
+         &format!("{} for {}-item declaration", expected_kinds, item_type),
+     );
+     err.span_label(sp, expected_kinds);
+     err
+ }
+
+ /// Parse a method or a macro invocation in a trait impl.
+ ///
+ /// Returns the item's name, its inner attributes, its generics, and the
+ /// resulting `ImplItemKind` (either `Macro` or `Method`).
+ fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
+                      -> PResult<'a, (Ident, Vec<Attribute>, ast::Generics,
+                          ast::ImplItemKind)> {
+     // code copied from parse_macro_use_or_failure... abstraction!
+     if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
+         // method macro
+         // Macro items have no name of their own, hence the `Invalid` ident.
+         Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(),
+             ast::ImplItemKind::Macro(mac)))
+     } else {
+         let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
+         let ident = self.parse_ident()?;
+         let mut generics = self.parse_generics()?;
+         let decl = self.parse_fn_decl_with_self(|p| p.parse_arg())?;
+         generics.where_clause = self.parse_where_clause()?;
+         // Signal the caller that error recovery need not skip tokens past here.
+         *at_end = true;
+         let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+         let header = ast::FnHeader { abi, unsafety, constness, asyncness };
+         Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(
+             ast::MethodSig { header, decl },
+             body
+         )))
+     }
+ }
+
+ /// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
+ ///
+ /// The `trait` keyword (and any `auto`/`unsafe` qualifiers) have already been
+ /// consumed by the caller; a `=` after the name/bounds selects the alias form.
+ fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
+     let ident = self.parse_ident()?;
+     let mut tps = self.parse_generics()?;
+
+     // Parse optional colon and supertrait bounds.
+     let bounds = if self.eat(&token::Colon) {
+         self.parse_generic_bounds(Some(self.prev_span))?
+     } else {
+         Vec::new()
+     };
+
+     if self.eat(&token::Eq) {
+         // it's a trait alias
+         let bounds = self.parse_generic_bounds(None)?;
+         tps.where_clause = self.parse_where_clause()?;
+         self.expect(&token::Semi)?;
+         // `auto`/`unsafe` are rejected on aliases, but parsing continues so an
+         // AST is still produced for further analysis.
+         if is_auto == IsAuto::Yes {
+             let msg = "trait aliases cannot be `auto`";
+             self.struct_span_err(self.prev_span, msg)
+                 .span_label(self.prev_span, msg)
+                 .emit();
+         }
+         if unsafety != Unsafety::Normal {
+             let msg = "trait aliases cannot be `unsafe`";
+             self.struct_span_err(self.prev_span, msg)
+                 .span_label(self.prev_span, msg)
+                 .emit();
+         }
+         Ok((ident, ItemKind::TraitAlias(tps, bounds), None))
+     } else {
+         // it's a normal trait
+         tps.where_clause = self.parse_where_clause()?;
+         self.expect(&token::OpenDelim(token::Brace))?;
+         let mut trait_items = vec![];
+         while !self.eat(&token::CloseDelim(token::Brace)) {
+             let mut at_end = false;
+             match self.parse_trait_item(&mut at_end) {
+                 Ok(item) => trait_items.push(item),
+                 Err(mut e) => {
+                     // Report and skip to something that looks like the next item.
+                     e.emit();
+                     if !at_end {
+                         self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
+                     }
+                 }
+             }
+         }
+         Ok((ident, ItemKind::Trait(is_auto, unsafety, tps, bounds, trait_items), None))
+     }
+ }
+
+ /// After a `<`, decides whether to parse generic parameters (vs. a qualified path).
+ fn choose_generics_over_qpath(&self) -> bool {
+     // There's an ambiguity between generic parameters and qualified paths in impls.
+     // If we see `<` it may start both, so we have to inspect some following tokens.
+     // The following combinations can only start generics,
+     // but not qualified paths (with one exception):
+     //     `<` `>` - empty generic parameters
+     //     `<` `#` - generic parameters with attributes
+     //     `<` (LIFETIME|IDENT) `>` - single generic parameter
+     //     `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
+     //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
+     //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
+     //     `<` const                - generic const parameter
+     // The only truly ambiguous case is
+     //     `<` IDENT `>` `::` IDENT ...
+     // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
+     // because this is what almost always expected in practice, qualified paths in impls
+     // (`impl <Type>::AssocTy { ... }`) aren't even allowed by type checker at the moment.
+     //
+     // Note the operator precedence below: `&&` binds tighter than `||`, so the
+     // lookahead-2 check only constrains the (LIFETIME|IDENT) alternative.
+     self.token == token::Lt &&
+         (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) ||
+          self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
+             self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
+                                    t == &token::Colon || t == &token::Eq) ||
+          self.look_ahead(1, |t| t.is_keyword(keywords::Const)))
+ }
+
+ /// Parses the braced body of an `impl` block, returning its items and the
+ /// block's inner attributes.
+ fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
+     self.expect(&token::OpenDelim(token::Brace))?;
+     let attrs = self.parse_inner_attributes()?;
+
+     let mut impl_items = Vec::new();
+     loop {
+         // The closing brace ends the body.
+         if self.eat(&token::CloseDelim(token::Brace)) {
+             break;
+         }
+         let mut at_end = false;
+         match self.parse_impl_item(&mut at_end) {
+             Ok(impl_item) => impl_items.push(impl_item),
+             Err(mut err) => {
+                 // Report the error, then skip ahead to something that looks
+                 // like the start of the next item and keep going.
+                 err.emit();
+                 if !at_end {
+                     self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
+                 }
+             }
+         }
+     }
+     Ok((impl_items, attrs))
+ }
+
+ /// Parses an implementation item, `impl` keyword is already parsed.
+ ///
+ ///    impl<'a, T> TYPE { /* impl items */ }
+ ///    impl<'a, T> TRAIT for TYPE { /* impl items */ }
+ ///    impl<'a, T> !TRAIT for TYPE { /* impl items */ }
+ ///
+ /// We actually parse slightly more relaxed grammar for better error reporting and recovery.
+ ///     `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}`
+ ///     `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}`
+ fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness)
+                    -> PResult<'a, ItemInfo> {
+     // First, parse generic parameters if necessary.
+     let mut generics = if self.choose_generics_over_qpath() {
+         self.parse_generics()?
+     } else {
+         ast::Generics::default()
+     };
+
+     // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
+     let polarity = if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) {
+         self.bump(); // `!`
+         ast::ImplPolarity::Negative
+     } else {
+         ast::ImplPolarity::Positive
+     };
+
+     // Parse both types and traits as a type, then reinterpret if necessary.
+     let ty_first = self.parse_ty()?;
+
+     // If `for` is missing we try to recover.
+     let has_for = self.eat_keyword(keywords::For);
+     let missing_for_span = self.prev_span.between(self.span);
+
+     let ty_second = if self.token == token::DotDot {
+         // We need to report this error after `cfg` expansion for compatibility reasons
+         self.bump(); // `..`, do not add it to expected tokens
+         // `TyKind::Err` placeholder stands in for the `..` so later passes can report it.
+         Some(P(Ty { node: TyKind::Err, span: self.prev_span, id: ast::DUMMY_NODE_ID }))
+     } else if has_for || self.token.can_begin_type() {
+         Some(self.parse_ty()?)
+     } else {
+         None
+     };
+
+     generics.where_clause = self.parse_where_clause()?;
+
+     let (impl_items, attrs) = self.parse_impl_body()?;
+
+     let item_kind = match ty_second {
+         Some(ty_second) => {
+             // impl Trait for Type
+             if !has_for {
+                 self.struct_span_err(missing_for_span, "missing `for` in a trait impl")
+                     .span_suggestion_short(
+                         missing_for_span,
+                         "add `for` here",
+                         " for ".to_string(),
+                         Applicability::MachineApplicable,
+                     ).emit();
+             }
+
+             // The first type must actually be a trait path; anything else is an
+             // error, recovered with a dummy path so parsing can continue.
+             let ty_first = ty_first.into_inner();
+             let path = match ty_first.node {
+                 // This notably includes paths passed through `ty` macro fragments (#46438).
+                 TyKind::Path(None, path) => path,
+                 _ => {
+                     self.span_err(ty_first.span, "expected a trait, found type");
+                     ast::Path::from_ident(Ident::new(keywords::Invalid.name(), ty_first.span))
+                 }
+             };
+             let trait_ref = TraitRef { path, ref_id: ty_first.id };
+
+             ItemKind::Impl(unsafety, polarity, defaultness,
+                            generics, Some(trait_ref), ty_second, impl_items)
+         }
+         None => {
+             // impl Type
+             ItemKind::Impl(unsafety, polarity, defaultness,
+                            generics, None, ty_first, impl_items)
+         }
+     };
+
+     Ok((keywords::Invalid.ident(), item_kind, Some(attrs)))
+ }
+
+ /// Parses an optional `for<'a, 'b, ..>` higher-ranked binder, returning the
+ /// parameters it introduces (or an empty list when no `for` is present).
+ fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
+     if !self.eat_keyword(keywords::For) {
+         return Ok(Vec::new());
+     }
+     self.expect_lt()?;
+     let params = self.parse_generic_params()?;
+     self.expect_gt()?;
+     // We rely on AST validation to rule out invalid cases: There must not be type
+     // parameters, and the lifetime parameters must not have bounds.
+     Ok(params)
+ }
+
+ /// Parses `struct Foo { ... }` (the `struct` keyword is already consumed).
+ fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
+     let class_name = self.parse_ident()?;
+
+     let mut generics = self.parse_generics()?;
+
+     // There is a special case worth noting here, as reported in issue #17904.
+     // If we are parsing a tuple struct it is the case that the where clause
+     // should follow the field list. Like so:
+     //
+     // struct Foo<T>(T) where T: Copy;
+     //
+     // If we are parsing a normal record-style struct it is the case
+     // that the where clause comes before the body, and after the generics.
+     // So if we look ahead and see a brace or a where-clause we begin
+     // parsing a record style struct.
+     //
+     // Otherwise if we look ahead and see a paren we parse a tuple-style
+     // struct.
+
+     let vdata = if self.token.is_keyword(keywords::Where) {
+         generics.where_clause = self.parse_where_clause()?;
+         if self.eat(&token::Semi) {
+             // If we see a: `struct Foo<T> where T: Copy;` style decl.
+             VariantData::Unit(ast::DUMMY_NODE_ID)
+         } else {
+             // If we see: `struct Foo<T> where T: Copy { ... }`
+             VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
+         }
+     // No `where` so: `struct Foo<T>;`
+     } else if self.eat(&token::Semi) {
+         VariantData::Unit(ast::DUMMY_NODE_ID)
+     // Record-style struct definition
+     } else if self.token == token::OpenDelim(token::Brace) {
+         VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
+     // Tuple-style struct definition with optional where-clause.
+     } else if self.token == token::OpenDelim(token::Paren) {
+         let body = VariantData::Tuple(self.parse_tuple_struct_body()?, ast::DUMMY_NODE_ID);
+         // Per the note above, a tuple struct's `where` clause comes *after* the fields.
+         generics.where_clause = self.parse_where_clause()?;
+         self.expect(&token::Semi)?;
+         body
+     } else {
+         let token_str = self.this_token_descr();
+         let mut err = self.fatal(&format!(
+             "expected `where`, `{{`, `(`, or `;` after struct name, found {}",
+             token_str
+         ));
+         err.span_label(self.span, "expected `where`, `{`, `(`, or `;` after struct name");
+         return Err(err);
+     };
+
+     Ok((class_name, ItemKind::Struct(vdata, generics), None))
+ }
+
+ /// Parses `union Foo { ... }`.
+ ///
+ /// Unions only admit record-style bodies (optionally preceded by a `where`
+ /// clause); there is no tuple or unit form, unlike structs.
+ fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
+ let class_name = self.parse_ident()?;
+
+ let mut generics = self.parse_generics()?;
+
+ let vdata = if self.token.is_keyword(keywords::Where) {
+ generics.where_clause = self.parse_where_clause()?;
+ VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
+ } else if self.token == token::OpenDelim(token::Brace) {
+ VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
+ } else {
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!(
+ "expected `where` or `{{` after union name, found {}", token_str));
+ err.span_label(self.span, "expected `where` or `{` after union name");
+ return Err(err);
+ };
+
+ Ok((class_name, ItemKind::Union(vdata, generics), None))
+ }
+
+ fn consume_block(&mut self, delim: token::DelimToken) {
+ let mut brace_depth = 0;
+ loop {
+ if self.eat(&token::OpenDelim(delim)) {
+ brace_depth += 1;
+ } else if self.eat(&token::CloseDelim(delim)) {
+ if brace_depth == 0 {
+ return;
+ } else {
+ brace_depth -= 1;
+ continue;
+ }
+ } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
+ return;
+ } else {
+ self.bump();
+ }
+ }
+ }
+
+ /// Parses the `{ field: Ty, ... }` body of a record-style struct, union, or
+ /// enum variant, returning the parsed fields.
+ ///
+ /// A malformed field triggers statement-level recovery and an emitted
+ /// diagnostic, then parsing of subsequent fields continues.
+ fn parse_record_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
+ let mut fields = Vec::new();
+ if self.eat(&token::OpenDelim(token::Brace)) {
+ while self.token != token::CloseDelim(token::Brace) {
+ let field = self.parse_struct_decl_field().map_err(|e| {
+ // Skip to a statement boundary so later fields can still parse.
+ self.recover_stmt();
+ e
+ });
+ match field {
+ Ok(field) => fields.push(field),
+ Err(mut err) => {
+ err.emit();
+ }
+ }
+ }
+ self.eat(&token::CloseDelim(token::Brace));
+ } else {
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!(
+ "expected `where`, or `{{` after struct name, found {}", token_str));
+ err.span_label(self.span, "expected `where`, or `{` after struct name");
+ return Err(err);
+ }
+
+ Ok(fields)
+ }
+
+ /// Parses the parenthesized field list of a tuple struct or tuple enum
+ /// variant, e.g. the `(pub T, U)` in `struct Foo<T>(pub T, U) where T: Copy;`.
+ /// Each field gets outer attributes and a visibility but no name.
+ fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
+ // This is the case where we find `struct Foo<T>(T) where T: Copy;`
+ // Unit like structs are handled in parse_item_struct function
+ let fields = self.parse_unspanned_seq(
+ &token::OpenDelim(token::Paren),
+ &token::CloseDelim(token::Paren),
+ SeqSep::trailing_allowed(token::Comma),
+ |p| {
+ let attrs = p.parse_outer_attributes()?;
+ let lo = p.span;
+ let vis = p.parse_visibility(true)?;
+ let ty = p.parse_ty()?;
+ Ok(StructField {
+ span: lo.to(ty.span),
+ vis,
+ ident: None,
+ id: ast::DUMMY_NODE_ID,
+ ty,
+ attrs,
+ })
+ })?;
+
+ Ok(fields)
+ }
+
+ /// Parses a structure field declaration.
+ ///
+ /// After the `name: Type` pair is parsed, the field must be followed by a
+ /// `,` or the closing `}`. A doc comment in that position (it would attach
+ /// to nothing) and a missing comma before the next field are both recovered
+ /// with suggestions instead of aborting.
+ fn parse_single_struct_field(&mut self,
+ lo: Span,
+ vis: Visibility,
+ attrs: Vec<Attribute> )
+ -> PResult<'a, StructField> {
+ let mut seen_comma: bool = false;
+ let a_var = self.parse_name_and_ty(lo, vis, attrs)?;
+ if self.token == token::Comma {
+ seen_comma = true;
+ }
+ match self.token {
+ token::Comma => {
+ self.bump();
+ }
+ token::CloseDelim(token::Brace) => {}
+ token::DocComment(_) => {
+ let previous_span = self.prev_span;
+ let mut err = self.span_fatal_err(self.span, Error::UselessDocComment);
+ self.bump(); // consume the doc comment
+ let comma_after_doc_seen = self.eat(&token::Comma);
+ // `seen_comma` is always false, because we are inside doc block
+ // condition is here to make code more readable
+ if !seen_comma && comma_after_doc_seen {
+ seen_comma = true;
+ }
+ if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) {
+ err.emit();
+ } else {
+ if !seen_comma {
+ // Suggest inserting the missing comma right after the field.
+ let sp = self.sess.source_map().next_point(previous_span);
+ err.span_suggestion(
+ sp,
+ "missing comma here",
+ ",".into(),
+ Applicability::MachineApplicable
+ );
+ }
+ return Err(err);
+ }
+ }
+ _ => {
+ let sp = self.sess.source_map().next_point(self.prev_span);
+ let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found {}",
+ self.this_token_descr()));
+ if self.token.is_ident() {
+ // This is likely another field; emit the diagnostic and keep going
+ err.span_suggestion(
+ sp,
+ "try adding a comma",
+ ",".into(),
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+ } else {
+ return Err(err)
+ }
+ }
+ }
+ Ok(a_var)
+ }
+
+ /// Parses an element of a struct declaration.
+ ///
+ /// Collects the field's outer attributes and visibility, then delegates the
+ /// rest (name, type, trailing separator handling) to `parse_single_struct_field`.
+ fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+ let vis = self.parse_visibility(false)?;
+ self.parse_single_struct_field(lo, vis, attrs)
+ }
+
+ /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
+ /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
+ /// If the following element can't be a tuple (i.e., it's a function definition), then
+ /// it's not a tuple struct field), and the contents within the parentheses isn't valid,
+ /// so emit a proper diagnostic.
+ ///
+ /// Returns `Inherited` (with an empty span at the current token) when no
+ /// visibility keyword is present at all.
+ pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
+ maybe_whole!(self, NtVis, |x| x);
+
+ self.expected_tokens.push(TokenType::Keyword(keywords::Crate));
+ if self.is_crate_vis() {
+ self.bump(); // `crate`
+ return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
+ }
+
+ if !self.eat_keyword(keywords::Pub) {
+ // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
+ // keyword to grab a span from for inherited visibility; an empty span at the
+ // beginning of the current token would seem to be the "Schelling span".
+ return Ok(respan(self.span.shrink_to_lo(), VisibilityKind::Inherited))
+ }
+ let lo = self.prev_span;
+
+ if self.check(&token::OpenDelim(token::Paren)) {
+ // We don't `self.bump()` the `(` yet because this might be a struct definition where
+ // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
+ // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
+ // by the following tokens.
+ if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) {
+ // `pub(crate)`
+ self.bump(); // `(`
+ self.bump(); // `crate`
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ let vis = respan(
+ lo.to(self.prev_span),
+ VisibilityKind::Crate(CrateSugar::PubCrate),
+ );
+ return Ok(vis)
+ } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) {
+ // `pub(in path)`
+ self.bump(); // `(`
+ self.bump(); // `in`
+ let path = self.parse_path(PathStyle::Mod)?; // `path`
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
+ path: P(path),
+ id: ast::DUMMY_NODE_ID,
+ });
+ return Ok(vis)
+ } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
+ t.is_keyword(keywords::SelfLower))
+ {
+ // `pub(self)` or `pub(super)`
+ self.bump(); // `(`
+ let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
+ path: P(path),
+ id: ast::DUMMY_NODE_ID,
+ });
+ return Ok(vis)
+ } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct
+ // `pub(something) fn ...` or `struct X { pub(something) y: Z }`
+ self.bump(); // `(`
+ let msg = "incorrect visibility restriction";
+ let suggestion = r##"some possible visibility restrictions are:
+`pub(crate)`: visible only on the current crate
+`pub(super)`: visible only in the current module's parent
+`pub(in path::to::module)`: visible only on the specified path"##;
+ let path = self.parse_path(PathStyle::Mod)?;
+ let sp = self.prev_span;
+ let help_msg = format!("make this visible only to module `{}` with `in`", path);
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ let mut err = struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg);
+ err.help(suggestion);
+ err.span_suggestion(
+ sp, &help_msg, format!("in {}", path), Applicability::MachineApplicable
+ );
+ err.emit(); // emit diagnostic, but continue with public visibility
+ }
+ }
+
+ Ok(respan(lo, VisibilityKind::Public))
+ }
+
+ /// Parses defaultness (i.e., `default` or nothing).
+ ///
+ /// `default` is only treated as a keyword when the following token can start
+ /// an overridable impl item; otherwise it remains usable as an identifier.
+ fn parse_defaultness(&mut self) -> Defaultness {
+ // `pub` is included for better error messages
+ if self.check_keyword(keywords::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Impl) ||
+ t.is_keyword(keywords::Const) ||
+ t.is_keyword(keywords::Fn) ||
+ t.is_keyword(keywords::Unsafe) ||
+ t.is_keyword(keywords::Extern) ||
+ t.is_keyword(keywords::Type) ||
+ t.is_keyword(keywords::Pub)) {
+ self.bump(); // `default`
+ Defaultness::Default
+ } else {
+ Defaultness::Final
+ }
+ }
+
+ /// If the next token is a stray `;` where an item is expected, consumes it,
+ /// emits a "remove this semicolon" suggestion (naming the preceding item
+ /// kind when that kind never takes a trailing semicolon), and returns `true`.
+ /// Returns `false` without consuming anything otherwise.
+ fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
+ if self.eat(&token::Semi) {
+ let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`");
+ err.span_suggestion_short(
+ self.prev_span,
+ "remove this semicolon",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ // `last()` replaces the manual `items[items.len()-1]` indexing.
+ if let Some(previous_item) = items.last() {
+ let previous_item_kind_name = match previous_item.node {
+ // say "braced struct" because tuple-structs and
+ // braceless-empty-struct declarations do take a semicolon
+ ItemKind::Struct(..) => Some("braced struct"),
+ ItemKind::Enum(..) => Some("enum"),
+ ItemKind::Trait(..) => Some("trait"),
+ ItemKind::Union(..) => Some("union"),
+ _ => None,
+ };
+ if let Some(name) = previous_item_kind_name {
+ err.help(&format!("{} declarations are not followed by a semicolon", name));
+ }
+ }
+ err.emit();
+ true
+ } else {
+ false
+ }
+ }
+
+ /// Given a termination token, parses all of the items in a module.
+ ///
+ /// `term` is `}` for inline modules and EOF for whole files; stray `;`
+ /// tokens between items are consumed with a recovery diagnostic.
+ fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
+ let mut items = vec![];
+ while let Some(item) = self.parse_item()? {
+ items.push(item);
+ self.maybe_consume_incorrect_semicolon(&items);
+ }
+
+ if !self.eat(term) {
+ let token_str = self.this_token_descr();
+ if !self.maybe_consume_incorrect_semicolon(&items) {
+ let mut err = self.fatal(&format!("expected item, found {}", token_str));
+ err.span_label(self.span, "expected item");
+ return Err(err);
+ }
+ }
+
+ // An empty module at EOF can leave the parser on a dummy span; fall back
+ // to the module's opening position so `inner` stays a valid range.
+ let hi = if self.span.is_dummy() {
+ inner_lo
+ } else {
+ self.prev_span
+ };
+
+ Ok(ast::Mod {
+ inner: inner_lo.to(hi),
+ items,
+ inline: true
+ })
+ }
+
+ /// Parses the remainder of a `const` or `static` item after its keyword:
+ /// `NAME: Ty = expr;`.
+ ///
+ /// `m` is `None` for `const` items (where `_` is accepted as the name) and
+ /// `Some(mutability)` for `static` items.
+ fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
+ let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+ self.expect(&token::Eq)?;
+ let e = self.parse_expr()?;
+ self.expect(&token::Semi)?;
+ let item = match m {
+ Some(m) => ItemKind::Static(ty, m, e),
+ None => ItemKind::Const(ty, e),
+ };
+ Ok((id, item, None))
+ }
+
+ /// Parse a `mod <foo> { ... }` or `mod <foo>;` item
+ ///
+ /// For `mod foo;` the module's source file is located and parsed (unless the
+ /// item is cfg'd out); for `mod foo { ... }` the body is parsed inline with
+ /// the directory state adjusted for nested out-of-line modules.
+ fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
+ let (in_cfg, outer_attrs) = {
+ // Expand `cfg_attr` so `in_cfg` reflects the real `#[cfg(...)]`s.
+ let mut strip_unconfigured = crate::config::StripUnconfigured {
+ sess: self.sess,
+ features: None, // don't perform gated feature checking
+ };
+ let mut outer_attrs = outer_attrs.to_owned();
+ strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
+ (!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
+ };
+
+ let id_span = self.span;
+ let id = self.parse_ident()?;
+ if self.eat(&token::Semi) {
+ if in_cfg && self.recurse_into_file_modules {
+ // This mod is in an external file. Let's go get it!
+ let ModulePathSuccess { path, directory_ownership, warn } =
+ self.submod_path(id, &outer_attrs, id_span)?;
+ let (module, mut attrs) =
+ self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?;
+ // Record that we fetched the mod from an external file
+ if warn {
+ let attr = Attribute {
+ id: attr::mk_attr_id(),
+ style: ast::AttrStyle::Outer,
+ path: ast::Path::from_ident(Ident::from_str("warn_directory_ownership")),
+ tokens: TokenStream::empty(),
+ is_sugared_doc: false,
+ span: syntax_pos::DUMMY_SP,
+ };
+ attr::mark_known(&attr);
+ attrs.push(attr);
+ }
+ Ok((id, ItemKind::Mod(module), Some(attrs)))
+ } else {
+ // cfg'd out (or not recursing): keep an empty placeholder module.
+ let placeholder = ast::Mod {
+ inner: syntax_pos::DUMMY_SP,
+ items: Vec::new(),
+ inline: false
+ };
+ Ok((id, ItemKind::Mod(placeholder), None))
+ }
+ } else {
+ let old_directory = self.directory.clone();
+ self.push_directory(id, &outer_attrs);
+
+ self.expect(&token::OpenDelim(token::Brace))?;
+ let mod_inner_lo = self.span;
+ let attrs = self.parse_inner_attributes()?;
+ let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?;
+
+ self.directory = old_directory;
+ Ok((id, ItemKind::Mod(module), Some(attrs)))
+ }
+ }
+
+ /// Updates `self.directory` for entering the inline module `id`, honoring an
+ /// explicit `#[path = "..."]` attribute when present.
+ fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
+ if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") {
+ self.directory.path.to_mut().push(&path.as_str());
+ self.directory.ownership = DirectoryOwnership::Owned { relative: None };
+ } else {
+ // We have to push on the current module name in the case of relative
+ // paths in order to ensure that any additional module paths from inline
+ // `mod x { ... }` come after the relative extension.
+ //
+ // For example, a `mod z { ... }` inside `x/y.rs` should set the current
+ // directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
+ if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership {
+ if let Some(ident) = relative.take() { // remove the relative offset
+ self.directory.path.to_mut().push(ident.as_str());
+ }
+ }
+ self.directory.path.to_mut().push(&id.as_str());
+ }
+ }
+
+ /// Returns the module file path given by a `#[path = "..."]` attribute,
+ /// joined onto `dir_path`, or `None` when no such attribute is present.
+ pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
+ if let Some(s) = attr::first_attr_value_str_by_name(attrs, "path") {
+ let s = s.as_str();
+
+ // On windows, the base path might have the form
+ // `\\?\foo\bar` in which case it does not tolerate
+ // mixed `/` and `\` separators, so canonicalize
+ // `/` to `\`.
+ #[cfg(windows)]
+ let s = s.replace("/", "\\");
+ Some(dir_path.join(s))
+ } else {
+ None
+ }
+ }
+
+ /// Returns a path to a module.
+ ///
+ /// Tries the two conventional locations `<id>.rs` and `<id>/mod.rs`
+ /// (relative to `dir_path`, with an extra `<relative>/` prefix when the
+ /// current file is a `foo.rs`-style module). Exactly one must exist; both
+ /// or neither is an error recorded in `ModulePath::result`.
+ pub fn default_submod_path(
+ id: ast::Ident,
+ relative: Option<ast::Ident>,
+ dir_path: &Path,
+ source_map: &SourceMap) -> ModulePath
+ {
+ // If we're in a foo.rs file instead of a mod.rs file,
+ // we need to look for submodules in
+ // `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than
+ // `./<id>.rs` and `./<id>/mod.rs`.
+ let relative_prefix_string;
+ let relative_prefix = if let Some(ident) = relative {
+ relative_prefix_string = format!("{}{}", ident.as_str(), path::MAIN_SEPARATOR);
+ &relative_prefix_string
+ } else {
+ ""
+ };
+
+ let mod_name = id.to_string();
+ let default_path_str = format!("{}{}.rs", relative_prefix, mod_name);
+ let secondary_path_str = format!("{}{}{}mod.rs",
+ relative_prefix, mod_name, path::MAIN_SEPARATOR);
+ let default_path = dir_path.join(&default_path_str);
+ let secondary_path = dir_path.join(&secondary_path_str);
+ let default_exists = source_map.file_exists(&default_path);
+ let secondary_exists = source_map.file_exists(&secondary_path);
+
+ let result = match (default_exists, secondary_exists) {
+ (true, false) => Ok(ModulePathSuccess {
+ path: default_path,
+ directory_ownership: DirectoryOwnership::Owned {
+ relative: Some(id),
+ },
+ warn: false,
+ }),
+ (false, true) => Ok(ModulePathSuccess {
+ path: secondary_path,
+ directory_ownership: DirectoryOwnership::Owned {
+ relative: None,
+ },
+ warn: false,
+ }),
+ (false, false) => Err(Error::FileNotFoundForModule {
+ mod_name: mod_name.clone(),
+ default_path: default_path_str,
+ secondary_path: secondary_path_str,
+ dir_path: dir_path.display().to_string(),
+ }),
+ (true, true) => Err(Error::DuplicatePaths {
+ mod_name: mod_name.clone(),
+ default_path: default_path_str,
+ secondary_path: secondary_path_str,
+ }),
+ };
+
+ ModulePath {
+ name: mod_name,
+ path_exists: default_exists || secondary_exists,
+ result,
+ }
+ }
+
+ /// Resolves the source-file location for a `mod id;` declaration.
+ ///
+ /// A `#[path]` attribute wins outright; otherwise the conventional paths
+ /// from `default_submod_path` are used, subject to the current directory
+ /// ownership (declaring non-inline modules inside blocks, or inside files
+ /// that don't own their directory, is rejected with diagnostics).
+ fn submod_path(&mut self,
+ id: ast::Ident,
+ outer_attrs: &[Attribute],
+ id_sp: Span)
+ -> PResult<'a, ModulePathSuccess> {
+ if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) {
+ return Ok(ModulePathSuccess {
+ directory_ownership: match path.file_name().and_then(|s| s.to_str()) {
+ // All `#[path]` files are treated as though they are a `mod.rs` file.
+ // This means that `mod foo;` declarations inside `#[path]`-included
+ // files are siblings,
+ //
+ // Note that this will produce weirdness when a file named `foo.rs` is
+ // `#[path]` included and contains a `mod foo;` declaration.
+ // If you encounter this, it's your own darn fault :P
+ Some(_) => DirectoryOwnership::Owned { relative: None },
+ _ => DirectoryOwnership::UnownedViaMod(true),
+ },
+ path,
+ warn: false,
+ });
+ }
+
+ let relative = match self.directory.ownership {
+ DirectoryOwnership::Owned { relative } => relative,
+ DirectoryOwnership::UnownedViaBlock |
+ DirectoryOwnership::UnownedViaMod(_) => None,
+ };
+ let paths = Parser::default_submod_path(
+ id, relative, &self.directory.path, self.sess.source_map());
+
+ match self.directory.ownership {
+ DirectoryOwnership::Owned { .. } => {
+ paths.result.map_err(|err| self.span_fatal_err(id_sp, err))
+ },
+ DirectoryOwnership::UnownedViaBlock => {
+ let msg =
+ "Cannot declare a non-inline module inside a block \
+ unless it has a path attribute";
+ let mut err = self.diagnostic().struct_span_err(id_sp, msg);
+ if paths.path_exists {
+ let msg = format!("Maybe `use` the module `{}` instead of redeclaring it",
+ paths.name);
+ err.span_note(id_sp, &msg);
+ }
+ Err(err)
+ }
+ DirectoryOwnership::UnownedViaMod(warn) => {
+ // In warn-only mode an existing file is accepted, but flagged so the
+ // caller attaches a `warn_directory_ownership` attribute.
+ if warn {
+ if let Ok(result) = paths.result {
+ return Ok(ModulePathSuccess { warn: true, ..result });
+ }
+ }
+ let mut err = self.diagnostic().struct_span_err(id_sp,
+ "cannot declare a new module at this location");
+ if !id_sp.is_dummy() {
+ let src_path = self.sess.source_map().span_to_filename(id_sp);
+ if let FileName::Real(src_path) = src_path {
+ if let Some(stem) = src_path.file_stem() {
+ let mut dest_path = src_path.clone();
+ dest_path.set_file_name(stem);
+ dest_path.push("mod.rs");
+ err.span_note(id_sp,
+ &format!("maybe move this module `{}` to its own \
+ directory via `{}`", src_path.display(),
+ dest_path.display()));
+ }
+ }
+ }
+ if paths.path_exists {
+ err.span_note(id_sp,
+ &format!("... or maybe `use` the module `{}` instead \
+ of possibly redeclaring it",
+ paths.name));
+ }
+ Err(err)
+ }
+ }
+ }
+
+ /// Reads a module from a source file.
+ ///
+ /// Maintains `sess.included_mod_stack` to detect and report circular
+ /// `mod` inclusion chains, then parses the file with a fresh sub-parser.
+ fn eval_src_mod(&mut self,
+ path: PathBuf,
+ directory_ownership: DirectoryOwnership,
+ name: String,
+ id_sp: Span)
+ -> PResult<'a, (ast::Mod, Vec<Attribute> )> {
+ let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
+ if let Some(i) = included_mod_stack.iter().position(|p| *p == path) {
+ // `path` is already being parsed further up the stack: report the cycle.
+ let mut err = String::from("circular modules: ");
+ let len = included_mod_stack.len();
+ for p in &included_mod_stack[i.. len] {
+ err.push_str(&p.to_string_lossy());
+ err.push_str(" -> ");
+ }
+ err.push_str(&path.to_string_lossy());
+ return Err(self.span_fatal(id_sp, &err[..]));
+ }
+ included_mod_stack.push(path.clone());
+ // Release the borrow before recursing; the sub-parse may borrow it again.
+ drop(included_mod_stack);
+
+ let mut p0 =
+ new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp);
+ p0.cfg_mods = self.cfg_mods;
+ let mod_inner_lo = p0.span;
+ let mod_attrs = p0.parse_inner_attributes()?;
+ let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?;
+ m0.inline = false;
+ self.sess.included_mod_stack.borrow_mut().pop();
+ Ok((m0, mod_attrs))
+ }
+
+ /// Parses a function declaration from a foreign module.
+ ///
+ /// Foreign functions have no body: the declaration ends with `;`.
+ fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
+ -> PResult<'a, ForeignItem> {
+ self.expect_keyword(keywords::Fn)?;
+
+ let (ident, mut generics) = self.parse_fn_header()?;
+ let decl = self.parse_fn_decl(true)?;
+ generics.where_clause = self.parse_where_clause()?;
+ let hi = self.span;
+ self.expect(&token::Semi)?;
+ Ok(ast::ForeignItem {
+ ident,
+ attrs,
+ node: ForeignItemKind::Fn(decl, generics),
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ vis,
+ })
+ }
+
+ /// Parses a static item from a foreign module.
+ /// Assumes that the `static` keyword is already parsed.
+ ///
+ /// Grammar: `[mut] NAME: Ty;` — foreign statics take no initializer.
+ fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
+ -> PResult<'a, ForeignItem> {
+ let mutbl = self.eat_keyword(keywords::Mut);
+ let ident = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+ let hi = self.span;
+ self.expect(&token::Semi)?;
+ Ok(ForeignItem {
+ ident,
+ attrs,
+ node: ForeignItemKind::Static(ty, mutbl),
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ vis,
+ })
+ }
+
+ /// Parses a type from a foreign module.
+ ///
+ /// Grammar: `type NAME;` — an opaque extern type with no definition.
+ fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
+ -> PResult<'a, ForeignItem> {
+ self.expect_keyword(keywords::Type)?;
+
+ let ident = self.parse_ident()?;
+ let hi = self.span;
+ self.expect(&token::Semi)?;
+ // Field-init shorthand, matching `parse_item_foreign_fn`/`_static`.
+ Ok(ast::ForeignItem {
+ ident,
+ attrs,
+ node: ForeignItemKind::Ty,
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ vis,
+ })
+ }
+
+ /// Parses the crate name of an `extern crate` item, accepting (and fixing
+ /// up) dash-separated names like `extern crate foo-bar;` so a helpful
+ /// "use underscores" diagnostic with a machine-applicable suggestion can be
+ /// emitted. Returns the (possibly underscore-joined) identifier.
+ fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> {
+ let error_msg = "crate name using dashes are not valid in `extern crate` statements";
+ let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
+ in the code";
+ let mut ident = if self.token.is_keyword(keywords::SelfLower) {
+ self.parse_path_segment_ident()
+ } else {
+ self.parse_ident()
+ }?;
+ let mut idents = vec![];
+ let mut replacement = vec![];
+ let mut fixed_crate_name = false;
+ // Accept `extern crate name-like-this` for better diagnostics
+ let dash = token::Token::BinOp(token::BinOpToken::Minus);
+ if self.token == dash { // Do not include `-` as part of the expected tokens list
+ while self.eat(&dash) {
+ fixed_crate_name = true;
+ replacement.push((self.prev_span, "_".to_string()));
+ idents.push(self.parse_ident()?);
+ }
+ }
+ if fixed_crate_name {
+ let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
+ // `.to_string()` instead of the redundant `format!("{}", ...)`.
+ let mut fixed_name = ident.name.to_string();
+ for part in idents {
+ fixed_name.push_str(&format!("_{}", part.name));
+ }
+ ident = Ident::from_str(&fixed_name).with_span_pos(fixed_name_sp);
+
+ let mut err = self.struct_span_err(fixed_name_sp, error_msg);
+ err.span_label(fixed_name_sp, "dash-separated idents are not valid");
+ err.multipart_suggestion(
+ suggestion_msg,
+ replacement,
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+ }
+ Ok(ident)
+ }
+
+ /// Parses `extern crate` links.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// extern crate foo;
+ /// extern crate bar as foo;
+ /// ```
+ fn parse_item_extern_crate(&mut self,
+ lo: Span,
+ visibility: Visibility,
+ attrs: Vec<Attribute>)
+ -> PResult<'a, P<Item>> {
+ // Accept `extern crate name-like-this` for better diagnostics
+ let orig_name = self.parse_crate_name_with_dashes()?;
+ // With `as rename`, the item is named `rename` and the original crate
+ // name is preserved in `ItemKind::ExternCrate`'s payload.
+ let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? {
+ (rename, Some(orig_name.name))
+ } else {
+ (orig_name, None)
+ };
+ self.expect(&token::Semi)?;
+
+ let span = lo.to(self.prev_span);
+ Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
+ }
+
+ /// Parses `extern` for foreign ABIs modules.
+ ///
+ /// `extern` is expected to have been
+ /// consumed before calling this method.
+ ///
+ /// # Examples
+ ///
+ /// ```ignore (only-for-syntax-highlight)
+ /// extern "C" {}
+ /// extern {}
+ /// ```
+ fn parse_item_foreign_mod(&mut self,
+ lo: Span,
+ opt_abi: Option<Abi>,
+ visibility: Visibility,
+ mut attrs: Vec<Attribute>)
+ -> PResult<'a, P<Item>> {
+ self.expect(&token::OpenDelim(token::Brace))?;
+
+ // A bare `extern { ... }` defaults to the C ABI.
+ let abi = opt_abi.unwrap_or(Abi::C);
+
+ attrs.extend(self.parse_inner_attributes()?);
+
+ let mut foreign_items = vec![];
+ while !self.eat(&token::CloseDelim(token::Brace)) {
+ foreign_items.push(self.parse_foreign_item()?);
+ }
+
+ let prev_span = self.prev_span;
+ let m = ast::ForeignMod {
+ abi,
+ items: foreign_items
+ };
+ // Foreign mods are anonymous items; use the invalid ident as the name.
+ let invalid = keywords::Invalid.ident();
+ Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
+ }
+
+ /// Parses `type Foo = Bar;` or `existential type Foo: Bar;`, or returns
+ /// `None` without modifying the parser state when neither form is ahead.
+ fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
+ // This parses the grammar:
+ // Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
+ // `&&` binds tighter than `||`: accept either `type`, or `existential`
+ // immediately followed by `type`.
+ if self.check_keyword(keywords::Type) ||
+ self.check_keyword(keywords::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Type)) {
+ let existential = self.eat_keyword(keywords::Existential);
+ assert!(self.eat_keyword(keywords::Type));
+ Some(self.parse_existential_or_alias(existential))
+ } else {
+ None
+ }
+ }
+
+ /// Parses a type alias or existential type.
+ ///
+ /// Both keywords are already consumed: `existential` takes `: Bounds;`,
+ /// a plain alias takes `= Ty;`, each after optional generics/where-clause.
+ fn parse_existential_or_alias(
+ &mut self,
+ existential: bool,
+ ) -> PResult<'a, (Ident, AliasKind, ast::Generics)> {
+ let ident = self.parse_ident()?;
+ let mut tps = self.parse_generics()?;
+ tps.where_clause = self.parse_where_clause()?;
+ let alias = if existential {
+ self.expect(&token::Colon)?;
+ let bounds = self.parse_generic_bounds(None)?;
+ AliasKind::Existential(bounds)
+ } else {
+ self.expect(&token::Eq)?;
+ let ty = self.parse_ty()?;
+ AliasKind::Weak(ty)
+ };
+ self.expect(&token::Semi)?;
+ Ok((ident, alias, tps))
+ }
+
+ /// Parses the part of an enum declaration following the `{`.
+ ///
+ /// Also checks that explicit discriminants (`Variant = expr`) only appear
+ /// when every variant is field-less, emitting an error pointing at each
+ /// discriminant otherwise.
+ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
+ let mut variants = Vec::new();
+ let mut all_nullary = true;
+ let mut any_disr = vec![];
+ while self.token != token::CloseDelim(token::Brace) {
+ let variant_attrs = self.parse_outer_attributes()?;
+ let vlo = self.span;
+
+ let struct_def;
+ let mut disr_expr = None;
+ let ident = self.parse_ident()?;
+ if self.check(&token::OpenDelim(token::Brace)) {
+ // Parse a struct variant.
+ all_nullary = false;
+ struct_def = VariantData::Struct(self.parse_record_struct_body()?,
+ ast::DUMMY_NODE_ID);
+ } else if self.check(&token::OpenDelim(token::Paren)) {
+ // Parse a tuple variant.
+ all_nullary = false;
+ struct_def = VariantData::Tuple(self.parse_tuple_struct_body()?,
+ ast::DUMMY_NODE_ID);
+ } else if self.eat(&token::Eq) {
+ // Explicit discriminant, e.g. `Variant = 1`. Record its span
+ // directly instead of round-tripping through `Option::map` on a
+ // value we just wrapped in `Some`.
+ let disr = AnonConst {
+ id: ast::DUMMY_NODE_ID,
+ value: self.parse_expr()?,
+ };
+ any_disr.push(disr.value.span);
+ disr_expr = Some(disr);
+ struct_def = VariantData::Unit(ast::DUMMY_NODE_ID);
+ } else {
+ struct_def = VariantData::Unit(ast::DUMMY_NODE_ID);
+ }
+
+ let vr = ast::Variant_ {
+ ident,
+ attrs: variant_attrs,
+ data: struct_def,
+ disr_expr,
+ };
+ variants.push(respan(vlo.to(self.prev_span), vr));
+
+ if !self.eat(&token::Comma) { break; }
+ }
+ self.expect(&token::CloseDelim(token::Brace))?;
+ if !any_disr.is_empty() && !all_nullary {
+ let mut err = self.struct_span_err(
+ any_disr.clone(),
+ "discriminator values can only be used with a field-less enum",
+ );
+ for sp in any_disr {
+ err.span_label(sp, "only valid in field-less enums");
+ }
+ err.emit();
+ }
+
+ Ok(ast::EnumDef { variants })
+ }
+
+ /// Parses an enum declaration.
+ ///
+ /// On a malformed body, recovers to a statement boundary, consumes the
+ /// closing `}` if present, and propagates the error.
+ fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
+ let id = self.parse_ident()?;
+ let mut generics = self.parse_generics()?;
+ generics.where_clause = self.parse_where_clause()?;
+ self.expect(&token::OpenDelim(token::Brace))?;
+
+ let enum_definition = self.parse_enum_def(&generics).map_err(|e| {
+ self.recover_stmt();
+ self.eat(&token::CloseDelim(token::Brace));
+ e
+ })?;
+ Ok((id, ItemKind::Enum(enum_definition, generics), None))
+ }
+
+ /// Parses a string as an ABI spec on an extern type or module. Consumes
+ /// the `extern` keyword, if one is found.
+ ///
+ /// An unknown ABI string emits E0703 (listing the valid ABIs) and yields
+ /// `Ok(None)` so parsing can continue; a non-string token also yields `None`.
+ fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
+ match self.token {
+ token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
+ let sp = self.span;
+ // ABI strings may not carry a literal suffix (e.g. `"C"usize`).
+ self.expect_no_suffix(sp, "ABI spec", suf);
+ self.bump();
+ match abi::lookup(&s.as_str()) {
+ Some(abi) => Ok(Some(abi)),
+ None => {
+ let prev_span = self.prev_span;
+ let mut err = struct_span_err!(
+ self.sess.span_diagnostic,
+ prev_span,
+ E0703,
+ "invalid ABI: found `{}`",
+ s);
+ err.span_label(prev_span, "invalid ABI");
+ err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
+ err.emit();
+ Ok(None)
+ }
+ }
+ }
+
+ _ => Ok(None),
+ }
+ }
+
+ /// Returns `true` when the upcoming `static` keyword begins a global item
+ /// rather than a `static [move] |...|` closure.
+ fn is_static_global(&mut self) -> bool {
+ if !self.check_keyword(keywords::Static) {
+ return false;
+ }
+ // A global it is, unless the token after `static` starts a closure:
+ // `move`, `|` (BinOp(Or)), or `||` (OrOr).
+ !self.look_ahead(1, |tok| {
+ if tok.is_keyword(keywords::Move) {
+ return true;
+ }
+ match *tok {
+ token::BinOp(token::Or) | token::OrOr => true,
+ _ => false,
+ }
+ })
+ }
+
+ /// Entry point for item parsing: delegates to `parse_item_implementation`
+ /// while capturing the token stream consumed, which is attached to the item
+ /// for later proc-macro re-tokenization (unless inner attributes are present,
+ /// see below).
+ fn parse_item_(
+ &mut self,
+ attrs: Vec<Attribute>,
+ macros_allowed: bool,
+ attributes_allowed: bool,
+ ) -> PResult<'a, Option<P<Item>>> {
+ let (ret, tokens) = self.collect_tokens(|this| {
+ this.parse_item_implementation(attrs, macros_allowed, attributes_allowed)
+ })?;
+
+ // Once we've parsed an item and recorded the tokens we got while
+ // parsing we may want to store `tokens` into the item we're about to
+ // return. Note, though, that we specifically didn't capture tokens
+ // related to outer attributes. The `tokens` field here may later be
+ // used with procedural macros to convert this item back into a token
+ // stream, but during expansion we may be removing attributes as we go
+ // along.
+ //
+ // If we've got inner attributes then the `tokens` we've got above holds
+ // these inner attributes. If an inner attribute is expanded we won't
+ // actually remove it from the token stream, so we'll just keep yielding
+ // it (bad!). To work around this case for now we just avoid recording
+ // `tokens` if we detect any inner attributes. This should help keep
+ // expansion correct, but we should fix this bug one day!
+ Ok(ret.map(|item| {
+ item.map(|mut i| {
+ if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+ i.tokens = Some(tokens);
+ }
+ i
+ })
+ }))
+ }
+
+ /// Parses one of the items allowed by the flags.
+ fn parse_item_implementation(
+ &mut self,
+ attrs: Vec<Attribute>,
+ macros_allowed: bool,
+ attributes_allowed: bool,
+ ) -> PResult<'a, Option<P<Item>>> {
+ maybe_whole!(self, NtItem, |item| {
+ let mut item = item.into_inner();
+ let mut attrs = attrs;
+ mem::swap(&mut item.attrs, &mut attrs);
+ item.attrs.extend(attrs);
+ Some(P(item))
+ });
+
+ let lo = self.span;
+
+ let visibility = self.parse_visibility(false)?;
+
+ if self.eat_keyword(keywords::Use) {
+ // USE ITEM
+ let item_ = ItemKind::Use(P(self.parse_use_tree()?));
+ self.expect(&token::Semi)?;
+
+ let span = lo.to(self.prev_span);
+ let item = self.mk_item(span, keywords::Invalid.ident(), item_, visibility, attrs);
+ return Ok(Some(item));
+ }
+
+ if self.eat_keyword(keywords::Extern) {
+ if self.eat_keyword(keywords::Crate) {
+ return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
+ }
+
+ let opt_abi = self.parse_opt_abi()?;
+
+ if self.eat_keyword(keywords::Fn) {
+ // EXTERN FUNCTION ITEM
+ let fn_span = self.prev_span;
+ let abi = opt_abi.unwrap_or(Abi::C);
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(Unsafety::Normal,
+ IsAsync::NotAsync,
+ respan(fn_span, Constness::NotConst),
+ abi)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ } else if self.check(&token::OpenDelim(token::Brace)) {
+ return Ok(Some(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs)?));
+ }
+
+ self.unexpected()?;
+ }
+
+ if self.is_static_global() {
+ self.bump();
+ // STATIC ITEM
+ let m = if self.eat_keyword(keywords::Mut) {
+ Mutability::Mutable
+ } else {
+ Mutability::Immutable
+ };
+ let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.eat_keyword(keywords::Const) {
+ let const_span = self.prev_span;
+ if self.check_keyword(keywords::Fn)
+ || (self.check_keyword(keywords::Unsafe)
+ && self.look_ahead(1, |t| t.is_keyword(keywords::Fn))) {
+ // CONST FUNCTION ITEM
+ let unsafety = self.parse_unsafety();
+ self.bump();
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(unsafety,
+ IsAsync::NotAsync,
+ respan(const_span, Constness::Const),
+ Abi::Rust)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+
+ // CONST ITEM
+ if self.eat_keyword(keywords::Mut) {
+ let prev_span = self.prev_span;
+ let mut err = self.diagnostic()
+ .struct_span_err(prev_span, "const globals cannot be mutable");
+ err.span_label(prev_span, "cannot be mutable");
+ err.span_suggestion(
+ const_span,
+ "you might want to declare a static instead",
+ "static".to_owned(),
+ Applicability::MaybeIncorrect,
+ );
+ err.emit();
+ }
+ let (ident, item_, extra_attrs) = self.parse_item_const(None)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+
+ // `unsafe async fn` or `async fn`
+ if (
+ self.check_keyword(keywords::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Async))
+ ) || (
+ self.check_keyword(keywords::Async) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
+ )
+ {
+ // ASYNC FUNCTION ITEM
+ let unsafety = self.parse_unsafety();
+ self.expect_keyword(keywords::Async)?;
+ self.expect_keyword(keywords::Fn)?;
+ let fn_span = self.prev_span;
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(unsafety,
+ IsAsync::Async {
+ closure_id: ast::DUMMY_NODE_ID,
+ return_impl_trait_id: ast::DUMMY_NODE_ID,
+ },
+ respan(fn_span, Constness::NotConst),
+ Abi::Rust)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.check_keyword(keywords::Unsafe) &&
+ (self.look_ahead(1, |t| t.is_keyword(keywords::Trait)) ||
+ self.look_ahead(1, |t| t.is_keyword(keywords::Auto)))
+ {
+ // UNSAFE TRAIT ITEM
+ self.bump(); // `unsafe`
+ let is_auto = if self.eat_keyword(keywords::Trait) {
+ IsAuto::No
+ } else {
+ self.expect_keyword(keywords::Auto)?;
+ self.expect_keyword(keywords::Trait)?;
+ IsAuto::Yes
+ };
+ let (ident, item_, extra_attrs) =
+ self.parse_item_trait(is_auto, Unsafety::Unsafe)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.check_keyword(keywords::Impl) ||
+ self.check_keyword(keywords::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
+ self.check_keyword(keywords::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
+ self.check_keyword(keywords::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) {
+ // IMPL ITEM
+ let defaultness = self.parse_defaultness();
+ let unsafety = self.parse_unsafety();
+ self.expect_keyword(keywords::Impl)?;
+ let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
+ let span = lo.to(self.prev_span);
+ return Ok(Some(self.mk_item(span, ident, item, visibility,
+ maybe_append(attrs, extra_attrs))));
+ }
+ if self.check_keyword(keywords::Fn) {
+ // FUNCTION ITEM
+ self.bump();
+ let fn_span = self.prev_span;
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(Unsafety::Normal,
+ IsAsync::NotAsync,
+ respan(fn_span, Constness::NotConst),
+ Abi::Rust)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.check_keyword(keywords::Unsafe)
+ && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
+ // UNSAFE FUNCTION ITEM
+ self.bump(); // `unsafe`
+ // `{` is also expected after `unsafe`, in case of error, include it in the diagnostic
+ self.check(&token::OpenDelim(token::Brace));
+ let abi = if self.eat_keyword(keywords::Extern) {
+ self.parse_opt_abi()?.unwrap_or(Abi::C)
+ } else {
+ Abi::Rust
+ };
+ self.expect_keyword(keywords::Fn)?;
+ let fn_span = self.prev_span;
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(Unsafety::Unsafe,
+ IsAsync::NotAsync,
+ respan(fn_span, Constness::NotConst),
+ abi)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.eat_keyword(keywords::Mod) {
+ // MODULE ITEM
+ let (ident, item_, extra_attrs) =
+ self.parse_item_mod(&attrs[..])?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if let Some(type_) = self.eat_type() {
+ let (ident, alias, generics) = type_?;
+ // TYPE ITEM
+ let item_ = match alias {
+ AliasKind::Weak(ty) => ItemKind::Ty(ty, generics),
+ AliasKind::Existential(bounds) => ItemKind::Existential(bounds, generics),
+ };
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ attrs);
+ return Ok(Some(item));
+ }
+ if self.eat_keyword(keywords::Enum) {
+ // ENUM ITEM
+ let (ident, item_, extra_attrs) = self.parse_item_enum()?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.check_keyword(keywords::Trait)
+ || (self.check_keyword(keywords::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ {
+ let is_auto = if self.eat_keyword(keywords::Trait) {
+ IsAuto::No
+ } else {
+ self.expect_keyword(keywords::Auto)?;
+ self.expect_keyword(keywords::Trait)?;
+ IsAuto::Yes
+ };
+ // TRAIT ITEM
+ let (ident, item_, extra_attrs) =
+ self.parse_item_trait(is_auto, Unsafety::Normal)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.eat_keyword(keywords::Struct) {
+ // STRUCT ITEM
+ let (ident, item_, extra_attrs) = self.parse_item_struct()?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.is_union_item() {
+ // UNION ITEM
+ self.bump();
+ let (ident, item_, extra_attrs) = self.parse_item_union()?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility, lo)? {
+ return Ok(Some(macro_def));
+ }
+
+ // Verify whether we have encountered a struct or method definition where the user forgot to
+ // add the `struct` or `fn` keyword after writing `pub`: `pub S {}`
+ if visibility.node.is_pub() &&
+ self.check_ident() &&
+ self.look_ahead(1, |t| *t != token::Not)
+ {
+ // Space between `pub` keyword and the identifier
+ //
+ // pub S {}
+ // ^^^ `sp` points here
+ let sp = self.prev_span.between(self.span);
+ let full_sp = self.prev_span.to(self.span);
+ let ident_sp = self.span;
+ if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) {
+ // possible public struct definition where `struct` was forgotten
+ let ident = self.parse_ident().unwrap();
+ let msg = format!("add `struct` here to parse `{}` as a public struct",
+ ident);
+ let mut err = self.diagnostic()
+ .struct_span_err(sp, "missing `struct` for struct definition");
+ err.span_suggestion_short(
+ sp, &msg, " struct ".into(), Applicability::MaybeIncorrect // speculative
+ );
+ return Err(err);
+ } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
+ let ident = self.parse_ident().unwrap();
+ self.bump(); // `(`
+ let kw_name = if let Ok(Some(_)) = self.parse_self_arg() {
+ "method"
+ } else {
+ "function"
+ };
+ self.consume_block(token::Paren);
+ let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
+ self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]);
+ self.bump(); // `{`
+ ("fn", kw_name, false)
+ } else if self.check(&token::OpenDelim(token::Brace)) {
+ self.bump(); // `{`
+ ("fn", kw_name, false)
+ } else if self.check(&token::Colon) {
+ let kw = "struct";
+ (kw, kw, false)
+ } else {
+ ("fn` or `struct", "function or struct", true)
+ };
+ self.consume_block(token::Brace);
+
+ let msg = format!("missing `{}` for {} definition", kw, kw_name);
+ let mut err = self.diagnostic().struct_span_err(sp, &msg);
+ if !ambiguous {
+ let suggestion = format!("add `{}` here to parse `{}` as a public {}",
+ kw,
+ ident,
+ kw_name);
+ err.span_suggestion_short(
+ sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable
+ );
+ } else {
+ if let Ok(snippet) = self.sess.source_map().span_to_snippet(ident_sp) {
+ err.span_suggestion(
+ full_sp,
+ "if you meant to call a macro, try",
+ format!("{}!", snippet),
+ // this is the `ambiguous` conditional branch
+ Applicability::MaybeIncorrect
+ );
+ } else {
+ err.help("if you meant to call a macro, remove the `pub` \
+ and add a trailing `!` after the identifier");
+ }
+ }
+ return Err(err);
+ } else if self.look_ahead(1, |t| *t == token::Lt) {
+ let ident = self.parse_ident().unwrap();
+ self.eat_to_tokens(&[&token::Gt]);
+ self.bump(); // `>`
+ let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
+ if let Ok(Some(_)) = self.parse_self_arg() {
+ ("fn", "method", false)
+ } else {
+ ("fn", "function", false)
+ }
+ } else if self.check(&token::OpenDelim(token::Brace)) {
+ ("struct", "struct", false)
+ } else {
+ ("fn` or `struct", "function or struct", true)
+ };
+ let msg = format!("missing `{}` for {} definition", kw, kw_name);
+ let mut err = self.diagnostic().struct_span_err(sp, &msg);
+ if !ambiguous {
+ err.span_suggestion_short(
+ sp,
+ &format!("add `{}` here to parse `{}` as a public {}", kw, ident, kw_name),
+ format!(" {} ", kw),
+ Applicability::MachineApplicable,
+ );
+ }
+ return Err(err);
+ }
+ }
+ self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
+ }
+
+ /// Parses a foreign item.
+ crate fn parse_foreign_item(&mut self) -> PResult<'a, ForeignItem> {
+ maybe_whole!(self, NtForeignItem, |ni| ni);
+
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+ let visibility = self.parse_visibility(false)?;
+
+ // FOREIGN STATIC ITEM
+ // Treat `const` as `static` for error recovery, but don't add it to expected tokens.
+ if self.check_keyword(keywords::Static) || self.token.is_keyword(keywords::Const) {
+ if self.token.is_keyword(keywords::Const) {
+ self.diagnostic()
+ .struct_span_err(self.span, "extern items cannot be `const`")
+ .span_suggestion(
+ self.span,
+ "try using a static value",
+ "static".to_owned(),
+ Applicability::MachineApplicable
+ ).emit();
+ }
+ self.bump(); // `static` or `const`
+ return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
+ }
+ // FOREIGN FUNCTION ITEM
+ if self.check_keyword(keywords::Fn) {
+ return Ok(self.parse_item_foreign_fn(visibility, lo, attrs)?);
+ }
+ // FOREIGN TYPE ITEM
+ if self.check_keyword(keywords::Type) {
+ return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?);
+ }
+
+ match self.parse_assoc_macro_invoc("extern", Some(&visibility), &mut false)? {
+ Some(mac) => {
+ Ok(
+ ForeignItem {
+ ident: keywords::Invalid.ident(),
+ span: lo.to(self.prev_span),
+ id: ast::DUMMY_NODE_ID,
+ attrs,
+ vis: visibility,
+ node: ForeignItemKind::Macro(mac),
+ }
+ )
+ }
+ None => {
+ if !attrs.is_empty() {
+ self.expected_item_err(&attrs)?;
+ }
+
+ self.unexpected()
+ }
+ }
+ }
+
+ /// This is the fall-through for parsing items.
+ fn parse_macro_use_or_failure(
+ &mut self,
+ attrs: Vec<Attribute> ,
+ macros_allowed: bool,
+ attributes_allowed: bool,
+ lo: Span,
+ visibility: Visibility
+ ) -> PResult<'a, Option<P<Item>>> {
+ if macros_allowed && self.token.is_path_start() {
+ // MACRO INVOCATION ITEM
+
+ let prev_span = self.prev_span;
+ self.complain_if_pub_macro(&visibility.node, prev_span);
+
+ let mac_lo = self.span;
+
+ // item macro.
+ let pth = self.parse_path(PathStyle::Mod)?;
+ self.expect(&token::Not)?;
+
+ // a 'special' identifier (like what `macro_rules!` uses)
+ // is optional. We should eventually unify invoc syntax
+ // and remove this.
+ let id = if self.token.is_ident() {
+ self.parse_ident()?
+ } else {
+ keywords::Invalid.ident() // no special identifier
+ };
+ // eat a matched-delimiter token tree:
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ if delim != MacDelimiter::Brace {
+ if !self.eat(&token::Semi) {
+ self.span_err(self.prev_span,
+ "macros that expand to items must either \
+ be surrounded with braces or followed by \
+ a semicolon");
+ }
+ }
+
+ let hi = self.prev_span;
+ let mac = respan(mac_lo.to(hi), Mac_ { path: pth, tts, delim });
+ let item = self.mk_item(lo.to(hi), id, ItemKind::Mac(mac), visibility, attrs);
+ return Ok(Some(item));
+ }
+
+ // FAILURE TO PARSE ITEM
+ match visibility.node {
+ VisibilityKind::Inherited => {}
+ _ => {
+ return Err(self.span_fatal(self.prev_span, "unmatched visibility `pub`"));
+ }
+ }
+
+ if !attributes_allowed && !attrs.is_empty() {
+ self.expected_item_err(&attrs)?;
+ }
+ Ok(None)
+ }
+
+ /// Parses a macro invocation inside a `trait`, `impl` or `extern` block.
+ fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
+ at_end: &mut bool) -> PResult<'a, Option<Mac>>
+ {
+ if self.token.is_path_start() {
+ let prev_span = self.prev_span;
+ let lo = self.span;
+ let pth = self.parse_path(PathStyle::Mod)?;
+
+ if pth.segments.len() == 1 {
+ if !self.eat(&token::Not) {
+ return Err(self.missing_assoc_item_kind_err(item_kind, prev_span));
+ }
+ } else {
+ self.expect(&token::Not)?;
+ }
+
+ if let Some(vis) = vis {
+ self.complain_if_pub_macro(&vis.node, prev_span);
+ }
+
+ *at_end = true;
+
+ // eat a matched-delimiter token tree:
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ if delim != MacDelimiter::Brace {
+ self.expect(&token::Semi)?;
+ }
+
+ Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
+ } else {
+ Ok(None)
+ }
+ }
+
+ fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)>
+ where F: FnOnce(&mut Self) -> PResult<'a, R>
+ {
+ // Record all tokens we parse when parsing this item.
+ let mut tokens = Vec::new();
+ let prev_collecting = match self.token_cursor.frame.last_token {
+ LastToken::Collecting(ref mut list) => {
+ Some(mem::replace(list, Vec::new()))
+ }
+ LastToken::Was(ref mut last) => {
+ tokens.extend(last.take());
+ None
+ }
+ };
+ self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
+ let prev = self.token_cursor.stack.len();
+ let ret = f(self);
+ let last_token = if self.token_cursor.stack.len() == prev {
+ &mut self.token_cursor.frame.last_token
+ } else {
+ &mut self.token_cursor.stack[prev].last_token
+ };
+
+ // Pull out the tokens that we've collected from the call to `f` above.
+ let mut collected_tokens = match *last_token {
+ LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
+ LastToken::Was(_) => panic!("our vector went away?"),
+ };
+
+ // If we're not at EOF our current token wasn't actually consumed by
+ // `f`, but it'll still be in our list that we pulled out. In that case
+ // put it back.
+ let extra_token = if self.token != token::Eof {
+ collected_tokens.pop()
+ } else {
+ None
+ };
+
+ // If we were previously collecting tokens, then this was a recursive
+ // call. In that case we need to record all the tokens we collected in
+ // our parent list as well. To do that we push a clone of our stream
+ // onto the previous list.
+ match prev_collecting {
+ Some(mut list) => {
+ list.extend(collected_tokens.iter().cloned());
+ list.extend(extra_token);
+ *last_token = LastToken::Collecting(list);
+ }
+ None => {
+ *last_token = LastToken::Was(extra_token);
+ }
+ }
+
+ Ok((ret?, TokenStream::new(collected_tokens)))
+ }
+
+ pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
+ let attrs = self.parse_outer_attributes()?;
+ self.parse_item_(attrs, true, false)
+ }
+
+ /// `::{` or `::*`
+ fn is_import_coupler(&mut self) -> bool {
+ self.check(&token::ModSep) &&
+ self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) ||
+ *t == token::BinOp(token::Star))
+ }
+
+ /// Parses a `UseTree`.
+ ///
+ /// ```
+ /// USE_TREE = [`::`] `*` |
+ /// [`::`] `{` USE_TREE_LIST `}` |
+ /// PATH `::` `*` |
+ /// PATH `::` `{` USE_TREE_LIST `}` |
+ /// PATH [`as` IDENT]
+ /// ```
+ fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
+ let lo = self.span;
+
+ let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() };
+ let kind = if self.check(&token::OpenDelim(token::Brace)) ||
+ self.check(&token::BinOp(token::Star)) ||
+ self.is_import_coupler() {
+ // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
+ let mod_sep_ctxt = self.span.ctxt();
+ if self.eat(&token::ModSep) {
+ prefix.segments.push(
+ PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))
+ );
+ }
+
+ if self.eat(&token::BinOp(token::Star)) {
+ UseTreeKind::Glob
+ } else {
+ UseTreeKind::Nested(self.parse_use_tree_list()?)
+ }
+ } else {
+ // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
+ prefix = self.parse_path(PathStyle::Mod)?;
+
+ if self.eat(&token::ModSep) {
+ if self.eat(&token::BinOp(token::Star)) {
+ UseTreeKind::Glob
+ } else {
+ UseTreeKind::Nested(self.parse_use_tree_list()?)
+ }
+ } else {
+ UseTreeKind::Simple(self.parse_rename()?, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID)
+ }
+ };
+
+ Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
+ }
+
+ /// Parses a `UseTreeKind::Nested(list)`.
+ ///
+ /// ```
+ /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
+ /// ```
+ fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
+ self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
+ &token::CloseDelim(token::Brace),
+ SeqSep::trailing_allowed(token::Comma), |this| {
+ Ok((this.parse_use_tree()?, ast::DUMMY_NODE_ID))
+ })
+ }
+
+ fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
+ if self.eat_keyword(keywords::As) {
+ self.parse_ident_or_underscore().map(Some)
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Parses a source module as a crate. This is the main entry point for the parser.
+ pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
+ let lo = self.span;
+ let krate = Ok(ast::Crate {
+ attrs: self.parse_inner_attributes()?,
+ module: self.parse_mod_items(&token::Eof, lo)?,
+ span: lo.to(self.span),
+ });
+ emit_unclosed_delims(&self.unclosed_delims, self.diagnostic());
+ self.unclosed_delims.clear();
+ krate
+ }
+
+ pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
+ let ret = match self.token {
+ token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
+ token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
+ _ => return None
+ };
+ self.bump();
+ Some(ret)
+ }
+
+ pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
+ match self.parse_optional_str() {
+ Some((s, style, suf)) => {
+ let sp = self.prev_span;
+ self.expect_no_suffix(sp, "string literal", suf);
+ Ok((s, style))
+ }
+ _ => {
+ let msg = "expected string literal";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ Err(err)
+ }
+ }
+ }
+}
+
+pub fn emit_unclosed_delims(unclosed_delims: &[UnmatchedBrace], handler: &errors::Handler) {
+ for unmatched in unclosed_delims {
+ let mut err = handler.struct_span_err(unmatched.found_span, &format!(
+ "incorrect close delimiter: `{}`",
+ pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+ ));
+ err.span_label(unmatched.found_span, "incorrect close delimiter");
+ if let Some(sp) = unmatched.candidate_span {
+ err.span_label(sp, "close delimiter possibly meant for this");
+ }
+ if let Some(sp) = unmatched.unclosed_span {
+ err.span_label(sp, "un-closed delimiter");
+ }
+ err.emit();
+ }
+}
diff --git a/src/tools/rust-analyzer/bench_data/numerous_macro_rules b/src/tools/rust-analyzer/bench_data/numerous_macro_rules
new file mode 100644
index 000000000..bf89ed594
--- /dev/null
+++ b/src/tools/rust-analyzer/bench_data/numerous_macro_rules
@@ -0,0 +1,560 @@
+macro_rules! __ra_macro_fixture0 {($T : ident )=>( int_module ! ($T , # [ stable ( feature = "rust1" , since = "1.0.0" )]);); ($T : ident , # [$attr : meta ])=>( doc_comment ! { concat ! ( "The smallest value that can be represented by this integer type.\nUse [`" , stringify ! ($T ), "::MIN" , "`](../../std/primitive." , stringify ! ($T ), ".html#associatedconstant.MIN) instead.\n\n# Examples\n\n```rust\n// deprecated way\nlet min = std::" , stringify ! ($T ), "::MIN;\n\n// intended way\nlet min = " , stringify ! ($T ), "::MIN;\n```\n" ), # [$attr ] pub const MIN : $T = $T :: MIN ; } doc_comment ! { concat ! ( "The largest value that can be represented by this integer type.\nUse [`" , stringify ! ($T ), "::MAX" , "`](../../std/primitive." , stringify ! ($T ), ".html#associatedconstant.MAX) instead.\n\n# Examples\n\n```rust\n// deprecated way\nlet max = std::" , stringify ! ($T ), "::MAX;\n\n// intended way\nlet max = " , stringify ! ($T ), "::MAX;\n```\n" ), # [$attr ] pub const MAX : $T = $T :: MAX ; })}
+macro_rules! __ra_macro_fixture1 {($($ty : ty : add ($addfn : path ), mul / div ($bigty : ident );)*)=>($(impl FullOps for $ty { fn full_add ( self , other : $ty , carry : bool )-> ( bool , $ty ){ let ( v , carry1 )= intrinsics :: add_with_overflow ( self , other ); let ( v , carry2 )= intrinsics :: add_with_overflow ( v , if carry { 1 } else { 0 }); ( carry1 || carry2 , v )} fn full_mul ( self , other : $ty , carry : $ty )-> ($ty , $ty ){ let v = ( self as $bigty )* ( other as $bigty )+ ( carry as $bigty ); (( v >> <$ty >:: BITS ) as $ty , v as $ty )} fn full_mul_add ( self , other : $ty , other2 : $ty , carry : $ty )-> ($ty , $ty ){ let v = ( self as $bigty )* ( other as $bigty )+ ( other2 as $bigty )+ ( carry as $bigty ); (( v >> <$ty >:: BITS ) as $ty , v as $ty )} fn full_div_rem ( self , other : $ty , borrow : $ty )-> ($ty , $ty ){ debug_assert ! ( borrow < other ); let lhs = (( borrow as $bigty )<< <$ty >:: BITS )| ( self as $bigty ); let rhs = other as $bigty ; (( lhs / rhs ) as $ty , ( lhs % rhs ) as $ty )}})* )}
+macro_rules! __ra_macro_fixture2 {($name : ident : type =$ty : ty , n =$n : expr )=>{# [ doc = " Stack-allocated arbitrary-precision (up to certain limit) integer." ]# [ doc = "" ]# [ doc = " This is backed by a fixed-size array of given type (\\\"digit\\\")." ]# [ doc = " While the array is not very large (normally some hundred bytes)," ]# [ doc = " copying it recklessly may result in the performance hit." ]# [ doc = " Thus this is intentionally not `Copy`." ]# [ doc = "" ]# [ doc = " All operations available to bignums panic in the case of overflows." ]# [ doc = " The caller is responsible to use large enough bignum types." ] pub struct $name {# [ doc = " One plus the offset to the maximum \\\"digit\\\" in use." ]# [ doc = " This does not decrease, so be aware of the computation order." ]# [ doc = " `base[size..]` should be zero." ] size : usize , # [ doc = " Digits. `[a, b, c, ...]` represents `a + b*2^W + c*2^(2W) + ...`" ]# [ doc = " where `W` is the number of bits in the digit type." ] base : [$ty ; $n ], } impl $name {# [ doc = " Makes a bignum from one digit." ] pub fn from_small ( v : $ty )-> $name { let mut base = [ 0 ; $n ]; base [ 0 ]= v ; $name { size : 1 , base : base }}# [ doc = " Makes a bignum from `u64` value." ] pub fn from_u64 ( mut v : u64 )-> $name { let mut base = [ 0 ; $n ]; let mut sz = 0 ; while v > 0 { base [ sz ]= v as $ty ; v >>= <$ty >:: BITS ; sz += 1 ; }$name { size : sz , base : base }}# [ doc = " Returns the internal digits as a slice `[a, b, c, ...]` such that the numeric" ]# [ doc = " value is `a + b * 2^W + c * 2^(2W) + ...` where `W` is the number of bits in" ]# [ doc = " the digit type." ] pub fn digits (& self )-> & [$ty ]{& self . base [.. self . size ]}# [ doc = " Returns the `i`-th bit where bit 0 is the least significant one." ]# [ doc = " In other words, the bit with weight `2^i`." 
] pub fn get_bit (& self , i : usize )-> u8 { let digitbits = <$ty >:: BITS as usize ; let d = i / digitbits ; let b = i % digitbits ; (( self . base [ d ]>> b )& 1 ) as u8 }# [ doc = " Returns `true` if the bignum is zero." ] pub fn is_zero (& self )-> bool { self . digits (). iter (). all (|& v | v == 0 )}# [ doc = " Returns the number of bits necessary to represent this value. Note that zero" ]# [ doc = " is considered to need 0 bits." ] pub fn bit_length (& self )-> usize { let digits = self . digits (); let zeros = digits . iter (). rev (). take_while (|&& x | x == 0 ). count (); let end = digits . len ()- zeros ; let nonzero = & digits [.. end ]; if nonzero . is_empty (){ return 0 ; } let digitbits = <$ty >:: BITS as usize ; let mut i = nonzero . len ()* digitbits - 1 ; while self . get_bit ( i )== 0 { i -= 1 ; } i + 1 }# [ doc = " Adds `other` to itself and returns its own mutable reference." ] pub fn add < 'a > (& 'a mut self , other : &$name )-> & 'a mut $name { use crate :: cmp ; use crate :: num :: bignum :: FullOps ; let mut sz = cmp :: max ( self . size , other . size ); let mut carry = false ; for ( a , b ) in self . base [.. sz ]. iter_mut (). zip (& other . base [.. sz ]){ let ( c , v )= (* a ). full_add (* b , carry ); * a = v ; carry = c ; } if carry { self . base [ sz ]= 1 ; sz += 1 ; } self . size = sz ; self } pub fn add_small (& mut self , other : $ty )-> & mut $name { use crate :: num :: bignum :: FullOps ; let ( mut carry , v )= self . base [ 0 ]. full_add ( other , false ); self . base [ 0 ]= v ; let mut i = 1 ; while carry { let ( c , v )= self . base [ i ]. full_add ( 0 , carry ); self . base [ i ]= v ; carry = c ; i += 1 ; } if i > self . size { self . size = i ; } self }# [ doc = " Subtracts `other` from itself and returns its own mutable reference." ] pub fn sub < 'a > (& 'a mut self , other : &$name )-> & 'a mut $name { use crate :: cmp ; use crate :: num :: bignum :: FullOps ; let sz = cmp :: max ( self . size , other . 
size ); let mut noborrow = true ; for ( a , b ) in self . base [.. sz ]. iter_mut (). zip (& other . base [.. sz ]){ let ( c , v )= (* a ). full_add (!* b , noborrow ); * a = v ; noborrow = c ; } assert ! ( noborrow ); self . size = sz ; self }# [ doc = " Multiplies itself by a digit-sized `other` and returns its own" ]# [ doc = " mutable reference." ] pub fn mul_small (& mut self , other : $ty )-> & mut $name { use crate :: num :: bignum :: FullOps ; let mut sz = self . size ; let mut carry = 0 ; for a in & mut self . base [.. sz ]{ let ( c , v )= (* a ). full_mul ( other , carry ); * a = v ; carry = c ; } if carry > 0 { self . base [ sz ]= carry ; sz += 1 ; } self . size = sz ; self }# [ doc = " Multiplies itself by `2^bits` and returns its own mutable reference." ] pub fn mul_pow2 (& mut self , bits : usize )-> & mut $name { let digitbits = <$ty >:: BITS as usize ; let digits = bits / digitbits ; let bits = bits % digitbits ; assert ! ( digits < $n ); debug_assert ! ( self . base [$n - digits ..]. iter (). all (|& v | v == 0 )); debug_assert ! ( bits == 0 || ( self . base [$n - digits - 1 ]>> ( digitbits - bits ))== 0 ); for i in ( 0 .. self . size ). rev (){ self . base [ i + digits ]= self . base [ i ]; } for i in 0 .. digits { self . base [ i ]= 0 ; } let mut sz = self . size + digits ; if bits > 0 { let last = sz ; let overflow = self . base [ last - 1 ]>> ( digitbits - bits ); if overflow > 0 { self . base [ last ]= overflow ; sz += 1 ; } for i in ( digits + 1 .. last ). rev (){ self . base [ i ]= ( self . base [ i ]<< bits )| ( self . base [ i - 1 ]>> ( digitbits - bits )); } self . base [ digits ]<<= bits ; } self . size = sz ; self }# [ doc = " Multiplies itself by `5^e` and returns its own mutable reference." ] pub fn mul_pow5 (& mut self , mut e : usize )-> & mut $name { use crate :: mem ; use crate :: num :: bignum :: SMALL_POW5 ; let table_index = mem :: size_of ::<$ty > (). 
trailing_zeros () as usize ; let ( small_power , small_e )= SMALL_POW5 [ table_index ]; let small_power = small_power as $ty ; while e >= small_e { self . mul_small ( small_power ); e -= small_e ; } let mut rest_power = 1 ; for _ in 0 .. e { rest_power *= 5 ; } self . mul_small ( rest_power ); self }# [ doc = " Multiplies itself by a number described by `other[0] + other[1] * 2^W +" ]# [ doc = " other[2] * 2^(2W) + ...` (where `W` is the number of bits in the digit type)" ]# [ doc = " and returns its own mutable reference." ] pub fn mul_digits < 'a > (& 'a mut self , other : & [$ty ])-> & 'a mut $name { fn mul_inner ( ret : & mut [$ty ; $n ], aa : & [$ty ], bb : & [$ty ])-> usize { use crate :: num :: bignum :: FullOps ; let mut retsz = 0 ; for ( i , & a ) in aa . iter (). enumerate (){ if a == 0 { continue ; } let mut sz = bb . len (); let mut carry = 0 ; for ( j , & b ) in bb . iter (). enumerate (){ let ( c , v )= a . full_mul_add ( b , ret [ i + j ], carry ); ret [ i + j ]= v ; carry = c ; } if carry > 0 { ret [ i + sz ]= carry ; sz += 1 ; } if retsz < i + sz { retsz = i + sz ; }} retsz } let mut ret = [ 0 ; $n ]; let retsz = if self . size < other . len (){ mul_inner (& mut ret , & self . digits (), other )} else { mul_inner (& mut ret , other , & self . digits ())}; self . base = ret ; self . size = retsz ; self }# [ doc = " Divides itself by a digit-sized `other` and returns its own" ]# [ doc = " mutable reference *and* the remainder." ] pub fn div_rem_small (& mut self , other : $ty )-> (& mut $name , $ty ){ use crate :: num :: bignum :: FullOps ; assert ! ( other > 0 ); let sz = self . size ; let mut borrow = 0 ; for a in self . base [.. sz ]. iter_mut (). rev (){ let ( q , r )= (* a ). full_div_rem ( other , borrow ); * a = q ; borrow = r ; }( self , borrow )}# [ doc = " Divide self by another bignum, overwriting `q` with the quotient and `r` with the" ]# [ doc = " remainder." 
] pub fn div_rem (& self , d : &$name , q : & mut $name , r : & mut $name ){ assert ! (! d . is_zero ()); let digitbits = <$ty >:: BITS as usize ; for digit in & mut q . base [..]{* digit = 0 ; } for digit in & mut r . base [..]{* digit = 0 ; } r . size = d . size ; q . size = 1 ; let mut q_is_zero = true ; let end = self . bit_length (); for i in ( 0 .. end ). rev (){ r . mul_pow2 ( 1 ); r . base [ 0 ]|= self . get_bit ( i ) as $ty ; if &* r >= d { r . sub ( d ); let digit_idx = i / digitbits ; let bit_idx = i % digitbits ; if q_is_zero { q . size = digit_idx + 1 ; q_is_zero = false ; } q . base [ digit_idx ]|= 1 << bit_idx ; }} debug_assert ! ( q . base [ q . size ..]. iter (). all (|& d | d == 0 )); debug_assert ! ( r . base [ r . size ..]. iter (). all (|& d | d == 0 )); }} impl crate :: cmp :: PartialEq for $name { fn eq (& self , other : &$name )-> bool { self . base [..]== other . base [..]}} impl crate :: cmp :: Eq for $name {} impl crate :: cmp :: PartialOrd for $name { fn partial_cmp (& self , other : &$name )-> crate :: option :: Option < crate :: cmp :: Ordering > { crate :: option :: Option :: Some ( self . cmp ( other ))}} impl crate :: cmp :: Ord for $name { fn cmp (& self , other : &$name )-> crate :: cmp :: Ordering { use crate :: cmp :: max ; let sz = max ( self . size , other . size ); let lhs = self . base [.. sz ]. iter (). cloned (). rev (); let rhs = other . base [.. sz ]. iter (). cloned (). rev (); lhs . cmp ( rhs )}} impl crate :: clone :: Clone for $name { fn clone (& self )-> Self { Self { size : self . size , base : self . base }}} impl crate :: fmt :: Debug for $name { fn fmt (& self , f : & mut crate :: fmt :: Formatter < '_ >)-> crate :: fmt :: Result { let sz = if self . size < 1 { 1 } else { self . size }; let digitlen = <$ty >:: BITS as usize / 4 ; write ! ( f , "{:#x}" , self . base [ sz - 1 ])?; for & v in self . base [.. sz - 1 ]. iter (). rev (){ write ! 
( f , "_{:01$x}" , v , digitlen )?; } crate :: result :: Result :: Ok (())}}}; }
+macro_rules! __ra_macro_fixture3 {($t : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl FromStr for $t { type Err = ParseFloatError ; # [ doc = " Converts a string in base 10 to a float." ]# [ doc = " Accepts an optional decimal exponent." ]# [ doc = "" ]# [ doc = " This function accepts strings such as" ]# [ doc = "" ]# [ doc = " * \\\'3.14\\\'" ]# [ doc = " * \\\'-3.14\\\'" ]# [ doc = " * \\\'2.5E10\\\', or equivalently, \\\'2.5e10\\\'" ]# [ doc = " * \\\'2.5E-10\\\'" ]# [ doc = " * \\\'5.\\\'" ]# [ doc = " * \\\'.5\\\', or, equivalently, \\\'0.5\\\'" ]# [ doc = " * \\\'inf\\\', \\\'-inf\\\', \\\'NaN\\\'" ]# [ doc = "" ]# [ doc = " Leading and trailing whitespace represent an error." ]# [ doc = "" ]# [ doc = " # Grammar" ]# [ doc = "" ]# [ doc = " All strings that adhere to the following [EBNF] grammar" ]# [ doc = " will result in an [`Ok`] being returned:" ]# [ doc = "" ]# [ doc = " ```txt" ]# [ doc = " Float ::= Sign? ( \\\'inf\\\' | \\\'NaN\\\' | Number )" ]# [ doc = " Number ::= ( Digit+ |" ]# [ doc = " Digit+ \\\'.\\\' Digit* |" ]# [ doc = " Digit* \\\'.\\\' Digit+ ) Exp?" ]# [ doc = " Exp ::= [eE] Sign? Digit+" ]# [ doc = " Sign ::= [+-]" ]# [ doc = " Digit ::= [0-9]" ]# [ doc = " ```" ]# [ doc = "" ]# [ doc = " [EBNF]: https://www.w3.org/TR/REC-xml/#sec-notation" ]# [ doc = "" ]# [ doc = " # Known bugs" ]# [ doc = "" ]# [ doc = " In some situations, some strings that should create a valid float" ]# [ doc = " instead return an error. See [issue #31407] for details." ]# [ doc = "" ]# [ doc = " [issue #31407]: https://github.com/rust-lang/rust/issues/31407" ]# [ doc = "" ]# [ doc = " # Arguments" ]# [ doc = "" ]# [ doc = " * src - A string" ]# [ doc = "" ]# [ doc = " # Return value" ]# [ doc = "" ]# [ doc = " `Err(ParseFloatError)` if the string did not represent a valid" ]# [ doc = " number. Otherwise, `Ok(n)` where `n` is the floating-point" ]# [ doc = " number represented by `src`." 
]# [ inline ] fn from_str ( src : & str )-> Result < Self , ParseFloatError > { dec2flt ( src )}}}; }
+macro_rules! __ra_macro_fixture4 {($(# [$stability : meta ]$Ty : ident ($Int : ty ); )+ )=>{$(doc_comment ! { concat ! ( "An integer that is known not to equal zero.\n\nThis enables some memory layout optimization.\nFor example, `Option<" , stringify ! ($Ty ), ">` is the same size as `" , stringify ! ($Int ), "`:\n\n```rust\nuse std::mem::size_of;\nassert_eq!(size_of::<Option<core::num::" , stringify ! ($Ty ), ">>(), size_of::<" , stringify ! ($Int ), ">());\n```" ), # [$stability ]# [ derive ( Copy , Clone , Eq , PartialEq , Ord , PartialOrd , Hash )]# [ repr ( transparent )]# [ rustc_layout_scalar_valid_range_start ( 1 )]# [ rustc_nonnull_optimization_guaranteed ] pub struct $Ty ($Int ); } impl $Ty {# [ doc = " Creates a non-zero without checking the value." ]# [ doc = "" ]# [ doc = " # Safety" ]# [ doc = "" ]# [ doc = " The value must not be zero." ]# [$stability ]# [ rustc_const_stable ( feature = "nonzero" , since = "1.34.0" )]# [ inline ] pub const unsafe fn new_unchecked ( n : $Int )-> Self { unsafe { Self ( n )}}# [ doc = " Creates a non-zero if the given value is not zero." ]# [$stability ]# [ rustc_const_stable ( feature = "const_nonzero_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn new ( n : $Int )-> Option < Self > { if n != 0 { Some ( unsafe { Self ( n )})} else { None }}# [ doc = " Returns the value as a primitive type." ]# [$stability ]# [ inline ]# [ rustc_const_stable ( feature = "nonzero" , since = "1.34.0" )] pub const fn get ( self )-> $Int { self . 0 }}# [ stable ( feature = "from_nonzero" , since = "1.31.0" )] impl From <$Ty > for $Int { doc_comment ! { concat ! ( "Converts a `" , stringify ! ($Ty ), "` into an `" , stringify ! ($Int ), "`" ), # [ inline ] fn from ( nonzero : $Ty )-> Self { nonzero . 0 }}}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOr for $Ty { type Output = Self ; # [ inline ] fn bitor ( self , rhs : Self )-> Self :: Output { unsafe {$Ty :: new_unchecked ( self . get ()| rhs . 
get ())}}}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOr <$Int > for $Ty { type Output = Self ; # [ inline ] fn bitor ( self , rhs : $Int )-> Self :: Output { unsafe {$Ty :: new_unchecked ( self . get ()| rhs )}}}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOr <$Ty > for $Int { type Output = $Ty ; # [ inline ] fn bitor ( self , rhs : $Ty )-> Self :: Output { unsafe {$Ty :: new_unchecked ( self | rhs . get ())}}}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOrAssign for $Ty {# [ inline ] fn bitor_assign (& mut self , rhs : Self ){* self = * self | rhs ; }}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOrAssign <$Int > for $Ty {# [ inline ] fn bitor_assign (& mut self , rhs : $Int ){* self = * self | rhs ; }} impl_nonzero_fmt ! {# [$stability ]( Debug , Display , Binary , Octal , LowerHex , UpperHex ) for $Ty })+ }}
+macro_rules! __ra_macro_fixture5 {($($t : ty )*)=>{$(# [ stable ( feature = "nonzero_parse" , since = "1.35.0" )] impl FromStr for $t { type Err = ParseIntError ; fn from_str ( src : & str )-> Result < Self , Self :: Err > { Self :: new ( from_str_radix ( src , 10 )?). ok_or ( ParseIntError { kind : IntErrorKind :: Zero })}})*}}
+macro_rules! __ra_macro_fixture6 {($($t : ident )*)=>($(sh_impl_unsigned ! {$t , usize })*)}
+macro_rules! __ra_macro_fixture7 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Add for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn add ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_add ( other . 0 ))}} forward_ref_binop ! { impl Add , add for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl AddAssign for Wrapping <$t > {# [ inline ] fn add_assign (& mut self , other : Wrapping <$t >){* self = * self + other ; }} forward_ref_op_assign ! { impl AddAssign , add_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Sub for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn sub ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_sub ( other . 0 ))}} forward_ref_binop ! { impl Sub , sub for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl SubAssign for Wrapping <$t > {# [ inline ] fn sub_assign (& mut self , other : Wrapping <$t >){* self = * self - other ; }} forward_ref_op_assign ! { impl SubAssign , sub_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Mul for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn mul ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_mul ( other . 0 ))}} forward_ref_binop ! { impl Mul , mul for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl MulAssign for Wrapping <$t > {# [ inline ] fn mul_assign (& mut self , other : Wrapping <$t >){* self = * self * other ; }} forward_ref_op_assign ! 
{ impl MulAssign , mul_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "wrapping_div" , since = "1.3.0" )] impl Div for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn div ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_div ( other . 0 ))}} forward_ref_binop ! { impl Div , div for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl DivAssign for Wrapping <$t > {# [ inline ] fn div_assign (& mut self , other : Wrapping <$t >){* self = * self / other ; }} forward_ref_op_assign ! { impl DivAssign , div_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "wrapping_impls" , since = "1.7.0" )] impl Rem for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn rem ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_rem ( other . 0 ))}} forward_ref_binop ! { impl Rem , rem for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl RemAssign for Wrapping <$t > {# [ inline ] fn rem_assign (& mut self , other : Wrapping <$t >){* self = * self % other ; }} forward_ref_op_assign ! { impl RemAssign , rem_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Not for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn not ( self )-> Wrapping <$t > { Wrapping (! self . 0 )}} forward_ref_unop ! { impl Not , not for Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitXor for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn bitxor ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 ^ other . 0 )}} forward_ref_binop ! 
{ impl BitXor , bitxor for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitXorAssign for Wrapping <$t > {# [ inline ] fn bitxor_assign (& mut self , other : Wrapping <$t >){* self = * self ^ other ; }} forward_ref_op_assign ! { impl BitXorAssign , bitxor_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitOr for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn bitor ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 | other . 0 )}} forward_ref_binop ! { impl BitOr , bitor for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitOrAssign for Wrapping <$t > {# [ inline ] fn bitor_assign (& mut self , other : Wrapping <$t >){* self = * self | other ; }} forward_ref_op_assign ! { impl BitOrAssign , bitor_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitAnd for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn bitand ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 & other . 0 )}} forward_ref_binop ! { impl BitAnd , bitand for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitAndAssign for Wrapping <$t > {# [ inline ] fn bitand_assign (& mut self , other : Wrapping <$t >){* self = * self & other ; }} forward_ref_op_assign ! { impl BitAndAssign , bitand_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "wrapping_neg" , since = "1.10.0" )] impl Neg for Wrapping <$t > { type Output = Self ; # [ inline ] fn neg ( self )-> Self { Wrapping ( 0 )- self }} forward_ref_unop ! 
{ impl Neg , neg for Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]})*)}
+macro_rules! __ra_macro_fixture8 {($($t : ty )*)=>($(impl Wrapping <$t > { doc_comment ! { concat ! ( "Returns the smallest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(<Wrapping<" , stringify ! ($t ), ">>::MIN, Wrapping(" , stringify ! ($t ), "::MIN));\n```" ), # [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const MIN : Self = Self (<$t >:: MIN ); } doc_comment ! { concat ! ( "Returns the largest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(<Wrapping<" , stringify ! ($t ), ">>::MAX, Wrapping(" , stringify ! ($t ), "::MAX));\n```" ), # [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const MAX : Self = Self (<$t >:: MAX ); } doc_comment ! { concat ! ( "Returns the number of ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0b01001100" , stringify ! ($t ), ");\n\nassert_eq!(n.count_ones(), 3);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn count_ones ( self )-> u32 { self . 0 . count_ones ()}} doc_comment ! { concat ! ( "Returns the number of zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(!0" , stringify ! ($t ), ").count_zeros(), 0);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn count_zeros ( self )-> u32 { self . 0 . count_zeros ()}} doc_comment ! { concat ! 
( "Returns the number of trailing zeros in the binary representation\nof `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0b0101000" , stringify ! ($t ), ");\n\nassert_eq!(n.trailing_zeros(), 3);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn trailing_zeros ( self )-> u32 { self . 0 . trailing_zeros ()}}# [ doc = " Shifts the bits to the left by a specified amount, `n`," ]# [ doc = " wrapping the truncated bits to the end of the resulting" ]# [ doc = " integer." ]# [ doc = "" ]# [ doc = " Please note this isn\\\'t the same operation as the `<<` shifting" ]# [ doc = " operator!" ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " #![feature(wrapping_int_impl)]" ]# [ doc = " use std::num::Wrapping;" ]# [ doc = "" ]# [ doc = " let n: Wrapping<i64> = Wrapping(0x0123456789ABCDEF);" ]# [ doc = " let m: Wrapping<i64> = Wrapping(-0x76543210FEDCBA99);" ]# [ doc = "" ]# [ doc = " assert_eq!(n.rotate_left(32), m);" ]# [ doc = " ```" ]# [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn rotate_left ( self , n : u32 )-> Self { Wrapping ( self . 0 . rotate_left ( n ))}# [ doc = " Shifts the bits to the right by a specified amount, `n`," ]# [ doc = " wrapping the truncated bits to the beginning of the resulting" ]# [ doc = " integer." ]# [ doc = "" ]# [ doc = " Please note this isn\\\'t the same operation as the `>>` shifting" ]# [ doc = " operator!" 
]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " #![feature(wrapping_int_impl)]" ]# [ doc = " use std::num::Wrapping;" ]# [ doc = "" ]# [ doc = " let n: Wrapping<i64> = Wrapping(0x0123456789ABCDEF);" ]# [ doc = " let m: Wrapping<i64> = Wrapping(-0xFEDCBA987654322);" ]# [ doc = "" ]# [ doc = " assert_eq!(n.rotate_right(4), m);" ]# [ doc = " ```" ]# [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn rotate_right ( self , n : u32 )-> Self { Wrapping ( self . 0 . rotate_right ( n ))}# [ doc = " Reverses the byte order of the integer." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " #![feature(wrapping_int_impl)]" ]# [ doc = " use std::num::Wrapping;" ]# [ doc = "" ]# [ doc = " let n: Wrapping<i16> = Wrapping(0b0000000_01010101);" ]# [ doc = " assert_eq!(n, Wrapping(85));" ]# [ doc = "" ]# [ doc = " let m = n.swap_bytes();" ]# [ doc = "" ]# [ doc = " assert_eq!(m, Wrapping(0b01010101_00000000));" ]# [ doc = " assert_eq!(m, Wrapping(21760));" ]# [ doc = " ```" ]# [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn swap_bytes ( self )-> Self { Wrapping ( self . 0 . swap_bytes ())}# [ doc = " Reverses the bit pattern of the integer." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Please note that this example is shared between integer types." ]# [ doc = " Which explains why `i16` is used here." 
]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use std::num::Wrapping;" ]# [ doc = "" ]# [ doc = " let n = Wrapping(0b0000000_01010101i16);" ]# [ doc = " assert_eq!(n, Wrapping(85));" ]# [ doc = "" ]# [ doc = " let m = n.reverse_bits();" ]# [ doc = "" ]# [ doc = " assert_eq!(m.0 as u16, 0b10101010_00000000);" ]# [ doc = " assert_eq!(m, Wrapping(-22016));" ]# [ doc = " ```" ]# [ stable ( feature = "reverse_bits" , since = "1.37.0" )]# [ rustc_const_stable ( feature = "const_reverse_bits" , since = "1.37.0" )]# [ inline ]# [ must_use ] pub const fn reverse_bits ( self )-> Self { Wrapping ( self . 0 . reverse_bits ())} doc_comment ! { concat ! ( "Converts an integer from big endian to the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0x1A" , stringify ! ($t ), ");\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(<Wrapping<" , stringify ! ($t ), ">>::from_be(n), n)\n} else {\n assert_eq!(<Wrapping<" , stringify ! ($t ), ">>::from_be(n), n.swap_bytes())\n}\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn from_be ( x : Self )-> Self { Wrapping (<$t >:: from_be ( x . 0 ))}} doc_comment ! { concat ! ( "Converts an integer from little endian to the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0x1A" , stringify ! ($t ), ");\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(<Wrapping<" , stringify ! ($t ), ">>::from_le(n), n)\n} else {\n assert_eq!(<Wrapping<" , stringify ! 
($t ), ">>::from_le(n), n.swap_bytes())\n}\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn from_le ( x : Self )-> Self { Wrapping (<$t >:: from_le ( x . 0 ))}} doc_comment ! { concat ! ( "Converts `self` to big endian from the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0x1A" , stringify ! ($t ), ");\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(n.to_be(), n)\n} else {\n assert_eq!(n.to_be(), n.swap_bytes())\n}\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn to_be ( self )-> Self { Wrapping ( self . 0 . to_be ())}} doc_comment ! { concat ! ( "Converts `self` to little endian from the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0x1A" , stringify ! ($t ), ");\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(n.to_le(), n)\n} else {\n assert_eq!(n.to_le(), n.swap_bytes())\n}\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn to_le ( self )-> Self { Wrapping ( self . 0 . to_le ())}} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(3" , stringify ! 
($t ), ").pow(4), Wrapping(81));\n```\n\nResults that are too large are wrapped:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(3i8).pow(5), Wrapping(-13));\nassert_eq!(Wrapping(3i8).pow(6), Wrapping(-39));\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub fn pow ( self , exp : u32 )-> Self { Wrapping ( self . 0 . wrapping_pow ( exp ))}}})*)}
+macro_rules! __ra_macro_fixture9 {($($t : ty )*)=>($(impl Wrapping <$t > { doc_comment ! { concat ! ( "Returns the number of leading zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(" , stringify ! ($t ), "::MAX) >> 2;\n\nassert_eq!(n.leading_zeros(), 3);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn leading_zeros ( self )-> u32 { self . 0 . leading_zeros ()}} doc_comment ! { concat ! ( "Computes the absolute value of `self`, wrapping around at\nthe boundary of the type.\n\nThe only case where such wrapping can occur is when one takes the absolute value of the negative\nminimal value for the type this is a positive value that is too large to represent in the type. In\nsuch a case, this function returns `MIN` itself.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(100" , stringify ! ($t ), ").abs(), Wrapping(100));\nassert_eq!(Wrapping(-100" , stringify ! ($t ), ").abs(), Wrapping(100));\nassert_eq!(Wrapping(" , stringify ! ($t ), "::MIN).abs(), Wrapping(" , stringify ! ($t ), "::MIN));\nassert_eq!(Wrapping(-128i8).abs().0 as u8, 128u8);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub fn abs ( self )-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_abs ())}} doc_comment ! { concat ! ( "Returns a number representing sign of `self`.\n\n - `0` if the number is zero\n - `1` if the number is positive\n - `-1` if the number is negative\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(10" , stringify ! ($t ), ").signum(), Wrapping(1));\nassert_eq!(Wrapping(0" , stringify ! ($t ), ").signum(), Wrapping(0));\nassert_eq!(Wrapping(-10" , stringify ! 
($t ), ").signum(), Wrapping(-1));\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub fn signum ( self )-> Wrapping <$t > { Wrapping ( self . 0 . signum ())}} doc_comment ! { concat ! ( "Returns `true` if `self` is positive and `false` if the number is zero or\nnegative.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert!(Wrapping(10" , stringify ! ($t ), ").is_positive());\nassert!(!Wrapping(-10" , stringify ! ($t ), ").is_positive());\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn is_positive ( self )-> bool { self . 0 . is_positive ()}} doc_comment ! { concat ! ( "Returns `true` if `self` is negative and `false` if the number is zero or\npositive.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert!(Wrapping(-10" , stringify ! ($t ), ").is_negative());\nassert!(!Wrapping(10" , stringify ! ($t ), ").is_negative());\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn is_negative ( self )-> bool { self . 0 . is_negative ()}}})*)}
+macro_rules! __ra_macro_fixture10 {($($t : ty )*)=>($(impl Wrapping <$t > { doc_comment ! { concat ! ( "Returns the number of leading zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(" , stringify ! ($t ), "::MAX) >> 2;\n\nassert_eq!(n.leading_zeros(), 2);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn leading_zeros ( self )-> u32 { self . 0 . leading_zeros ()}} doc_comment ! { concat ! ( "Returns `true` if and only if `self == 2^k` for some `k`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert!(Wrapping(16" , stringify ! ($t ), ").is_power_of_two());\nassert!(!Wrapping(10" , stringify ! ($t ), ").is_power_of_two());\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub fn is_power_of_two ( self )-> bool { self . 0 . is_power_of_two ()}} doc_comment ! { concat ! ( "Returns the smallest power of two greater than or equal to `self`.\n\nWhen return value overflows (i.e., `self > (1 << (N-1))` for type\n`uN`), overflows to `2^N = 0`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_next_power_of_two)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(2" , stringify ! ($t ), ").next_power_of_two(), Wrapping(2));\nassert_eq!(Wrapping(3" , stringify ! ($t ), ").next_power_of_two(), Wrapping(4));\nassert_eq!(Wrapping(200_u8).next_power_of_two(), Wrapping(0));\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_next_power_of_two" , issue = "32463" , reason = "needs decision on wrapping behaviour" )] pub fn next_power_of_two ( self )-> Self { Wrapping ( self . 0 . wrapping_next_power_of_two ())}}})*)}
+macro_rules! __ra_macro_fixture11 {($($t : ty )*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl FromStr for $t { type Err = ParseIntError ; fn from_str ( src : & str )-> Result < Self , ParseIntError > { from_str_radix ( src , 10 )}})*}}
+macro_rules! __ra_macro_fixture12 {($($t : ty )*)=>($(impl FromStrRadixHelper for $t {# [ inline ] fn min_value ()-> Self { Self :: MIN }# [ inline ] fn max_value ()-> Self { Self :: MAX }# [ inline ] fn from_u32 ( u : u32 )-> Self { u as Self }# [ inline ] fn checked_mul (& self , other : u32 )-> Option < Self > { Self :: checked_mul (* self , other as Self )}# [ inline ] fn checked_sub (& self , other : u32 )-> Option < Self > { Self :: checked_sub (* self , other as Self )}# [ inline ] fn checked_add (& self , other : u32 )-> Option < Self > { Self :: checked_add (* self , other as Self )}})*)}
+macro_rules! __ra_macro_fixture13 {($($Arg : ident ),+)=>{ fnptr_impls_safety_abi ! { extern "Rust" fn ($($Arg ),+)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { extern "C" fn ($($Arg ),+)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { extern "C" fn ($($Arg ),+ , ...)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { unsafe extern "Rust" fn ($($Arg ),+)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { unsafe extern "C" fn ($($Arg ),+)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { unsafe extern "C" fn ($($Arg ),+ , ...)-> Ret , $($Arg ),+ }}; ()=>{ fnptr_impls_safety_abi ! { extern "Rust" fn ()-> Ret , } fnptr_impls_safety_abi ! { extern "C" fn ()-> Ret , } fnptr_impls_safety_abi ! { unsafe extern "Rust" fn ()-> Ret , } fnptr_impls_safety_abi ! { unsafe extern "C" fn ()-> Ret , }}; }
+macro_rules! __ra_macro_fixture14 {($($t : ty )*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Clone for $t {# [ inline ] fn clone (& self )-> Self {* self }})* }}
+macro_rules! __ra_macro_fixture15 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl PartialEq for $t {# [ inline ] fn eq (& self , other : &$t )-> bool {(* self )== (* other )}# [ inline ] fn ne (& self , other : &$t )-> bool {(* self )!= (* other )}})*)}
+macro_rules! __ra_macro_fixture16 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Eq for $t {})*)}
+macro_rules! __ra_macro_fixture17 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl PartialOrd for $t {# [ inline ] fn partial_cmp (& self , other : &$t )-> Option < Ordering > { match ( self <= other , self >= other ){( false , false )=> None , ( false , true )=> Some ( Greater ), ( true , false )=> Some ( Less ), ( true , true )=> Some ( Equal ), }}# [ inline ] fn lt (& self , other : &$t )-> bool {(* self )< (* other )}# [ inline ] fn le (& self , other : &$t )-> bool {(* self )<= (* other )}# [ inline ] fn ge (& self , other : &$t )-> bool {(* self )>= (* other )}# [ inline ] fn gt (& self , other : &$t )-> bool {(* self )> (* other )}})*)}
+macro_rules! __ra_macro_fixture18 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl PartialOrd for $t {# [ inline ] fn partial_cmp (& self , other : &$t )-> Option < Ordering > { Some ( self . cmp ( other ))}# [ inline ] fn lt (& self , other : &$t )-> bool {(* self )< (* other )}# [ inline ] fn le (& self , other : &$t )-> bool {(* self )<= (* other )}# [ inline ] fn ge (& self , other : &$t )-> bool {(* self )>= (* other )}# [ inline ] fn gt (& self , other : &$t )-> bool {(* self )> (* other )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Ord for $t {# [ inline ] fn cmp (& self , other : &$t )-> Ordering { if * self < * other { Less } else if * self == * other { Equal } else { Greater }}})*)}
+macro_rules! __ra_macro_fixture19 {($Float : ident =>$($Int : ident )+ )=>{# [ unstable ( feature = "convert_float_to_int" , issue = "67057" )] impl private :: Sealed for $Float {}$(# [ unstable ( feature = "convert_float_to_int" , issue = "67057" )] impl FloatToInt <$Int > for $Float {# [ doc ( hidden )]# [ inline ] unsafe fn to_int_unchecked ( self )-> $Int { unsafe { crate :: intrinsics :: float_to_int_unchecked ( self )}}})+ }}
+macro_rules! __ra_macro_fixture20 {($target : ty , # [$attr : meta ])=>{ impl_from ! ( bool , $target , # [$attr ], concat ! ( "Converts a `bool` to a `" , stringify ! ($target ), "`. The resulting value is `0` for `false` and `1` for `true`\nvalues.\n\n# Examples\n\n```\nassert_eq!(" , stringify ! ($target ), "::from(true), 1);\nassert_eq!(" , stringify ! ($target ), "::from(false), 0);\n```" )); }; }
+macro_rules! __ra_macro_fixture21 {($Small : ty , $Large : ty , # [$attr : meta ], $doc : expr )=>{# [$attr ]# [ doc = $doc ] impl From <$Small > for $Large {# [ inline ] fn from ( small : $Small )-> Self { small as Self }}}; ($Small : ty , $Large : ty , # [$attr : meta ])=>{ impl_from ! ($Small , $Large , # [$attr ], concat ! ( "Converts `" , stringify ! ($Small ), "` to `" , stringify ! ($Large ), "` losslessly." )); }}
+macro_rules! __ra_macro_fixture22 {($source : ty , $($target : ty ),*)=>{$(# [ stable ( feature = "try_from" , since = "1.34.0" )] impl TryFrom <$source > for $target { type Error = TryFromIntError ; # [ doc = " Try to create the target number type from a source" ]# [ doc = " number type. This returns an error if the source value" ]# [ doc = " is outside of the range of the target type." ]# [ inline ] fn try_from ( u : $source )-> Result < Self , Self :: Error > { if u > ( Self :: MAX as $source ){ Err ( TryFromIntError (()))} else { Ok ( u as Self )}}})*}}
+macro_rules! __ra_macro_fixture23 {($source : ty , $($target : ty ),*)=>{$(# [ stable ( feature = "try_from" , since = "1.34.0" )] impl TryFrom <$source > for $target { type Error = TryFromIntError ; # [ doc = " Try to create the target number type from a source" ]# [ doc = " number type. This returns an error if the source value" ]# [ doc = " is outside of the range of the target type." ]# [ inline ] fn try_from ( u : $source )-> Result < Self , Self :: Error > { let min = Self :: MIN as $source ; let max = Self :: MAX as $source ; if u < min || u > max { Err ( TryFromIntError (()))} else { Ok ( u as Self )}}})*}}
+macro_rules! __ra_macro_fixture24 {($source : ty , $($target : ty ),*)=>{$(# [ stable ( feature = "try_from" , since = "1.34.0" )] impl TryFrom <$source > for $target { type Error = TryFromIntError ; # [ doc = " Try to create the target number type from a source" ]# [ doc = " number type. This returns an error if the source value" ]# [ doc = " is outside of the range of the target type." ]# [ inline ] fn try_from ( u : $source )-> Result < Self , Self :: Error > { if u >= 0 { Ok ( u as Self )} else { Err ( TryFromIntError (()))}}})*}}
+macro_rules! __ra_macro_fixture25 {($source : ty , $($target : ty ),*)=>{$(# [ stable ( feature = "try_from" , since = "1.34.0" )] impl TryFrom <$source > for $target { type Error = TryFromIntError ; # [ doc = " Try to create the target number type from a source" ]# [ doc = " number type. This returns an error if the source value" ]# [ doc = " is outside of the range of the target type." ]# [ inline ] fn try_from ( value : $source )-> Result < Self , Self :: Error > { Ok ( value as Self )}})*}}
+macro_rules! __ra_macro_fixture26 {($mac : ident , $source : ty , $($target : ty ),*)=>{$($mac ! ($target , $source ); )*}}
+macro_rules! __ra_macro_fixture27 {($Small : ty , $Large : ty , # [$attr : meta ], $doc : expr )=>{# [$attr ]# [ doc = $doc ] impl From <$Small > for $Large {# [ inline ] fn from ( small : $Small )-> Self { unsafe { Self :: new_unchecked ( small . get (). into ())}}}}; ($Small : ty , $Large : ty , # [$attr : meta ])=>{ nzint_impl_from ! ($Small , $Large , # [$attr ], concat ! ( "Converts `" , stringify ! ($Small ), "` to `" , stringify ! ($Large ), "` losslessly." )); }}
+macro_rules! __ra_macro_fixture28 {($Int : ty , $NonZeroInt : ty , # [$attr : meta ], $doc : expr )=>{# [$attr ]# [ doc = $doc ] impl TryFrom <$Int > for $NonZeroInt { type Error = TryFromIntError ; # [ inline ] fn try_from ( value : $Int )-> Result < Self , Self :: Error > { Self :: new ( value ). ok_or ( TryFromIntError (()))}}}; ($Int : ty , $NonZeroInt : ty , # [$attr : meta ])=>{ nzint_impl_try_from_int ! ($Int , $NonZeroInt , # [$attr ], concat ! ( "Attempts to convert `" , stringify ! ($Int ), "` to `" , stringify ! ($NonZeroInt ), "`." )); }}
+macro_rules! __ra_macro_fixture29 {($From : ty =>$To : ty , $doc : expr )=>{# [ stable ( feature = "nzint_try_from_nzint_conv" , since = "1.49.0" )]# [ doc = $doc ] impl TryFrom <$From > for $To { type Error = TryFromIntError ; # [ inline ] fn try_from ( value : $From )-> Result < Self , Self :: Error > { TryFrom :: try_from ( value . get ()). map (| v | { unsafe { Self :: new_unchecked ( v )}})}}}; ($To : ty : $($From : ty ),*)=>{$(nzint_impl_try_from_nzint ! ($From =>$To , concat ! ( "Attempts to convert `" , stringify ! ($From ), "` to `" , stringify ! ($To ), "`." , )); )*}; }
+macro_rules! __ra_macro_fixture30 {($t : ty , $v : expr , $doc : tt )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Default for $t {# [ inline ]# [ doc = $doc ] fn default ()-> $t {$v }}}}
+macro_rules! __ra_macro_fixture31 {($t : ident )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > Hash for $t < T > {# [ inline ] fn hash < H : Hasher > (& self , _: & mut H ){}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > cmp :: PartialEq for $t < T > { fn eq (& self , _other : &$t < T >)-> bool { true }}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > cmp :: Eq for $t < T > {}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > cmp :: PartialOrd for $t < T > { fn partial_cmp (& self , _other : &$t < T >)-> Option < cmp :: Ordering > { Option :: Some ( cmp :: Ordering :: Equal )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > cmp :: Ord for $t < T > { fn cmp (& self , _other : &$t < T >)-> cmp :: Ordering { cmp :: Ordering :: Equal }}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > Copy for $t < T > {}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > Clone for $t < T > { fn clone (& self )-> Self { Self }}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > Default for $t < T > { fn default ()-> Self { Self }}# [ unstable ( feature = "structural_match" , issue = "31434" )] impl < T : ? Sized > StructuralPartialEq for $t < T > {}# [ unstable ( feature = "structural_match" , issue = "31434" )] impl < T : ? Sized > StructuralEq for $t < T > {}}; }
+macro_rules! __ra_macro_fixture32 {($($t : ty )*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Copy for $t {})* }}
+macro_rules! __ra_macro_fixture33 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Add for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn add ( self , other : $t )-> $t { self + other }} forward_ref_binop ! { impl Add , add for $t , $t })*)}
+macro_rules! __ra_macro_fixture34 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Sub for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn sub ( self , other : $t )-> $t { self - other }} forward_ref_binop ! { impl Sub , sub for $t , $t })*)}
+macro_rules! __ra_macro_fixture35 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Mul for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn mul ( self , other : $t )-> $t { self * other }} forward_ref_binop ! { impl Mul , mul for $t , $t })*)}
+macro_rules! __ra_macro_fixture36 {($($t : ty )*)=>($(# [ doc = " This operation rounds towards zero, truncating any" ]# [ doc = " fractional part of the exact result." ]# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Div for $t { type Output = $t ; # [ inline ] fn div ( self , other : $t )-> $t { self / other }} forward_ref_binop ! { impl Div , div for $t , $t })*)}
+macro_rules! __ra_macro_fixture37 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Div for $t { type Output = $t ; # [ inline ] fn div ( self , other : $t )-> $t { self / other }} forward_ref_binop ! { impl Div , div for $t , $t })*)}
+macro_rules! __ra_macro_fixture38 {($($t : ty )*)=>($(# [ doc = " This operation satisfies `n % d == n - (n / d) * d`. The" ]# [ doc = " result has the same sign as the left operand." ]# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Rem for $t { type Output = $t ; # [ inline ] fn rem ( self , other : $t )-> $t { self % other }} forward_ref_binop ! { impl Rem , rem for $t , $t })*)}
+macro_rules! __ra_macro_fixture39 {($($t : ty )*)=>($(# [ doc = " The remainder from the division of two floats." ]# [ doc = "" ]# [ doc = " The remainder has the same sign as the dividend and is computed as:" ]# [ doc = " `x - (x / y).trunc() * y`." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = " ```" ]# [ doc = " let x: f32 = 50.50;" ]# [ doc = " let y: f32 = 8.125;" ]# [ doc = " let remainder = x - (x / y).trunc() * y;" ]# [ doc = "" ]# [ doc = " // The answer to both operations is 1.75" ]# [ doc = " assert_eq!(x % y, remainder);" ]# [ doc = " ```" ]# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Rem for $t { type Output = $t ; # [ inline ] fn rem ( self , other : $t )-> $t { self % other }} forward_ref_binop ! { impl Rem , rem for $t , $t })*)}
+macro_rules! __ra_macro_fixture40 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Neg for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn neg ( self )-> $t {- self }} forward_ref_unop ! { impl Neg , neg for $t })*)}
+macro_rules! __ra_macro_fixture41 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl AddAssign for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn add_assign (& mut self , other : $t ){* self += other }} forward_ref_op_assign ! { impl AddAssign , add_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture42 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl SubAssign for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn sub_assign (& mut self , other : $t ){* self -= other }} forward_ref_op_assign ! { impl SubAssign , sub_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture43 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl MulAssign for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn mul_assign (& mut self , other : $t ){* self *= other }} forward_ref_op_assign ! { impl MulAssign , mul_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture44 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl DivAssign for $t {# [ inline ] fn div_assign (& mut self , other : $t ){* self /= other }} forward_ref_op_assign ! { impl DivAssign , div_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture45 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl RemAssign for $t {# [ inline ] fn rem_assign (& mut self , other : $t ){* self %= other }} forward_ref_op_assign ! { impl RemAssign , rem_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture46 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Not for $t { type Output = $t ; # [ inline ] fn not ( self )-> $t {! self }} forward_ref_unop ! { impl Not , not for $t })*)}
+macro_rules! __ra_macro_fixture47 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitAnd for $t { type Output = $t ; # [ inline ] fn bitand ( self , rhs : $t )-> $t { self & rhs }} forward_ref_binop ! { impl BitAnd , bitand for $t , $t })*)}
+macro_rules! __ra_macro_fixture48 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitOr for $t { type Output = $t ; # [ inline ] fn bitor ( self , rhs : $t )-> $t { self | rhs }} forward_ref_binop ! { impl BitOr , bitor for $t , $t })*)}
+macro_rules! __ra_macro_fixture49 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitXor for $t { type Output = $t ; # [ inline ] fn bitxor ( self , other : $t )-> $t { self ^ other }} forward_ref_binop ! { impl BitXor , bitxor for $t , $t })*)}
+macro_rules! __ra_macro_fixture50 {($($t : ty )*)=>($(shl_impl ! {$t , u8 } shl_impl ! {$t , u16 } shl_impl ! {$t , u32 } shl_impl ! {$t , u64 } shl_impl ! {$t , u128 } shl_impl ! {$t , usize } shl_impl ! {$t , i8 } shl_impl ! {$t , i16 } shl_impl ! {$t , i32 } shl_impl ! {$t , i64 } shl_impl ! {$t , i128 } shl_impl ! {$t , isize })*)}
+macro_rules! __ra_macro_fixture51 {($($t : ty )*)=>($(shr_impl ! {$t , u8 } shr_impl ! {$t , u16 } shr_impl ! {$t , u32 } shr_impl ! {$t , u64 } shr_impl ! {$t , u128 } shr_impl ! {$t , usize } shr_impl ! {$t , i8 } shr_impl ! {$t , i16 } shr_impl ! {$t , i32 } shr_impl ! {$t , i64 } shr_impl ! {$t , i128 } shr_impl ! {$t , isize })*)}
+macro_rules! __ra_macro_fixture52 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitAndAssign for $t {# [ inline ] fn bitand_assign (& mut self , other : $t ){* self &= other }} forward_ref_op_assign ! { impl BitAndAssign , bitand_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture53 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitOrAssign for $t {# [ inline ] fn bitor_assign (& mut self , other : $t ){* self |= other }} forward_ref_op_assign ! { impl BitOrAssign , bitor_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture54 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitXorAssign for $t {# [ inline ] fn bitxor_assign (& mut self , other : $t ){* self ^= other }} forward_ref_op_assign ! { impl BitXorAssign , bitxor_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture55 {($($t : ty )*)=>($(shl_assign_impl ! {$t , u8 } shl_assign_impl ! {$t , u16 } shl_assign_impl ! {$t , u32 } shl_assign_impl ! {$t , u64 } shl_assign_impl ! {$t , u128 } shl_assign_impl ! {$t , usize } shl_assign_impl ! {$t , i8 } shl_assign_impl ! {$t , i16 } shl_assign_impl ! {$t , i32 } shl_assign_impl ! {$t , i64 } shl_assign_impl ! {$t , i128 } shl_assign_impl ! {$t , isize })*)}
+macro_rules! __ra_macro_fixture56 {($($t : ty )*)=>($(shr_assign_impl ! {$t , u8 } shr_assign_impl ! {$t , u16 } shr_assign_impl ! {$t , u32 } shr_assign_impl ! {$t , u64 } shr_assign_impl ! {$t , u128 } shr_assign_impl ! {$t , usize } shr_assign_impl ! {$t , i8 } shr_assign_impl ! {$t , i16 } shr_assign_impl ! {$t , i32 } shr_assign_impl ! {$t , i64 } shr_assign_impl ! {$t , i128 } shr_assign_impl ! {$t , isize })*)}
+macro_rules! __ra_macro_fixture57 {{$n : expr , $t : ident $($ts : ident )*}=>{# [ stable ( since = "1.4.0" , feature = "array_default" )] impl < T > Default for [ T ; $n ] where T : Default { fn default ()-> [ T ; $n ]{[$t :: default (), $($ts :: default ()),*]}} array_impl_default ! {($n - 1 ), $($ts )*}}; {$n : expr ,}=>{# [ stable ( since = "1.4.0" , feature = "array_default" )] impl < T > Default for [ T ; $n ]{ fn default ()-> [ T ; $n ]{[]}}}; }
+macro_rules! __ra_macro_fixture58 {($($t : ty ),+)=>{$(# [ unstable ( feature = "c_variadic" , reason = "the `c_variadic` feature has not been properly tested on \\n all supported platforms" , issue = "44930" )] impl sealed_trait :: VaArgSafe for $t {})+ }}
+macro_rules! __ra_macro_fixture59 {{ narrower than or same width as usize : $([$u_narrower : ident $i_narrower : ident ]),+; wider than usize : $([$u_wider : ident $i_wider : ident ]),+; }=>{$(# [ allow ( unreachable_patterns )]# [ unstable ( feature = "step_trait" , reason = "recently redesigned" , issue = "42168" )] unsafe impl Step for $u_narrower { step_identical_methods ! (); # [ inline ] fn steps_between ( start : & Self , end : & Self )-> Option < usize > { if * start <= * end { Some ((* end - * start ) as usize )} else { None }}# [ inline ] fn forward_checked ( start : Self , n : usize )-> Option < Self > { match Self :: try_from ( n ){ Ok ( n )=> start . checked_add ( n ), Err (_)=> None , }}# [ inline ] fn backward_checked ( start : Self , n : usize )-> Option < Self > { match Self :: try_from ( n ){ Ok ( n )=> start . checked_sub ( n ), Err (_)=> None , }}}# [ allow ( unreachable_patterns )]# [ unstable ( feature = "step_trait" , reason = "recently redesigned" , issue = "42168" )] unsafe impl Step for $i_narrower { step_identical_methods ! (); # [ inline ] fn steps_between ( start : & Self , end : & Self )-> Option < usize > { if * start <= * end { Some ((* end as isize ). wrapping_sub (* start as isize ) as usize )} else { None }}# [ inline ] fn forward_checked ( start : Self , n : usize )-> Option < Self > { match $u_narrower :: try_from ( n ){ Ok ( n )=>{ let wrapped = start . wrapping_add ( n as Self ); if wrapped >= start { Some ( wrapped )} else { None }} Err (_)=> None , }}# [ inline ] fn backward_checked ( start : Self , n : usize )-> Option < Self > { match $u_narrower :: try_from ( n ){ Ok ( n )=>{ let wrapped = start . wrapping_sub ( n as Self ); if wrapped <= start { Some ( wrapped )} else { None }} Err (_)=> None , }}})+ $(# [ allow ( unreachable_patterns )]# [ unstable ( feature = "step_trait" , reason = "recently redesigned" , issue = "42168" )] unsafe impl Step for $u_wider { step_identical_methods ! 
(); # [ inline ] fn steps_between ( start : & Self , end : & Self )-> Option < usize > { if * start <= * end { usize :: try_from (* end - * start ). ok ()} else { None }}# [ inline ] fn forward_checked ( start : Self , n : usize )-> Option < Self > { start . checked_add ( n as Self )}# [ inline ] fn backward_checked ( start : Self , n : usize )-> Option < Self > { start . checked_sub ( n as Self )}}# [ allow ( unreachable_patterns )]# [ unstable ( feature = "step_trait" , reason = "recently redesigned" , issue = "42168" )] unsafe impl Step for $i_wider { step_identical_methods ! (); # [ inline ] fn steps_between ( start : & Self , end : & Self )-> Option < usize > { if * start <= * end { match end . checked_sub (* start ){ Some ( result )=> usize :: try_from ( result ). ok (), None => None , }} else { None }}# [ inline ] fn forward_checked ( start : Self , n : usize )-> Option < Self > { start . checked_add ( n as Self )}# [ inline ] fn backward_checked ( start : Self , n : usize )-> Option < Self > { start . checked_sub ( n as Self )}})+ }; }
+macro_rules! __ra_macro_fixture60 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl ExactSizeIterator for ops :: Range <$t > {})*)}
+macro_rules! __ra_macro_fixture61 {($($t : ty )*)=>($(# [ stable ( feature = "inclusive_range" , since = "1.26.0" )] impl ExactSizeIterator for ops :: RangeInclusive <$t > {})*)}
+macro_rules! __ra_macro_fixture62 {(@ impls $zero : expr , $one : expr , # [$attr : meta ], $($a : ty )*)=>($(# [$attr ] impl Sum for $a { fn sum < I : Iterator < Item = Self >> ( iter : I )-> Self { iter . fold ($zero , Add :: add )}}# [$attr ] impl Product for $a { fn product < I : Iterator < Item = Self >> ( iter : I )-> Self { iter . fold ($one , Mul :: mul )}}# [$attr ] impl < 'a > Sum <& 'a $a > for $a { fn sum < I : Iterator < Item =& 'a Self >> ( iter : I )-> Self { iter . fold ($zero , Add :: add )}}# [$attr ] impl < 'a > Product <& 'a $a > for $a { fn product < I : Iterator < Item =& 'a Self >> ( iter : I )-> Self { iter . fold ($one , Mul :: mul )}})*); ($($a : ty )*)=>( integer_sum_product ! (@ impls 0 , 1 , # [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )], $($a )*); integer_sum_product ! (@ impls Wrapping ( 0 ), Wrapping ( 1 ), # [ stable ( feature = "wrapping_iter_arith" , since = "1.14.0" )], $(Wrapping <$a >)*); ); }
+macro_rules! __ra_macro_fixture63 {($($a : ident )*)=>($(# [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )] impl Sum for $a { fn sum < I : Iterator < Item = Self >> ( iter : I )-> Self { iter . fold ( 0.0 , Add :: add )}}# [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )] impl Product for $a { fn product < I : Iterator < Item = Self >> ( iter : I )-> Self { iter . fold ( 1.0 , Mul :: mul )}}# [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )] impl < 'a > Sum <& 'a $a > for $a { fn sum < I : Iterator < Item =& 'a Self >> ( iter : I )-> Self { iter . fold ( 0.0 , Add :: add )}}# [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )] impl < 'a > Product <& 'a $a > for $a { fn product < I : Iterator < Item =& 'a Self >> ( iter : I )-> Self { iter . fold ( 1.0 , Mul :: mul )}})*)}
+macro_rules! __ra_macro_fixture64 {($cfg_cas : meta , $cfg_align : meta , $stable : meta , $stable_cxchg : meta , $stable_debug : meta , $stable_access : meta , $stable_from : meta , $stable_nand : meta , $const_stable : meta , $stable_init_const : meta , $s_int_type : literal , $int_ref : expr , $extra_feature : expr , $min_fn : ident , $max_fn : ident , $align : expr , $atomic_new : expr , $int_type : ident $atomic_type : ident $atomic_init : ident )=>{# [ doc = " An integer type which can be safely shared between threads." ]# [ doc = "" ]# [ doc = " This type has the same in-memory representation as the underlying" ]# [ doc = " integer type, [`" ]# [ doc = $s_int_type ]# [ doc = " `](" ]# [ doc = $int_ref ]# [ doc = " ). For more about the differences between atomic types and" ]# [ doc = " non-atomic types as well as information about the portability of" ]# [ doc = " this type, please see the [module-level documentation]." ]# [ doc = "" ]# [ doc = " **Note:** This type is only available on platforms that support" ]# [ doc = " atomic loads and stores of [`" ]# [ doc = $s_int_type ]# [ doc = " `](" ]# [ doc = $int_ref ]# [ doc = " )." ]# [ doc = "" ]# [ doc = " [module-level documentation]: crate::sync::atomic" ]# [$stable ]# [ repr ( C , align ($align ))] pub struct $atomic_type { v : UnsafeCell <$int_type >, }# [ doc = " An atomic integer initialized to `0`." ]# [$stable_init_const ]# [ rustc_deprecated ( since = "1.34.0" , reason = "the `new` function is now preferred" , suggestion = $atomic_new , )] pub const $atomic_init : $atomic_type = $atomic_type :: new ( 0 ); # [$stable ] impl Default for $atomic_type {# [ inline ] fn default ()-> Self { Self :: new ( Default :: default ())}}# [$stable_from ] impl From <$int_type > for $atomic_type { doc_comment ! { concat ! ( "Converts an `" , stringify ! ($int_type ), "` into an `" , stringify ! ($atomic_type ), "`." 
), # [ inline ] fn from ( v : $int_type )-> Self { Self :: new ( v )}}}# [$stable_debug ] impl fmt :: Debug for $atomic_type { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { fmt :: Debug :: fmt (& self . load ( Ordering :: SeqCst ), f )}}# [$stable ] unsafe impl Sync for $atomic_type {} impl $atomic_type { doc_comment ! { concat ! ( "Creates a new atomic integer.\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::" , stringify ! ($atomic_type ), ";\n\nlet atomic_forty_two = " , stringify ! ($atomic_type ), "::new(42);\n```" ), # [ inline ]# [$stable ]# [$const_stable ] pub const fn new ( v : $int_type )-> Self { Self { v : UnsafeCell :: new ( v )}}} doc_comment ! { concat ! ( "Returns a mutable reference to the underlying integer.\n\nThis is safe because the mutable reference guarantees that no other threads are\nconcurrently accessing the atomic data.\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet mut some_var = " , stringify ! ($atomic_type ), "::new(10);\nassert_eq!(*some_var.get_mut(), 10);\n*some_var.get_mut() = 5;\nassert_eq!(some_var.load(Ordering::SeqCst), 5);\n```" ), # [ inline ]# [$stable_access ] pub fn get_mut (& mut self )-> & mut $int_type { self . v . get_mut ()}} doc_comment ! { concat ! ( "Get atomic access to a `&mut " , stringify ! ($int_type ), "`.\n\n" , if_not_8_bit ! {$int_type , concat ! ( "**Note:** This function is only available on targets where `" , stringify ! ($int_type ), "` has an alignment of " , $align , " bytes." )}, "\n\n# Examples\n\n```\n#![feature(atomic_from_mut)]\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet mut some_int = 123;\nlet a = " , stringify ! 
($atomic_type ), "::from_mut(&mut some_int);\na.store(100, Ordering::Relaxed);\nassert_eq!(some_int, 100);\n```\n " ), # [ inline ]# [$cfg_align ]# [ unstable ( feature = "atomic_from_mut" , issue = "76314" )] pub fn from_mut ( v : & mut $int_type )-> & Self { use crate :: mem :: align_of ; let []= [(); align_of ::< Self > ()- align_of ::<$int_type > ()]; unsafe {&* ( v as * mut $int_type as * mut Self )}}} doc_comment ! { concat ! ( "Consumes the atomic and returns the contained value.\n\nThis is safe because passing `self` by value guarantees that no other threads are\nconcurrently accessing the atomic data.\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::" , stringify ! ($atomic_type ), ";\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\nassert_eq!(some_var.into_inner(), 5);\n```" ), # [ inline ]# [$stable_access ]# [ rustc_const_unstable ( feature = "const_cell_into_inner" , issue = "78729" )] pub const fn into_inner ( self )-> $int_type { self . v . into_inner ()}} doc_comment ! { concat ! ( "Loads a value from the atomic integer.\n\n`load` takes an [`Ordering`] argument which describes the memory ordering of this operation.\nPossible values are [`SeqCst`], [`Acquire`] and [`Relaxed`].\n\n# Panics\n\nPanics if `order` is [`Release`] or [`AcqRel`].\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nassert_eq!(some_var.load(Ordering::Relaxed), 5);\n```" ), # [ inline ]# [$stable ] pub fn load (& self , order : Ordering )-> $int_type { unsafe { atomic_load ( self . v . get (), order )}}} doc_comment ! { concat ! 
( "Stores a value into the atomic integer.\n\n`store` takes an [`Ordering`] argument which describes the memory ordering of this operation.\n Possible values are [`SeqCst`], [`Release`] and [`Relaxed`].\n\n# Panics\n\nPanics if `order` is [`Acquire`] or [`AcqRel`].\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nsome_var.store(10, Ordering::Relaxed);\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n```" ), # [ inline ]# [$stable ] pub fn store (& self , val : $int_type , order : Ordering ){ unsafe { atomic_store ( self . v . get (), val , order ); }}} doc_comment ! { concat ! ( "Stores a value into the atomic integer, returning the previous value.\n\n`swap` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nassert_eq!(some_var.swap(10, Ordering::Relaxed), 5);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn swap (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_swap ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Stores a value into the atomic integer if the current value is the same as\nthe `current` value.\n\nThe return value is always the previous value. If it is equal to `current`, then the\nvalue was updated.\n\n`compare_and_swap` also takes an [`Ordering`] argument which describes the memory\nordering of this operation. 
Notice that even when using [`AcqRel`], the operation\nmight fail and hence just perform an `Acquire` load, but not have `Release` semantics.\nUsing [`Acquire`] makes the store part of this operation [`Relaxed`] if it\nhappens, and using [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nassert_eq!(some_var.compare_and_swap(5, 10, Ordering::Relaxed), 5);\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n\nassert_eq!(some_var.compare_and_swap(6, 12, Ordering::Relaxed), 10);\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn compare_and_swap (& self , current : $int_type , new : $int_type , order : Ordering )-> $int_type { match self . compare_exchange ( current , new , order , strongest_failure_ordering ( order )){ Ok ( x )=> x , Err ( x )=> x , }}} doc_comment ! { concat ! ( "Stores a value into the atomic integer if the current value is the same as\nthe `current` value.\n\nThe return value is a result indicating whether the new value was written and\ncontaining the previous value. On success this value is guaranteed to be equal to\n`current`.\n\n`compare_exchange` takes two [`Ordering`] arguments to describe the memory\nordering of this operation. The first describes the required ordering if the\noperation succeeds while the second describes the required ordering when the\noperation fails. Using [`Acquire`] as success ordering makes the store part\nof this operation [`Relaxed`], and using [`Release`] makes the successful load\n[`Relaxed`]. 
The failure ordering can only be [`SeqCst`], [`Acquire`] or [`Relaxed`]\nand must be equivalent to or weaker than the success ordering.\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nassert_eq!(some_var.compare_exchange(5, 10,\n Ordering::Acquire,\n Ordering::Relaxed),\n Ok(5));\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n\nassert_eq!(some_var.compare_exchange(6, 12,\n Ordering::SeqCst,\n Ordering::Acquire),\n Err(10));\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n```" ), # [ inline ]# [$stable_cxchg ]# [$cfg_cas ] pub fn compare_exchange (& self , current : $int_type , new : $int_type , success : Ordering , failure : Ordering )-> Result <$int_type , $int_type > { unsafe { atomic_compare_exchange ( self . v . get (), current , new , success , failure )}}} doc_comment ! { concat ! ( "Stores a value into the atomic integer if the current value is the same as\nthe `current` value.\n\nUnlike [`" , stringify ! ($atomic_type ), "::compare_exchange`], this function is allowed to spuriously fail even\nwhen the comparison succeeds, which can result in more efficient code on some\nplatforms. The return value is a result indicating whether the new value was\nwritten and containing the previous value.\n\n`compare_exchange_weak` takes two [`Ordering`] arguments to describe the memory\nordering of this operation. The first describes the required ordering if the\noperation succeeds while the second describes the required ordering when the\noperation fails. Using [`Acquire`] as success ordering makes the store part\nof this operation [`Relaxed`], and using [`Release`] makes the successful load\n[`Relaxed`]. 
The failure ordering can only be [`SeqCst`], [`Acquire`] or [`Relaxed`]\nand must be equivalent to or weaker than the success ordering.\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet val = " , stringify ! ($atomic_type ), "::new(4);\n\nlet mut old = val.load(Ordering::Relaxed);\nloop {\n let new = old * 2;\n match val.compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed) {\n Ok(_) => break,\n Err(x) => old = x,\n }\n}\n```" ), # [ inline ]# [$stable_cxchg ]# [$cfg_cas ] pub fn compare_exchange_weak (& self , current : $int_type , new : $int_type , success : Ordering , failure : Ordering )-> Result <$int_type , $int_type > { unsafe { atomic_compare_exchange_weak ( self . v . get (), current , new , success , failure )}}} doc_comment ! { concat ! ( "Adds to the current value, returning the previous value.\n\nThis operation wraps around on overflow.\n\n`fetch_add` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0);\nassert_eq!(foo.fetch_add(10, Ordering::SeqCst), 0);\nassert_eq!(foo.load(Ordering::SeqCst), 10);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_add (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_add ( self . v . get (), val , order )}}} doc_comment ! { concat ! 
( "Subtracts from the current value, returning the previous value.\n\nThis operation wraps around on overflow.\n\n`fetch_sub` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(20);\nassert_eq!(foo.fetch_sub(10, Ordering::SeqCst), 20);\nassert_eq!(foo.load(Ordering::SeqCst), 10);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_sub (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_sub ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Bitwise \"and\" with the current value.\n\nPerforms a bitwise \"and\" operation on the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_and` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! 
($atomic_type ), "::new(0b101101);\nassert_eq!(foo.fetch_and(0b110011, Ordering::SeqCst), 0b101101);\nassert_eq!(foo.load(Ordering::SeqCst), 0b100001);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_and (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_and ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Bitwise \"nand\" with the current value.\n\nPerforms a bitwise \"nand\" operation on the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_nand` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "\nuse std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0x13);\nassert_eq!(foo.fetch_nand(0x31, Ordering::SeqCst), 0x13);\nassert_eq!(foo.load(Ordering::SeqCst), !(0x13 & 0x31));\n```" ), # [ inline ]# [$stable_nand ]# [$cfg_cas ] pub fn fetch_nand (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_nand ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Bitwise \"or\" with the current value.\n\nPerforms a bitwise \"or\" operation on the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_or` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. 
Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0b101101);\nassert_eq!(foo.fetch_or(0b110011, Ordering::SeqCst), 0b101101);\nassert_eq!(foo.load(Ordering::SeqCst), 0b111111);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_or (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_or ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Bitwise \"xor\" with the current value.\n\nPerforms a bitwise \"xor\" operation on the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_xor` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0b101101);\nassert_eq!(foo.fetch_xor(0b110011, Ordering::SeqCst), 0b101101);\nassert_eq!(foo.load(Ordering::SeqCst), 0b011110);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_xor (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_xor ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Fetches the value, and applies a function to it that returns an optional\nnew value. 
Returns a `Result` of `Ok(previous_value)` if the function returned `Some(_)`, else\n`Err(previous_value)`.\n\nNote: This may call the function multiple times if the value has been changed from other threads in\nthe meantime, as long as the function returns `Some(_)`, but the function will have been applied\nonly once to the stored value.\n\n`fetch_update` takes two [`Ordering`] arguments to describe the memory ordering of this operation.\nThe first describes the required ordering for when the operation finally succeeds while the second\ndescribes the required ordering for loads. These correspond to the success and failure orderings of\n[`" , stringify ! ($atomic_type ), "::compare_exchange`] respectively.\n\nUsing [`Acquire`] as success ordering makes the store part\nof this operation [`Relaxed`], and using [`Release`] makes the final successful load\n[`Relaxed`]. The (failed) load ordering can only be [`SeqCst`], [`Acquire`] or [`Relaxed`]\nand must be equivalent to or weaker than the success ordering.\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```rust\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet x = " , stringify ! ($atomic_type ), "::new(7);\nassert_eq!(x.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| None), Err(7));\nassert_eq!(x.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |x| Some(x + 1)), Ok(7));\nassert_eq!(x.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |x| Some(x + 1)), Ok(8));\nassert_eq!(x.load(Ordering::SeqCst), 9);\n```" ), # [ inline ]# [ stable ( feature = "no_more_cas" , since = "1.45.0" )]# [$cfg_cas ] pub fn fetch_update < F > (& self , set_order : Ordering , fetch_order : Ordering , mut f : F )-> Result <$int_type , $int_type > where F : FnMut ($int_type )-> Option <$int_type > { let mut prev = self . 
load ( fetch_order ); while let Some ( next )= f ( prev ){ match self . compare_exchange_weak ( prev , next , set_order , fetch_order ){ x @ Ok (_)=> return x , Err ( next_prev )=> prev = next_prev }} Err ( prev )}} doc_comment ! { concat ! ( "Maximum with the current value.\n\nFinds the maximum of the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_max` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(23);\nassert_eq!(foo.fetch_max(42, Ordering::SeqCst), 23);\nassert_eq!(foo.load(Ordering::SeqCst), 42);\n```\n\nIf you want to obtain the maximum value in one step, you can use the following:\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(23);\nlet bar = 42;\nlet max_foo = foo.fetch_max(bar, Ordering::SeqCst).max(bar);\nassert!(max_foo == 42);\n```" ), # [ inline ]# [ stable ( feature = "atomic_min_max" , since = "1.45.0" )]# [$cfg_cas ] pub fn fetch_max (& self , val : $int_type , order : Ordering )-> $int_type { unsafe {$max_fn ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Minimum with the current value.\n\nFinds the minimum of the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_min` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. 
Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(23);\nassert_eq!(foo.fetch_min(42, Ordering::Relaxed), 23);\nassert_eq!(foo.load(Ordering::Relaxed), 23);\nassert_eq!(foo.fetch_min(22, Ordering::Relaxed), 23);\nassert_eq!(foo.load(Ordering::Relaxed), 22);\n```\n\nIf you want to obtain the minimum value in one step, you can use the following:\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(23);\nlet bar = 12;\nlet min_foo = foo.fetch_min(bar, Ordering::SeqCst).min(bar);\nassert_eq!(min_foo, 12);\n```" ), # [ inline ]# [ stable ( feature = "atomic_min_max" , since = "1.45.0" )]# [$cfg_cas ] pub fn fetch_min (& self , val : $int_type , order : Ordering )-> $int_type { unsafe {$min_fn ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Returns a mutable pointer to the underlying integer.\n\nDoing non-atomic reads and writes on the resulting integer can be a data race.\nThis method is mostly useful for FFI, where the function signature may use\n`*mut " , stringify ! ($int_type ), "` instead of `&" , stringify ! ($atomic_type ), "`.\n\nReturning an `*mut` pointer from a shared reference to this atomic is safe because the\natomic types work with interior mutability. All modifications of an atomic change the value\nthrough a shared reference, and can do so safely as long as they use atomic operations. 
Any\nuse of the returned raw pointer requires an `unsafe` block and still has to uphold the same\nrestriction: operations on it must be atomic.\n\n# Examples\n\n```ignore (extern-declaration)\n# fn main() {\n" , $extra_feature , "use std::sync::atomic::" , stringify ! ($atomic_type ), ";\n\nextern {\n fn my_atomic_op(arg: *mut " , stringify ! ($int_type ), ");\n}\n\nlet mut atomic = " , stringify ! ($atomic_type ), "::new(1);\n" , "unsafe {\n my_atomic_op(atomic.as_mut_ptr());\n}\n# }\n```" ), # [ inline ]# [ unstable ( feature = "atomic_mut_ptr" , reason = "recently added" , issue = "66893" )] pub fn as_mut_ptr (& self )-> * mut $int_type { self . v . get ()}}}}}
+macro_rules! __ra_macro_fixture65 {($($target_pointer_width : literal $align : literal )* )=>{$(# [ cfg ( target_has_atomic_load_store = "ptr" )]# [ cfg ( target_pointer_width = $target_pointer_width )] atomic_int ! { cfg ( target_has_atomic = "ptr" ), cfg ( target_has_atomic_equal_alignment = "ptr" ), stable ( feature = "rust1" , since = "1.0.0" ), stable ( feature = "extended_compare_and_swap" , since = "1.10.0" ), stable ( feature = "atomic_debug" , since = "1.3.0" ), stable ( feature = "atomic_access" , since = "1.15.0" ), stable ( feature = "atomic_from" , since = "1.23.0" ), stable ( feature = "atomic_nand" , since = "1.27.0" ), rustc_const_stable ( feature = "const_integer_atomics" , since = "1.34.0" ), stable ( feature = "rust1" , since = "1.0.0" ), "isize" , "../../../std/primitive.isize.html" , "" , atomic_min , atomic_max , $align , "AtomicIsize::new(0)" , isize AtomicIsize ATOMIC_ISIZE_INIT }# [ cfg ( target_has_atomic_load_store = "ptr" )]# [ cfg ( target_pointer_width = $target_pointer_width )] atomic_int ! { cfg ( target_has_atomic = "ptr" ), cfg ( target_has_atomic_equal_alignment = "ptr" ), stable ( feature = "rust1" , since = "1.0.0" ), stable ( feature = "extended_compare_and_swap" , since = "1.10.0" ), stable ( feature = "atomic_debug" , since = "1.3.0" ), stable ( feature = "atomic_access" , since = "1.15.0" ), stable ( feature = "atomic_from" , since = "1.23.0" ), stable ( feature = "atomic_nand" , since = "1.27.0" ), rustc_const_stable ( feature = "const_integer_atomics" , since = "1.34.0" ), stable ( feature = "rust1" , since = "1.0.0" ), "usize" , "../../../std/primitive.usize.html" , "" , atomic_umin , atomic_umax , $align , "AtomicUsize::new(0)" , usize AtomicUsize ATOMIC_USIZE_INIT })* }; }
+macro_rules! __ra_macro_fixture66 {($ty : ident )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Debug for $ty { fn fmt (& self , fmt : & mut Formatter < '_ >)-> Result { float_to_decimal_common ( fmt , self , true , 1 )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Display for $ty { fn fmt (& self , fmt : & mut Formatter < '_ >)-> Result { float_to_decimal_common ( fmt , self , false , 0 )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl LowerExp for $ty { fn fmt (& self , fmt : & mut Formatter < '_ >)-> Result { float_to_exponential_common ( fmt , self , false )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl UpperExp for $ty { fn fmt (& self , fmt : & mut Formatter < '_ >)-> Result { float_to_exponential_common ( fmt , self , true )}}}; }
+macro_rules! __ra_macro_fixture67 {($($t : ident )*)=>($(impl DisplayInt for $t { fn zero ()-> Self { 0 } fn from_u8 ( u : u8 )-> Self { u as Self } fn to_u8 (& self )-> u8 {* self as u8 } fn to_u16 (& self )-> u16 {* self as u16 } fn to_u32 (& self )-> u32 {* self as u32 } fn to_u64 (& self )-> u64 {* self as u64 } fn to_u128 (& self )-> u128 {* self as u128 }})* )}
+macro_rules! __ra_macro_fixture68 {($($t : ident )*)=>($(impl DisplayInt for $t { fn zero ()-> Self { 0 } fn from_u8 ( u : u8 )-> Self { u as Self } fn to_u8 (& self )-> u8 {* self as u8 } fn to_u16 (& self )-> u16 {* self as u16 } fn to_u32 (& self )-> u32 {* self as u32 } fn to_u64 (& self )-> u64 {* self as u64 } fn to_u128 (& self )-> u128 {* self as u128 }})* )}
+macro_rules! __ra_macro_fixture69 {($T : ident , $base : expr , $prefix : expr , $($x : pat =>$conv : expr ),+)=>{ impl GenericRadix for $T { const BASE : u8 = $base ; const PREFIX : & 'static str = $prefix ; fn digit ( x : u8 )-> u8 { match x {$($x =>$conv ,)+ x => panic ! ( "number not in the range 0..={}: {}" , Self :: BASE - 1 , x ), }}}}}
+macro_rules! __ra_macro_fixture70 {($Int : ident , $Uint : ident )=>{ int_base ! { fmt :: Binary for $Int as $Uint -> Binary } int_base ! { fmt :: Octal for $Int as $Uint -> Octal } int_base ! { fmt :: LowerHex for $Int as $Uint -> LowerHex } int_base ! { fmt :: UpperHex for $Int as $Uint -> UpperHex } int_base ! { fmt :: Binary for $Uint as $Uint -> Binary } int_base ! { fmt :: Octal for $Uint as $Uint -> Octal } int_base ! { fmt :: LowerHex for $Uint as $Uint -> LowerHex } int_base ! { fmt :: UpperHex for $Uint as $Uint -> UpperHex }}; }
+macro_rules! __ra_macro_fixture71 {($($T : ident )*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl fmt :: Debug for $T {# [ inline ] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { if f . debug_lower_hex (){ fmt :: LowerHex :: fmt ( self , f )} else if f . debug_upper_hex (){ fmt :: UpperHex :: fmt ( self , f )} else { fmt :: Display :: fmt ( self , f )}}})*}; }
+macro_rules! __ra_macro_fixture72 {($($t : ident ),* as $u : ident via $conv_fn : ident named $name : ident )=>{ fn $name ( mut n : $u , is_nonnegative : bool , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { let mut buf = [ MaybeUninit ::< u8 >:: uninit (); 39 ]; let mut curr = buf . len () as isize ; let buf_ptr = MaybeUninit :: slice_as_mut_ptr (& mut buf ); let lut_ptr = DEC_DIGITS_LUT . as_ptr (); unsafe { assert ! ( crate :: mem :: size_of ::<$u > ()>= 2 ); while n >= 10000 { let rem = ( n % 10000 ) as isize ; n /= 10000 ; let d1 = ( rem / 100 )<< 1 ; let d2 = ( rem % 100 )<< 1 ; curr -= 4 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); ptr :: copy_nonoverlapping ( lut_ptr . offset ( d2 ), buf_ptr . offset ( curr + 2 ), 2 ); } let mut n = n as isize ; if n >= 100 { let d1 = ( n % 100 )<< 1 ; n /= 100 ; curr -= 2 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); } if n < 10 { curr -= 1 ; * buf_ptr . offset ( curr )= ( n as u8 )+ b'0' ; } else { let d1 = n << 1 ; curr -= 2 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); }} let buf_slice = unsafe { str :: from_utf8_unchecked ( slice :: from_raw_parts ( buf_ptr . offset ( curr ), buf . len ()- curr as usize ))}; f . pad_integral ( is_nonnegative , "" , buf_slice )}$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl fmt :: Display for $t {# [ allow ( unused_comparisons )] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { let is_nonnegative = * self >= 0 ; let n = if is_nonnegative { self .$conv_fn ()} else {(! self .$conv_fn ()). wrapping_add ( 1 )}; $name ( n , is_nonnegative , f )}})* }; }
+macro_rules! __ra_macro_fixture73 {($($t : ident ),* as $u : ident via $conv_fn : ident named $name : ident )=>{ fn $name ( mut n : $u , is_nonnegative : bool , upper : bool , f : & mut fmt :: Formatter < '_ > )-> fmt :: Result { let ( mut n , mut exponent , trailing_zeros , added_precision )= { let mut exponent = 0 ; while n % 10 == 0 && n >= 10 { n /= 10 ; exponent += 1 ; } let trailing_zeros = exponent ; let ( added_precision , subtracted_precision )= match f . precision (){ Some ( fmt_prec )=>{ let mut tmp = n ; let mut prec = 0 ; while tmp >= 10 { tmp /= 10 ; prec += 1 ; }( fmt_prec . saturating_sub ( prec ), prec . saturating_sub ( fmt_prec ))} None =>( 0 , 0 )}; for _ in 1 .. subtracted_precision { n /= 10 ; exponent += 1 ; } if subtracted_precision != 0 { let rem = n % 10 ; n /= 10 ; exponent += 1 ; if rem >= 5 { n += 1 ; }}( n , exponent , trailing_zeros , added_precision )}; let mut buf = [ MaybeUninit ::< u8 >:: uninit (); 40 ]; let mut curr = buf . len () as isize ; let buf_ptr = MaybeUninit :: slice_as_mut_ptr (& mut buf ); let lut_ptr = DEC_DIGITS_LUT . as_ptr (); while n >= 100 { let d1 = (( n % 100 ) as isize )<< 1 ; curr -= 2 ; unsafe { ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); } n /= 100 ; exponent += 2 ; } let mut n = n as isize ; if n >= 10 { curr -= 1 ; unsafe {* buf_ptr . offset ( curr )= ( n as u8 % 10_u8 )+ b'0' ; } n /= 10 ; exponent += 1 ; } if exponent != trailing_zeros || added_precision != 0 { curr -= 1 ; unsafe {* buf_ptr . offset ( curr )= b'.' ; }} let buf_slice = unsafe { curr -= 1 ; * buf_ptr . offset ( curr )= ( n as u8 )+ b'0' ; let len = buf . len ()- curr as usize ; slice :: from_raw_parts ( buf_ptr . offset ( curr ), len )}; let mut exp_buf = [ MaybeUninit ::< u8 >:: uninit (); 3 ]; let exp_ptr = MaybeUninit :: slice_as_mut_ptr (& mut exp_buf ); let exp_slice = unsafe {* exp_ptr . offset ( 0 )= if upper { b'E' } else { b'e' }; let len = if exponent < 10 {* exp_ptr . 
offset ( 1 )= ( exponent as u8 )+ b'0' ; 2 } else { let off = exponent << 1 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( off ), exp_ptr . offset ( 1 ), 2 ); 3 }; slice :: from_raw_parts ( exp_ptr , len )}; let parts = & [ flt2dec :: Part :: Copy ( buf_slice ), flt2dec :: Part :: Zero ( added_precision ), flt2dec :: Part :: Copy ( exp_slice )]; let sign = if ! is_nonnegative { "-" } else if f . sign_plus (){ "+" } else { "" }; let formatted = flt2dec :: Formatted { sign , parts }; f . pad_formatted_parts (& formatted )}$(# [ stable ( feature = "integer_exp_format" , since = "1.42.0" )] impl fmt :: LowerExp for $t {# [ allow ( unused_comparisons )] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { let is_nonnegative = * self >= 0 ; let n = if is_nonnegative { self .$conv_fn ()} else {(! self .$conv_fn ()). wrapping_add ( 1 )}; $name ( n , is_nonnegative , false , f )}})* $(# [ stable ( feature = "integer_exp_format" , since = "1.42.0" )] impl fmt :: UpperExp for $t {# [ allow ( unused_comparisons )] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { let is_nonnegative = * self >= 0 ; let n = if is_nonnegative { self .$conv_fn ()} else {(! self .$conv_fn ()). wrapping_add ( 1 )}; $name ( n , is_nonnegative , true , f )}})* }; }
+macro_rules! __ra_macro_fixture74 {($($tr : ident ),*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized + $tr > $tr for & T { fn fmt (& self , f : & mut Formatter < '_ >)-> Result {$tr :: fmt (&** self , f )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized + $tr > $tr for & mut T { fn fmt (& self , f : & mut Formatter < '_ >)-> Result {$tr :: fmt (&** self , f )}})* }}
+macro_rules! __ra_macro_fixture75 {()=>(); ($($name : ident ,)+ )=>(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($name : Debug ),+> Debug for ($($name ,)+) where last_type ! ($($name ,)+): ? Sized {# [ allow ( non_snake_case , unused_assignments )] fn fmt (& self , f : & mut Formatter < '_ >)-> Result { let mut builder = f . debug_tuple ( "" ); let ($(ref $name ,)+)= * self ; $(builder . field (&$name ); )+ builder . finish ()}} peel ! {$($name ,)+ })}
+macro_rules! __ra_macro_fixture76 {($(($ty : ident , $meth : ident ),)*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Hash for $ty { fn hash < H : Hasher > (& self , state : & mut H ){ state .$meth (* self )} fn hash_slice < H : Hasher > ( data : & [$ty ], state : & mut H ){ let newlen = data . len ()* mem :: size_of ::<$ty > (); let ptr = data . as_ptr () as * const u8 ; state . write ( unsafe { slice :: from_raw_parts ( ptr , newlen )})}})*}}
+macro_rules! __ra_macro_fixture77 {()=>(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Hash for (){ fn hash < H : Hasher > (& self , _state : & mut H ){}}); ($($name : ident )+)=>(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($name : Hash ),+> Hash for ($($name ,)+) where last_type ! ($($name ,)+): ? Sized {# [ allow ( non_snake_case )] fn hash < S : Hasher > (& self , state : & mut S ){ let ($(ref $name ,)+)= * self ; $($name . hash ( state );)+ }}); }
+macro_rules! __ra_macro_fixture78 {($([$($p : tt )*]$t : ty ,)*)=>{$(impl <$($p )*> AlwaysApplicableOrd for $t {})* }}
+macro_rules! __ra_macro_fixture79 {($traitname : ident , $($ty : ty )*)=>{$(impl $traitname <$ty > for $ty {})* }}
+macro_rules! __ra_macro_fixture80 {( struct $name : ident -> $ptr : ty , $elem : ty , $raw_mut : tt , {$($mut_ : tt )?}, {$($extra : tt )*})=>{ macro_rules ! next_unchecked {($self : ident )=>{& $($mut_ )? *$self . post_inc_start ( 1 )}} macro_rules ! next_back_unchecked {($self : ident )=>{& $($mut_ )? *$self . pre_dec_end ( 1 )}} macro_rules ! zst_shrink {($self : ident , $n : ident )=>{$self . end = ($self . end as * $raw_mut u8 ). wrapping_offset (-$n ) as * $raw_mut T ; }} impl < 'a , T > $name < 'a , T > {# [ inline ( always )] fn make_slice (& self )-> & 'a [ T ]{ unsafe { from_raw_parts ( self . ptr . as_ptr (), len ! ( self ))}}# [ inline ( always )] unsafe fn post_inc_start (& mut self , offset : isize )-> * $raw_mut T { if mem :: size_of ::< T > ()== 0 { zst_shrink ! ( self , offset ); self . ptr . as_ptr ()} else { let old = self . ptr . as_ptr (); self . ptr = unsafe { NonNull :: new_unchecked ( self . ptr . as_ptr (). offset ( offset ))}; old }}# [ inline ( always )] unsafe fn pre_dec_end (& mut self , offset : isize )-> * $raw_mut T { if mem :: size_of ::< T > ()== 0 { zst_shrink ! ( self , offset ); self . ptr . as_ptr ()} else { self . end = unsafe { self . end . offset (- offset )}; self . end }}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T > ExactSizeIterator for $name < '_ , T > {# [ inline ( always )] fn len (& self )-> usize { len ! ( self )}# [ inline ( always )] fn is_empty (& self )-> bool { is_empty ! ( self )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < 'a , T > Iterator for $name < 'a , T > { type Item = $elem ; # [ inline ] fn next (& mut self )-> Option <$elem > { unsafe { assume (! self . ptr . as_ptr (). is_null ()); if mem :: size_of ::< T > ()!= 0 { assume (! self . end . is_null ()); } if is_empty ! ( self ){ None } else { Some ( next_unchecked ! ( self ))}}}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ let exact = len ! 
( self ); ( exact , Some ( exact ))}# [ inline ] fn count ( self )-> usize { len ! ( self )}# [ inline ] fn nth (& mut self , n : usize )-> Option <$elem > { if n >= len ! ( self ){ if mem :: size_of ::< T > ()== 0 { self . end = self . ptr . as_ptr (); } else { unsafe { self . ptr = NonNull :: new_unchecked ( self . end as * mut T ); }} return None ; } unsafe { self . post_inc_start ( n as isize ); Some ( next_unchecked ! ( self ))}}# [ inline ] fn last ( mut self )-> Option <$elem > { self . next_back ()}# [ inline ] fn for_each < F > ( mut self , mut f : F ) where Self : Sized , F : FnMut ( Self :: Item ), { while let Some ( x )= self . next (){ f ( x ); }}# [ inline ] fn all < F > (& mut self , mut f : F )-> bool where Self : Sized , F : FnMut ( Self :: Item )-> bool , { while let Some ( x )= self . next (){ if ! f ( x ){ return false ; }} true }# [ inline ] fn any < F > (& mut self , mut f : F )-> bool where Self : Sized , F : FnMut ( Self :: Item )-> bool , { while let Some ( x )= self . next (){ if f ( x ){ return true ; }} false }# [ inline ] fn find < P > (& mut self , mut predicate : P )-> Option < Self :: Item > where Self : Sized , P : FnMut (& Self :: Item )-> bool , { while let Some ( x )= self . next (){ if predicate (& x ){ return Some ( x ); }} None }# [ inline ] fn find_map < B , F > (& mut self , mut f : F )-> Option < B > where Self : Sized , F : FnMut ( Self :: Item )-> Option < B >, { while let Some ( x )= self . next (){ if let Some ( y )= f ( x ){ return Some ( y ); }} None }# [ inline ]# [ rustc_inherit_overflow_checks ] fn position < P > (& mut self , mut predicate : P )-> Option < usize > where Self : Sized , P : FnMut ( Self :: Item )-> bool , { let n = len ! ( self ); let mut i = 0 ; while let Some ( x )= self . 
next (){ if predicate ( x ){ unsafe { assume ( i < n )}; return Some ( i ); } i += 1 ; } None }# [ inline ] fn rposition < P > (& mut self , mut predicate : P )-> Option < usize > where P : FnMut ( Self :: Item )-> bool , Self : Sized + ExactSizeIterator + DoubleEndedIterator { let n = len ! ( self ); let mut i = n ; while let Some ( x )= self . next_back (){ i -= 1 ; if predicate ( x ){ unsafe { assume ( i < n )}; return Some ( i ); }} None }# [ doc ( hidden )] unsafe fn __iterator_get_unchecked (& mut self , idx : usize )-> Self :: Item { unsafe {& $($mut_ )? * self . ptr . as_ptr (). add ( idx )}}$($extra )* }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < 'a , T > DoubleEndedIterator for $name < 'a , T > {# [ inline ] fn next_back (& mut self )-> Option <$elem > { unsafe { assume (! self . ptr . as_ptr (). is_null ()); if mem :: size_of ::< T > ()!= 0 { assume (! self . end . is_null ()); } if is_empty ! ( self ){ None } else { Some ( next_back_unchecked ! ( self ))}}}# [ inline ] fn nth_back (& mut self , n : usize )-> Option <$elem > { if n >= len ! ( self ){ self . end = self . ptr . as_ptr (); return None ; } unsafe { self . pre_dec_end ( n as isize ); Some ( next_back_unchecked ! ( self ))}}}# [ stable ( feature = "fused" , since = "1.26.0" )] impl < T > FusedIterator for $name < '_ , T > {}# [ unstable ( feature = "trusted_len" , issue = "37572" )] unsafe impl < T > TrustedLen for $name < '_ , T > {}}}
+macro_rules! __ra_macro_fixture81 {($name : ident : $elem : ident , $iter_of : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < 'a , $elem , P > Iterator for $name < 'a , $elem , P > where P : FnMut (& T )-> bool , { type Item = $iter_of ; # [ inline ] fn next (& mut self )-> Option <$iter_of > { self . inner . next ()}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ self . inner . size_hint ()}}# [ stable ( feature = "fused" , since = "1.26.0" )] impl < 'a , $elem , P > FusedIterator for $name < 'a , $elem , P > where P : FnMut (& T )-> bool {}}; }
+macro_rules! __ra_macro_fixture82 {( clone $t : ident with |$s : ident | $e : expr )=>{ impl < 'a , P > Clone for $t < 'a , P > where P : Pattern < 'a , Searcher : Clone >, { fn clone (& self )-> Self { let $s = self ; $e }}}; }
+macro_rules! __ra_macro_fixture83 {{ forward : $(# [$forward_iterator_attribute : meta ])* struct $forward_iterator : ident ; reverse : $(# [$reverse_iterator_attribute : meta ])* struct $reverse_iterator : ident ; stability : $(# [$common_stability_attribute : meta ])* internal : $internal_iterator : ident yielding ($iterty : ty ); delegate $($t : tt )* }=>{$(# [$forward_iterator_attribute ])* $(# [$common_stability_attribute ])* pub struct $forward_iterator < 'a , P : Pattern < 'a >> ( pub ( super )$internal_iterator < 'a , P >); $(# [$common_stability_attribute ])* impl < 'a , P > fmt :: Debug for $forward_iterator < 'a , P > where P : Pattern < 'a , Searcher : fmt :: Debug >, { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { f . debug_tuple ( stringify ! ($forward_iterator )). field (& self . 0 ). finish ()}}$(# [$common_stability_attribute ])* impl < 'a , P : Pattern < 'a >> Iterator for $forward_iterator < 'a , P > { type Item = $iterty ; # [ inline ] fn next (& mut self )-> Option <$iterty > { self . 0 . next ()}}$(# [$common_stability_attribute ])* impl < 'a , P > Clone for $forward_iterator < 'a , P > where P : Pattern < 'a , Searcher : Clone >, { fn clone (& self )-> Self {$forward_iterator ( self . 0 . clone ())}}$(# [$reverse_iterator_attribute ])* $(# [$common_stability_attribute ])* pub struct $reverse_iterator < 'a , P : Pattern < 'a >> ( pub ( super )$internal_iterator < 'a , P >); $(# [$common_stability_attribute ])* impl < 'a , P > fmt :: Debug for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : fmt :: Debug >, { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { f . debug_tuple ( stringify ! ($reverse_iterator )). field (& self . 0 ). finish ()}}$(# [$common_stability_attribute ])* impl < 'a , P > Iterator for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : ReverseSearcher < 'a >>, { type Item = $iterty ; # [ inline ] fn next (& mut self )-> Option <$iterty > { self . 0 . 
next_back ()}}$(# [$common_stability_attribute ])* impl < 'a , P > Clone for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : Clone >, { fn clone (& self )-> Self {$reverse_iterator ( self . 0 . clone ())}}# [ stable ( feature = "fused" , since = "1.26.0" )] impl < 'a , P : Pattern < 'a >> FusedIterator for $forward_iterator < 'a , P > {}# [ stable ( feature = "fused" , since = "1.26.0" )] impl < 'a , P > FusedIterator for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : ReverseSearcher < 'a >>, {} generate_pattern_iterators ! ($($t )* with $(# [$common_stability_attribute ])*, $forward_iterator , $reverse_iterator , $iterty ); }; { double ended ; with $(# [$common_stability_attribute : meta ])*, $forward_iterator : ident , $reverse_iterator : ident , $iterty : ty }=>{$(# [$common_stability_attribute ])* impl < 'a , P > DoubleEndedIterator for $forward_iterator < 'a , P > where P : Pattern < 'a , Searcher : DoubleEndedSearcher < 'a >>, {# [ inline ] fn next_back (& mut self )-> Option <$iterty > { self . 0 . next_back ()}}$(# [$common_stability_attribute ])* impl < 'a , P > DoubleEndedIterator for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : DoubleEndedSearcher < 'a >>, {# [ inline ] fn next_back (& mut self )-> Option <$iterty > { self . 0 . next ()}}}; { single ended ; with $(# [$common_stability_attribute : meta ])*, $forward_iterator : ident , $reverse_iterator : ident , $iterty : ty }=>{}}
+macro_rules! __ra_macro_fixture84 {($($Name : ident ),+)=>{$(# [ stable ( feature = "str_escape" , since = "1.34.0" )] impl < 'a > fmt :: Display for $Name < 'a > { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { self . clone (). try_for_each (| c | f . write_char ( c ))}}# [ stable ( feature = "str_escape" , since = "1.34.0" )] impl < 'a > Iterator for $Name < 'a > { type Item = char ; # [ inline ] fn next (& mut self )-> Option < char > { self . inner . next ()}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ self . inner . size_hint ()}# [ inline ] fn try_fold < Acc , Fold , R > (& mut self , init : Acc , fold : Fold )-> R where Self : Sized , Fold : FnMut ( Acc , Self :: Item )-> R , R : Try < Ok = Acc >{ self . inner . try_fold ( init , fold )}# [ inline ] fn fold < Acc , Fold > ( self , init : Acc , fold : Fold )-> Acc where Fold : FnMut ( Acc , Self :: Item )-> Acc , { self . inner . fold ( init , fold )}}# [ stable ( feature = "str_escape" , since = "1.34.0" )] impl < 'a > FusedIterator for $Name < 'a > {})+}}
+macro_rules! __ra_macro_fixture85 {($($(# [$attr : meta ])* struct $Name : ident impl $(<$($lifetime : lifetime ),+> )? Fn = |$($arg : ident : $ArgTy : ty ),*| -> $ReturnTy : ty $body : block ; )+)=>{$($(# [$attr ])* struct $Name ; impl $(<$($lifetime ),+> )? Fn < ($($ArgTy , )*)> for $Name {# [ inline ] extern "rust-call" fn call (& self , ($($arg , )*): ($($ArgTy , )*))-> $ReturnTy {$body }} impl $(<$($lifetime ),+> )? FnMut < ($($ArgTy , )*)> for $Name {# [ inline ] extern "rust-call" fn call_mut (& mut self , ($($arg , )*): ($($ArgTy , )*))-> $ReturnTy { Fn :: call (&* self , ($($arg , )*))}} impl $(<$($lifetime ),+> )? FnOnce < ($($ArgTy , )*)> for $Name { type Output = $ReturnTy ; # [ inline ] extern "rust-call" fn call_once ( self , ($($arg , )*): ($($ArgTy , )*))-> $ReturnTy { Fn :: call (& self , ($($arg , )*))}})+ }}
+macro_rules! __ra_macro_fixture86 {($($Tuple : ident {$(($idx : tt )-> $T : ident )+ })+)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : PartialEq ),+> PartialEq for ($($T ,)+) where last_type ! ($($T ,)+): ? Sized {# [ inline ] fn eq (& self , other : & ($($T ,)+))-> bool {$(self .$idx == other .$idx )&&+ }# [ inline ] fn ne (& self , other : & ($($T ,)+))-> bool {$(self .$idx != other .$idx )||+ }}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : Eq ),+> Eq for ($($T ,)+) where last_type ! ($($T ,)+): ? Sized {}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : PartialOrd + PartialEq ),+> PartialOrd for ($($T ,)+) where last_type ! ($($T ,)+): ? Sized {# [ inline ] fn partial_cmp (& self , other : & ($($T ,)+))-> Option < Ordering > { lexical_partial_cmp ! ($(self .$idx , other .$idx ),+)}# [ inline ] fn lt (& self , other : & ($($T ,)+))-> bool { lexical_ord ! ( lt , $(self .$idx , other .$idx ),+)}# [ inline ] fn le (& self , other : & ($($T ,)+))-> bool { lexical_ord ! ( le , $(self .$idx , other .$idx ),+)}# [ inline ] fn ge (& self , other : & ($($T ,)+))-> bool { lexical_ord ! ( ge , $(self .$idx , other .$idx ),+)}# [ inline ] fn gt (& self , other : & ($($T ,)+))-> bool { lexical_ord ! ( gt , $(self .$idx , other .$idx ),+)}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : Ord ),+> Ord for ($($T ,)+) where last_type ! ($($T ,)+): ? Sized {# [ inline ] fn cmp (& self , other : & ($($T ,)+))-> Ordering { lexical_cmp ! ($(self .$idx , other .$idx ),+)}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : Default ),+> Default for ($($T ,)+){# [ inline ] fn default ()-> ($($T ,)+){($({let x : $T = Default :: default (); x },)+)}})+ }}
+macro_rules! __ra_macro_fixture87 {($x : expr , $($tt : tt )*)=>{# [ doc = $x ]$($tt )* }; }
+macro_rules! __ra_macro_fixture88 {($x : expr , $($tt : tt )*)=>{# [ doc = $x ]$($tt )* }; }
+macro_rules! __ra_macro_fixture89 {(# [$stability : meta ]($($Trait : ident ),+ ) for $Ty : ident )=>{$(# [$stability ] impl fmt ::$Trait for $Ty {# [ inline ] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { self . get (). fmt ( f )}})+ }}
+macro_rules! __ra_macro_fixture90 {($t : ident , $f : ident )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Shl <$f > for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn shl ( self , other : $f )-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_shl (( other & self :: shift_max ::$t as $f ) as u32 ))}} forward_ref_binop ! { impl Shl , shl for Wrapping <$t >, $f , # [ stable ( feature = "wrapping_ref_ops" , since = "1.39.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl ShlAssign <$f > for Wrapping <$t > {# [ inline ] fn shl_assign (& mut self , other : $f ){* self = * self << other ; }} forward_ref_op_assign ! { impl ShlAssign , shl_assign for Wrapping <$t >, $f }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Shr <$f > for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn shr ( self , other : $f )-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_shr (( other & self :: shift_max ::$t as $f ) as u32 ))}} forward_ref_binop ! { impl Shr , shr for Wrapping <$t >, $f , # [ stable ( feature = "wrapping_ref_ops" , since = "1.39.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl ShrAssign <$f > for Wrapping <$t > {# [ inline ] fn shr_assign (& mut self , other : $f ){* self = * self >> other ; }} forward_ref_op_assign ! { impl ShrAssign , shr_assign for Wrapping <$t >, $f }}; }
+macro_rules! __ra_macro_fixture91 {( impl $imp : ident , $method : ident for $t : ty , $u : ty )=>{ forward_ref_binop ! ( impl $imp , $method for $t , $u , # [ stable ( feature = "rust1" , since = "1.0.0" )]); }; ( impl $imp : ident , $method : ident for $t : ty , $u : ty , # [$attr : meta ])=>{# [$attr ] impl < 'a > $imp <$u > for & 'a $t { type Output = <$t as $imp <$u >>:: Output ; # [ inline ] fn $method ( self , other : $u )-> <$t as $imp <$u >>:: Output {$imp ::$method (* self , other )}}# [$attr ] impl $imp <&$u > for $t { type Output = <$t as $imp <$u >>:: Output ; # [ inline ] fn $method ( self , other : &$u )-> <$t as $imp <$u >>:: Output {$imp ::$method ( self , * other )}}# [$attr ] impl $imp <&$u > for &$t { type Output = <$t as $imp <$u >>:: Output ; # [ inline ] fn $method ( self , other : &$u )-> <$t as $imp <$u >>:: Output {$imp ::$method (* self , * other )}}}}
+macro_rules! __ra_macro_fixture92 {( impl $imp : ident , $method : ident for $t : ty , $u : ty )=>{ forward_ref_op_assign ! ( impl $imp , $method for $t , $u , # [ stable ( feature = "op_assign_builtins_by_ref" , since = "1.22.0" )]); }; ( impl $imp : ident , $method : ident for $t : ty , $u : ty , # [$attr : meta ])=>{# [$attr ] impl $imp <&$u > for $t {# [ inline ] fn $method (& mut self , other : &$u ){$imp ::$method ( self , * other ); }}}}
+macro_rules! __ra_macro_fixture93 {( impl $imp : ident , $method : ident for $t : ty )=>{ forward_ref_unop ! ( impl $imp , $method for $t , # [ stable ( feature = "rust1" , since = "1.0.0" )]); }; ( impl $imp : ident , $method : ident for $t : ty , # [$attr : meta ])=>{# [$attr ] impl $imp for &$t { type Output = <$t as $imp >:: Output ; # [ inline ] fn $method ( self )-> <$t as $imp >:: Output {$imp ::$method (* self )}}}}
+macro_rules! __ra_macro_fixture94 {($FnTy : ty , $($Arg : ident ),*)=>{# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> PartialEq for $FnTy {# [ inline ] fn eq (& self , other : & Self )-> bool {* self as usize == * other as usize }}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> Eq for $FnTy {}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> PartialOrd for $FnTy {# [ inline ] fn partial_cmp (& self , other : & Self )-> Option < Ordering > {(* self as usize ). partial_cmp (& (* other as usize ))}}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> Ord for $FnTy {# [ inline ] fn cmp (& self , other : & Self )-> Ordering {(* self as usize ). cmp (& (* other as usize ))}}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> hash :: Hash for $FnTy { fn hash < HH : hash :: Hasher > (& self , state : & mut HH ){ state . write_usize (* self as usize )}}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> fmt :: Pointer for $FnTy { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { fmt :: Pointer :: fmt (& (* self as usize as * const ()), f )}}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> fmt :: Debug for $FnTy { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { fmt :: Pointer :: fmt (& (* self as usize as * const ()), f )}}}}
+macro_rules! __ra_macro_fixture95 {($t : ty , $f : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Shl <$f > for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn shl ( self , other : $f )-> $t { self << other }} forward_ref_binop ! { impl Shl , shl for $t , $f }}; }
+macro_rules! __ra_macro_fixture96 {($t : ty , $f : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Shr <$f > for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn shr ( self , other : $f )-> $t { self >> other }} forward_ref_binop ! { impl Shr , shr for $t , $f }}; }
+macro_rules! __ra_macro_fixture97 {($t : ty , $f : ty )=>{# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl ShlAssign <$f > for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn shl_assign (& mut self , other : $f ){* self <<= other }} forward_ref_op_assign ! { impl ShlAssign , shl_assign for $t , $f }}; }
+macro_rules! __ra_macro_fixture98 {($t : ty , $f : ty )=>{# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl ShrAssign <$f > for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn shr_assign (& mut self , other : $f ){* self >>= other }} forward_ref_op_assign ! { impl ShrAssign , shr_assign for $t , $f }}; }
+macro_rules! __ra_macro_fixture99 {( fmt ::$Trait : ident for $T : ident as $U : ident -> $Radix : ident )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl fmt ::$Trait for $T { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result {$Radix . fmt_int (* self as $U , f )}}}; }
+macro_rules! __ra_macro_fixture100 {($name : ident , $($other : ident ,)*)=>( tuple ! {$($other ,)* })}
+macro_rules! __ra_macro_fixture101 {{ unsafe fn $name : ident : $adjacent_kv : ident }=>{# [ doc = " Given a leaf edge handle into an owned tree, returns a handle to the next KV," ]# [ doc = " while deallocating any node left behind yet leaving the corresponding edge" ]# [ doc = " in its parent node dangling." ]# [ doc = "" ]# [ doc = " # Safety" ]# [ doc = " - The leaf edge must not be the last one in the direction travelled." ]# [ doc = " - The node carrying the next KV returned must not have been deallocated by a" ]# [ doc = " previous call on any handle obtained for this tree." ] unsafe fn $name < K , V > ( leaf_edge : Handle < NodeRef < marker :: Owned , K , V , marker :: Leaf >, marker :: Edge >, )-> Handle < NodeRef < marker :: Owned , K , V , marker :: LeafOrInternal >, marker :: KV > { let mut edge = leaf_edge . forget_node_type (); loop { edge = match edge .$adjacent_kv (){ Ok ( internal_kv )=> return internal_kv , Err ( last_edge )=>{ unsafe { let parent_edge = last_edge . into_node (). deallocate_and_ascend (); unwrap_unchecked ( parent_edge ). forget_node_type ()}}}}}}; }
+macro_rules! __ra_macro_fixture102 {([$($vars : tt )*]$lhs : ty , $rhs : ty , $($constraints : tt )*)=>{# [ stable ( feature = "vec_deque_partial_eq_slice" , since = "1.17.0" )] impl < A , B , $($vars )*> PartialEq <$rhs > for $lhs where A : PartialEq < B >, $($constraints )* { fn eq (& self , other : &$rhs )-> bool { if self . len ()!= other . len (){ return false ; } let ( sa , sb )= self . as_slices (); let ( oa , ob )= other [..]. split_at ( sa . len ()); sa == oa && sb == ob }}}}
+macro_rules! __ra_macro_fixture103 {($lhs : ty , $rhs : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )]# [ allow ( unused_lifetimes )] impl < 'a , 'b > PartialEq <$rhs > for $lhs {# [ inline ] fn eq (& self , other : &$rhs )-> bool { PartialEq :: eq (& self [..], & other [..])}# [ inline ] fn ne (& self , other : &$rhs )-> bool { PartialEq :: ne (& self [..], & other [..])}}# [ stable ( feature = "rust1" , since = "1.0.0" )]# [ allow ( unused_lifetimes )] impl < 'a , 'b > PartialEq <$lhs > for $rhs {# [ inline ] fn eq (& self , other : &$lhs )-> bool { PartialEq :: eq (& self [..], & other [..])}# [ inline ] fn ne (& self , other : &$lhs )-> bool { PartialEq :: ne (& self [..], & other [..])}}}; }
+macro_rules! __ra_macro_fixture104 {($t : ty , $is_zero : expr )=>{ unsafe impl IsZero for $t {# [ inline ] fn is_zero (& self )-> bool {$is_zero (* self )}}}; }
+macro_rules! __ra_macro_fixture105 {([$($vars : tt )*]$lhs : ty , $rhs : ty $(where $ty : ty : $bound : ident )?, # [$stability : meta ])=>{# [$stability ] impl < A , B , $($vars )*> PartialEq <$rhs > for $lhs where A : PartialEq < B >, $($ty : $bound )? {# [ inline ] fn eq (& self , other : &$rhs )-> bool { self [..]== other [..]}# [ inline ] fn ne (& self , other : &$rhs )-> bool { self [..]!= other [..]}}}}
+macro_rules! __ra_macro_fixture106 {('owned : $($oty : ident ,)* 'interned : $($ity : ident ,)* )=>{# [ repr ( C )]# [ allow ( non_snake_case )] pub struct HandleCounters {$($oty : AtomicUsize ,)* $($ity : AtomicUsize ,)* } impl HandleCounters { extern "C" fn get ()-> & 'static Self { static COUNTERS : HandleCounters = HandleCounters {$($oty : AtomicUsize :: new ( 1 ),)* $($ity : AtomicUsize :: new ( 1 ),)* }; & COUNTERS }}# [ repr ( C )]# [ allow ( non_snake_case )] pub ( super ) struct HandleStore < S : server :: Types > {$($oty : handle :: OwnedStore < S ::$oty >,)* $($ity : handle :: InternedStore < S ::$ity >,)* } impl < S : server :: Types > HandleStore < S > { pub ( super ) fn new ( handle_counters : & 'static HandleCounters )-> Self { HandleStore {$($oty : handle :: OwnedStore :: new (& handle_counters .$oty ),)* $($ity : handle :: InternedStore :: new (& handle_counters .$ity ),)* }}}$(# [ repr ( C )] pub ( crate ) struct $oty ( handle :: Handle ); impl ! Send for $oty {} impl ! Sync for $oty {} impl Drop for $oty { fn drop (& mut self ){$oty ( self . 0 ). drop (); }} impl < S > Encode < S > for $oty { fn encode ( self , w : & mut Writer , s : & mut S ){ let handle = self . 0 ; mem :: forget ( self ); handle . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , '_ , HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & mut HandleStore < server :: MarkedTypes < S >>)-> Self { s .$oty . take ( handle :: Handle :: decode ( r , & mut ()))}} impl < S > Encode < S > for &$oty { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . 
encode ( w , s ); }} impl < S : server :: Types > Decode < '_ , 's , HandleStore < server :: MarkedTypes < S >>> for & 's Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & 's HandleStore < server :: MarkedTypes < S >>)-> Self {& s .$oty [ handle :: Handle :: decode ( r , & mut ())]}} impl < S > Encode < S > for & mut $oty { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , 's , HandleStore < server :: MarkedTypes < S >>> for & 's mut Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & 's mut HandleStore < server :: MarkedTypes < S >> )-> Self {& mut s .$oty [ handle :: Handle :: decode ( r , & mut ())]}} impl < S : server :: Types > Encode < HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$oty , $oty > { fn encode ( self , w : & mut Writer , s : & mut HandleStore < server :: MarkedTypes < S >>){ s .$oty . alloc ( self ). encode ( w , s ); }} impl < S > DecodeMut < '_ , '_ , S > for $oty { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$oty ( handle :: Handle :: decode ( r , s ))}})* $(# [ repr ( C )]# [ derive ( Copy , Clone , PartialEq , Eq , Hash )] pub ( crate ) struct $ity ( handle :: Handle ); impl ! Send for $ity {} impl ! Sync for $ity {} impl < S > Encode < S > for $ity { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , '_ , HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$ity , $ity > { fn decode ( r : & mut Reader < '_ >, s : & mut HandleStore < server :: MarkedTypes < S >>)-> Self { s .$ity . copy ( handle :: Handle :: decode ( r , & mut ()))}} impl < S : server :: Types > Encode < HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$ity , $ity > { fn encode ( self , w : & mut Writer , s : & mut HandleStore < server :: MarkedTypes < S >>){ s .$ity . alloc ( self ). 
encode ( w , s ); }} impl < S > DecodeMut < '_ , '_ , S > for $ity { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$ity ( handle :: Handle :: decode ( r , s ))}})* }}
+macro_rules! __ra_macro_fixture107 {($S : ident , $self : ident , $m : ident )=>{$m ! { FreeFunctions { fn drop ($self : $S :: FreeFunctions ); fn track_env_var ( var : & str , value : Option <& str >); }, TokenStream { fn drop ($self : $S :: TokenStream ); fn clone ($self : &$S :: TokenStream )-> $S :: TokenStream ; fn new ()-> $S :: TokenStream ; fn is_empty ($self : &$S :: TokenStream )-> bool ; fn from_str ( src : & str )-> $S :: TokenStream ; fn to_string ($self : &$S :: TokenStream )-> String ; fn from_token_tree ( tree : TokenTree <$S :: Group , $S :: Punct , $S :: Ident , $S :: Literal >, )-> $S :: TokenStream ; fn into_iter ($self : $S :: TokenStream )-> $S :: TokenStreamIter ; }, TokenStreamBuilder { fn drop ($self : $S :: TokenStreamBuilder ); fn new ()-> $S :: TokenStreamBuilder ; fn push ($self : & mut $S :: TokenStreamBuilder , stream : $S :: TokenStream ); fn build ($self : $S :: TokenStreamBuilder )-> $S :: TokenStream ; }, TokenStreamIter { fn drop ($self : $S :: TokenStreamIter ); fn clone ($self : &$S :: TokenStreamIter )-> $S :: TokenStreamIter ; fn next ($self : & mut $S :: TokenStreamIter , )-> Option < TokenTree <$S :: Group , $S :: Punct , $S :: Ident , $S :: Literal >>; }, Group { fn drop ($self : $S :: Group ); fn clone ($self : &$S :: Group )-> $S :: Group ; fn new ( delimiter : Delimiter , stream : $S :: TokenStream )-> $S :: Group ; fn delimiter ($self : &$S :: Group )-> Delimiter ; fn stream ($self : &$S :: Group )-> $S :: TokenStream ; fn span ($self : &$S :: Group )-> $S :: Span ; fn span_open ($self : &$S :: Group )-> $S :: Span ; fn span_close ($self : &$S :: Group )-> $S :: Span ; fn set_span ($self : & mut $S :: Group , span : $S :: Span ); }, Punct { fn new ( ch : char , spacing : Spacing )-> $S :: Punct ; fn as_char ($self : $S :: Punct )-> char ; fn spacing ($self : $S :: Punct )-> Spacing ; fn span ($self : $S :: Punct )-> $S :: Span ; fn with_span ($self : $S :: Punct , span : $S :: Span )-> $S :: Punct ; }, Ident { fn new 
( string : & str , span : $S :: Span , is_raw : bool )-> $S :: Ident ; fn span ($self : $S :: Ident )-> $S :: Span ; fn with_span ($self : $S :: Ident , span : $S :: Span )-> $S :: Ident ; }, Literal { fn drop ($self : $S :: Literal ); fn clone ($self : &$S :: Literal )-> $S :: Literal ; fn debug_kind ($self : &$S :: Literal )-> String ; fn symbol ($self : &$S :: Literal )-> String ; fn suffix ($self : &$S :: Literal )-> Option < String >; fn integer ( n : & str )-> $S :: Literal ; fn typed_integer ( n : & str , kind : & str )-> $S :: Literal ; fn float ( n : & str )-> $S :: Literal ; fn f32 ( n : & str )-> $S :: Literal ; fn f64 ( n : & str )-> $S :: Literal ; fn string ( string : & str )-> $S :: Literal ; fn character ( ch : char )-> $S :: Literal ; fn byte_string ( bytes : & [ u8 ])-> $S :: Literal ; fn span ($self : &$S :: Literal )-> $S :: Span ; fn set_span ($self : & mut $S :: Literal , span : $S :: Span ); fn subspan ($self : &$S :: Literal , start : Bound < usize >, end : Bound < usize >, )-> Option <$S :: Span >; }, SourceFile { fn drop ($self : $S :: SourceFile ); fn clone ($self : &$S :: SourceFile )-> $S :: SourceFile ; fn eq ($self : &$S :: SourceFile , other : &$S :: SourceFile )-> bool ; fn path ($self : &$S :: SourceFile )-> String ; fn is_real ($self : &$S :: SourceFile )-> bool ; }, MultiSpan { fn drop ($self : $S :: MultiSpan ); fn new ()-> $S :: MultiSpan ; fn push ($self : & mut $S :: MultiSpan , span : $S :: Span ); }, Diagnostic { fn drop ($self : $S :: Diagnostic ); fn new ( level : Level , msg : & str , span : $S :: MultiSpan )-> $S :: Diagnostic ; fn sub ($self : & mut $S :: Diagnostic , level : Level , msg : & str , span : $S :: MultiSpan , ); fn emit ($self : $S :: Diagnostic ); }, Span { fn debug ($self : $S :: Span )-> String ; fn def_site ()-> $S :: Span ; fn call_site ()-> $S :: Span ; fn mixed_site ()-> $S :: Span ; fn source_file ($self : $S :: Span )-> $S :: SourceFile ; fn parent ($self : $S :: Span )-> Option <$S :: Span >; fn 
source ($self : $S :: Span )-> $S :: Span ; fn start ($self : $S :: Span )-> LineColumn ; fn end ($self : $S :: Span )-> LineColumn ; fn join ($self : $S :: Span , other : $S :: Span )-> Option <$S :: Span >; fn resolved_at ($self : $S :: Span , at : $S :: Span )-> $S :: Span ; fn source_text ($self : $S :: Span )-> Option < String >; }, }}; }
+macro_rules! __ra_macro_fixture108 {( le $ty : ty )=>{ impl < S > Encode < S > for $ty { fn encode ( self , w : & mut Writer , _: & mut S ){ w . write_all (& self . to_le_bytes ()). unwrap (); }} impl < S > DecodeMut < '_ , '_ , S > for $ty { fn decode ( r : & mut Reader < '_ >, _: & mut S )-> Self { const N : usize = :: std :: mem :: size_of ::<$ty > (); let mut bytes = [ 0 ; N ]; bytes . copy_from_slice (& r [.. N ]); * r = & r [ N ..]; Self :: from_le_bytes ( bytes )}}}; ( struct $name : ident {$($field : ident ),* $(,)? })=>{ impl < S > Encode < S > for $name { fn encode ( self , w : & mut Writer , s : & mut S ){$(self .$field . encode ( w , s );)* }} impl < S > DecodeMut < '_ , '_ , S > for $name { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$name {$($field : DecodeMut :: decode ( r , s )),* }}}}; ( enum $name : ident $(<$($T : ident ),+>)? {$($variant : ident $(($field : ident ))*),* $(,)? })=>{ impl < S , $($($T : Encode < S >),+)?> Encode < S > for $name $(<$($T ),+>)? { fn encode ( self , w : & mut Writer , s : & mut S ){# [ allow ( non_upper_case_globals )] mod tag {# [ repr ( u8 )] enum Tag {$($variant ),* }$(pub const $variant : u8 = Tag ::$variant as u8 ;)* } match self {$($name ::$variant $(($field ))* =>{ tag ::$variant . encode ( w , s ); $($field . encode ( w , s );)* })* }}} impl < S , $($($T : for < 's > DecodeMut < 'a , 's , S >),+)?> DecodeMut < 'a , '_ , S > for $name $(<$($T ),+>)? { fn decode ( r : & mut Reader < 'a >, s : & mut S )-> Self {# [ allow ( non_upper_case_globals )] mod tag {# [ repr ( u8 )] enum Tag {$($variant ),* }$(pub const $variant : u8 = Tag ::$variant as u8 ;)* } match u8 :: decode ( r , s ){$(tag ::$variant =>{$(let $field = DecodeMut :: decode ( r , s );)* $name ::$variant $(($field ))* })* _ => unreachable ! (), }}}}}
+macro_rules! __ra_macro_fixture109 {($($ty : ty ),* $(,)?)=>{$(impl Mark for $ty { type Unmarked = Self ; fn mark ( unmarked : Self :: Unmarked )-> Self { unmarked }} impl Unmark for $ty { type Unmarked = Self ; fn unmark ( self )-> Self :: Unmarked { self }})* }}
+macro_rules! __ra_macro_fixture110 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )*;)* }),* $(,)?)=>{$(impl $name {$(pub ( crate ) fn $method ($($arg : $arg_ty ),*)$(-> $ret_ty )* { Bridge :: with (| bridge | { let mut b = bridge . cached_buffer . take (); b . clear (); api_tags :: Method ::$name ( api_tags ::$name ::$method ). encode (& mut b , & mut ()); reverse_encode ! ( b ; $($arg ),*); b = bridge . dispatch . call ( b ); let r = Result ::<_, PanicMessage >:: decode (& mut & b [..], & mut ()); bridge . cached_buffer = b ; r . unwrap_or_else (| e | panic :: resume_unwind ( e . into ()))})})* })* }}
+macro_rules! __ra_macro_fixture111 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ pub trait Types {$(associated_item ! ( type $name );)* }$(pub trait $name : Types {$(associated_item ! ( fn $method (& mut self , $($arg : $arg_ty ),*)$(-> $ret_ty )?);)* })* pub trait Server : Types $(+ $name )* {} impl < S : Types $(+ $name )*> Server for S {}}}
+macro_rules! __ra_macro_fixture112 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ impl < S : Types > Types for MarkedTypes < S > {$(type $name = Marked < S ::$name , client ::$name >;)* }$(impl < S : $name > $name for MarkedTypes < S > {$(fn $method (& mut self , $($arg : $arg_ty ),*)$(-> $ret_ty )? {<_>:: mark ($name ::$method (& mut self . 0 , $($arg . unmark ()),*))})* })* }}
+macro_rules! __ra_macro_fixture113 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ pub trait DispatcherTrait {$(type $name ;)* fn dispatch (& mut self , b : Buffer < u8 >)-> Buffer < u8 >; } impl < S : Server > DispatcherTrait for Dispatcher < MarkedTypes < S >> {$(type $name = < MarkedTypes < S > as Types >::$name ;)* fn dispatch (& mut self , mut b : Buffer < u8 >)-> Buffer < u8 > { let Dispatcher { handle_store , server }= self ; let mut reader = & b [..]; match api_tags :: Method :: decode (& mut reader , & mut ()){$(api_tags :: Method ::$name ( m )=> match m {$(api_tags ::$name ::$method =>{ let mut call_method = || { reverse_decode ! ( reader , handle_store ; $($arg : $arg_ty ),*); $name ::$method ( server , $($arg ),*)}; let r = if thread :: panicking (){ Ok ( call_method ())} else { panic :: catch_unwind ( panic :: AssertUnwindSafe ( call_method )). map_err ( PanicMessage :: from )}; b . clear (); r . encode (& mut b , handle_store ); })* }),* } b }}}}
+macro_rules! __ra_macro_fixture114 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )*;)* }),* $(,)?)=>{$(pub ( super ) enum $name {$($method ),* } rpc_encode_decode ! ( enum $name {$($method ),* }); )* pub ( super ) enum Method {$($name ($name )),* } rpc_encode_decode ! ( enum Method {$($name ( m )),* }); }}
+macro_rules! __ra_macro_fixture115 {($(if # [ cfg ($meta : meta )]{$($tokens : tt )* }) else * else {$($tokens2 : tt )* })=>{$crate :: cfg_if ! {@ __items (); $((($meta )($($tokens )*)), )* (()($($tokens2 )*)), }}; ( if # [ cfg ($i_met : meta )]{$($i_tokens : tt )* }$(else if # [ cfg ($e_met : meta )]{$($e_tokens : tt )* })* )=>{$crate :: cfg_if ! {@ __items (); (($i_met )($($i_tokens )*)), $((($e_met )($($e_tokens )*)), )* (()()), }}; (@ __items ($($not : meta ,)*); )=>{}; (@ __items ($($not : meta ,)*); (($($m : meta ),*)($($tokens : tt )*)), $($rest : tt )*)=>{# [ cfg ( all ($($m ,)* not ( any ($($not ),*))))]$crate :: cfg_if ! {@ __identity $($tokens )* }$crate :: cfg_if ! {@ __items ($($not ,)* $($m ,)*); $($rest )* }}; (@ __identity $($tokens : tt )*)=>{$($tokens )* }; }
+macro_rules! __ra_macro_fixture116 {($lhs : ty , $rhs : ty )=>{# [ stable ( feature = "cmp_os_str" , since = "1.8.0" )] impl < 'a , 'b > PartialEq <$rhs > for $lhs {# [ inline ] fn eq (& self , other : &$rhs )-> bool {< OsStr as PartialEq >:: eq ( self , other )}}# [ stable ( feature = "cmp_os_str" , since = "1.8.0" )] impl < 'a , 'b > PartialEq <$lhs > for $rhs {# [ inline ] fn eq (& self , other : &$lhs )-> bool {< OsStr as PartialEq >:: eq ( self , other )}}# [ stable ( feature = "cmp_os_str" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$rhs > for $lhs {# [ inline ] fn partial_cmp (& self , other : &$rhs )-> Option < cmp :: Ordering > {< OsStr as PartialOrd >:: partial_cmp ( self , other )}}# [ stable ( feature = "cmp_os_str" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$lhs > for $rhs {# [ inline ] fn partial_cmp (& self , other : &$lhs )-> Option < cmp :: Ordering > {< OsStr as PartialOrd >:: partial_cmp ( self , other )}}}; }
+macro_rules! __ra_macro_fixture117 {()=>{}; ($(# [$attr : meta ])* $vis : vis static $name : ident : $t : ty = $init : expr ; $($rest : tt )*)=>($crate :: __thread_local_inner ! ($(# [$attr ])* $vis $name , $t , $init ); $crate :: thread_local ! ($($rest )*); ); ($(# [$attr : meta ])* $vis : vis static $name : ident : $t : ty = $init : expr )=>($crate :: __thread_local_inner ! ($(# [$attr ])* $vis $name , $t , $init ); ); }
+macro_rules! __ra_macro_fixture118 {($($t : ty )*)=>($(impl ReadNumberHelper for $t { const ZERO : Self = 0 ; # [ inline ] fn checked_mul (& self , other : u32 )-> Option < Self > { Self :: checked_mul (* self , other . try_into (). ok ()?)}# [ inline ] fn checked_add (& self , other : u32 )-> Option < Self > { Self :: checked_add (* self , other . try_into (). ok ()?)}})*)}
+macro_rules! __ra_macro_fixture119 {($lhs : ty , $rhs : ty )=>{# [ stable ( feature = "partialeq_path" , since = "1.6.0" )] impl < 'a , 'b > PartialEq <$rhs > for $lhs {# [ inline ] fn eq (& self , other : &$rhs )-> bool {< Path as PartialEq >:: eq ( self , other )}}# [ stable ( feature = "partialeq_path" , since = "1.6.0" )] impl < 'a , 'b > PartialEq <$lhs > for $rhs {# [ inline ] fn eq (& self , other : &$lhs )-> bool {< Path as PartialEq >:: eq ( self , other )}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$rhs > for $lhs {# [ inline ] fn partial_cmp (& self , other : &$rhs )-> Option < cmp :: Ordering > {< Path as PartialOrd >:: partial_cmp ( self , other )}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$lhs > for $rhs {# [ inline ] fn partial_cmp (& self , other : &$lhs )-> Option < cmp :: Ordering > {< Path as PartialOrd >:: partial_cmp ( self , other )}}}; }
+macro_rules! __ra_macro_fixture120 {($lhs : ty , $rhs : ty )=>{# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialEq <$rhs > for $lhs {# [ inline ] fn eq (& self , other : &$rhs )-> bool {< Path as PartialEq >:: eq ( self , other . as_ref ())}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialEq <$lhs > for $rhs {# [ inline ] fn eq (& self , other : &$lhs )-> bool {< Path as PartialEq >:: eq ( self . as_ref (), other )}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$rhs > for $lhs {# [ inline ] fn partial_cmp (& self , other : &$rhs )-> Option < cmp :: Ordering > {< Path as PartialOrd >:: partial_cmp ( self , other . as_ref ())}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$lhs > for $rhs {# [ inline ] fn partial_cmp (& self , other : &$lhs )-> Option < cmp :: Ordering > {< Path as PartialOrd >:: partial_cmp ( self . as_ref (), other )}}}; }
+macro_rules! __ra_macro_fixture121 {(@ key $t : ty , $init : expr )=>{{# [ inline ] fn __init ()-> $t {$init } unsafe fn __getit ()-> $crate :: option :: Option <& 'static $t > {# [ cfg ( all ( target_arch = "wasm32" , not ( target_feature = "atomics" )))] static __KEY : $crate :: thread :: __StaticLocalKeyInner <$t > = $crate :: thread :: __StaticLocalKeyInner :: new (); # [ thread_local ]# [ cfg ( all ( target_thread_local , not ( all ( target_arch = "wasm32" , not ( target_feature = "atomics" ))), ))] static __KEY : $crate :: thread :: __FastLocalKeyInner <$t > = $crate :: thread :: __FastLocalKeyInner :: new (); # [ cfg ( all ( not ( target_thread_local ), not ( all ( target_arch = "wasm32" , not ( target_feature = "atomics" ))), ))] static __KEY : $crate :: thread :: __OsLocalKeyInner <$t > = $crate :: thread :: __OsLocalKeyInner :: new (); # [ allow ( unused_unsafe )] unsafe { __KEY . get ( __init )}} unsafe {$crate :: thread :: LocalKey :: new ( __getit )}}}; ($(# [$attr : meta ])* $vis : vis $name : ident , $t : ty , $init : expr )=>{$(# [$attr ])* $vis const $name : $crate :: thread :: LocalKey <$t > = $crate :: __thread_local_inner ! (@ key $t , $init ); }}
+macro_rules! __ra_macro_fixture122 {({$($then_tt : tt )* } else {$($else_tt : tt )* })=>{ cfg_if :: cfg_if ! { if # [ cfg ( all ( target_os = "linux" , target_env = "gnu" ))]{$($then_tt )* } else {$($else_tt )* }}}; ($($block_inner : tt )*)=>{# [ cfg ( all ( target_os = "linux" , target_env = "gnu" ))]{$($block_inner )* }}; }
+macro_rules! __ra_macro_fixture123 {($($t : ident )*)=>($(impl IsMinusOne for $t { fn is_minus_one (& self )-> bool {* self == - 1 }})*)}
+macro_rules! __ra_macro_fixture124 {($(if # [ cfg ($($meta : meta ),*)]{$($it : item )* }) else * else {$($it2 : item )* })=>{ cfg_if ! {@ __items (); $((($($meta ),*)($($it )*)), )* (()($($it2 )*)), }}; ( if # [ cfg ($($i_met : meta ),*)]{$($i_it : item )* }$(else if # [ cfg ($($e_met : meta ),*)]{$($e_it : item )* })* )=>{ cfg_if ! {@ __items (); (($($i_met ),*)($($i_it )*)), $((($($e_met ),*)($($e_it )*)), )* (()()), }}; (@ __items ($($not : meta ,)*); )=>{}; (@ __items ($($not : meta ,)*); (($($m : meta ),*)($($it : item )*)), $($rest : tt )*)=>{ cfg_if ! {@ __apply cfg ( all ($($m ,)* not ( any ($($not ),*)))), $($it )* } cfg_if ! {@ __items ($($not ,)* $($m ,)*); $($rest )* }}; (@ __apply $m : meta , $($it : item )*)=>{$(# [$m ]$it )* }; }
+macro_rules! __ra_macro_fixture125 {($bench_macro : ident , $bench_ahash_serial : ident , $bench_std_serial : ident , $bench_ahash_highbits : ident , $bench_std_highbits : ident , $bench_ahash_random : ident , $bench_std_random : ident )=>{$bench_macro ! ($bench_ahash_serial , AHashMap , 0 ..); $bench_macro ! ($bench_std_serial , StdHashMap , 0 ..); $bench_macro ! ($bench_ahash_highbits , AHashMap , ( 0 ..). map ( usize :: swap_bytes )); $bench_macro ! ($bench_std_highbits , StdHashMap , ( 0 ..). map ( usize :: swap_bytes )); $bench_macro ! ($bench_ahash_random , AHashMap , RandomKeys :: new ()); $bench_macro ! ($bench_std_random , StdHashMap , RandomKeys :: new ()); }; }
+macro_rules! __ra_macro_fixture126 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut m = $maptype :: with_capacity_and_hasher ( SIZE , Default :: default ()); b . iter (|| { m . clear (); for i in ($keydist ). take ( SIZE ){ m . insert ( i , i ); } black_box (& mut m ); })}}; }
+macro_rules! __ra_macro_fixture127 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut base = $maptype :: default (); for i in ($keydist ). take ( SIZE ){ base . insert ( i , i ); } let skip = $keydist . skip ( SIZE ); b . iter (|| { let mut m = base . clone (); let mut add_iter = skip . clone (); let mut remove_iter = $keydist ; for ( add , remove ) in (& mut add_iter ). zip (& mut remove_iter ). take ( SIZE ){ m . insert ( add , add ); black_box ( m . remove (& remove )); } black_box ( m ); })}}; }
+macro_rules! __ra_macro_fixture128 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut m = $maptype :: default (); for i in $keydist . take ( SIZE ){ m . insert ( i , i ); } b . iter (|| { for i in $keydist . take ( SIZE ){ black_box ( m . get (& i )); }})}}; }
+macro_rules! __ra_macro_fixture129 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut m = $maptype :: default (); let mut iter = $keydist ; for i in (& mut iter ). take ( SIZE ){ m . insert ( i , i ); } b . iter (|| { for i in (& mut iter ). take ( SIZE ){ black_box ( m . get (& i )); }})}}; }
+macro_rules! __ra_macro_fixture130 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut m = $maptype :: default (); for i in ($keydist ). take ( SIZE ){ m . insert ( i , i ); } b . iter (|| { for i in & m { black_box ( i ); }})}}; }
+macro_rules! __ra_macro_fixture131 {($(if # [ cfg ($($meta : meta ),*)]{$($it : item )* }) else * else {$($it2 : item )* })=>{ cfg_if ! {@ __items (); $((($($meta ),*)($($it )*)), )* (()($($it2 )*)), }}; ( if # [ cfg ($($i_met : meta ),*)]{$($i_it : item )* }$(else if # [ cfg ($($e_met : meta ),*)]{$($e_it : item )* })* )=>{ cfg_if ! {@ __items (); (($($i_met ),*)($($i_it )*)), $((($($e_met ),*)($($e_it )*)), )* (()()), }}; (@ __items ($($not : meta ,)*); )=>{}; (@ __items ($($not : meta ,)*); (($($m : meta ),*)($($it : item )*)), $($rest : tt )*)=>{ cfg_if ! {@ __apply cfg ( all ($($m ,)* not ( any ($($not ),*)))), $($it )* } cfg_if ! {@ __items ($($not ,)* $($m ,)*); $($rest )* }}; (@ __apply $m : meta , $($it : item )*)=>{$(# [$m ]$it )* }; }
+macro_rules! __ra_macro_fixture132 {($($(# [$attr : meta ])* pub $t : ident $i : ident {$($field : tt )* })*)=>($(s ! ( it : $(# [$attr ])* pub $t $i {$($field )* }); )*); ( it : $(# [$attr : meta ])* pub union $i : ident {$($field : tt )* })=>( compile_error ! ( "unions cannot derive extra traits, use s_no_extra_traits instead" ); ); ( it : $(# [$attr : meta ])* pub struct $i : ident {$($field : tt )* })=>( __item ! {# [ repr ( C )]# [ cfg_attr ( feature = "extra_traits" , derive ( Debug , Eq , Hash , PartialEq ))]# [ allow ( deprecated )]$(# [$attr ])* pub struct $i {$($field )* }}# [ allow ( deprecated )] impl :: Copy for $i {}# [ allow ( deprecated )] impl :: Clone for $i { fn clone (& self )-> $i {* self }}); }
+macro_rules! __ra_macro_fixture133 {($i : item )=>{$i }; }
+macro_rules! __ra_macro_fixture134 {($($(# [$attr : meta ])* pub $t : ident $i : ident {$($field : tt )* })*)=>($(s_no_extra_traits ! ( it : $(# [$attr ])* pub $t $i {$($field )* }); )*); ( it : $(# [$attr : meta ])* pub union $i : ident {$($field : tt )* })=>( cfg_if ! { if # [ cfg ( libc_union )]{ __item ! {# [ repr ( C )]$(# [$attr ])* pub union $i {$($field )* }} impl :: Copy for $i {} impl :: Clone for $i { fn clone (& self )-> $i {* self }}}}); ( it : $(# [$attr : meta ])* pub struct $i : ident {$($field : tt )* })=>( __item ! {# [ repr ( C )]$(# [$attr ])* pub struct $i {$($field )* }}# [ allow ( deprecated )] impl :: Copy for $i {}# [ allow ( deprecated )] impl :: Clone for $i { fn clone (& self )-> $i {* self }}); }
+macro_rules! __ra_macro_fixture135 {($($(# [$attr : meta ])* pub const $name : ident : $t1 : ty = $t2 : ident {$($field : tt )* };)*)=>($(# [ cfg ( libc_align )]$(# [$attr ])* pub const $name : $t1 = $t2 {$($field )* }; # [ cfg ( not ( libc_align ))]$(# [$attr ])* pub const $name : $t1 = $t2 {$($field )* __align : [], }; )*)}
+macro_rules! __ra_macro_fixture136 {($($args : tt )* )=>{$(define_ioctl ! ($args ); )* }}
+macro_rules! __ra_macro_fixture137 {({$name : ident , $ioctl : ident , $arg_type : ty })=>{ pub unsafe fn $name ( fd : c_int , arg : $arg_type )-> c_int { untyped_ioctl ( fd , bindings ::$ioctl , arg )}}; }
+macro_rules! __ra_macro_fixture138 {($($T : ty ),*)=>{$(impl IdentFragment for $T { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { fmt :: Display :: fmt ( self , f )}})* }}
+macro_rules! __ra_macro_fixture139 {($($t : ident =>$name : ident )*)=>($(impl ToTokens for $t { fn to_tokens (& self , tokens : & mut TokenStream ){ tokens . append ( Literal ::$name (* self )); }})*)}
+macro_rules! __ra_macro_fixture140 {($($l : tt )*)=>{$(impl < 'q , T : 'q > RepAsIteratorExt < 'q > for [ T ; $l ]{ type Iter = slice :: Iter < 'q , T >; fn quote_into_iter (& 'q self )-> ( Self :: Iter , HasIter ){( self . iter (), HasIter )}})* }}
+macro_rules! __ra_macro_fixture141 {($name : ident $spanned : ident $char1 : tt )=>{ pub fn $name ( tokens : & mut TokenStream ){ tokens . append ( Punct :: new ($char1 , Spacing :: Alone )); } pub fn $spanned ( tokens : & mut TokenStream , span : Span ){ let mut punct = Punct :: new ($char1 , Spacing :: Alone ); punct . set_span ( span ); tokens . append ( punct ); }}; ($name : ident $spanned : ident $char1 : tt $char2 : tt )=>{ pub fn $name ( tokens : & mut TokenStream ){ tokens . append ( Punct :: new ($char1 , Spacing :: Joint )); tokens . append ( Punct :: new ($char2 , Spacing :: Alone )); } pub fn $spanned ( tokens : & mut TokenStream , span : Span ){ let mut punct = Punct :: new ($char1 , Spacing :: Joint ); punct . set_span ( span ); tokens . append ( punct ); let mut punct = Punct :: new ($char2 , Spacing :: Alone ); punct . set_span ( span ); tokens . append ( punct ); }}; ($name : ident $spanned : ident $char1 : tt $char2 : tt $char3 : tt )=>{ pub fn $name ( tokens : & mut TokenStream ){ tokens . append ( Punct :: new ($char1 , Spacing :: Joint )); tokens . append ( Punct :: new ($char2 , Spacing :: Joint )); tokens . append ( Punct :: new ($char3 , Spacing :: Alone )); } pub fn $spanned ( tokens : & mut TokenStream , span : Span ){ let mut punct = Punct :: new ($char1 , Spacing :: Joint ); punct . set_span ( span ); tokens . append ( punct ); let mut punct = Punct :: new ($char2 , Spacing :: Joint ); punct . set_span ( span ); tokens . append ( punct ); let mut punct = Punct :: new ($char3 , Spacing :: Alone ); punct . set_span ( span ); tokens . append ( punct ); }}; }
+macro_rules! __ra_macro_fixture142 {($display : tt $name : ty )=>{# [ cfg ( feature = "parsing" )] impl Token for $name { fn peek ( cursor : Cursor )-> bool { fn peek ( input : ParseStream )-> bool {<$name as Parse >:: parse ( input ). is_ok ()} peek_impl ( cursor , peek )} fn display ()-> & 'static str {$display }}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $name {}}; }
+macro_rules! __ra_macro_fixture143 {($display : tt $ty : ident $get : ident )=>{# [ cfg ( feature = "parsing" )] impl Token for $ty { fn peek ( cursor : Cursor )-> bool { cursor .$get (). is_some ()} fn display ()-> & 'static str {$display }}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $ty {}}; }
+macro_rules! __ra_macro_fixture144 {($($token : tt pub struct $name : ident /$len : tt # [$doc : meta ])*)=>{$(# [ repr ( C )]# [$doc ]# [ doc = "" ]# [ doc = " Don\\\'t try to remember the name of this type &mdash; use the" ]# [ doc = " [`Token!`] macro instead." ]# [ doc = "" ]# [ doc = " [`Token!`]: crate::token" ] pub struct $name { pub spans : [ Span ; $len ], }# [ doc ( hidden )]# [ allow ( non_snake_case )] pub fn $name < S : IntoSpans < [ Span ; $len ]>> ( spans : S )-> $name {$name { spans : spans . into_spans (), }} impl std :: default :: Default for $name { fn default ()-> Self {$name { spans : [ Span :: call_site (); $len ], }}}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Copy for $name {}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Clone for $name { fn clone (& self )-> Self {* self }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Debug for $name { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . write_str ( stringify ! ($name ))}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl cmp :: Eq for $name {}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl PartialEq for $name { fn eq (& self , _other : &$name )-> bool { true }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Hash for $name { fn hash < H : Hasher > (& self , _state : & mut H ){}} impl_deref_if_len_is_1 ! ($name /$len ); )* }; }
+macro_rules! __ra_macro_fixture145 {($($token : tt pub struct $name : ident # [$doc : meta ])*)=>{$(# [$doc ]# [ doc = "" ]# [ doc = " Don\\\'t try to remember the name of this type &mdash; use the" ]# [ doc = " [`Token!`] macro instead." ]# [ doc = "" ]# [ doc = " [`Token!`]: crate::token" ] pub struct $name { pub span : Span , }# [ doc ( hidden )]# [ allow ( non_snake_case )] pub fn $name < S : IntoSpans < [ Span ; 1 ]>> ( span : S )-> $name {$name { span : span . into_spans ()[ 0 ], }} impl std :: default :: Default for $name { fn default ()-> Self {$name { span : Span :: call_site (), }}}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Copy for $name {}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Clone for $name { fn clone (& self )-> Self {* self }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Debug for $name { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . write_str ( stringify ! ($name ))}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl cmp :: Eq for $name {}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl PartialEq for $name { fn eq (& self , _other : &$name )-> bool { true }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Hash for $name { fn hash < H : Hasher > (& self , _state : & mut H ){}}# [ cfg ( feature = "printing" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "printing" )))] impl ToTokens for $name { fn to_tokens (& self , tokens : & mut TokenStream ){ printing :: keyword ($token , self . 
span , tokens ); }}# [ cfg ( feature = "parsing" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "parsing" )))] impl Parse for $name { fn parse ( input : ParseStream )-> Result < Self > { Ok ($name { span : parsing :: keyword ( input , $token )?, })}}# [ cfg ( feature = "parsing" )] impl Token for $name { fn peek ( cursor : Cursor )-> bool { parsing :: peek_keyword ( cursor , $token )} fn display ()-> & 'static str { concat ! ( "`" , $token , "`" )}}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $name {})* }; }
+macro_rules! __ra_macro_fixture146 {($($token : tt pub struct $name : ident /$len : tt # [$doc : meta ])*)=>{$(define_punctuation_structs ! {$token pub struct $name /$len # [$doc ]}# [ cfg ( feature = "printing" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "printing" )))] impl ToTokens for $name { fn to_tokens (& self , tokens : & mut TokenStream ){ printing :: punct ($token , & self . spans , tokens ); }}# [ cfg ( feature = "parsing" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "parsing" )))] impl Parse for $name { fn parse ( input : ParseStream )-> Result < Self > { Ok ($name { spans : parsing :: punct ( input , $token )?, })}}# [ cfg ( feature = "parsing" )] impl Token for $name { fn peek ( cursor : Cursor )-> bool { parsing :: peek_punct ( cursor , $token )} fn display ()-> & 'static str { concat ! ( "`" , $token , "`" )}}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $name {})* }; }
+macro_rules! __ra_macro_fixture147 {($($token : tt pub struct $name : ident # [$doc : meta ])*)=>{$(# [$doc ] pub struct $name { pub span : Span , }# [ doc ( hidden )]# [ allow ( non_snake_case )] pub fn $name < S : IntoSpans < [ Span ; 1 ]>> ( span : S )-> $name {$name { span : span . into_spans ()[ 0 ], }} impl std :: default :: Default for $name { fn default ()-> Self {$name { span : Span :: call_site (), }}}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Copy for $name {}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Clone for $name { fn clone (& self )-> Self {* self }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Debug for $name { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . write_str ( stringify ! ($name ))}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl cmp :: Eq for $name {}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl PartialEq for $name { fn eq (& self , _other : &$name )-> bool { true }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Hash for $name { fn hash < H : Hasher > (& self , _state : & mut H ){}} impl $name {# [ cfg ( feature = "printing" )] pub fn surround < F > (& self , tokens : & mut TokenStream , f : F ) where F : FnOnce (& mut TokenStream ), { printing :: delim ($token , self . span , tokens , f ); }}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $name {})* }; }
+macro_rules! __ra_macro_fixture148 {($token : ident )=>{ impl From < Token ! [$token ]> for Ident { fn from ( token : Token ! [$token ])-> Ident { Ident :: new ( stringify ! ($token ), token . span )}}}; }
+macro_rules! __ra_macro_fixture149 {([$($attrs_pub : tt )*] struct $name : ident # full $($rest : tt )* )=>{# [ cfg ( feature = "full" )]$($attrs_pub )* struct $name $($rest )* # [ cfg ( not ( feature = "full" ))]$($attrs_pub )* struct $name { _noconstruct : :: std :: marker :: PhantomData <:: proc_macro2 :: Span >, }# [ cfg ( all ( not ( feature = "full" ), feature = "printing" ))] impl :: quote :: ToTokens for $name { fn to_tokens (& self , _: & mut :: proc_macro2 :: TokenStream ){ unreachable ! ()}}}; ([$($attrs_pub : tt )*] struct $name : ident $($rest : tt )* )=>{$($attrs_pub )* struct $name $($rest )* }; ($($t : tt )*)=>{ strip_attrs_pub ! ( ast_struct ! ($($t )*)); }; }
+macro_rules! __ra_macro_fixture150 {([$($attrs_pub : tt )*] enum $name : ident # no_visit $($rest : tt )* )=>( ast_enum ! ([$($attrs_pub )*] enum $name $($rest )*); ); ([$($attrs_pub : tt )*] enum $name : ident $($rest : tt )* )=>($($attrs_pub )* enum $name $($rest )* ); ($($t : tt )*)=>{ strip_attrs_pub ! ( ast_enum ! ($($t )*)); }; }
+macro_rules! __ra_macro_fixture151 {($(# [$enum_attr : meta ])* $pub : ident $enum : ident $name : ident #$tag : ident $body : tt $($remaining : tt )* )=>{ ast_enum ! ($(# [$enum_attr ])* $pub $enum $name #$tag $body ); ast_enum_of_structs_impl ! ($pub $enum $name $body $($remaining )*); }; ($(# [$enum_attr : meta ])* $pub : ident $enum : ident $name : ident $body : tt $($remaining : tt )* )=>{ ast_enum ! ($(# [$enum_attr ])* $pub $enum $name $body ); ast_enum_of_structs_impl ! ($pub $enum $name $body $($remaining )*); }; }
+macro_rules! __ra_macro_fixture152 {($ident : ident )=>{# [ allow ( non_camel_case_types )] pub struct $ident { pub span : $crate :: __private :: Span , }# [ doc ( hidden )]# [ allow ( dead_code , non_snake_case )] pub fn $ident < __S : $crate :: __private :: IntoSpans < [$crate :: __private :: Span ; 1 ]>> ( span : __S , )-> $ident {$ident { span : $crate :: __private :: IntoSpans :: into_spans ( span )[ 0 ], }} impl $crate :: __private :: Default for $ident { fn default ()-> Self {$ident { span : $crate :: __private :: Span :: call_site (), }}}$crate :: impl_parse_for_custom_keyword ! ($ident ); $crate :: impl_to_tokens_for_custom_keyword ! ($ident ); $crate :: impl_clone_for_custom_keyword ! ($ident ); $crate :: impl_extra_traits_for_custom_keyword ! ($ident ); }; }
+macro_rules! __ra_macro_fixture153 {($($expr_type : ty , $variant : ident , $msg : expr , )* )=>{$(# [ cfg ( all ( feature = "full" , feature = "printing" ))]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "parsing" )))] impl Parse for $expr_type { fn parse ( input : ParseStream )-> Result < Self > { let mut expr : Expr = input . parse ()?; loop { match expr { Expr ::$variant ( inner )=> return Ok ( inner ), Expr :: Group ( next )=> expr = * next . expr , _ => return Err ( Error :: new_spanned ( expr , $msg )), }}}})* }; }
+macro_rules! __ra_macro_fixture154 {($ty : ident )=>{# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl < 'a > Clone for $ty < 'a > { fn clone (& self )-> Self {$ty ( self . 0 )}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl < 'a > Debug for $ty < 'a > { fn fmt (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . debug_tuple ( stringify ! ($ty )). field ( self . 0 ). finish ()}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl < 'a > Eq for $ty < 'a > {}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl < 'a > PartialEq for $ty < 'a > { fn eq (& self , other : & Self )-> bool { self . 0 == other . 0 }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl < 'a > Hash for $ty < 'a > { fn hash < H : Hasher > (& self , state : & mut H ){ self . 0 . hash ( state ); }}}; }
+macro_rules! __ra_macro_fixture155 {($ty : ident )=>{# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Clone for $ty { fn clone (& self )-> Self {$ty { repr : self . repr . clone (), }}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl PartialEq for $ty { fn eq (& self , other : & Self )-> bool { self . repr . token . to_string ()== other . repr . token . to_string ()}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Hash for $ty { fn hash < H > (& self , state : & mut H ) where H : Hasher , { self . repr . token . to_string (). hash ( state ); }}# [ cfg ( feature = "parsing" )]# [ doc ( hidden )]# [ allow ( non_snake_case )] pub fn $ty ( marker : lookahead :: TokenMarker )-> $ty { match marker {}}}; }
+macro_rules! __ra_macro_fixture156 {($name : ident / 1 )=>{ impl Deref for $name { type Target = WithSpan ; fn deref (& self )-> & Self :: Target { unsafe {&* ( self as * const Self as * const WithSpan )}}} impl DerefMut for $name { fn deref_mut (& mut self )-> & mut Self :: Target { unsafe {& mut * ( self as * mut Self as * mut WithSpan )}}}}; ($name : ident /$len : tt )=>{}; }
+macro_rules! __ra_macro_fixture157 {($($await_rule : tt )*)=>{# [ doc = " A type-macro that expands to the name of the Rust type representation of a" ]# [ doc = " given token." ]# [ doc = "" ]# [ doc = " See the [token module] documentation for details and examples." ]# [ doc = "" ]# [ doc = " [token module]: crate::token" ]# [ macro_export ] macro_rules ! Token {[ abstract ]=>{$crate :: token :: Abstract }; [ as ]=>{$crate :: token :: As }; [ async ]=>{$crate :: token :: Async }; [ auto ]=>{$crate :: token :: Auto }; $($await_rule =>{$crate :: token :: Await };)* [ become ]=>{$crate :: token :: Become }; [ box ]=>{$crate :: token :: Box }; [ break ]=>{$crate :: token :: Break }; [ const ]=>{$crate :: token :: Const }; [ continue ]=>{$crate :: token :: Continue }; [ crate ]=>{$crate :: token :: Crate }; [ default ]=>{$crate :: token :: Default }; [ do ]=>{$crate :: token :: Do }; [ dyn ]=>{$crate :: token :: Dyn }; [ else ]=>{$crate :: token :: Else }; [ enum ]=>{$crate :: token :: Enum }; [ extern ]=>{$crate :: token :: Extern }; [ final ]=>{$crate :: token :: Final }; [ fn ]=>{$crate :: token :: Fn }; [ for ]=>{$crate :: token :: For }; [ if ]=>{$crate :: token :: If }; [ impl ]=>{$crate :: token :: Impl }; [ in ]=>{$crate :: token :: In }; [ let ]=>{$crate :: token :: Let }; [ loop ]=>{$crate :: token :: Loop }; [ macro ]=>{$crate :: token :: Macro }; [ match ]=>{$crate :: token :: Match }; [ mod ]=>{$crate :: token :: Mod }; [ move ]=>{$crate :: token :: Move }; [ mut ]=>{$crate :: token :: Mut }; [ override ]=>{$crate :: token :: Override }; [ priv ]=>{$crate :: token :: Priv }; [ pub ]=>{$crate :: token :: Pub }; [ ref ]=>{$crate :: token :: Ref }; [ return ]=>{$crate :: token :: Return }; [ Self ]=>{$crate :: token :: SelfType }; [ self ]=>{$crate :: token :: SelfValue }; [ static ]=>{$crate :: token :: Static }; [ struct ]=>{$crate :: token :: Struct }; [ super ]=>{$crate :: token :: Super }; [ trait ]=>{$crate :: token :: Trait }; [ try ]=>{$crate :: 
token :: Try }; [ type ]=>{$crate :: token :: Type }; [ typeof ]=>{$crate :: token :: Typeof }; [ union ]=>{$crate :: token :: Union }; [ unsafe ]=>{$crate :: token :: Unsafe }; [ unsized ]=>{$crate :: token :: Unsized }; [ use ]=>{$crate :: token :: Use }; [ virtual ]=>{$crate :: token :: Virtual }; [ where ]=>{$crate :: token :: Where }; [ while ]=>{$crate :: token :: While }; [ yield ]=>{$crate :: token :: Yield }; [+]=>{$crate :: token :: Add }; [+=]=>{$crate :: token :: AddEq }; [&]=>{$crate :: token :: And }; [&&]=>{$crate :: token :: AndAnd }; [&=]=>{$crate :: token :: AndEq }; [@]=>{$crate :: token :: At }; [!]=>{$crate :: token :: Bang }; [^]=>{$crate :: token :: Caret }; [^=]=>{$crate :: token :: CaretEq }; [:]=>{$crate :: token :: Colon }; [::]=>{$crate :: token :: Colon2 }; [,]=>{$crate :: token :: Comma }; [/]=>{$crate :: token :: Div }; [/=]=>{$crate :: token :: DivEq }; [$]=>{$crate :: token :: Dollar }; [.]=>{$crate :: token :: Dot }; [..]=>{$crate :: token :: Dot2 }; [...]=>{$crate :: token :: Dot3 }; [..=]=>{$crate :: token :: DotDotEq }; [=]=>{$crate :: token :: Eq }; [==]=>{$crate :: token :: EqEq }; [>=]=>{$crate :: token :: Ge }; [>]=>{$crate :: token :: Gt }; [<=]=>{$crate :: token :: Le }; [<]=>{$crate :: token :: Lt }; [*=]=>{$crate :: token :: MulEq }; [!=]=>{$crate :: token :: Ne }; [|]=>{$crate :: token :: Or }; [|=]=>{$crate :: token :: OrEq }; [||]=>{$crate :: token :: OrOr }; [#]=>{$crate :: token :: Pound }; [?]=>{$crate :: token :: Question }; [->]=>{$crate :: token :: RArrow }; [<-]=>{$crate :: token :: LArrow }; [%]=>{$crate :: token :: Rem }; [%=]=>{$crate :: token :: RemEq }; [=>]=>{$crate :: token :: FatArrow }; [;]=>{$crate :: token :: Semi }; [<<]=>{$crate :: token :: Shl }; [<<=]=>{$crate :: token :: ShlEq }; [>>]=>{$crate :: token :: Shr }; [>>=]=>{$crate :: token :: ShrEq }; [*]=>{$crate :: token :: Star }; [-]=>{$crate :: token :: Sub }; [-=]=>{$crate :: token :: SubEq }; [~]=>{$crate :: token :: Tilde }; [_]=>{$crate :: 
token :: Underscore }; }}; }
+macro_rules! __ra_macro_fixture158 {($mac : ident ! ($(# [$m : meta ])* $pub : ident $($t : tt )*))=>{ check_keyword_matches ! ( pub $pub ); $mac ! ([$(# [$m ])* $pub ]$($t )*); }; }
+macro_rules! __ra_macro_fixture159 {($pub : ident $enum : ident $name : ident {$($(# [$variant_attr : meta ])* $variant : ident $(($($member : ident )::+))*, )* }$($remaining : tt )* )=>{ check_keyword_matches ! ( pub $pub ); check_keyword_matches ! ( enum $enum ); $($(ast_enum_from_struct ! ($name ::$variant , $($member )::+); )*)* # [ cfg ( feature = "printing" )] generate_to_tokens ! {$($remaining )* () tokens $name {$($variant $($($member )::+)*,)* }}}; }
+macro_rules! __ra_macro_fixture160 {($ident : ident )=>{ impl $crate :: token :: CustomToken for $ident { fn peek ( cursor : $crate :: buffer :: Cursor )-> $crate :: __private :: bool { if let Some (( ident , _rest ))= cursor . ident (){ ident == stringify ! ($ident )} else { false }} fn display ()-> & 'static $crate :: __private :: str { concat ! ( "`" , stringify ! ($ident ), "`" )}} impl $crate :: parse :: Parse for $ident { fn parse ( input : $crate :: parse :: ParseStream )-> $crate :: parse :: Result <$ident > { input . step (| cursor | { if let $crate :: __private :: Some (( ident , rest ))= cursor . ident (){ if ident == stringify ! ($ident ){ return $crate :: __private :: Ok (($ident { span : ident . span ()}, rest )); }}$crate :: __private :: Err ( cursor . error ( concat ! ( "expected `" , stringify ! ($ident ), "`" )))})}}}; }
+macro_rules! __ra_macro_fixture161 {($ident : ident )=>{ impl $crate :: __private :: ToTokens for $ident { fn to_tokens (& self , tokens : & mut $crate :: __private :: TokenStream2 ){ let ident = $crate :: Ident :: new ( stringify ! ($ident ), self . span ); $crate :: __private :: TokenStreamExt :: append ( tokens , ident ); }}}; }
+macro_rules! __ra_macro_fixture162 {($ident : ident )=>{ impl $crate :: __private :: Copy for $ident {} impl $crate :: __private :: Clone for $ident { fn clone (& self )-> Self {* self }}}; }
+macro_rules! __ra_macro_fixture163 {($ident : ident )=>{ impl $crate :: __private :: Debug for $ident { fn fmt (& self , f : & mut $crate :: __private :: Formatter )-> $crate :: __private :: fmt :: Result {$crate :: __private :: Formatter :: write_str ( f , concat ! ( "Keyword [" , stringify ! ($ident ), "]" ), )}} impl $crate :: __private :: Eq for $ident {} impl $crate :: __private :: PartialEq for $ident { fn eq (& self , _other : & Self )-> $crate :: __private :: bool { true }} impl $crate :: __private :: Hash for $ident { fn hash < __H : $crate :: __private :: Hasher > (& self , _state : & mut __H ){}}}; }
+macro_rules! __ra_macro_fixture164 {( struct struct )=>{}; ( enum enum )=>{}; ( pub pub )=>{}; }
+macro_rules! __ra_macro_fixture165 {($name : ident :: Verbatim , $member : ident )=>{}; ($name : ident ::$variant : ident , crate :: private )=>{}; ($name : ident ::$variant : ident , $member : ident )=>{ impl From <$member > for $name { fn from ( e : $member )-> $name {$name ::$variant ( e )}}}; }
+macro_rules! __ra_macro_fixture166 {( do_not_generate_to_tokens $($foo : tt )*)=>(); (($($arms : tt )*)$tokens : ident $name : ident {$variant : ident , $($next : tt )*})=>{ generate_to_tokens ! (($($arms )* $name ::$variant =>{})$tokens $name {$($next )* }); }; (($($arms : tt )*)$tokens : ident $name : ident {$variant : ident $member : ident , $($next : tt )*})=>{ generate_to_tokens ! (($($arms )* $name ::$variant ( _e )=> _e . to_tokens ($tokens ),)$tokens $name {$($next )* }); }; (($($arms : tt )*)$tokens : ident $name : ident {$variant : ident crate :: private , $($next : tt )*})=>{ generate_to_tokens ! (($($arms )* $name ::$variant (_)=> unreachable ! (),)$tokens $name {$($next )* }); }; (($($arms : tt )*)$tokens : ident $name : ident {})=>{# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "printing" )))] impl :: quote :: ToTokens for $name { fn to_tokens (& self , $tokens : & mut :: proc_macro2 :: TokenStream ){ match self {$($arms )* }}}}; }
+macro_rules! __ra_macro_fixture167 {($(# [$attr : meta ])* static ref $N : ident : $T : ty = $e : expr ; $($t : tt )*)=>{ __lazy_static_internal ! ($(# [$attr ])* () static ref $N : $T = $e ; $($t )*); }; ($(# [$attr : meta ])* pub static ref $N : ident : $T : ty = $e : expr ; $($t : tt )*)=>{ __lazy_static_internal ! ($(# [$attr ])* ( pub ) static ref $N : $T = $e ; $($t )*); }; ($(# [$attr : meta ])* pub ($($vis : tt )+) static ref $N : ident : $T : ty = $e : expr ; $($t : tt )*)=>{ __lazy_static_internal ! ($(# [$attr ])* ( pub ($($vis )+)) static ref $N : $T = $e ; $($t )*); }; ()=>()}
+macro_rules! __ra_macro_fixture168 {($($record : ident ($($whatever : tt )+ )),+ )=>{$(impl_value ! {$record ($($whatever )+ )})+ }}
+macro_rules! __ra_macro_fixture169 {($($len : tt ),+ )=>{$(impl < 'a > private :: ValidLen < 'a > for [(& 'a Field , Option <& 'a ( dyn Value + 'a )>); $len ]{})+ }}
+macro_rules! __ra_macro_fixture170 {($(# [$attr : meta ])* ($($vis : tt )*) static ref $N : ident : $T : ty = $e : expr ; $($t : tt )*)=>{ __lazy_static_internal ! (@ MAKE TY , $(# [$attr ])*, ($($vis )*), $N ); __lazy_static_internal ! (@ TAIL , $N : $T = $e ); lazy_static ! ($($t )*); }; (@ TAIL , $N : ident : $T : ty = $e : expr )=>{ impl $crate :: __Deref for $N { type Target = $T ; fn deref (& self )-> &$T {# [ inline ( always )] fn __static_ref_initialize ()-> $T {$e }# [ inline ( always )] fn __stability ()-> & 'static $T { __lazy_static_create ! ( LAZY , $T ); LAZY . get ( __static_ref_initialize )} __stability ()}} impl $crate :: LazyStatic for $N { fn initialize ( lazy : & Self ){ let _ = &** lazy ; }}}; (@ MAKE TY , $(# [$attr : meta ])*, ($($vis : tt )*), $N : ident )=>{# [ allow ( missing_copy_implementations )]# [ allow ( non_camel_case_types )]# [ allow ( dead_code )]$(# [$attr ])* $($vis )* struct $N { __private_field : ()}# [ doc ( hidden )]$($vis )* static $N : $N = $N { __private_field : ()}; }; ()=>()}
+macro_rules! __ra_macro_fixture171 {($record : ident ($($value_ty : tt ),+ ))=>{$(impl_one_value ! ($value_ty , | this : $value_ty | this , $record ); )+ }; ($record : ident ($($value_ty : tt ),+ as $as_ty : ty ))=>{$(impl_one_value ! ($value_ty , | this : $value_ty | this as $as_ty , $record ); )+ }; }
+macro_rules! __ra_macro_fixture172 {( bool , $op : expr , $record : ident )=>{ impl_one_value ! ( normal , bool , $op , $record ); }; ($value_ty : tt , $op : expr , $record : ident )=>{ impl_one_value ! ( normal , $value_ty , $op , $record ); impl_one_value ! ( nonzero , $value_ty , $op , $record ); }; ( normal , $value_ty : tt , $op : expr , $record : ident )=>{ impl $crate :: sealed :: Sealed for $value_ty {} impl $crate :: field :: Value for $value_ty { fn record (& self , key : &$crate :: field :: Field , visitor : & mut dyn $crate :: field :: Visit ){ visitor .$record ( key , $op (* self ))}}}; ( nonzero , $value_ty : tt , $op : expr , $record : ident )=>{# [ allow ( clippy :: useless_attribute , unused )] use num ::*; impl $crate :: sealed :: Sealed for ty_to_nonzero ! ($value_ty ){} impl $crate :: field :: Value for ty_to_nonzero ! ($value_ty ){ fn record (& self , key : &$crate :: field :: Field , visitor : & mut dyn $crate :: field :: Visit ){ visitor .$record ( key , $op ( self . get ()))}}}; }
+macro_rules! __ra_macro_fixture173 {($(# [ doc $($doc : tt )*])* # [ project = $proj_mut_ident : ident ]# [ project_ref = $proj_ref_ident : ident ]# [ project_replace = $proj_replace_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[$proj_mut_ident ][$proj_ref_ident ][$proj_replace_ident ]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project = $proj_mut_ident : ident ]# [ project_ref = $proj_ref_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[$proj_mut_ident ][$proj_ref_ident ][]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project = $proj_mut_ident : ident ]# [ project_replace = $proj_replace_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[$proj_mut_ident ][][$proj_replace_ident ]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project_ref = $proj_ref_ident : ident ]# [ project_replace = $proj_replace_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[][$proj_ref_ident ][$proj_replace_ident ]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project = $proj_mut_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[$proj_mut_ident ][][]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project_ref = $proj_ref_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[][$proj_ref_ident ][]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project_replace = $proj_replace_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[][][$proj_replace_ident ]$(# [ doc $($doc )*])* $($tt )* }}; ($($tt : tt )* )=>{$crate :: __pin_project_internal ! {[][][]$($tt )* }}; }
+macro_rules! __ra_macro_fixture174 {(@ struct => internal ; [$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?][$proj_vis : vis ][$(# [$attrs : meta ])* $vis : vis struct $ident : ident ][$($def_generics : tt )*][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )*)?]{$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ })=>{$(# [$attrs ])* $vis struct $ident $($def_generics )* $(where $($where_clause )*)? {$($field_vis $field : $field_ty ),+ }$crate :: __pin_project_internal ! {@ struct => make_proj_ty => named ; [$proj_vis ][$($proj_mut_ident )?][ make_proj_field_mut ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_ty => named ; [$proj_vis ][$($proj_ref_ident )?][ make_proj_field_ref ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_replace_ty => named ; [$proj_vis ][$($proj_replace_ident )?][ make_proj_field_replace ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}# [ allow ( explicit_outlives_requirements )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: unknown_clippy_lints )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: used_underscore_binding )] const _: ()= {$crate :: __pin_project_internal ! {@ struct => make_proj_ty => unnamed ; [$proj_vis ][$($proj_mut_ident )?][ Projection ][ make_proj_field_mut ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}$crate :: __pin_project_internal ! 
{@ struct => make_proj_ty => unnamed ; [$proj_vis ][$($proj_ref_ident )?][ ProjectionRef ][ make_proj_field_ref ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_replace_ty => unnamed ; [$proj_vis ][$($proj_replace_ident )?][ ProjectionReplace ][ make_proj_field_replace ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }} impl <$($impl_generics )*> $ident <$($ty_generics )*> $(where $($where_clause )*)? {$crate :: __pin_project_internal ! {@ struct => make_proj_method ; [$proj_vis ][$($proj_mut_ident )?][ Projection ][ project get_unchecked_mut mut ][$($ty_generics )*]{$($(# [$pin ])? $field_vis $field ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_method ; [$proj_vis ][$($proj_ref_ident )?][ ProjectionRef ][ project_ref get_ref ][$($ty_generics )*]{$($(# [$pin ])? $field_vis $field ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_replace_method ; [$proj_vis ][$($proj_replace_ident )?][ ProjectionReplace ][$($ty_generics )*]{$($(# [$pin ])? $field_vis $field ),+ }}}$crate :: __pin_project_internal ! {@ make_unpin_impl ; [$vis $ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]$($field : $crate :: __pin_project_internal ! (@ make_unpin_bound ; $(# [$pin ])? $field_ty )),+ }$crate :: __pin_project_internal ! {@ make_drop_impl ; [$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]}# [ forbid ( safe_packed_borrows )] fn __assert_not_repr_packed <$($impl_generics )*> ( this : &$ident <$($ty_generics )*>)$(where $($where_clause )*)? 
{$(let _ = & this .$field ; )+ }}; }; (@ enum => internal ; [$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?][$proj_vis : vis ][$(# [$attrs : meta ])* $vis : vis enum $ident : ident ][$($def_generics : tt )*][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )*)?]{$($(# [$variant_attrs : meta ])* $variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ })? ),+ })=>{$(# [$attrs ])* $vis enum $ident $($def_generics )* $(where $($where_clause )*)? {$($(# [$variant_attrs ])* $variant $({$($field : $field_ty ),+ })? ),+ }$crate :: __pin_project_internal ! {@ enum => make_proj_ty ; [$proj_vis ][$($proj_mut_ident )?][ make_proj_field_mut ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}$crate :: __pin_project_internal ! {@ enum => make_proj_ty ; [$proj_vis ][$($proj_ref_ident )?][ make_proj_field_ref ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}$crate :: __pin_project_internal ! {@ enum => make_proj_replace_ty ; [$proj_vis ][$($proj_replace_ident )?][ make_proj_field_replace ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: unknown_clippy_lints )]# [ allow ( clippy :: used_underscore_binding )] const _: ()= { impl <$($impl_generics )*> $ident <$($ty_generics )*> $(where $($where_clause )*)? {$crate :: __pin_project_internal ! {@ enum => make_proj_method ; [$proj_vis ][$($proj_mut_ident )?][ project get_unchecked_mut mut ][$($ty_generics )*]{$($variant $({$($(# [$pin ])? $field ),+ })? ),+ }}$crate :: __pin_project_internal ! 
{@ enum => make_proj_method ; [$proj_vis ][$($proj_ref_ident )?][ project_ref get_ref ][$($ty_generics )*]{$($variant $({$($(# [$pin ])? $field ),+ })? ),+ }}$crate :: __pin_project_internal ! {@ enum => make_proj_replace_method ; [$proj_vis ][$($proj_replace_ident )?][$($ty_generics )*]{$($variant $({$($(# [$pin ])? $field ),+ })? ),+ }}}$crate :: __pin_project_internal ! {@ make_unpin_impl ; [$vis $ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]$($variant : ($($($crate :: __pin_project_internal ! (@ make_unpin_bound ; $(# [$pin ])? $field_ty )),+ )?)),+ }$crate :: __pin_project_internal ! {@ make_drop_impl ; [$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]}}; }; (@ struct => make_proj_ty => unnamed ; [$proj_vis : vis ][$_proj_ty_ident : ident ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ struct => make_proj_ty => unnamed ; [$proj_vis : vis ][][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{$crate :: __pin_project_internal ! {@ struct => make_proj_ty => named ; [$proj_vis ][$proj_ty_ident ][$make_proj_field ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]$($field )* }}; (@ struct => make_proj_ty => named ; [$proj_vis : vis ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]{$($(# [$pin : ident ])? 
$field_vis : vis $field : ident : $field_ty : ty ),+ })=>{# [ allow ( dead_code )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: unknown_clippy_lints )]# [ allow ( clippy :: mut_mut )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: ref_option_ref )]# [ allow ( clippy :: type_repetition_in_bounds )]$proj_vis struct $proj_ty_ident < '__pin , $($impl_generics )*> where $ident <$($ty_generics )*>: '__pin $(, $($where_clause )*)? {$($field_vis $field : $crate :: __pin_project_internal ! (@$make_proj_field ; $(# [$pin ])? $field_ty )),+ }}; (@ struct => make_proj_ty => named ; [$proj_vis : vis ][][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ struct => make_proj_replace_ty => unnamed ; [$proj_vis : vis ][$_proj_ty_ident : ident ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ struct => make_proj_replace_ty => unnamed ; [$proj_vis : vis ][][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ struct => make_proj_replace_ty => named ; [$proj_vis : vis ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]{$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ })=>{# [ allow ( dead_code )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: mut_mut )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: type_repetition_in_bounds )]$proj_vis struct $proj_ty_ident <$($impl_generics )*> where $($($where_clause )*)? {$($field_vis $field : $crate :: __pin_project_internal ! (@$make_proj_field ; $(# [$pin ])? 
$field_ty )),+ }}; (@ struct => make_proj_replace_ty => named ; [$proj_vis : vis ][][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ enum => make_proj_ty ; [$proj_vis : vis ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]{$($variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ })? ),+ })=>{# [ allow ( dead_code )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: unknown_clippy_lints )]# [ allow ( clippy :: mut_mut )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: ref_option_ref )]# [ allow ( clippy :: type_repetition_in_bounds )]$proj_vis enum $proj_ty_ident < '__pin , $($impl_generics )*> where $ident <$($ty_generics )*>: '__pin $(, $($where_clause )*)? {$($variant $({$($field : $crate :: __pin_project_internal ! (@$make_proj_field ; $(# [$pin ])? $field_ty )),+ })? ),+ }}; (@ enum => make_proj_ty ; [$proj_vis : vis ][][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($variant : tt )* )=>{}; (@ enum => make_proj_replace_ty ; [$proj_vis : vis ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]{$($variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ })? ),+ })=>{# [ allow ( dead_code )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: mut_mut )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: type_repetition_in_bounds )]$proj_vis enum $proj_ty_ident <$($impl_generics )*> where $($($where_clause )*)? {$($variant $({$($field : $crate :: __pin_project_internal ! (@$make_proj_field ; $(# [$pin ])? $field_ty )),+ })? 
),+ }}; (@ enum => make_proj_replace_ty ; [$proj_vis : vis ][][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($variant : tt )* )=>{}; (@ make_proj_replace_block ; [$($proj_path : tt )+]{$($(# [$pin : ident ])? $field_vis : vis $field : ident ),+ })=>{ let result = $($proj_path )* {$($field : $crate :: __pin_project_internal ! (@ make_replace_field_proj ; $(# [$pin ])? $field )),+ }; {($($crate :: __pin_project_internal ! (@ make_unsafe_drop_in_place_guard ; $(# [$pin ])? $field ), )* ); } result }; (@ make_proj_replace_block ; [$($proj_path : tt )+])=>{$($proj_path )* }; (@ struct => make_proj_method ; [$proj_vis : vis ][$proj_ty_ident : ident ][$_proj_ty_ident : ident ][$method_ident : ident $get_method : ident $($mut : ident )?][$($ty_generics : tt )*]{$($(# [$pin : ident ])? $field_vis : vis $field : ident ),+ })=>{$proj_vis fn $method_ident < '__pin > ( self : $crate :: __private :: Pin <& '__pin $($mut )? Self >, )-> $proj_ty_ident < '__pin , $($ty_generics )*> { unsafe { let Self {$($field ),* }= self .$get_method (); $proj_ty_ident {$($field : $crate :: __pin_project_internal ! (@ make_unsafe_field_proj ; $(# [$pin ])? $field )),+ }}}}; (@ struct => make_proj_method ; [$proj_vis : vis ][][$proj_ty_ident : ident ][$method_ident : ident $get_method : ident $($mut : ident )?][$($ty_generics : tt )*]$($variant : tt )* )=>{$crate :: __pin_project_internal ! {@ struct => make_proj_method ; [$proj_vis ][$proj_ty_ident ][$proj_ty_ident ][$method_ident $get_method $($mut )?][$($ty_generics )*]$($variant )* }}; (@ struct => make_proj_replace_method ; [$proj_vis : vis ][$proj_ty_ident : ident ][$_proj_ty_ident : ident ][$($ty_generics : tt )*]{$($(# [$pin : ident ])? 
$field_vis : vis $field : ident ),+ })=>{$proj_vis fn project_replace ( self : $crate :: __private :: Pin <& mut Self >, replacement : Self , )-> $proj_ty_ident <$($ty_generics )*> { unsafe { let __self_ptr : * mut Self = self . get_unchecked_mut (); let __guard = $crate :: __private :: UnsafeOverwriteGuard { target : __self_ptr , value : $crate :: __private :: ManuallyDrop :: new ( replacement ), }; let Self {$($field ),* }= & mut * __self_ptr ; $crate :: __pin_project_internal ! {@ make_proj_replace_block ; [$proj_ty_ident ]{$($(# [$pin ])? $field ),+ }}}}}; (@ struct => make_proj_replace_method ; [$proj_vis : vis ][][$proj_ty_ident : ident ][$($ty_generics : tt )*]$($variant : tt )* )=>{}; (@ enum => make_proj_method ; [$proj_vis : vis ][$proj_ty_ident : ident ][$method_ident : ident $get_method : ident $($mut : ident )?][$($ty_generics : tt )*]{$($variant : ident $({$($(# [$pin : ident ])? $field : ident ),+ })? ),+ })=>{$proj_vis fn $method_ident < '__pin > ( self : $crate :: __private :: Pin <& '__pin $($mut )? Self >, )-> $proj_ty_ident < '__pin , $($ty_generics )*> { unsafe { match self .$get_method (){$(Self ::$variant $({$($field ),+ })? =>{$proj_ty_ident ::$variant $({$($field : $crate :: __pin_project_internal ! (@ make_unsafe_field_proj ; $(# [$pin ])? $field )),+ })? }),+ }}}}; (@ enum => make_proj_method ; [$proj_vis : vis ][][$method_ident : ident $get_method : ident $($mut : ident )?][$($ty_generics : tt )*]$($variant : tt )* )=>{}; (@ enum => make_proj_replace_method ; [$proj_vis : vis ][$proj_ty_ident : ident ][$($ty_generics : tt )*]{$($variant : ident $({$($(# [$pin : ident ])? $field : ident ),+ })? ),+ })=>{$proj_vis fn project_replace ( self : $crate :: __private :: Pin <& mut Self >, replacement : Self , )-> $proj_ty_ident <$($ty_generics )*> { unsafe { let __self_ptr : * mut Self = self . 
get_unchecked_mut (); let __guard = $crate :: __private :: UnsafeOverwriteGuard { target : __self_ptr , value : $crate :: __private :: ManuallyDrop :: new ( replacement ), }; match & mut * __self_ptr {$(Self ::$variant $({$($field ),+ })? =>{$crate :: __pin_project_internal ! {@ make_proj_replace_block ; [$proj_ty_ident :: $variant ]$({$($(# [$pin ])? $field ),+ })? }}),+ }}}}; (@ enum => make_proj_replace_method ; [$proj_vis : vis ][][$($ty_generics : tt )*]$($variant : tt )* )=>{}; (@ make_unpin_impl ; [$vis : vis $ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{# [ allow ( non_snake_case )]$vis struct __Origin < '__pin , $($impl_generics )*> $(where $($where_clause )*)? { __dummy_lifetime : $crate :: __private :: PhantomData <& '__pin ()>, $($field )* } impl < '__pin , $($impl_generics )*> $crate :: __private :: Unpin for $ident <$($ty_generics )*> where __Origin < '__pin , $($ty_generics )*>: $crate :: __private :: Unpin $(, $($where_clause )*)? {}}; (@ make_drop_impl ; [$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?])=>{ trait MustNotImplDrop {}# [ allow ( clippy :: drop_bounds , drop_bounds )] impl < T : $crate :: __private :: Drop > MustNotImplDrop for T {} impl <$($impl_generics )*> MustNotImplDrop for $ident <$($ty_generics )*> $(where $($where_clause )*)? 
{}}; (@ make_unpin_bound ; # [ pin ]$field_ty : ty )=>{$field_ty }; (@ make_unpin_bound ; $field_ty : ty )=>{$crate :: __private :: AlwaysUnpin <$field_ty > }; (@ make_unsafe_field_proj ; # [ pin ]$field : ident )=>{$crate :: __private :: Pin :: new_unchecked ($field )}; (@ make_unsafe_field_proj ; $field : ident )=>{$field }; (@ make_replace_field_proj ; # [ pin ]$field : ident )=>{$crate :: __private :: PhantomData }; (@ make_replace_field_proj ; $field : ident )=>{$crate :: __private :: ptr :: read ($field )}; (@ make_unsafe_drop_in_place_guard ; # [ pin ]$field : ident )=>{$crate :: __private :: UnsafeDropInPlaceGuard ($field )}; (@ make_unsafe_drop_in_place_guard ; $field : ident )=>{()}; (@ make_proj_field_mut ; # [ pin ]$field_ty : ty )=>{$crate :: __private :: Pin <& '__pin mut ($field_ty )> }; (@ make_proj_field_mut ; $field_ty : ty )=>{& '__pin mut ($field_ty )}; (@ make_proj_field_ref ; # [ pin ]$field_ty : ty )=>{$crate :: __private :: Pin <& '__pin ($field_ty )> }; (@ make_proj_field_ref ; $field_ty : ty )=>{& '__pin ($field_ty )}; (@ make_proj_field_replace ; # [ pin ]$field_ty : ty )=>{$crate :: __private :: PhantomData <$field_ty > }; (@ make_proj_field_replace ; $field_ty : ty )=>{$field_ty }; ([$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?]$(# [$attrs : meta ])* pub struct $ident : ident $(< $($lifetime : lifetime $(: $lifetime_bound : lifetime )? ),* $(,)? $($generics : ident $(: $generics_bound : path )? $(: ?$generics_unsized_bound : path )? $(: $generics_lifetime_bound : lifetime )? $(= $generics_default : ty )? ),* $(,)? >)? $(where $($where_clause_ty : ty $(: $where_clause_bound : path )? $(: ?$where_clause_unsized_bound : path )? $(: $where_clause_lifetime_bound : lifetime )? ),* $(,)? )? {$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ $(,)? })=>{$crate :: __pin_project_internal ! 
{@ struct => internal ; [$($proj_mut_ident )?][$($proj_ref_ident )?][$($proj_replace_ident )?][ pub ( crate )][$(# [$attrs ])* pub struct $ident ][$(< $($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? $(= $generics_default )? ),* >)?][$($($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? ),* )?][$($($lifetime ,)* $($generics ),* )?][$(where $($where_clause_ty $(: $where_clause_bound )? $(: ?$where_clause_unsized_bound )? $(: $where_clause_lifetime_bound )? ),* )?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}}; ([$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?]$(# [$attrs : meta ])* $vis : vis struct $ident : ident $(< $($lifetime : lifetime $(: $lifetime_bound : lifetime )? ),* $(,)? $($generics : ident $(: $generics_bound : path )? $(: ?$generics_unsized_bound : path )? $(: $generics_lifetime_bound : lifetime )? $(= $generics_default : ty )? ),* $(,)? >)? $(where $($where_clause_ty : ty $(: $where_clause_bound : path )? $(: ?$where_clause_unsized_bound : path )? $(: $where_clause_lifetime_bound : lifetime )? ),* $(,)? )? {$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ $(,)? })=>{$crate :: __pin_project_internal ! {@ struct => internal ; [$($proj_mut_ident )?][$($proj_ref_ident )?][$($proj_replace_ident )?][$vis ][$(# [$attrs ])* $vis struct $ident ][$(< $($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? $(= $generics_default )? ),* >)?][$($($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? ),* )?][$($($lifetime ,)* $($generics ),* )?][$(where $($where_clause_ty $(: $where_clause_bound )? $(: ?$where_clause_unsized_bound )? 
$(: $where_clause_lifetime_bound )? ),* )?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}}; ([$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?]$(# [$attrs : meta ])* pub enum $ident : ident $(< $($lifetime : lifetime $(: $lifetime_bound : lifetime )? ),* $(,)? $($generics : ident $(: $generics_bound : path )? $(: ?$generics_unsized_bound : path )? $(: $generics_lifetime_bound : lifetime )? $(= $generics_default : ty )? ),* $(,)? >)? $(where $($where_clause_ty : ty $(: $where_clause_bound : path )? $(: ?$where_clause_unsized_bound : path )? $(: $where_clause_lifetime_bound : lifetime )? ),* $(,)? )? {$($(# [$variant_attrs : meta ])* $variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ $(,)? })? ),+ $(,)? })=>{$crate :: __pin_project_internal ! {@ enum => internal ; [$($proj_mut_ident )?][$($proj_ref_ident )?][$($proj_replace_ident )?][ pub ( crate )][$(# [$attrs ])* pub enum $ident ][$(< $($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? $(= $generics_default )? ),* >)?][$($($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? ),* )?][$($($lifetime ,)* $($generics ),* )?][$(where $($where_clause_ty $(: $where_clause_bound )? $(: ?$where_clause_unsized_bound )? $(: $where_clause_lifetime_bound )? ),* )?]{$($(# [$variant_attrs ])* $variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}}; ([$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?]$(# [$attrs : meta ])* $vis : vis enum $ident : ident $(< $($lifetime : lifetime $(: $lifetime_bound : lifetime )? ),* $(,)? $($generics : ident $(: $generics_bound : path )? $(: ?$generics_unsized_bound : path )? $(: $generics_lifetime_bound : lifetime )? $(= $generics_default : ty )? ),* $(,)? >)? 
$(where $($where_clause_ty : ty $(: $where_clause_bound : path )? $(: ?$where_clause_unsized_bound : path )? $(: $where_clause_lifetime_bound : lifetime )? ),* $(,)? )? {$($(# [$variant_attrs : meta ])* $variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ $(,)? })? ),+ $(,)? })=>{$crate :: __pin_project_internal ! {@ enum => internal ; [$($proj_mut_ident )?][$($proj_ref_ident )?][$($proj_replace_ident )?][$vis ][$(# [$attrs ])* $vis enum $ident ][$(< $($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? $(= $generics_default )? ),* >)?][$($($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? ),* )?][$($($lifetime ,)* $($generics ),* )?][$(where $($where_clause_ty $(: $where_clause_bound )? $(: ?$where_clause_unsized_bound )? $(: $where_clause_lifetime_bound )? ),* )?]{$($(# [$variant_attrs ])* $variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}}; }
+macro_rules! __ra_macro_fixture175 {($t : ty , $example : tt )=>{ impl AtomicCell <$t > {# [ doc = " Increments the current value by `val` and returns the previous value." ]# [ doc = "" ]# [ doc = " The addition wraps on overflow." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_add(3), 7);" ]# [ doc = " assert_eq!(a.load(), 10);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_add (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_add ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value = value . wrapping_add ( val ); old }}# [ doc = " Decrements the current value by `val` and returns the previous value." ]# [ doc = "" ]# [ doc = " The subtraction wraps on overflow." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_sub(3), 7);" ]# [ doc = " assert_eq!(a.load(), 4);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_sub (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_sub ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value = value . wrapping_sub ( val ); old }}# [ doc = " Applies bitwise \\\"and\\\" to the current value and returns the previous value." 
]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_and(3), 7);" ]# [ doc = " assert_eq!(a.load(), 3);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_and (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_and ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value &= val ; old }}# [ doc = " Applies bitwise \\\"or\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_or(16), 7);" ]# [ doc = " assert_eq!(a.load(), 23);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_or (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_or ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value |= val ; old }}# [ doc = " Applies bitwise \\\"xor\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_xor(2), 7);" ]# [ doc = " assert_eq!(a.load(), 5);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_xor (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . 
value . get () as * const atomic :: AtomicUsize )}; a . fetch_xor ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value ^= val ; old }}}}; ($t : ty , $atomic : ty , $example : tt )=>{ impl AtomicCell <$t > {# [ doc = " Increments the current value by `val` and returns the previous value." ]# [ doc = "" ]# [ doc = " The addition wraps on overflow." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_add(3), 7);" ]# [ doc = " assert_eq!(a.load(), 10);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_add (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . fetch_add ( val , Ordering :: AcqRel )}# [ doc = " Decrements the current value by `val` and returns the previous value." ]# [ doc = "" ]# [ doc = " The subtraction wraps on overflow." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_sub(3), 7);" ]# [ doc = " assert_eq!(a.load(), 4);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_sub (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . fetch_sub ( val , Ordering :: AcqRel )}# [ doc = " Applies bitwise \\\"and\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_and(3), 7);" ]# [ doc = " assert_eq!(a.load(), 3);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_and (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . 
get () as * const $atomic )}; a . fetch_and ( val , Ordering :: AcqRel )}# [ doc = " Applies bitwise \\\"or\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_or(16), 7);" ]# [ doc = " assert_eq!(a.load(), 23);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_or (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . fetch_or ( val , Ordering :: AcqRel )}# [ doc = " Applies bitwise \\\"xor\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_xor(2), 7);" ]# [ doc = " assert_eq!(a.load(), 5);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_xor (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . fetch_xor ( val , Ordering :: AcqRel )}}}; }
+macro_rules! __ra_macro_fixture176 {($atomic : ident , $val : ty )=>{ impl AtomicConsume for :: core :: sync :: atomic ::$atomic { type Val = $val ; impl_consume ! (); }}; }
+macro_rules! __ra_macro_fixture177 {($t : ty , $min : expr , $max : expr )=>{ impl Bounded for $t {# [ inline ] fn min_value ()-> $t {$min }# [ inline ] fn max_value ()-> $t {$max }}}; }
+macro_rules! __ra_macro_fixture178 {($m : ident )=>{ for_each_tuple_ ! {$m !! A , B , C , D , E , F , G , H , I , J , K , L , M , N , O , P , Q , R , S , T , }}; }
+macro_rules! __ra_macro_fixture179 {($T : ident )=>{ impl ToPrimitive for $T { impl_to_primitive_int_to_int ! {$T : fn to_isize -> isize ; fn to_i8 -> i8 ; fn to_i16 -> i16 ; fn to_i32 -> i32 ; fn to_i64 -> i64 ; # [ cfg ( has_i128 )] fn to_i128 -> i128 ; } impl_to_primitive_int_to_uint ! {$T : fn to_usize -> usize ; fn to_u8 -> u8 ; fn to_u16 -> u16 ; fn to_u32 -> u32 ; fn to_u64 -> u64 ; # [ cfg ( has_i128 )] fn to_u128 -> u128 ; }# [ inline ] fn to_f32 (& self )-> Option < f32 > { Some (* self as f32 )}# [ inline ] fn to_f64 (& self )-> Option < f64 > { Some (* self as f64 )}}}; }
+macro_rules! __ra_macro_fixture180 {($T : ident )=>{ impl ToPrimitive for $T { impl_to_primitive_uint_to_int ! {$T : fn to_isize -> isize ; fn to_i8 -> i8 ; fn to_i16 -> i16 ; fn to_i32 -> i32 ; fn to_i64 -> i64 ; # [ cfg ( has_i128 )] fn to_i128 -> i128 ; } impl_to_primitive_uint_to_uint ! {$T : fn to_usize -> usize ; fn to_u8 -> u8 ; fn to_u16 -> u16 ; fn to_u32 -> u32 ; fn to_u64 -> u64 ; # [ cfg ( has_i128 )] fn to_u128 -> u128 ; }# [ inline ] fn to_f32 (& self )-> Option < f32 > { Some (* self as f32 )}# [ inline ] fn to_f64 (& self )-> Option < f64 > { Some (* self as f64 )}}}; }
+macro_rules! __ra_macro_fixture181 {($T : ident )=>{ impl ToPrimitive for $T { impl_to_primitive_float_to_signed_int ! {$T : fn to_isize -> isize ; fn to_i8 -> i8 ; fn to_i16 -> i16 ; fn to_i32 -> i32 ; fn to_i64 -> i64 ; # [ cfg ( has_i128 )] fn to_i128 -> i128 ; } impl_to_primitive_float_to_unsigned_int ! {$T : fn to_usize -> usize ; fn to_u8 -> u8 ; fn to_u16 -> u16 ; fn to_u32 -> u32 ; fn to_u64 -> u64 ; # [ cfg ( has_i128 )] fn to_u128 -> u128 ; } impl_to_primitive_float_to_float ! {$T : fn to_f32 -> f32 ; fn to_f64 -> f64 ; }}}; }
+macro_rules! __ra_macro_fixture182 {($T : ty , $to_ty : ident )=>{# [ allow ( deprecated )] impl FromPrimitive for $T {# [ inline ] fn from_isize ( n : isize )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_i8 ( n : i8 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_i16 ( n : i16 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_i32 ( n : i32 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_i64 ( n : i64 )-> Option <$T > { n .$to_ty ()}# [ cfg ( has_i128 )]# [ inline ] fn from_i128 ( n : i128 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_usize ( n : usize )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_u8 ( n : u8 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_u16 ( n : u16 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_u32 ( n : u32 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_u64 ( n : u64 )-> Option <$T > { n .$to_ty ()}# [ cfg ( has_i128 )]# [ inline ] fn from_u128 ( n : u128 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_f32 ( n : f32 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_f64 ( n : f64 )-> Option <$T > { n .$to_ty ()}}}; }
+macro_rules! __ra_macro_fixture183 {($T : ty , $conv : ident )=>{ impl NumCast for $T {# [ inline ]# [ allow ( deprecated )] fn from < N : ToPrimitive > ( n : N )-> Option <$T > { n .$conv ()}}}; }
+macro_rules! __ra_macro_fixture184 {(@ $T : ty =>$(# [$cfg : meta ])* impl $U : ty )=>{$(# [$cfg ])* impl AsPrimitive <$U > for $T {# [ inline ] fn as_ ( self )-> $U { self as $U }}}; (@ $T : ty =>{$($U : ty ),* })=>{$(impl_as_primitive ! (@ $T => impl $U ); )*}; ($T : ty =>{$($U : ty ),* })=>{ impl_as_primitive ! (@ $T =>{$($U ),* }); impl_as_primitive ! (@ $T =>{ u8 , u16 , u32 , u64 , usize }); impl_as_primitive ! (@ $T =># [ cfg ( has_i128 )] impl u128 ); impl_as_primitive ! (@ $T =>{ i8 , i16 , i32 , i64 , isize }); impl_as_primitive ! (@ $T =># [ cfg ( has_i128 )] impl i128 ); }; }
+macro_rules! __ra_macro_fixture185 {($(# [$doc : meta ]$constant : ident ,)+)=>(# [ allow ( non_snake_case )] pub trait FloatConst {$(# [$doc ] fn $constant ()-> Self ;)+ # [ doc = "Return the full circle constant `τ`." ]# [ inline ] fn TAU ()-> Self where Self : Sized + Add < Self , Output = Self >{ Self :: PI ()+ Self :: PI ()}# [ doc = "Return `log10(2.0)`." ]# [ inline ] fn LOG10_2 ()-> Self where Self : Sized + Div < Self , Output = Self >{ Self :: LN_2 ()/ Self :: LN_10 ()}# [ doc = "Return `log2(10.0)`." ]# [ inline ] fn LOG2_10 ()-> Self where Self : Sized + Div < Self , Output = Self >{ Self :: LN_10 ()/ Self :: LN_2 ()}} float_const_impl ! {@ float f32 , $($constant ,)+ } float_const_impl ! {@ float f64 , $($constant ,)+ }); (@ float $T : ident , $($constant : ident ,)+)=>( impl FloatConst for $T { constant ! {$($constant ()-> $T :: consts ::$constant ; )+ TAU ()-> 6.28318530717958647692528676655900577 ; LOG10_2 ()-> 0.301029995663981195213738894724493027 ; LOG2_10 ()-> 3.32192809488736234787031942948939018 ; }}); }
+macro_rules! __ra_macro_fixture186 {($t : ty , $v : expr )=>{ impl Zero for $t {# [ inline ] fn zero ()-> $t {$v }# [ inline ] fn is_zero (& self )-> bool {* self == $v }}}; }
+macro_rules! __ra_macro_fixture187 {($t : ty , $v : expr )=>{ impl One for $t {# [ inline ] fn one ()-> $t {$v }# [ inline ] fn is_one (& self )-> bool {* self == $v }}}; }
+macro_rules! __ra_macro_fixture188 {($T : ty , $S : ty , $U : ty )=>{ impl PrimInt for $T {# [ inline ] fn count_ones ( self )-> u32 {<$T >:: count_ones ( self )}# [ inline ] fn count_zeros ( self )-> u32 {<$T >:: count_zeros ( self )}# [ inline ] fn leading_zeros ( self )-> u32 {<$T >:: leading_zeros ( self )}# [ inline ] fn trailing_zeros ( self )-> u32 {<$T >:: trailing_zeros ( self )}# [ inline ] fn rotate_left ( self , n : u32 )-> Self {<$T >:: rotate_left ( self , n )}# [ inline ] fn rotate_right ( self , n : u32 )-> Self {<$T >:: rotate_right ( self , n )}# [ inline ] fn signed_shl ( self , n : u32 )-> Self {(( self as $S )<< n ) as $T }# [ inline ] fn signed_shr ( self , n : u32 )-> Self {(( self as $S )>> n ) as $T }# [ inline ] fn unsigned_shl ( self , n : u32 )-> Self {(( self as $U )<< n ) as $T }# [ inline ] fn unsigned_shr ( self , n : u32 )-> Self {(( self as $U )>> n ) as $T }# [ inline ] fn swap_bytes ( self )-> Self {<$T >:: swap_bytes ( self )}# [ inline ] fn from_be ( x : Self )-> Self {<$T >:: from_be ( x )}# [ inline ] fn from_le ( x : Self )-> Self {<$T >:: from_le ( x )}# [ inline ] fn to_be ( self )-> Self {<$T >:: to_be ( self )}# [ inline ] fn to_le ( self )-> Self {<$T >:: to_le ( self )}# [ inline ] fn pow ( self , exp : u32 )-> Self {<$T >:: pow ( self , exp )}}}; }
+macro_rules! __ra_macro_fixture189 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , v : &$t )-> Option <$t > {<$t >::$method (* self , * v )}}}; }
+macro_rules! __ra_macro_fixture190 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self )-> Option <$t > {<$t >::$method (* self )}}}; }
+macro_rules! __ra_macro_fixture191 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , rhs : u32 )-> Option <$t > {<$t >::$method (* self , rhs )}}}; }
+macro_rules! __ra_macro_fixture192 {($trait_name : ident for $($t : ty )*)=>{$(impl $trait_name for $t { type Output = Self ; # [ inline ] fn mul_add ( self , a : Self , b : Self )-> Self :: Output {( self * a )+ b }})*}}
+macro_rules! __ra_macro_fixture193 {($trait_name : ident for $($t : ty )*)=>{$(impl $trait_name for $t {# [ inline ] fn mul_add_assign (& mut self , a : Self , b : Self ){* self = (* self * a )+ b }})*}}
+macro_rules! __ra_macro_fixture194 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , v : & Self )-> ( Self , bool ){<$t >::$method (* self , * v )}}}; }
+macro_rules! __ra_macro_fixture195 {($trait_name : ident for $($t : ty )*)=>{$(impl $trait_name for $t {# [ inline ] fn saturating_add ( self , v : Self )-> Self { Self :: saturating_add ( self , v )}# [ inline ] fn saturating_sub ( self , v : Self )-> Self { Self :: saturating_sub ( self , v )}})*}}
+macro_rules! __ra_macro_fixture196 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , v : & Self )-> Self {<$t >::$method (* self , * v )}}}; }
+macro_rules! __ra_macro_fixture197 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , v : & Self )-> Self {<$t >::$method (* self , * v )}}}; ($trait_name : ident , $method : ident , $t : ty , $rhs : ty )=>{ impl $trait_name <$rhs > for $t {# [ inline ] fn $method (& self , v : &$rhs )-> Self {<$t >::$method (* self , * v )}}}; }
+macro_rules! __ra_macro_fixture198 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self )-> $t {<$t >::$method (* self )}}}; }
+macro_rules! __ra_macro_fixture199 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , rhs : u32 )-> $t {<$t >::$method (* self , rhs )}}}; }
+macro_rules! __ra_macro_fixture200 {($t : ty )=>{ pow_impl ! ($t , u8 ); pow_impl ! ($t , usize ); }; ($t : ty , $rhs : ty )=>{ pow_impl ! ($t , $rhs , usize , pow ); }; ($t : ty , $rhs : ty , $desired_rhs : ty , $method : expr )=>{ impl Pow <$rhs > for $t { type Output = $t ; # [ inline ] fn pow ( self , rhs : $rhs )-> $t {($method )( self , <$desired_rhs >:: from ( rhs ))}} impl < 'a > Pow <& 'a $rhs > for $t { type Output = $t ; # [ inline ] fn pow ( self , rhs : & 'a $rhs )-> $t {($method )( self , <$desired_rhs >:: from (* rhs ))}} impl < 'a > Pow <$rhs > for & 'a $t { type Output = $t ; # [ inline ] fn pow ( self , rhs : $rhs )-> $t {($method )(* self , <$desired_rhs >:: from ( rhs ))}} impl < 'a , 'b > Pow <& 'a $rhs > for & 'b $t { type Output = $t ; # [ inline ] fn pow ( self , rhs : & 'a $rhs )-> $t {($method )(* self , <$desired_rhs >:: from (* rhs ))}}}; }
+macro_rules! __ra_macro_fixture201 {($($t : ty )*)=>($(impl Signed for $t {# [ inline ] fn abs (& self )-> $t { if self . is_negative (){-* self } else {* self }}# [ inline ] fn abs_sub (& self , other : &$t )-> $t { if * self <= * other { 0 } else {* self - * other }}# [ inline ] fn signum (& self )-> $t { match * self { n if n > 0 => 1 , 0 => 0 , _ =>- 1 , }}# [ inline ] fn is_positive (& self )-> bool {* self > 0 }# [ inline ] fn is_negative (& self )-> bool {* self < 0 }})*)}
+macro_rules! __ra_macro_fixture202 {($t : ty )=>{ impl Signed for $t {# [ doc = " Computes the absolute value. Returns `NAN` if the number is `NAN`." ]# [ inline ] fn abs (& self )-> $t { FloatCore :: abs (* self )}# [ doc = " The positive difference of two numbers. Returns `0.0` if the number is" ]# [ doc = " less than or equal to `other`, otherwise the difference between`self`" ]# [ doc = " and `other` is returned." ]# [ inline ] fn abs_sub (& self , other : &$t )-> $t { if * self <= * other { 0. } else {* self - * other }}# [ doc = " # Returns" ]# [ doc = "" ]# [ doc = " - `1.0` if the number is positive, `+0.0` or `INFINITY`" ]# [ doc = " - `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`" ]# [ doc = " - `NAN` if the number is NaN" ]# [ inline ] fn signum (& self )-> $t { FloatCore :: signum (* self )}# [ doc = " Returns `true` if the number is positive, including `+0.0` and `INFINITY`" ]# [ inline ] fn is_positive (& self )-> bool { FloatCore :: is_sign_positive (* self )}# [ doc = " Returns `true` if the number is negative, including `-0.0` and `NEG_INFINITY`" ]# [ inline ] fn is_negative (& self )-> bool { FloatCore :: is_sign_negative (* self )}}}; }
+macro_rules! __ra_macro_fixture203 {($name : ident for $($t : ty )*)=>($(impl $name for $t {})*)}
+macro_rules! __ra_macro_fixture204 {($name : ident for $($t : ty )*)=>($(impl $name for $t { type FromStrRadixErr = :: core :: num :: ParseIntError ; # [ inline ] fn from_str_radix ( s : & str , radix : u32 )-> Result < Self , :: core :: num :: ParseIntError > {<$t >:: from_str_radix ( s , radix )}})*)}
+macro_rules! __ra_macro_fixture205 {($name : ident for $($t : ident )*)=>($(impl $name for $t { type FromStrRadixErr = ParseFloatError ; fn from_str_radix ( src : & str , radix : u32 )-> Result < Self , Self :: FromStrRadixErr > { use self :: FloatErrorKind ::*; use self :: ParseFloatError as PFE ; match src { "inf" => return Ok ( core ::$t :: INFINITY ), "-inf" => return Ok ( core ::$t :: NEG_INFINITY ), "NaN" => return Ok ( core ::$t :: NAN ), _ =>{}, } fn slice_shift_char ( src : & str )-> Option < ( char , & str )> { let mut chars = src . chars (); if let Some ( ch )= chars . next (){ Some (( ch , chars . as_str ()))} else { None }} let ( is_positive , src )= match slice_shift_char ( src ){ None => return Err ( PFE { kind : Empty }), Some (( '-' , "" ))=> return Err ( PFE { kind : Empty }), Some (( '-' , src ))=>( false , src ), Some ((_, _))=>( true , src ), }; let mut sig = if is_positive { 0.0 } else {- 0.0 }; let mut prev_sig = sig ; let mut cs = src . chars (). enumerate (); let mut exp_info = None ::< ( char , usize )>; for ( i , c ) in cs . by_ref (){ match c . to_digit ( radix ){ Some ( digit )=>{ sig = sig * ( radix as $t ); if is_positive { sig = sig + (( digit as isize ) as $t ); } else { sig = sig - (( digit as isize ) as $t ); } if prev_sig != 0.0 { if is_positive && sig <= prev_sig { return Ok ( core ::$t :: INFINITY ); } if ! is_positive && sig >= prev_sig { return Ok ( core ::$t :: NEG_INFINITY ); } if is_positive && ( prev_sig != ( sig - digit as $t )/ radix as $t ){ return Ok ( core ::$t :: INFINITY ); } if ! is_positive && ( prev_sig != ( sig + digit as $t )/ radix as $t ){ return Ok ( core ::$t :: NEG_INFINITY ); }} prev_sig = sig ; }, None => match c { 'e' | 'E' | 'p' | 'P' =>{ exp_info = Some (( c , i + 1 )); break ; }, '.' =>{ break ; }, _ =>{ return Err ( PFE { kind : Invalid }); }, }, }} if exp_info . is_none (){ let mut power = 1.0 ; for ( i , c ) in cs . by_ref (){ match c . 
to_digit ( radix ){ Some ( digit )=>{ power = power / ( radix as $t ); sig = if is_positive { sig + ( digit as $t )* power } else { sig - ( digit as $t )* power }; if is_positive && sig < prev_sig { return Ok ( core ::$t :: INFINITY ); } if ! is_positive && sig > prev_sig { return Ok ( core ::$t :: NEG_INFINITY ); } prev_sig = sig ; }, None => match c { 'e' | 'E' | 'p' | 'P' =>{ exp_info = Some (( c , i + 1 )); break ; }, _ =>{ return Err ( PFE { kind : Invalid }); }, }, }}} let exp = match exp_info { Some (( c , offset ))=>{ let base = match c { 'E' | 'e' if radix == 10 => 10.0 , 'P' | 'p' if radix == 16 => 2.0 , _ => return Err ( PFE { kind : Invalid }), }; let src = & src [ offset ..]; let ( is_positive , exp )= match slice_shift_char ( src ){ Some (( '-' , src ))=>( false , src . parse ::< usize > ()), Some (( '+' , src ))=>( true , src . parse ::< usize > ()), Some ((_, _))=>( true , src . parse ::< usize > ()), None => return Err ( PFE { kind : Invalid }), }; # [ cfg ( feature = "std" )] fn pow ( base : $t , exp : usize )-> $t { Float :: powi ( base , exp as i32 )} match ( is_positive , exp ){( true , Ok ( exp ))=> pow ( base , exp ), ( false , Ok ( exp ))=> 1.0 / pow ( base , exp ), (_, Err (_))=> return Err ( PFE { kind : Invalid }), }}, None => 1.0 , }; Ok ( sig * exp )}})*)}
+macro_rules! __ra_macro_fixture206 {($m : ident !! )=>($m ! {}); ($m : ident !! $h : ident , $($t : ident ,)* )=>($m ! {$h $($t )* } for_each_tuple_ ! {$m !! $($t ,)* }); }
+macro_rules! __ra_macro_fixture207 {($($name : ident )* )=>( impl <$($name : Bounded ,)*> Bounded for ($($name ,)*){# [ inline ] fn min_value ()-> Self {($($name :: min_value (),)*)}# [ inline ] fn max_value ()-> Self {($($name :: max_value (),)*)}}); }
+macro_rules! __ra_macro_fixture208 {($T : ty , $U : ty )=>{ impl Roots for $T {# [ inline ] fn nth_root (& self , n : u32 )-> Self { if * self >= 0 {(* self as $U ). nth_root ( n ) as Self } else { assert ! ( n . is_odd (), "even roots of a negative are imaginary" ); - (( self . wrapping_neg () as $U ). nth_root ( n ) as Self )}}# [ inline ] fn sqrt (& self )-> Self { assert ! (* self >= 0 , "the square root of a negative is imaginary" ); (* self as $U ). sqrt () as Self }# [ inline ] fn cbrt (& self )-> Self { if * self >= 0 {(* self as $U ). cbrt () as Self } else {- (( self . wrapping_neg () as $U ). cbrt () as Self )}}}}; }
+macro_rules! __ra_macro_fixture209 {($T : ident )=>{ impl Roots for $T {# [ inline ] fn nth_root (& self , n : u32 )-> Self { fn go ( a : $T , n : u32 )-> $T { match n { 0 => panic ! ( "can't find a root of degree 0!" ), 1 => return a , 2 => return a . sqrt (), 3 => return a . cbrt (), _ =>(), } if bits ::<$T > ()<= n || a < ( 1 << n ){ return ( a > 0 ) as $T ; } if bits ::<$T > ()> 64 { return if a <= core :: u64 :: MAX as $T {( a as u64 ). nth_root ( n ) as $T } else { let lo = ( a >> n ). nth_root ( n )<< 1 ; let hi = lo + 1 ; if hi . next_power_of_two (). trailing_zeros ()* n >= bits ::<$T > (){ match checked_pow ( hi , n as usize ){ Some ( x ) if x <= a => hi , _ => lo , }} else { if hi . pow ( n )<= a { hi } else { lo }}}; }# [ cfg ( feature = "std" )]# [ inline ] fn guess ( x : $T , n : u32 )-> $T { if bits ::<$T > ()<= 32 || x <= core :: u32 :: MAX as $T { 1 << (( log2 ( x )+ n - 1 )/ n )} else {(( x as f64 ). ln ()/ f64 :: from ( n )). exp () as $T }}# [ cfg ( not ( feature = "std" ))]# [ inline ] fn guess ( x : $T , n : u32 )-> $T { 1 << (( log2 ( x )+ n - 1 )/ n )} let n1 = n - 1 ; let next = | x : $T | { let y = match checked_pow ( x , n1 as usize ){ Some ( ax )=> a / ax , None => 0 , }; ( y + x * n1 as $T )/ n as $T }; fixpoint ( guess ( a , n ), next )} go (* self , n )}# [ inline ] fn sqrt (& self )-> Self { fn go ( a : $T )-> $T { if bits ::<$T > ()> 64 { return if a <= core :: u64 :: MAX as $T {( a as u64 ). sqrt () as $T } else { let lo = ( a >> 2u32 ). sqrt ()<< 1 ; let hi = lo + 1 ; if hi * hi <= a { hi } else { lo }}; } if a < 4 { return ( a > 0 ) as $T ; }# [ cfg ( feature = "std" )]# [ inline ] fn guess ( x : $T )-> $T {( x as f64 ). 
sqrt () as $T }# [ cfg ( not ( feature = "std" ))]# [ inline ] fn guess ( x : $T )-> $T { 1 << (( log2 ( x )+ 1 )/ 2 )} let next = | x : $T | ( a / x + x )>> 1 ; fixpoint ( guess ( a ), next )} go (* self )}# [ inline ] fn cbrt (& self )-> Self { fn go ( a : $T )-> $T { if bits ::<$T > ()> 64 { return if a <= core :: u64 :: MAX as $T {( a as u64 ). cbrt () as $T } else { let lo = ( a >> 3u32 ). cbrt ()<< 1 ; let hi = lo + 1 ; if hi * hi * hi <= a { hi } else { lo }}; } if bits ::<$T > ()<= 32 { let mut x = a ; let mut y2 = 0 ; let mut y = 0 ; let smax = bits ::<$T > ()/ 3 ; for s in ( 0 .. smax + 1 ). rev (){ let s = s * 3 ; y2 *= 4 ; y *= 2 ; let b = 3 * ( y2 + y )+ 1 ; if x >> s >= b { x -= b << s ; y2 += 2 * y + 1 ; y += 1 ; }} return y ; } if a < 8 { return ( a > 0 ) as $T ; } if a <= core :: u32 :: MAX as $T { return ( a as u32 ). cbrt () as $T ; }# [ cfg ( feature = "std" )]# [ inline ] fn guess ( x : $T )-> $T {( x as f64 ). cbrt () as $T }# [ cfg ( not ( feature = "std" ))]# [ inline ] fn guess ( x : $T )-> $T { 1 << (( log2 ( x )+ 2 )/ 3 )} let next = | x : $T | ( a / ( x * x )+ x * 2 )/ 3 ; fixpoint ( guess ( a ), next )} go (* self )}}}; }
+macro_rules! __ra_macro_fixture210 {($T : ty , $test_mod : ident )=>{ impl Integer for $T {# [ doc = " Floored integer division" ]# [ inline ] fn div_floor (& self , other : & Self )-> Self { let ( d , r )= self . div_rem ( other ); if ( r > 0 && * other < 0 )|| ( r < 0 && * other > 0 ){ d - 1 } else { d }}# [ doc = " Floored integer modulo" ]# [ inline ] fn mod_floor (& self , other : & Self )-> Self { let r = * self % * other ; if ( r > 0 && * other < 0 )|| ( r < 0 && * other > 0 ){ r + * other } else { r }}# [ doc = " Calculates `div_floor` and `mod_floor` simultaneously" ]# [ inline ] fn div_mod_floor (& self , other : & Self )-> ( Self , Self ){ let ( d , r )= self . div_rem ( other ); if ( r > 0 && * other < 0 )|| ( r < 0 && * other > 0 ){( d - 1 , r + * other )} else {( d , r )}}# [ inline ] fn div_ceil (& self , other : & Self )-> Self { let ( d , r )= self . div_rem ( other ); if ( r > 0 && * other > 0 )|| ( r < 0 && * other < 0 ){ d + 1 } else { d }}# [ doc = " Calculates the Greatest Common Divisor (GCD) of the number and" ]# [ doc = " `other`. The result is always positive." ]# [ inline ] fn gcd (& self , other : & Self )-> Self { let mut m = * self ; let mut n = * other ; if m == 0 || n == 0 { return ( m | n ). abs (); } let shift = ( m | n ). trailing_zeros (); if m == Self :: min_value ()|| n == Self :: min_value (){ return ( 1 << shift ). abs (); } m = m . abs (); n = n . abs (); m >>= m . trailing_zeros (); n >>= n . trailing_zeros (); while m != n { if m > n { m -= n ; m >>= m . trailing_zeros (); } else { n -= m ; n >>= n . trailing_zeros (); }} m << shift }# [ inline ] fn extended_gcd_lcm (& self , other : & Self )-> ( ExtendedGcd < Self >, Self ){ let egcd = self . extended_gcd ( other ); let lcm = if egcd . gcd . is_zero (){ Self :: zero ()} else {(* self * (* other / egcd . gcd )). abs ()}; ( egcd , lcm )}# [ doc = " Calculates the Lowest Common Multiple (LCM) of the number and" ]# [ doc = " `other`." 
]# [ inline ] fn lcm (& self , other : & Self )-> Self { self . gcd_lcm ( other ). 1 }# [ doc = " Calculates the Greatest Common Divisor (GCD) and" ]# [ doc = " Lowest Common Multiple (LCM) of the number and `other`." ]# [ inline ] fn gcd_lcm (& self , other : & Self )-> ( Self , Self ){ if self . is_zero ()&& other . is_zero (){ return ( Self :: zero (), Self :: zero ()); } let gcd = self . gcd ( other ); let lcm = (* self * (* other / gcd )). abs (); ( gcd , lcm )}# [ doc = " Deprecated, use `is_multiple_of` instead." ]# [ inline ] fn divides (& self , other : & Self )-> bool { self . is_multiple_of ( other )}# [ doc = " Returns `true` if the number is a multiple of `other`." ]# [ inline ] fn is_multiple_of (& self , other : & Self )-> bool {* self % * other == 0 }# [ doc = " Returns `true` if the number is divisible by `2`" ]# [ inline ] fn is_even (& self )-> bool {(* self )& 1 == 0 }# [ doc = " Returns `true` if the number is not divisible by `2`" ]# [ inline ] fn is_odd (& self )-> bool {! self . is_even ()}# [ doc = " Simultaneous truncated integer division and modulus." ]# [ inline ] fn div_rem (& self , other : & Self )-> ( Self , Self ){(* self / * other , * self % * other )}}# [ cfg ( test )] mod $test_mod { use core :: mem ; use Integer ; # [ doc = " Checks that the division rule holds for:" ]# [ doc = "" ]# [ doc = " - `n`: numerator (dividend)" ]# [ doc = " - `d`: denominator (divisor)" ]# [ doc = " - `qr`: quotient and remainder" ]# [ cfg ( test )] fn test_division_rule (( n , d ): ($T , $T ), ( q , r ): ($T , $T )){ assert_eq ! ( d * q + r , n ); }# [ test ] fn test_div_rem (){ fn test_nd_dr ( nd : ($T , $T ), qr : ($T , $T )){ let ( n , d )= nd ; let separate_div_rem = ( n / d , n % d ); let combined_div_rem = n . div_rem (& d ); assert_eq ! ( separate_div_rem , qr ); assert_eq ! 
( combined_div_rem , qr ); test_division_rule ( nd , separate_div_rem ); test_division_rule ( nd , combined_div_rem ); } test_nd_dr (( 8 , 3 ), ( 2 , 2 )); test_nd_dr (( 8 , - 3 ), (- 2 , 2 )); test_nd_dr ((- 8 , 3 ), (- 2 , - 2 )); test_nd_dr ((- 8 , - 3 ), ( 2 , - 2 )); test_nd_dr (( 1 , 2 ), ( 0 , 1 )); test_nd_dr (( 1 , - 2 ), ( 0 , 1 )); test_nd_dr ((- 1 , 2 ), ( 0 , - 1 )); test_nd_dr ((- 1 , - 2 ), ( 0 , - 1 )); }# [ test ] fn test_div_mod_floor (){ fn test_nd_dm ( nd : ($T , $T ), dm : ($T , $T )){ let ( n , d )= nd ; let separate_div_mod_floor = ( n . div_floor (& d ), n . mod_floor (& d )); let combined_div_mod_floor = n . div_mod_floor (& d ); assert_eq ! ( separate_div_mod_floor , dm ); assert_eq ! ( combined_div_mod_floor , dm ); test_division_rule ( nd , separate_div_mod_floor ); test_division_rule ( nd , combined_div_mod_floor ); } test_nd_dm (( 8 , 3 ), ( 2 , 2 )); test_nd_dm (( 8 , - 3 ), (- 3 , - 1 )); test_nd_dm ((- 8 , 3 ), (- 3 , 1 )); test_nd_dm ((- 8 , - 3 ), ( 2 , - 2 )); test_nd_dm (( 1 , 2 ), ( 0 , 1 )); test_nd_dm (( 1 , - 2 ), (- 1 , - 1 )); test_nd_dm ((- 1 , 2 ), (- 1 , 1 )); test_nd_dm ((- 1 , - 2 ), ( 0 , - 1 )); }# [ test ] fn test_gcd (){ assert_eq ! (( 10 as $T ). gcd (& 2 ), 2 as $T ); assert_eq ! (( 10 as $T ). gcd (& 3 ), 1 as $T ); assert_eq ! (( 0 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! (( 3 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! (( 56 as $T ). gcd (& 42 ), 14 as $T ); assert_eq ! (( 3 as $T ). gcd (&- 3 ), 3 as $T ); assert_eq ! ((- 6 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! ((- 4 as $T ). gcd (&- 2 ), 2 as $T ); }# [ test ] fn test_gcd_cmp_with_euclidean (){ fn euclidean_gcd ( mut m : $T , mut n : $T )-> $T { while m != 0 { mem :: swap (& mut m , & mut n ); m %= n ; } n . abs ()} for i in - 127 .. 127 { for j in - 127 .. 127 { assert_eq ! ( euclidean_gcd ( i , j ), i . gcd (& j )); }} let i = 127 ; for j in - 127 .. 127 { assert_eq ! ( euclidean_gcd ( i , j ), i . gcd (& j )); } assert_eq ! ( 127 . 
gcd (& 127 ), 127 ); }# [ test ] fn test_gcd_min_val (){ let min = <$T >:: min_value (); let max = <$T >:: max_value (); let max_pow2 = max / 2 + 1 ; assert_eq ! ( min . gcd (& max ), 1 as $T ); assert_eq ! ( max . gcd (& min ), 1 as $T ); assert_eq ! ( min . gcd (& max_pow2 ), max_pow2 ); assert_eq ! ( max_pow2 . gcd (& min ), max_pow2 ); assert_eq ! ( min . gcd (& 42 ), 2 as $T ); assert_eq ! (( 42 as $T ). gcd (& min ), 2 as $T ); }# [ test ]# [ should_panic ] fn test_gcd_min_val_min_val (){ let min = <$T >:: min_value (); assert ! ( min . gcd (& min )>= 0 ); }# [ test ]# [ should_panic ] fn test_gcd_min_val_0 (){ let min = <$T >:: min_value (); assert ! ( min . gcd (& 0 )>= 0 ); }# [ test ]# [ should_panic ] fn test_gcd_0_min_val (){ let min = <$T >:: min_value (); assert ! (( 0 as $T ). gcd (& min )>= 0 ); }# [ test ] fn test_lcm (){ assert_eq ! (( 1 as $T ). lcm (& 0 ), 0 as $T ); assert_eq ! (( 0 as $T ). lcm (& 1 ), 0 as $T ); assert_eq ! (( 1 as $T ). lcm (& 1 ), 1 as $T ); assert_eq ! ((- 1 as $T ). lcm (& 1 ), 1 as $T ); assert_eq ! (( 1 as $T ). lcm (&- 1 ), 1 as $T ); assert_eq ! ((- 1 as $T ). lcm (&- 1 ), 1 as $T ); assert_eq ! (( 8 as $T ). lcm (& 9 ), 72 as $T ); assert_eq ! (( 11 as $T ). lcm (& 5 ), 55 as $T ); }# [ test ] fn test_gcd_lcm (){ use core :: iter :: once ; for i in once ( 0 ). chain (( 1 ..). take ( 127 ). flat_map (| a | once ( a ). chain ( once (- a )))). chain ( once (- 128 )){ for j in once ( 0 ). chain (( 1 ..). take ( 127 ). flat_map (| a | once ( a ). chain ( once (- a )))). chain ( once (- 128 )){ assert_eq ! ( i . gcd_lcm (& j ), ( i . gcd (& j ), i . lcm (& j ))); }}}# [ test ] fn test_extended_gcd_lcm (){ use core :: fmt :: Debug ; use traits :: NumAssign ; use ExtendedGcd ; fn check < A : Copy + Debug + Integer + NumAssign > ( a : A , b : A ){ let ExtendedGcd { gcd , x , y , .. }= a . extended_gcd (& b ); assert_eq ! ( gcd , x * a + y * b ); } use core :: iter :: once ; for i in once ( 0 ). chain (( 1 ..). take ( 127 ). 
flat_map (| a | once ( a ). chain ( once (- a )))). chain ( once (- 128 )){ for j in once ( 0 ). chain (( 1 ..). take ( 127 ). flat_map (| a | once ( a ). chain ( once (- a )))). chain ( once (- 128 )){ check ( i , j ); let ( ExtendedGcd { gcd , .. }, lcm )= i . extended_gcd_lcm (& j ); assert_eq ! (( gcd , lcm ), ( i . gcd (& j ), i . lcm (& j ))); }}}# [ test ] fn test_even (){ assert_eq ! ((- 4 as $T ). is_even (), true ); assert_eq ! ((- 3 as $T ). is_even (), false ); assert_eq ! ((- 2 as $T ). is_even (), true ); assert_eq ! ((- 1 as $T ). is_even (), false ); assert_eq ! (( 0 as $T ). is_even (), true ); assert_eq ! (( 1 as $T ). is_even (), false ); assert_eq ! (( 2 as $T ). is_even (), true ); assert_eq ! (( 3 as $T ). is_even (), false ); assert_eq ! (( 4 as $T ). is_even (), true ); }# [ test ] fn test_odd (){ assert_eq ! ((- 4 as $T ). is_odd (), false ); assert_eq ! ((- 3 as $T ). is_odd (), true ); assert_eq ! ((- 2 as $T ). is_odd (), false ); assert_eq ! ((- 1 as $T ). is_odd (), true ); assert_eq ! (( 0 as $T ). is_odd (), false ); assert_eq ! (( 1 as $T ). is_odd (), true ); assert_eq ! (( 2 as $T ). is_odd (), false ); assert_eq ! (( 3 as $T ). is_odd (), true ); assert_eq ! (( 4 as $T ). is_odd (), false ); }}}; }
+macro_rules! __ra_macro_fixture211 {($T : ty , $test_mod : ident )=>{ impl Integer for $T {# [ doc = " Unsigned integer division. Returns the same result as `div` (`/`)." ]# [ inline ] fn div_floor (& self , other : & Self )-> Self {* self / * other }# [ doc = " Unsigned integer modulo operation. Returns the same result as `rem` (`%`)." ]# [ inline ] fn mod_floor (& self , other : & Self )-> Self {* self % * other }# [ inline ] fn div_ceil (& self , other : & Self )-> Self {* self / * other + ( 0 != * self % * other ) as Self }# [ doc = " Calculates the Greatest Common Divisor (GCD) of the number and `other`" ]# [ inline ] fn gcd (& self , other : & Self )-> Self { let mut m = * self ; let mut n = * other ; if m == 0 || n == 0 { return m | n ; } let shift = ( m | n ). trailing_zeros (); m >>= m . trailing_zeros (); n >>= n . trailing_zeros (); while m != n { if m > n { m -= n ; m >>= m . trailing_zeros (); } else { n -= m ; n >>= n . trailing_zeros (); }} m << shift }# [ inline ] fn extended_gcd_lcm (& self , other : & Self )-> ( ExtendedGcd < Self >, Self ){ let egcd = self . extended_gcd ( other ); let lcm = if egcd . gcd . is_zero (){ Self :: zero ()} else {* self * (* other / egcd . gcd )}; ( egcd , lcm )}# [ doc = " Calculates the Lowest Common Multiple (LCM) of the number and `other`." ]# [ inline ] fn lcm (& self , other : & Self )-> Self { self . gcd_lcm ( other ). 1 }# [ doc = " Calculates the Greatest Common Divisor (GCD) and" ]# [ doc = " Lowest Common Multiple (LCM) of the number and `other`." ]# [ inline ] fn gcd_lcm (& self , other : & Self )-> ( Self , Self ){ if self . is_zero ()&& other . is_zero (){ return ( Self :: zero (), Self :: zero ()); } let gcd = self . gcd ( other ); let lcm = * self * (* other / gcd ); ( gcd , lcm )}# [ doc = " Deprecated, use `is_multiple_of` instead." ]# [ inline ] fn divides (& self , other : & Self )-> bool { self . is_multiple_of ( other )}# [ doc = " Returns `true` if the number is a multiple of `other`." 
]# [ inline ] fn is_multiple_of (& self , other : & Self )-> bool {* self % * other == 0 }# [ doc = " Returns `true` if the number is divisible by `2`." ]# [ inline ] fn is_even (& self )-> bool {* self % 2 == 0 }# [ doc = " Returns `true` if the number is not divisible by `2`." ]# [ inline ] fn is_odd (& self )-> bool {! self . is_even ()}# [ doc = " Simultaneous truncated integer division and modulus." ]# [ inline ] fn div_rem (& self , other : & Self )-> ( Self , Self ){(* self / * other , * self % * other )}}# [ cfg ( test )] mod $test_mod { use core :: mem ; use Integer ; # [ test ] fn test_div_mod_floor (){ assert_eq ! (( 10 as $T ). div_floor (& ( 3 as $T )), 3 as $T ); assert_eq ! (( 10 as $T ). mod_floor (& ( 3 as $T )), 1 as $T ); assert_eq ! (( 10 as $T ). div_mod_floor (& ( 3 as $T )), ( 3 as $T , 1 as $T )); assert_eq ! (( 5 as $T ). div_floor (& ( 5 as $T )), 1 as $T ); assert_eq ! (( 5 as $T ). mod_floor (& ( 5 as $T )), 0 as $T ); assert_eq ! (( 5 as $T ). div_mod_floor (& ( 5 as $T )), ( 1 as $T , 0 as $T )); assert_eq ! (( 3 as $T ). div_floor (& ( 7 as $T )), 0 as $T ); assert_eq ! (( 3 as $T ). mod_floor (& ( 7 as $T )), 3 as $T ); assert_eq ! (( 3 as $T ). div_mod_floor (& ( 7 as $T )), ( 0 as $T , 3 as $T )); }# [ test ] fn test_gcd (){ assert_eq ! (( 10 as $T ). gcd (& 2 ), 2 as $T ); assert_eq ! (( 10 as $T ). gcd (& 3 ), 1 as $T ); assert_eq ! (( 0 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! (( 3 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! (( 56 as $T ). gcd (& 42 ), 14 as $T ); }# [ test ] fn test_gcd_cmp_with_euclidean (){ fn euclidean_gcd ( mut m : $T , mut n : $T )-> $T { while m != 0 { mem :: swap (& mut m , & mut n ); m %= n ; } n } for i in 0 .. 255 { for j in 0 .. 255 { assert_eq ! ( euclidean_gcd ( i , j ), i . gcd (& j )); }} let i = 255 ; for j in 0 .. 255 { assert_eq ! ( euclidean_gcd ( i , j ), i . gcd (& j )); } assert_eq ! ( 255 . gcd (& 255 ), 255 ); }# [ test ] fn test_lcm (){ assert_eq ! (( 1 as $T ). 
lcm (& 0 ), 0 as $T ); assert_eq ! (( 0 as $T ). lcm (& 1 ), 0 as $T ); assert_eq ! (( 1 as $T ). lcm (& 1 ), 1 as $T ); assert_eq ! (( 8 as $T ). lcm (& 9 ), 72 as $T ); assert_eq ! (( 11 as $T ). lcm (& 5 ), 55 as $T ); assert_eq ! (( 15 as $T ). lcm (& 17 ), 255 as $T ); }# [ test ] fn test_gcd_lcm (){ for i in ( 0 ..). take ( 256 ){ for j in ( 0 ..). take ( 256 ){ assert_eq ! ( i . gcd_lcm (& j ), ( i . gcd (& j ), i . lcm (& j ))); }}}# [ test ] fn test_is_multiple_of (){ assert ! (( 6 as $T ). is_multiple_of (& ( 6 as $T ))); assert ! (( 6 as $T ). is_multiple_of (& ( 3 as $T ))); assert ! (( 6 as $T ). is_multiple_of (& ( 1 as $T ))); }# [ test ] fn test_even (){ assert_eq ! (( 0 as $T ). is_even (), true ); assert_eq ! (( 1 as $T ). is_even (), false ); assert_eq ! (( 2 as $T ). is_even (), true ); assert_eq ! (( 3 as $T ). is_even (), false ); assert_eq ! (( 4 as $T ). is_even (), true ); }# [ test ] fn test_odd (){ assert_eq ! (( 0 as $T ). is_odd (), false ); assert_eq ! (( 1 as $T ). is_odd (), true ); assert_eq ! (( 2 as $T ). is_odd (), false ); assert_eq ! (( 3 as $T ). is_odd (), true ); assert_eq ! (( 4 as $T ). is_odd (), false ); }}}; }
+macro_rules! __ra_macro_fixture212 {($I : ident , $U : ident )=>{ mod $I { use check ; use neg ; use num_integer :: Roots ; use pos ; use std :: mem ; # [ test ]# [ should_panic ] fn zeroth_root (){( 123 as $I ). nth_root ( 0 ); }# [ test ] fn sqrt (){ check (& pos ::<$I > (), 2 ); }# [ test ]# [ should_panic ] fn sqrt_neg (){(- 123 as $I ). sqrt (); }# [ test ] fn cbrt (){ check (& pos ::<$I > (), 3 ); }# [ test ] fn cbrt_neg (){ check (& neg ::<$I > (), 3 ); }# [ test ] fn nth_root (){ let bits = 8 * mem :: size_of ::<$I > () as u32 - 1 ; let pos = pos ::<$I > (); for n in 4 .. bits { check (& pos , n ); }}# [ test ] fn nth_root_neg (){ let bits = 8 * mem :: size_of ::<$I > () as u32 - 1 ; let neg = neg ::<$I > (); for n in 2 .. bits / 2 { check (& neg , 2 * n + 1 ); }}# [ test ] fn bit_size (){ let bits = 8 * mem :: size_of ::<$I > () as u32 - 1 ; assert_eq ! ($I :: max_value (). nth_root ( bits - 1 ), 2 ); assert_eq ! ($I :: max_value (). nth_root ( bits ), 1 ); assert_eq ! ($I :: min_value (). nth_root ( bits ), - 2 ); assert_eq ! (($I :: min_value ()+ 1 ). nth_root ( bits ), - 1 ); }} mod $U { use check ; use num_integer :: Roots ; use pos ; use std :: mem ; # [ test ]# [ should_panic ] fn zeroth_root (){( 123 as $U ). nth_root ( 0 ); }# [ test ] fn sqrt (){ check (& pos ::<$U > (), 2 ); }# [ test ] fn cbrt (){ check (& pos ::<$U > (), 3 ); }# [ test ] fn nth_root (){ let bits = 8 * mem :: size_of ::<$I > () as u32 - 1 ; let pos = pos ::<$I > (); for n in 4 .. bits { check (& pos , n ); }}# [ test ] fn bit_size (){ let bits = 8 * mem :: size_of ::<$U > () as u32 ; assert_eq ! ($U :: max_value (). nth_root ( bits - 1 ), 2 ); assert_eq ! ($U :: max_value (). nth_root ( bits ), 1 ); }}}; }
+macro_rules! __ra_macro_fixture213 {($name : ident , $ranges : ident )=>{# [ test ] fn $name (){ let set = ranges_to_set ( general_category ::$ranges ); let hashset : HashSet < u32 > = set . iter (). cloned (). collect (); let trie = TrieSetOwned :: from_codepoints (& set ). unwrap (); for cp in 0 .. 0x110000 { assert ! ( trie . contains_u32 ( cp )== hashset . contains (& cp )); } assert ! (! trie . contains_u32 ( 0x110000 )); assert ! (! hashset . contains (& 0x110000 )); }}; }
+macro_rules! __ra_macro_fixture214 {{$(mod $module : ident ; [$($prop : ident , )*]; )*}=>{$(# [ allow ( unused )] mod $module ; $(pub fn $prop ( c : char )-> bool { self ::$module ::$prop . contains_char ( c )})* )*}; }
+macro_rules! __ra_macro_fixture215 {($name : ident : $input : expr , $($x : tt )* )=>{# [ test ] fn $name (){ let expected_sets = vec ! [$($x )*]; let range_set : RangeSet = $input . parse (). expect ( "parse failed" ); assert_eq ! ( range_set . ranges . len (), expected_sets . len ()); for it in range_set . ranges . iter (). zip ( expected_sets . iter ()){ let ( ai , bi )= it ; assert_eq ! ( ai . comparator_set . len (), * bi ); }}}; }
+macro_rules! __ra_macro_fixture216 {($name : ident : $input : expr , $($x : tt )* )=>{# [ test ] fn $name (){ let expected_sets = vec ! [$($x )*]; let range_set = RangeSet :: parse ($input , Compat :: Npm ). expect ( "parse failed" ); assert_eq ! ( range_set . ranges . len (), expected_sets . len ()); for it in range_set . ranges . iter (). zip ( expected_sets . iter ()){ let ( ai , bi )= it ; assert_eq ! ( ai . comparator_set . len (), * bi ); }}}; }
+macro_rules! __ra_macro_fixture217 {($($name : ident : $value : expr , )* )=>{$(# [ test ] fn $name (){ assert ! ($value . parse ::< RangeSet > (). is_err ()); })* }; }
+macro_rules! __ra_macro_fixture218 {($($name : ident : $value : expr , )* )=>{$(# [ test ] fn $name (){ let ( input , expected_range )= $value ; let parsed_range = parse_range ( input ); let range = from_pair_iterator ( parsed_range , range_set :: Compat :: Cargo ). expect ( "parsing failed" ); let num_comparators = range . comparator_set . len (); let expected_comparators = expected_range . comparator_set . len (); assert_eq ! ( expected_comparators , num_comparators , "expected number of comparators: {}, got: {}" , expected_comparators , num_comparators ); assert_eq ! ( range , expected_range ); })* }; }
+macro_rules! __ra_macro_fixture219 {($($name : ident : $value : expr , )* )=>{$(# [ test ] fn $name (){ let ( input , expected_range )= $value ; let parsed_range = parse_range ( input ); let range = from_pair_iterator ( parsed_range , range_set :: Compat :: Npm ). expect ( "parsing failed" ); let num_comparators = range . comparator_set . len (); let expected_comparators = expected_range . comparator_set . len (); assert_eq ! ( expected_comparators , num_comparators , "expected number of comparators: {}, got: {}" , expected_comparators , num_comparators ); assert_eq ! ( range , expected_range ); })* }; }
+macro_rules! __ra_macro_fixture220 {($ty : ident $(<$lifetime : tt >)*)=>{ impl <$($lifetime ,)* E > Copy for $ty <$($lifetime ,)* E > {} impl <$($lifetime ,)* E > Clone for $ty <$($lifetime ,)* E > { fn clone (& self )-> Self {* self }}}; }
+macro_rules! __ra_macro_fixture221 {($ty : ty , $doc : tt , $name : ident , $method : ident $($cast : tt )*)=>{# [ doc = "A deserializer holding" ]# [ doc = $doc ] pub struct $name < E > { value : $ty , marker : PhantomData < E > } impl_copy_clone ! ($name ); impl < 'de , E > IntoDeserializer < 'de , E > for $ty where E : de :: Error , { type Deserializer = $name < E >; fn into_deserializer ( self )-> $name < E > {$name { value : self , marker : PhantomData , }}} impl < 'de , E > de :: Deserializer < 'de > for $name < E > where E : de :: Error , { type Error = E ; forward_to_deserialize_any ! { bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string bytes byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct map struct enum identifier ignored_any } fn deserialize_any < V > ( self , visitor : V )-> Result < V :: Value , Self :: Error > where V : de :: Visitor < 'de >, { visitor .$method ( self . value $($cast )*)}} impl < E > Debug for $name < E > { fn fmt (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . debug_struct ( stringify ! ($name )). field ( "value" , & self . value ). finish ()}}}}
+macro_rules! __ra_macro_fixture222 {($($tt : tt )*)=>{}; }
+macro_rules! __ra_macro_fixture223 {($ty : ident , $deserialize : ident $($methods : tt )*)=>{ impl < 'de > Deserialize < 'de > for $ty {# [ inline ] fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct PrimitiveVisitor ; impl < 'de > Visitor < 'de > for PrimitiveVisitor { type Value = $ty ; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( stringify ! ($ty ))}$($methods )* } deserializer .$deserialize ( PrimitiveVisitor )}}}; }
+macro_rules! __ra_macro_fixture224 {($ty : ident < T $(: $tbound1 : ident $(+ $tbound2 : ident )*)* $(, $typaram : ident : $bound1 : ident $(+ $bound2 : ident )*)* >, $access : ident , $clear : expr , $with_capacity : expr , $reserve : expr , $insert : expr )=>{ impl < 'de , T $(, $typaram )*> Deserialize < 'de > for $ty < T $(, $typaram )*> where T : Deserialize < 'de > $(+ $tbound1 $(+ $tbound2 )*)*, $($typaram : $bound1 $(+ $bound2 )*,)* { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct SeqVisitor < T $(, $typaram )*> { marker : PhantomData <$ty < T $(, $typaram )*>>, } impl < 'de , T $(, $typaram )*> Visitor < 'de > for SeqVisitor < T $(, $typaram )*> where T : Deserialize < 'de > $(+ $tbound1 $(+ $tbound2 )*)*, $($typaram : $bound1 $(+ $bound2 )*,)* { type Value = $ty < T $(, $typaram )*>; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( "a sequence" )}# [ inline ] fn visit_seq < A > ( self , mut $access : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, { let mut values = $with_capacity ; while let Some ( value )= try ! ($access . next_element ()){$insert (& mut values , value ); } Ok ( values )}} let visitor = SeqVisitor { marker : PhantomData }; deserializer . deserialize_seq ( visitor )} fn deserialize_in_place < D > ( deserializer : D , place : & mut Self )-> Result < (), D :: Error > where D : Deserializer < 'de >, { struct SeqInPlaceVisitor < 'a , T : 'a $(, $typaram : 'a )*> (& 'a mut $ty < T $(, $typaram )*>); impl < 'a , 'de , T $(, $typaram )*> Visitor < 'de > for SeqInPlaceVisitor < 'a , T $(, $typaram )*> where T : Deserialize < 'de > $(+ $tbound1 $(+ $tbound2 )*)*, $($typaram : $bound1 $(+ $bound2 )*,)* { type Value = (); fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . 
write_str ( "a sequence" )}# [ inline ] fn visit_seq < A > ( mut self , mut $access : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, {$clear (& mut self . 0 ); $reserve (& mut self . 0 , size_hint :: cautious ($access . size_hint ())); while let Some ( value )= try ! ($access . next_element ()){$insert (& mut self . 0 , value ); } Ok (())}} deserializer . deserialize_seq ( SeqInPlaceVisitor ( place ))}}}}
+macro_rules! __ra_macro_fixture225 {($($len : expr =>($($n : tt )+))+)=>{$(impl < 'de , T > Visitor < 'de > for ArrayVisitor < [ T ; $len ]> where T : Deserialize < 'de >, { type Value = [ T ; $len ]; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( concat ! ( "an array of length " , $len ))}# [ inline ] fn visit_seq < A > ( self , mut seq : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, { Ok ([$(match try ! ( seq . next_element ()){ Some ( val )=> val , None => return Err ( Error :: invalid_length ($n , & self )), }),+])}} impl < 'a , 'de , T > Visitor < 'de > for ArrayInPlaceVisitor < 'a , [ T ; $len ]> where T : Deserialize < 'de >, { type Value = (); fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( concat ! ( "an array of length " , $len ))}# [ inline ] fn visit_seq < A > ( self , mut seq : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, { let mut fail_idx = None ; for ( idx , dest ) in self . 0 [..]. iter_mut (). enumerate (){ if try ! ( seq . next_element_seed ( InPlaceSeed ( dest ))). is_none (){ fail_idx = Some ( idx ); break ; }} if let Some ( idx )= fail_idx { return Err ( Error :: invalid_length ( idx , & self )); } Ok (())}} impl < 'de , T > Deserialize < 'de > for [ T ; $len ] where T : Deserialize < 'de >, { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { deserializer . deserialize_tuple ($len , ArrayVisitor ::< [ T ; $len ]>:: new ())} fn deserialize_in_place < D > ( deserializer : D , place : & mut Self )-> Result < (), D :: Error > where D : Deserializer < 'de >, { deserializer . deserialize_tuple ($len , ArrayInPlaceVisitor ( place ))}})+ }}
+macro_rules! __ra_macro_fixture226 {($($len : tt =>($($n : tt $name : ident )+))+)=>{$(impl < 'de , $($name : Deserialize < 'de >),+> Deserialize < 'de > for ($($name ,)+){# [ inline ] fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct TupleVisitor <$($name ,)+> { marker : PhantomData < ($($name ,)+)>, } impl < 'de , $($name : Deserialize < 'de >),+> Visitor < 'de > for TupleVisitor <$($name ,)+> { type Value = ($($name ,)+); fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( concat ! ( "a tuple of size " , $len ))}# [ inline ]# [ allow ( non_snake_case )] fn visit_seq < A > ( self , mut seq : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, {$(let $name = match try ! ( seq . next_element ()){ Some ( value )=> value , None => return Err ( Error :: invalid_length ($n , & self )), }; )+ Ok (($($name ,)+))}} deserializer . deserialize_tuple ($len , TupleVisitor { marker : PhantomData })}# [ inline ] fn deserialize_in_place < D > ( deserializer : D , place : & mut Self )-> Result < (), D :: Error > where D : Deserializer < 'de >, { struct TupleInPlaceVisitor < 'a , $($name : 'a ,)+> (& 'a mut ($($name ,)+)); impl < 'a , 'de , $($name : Deserialize < 'de >),+> Visitor < 'de > for TupleInPlaceVisitor < 'a , $($name ,)+> { type Value = (); fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( concat ! ( "a tuple of size " , $len ))}# [ inline ]# [ allow ( non_snake_case )] fn visit_seq < A > ( self , mut seq : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, {$(if try ! ( seq . next_element_seed ( InPlaceSeed (& mut ( self . 0 ).$n ))). is_none (){ return Err ( Error :: invalid_length ($n , & self )); })+ Ok (())}} deserializer . deserialize_tuple ($len , TupleInPlaceVisitor ( place ))}})+ }}
+macro_rules! __ra_macro_fixture227 {($ty : ident < K $(: $kbound1 : ident $(+ $kbound2 : ident )*)*, V $(, $typaram : ident : $bound1 : ident $(+ $bound2 : ident )*)* >, $access : ident , $with_capacity : expr )=>{ impl < 'de , K , V $(, $typaram )*> Deserialize < 'de > for $ty < K , V $(, $typaram )*> where K : Deserialize < 'de > $(+ $kbound1 $(+ $kbound2 )*)*, V : Deserialize < 'de >, $($typaram : $bound1 $(+ $bound2 )*),* { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct MapVisitor < K , V $(, $typaram )*> { marker : PhantomData <$ty < K , V $(, $typaram )*>>, } impl < 'de , K , V $(, $typaram )*> Visitor < 'de > for MapVisitor < K , V $(, $typaram )*> where K : Deserialize < 'de > $(+ $kbound1 $(+ $kbound2 )*)*, V : Deserialize < 'de >, $($typaram : $bound1 $(+ $bound2 )*),* { type Value = $ty < K , V $(, $typaram )*>; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( "a map" )}# [ inline ] fn visit_map < A > ( self , mut $access : A )-> Result < Self :: Value , A :: Error > where A : MapAccess < 'de >, { let mut values = $with_capacity ; while let Some (( key , value ))= try ! ($access . next_entry ()){ values . insert ( key , value ); } Ok ( values )}} let visitor = MapVisitor { marker : PhantomData }; deserializer . deserialize_map ( visitor )}}}}
+macro_rules! __ra_macro_fixture228 {($expecting : tt $ty : ty ; $size : tt )=>{ impl < 'de > Deserialize < 'de > for $ty { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { if deserializer . is_human_readable (){ deserializer . deserialize_str ( FromStrVisitor :: new ($expecting ))} else {< [ u8 ; $size ]>:: deserialize ( deserializer ). map (<$ty >:: from )}}}}; }
+macro_rules! __ra_macro_fixture229 {($expecting : tt $ty : ty , $new : expr )=>{ impl < 'de > Deserialize < 'de > for $ty { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { if deserializer . is_human_readable (){ deserializer . deserialize_str ( FromStrVisitor :: new ($expecting ))} else {< (_, u16 )>:: deserialize ( deserializer ). map (| ( ip , port )| $new ( ip , port ))}}}}; }
+macro_rules! __ra_macro_fixture230 {($name_kind : ident ($($variant : ident ; $bytes : expr ; $index : expr ),* )$expecting_message : expr , $variants_name : ident )=>{ enum $name_kind {$($variant ),* } static $variants_name : & 'static [& 'static str ]= & [$(stringify ! ($variant )),*]; impl < 'de > Deserialize < 'de > for $name_kind { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct KindVisitor ; impl < 'de > Visitor < 'de > for KindVisitor { type Value = $name_kind ; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ($expecting_message )} fn visit_u64 < E > ( self , value : u64 )-> Result < Self :: Value , E > where E : Error , { match value {$($index => Ok ($name_kind :: $variant ), )* _ => Err ( Error :: invalid_value ( Unexpected :: Unsigned ( value ), & self ),), }} fn visit_str < E > ( self , value : & str )-> Result < Self :: Value , E > where E : Error , { match value {$(stringify ! ($variant )=> Ok ($name_kind :: $variant ), )* _ => Err ( Error :: unknown_variant ( value , $variants_name )), }} fn visit_bytes < E > ( self , value : & [ u8 ])-> Result < Self :: Value , E > where E : Error , { match value {$($bytes => Ok ($name_kind :: $variant ), )* _ =>{ match str :: from_utf8 ( value ){ Ok ( value )=> Err ( Error :: unknown_variant ( value , $variants_name )), Err (_)=> Err ( Error :: invalid_value ( Unexpected :: Bytes ( value ), & self )), }}}}} deserializer . deserialize_identifier ( KindVisitor )}}}}
+macro_rules! __ra_macro_fixture231 {($(# [ doc = $doc : tt ])* ($($id : ident ),* ), $ty : ty , $func : expr )=>{$(# [ doc = $doc ])* impl < 'de $(, $id : Deserialize < 'de >,)*> Deserialize < 'de > for $ty { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { Deserialize :: deserialize ( deserializer ). map ($func )}}}}
+macro_rules! __ra_macro_fixture232 {($($T : ident , )+ )=>{$(# [ cfg ( num_nonzero )] impl < 'de > Deserialize < 'de > for num ::$T { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { let value = try ! ( Deserialize :: deserialize ( deserializer )); match < num ::$T >:: new ( value ){ Some ( nonzero )=> Ok ( nonzero ), None => Err ( Error :: custom ( "expected a non-zero value" )), }}})+ }; }
+macro_rules! __ra_macro_fixture233 {( Error : Sized $(+ $($supertrait : ident )::+)*)=>{# [ doc = " The `Error` trait allows `Deserialize` implementations to create descriptive" ]# [ doc = " error messages belonging to the `Deserializer` against which they are" ]# [ doc = " currently running." ]# [ doc = "" ]# [ doc = " Every `Deserializer` declares an `Error` type that encompasses both" ]# [ doc = " general-purpose deserialization errors as well as errors specific to the" ]# [ doc = " particular deserialization format. For example the `Error` type of" ]# [ doc = " `serde_json` can represent errors like an invalid JSON escape sequence or an" ]# [ doc = " unterminated string literal, in addition to the error cases that are part of" ]# [ doc = " this trait." ]# [ doc = "" ]# [ doc = " Most deserializers should only need to provide the `Error::custom` method" ]# [ doc = " and inherit the default behavior for the other methods." ]# [ doc = "" ]# [ doc = " # Example implementation" ]# [ doc = "" ]# [ doc = " The [example data format] presented on the website shows an error" ]# [ doc = " type appropriate for a basic JSON data format." ]# [ doc = "" ]# [ doc = " [example data format]: https://serde.rs/data-format.html" ] pub trait Error : Sized $(+ $($supertrait )::+)* {# [ doc = " Raised when there is general error when deserializing a type." ]# [ doc = "" ]# [ doc = " The message should not be capitalized and should not end with a period." 
]# [ doc = "" ]# [ doc = " ```edition2018" ]# [ doc = " # use std::str::FromStr;" ]# [ doc = " #" ]# [ doc = " # struct IpAddr;" ]# [ doc = " #" ]# [ doc = " # impl FromStr for IpAddr {" ]# [ doc = " # type Err = String;" ]# [ doc = " #" ]# [ doc = " # fn from_str(_: &str) -> Result<Self, String> {" ]# [ doc = " # unimplemented!()" ]# [ doc = " # }" ]# [ doc = " # }" ]# [ doc = " #" ]# [ doc = " use serde::de::{self, Deserialize, Deserializer};" ]# [ doc = "" ]# [ doc = " impl<\\\'de> Deserialize<\\\'de> for IpAddr {" ]# [ doc = " fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>" ]# [ doc = " where" ]# [ doc = " D: Deserializer<\\\'de>," ]# [ doc = " {" ]# [ doc = " let s = String::deserialize(deserializer)?;" ]# [ doc = " s.parse().map_err(de::Error::custom)" ]# [ doc = " }" ]# [ doc = " }" ]# [ doc = " ```" ] fn custom < T > ( msg : T )-> Self where T : Display ; # [ doc = " Raised when a `Deserialize` receives a type different from what it was" ]# [ doc = " expecting." ]# [ doc = "" ]# [ doc = " The `unexp` argument provides information about what type was received." ]# [ doc = " This is the type that was present in the input file or other source data" ]# [ doc = " of the Deserializer." ]# [ doc = "" ]# [ doc = " The `exp` argument provides information about what type was being" ]# [ doc = " expected. This is the type that is written in the program." ]# [ doc = "" ]# [ doc = " For example if we try to deserialize a String out of a JSON file" ]# [ doc = " containing an integer, the unexpected type is the integer and the" ]# [ doc = " expected type is the string." ]# [ cold ] fn invalid_type ( unexp : Unexpected , exp : & Expected )-> Self { Error :: custom ( format_args ! ( "invalid type: {}, expected {}" , unexp , exp ))}# [ doc = " Raised when a `Deserialize` receives a value of the right type but that" ]# [ doc = " is wrong for some other reason." ]# [ doc = "" ]# [ doc = " The `unexp` argument provides information about what value was received." 
]# [ doc = " This is the value that was present in the input file or other source" ]# [ doc = " data of the Deserializer." ]# [ doc = "" ]# [ doc = " The `exp` argument provides information about what value was being" ]# [ doc = " expected. This is the type that is written in the program." ]# [ doc = "" ]# [ doc = " For example if we try to deserialize a String out of some binary data" ]# [ doc = " that is not valid UTF-8, the unexpected value is the bytes and the" ]# [ doc = " expected value is a string." ]# [ cold ] fn invalid_value ( unexp : Unexpected , exp : & Expected )-> Self { Error :: custom ( format_args ! ( "invalid value: {}, expected {}" , unexp , exp ))}# [ doc = " Raised when deserializing a sequence or map and the input data contains" ]# [ doc = " too many or too few elements." ]# [ doc = "" ]# [ doc = " The `len` argument is the number of elements encountered. The sequence" ]# [ doc = " or map may have expected more arguments or fewer arguments." ]# [ doc = "" ]# [ doc = " The `exp` argument provides information about what data was being" ]# [ doc = " expected. For example `exp` might say that a tuple of size 6 was" ]# [ doc = " expected." ]# [ cold ] fn invalid_length ( len : usize , exp : & Expected )-> Self { Error :: custom ( format_args ! ( "invalid length {}, expected {}" , len , exp ))}# [ doc = " Raised when a `Deserialize` enum type received a variant with an" ]# [ doc = " unrecognized name." ]# [ cold ] fn unknown_variant ( variant : & str , expected : & 'static [& 'static str ])-> Self { if expected . is_empty (){ Error :: custom ( format_args ! ( "unknown variant `{}`, there are no variants" , variant ))} else { Error :: custom ( format_args ! ( "unknown variant `{}`, expected {}" , variant , OneOf { names : expected }))}}# [ doc = " Raised when a `Deserialize` struct type received a field with an" ]# [ doc = " unrecognized name." 
]# [ cold ] fn unknown_field ( field : & str , expected : & 'static [& 'static str ])-> Self { if expected . is_empty (){ Error :: custom ( format_args ! ( "unknown field `{}`, there are no fields" , field ))} else { Error :: custom ( format_args ! ( "unknown field `{}`, expected {}" , field , OneOf { names : expected }))}}# [ doc = " Raised when a `Deserialize` struct type expected to receive a required" ]# [ doc = " field with a particular name but that field was not present in the" ]# [ doc = " input." ]# [ cold ] fn missing_field ( field : & 'static str )-> Self { Error :: custom ( format_args ! ( "missing field `{}`" , field ))}# [ doc = " Raised when a `Deserialize` struct type received more than one of the" ]# [ doc = " same field." ]# [ cold ] fn duplicate_field ( field : & 'static str )-> Self { Error :: custom ( format_args ! ( "duplicate field `{}`" , field ))}}}}
+macro_rules! __ra_macro_fixture234 {($ty : ident , $method : ident $($cast : tt )*)=>{ impl Serialize for $ty {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { serializer .$method (* self $($cast )*)}}}}
+macro_rules! __ra_macro_fixture235 {($($len : tt )+)=>{$(impl < T > Serialize for [ T ; $len ] where T : Serialize , {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { let mut seq = try ! ( serializer . serialize_tuple ($len )); for e in self { try ! ( seq . serialize_element ( e )); } seq . end ()}})+ }}
+macro_rules! __ra_macro_fixture236 {($ty : ident < T $(: $tbound1 : ident $(+ $tbound2 : ident )*)* $(, $typaram : ident : $bound : ident )* >)=>{ impl < T $(, $typaram )*> Serialize for $ty < T $(, $typaram )*> where T : Serialize $(+ $tbound1 $(+ $tbound2 )*)*, $($typaram : $bound ,)* {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { serializer . collect_seq ( self )}}}}
+macro_rules! __ra_macro_fixture237 {($($len : expr =>($($n : tt $name : ident )+))+)=>{$(impl <$($name ),+> Serialize for ($($name ,)+) where $($name : Serialize ,)+ {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { let mut tuple = try ! ( serializer . serialize_tuple ($len )); $(try ! ( tuple . serialize_element (& self .$n )); )+ tuple . end ()}})+ }}
+macro_rules! __ra_macro_fixture238 {($ty : ident < K $(: $kbound1 : ident $(+ $kbound2 : ident )*)*, V $(, $typaram : ident : $bound : ident )* >)=>{ impl < K , V $(, $typaram )*> Serialize for $ty < K , V $(, $typaram )*> where K : Serialize $(+ $kbound1 $(+ $kbound2 )*)*, V : Serialize , $($typaram : $bound ,)* {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { serializer . collect_map ( self )}}}}
+macro_rules! __ra_macro_fixture239 {($(# [ doc = $doc : tt ])* <$($desc : tt )+ )=>{$(# [ doc = $doc ])* impl <$($desc )+ {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , {(** self ). serialize ( serializer )}}}; }
+macro_rules! __ra_macro_fixture240 {($($T : ident , )+ )=>{$(# [ cfg ( num_nonzero )] impl Serialize for num ::$T { fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { self . get (). serialize ( serializer )}})+ }}
+macro_rules! __ra_macro_fixture241 {( Error : Sized $(+ $($supertrait : ident )::+)*)=>{# [ doc = " Trait used by `Serialize` implementations to generically construct" ]# [ doc = " errors belonging to the `Serializer` against which they are" ]# [ doc = " currently running." ]# [ doc = "" ]# [ doc = " # Example implementation" ]# [ doc = "" ]# [ doc = " The [example data format] presented on the website shows an error" ]# [ doc = " type appropriate for a basic JSON data format." ]# [ doc = "" ]# [ doc = " [example data format]: https://serde.rs/data-format.html" ] pub trait Error : Sized $(+ $($supertrait )::+)* {# [ doc = " Used when a [`Serialize`] implementation encounters any error" ]# [ doc = " while serializing a type." ]# [ doc = "" ]# [ doc = " The message should not be capitalized and should not end with a" ]# [ doc = " period." ]# [ doc = "" ]# [ doc = " For example, a filesystem [`Path`] may refuse to serialize" ]# [ doc = " itself if it contains invalid UTF-8 data." ]# [ doc = "" ]# [ doc = " ```edition2018" ]# [ doc = " # struct Path;" ]# [ doc = " #" ]# [ doc = " # impl Path {" ]# [ doc = " # fn to_str(&self) -> Option<&str> {" ]# [ doc = " # unimplemented!()" ]# [ doc = " # }" ]# [ doc = " # }" ]# [ doc = " #" ]# [ doc = " use serde::ser::{self, Serialize, Serializer};" ]# [ doc = "" ]# [ doc = " impl Serialize for Path {" ]# [ doc = " fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>" ]# [ doc = " where" ]# [ doc = " S: Serializer," ]# [ doc = " {" ]# [ doc = " match self.to_str() {" ]# [ doc = " Some(s) => serializer.serialize_str(s)," ]# [ doc = " None => Err(ser::Error::custom(\\\"path contains invalid UTF-8 characters\\\"))," ]# [ doc = " }" ]# [ doc = " }" ]# [ doc = " }" ]# [ doc = " ```" ]# [ doc = "" ]# [ doc = " [`Path`]: https://doc.rust-lang.org/std/path/struct.Path.html" ]# [ doc = " [`Serialize`]: ../trait.Serialize.html" ] fn custom < T > ( msg : T )-> Self where T : Display ; }}}
+macro_rules! __ra_macro_fixture242 {($t : ty , $($attr : meta ),* )=>{$(# [$attr ])* impl < L , R > AsRef <$t > for Either < L , R > where L : AsRef <$t >, R : AsRef <$t > { fn as_ref (& self )-> &$t { either ! (* self , ref inner => inner . as_ref ())}}$(# [$attr ])* impl < L , R > AsMut <$t > for Either < L , R > where L : AsMut <$t >, R : AsMut <$t > { fn as_mut (& mut self )-> & mut $t { either ! (* self , ref mut inner => inner . as_mut ())}}}; }
+macro_rules! __ra_macro_fixture243 {($C : ident $P : ident ; $A : ident , $($I : ident ),* ; $($X : ident )*)=>(# [ derive ( Clone , Debug )] pub struct $C < I : Iterator > { item : Option < I :: Item >, iter : I , c : $P < I >, } impl < I : Iterator + Clone > From < I > for $C < I > { fn from ( mut iter : I )-> Self {$C { item : iter . next (), iter : iter . clone (), c : $P :: from ( iter ), }}} impl < I : Iterator + Clone > From < I > for $C < Fuse < I >> { fn from ( iter : I )-> Self { let mut iter = iter . fuse (); $C { item : iter . next (), iter : iter . clone (), c : $P :: from ( iter ), }}} impl < I , $A > Iterator for $C < I > where I : Iterator < Item = $A > + Clone , I :: Item : Clone { type Item = ($($I ),*); fn next (& mut self )-> Option < Self :: Item > { if let Some (($($X ),*,))= self . c . next (){ let z = self . item . clone (). unwrap (); Some (( z , $($X ),*))} else { self . item = self . iter . next (); self . item . clone (). and_then (| z | { self . c = $P :: from ( self . iter . clone ()); self . c . next (). map (| ($($X ),*,)| ( z , $($X ),*))})}}} impl < I , $A > HasCombination < I > for ($($I ),*) where I : Iterator < Item = $A > + Clone , I :: Item : Clone { type Combination = $C < Fuse < I >>; })}
+macro_rules! __ra_macro_fixture244 (($_A : ident , $_B : ident , )=>(); ($A : ident , $($B : ident ,)*)=>( impl_cons_iter ! ($($B ,)*); # [ allow ( non_snake_case )] impl < X , Iter , $($B ),*> Iterator for ConsTuples < Iter , (($($B ,)*), X )> where Iter : Iterator < Item = (($($B ,)*), X )>, { type Item = ($($B ,)* X , ); fn next (& mut self )-> Option < Self :: Item > { self . iter . next (). map (| (($($B ,)*), x )| ($($B ,)* x , ))} fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()} fn fold < Acc , Fold > ( self , accum : Acc , mut f : Fold )-> Acc where Fold : FnMut ( Acc , Self :: Item )-> Acc , { self . iter . fold ( accum , move | acc , (($($B ,)*), x )| f ( acc , ($($B ,)* x , )))}}# [ allow ( non_snake_case )] impl < X , Iter , $($B ),*> DoubleEndedIterator for ConsTuples < Iter , (($($B ,)*), X )> where Iter : DoubleEndedIterator < Item = (($($B ,)*), X )>, { fn next_back (& mut self )-> Option < Self :: Item > { self . iter . next (). map (| (($($B ,)*), x )| ($($B ,)* x , ))}}); );
+macro_rules! __ra_macro_fixture245 {($($fmt_trait : ident )*)=>{$(impl < 'a , I > fmt ::$fmt_trait for Format < 'a , I > where I : Iterator , I :: Item : fmt ::$fmt_trait , { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { self . format ( f , fmt ::$fmt_trait :: fmt )}})* }}
+macro_rules! __ra_macro_fixture246 {([$($typarm : tt )*]$type_ : ty )=>{ impl <$($typarm )*> PeekingNext for $type_ { fn peeking_next < F > (& mut self , accept : F )-> Option < Self :: Item > where F : FnOnce (& Self :: Item )-> bool { let saved_state = self . clone (); if let Some ( r )= self . next (){ if ! accept (& r ){* self = saved_state ; } else { return Some ( r )}} None }}}}
+macro_rules! __ra_macro_fixture247 {($dummy : ident ,)=>{}; ($dummy : ident , $($Y : ident ,)*)=>( impl_tuple_collect ! ($($Y ,)*); impl < A > TupleCollect for ($(ignore_ident ! ($Y , A ),)*){ type Item = A ; type Buffer = [ Option < A >; count_ident ! ($($Y ,)*)- 1 ]; # [ allow ( unused_assignments , unused_mut )] fn collect_from_iter < I > ( iter : I , buf : & mut Self :: Buffer )-> Option < Self > where I : IntoIterator < Item = A >{ let mut iter = iter . into_iter (); $(let mut $Y = None ; )* loop {$($Y = iter . next (); if $Y . is_none (){ break })* return Some (($($Y . unwrap ()),*,))} let mut i = 0 ; let mut s = buf . as_mut (); $(if i < s . len (){ s [ i ]= $Y ; i += 1 ; })* return None ; } fn collect_from_iter_no_buf < I > ( iter : I )-> Option < Self > where I : IntoIterator < Item = A >{ let mut iter = iter . into_iter (); Some (($({let $Y = iter . next ()?; $Y }, )*))} fn num_items ()-> usize { count_ident ! ($($Y ,)*)} fn left_shift_push (& mut self , mut item : A ){ use std :: mem :: replace ; let & mut ($(ref mut $Y ),*,)= self ; macro_rules ! replace_item {($i : ident )=>{ item = replace ($i , item ); }}; rev_for_each_ident ! ( replace_item , $($Y ,)*); drop ( item ); }})}
+macro_rules! __ra_macro_fixture248 {($($B : ident ),*)=>(# [ allow ( non_snake_case )] impl <$($B : IntoIterator ),*> From < ($($B ,)*)> for Zip < ($($B :: IntoIter ,)*)> { fn from ( t : ($($B ,)*))-> Self { let ($($B ,)*)= t ; Zip { t : ($($B . into_iter (),)*)}}}# [ allow ( non_snake_case )]# [ allow ( unused_assignments )] impl <$($B ),*> Iterator for Zip < ($($B ,)*)> where $($B : Iterator , )* { type Item = ($($B :: Item ,)*); fn next (& mut self )-> Option < Self :: Item > { let ($(ref mut $B ,)*)= self . t ; $(let $B = match $B . next (){ None => return None , Some ( elt )=> elt }; )* Some (($($B ,)*))} fn size_hint (& self )-> ( usize , Option < usize >){ let sh = (:: std :: usize :: MAX , None ); let ($(ref $B ,)*)= self . t ; $(let sh = size_hint :: min ($B . size_hint (), sh ); )* sh }}# [ allow ( non_snake_case )] impl <$($B ),*> ExactSizeIterator for Zip < ($($B ,)*)> where $($B : ExactSizeIterator , )* {}# [ allow ( non_snake_case )] impl <$($B ),*> DoubleEndedIterator for Zip < ($($B ,)*)> where $($B : DoubleEndedIterator + ExactSizeIterator , )* {# [ inline ] fn next_back (& mut self )-> Option < Self :: Item > { let ($(ref mut $B ,)*)= self . t ; let size = * [$($B . len (), )*]. iter (). min (). unwrap (); $(if $B . len ()!= size { for _ in 0 ..$B . len ()- size {$B . next_back (); }})* match ($($B . next_back (),)*){($(Some ($B ),)*)=> Some (($($B ,)*)), _ => None , }}}); }
+macro_rules! __ra_macro_fixture249 {($iter : ty =>$item : ty , impl $($args : tt )* )=>{ delegate_iterator ! {$iter =>$item , impl $($args )* } impl $($args )* IndexedParallelIterator for $iter { fn drive < C > ( self , consumer : C )-> C :: Result where C : Consumer < Self :: Item > { self . inner . drive ( consumer )} fn len (& self )-> usize { self . inner . len ()} fn with_producer < CB > ( self , callback : CB )-> CB :: Output where CB : ProducerCallback < Self :: Item > { self . inner . with_producer ( callback )}}}}
+macro_rules! __ra_macro_fixture250 {($t : ty =>$iter : ident <$($i : tt ),*>, impl $($args : tt )*)=>{ impl $($args )* IntoParallelIterator for $t { type Item = <$t as IntoIterator >:: Item ; type Iter = $iter <$($i ),*>; fn into_par_iter ( self )-> Self :: Iter { use std :: iter :: FromIterator ; $iter { inner : Vec :: from_iter ( self ). into_par_iter ()}}}}; }
+macro_rules! __ra_macro_fixture251 {($iter : ty =>$item : ty , impl $($args : tt )* )=>{ impl $($args )* ParallelIterator for $iter { type Item = $item ; fn drive_unindexed < C > ( self , consumer : C )-> C :: Result where C : UnindexedConsumer < Self :: Item > { self . inner . drive_unindexed ( consumer )} fn opt_len (& self )-> Option < usize > { self . inner . opt_len ()}}}}
+macro_rules! __ra_macro_fixture252 {($($Tuple : ident {$(($idx : tt )-> $T : ident )+ })+)=>{$(impl <$($T , )+> IntoParallelIterator for ($($T , )+) where $($T : IntoParallelIterator , $T :: Iter : IndexedParallelIterator , )+ { type Item = ($($T :: Item , )+); type Iter = MultiZip < ($($T :: Iter , )+)>; fn into_par_iter ( self )-> Self :: Iter { MultiZip { tuple : ($(self .$idx . into_par_iter (), )+ ), }}} impl < 'a , $($T , )+> IntoParallelIterator for & 'a ($($T , )+) where $($T : IntoParallelRefIterator < 'a >, $T :: Iter : IndexedParallelIterator , )+ { type Item = ($($T :: Item , )+); type Iter = MultiZip < ($($T :: Iter , )+)>; fn into_par_iter ( self )-> Self :: Iter { MultiZip { tuple : ($(self .$idx . par_iter (), )+ ), }}} impl < 'a , $($T , )+> IntoParallelIterator for & 'a mut ($($T , )+) where $($T : IntoParallelRefMutIterator < 'a >, $T :: Iter : IndexedParallelIterator , )+ { type Item = ($($T :: Item , )+); type Iter = MultiZip < ($($T :: Iter , )+)>; fn into_par_iter ( self )-> Self :: Iter { MultiZip { tuple : ($(self .$idx . par_iter_mut (), )+ ), }}} impl <$($T , )+> ParallelIterator for MultiZip < ($($T , )+)> where $($T : IndexedParallelIterator , )+ { type Item = ($($T :: Item , )+); fn drive_unindexed < CONSUMER > ( self , consumer : CONSUMER )-> CONSUMER :: Result where CONSUMER : UnindexedConsumer < Self :: Item >, { self . drive ( consumer )} fn opt_len (& self )-> Option < usize > { Some ( self . len ())}} impl <$($T , )+> IndexedParallelIterator for MultiZip < ($($T , )+)> where $($T : IndexedParallelIterator , )+ { fn drive < CONSUMER > ( self , consumer : CONSUMER )-> CONSUMER :: Result where CONSUMER : Consumer < Self :: Item >, { reduce ! ($(self . tuple .$idx ),+ => IndexedParallelIterator :: zip ). map ( flatten ! ($($T ),+)). drive ( consumer )} fn len (& self )-> usize { reduce ! ($(self . tuple .$idx . 
len ()),+ => Ord :: min )} fn with_producer < CB > ( self , callback : CB )-> CB :: Output where CB : ProducerCallback < Self :: Item >, { reduce ! ($(self . tuple .$idx ),+ => IndexedParallelIterator :: zip ). map ( flatten ! ($($T ),+)). with_producer ( callback )}})+ }}
+macro_rules! __ra_macro_fixture253 {($t : ty )=>{ impl ParallelIterator for Iter <$t > { type Item = $t ; fn drive_unindexed < C > ( self , consumer : C )-> C :: Result where C : UnindexedConsumer < Self :: Item >, { bridge ( self , consumer )} fn opt_len (& self )-> Option < usize > { Some ( self . len ())}} impl IndexedParallelIterator for Iter <$t > { fn drive < C > ( self , consumer : C )-> C :: Result where C : Consumer < Self :: Item >, { bridge ( self , consumer )} fn len (& self )-> usize { self . range . len ()} fn with_producer < CB > ( self , callback : CB )-> CB :: Output where CB : ProducerCallback < Self :: Item >, { callback . callback ( IterProducer { range : self . range })}} impl Producer for IterProducer <$t > { type Item = < Range <$t > as Iterator >:: Item ; type IntoIter = Range <$t >; fn into_iter ( self )-> Self :: IntoIter { self . range } fn split_at ( self , index : usize )-> ( Self , Self ){ assert ! ( index <= self . range . len ()); let mid = self . range . start . wrapping_add ( index as $t ); let left = self . range . start .. mid ; let right = mid .. self . range . end ; ( IterProducer { range : left }, IterProducer { range : right })}}}; }
+macro_rules! __ra_macro_fixture254 {($t : ty , $len_t : ty )=>{ impl UnindexedRangeLen <$len_t > for Range <$t > { fn len (& self )-> $len_t { let & Range { start , end }= self ; if end > start { end . wrapping_sub ( start ) as $len_t } else { 0 }}} impl ParallelIterator for Iter <$t > { type Item = $t ; fn drive_unindexed < C > ( self , consumer : C )-> C :: Result where C : UnindexedConsumer < Self :: Item >, {# [ inline ] fn offset ( start : $t )-> impl Fn ( usize )-> $t { move | i | start . wrapping_add ( i as $t )} if let Some ( len )= self . opt_len (){( 0 .. len ). into_par_iter (). map ( offset ( self . range . start )). drive ( consumer )} else { bridge_unindexed ( IterProducer { range : self . range }, consumer )}} fn opt_len (& self )-> Option < usize > { let len = self . range . len (); if len <= usize :: MAX as $len_t { Some ( len as usize )} else { None }}} impl UnindexedProducer for IterProducer <$t > { type Item = $t ; fn split ( mut self )-> ( Self , Option < Self >){ let index = self . range . len ()/ 2 ; if index > 0 { let mid = self . range . start . wrapping_add ( index as $t ); let right = mid .. self . range . end ; self . range . end = mid ; ( self , Some ( IterProducer { range : right }))} else {( self , None )}} fn fold_with < F > ( self , folder : F )-> F where F : Folder < Self :: Item >, { folder . consume_iter ( self )}}}; }
+macro_rules! __ra_macro_fixture255 {($t : ty )=>{ parallel_range_impl ! {$t } impl IndexedParallelIterator for Iter <$t > { fn drive < C > ( self , consumer : C )-> C :: Result where C : Consumer < Self :: Item >, { convert ! ( self . drive ( consumer ))} fn len (& self )-> usize { self . range . len ()} fn with_producer < CB > ( self , callback : CB )-> CB :: Output where CB : ProducerCallback < Self :: Item >, { convert ! ( self . with_producer ( callback ))}}}; }
+macro_rules! __ra_macro_fixture256 {($t : ty )=>{ impl ParallelIterator for Iter <$t > { type Item = $t ; fn drive_unindexed < C > ( self , consumer : C )-> C :: Result where C : UnindexedConsumer < Self :: Item >, { convert ! ( self . drive_unindexed ( consumer ))} fn opt_len (& self )-> Option < usize > { convert ! ( self . opt_len ())}}}; }
+macro_rules! __ra_macro_fixture257 {($f : ident , $name : ident )=>{# [ test ] fn $name (){ let mut rng = thread_rng (); for len in ( 0 .. 25 ). chain ( 500 .. 501 ){ for & modulus in & [ 5 , 10 , 100 ]{ let dist = Uniform :: new ( 0 , modulus ); for _ in 0 .. 100 { let v : Vec < i32 > = rng . sample_iter (& dist ). take ( len ). collect (); let mut tmp = v . clone (); tmp .$f (| a , b | a . cmp ( b )); assert ! ( tmp . windows ( 2 ). all (| w | w [ 0 ]<= w [ 1 ])); let mut tmp = v . clone (); tmp .$f (| a , b | b . cmp ( a )); assert ! ( tmp . windows ( 2 ). all (| w | w [ 0 ]>= w [ 1 ])); }}} for & len in & [ 1_000 , 10_000 , 100_000 ]{ for & modulus in & [ 5 , 10 , 100 , 10_000 ]{ let dist = Uniform :: new ( 0 , modulus ); let mut v : Vec < i32 > = rng . sample_iter (& dist ). take ( len ). collect (); v .$f (| a , b | a . cmp ( b )); assert ! ( v . windows ( 2 ). all (| w | w [ 0 ]<= w [ 1 ])); }} for & len in & [ 1_000 , 10_000 , 100_000 ]{ let len_dist = Uniform :: new ( 0 , len ); for & modulus in & [ 5 , 10 , 1000 , 50_000 ]{ let dist = Uniform :: new ( 0 , modulus ); let mut v : Vec < i32 > = rng . sample_iter (& dist ). take ( len ). collect (); v . sort (); v . reverse (); for _ in 0 .. 5 { let a = rng . sample (& len_dist ); let b = rng . sample (& len_dist ); if a < b { v [ a .. b ]. reverse (); } else { v . swap ( a , b ); }} v .$f (| a , b | a . cmp ( b )); assert ! ( v . windows ( 2 ). all (| w | w [ 0 ]<= w [ 1 ])); }} let mut v : Vec <_> = ( 0 .. 100 ). collect (); v .$f (|_, _| * [ Less , Equal , Greater ]. choose (& mut thread_rng ()). unwrap ()); v .$f (| a , b | a . cmp ( b )); for i in 0 .. v . len (){ assert_eq ! ( v [ i ], i ); }[ 0i32 ; 0 ].$f (| a , b | a . cmp ( b )); [(); 10 ].$f (| a , b | a . cmp ( b )); [(); 100 ].$f (| a , b | a . cmp ( b )); let mut v = [ 0xDEAD_BEEFu64 ]; v .$f (| a , b | a . cmp ( b )); assert ! ( v == [ 0xDEAD_BEEF ]); }}; }
+macro_rules! __ra_macro_fixture258 {($($name : ident # [$expr : meta ])*)=>{$(# [ doc = " First sanity check that the expression is OK." ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " #![deny(unused_must_use)]" ]# [ doc = "" ]# [ doc = " use rayon::prelude::*;" ]# [ doc = "" ]# [ doc = " let v: Vec<_> = (0..100).map(Some).collect();" ]# [ doc = " let _ =" ]# [$expr ]# [ doc = " ```" ]# [ doc = "" ]# [ doc = " Now trigger the `must_use`." ]# [ doc = "" ]# [ doc = " ```compile_fail" ]# [ doc = " #![deny(unused_must_use)]" ]# [ doc = "" ]# [ doc = " use rayon::prelude::*;" ]# [ doc = "" ]# [ doc = " let v: Vec<_> = (0..100).map(Some).collect();" ]# [$expr ]# [ doc = " ```" ] mod $name {})*}}
+macro_rules! __ra_macro_fixture259 {($name : ident : $style : expr ; $input : expr =>$result : expr )=>{# [ test ] fn $name (){ assert_eq ! ($style . paint ($input ). to_string (), $result . to_string ()); let mut v = Vec :: new (); $style . paint ($input . as_bytes ()). write_to (& mut v ). unwrap (); assert_eq ! ( v . as_slice (), $result . as_bytes ()); }}; }
+macro_rules! __ra_macro_fixture260 {($name : ident : $first : expr ; $next : expr =>$result : expr )=>{# [ test ] fn $name (){ assert_eq ! ($result , Difference :: between (&$first , &$next )); }}; }
+macro_rules! __ra_macro_fixture261 {($name : ident : $obj : expr =>$result : expr )=>{# [ test ] fn $name (){ assert_eq ! ($result , format ! ( "{:?}" , $obj )); }}; }
+macro_rules! __ra_macro_fixture262 {($name : ident , $ty_int : ty , $max : expr , $bytes : expr , $read : ident , $write : ident )=>{ mod $name {# [ allow ( unused_imports )] use super :: { qc_sized , Wi128 }; use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; # [ test ] fn big_endian (){ fn prop ( n : $ty_int )-> bool { let mut buf = [ 0 ; 16 ]; BigEndian ::$write (& mut buf , n . clone (), $bytes ); n == BigEndian ::$read (& buf [..$bytes ], $bytes )} qc_sized ( prop as fn ($ty_int )-> bool , $max ); }# [ test ] fn little_endian (){ fn prop ( n : $ty_int )-> bool { let mut buf = [ 0 ; 16 ]; LittleEndian ::$write (& mut buf , n . clone (), $bytes ); n == LittleEndian ::$read (& buf [..$bytes ], $bytes )} qc_sized ( prop as fn ($ty_int )-> bool , $max ); }# [ test ] fn native_endian (){ fn prop ( n : $ty_int )-> bool { let mut buf = [ 0 ; 16 ]; NativeEndian ::$write (& mut buf , n . clone (), $bytes ); n == NativeEndian ::$read (& buf [..$bytes ], $bytes )} qc_sized ( prop as fn ($ty_int )-> bool , $max ); }}}; ($name : ident , $ty_int : ty , $max : expr , $read : ident , $write : ident )=>{ mod $name {# [ allow ( unused_imports )] use super :: { qc_sized , Wi128 }; use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; use core :: mem :: size_of ; # [ test ] fn big_endian (){ fn prop ( n : $ty_int )-> bool { let bytes = size_of ::<$ty_int > (); let mut buf = [ 0 ; 16 ]; BigEndian ::$write (& mut buf [ 16 - bytes ..], n . clone ()); n == BigEndian ::$read (& buf [ 16 - bytes ..])} qc_sized ( prop as fn ($ty_int )-> bool , $max - 1 ); }# [ test ] fn little_endian (){ fn prop ( n : $ty_int )-> bool { let bytes = size_of ::<$ty_int > (); let mut buf = [ 0 ; 16 ]; LittleEndian ::$write (& mut buf [.. bytes ], n . clone ()); n == LittleEndian ::$read (& buf [.. 
bytes ])} qc_sized ( prop as fn ($ty_int )-> bool , $max - 1 ); }# [ test ] fn native_endian (){ fn prop ( n : $ty_int )-> bool { let bytes = size_of ::<$ty_int > (); let mut buf = [ 0 ; 16 ]; NativeEndian ::$write (& mut buf [.. bytes ], n . clone ()); n == NativeEndian ::$read (& buf [.. bytes ])} qc_sized ( prop as fn ($ty_int )-> bool , $max - 1 ); }}}; }
+macro_rules! __ra_macro_fixture263 {($name : ident , $maximally_small : expr , $zero : expr , $read : ident , $write : ident )=>{ mod $name { use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; # [ test ]# [ should_panic ] fn read_big_endian (){ let buf = [ 0 ; $maximally_small ]; BigEndian ::$read (& buf ); }# [ test ]# [ should_panic ] fn read_little_endian (){ let buf = [ 0 ; $maximally_small ]; LittleEndian ::$read (& buf ); }# [ test ]# [ should_panic ] fn read_native_endian (){ let buf = [ 0 ; $maximally_small ]; NativeEndian ::$read (& buf ); }# [ test ]# [ should_panic ] fn write_big_endian (){ let mut buf = [ 0 ; $maximally_small ]; BigEndian ::$write (& mut buf , $zero ); }# [ test ]# [ should_panic ] fn write_little_endian (){ let mut buf = [ 0 ; $maximally_small ]; LittleEndian ::$write (& mut buf , $zero ); }# [ test ]# [ should_panic ] fn write_native_endian (){ let mut buf = [ 0 ; $maximally_small ]; NativeEndian ::$write (& mut buf , $zero ); }}}; ($name : ident , $maximally_small : expr , $read : ident )=>{ mod $name { use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; # [ test ]# [ should_panic ] fn read_big_endian (){ let buf = [ 0 ; $maximally_small ]; BigEndian ::$read (& buf , $maximally_small + 1 ); }# [ test ]# [ should_panic ] fn read_little_endian (){ let buf = [ 0 ; $maximally_small ]; LittleEndian ::$read (& buf , $maximally_small + 1 ); }# [ test ]# [ should_panic ] fn read_native_endian (){ let buf = [ 0 ; $maximally_small ]; NativeEndian ::$read (& buf , $maximally_small + 1 ); }}}; }
+macro_rules! __ra_macro_fixture264 {($name : ident , $read : ident , $write : ident , $num_bytes : expr , $numbers : expr )=>{ mod $name { use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; # [ test ]# [ should_panic ] fn read_big_endian (){ let bytes = [ 0 ; $num_bytes ]; let mut numbers = $numbers ; BigEndian ::$read (& bytes , & mut numbers ); }# [ test ]# [ should_panic ] fn read_little_endian (){ let bytes = [ 0 ; $num_bytes ]; let mut numbers = $numbers ; LittleEndian ::$read (& bytes , & mut numbers ); }# [ test ]# [ should_panic ] fn read_native_endian (){ let bytes = [ 0 ; $num_bytes ]; let mut numbers = $numbers ; NativeEndian ::$read (& bytes , & mut numbers ); }# [ test ]# [ should_panic ] fn write_big_endian (){ let mut bytes = [ 0 ; $num_bytes ]; let numbers = $numbers ; BigEndian ::$write (& numbers , & mut bytes ); }# [ test ]# [ should_panic ] fn write_little_endian (){ let mut bytes = [ 0 ; $num_bytes ]; let numbers = $numbers ; LittleEndian ::$write (& numbers , & mut bytes ); }# [ test ]# [ should_panic ] fn write_native_endian (){ let mut bytes = [ 0 ; $num_bytes ]; let numbers = $numbers ; NativeEndian ::$write (& numbers , & mut bytes ); }}}; }
+macro_rules! __ra_macro_fixture265 {($name : ident , $which : ident , $re : expr )=>{ test_lit ! ($name , $which , $re ,); }; ($name : ident , $which : ident , $re : expr , $($lit : expr ),*)=>{# [ test ] fn $name (){ let expr = ParserBuilder :: new (). build (). parse ($re ). unwrap (); let lits = Literals ::$which (& expr ); assert_lit_eq ! ( Unicode , lits , $($lit ),*); let expr = ParserBuilder :: new (). allow_invalid_utf8 ( true ). unicode ( false ). build (). parse ($re ). unwrap (); let lits = Literals ::$which (& expr ); assert_lit_eq ! ( Bytes , lits , $($lit ),*); }}; }
+macro_rules! __ra_macro_fixture266 {($name : ident , $which : ident , $re : expr )=>{ test_exhausted ! ($name , $which , $re ,); }; ($name : ident , $which : ident , $re : expr , $($lit : expr ),*)=>{# [ test ] fn $name (){ let expr = ParserBuilder :: new (). build (). parse ($re ). unwrap (); let mut lits = Literals :: empty (); lits . set_limit_size ( 20 ). set_limit_class ( 10 ); $which (& mut lits , & expr ); assert_lit_eq ! ( Unicode , lits , $($lit ),*); let expr = ParserBuilder :: new (). allow_invalid_utf8 ( true ). unicode ( false ). build (). parse ($re ). unwrap (); let mut lits = Literals :: empty (); lits . set_limit_size ( 20 ). set_limit_class ( 10 ); $which (& mut lits , & expr ); assert_lit_eq ! ( Bytes , lits , $($lit ),*); }}; }
+macro_rules! __ra_macro_fixture267 {($name : ident , $given : expr , $expected : expr )=>{# [ test ] fn $name (){ let given : Vec < Literal > = $given . into_iter (). map (| ul | { let cut = ul . is_cut (); Literal { v : ul . v . into_bytes (), cut : cut }}). collect (); let lits = create_lits ( given ); let got = lits . unambiguous_prefixes (); assert_eq ! ($expected , escape_lits ( got . literals ())); }}; }
+macro_rules! __ra_macro_fixture268 {($name : ident , $trim : expr , $given : expr , $expected : expr )=>{# [ test ] fn $name (){ let given : Vec < Literal > = $given . into_iter (). map (| ul | { let cut = ul . is_cut (); Literal { v : ul . v . into_bytes (), cut : cut }}). collect (); let lits = create_lits ( given ); let got = lits . trim_suffix ($trim ). unwrap (); assert_eq ! ($expected , escape_lits ( got . literals ())); }}; }
+macro_rules! __ra_macro_fixture269 {($name : ident , $given : expr , $expected : expr )=>{# [ test ] fn $name (){ let given : Vec < Literal > = $given . into_iter (). map (| s : & str | Literal { v : s . to_owned (). into_bytes (), cut : false , }). collect (); let lits = create_lits ( given ); let got = lits . longest_common_prefix (); assert_eq ! ($expected , escape_bytes ( got )); }}; }
+macro_rules! __ra_macro_fixture270 {($name : ident , $given : expr , $expected : expr )=>{# [ test ] fn $name (){ let given : Vec < Literal > = $given . into_iter (). map (| s : & str | Literal { v : s . to_owned (). into_bytes (), cut : false , }). collect (); let lits = create_lits ( given ); let got = lits . longest_common_suffix (); assert_eq ! ($expected , escape_bytes ( got )); }}; }
+macro_rules! __ra_macro_fixture271 {($name : ident , $text : expr )=>{# [ test ] fn $name (){ assert_eq ! ( None , find_cap_ref ($text . as_bytes ())); }}; ($name : ident , $text : expr , $capref : expr )=>{# [ test ] fn $name (){ assert_eq ! ( Some ($capref ), find_cap_ref ($text . as_bytes ())); }}; }
+macro_rules! __ra_macro_fixture272 {($name : ident , $regex_mod : ident , $only_utf8 : expr )=>{ pub mod $name { use super :: RegexOptions ; use error :: Error ; use exec :: ExecBuilder ; use $regex_mod :: Regex ; # [ doc = " A configurable builder for a regular expression." ]# [ doc = "" ]# [ doc = " A builder can be used to configure how the regex is built, for example, by" ]# [ doc = " setting the default flags (which can be overridden in the expression" ]# [ doc = " itself) or setting various limits." ]# [ derive ( Debug )] pub struct RegexBuilder ( RegexOptions ); impl RegexBuilder {# [ doc = " Create a new regular expression builder with the given pattern." ]# [ doc = "" ]# [ doc = " If the pattern is invalid, then an error will be returned when" ]# [ doc = " `build` is called." ] pub fn new ( pattern : & str )-> RegexBuilder { let mut builder = RegexBuilder ( RegexOptions :: default ()); builder . 0 . pats . push ( pattern . to_owned ()); builder }# [ doc = " Consume the builder and compile the regular expression." ]# [ doc = "" ]# [ doc = " Note that calling `as_str` on the resulting `Regex` will produce the" ]# [ doc = " pattern given to `new` verbatim. Notably, it will not incorporate any" ]# [ doc = " of the flags set on this builder." ] pub fn build (& self )-> Result < Regex , Error > { ExecBuilder :: new_options ( self . 0 . clone ()). only_utf8 ($only_utf8 ). build (). map ( Regex :: from )}# [ doc = " Set the value for the case insensitive (`i`) flag." ]# [ doc = "" ]# [ doc = " When enabled, letters in the pattern will match both upper case and" ]# [ doc = " lower case variants." ] pub fn case_insensitive (& mut self , yes : bool , )-> & mut RegexBuilder { self . 0 . case_insensitive = yes ; self }# [ doc = " Set the value for the multi-line matching (`m`) flag." ]# [ doc = "" ]# [ doc = " When enabled, `^` matches the beginning of lines and `$` matches the" ]# [ doc = " end of lines." 
]# [ doc = "" ]# [ doc = " By default, they match beginning/end of the input." ] pub fn multi_line (& mut self , yes : bool )-> & mut RegexBuilder { self . 0 . multi_line = yes ; self }# [ doc = " Set the value for the any character (`s`) flag, where in `.` matches" ]# [ doc = " anything when `s` is set and matches anything except for new line when" ]# [ doc = " it is not set (the default)." ]# [ doc = "" ]# [ doc = " N.B. \\\"matches anything\\\" means \\\"any byte\\\" when Unicode is disabled and" ]# [ doc = " means \\\"any valid UTF-8 encoding of any Unicode scalar value\\\" when" ]# [ doc = " Unicode is enabled." ] pub fn dot_matches_new_line (& mut self , yes : bool , )-> & mut RegexBuilder { self . 0 . dot_matches_new_line = yes ; self }# [ doc = " Set the value for the greedy swap (`U`) flag." ]# [ doc = "" ]# [ doc = " When enabled, a pattern like `a*` is lazy (tries to find shortest" ]# [ doc = " match) and `a*?` is greedy (tries to find longest match)." ]# [ doc = "" ]# [ doc = " By default, `a*` is greedy and `a*?` is lazy." ] pub fn swap_greed (& mut self , yes : bool )-> & mut RegexBuilder { self . 0 . swap_greed = yes ; self }# [ doc = " Set the value for the ignore whitespace (`x`) flag." ]# [ doc = "" ]# [ doc = " When enabled, whitespace such as new lines and spaces will be ignored" ]# [ doc = " between expressions of the pattern, and `#` can be used to start a" ]# [ doc = " comment until the next new line." ] pub fn ignore_whitespace (& mut self , yes : bool , )-> & mut RegexBuilder { self . 0 . ignore_whitespace = yes ; self }# [ doc = " Set the value for the Unicode (`u`) flag." ]# [ doc = "" ]# [ doc = " Enabled by default. When disabled, character classes such as `\\\\w` only" ]# [ doc = " match ASCII word characters instead of all Unicode word characters." ] pub fn unicode (& mut self , yes : bool )-> & mut RegexBuilder { self . 0 . unicode = yes ; self }# [ doc = " Whether to support octal syntax or not." 
]# [ doc = "" ]# [ doc = " Octal syntax is a little-known way of uttering Unicode codepoints in" ]# [ doc = " a regular expression. For example, `a`, `\\\\x61`, `\\\\u0061` and" ]# [ doc = " `\\\\141` are all equivalent regular expressions, where the last example" ]# [ doc = " shows octal syntax." ]# [ doc = "" ]# [ doc = " While supporting octal syntax isn\\\'t in and of itself a problem, it does" ]# [ doc = " make good error messages harder. That is, in PCRE based regex engines," ]# [ doc = " syntax like `\\\\0` invokes a backreference, which is explicitly" ]# [ doc = " unsupported in Rust\\\'s regex engine. However, many users expect it to" ]# [ doc = " be supported. Therefore, when octal support is disabled, the error" ]# [ doc = " message will explicitly mention that backreferences aren\\\'t supported." ]# [ doc = "" ]# [ doc = " Octal syntax is disabled by default." ] pub fn octal (& mut self , yes : bool )-> & mut RegexBuilder { self . 0 . octal = yes ; self }# [ doc = " Set the approximate size limit of the compiled regular expression." ]# [ doc = "" ]# [ doc = " This roughly corresponds to the number of bytes occupied by a single" ]# [ doc = " compiled program. If the program exceeds this number, then a" ]# [ doc = " compilation error is returned." ] pub fn size_limit (& mut self , limit : usize , )-> & mut RegexBuilder { self . 0 . size_limit = limit ; self }# [ doc = " Set the approximate size of the cache used by the DFA." ]# [ doc = "" ]# [ doc = " This roughly corresponds to the number of bytes that the DFA will" ]# [ doc = " use while searching." ]# [ doc = "" ]# [ doc = " Note that this is a *per thread* limit. There is no way to set a global" ]# [ doc = " limit. In particular, if a regex is used from multiple threads" ]# [ doc = " simultaneously, then each thread may use up to the number of bytes" ]# [ doc = " specified here." ] pub fn dfa_size_limit (& mut self , limit : usize , )-> & mut RegexBuilder { self . 0 . 
dfa_size_limit = limit ; self }# [ doc = " Set the nesting limit for this parser." ]# [ doc = "" ]# [ doc = " The nesting limit controls how deep the abstract syntax tree is allowed" ]# [ doc = " to be. If the AST exceeds the given limit (e.g., with too many nested" ]# [ doc = " groups), then an error is returned by the parser." ]# [ doc = "" ]# [ doc = " The purpose of this limit is to act as a heuristic to prevent stack" ]# [ doc = " overflow for consumers that do structural induction on an `Ast` using" ]# [ doc = " explicit recursion. While this crate never does this (instead using" ]# [ doc = " constant stack space and moving the call stack to the heap), other" ]# [ doc = " crates may." ]# [ doc = "" ]# [ doc = " This limit is not checked until the entire Ast is parsed. Therefore," ]# [ doc = " if callers want to put a limit on the amount of heap space used, then" ]# [ doc = " they should impose a limit on the length, in bytes, of the concrete" ]# [ doc = " pattern string. In particular, this is viable since this parser" ]# [ doc = " implementation will limit itself to heap space proportional to the" ]# [ doc = " length of the pattern string." ]# [ doc = "" ]# [ doc = " Note that a nest limit of `0` will return a nest limit error for most" ]# [ doc = " patterns but not all. For example, a nest limit of `0` permits `a` but" ]# [ doc = " not `ab`, since `ab` requires a concatenation, which results in a nest" ]# [ doc = " depth of `1`. In general, a nest limit is not something that manifests" ]# [ doc = " in an obvious way in the concrete syntax, therefore, it should not be" ]# [ doc = " used in a granular way." ] pub fn nest_limit (& mut self , limit : u32 )-> & mut RegexBuilder { self . 0 . nest_limit = limit ; self }}}}; }
+macro_rules! __ra_macro_fixture273 {($name : ident , $regex_mod : ident , $only_utf8 : expr )=>{ pub mod $name { use super :: RegexOptions ; use error :: Error ; use exec :: ExecBuilder ; use re_set ::$regex_mod :: RegexSet ; # [ doc = " A configurable builder for a set of regular expressions." ]# [ doc = "" ]# [ doc = " A builder can be used to configure how the regexes are built, for example," ]# [ doc = " by setting the default flags (which can be overridden in the expression" ]# [ doc = " itself) or setting various limits." ]# [ derive ( Debug )] pub struct RegexSetBuilder ( RegexOptions ); impl RegexSetBuilder {# [ doc = " Create a new regular expression builder with the given pattern." ]# [ doc = "" ]# [ doc = " If the pattern is invalid, then an error will be returned when" ]# [ doc = " `build` is called." ] pub fn new < I , S > ( patterns : I )-> RegexSetBuilder where S : AsRef < str >, I : IntoIterator < Item = S >, { let mut builder = RegexSetBuilder ( RegexOptions :: default ()); for pat in patterns { builder . 0 . pats . push ( pat . as_ref (). to_owned ()); } builder }# [ doc = " Consume the builder and compile the regular expressions into a set." ] pub fn build (& self )-> Result < RegexSet , Error > { ExecBuilder :: new_options ( self . 0 . clone ()). only_utf8 ($only_utf8 ). build (). map ( RegexSet :: from )}# [ doc = " Set the value for the case insensitive (`i`) flag." ] pub fn case_insensitive (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . case_insensitive = yes ; self }# [ doc = " Set the value for the multi-line matching (`m`) flag." ] pub fn multi_line (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . multi_line = yes ; self }# [ doc = " Set the value for the any character (`s`) flag, where in `.` matches" ]# [ doc = " anything when `s` is set and matches anything except for new line when" ]# [ doc = " it is not set (the default)." ]# [ doc = "" ]# [ doc = " N.B. 
\\\"matches anything\\\" means \\\"any byte\\\" for `regex::bytes::RegexSet`" ]# [ doc = " expressions and means \\\"any Unicode scalar value\\\" for `regex::RegexSet`" ]# [ doc = " expressions." ] pub fn dot_matches_new_line (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . dot_matches_new_line = yes ; self }# [ doc = " Set the value for the greedy swap (`U`) flag." ] pub fn swap_greed (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . swap_greed = yes ; self }# [ doc = " Set the value for the ignore whitespace (`x`) flag." ] pub fn ignore_whitespace (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . ignore_whitespace = yes ; self }# [ doc = " Set the value for the Unicode (`u`) flag." ] pub fn unicode (& mut self , yes : bool )-> & mut RegexSetBuilder { self . 0 . unicode = yes ; self }# [ doc = " Whether to support octal syntax or not." ]# [ doc = "" ]# [ doc = " Octal syntax is a little-known way of uttering Unicode codepoints in" ]# [ doc = " a regular expression. For example, `a`, `\\\\x61`, `\\\\u0061` and" ]# [ doc = " `\\\\141` are all equivalent regular expressions, where the last example" ]# [ doc = " shows octal syntax." ]# [ doc = "" ]# [ doc = " While supporting octal syntax isn\\\'t in and of itself a problem, it does" ]# [ doc = " make good error messages harder. That is, in PCRE based regex engines," ]# [ doc = " syntax like `\\\\0` invokes a backreference, which is explicitly" ]# [ doc = " unsupported in Rust\\\'s regex engine. However, many users expect it to" ]# [ doc = " be supported. Therefore, when octal support is disabled, the error" ]# [ doc = " message will explicitly mention that backreferences aren\\\'t supported." ]# [ doc = "" ]# [ doc = " Octal syntax is disabled by default." ] pub fn octal (& mut self , yes : bool )-> & mut RegexSetBuilder { self . 0 . octal = yes ; self }# [ doc = " Set the approximate size limit of the compiled regular expression." 
]# [ doc = "" ]# [ doc = " This roughly corresponds to the number of bytes occupied by a single" ]# [ doc = " compiled program. If the program exceeds this number, then a" ]# [ doc = " compilation error is returned." ] pub fn size_limit (& mut self , limit : usize , )-> & mut RegexSetBuilder { self . 0 . size_limit = limit ; self }# [ doc = " Set the approximate size of the cache used by the DFA." ]# [ doc = "" ]# [ doc = " This roughly corresponds to the number of bytes that the DFA will" ]# [ doc = " use while searching." ]# [ doc = "" ]# [ doc = " Note that this is a *per thread* limit. There is no way to set a global" ]# [ doc = " limit. In particular, if a regex is used from multiple threads" ]# [ doc = " simultaneously, then each thread may use up to the number of bytes" ]# [ doc = " specified here." ] pub fn dfa_size_limit (& mut self , limit : usize , )-> & mut RegexSetBuilder { self . 0 . dfa_size_limit = limit ; self }# [ doc = " Set the nesting limit for this parser." ]# [ doc = "" ]# [ doc = " The nesting limit controls how deep the abstract syntax tree is allowed" ]# [ doc = " to be. If the AST exceeds the given limit (e.g., with too many nested" ]# [ doc = " groups), then an error is returned by the parser." ]# [ doc = "" ]# [ doc = " The purpose of this limit is to act as a heuristic to prevent stack" ]# [ doc = " overflow for consumers that do structural induction on an `Ast` using" ]# [ doc = " explicit recursion. While this crate never does this (instead using" ]# [ doc = " constant stack space and moving the call stack to the heap), other" ]# [ doc = " crates may." ]# [ doc = "" ]# [ doc = " This limit is not checked until the entire Ast is parsed. Therefore," ]# [ doc = " if callers want to put a limit on the amount of heap space used, then" ]# [ doc = " they should impose a limit on the length, in bytes, of the concrete" ]# [ doc = " pattern string. 
In particular, this is viable since this parser" ]# [ doc = " implementation will limit itself to heap space proportional to the" ]# [ doc = " length of the pattern string." ]# [ doc = "" ]# [ doc = " Note that a nest limit of `0` will return a nest limit error for most" ]# [ doc = " patterns but not all. For example, a nest limit of `0` permits `a` but" ]# [ doc = " not `ab`, since `ab` requires a concatenation, which results in a nest" ]# [ doc = " depth of `1`. In general, a nest limit is not something that manifests" ]# [ doc = " in an obvious way in the concrete syntax, therefore, it should not be" ]# [ doc = " used in a granular way." ] pub fn nest_limit (& mut self , limit : u32 , )-> & mut RegexSetBuilder { self . 0 . nest_limit = limit ; self }}}}; }
+macro_rules! __ra_macro_fixture274 {($name : ident , $builder_mod : ident , $text_ty : ty , $as_bytes : expr , $(# [$doc_regexset_example : meta ])* )=>{ pub mod $name { use std :: fmt ; use std :: iter ; use std :: slice ; use std :: vec ; use error :: Error ; use exec :: Exec ; use re_builder ::$builder_mod :: RegexSetBuilder ; use re_trait :: RegularExpression ; # [ doc = " Match multiple (possibly overlapping) regular expressions in a single scan." ]# [ doc = "" ]# [ doc = " A regex set corresponds to the union of two or more regular expressions." ]# [ doc = " That is, a regex set will match text where at least one of its" ]# [ doc = " constituent regular expressions matches. A regex set as its formulated here" ]# [ doc = " provides a touch more power: it will also report *which* regular" ]# [ doc = " expressions in the set match. Indeed, this is the key difference between" ]# [ doc = " regex sets and a single `Regex` with many alternates, since only one" ]# [ doc = " alternate can match at a time." ]# [ doc = "" ]# [ doc = " For example, consider regular expressions to match email addresses and" ]# [ doc = " domains: `[a-z]+@[a-z]+\\\\.(com|org|net)` and `[a-z]+\\\\.(com|org|net)`. If a" ]# [ doc = " regex set is constructed from those regexes, then searching the text" ]# [ doc = " `foo@example.com` will report both regexes as matching. Of course, one" ]# [ doc = " could accomplish this by compiling each regex on its own and doing two" ]# [ doc = " searches over the text. The key advantage of using a regex set is that it" ]# [ doc = " will report the matching regexes using a *single pass through the text*." ]# [ doc = " If one has hundreds or thousands of regexes to match repeatedly (like a URL" ]# [ doc = " router for a complex web application or a user agent matcher), then a regex" ]# [ doc = " set can realize huge performance gains." 
]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " This shows how the above two regexes (for matching email addresses and" ]# [ doc = " domains) might work:" ]# [ doc = "" ]$(# [$doc_regexset_example ])* # [ doc = "" ]# [ doc = " Note that it would be possible to adapt the above example to using `Regex`" ]# [ doc = " with an expression like:" ]# [ doc = "" ]# [ doc = " ```ignore" ]# [ doc = " (?P<email>[a-z]+@(?P<email_domain>[a-z]+[.](com|org|net)))|(?P<domain>[a-z]+[.](com|org|net))" ]# [ doc = " ```" ]# [ doc = "" ]# [ doc = " After a match, one could then inspect the capture groups to figure out" ]# [ doc = " which alternates matched. The problem is that it is hard to make this" ]# [ doc = " approach scale when there are many regexes since the overlap between each" ]# [ doc = " alternate isn\\\'t always obvious to reason about." ]# [ doc = "" ]# [ doc = " # Limitations" ]# [ doc = "" ]# [ doc = " Regex sets are limited to answering the following two questions:" ]# [ doc = "" ]# [ doc = " 1. Does any regex in the set match?" ]# [ doc = " 2. If so, which regexes in the set match?" ]# [ doc = "" ]# [ doc = " As with the main `Regex` type, it is cheaper to ask (1) instead of (2)" ]# [ doc = " since the matching engines can stop after the first match is found." ]# [ doc = "" ]# [ doc = " Other features like finding the location of successive matches or their" ]# [ doc = " sub-captures aren\\\'t supported. If you need this functionality, the" ]# [ doc = " recommended approach is to compile each regex in the set independently and" ]# [ doc = " selectively match them based on which regexes in the set matched." ]# [ doc = "" ]# [ doc = " # Performance" ]# [ doc = "" ]# [ doc = " A `RegexSet` has the same performance characteristics as `Regex`. Namely," ]# [ doc = " search takes `O(mn)` time, where `m` is proportional to the size of the" ]# [ doc = " regex set and `n` is proportional to the length of the search text." 
]# [ derive ( Clone )] pub struct RegexSet ( Exec ); impl RegexSet {# [ doc = " Create a new regex set with the given regular expressions." ]# [ doc = "" ]# [ doc = " This takes an iterator of `S`, where `S` is something that can produce" ]# [ doc = " a `&str`. If any of the strings in the iterator are not valid regular" ]# [ doc = " expressions, then an error is returned." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " Create a new regex set from an iterator of strings:" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::new(&[r\\\"\\\\w+\\\", r\\\"\\\\d+\\\"]).unwrap();" ]# [ doc = " assert!(set.is_match(\\\"foo\\\"));" ]# [ doc = " ```" ] pub fn new < I , S > ( exprs : I )-> Result < RegexSet , Error > where S : AsRef < str >, I : IntoIterator < Item = S >{ RegexSetBuilder :: new ( exprs ). build ()}# [ doc = " Create a new empty regex set." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::empty();" ]# [ doc = " assert!(set.is_empty());" ]# [ doc = " ```" ] pub fn empty ()-> RegexSet { RegexSetBuilder :: new (& [ "" ; 0 ]). build (). unwrap ()}# [ doc = " Returns true if and only if one of the regexes in this set matches" ]# [ doc = " the text given." ]# [ doc = "" ]# [ doc = " This method should be preferred if you only need to test whether any" ]# [ doc = " of the regexes in the set should match, but don\\\'t care about *which*" ]# [ doc = " regexes matched. This is because the underlying matching engine will" ]# [ doc = " quit immediately after seeing the first match instead of continuing to" ]# [ doc = " find all matches." ]# [ doc = "" ]# [ doc = " Note that as with searches using `Regex`, the expression is unanchored" ]# [ doc = " by default. 
That is, if the regex does not start with `^` or `\\\\A`, or" ]# [ doc = " end with `$` or `\\\\z`, then it is permitted to match anywhere in the" ]# [ doc = " text." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " Tests whether a set matches some text:" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::new(&[r\\\"\\\\w+\\\", r\\\"\\\\d+\\\"]).unwrap();" ]# [ doc = " assert!(set.is_match(\\\"foo\\\"));" ]# [ doc = " assert!(!set.is_match(\\\"\\u{2603}\\\"));" ]# [ doc = " ```" ] pub fn is_match (& self , text : $text_ty )-> bool { self . is_match_at ( text , 0 )}# [ doc = " Returns the same as is_match, but starts the search at the given" ]# [ doc = " offset." ]# [ doc = "" ]# [ doc = " The significance of the starting point is that it takes the surrounding" ]# [ doc = " context into consideration. For example, the `\\\\A` anchor can only" ]# [ doc = " match when `start == 0`." ]# [ doc ( hidden )] pub fn is_match_at (& self , text : $text_ty , start : usize )-> bool { self . 0 . searcher (). is_match_at ($as_bytes ( text ), start )}# [ doc = " Returns the set of regular expressions that match in the given text." ]# [ doc = "" ]# [ doc = " The set returned contains the index of each regular expression that" ]# [ doc = " matches in the given text. The index is in correspondence with the" ]# [ doc = " order of regular expressions given to `RegexSet`\\\'s constructor." ]# [ doc = "" ]# [ doc = " The set can also be used to iterate over the matched indices." ]# [ doc = "" ]# [ doc = " Note that as with searches using `Regex`, the expression is unanchored" ]# [ doc = " by default. That is, if the regex does not start with `^` or `\\\\A`, or" ]# [ doc = " end with `$` or `\\\\z`, then it is permitted to match anywhere in the" ]# [ doc = " text." 
]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " Tests which regular expressions match the given text:" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::new(&[" ]# [ doc = " r\\\"\\\\w+\\\"," ]# [ doc = " r\\\"\\\\d+\\\"," ]# [ doc = " r\\\"\\\\pL+\\\"," ]# [ doc = " r\\\"foo\\\"," ]# [ doc = " r\\\"bar\\\"," ]# [ doc = " r\\\"barfoo\\\"," ]# [ doc = " r\\\"foobar\\\"," ]# [ doc = " ]).unwrap();" ]# [ doc = " let matches: Vec<_> = set.matches(\\\"foobar\\\").into_iter().collect();" ]# [ doc = " assert_eq!(matches, vec![0, 2, 3, 4, 6]);" ]# [ doc = "" ]# [ doc = " // You can also test whether a particular regex matched:" ]# [ doc = " let matches = set.matches(\\\"foobar\\\");" ]# [ doc = " assert!(!matches.matched(5));" ]# [ doc = " assert!(matches.matched(6));" ]# [ doc = " ```" ] pub fn matches (& self , text : $text_ty )-> SetMatches { let mut matches = vec ! [ false ; self . 0 . regex_strings (). len ()]; let any = self . read_matches_at (& mut matches , text , 0 ); SetMatches { matched_any : any , matches : matches , }}# [ doc = " Returns the same as matches, but starts the search at the given" ]# [ doc = " offset and stores the matches into the slice given." ]# [ doc = "" ]# [ doc = " The significance of the starting point is that it takes the surrounding" ]# [ doc = " context into consideration. For example, the `\\\\A` anchor can only" ]# [ doc = " match when `start == 0`." ]# [ doc = "" ]# [ doc = " `matches` must have a length that is at least the number of regexes" ]# [ doc = " in this set." ]# [ doc = "" ]# [ doc = " This method returns true if and only if at least one member of" ]# [ doc = " `matches` is true after executing the set against `text`." ]# [ doc ( hidden )] pub fn read_matches_at (& self , matches : & mut [ bool ], text : $text_ty , start : usize , )-> bool { self . 0 . searcher (). 
many_matches_at ( matches , $as_bytes ( text ), start )}# [ doc = " Returns the total number of regular expressions in this set." ] pub fn len (& self )-> usize { self . 0 . regex_strings (). len ()}# [ doc = " Returns `true` if this set contains no regular expressions." ] pub fn is_empty (& self )-> bool { self . 0 . regex_strings (). is_empty ()}# [ doc = " Returns the patterns that this set will match on." ]# [ doc = "" ]# [ doc = " This function can be used to determine the pattern for a match. The" ]# [ doc = " slice returned has exactly as many patterns givens to this regex set," ]# [ doc = " and the order of the slice is the same as the order of the patterns" ]# [ doc = " provided to the set." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::new(&[" ]# [ doc = " r\\\"\\\\w+\\\"," ]# [ doc = " r\\\"\\\\d+\\\"," ]# [ doc = " r\\\"\\\\pL+\\\"," ]# [ doc = " r\\\"foo\\\"," ]# [ doc = " r\\\"bar\\\"," ]# [ doc = " r\\\"barfoo\\\"," ]# [ doc = " r\\\"foobar\\\"," ]# [ doc = " ]).unwrap();" ]# [ doc = " let matches: Vec<_> = set" ]# [ doc = " .matches(\\\"foobar\\\")" ]# [ doc = " .into_iter()" ]# [ doc = " .map(|match_idx| &set.patterns()[match_idx])" ]# [ doc = " .collect();" ]# [ doc = " assert_eq!(matches, vec![r\\\"\\\\w+\\\", r\\\"\\\\pL+\\\", r\\\"foo\\\", r\\\"bar\\\", r\\\"foobar\\\"]);" ]# [ doc = " ```" ] pub fn patterns (& self )-> & [ String ]{ self . 0 . regex_strings ()}}# [ doc = " A set of matches returned by a regex set." ]# [ derive ( Clone , Debug )] pub struct SetMatches { matched_any : bool , matches : Vec < bool >, } impl SetMatches {# [ doc = " Whether this set contains any matches." ] pub fn matched_any (& self )-> bool { self . matched_any }# [ doc = " Whether the regex at the given index matched." 
]# [ doc = "" ]# [ doc = " The index for a regex is determined by its insertion order upon the" ]# [ doc = " initial construction of a `RegexSet`, starting at `0`." ]# [ doc = "" ]# [ doc = " # Panics" ]# [ doc = "" ]# [ doc = " If `regex_index` is greater than or equal to `self.len()`." ] pub fn matched (& self , regex_index : usize )-> bool { self . matches [ regex_index ]}# [ doc = " The total number of regexes in the set that created these matches." ] pub fn len (& self )-> usize { self . matches . len ()}# [ doc = " Returns an iterator over indexes in the regex that matched." ]# [ doc = "" ]# [ doc = " This will always produces matches in ascending order of index, where" ]# [ doc = " the index corresponds to the index of the regex that matched with" ]# [ doc = " respect to its position when initially building the set." ] pub fn iter (& self )-> SetMatchesIter { SetMatchesIter ((&* self . matches ). into_iter (). enumerate ())}} impl IntoIterator for SetMatches { type IntoIter = SetMatchesIntoIter ; type Item = usize ; fn into_iter ( self )-> Self :: IntoIter { SetMatchesIntoIter ( self . matches . into_iter (). enumerate ())}} impl < 'a > IntoIterator for & 'a SetMatches { type IntoIter = SetMatchesIter < 'a >; type Item = usize ; fn into_iter ( self )-> Self :: IntoIter { self . iter ()}}# [ doc = " An owned iterator over the set of matches from a regex set." ]# [ doc = "" ]# [ doc = " This will always produces matches in ascending order of index, where the" ]# [ doc = " index corresponds to the index of the regex that matched with respect to" ]# [ doc = " its position when initially building the set." ]# [ derive ( Debug )] pub struct SetMatchesIntoIter ( iter :: Enumerate < vec :: IntoIter < bool >>); impl Iterator for SetMatchesIntoIter { type Item = usize ; fn next (& mut self )-> Option < usize > { loop { match self . 0 . 
next (){ None => return None , Some ((_, false ))=>{} Some (( i , true ))=> return Some ( i ), }}} fn size_hint (& self )-> ( usize , Option < usize >){ self . 0 . size_hint ()}} impl DoubleEndedIterator for SetMatchesIntoIter { fn next_back (& mut self )-> Option < usize > { loop { match self . 0 . next_back (){ None => return None , Some ((_, false ))=>{} Some (( i , true ))=> return Some ( i ), }}}} impl iter :: FusedIterator for SetMatchesIntoIter {}# [ doc = " A borrowed iterator over the set of matches from a regex set." ]# [ doc = "" ]# [ doc = " The lifetime `\\\'a` refers to the lifetime of a `SetMatches` value." ]# [ doc = "" ]# [ doc = " This will always produces matches in ascending order of index, where the" ]# [ doc = " index corresponds to the index of the regex that matched with respect to" ]# [ doc = " its position when initially building the set." ]# [ derive ( Clone , Debug )] pub struct SetMatchesIter < 'a > ( iter :: Enumerate < slice :: Iter < 'a , bool >>); impl < 'a > Iterator for SetMatchesIter < 'a > { type Item = usize ; fn next (& mut self )-> Option < usize > { loop { match self . 0 . next (){ None => return None , Some ((_, & false ))=>{} Some (( i , & true ))=> return Some ( i ), }}} fn size_hint (& self )-> ( usize , Option < usize >){ self . 0 . size_hint ()}} impl < 'a > DoubleEndedIterator for SetMatchesIter < 'a > { fn next_back (& mut self )-> Option < usize > { loop { match self . 0 . next_back (){ None => return None , Some ((_, & false ))=>{} Some (( i , & true ))=> return Some ( i ), }}}} impl < 'a > iter :: FusedIterator for SetMatchesIter < 'a > {}# [ doc ( hidden )] impl From < Exec > for RegexSet { fn from ( exec : Exec )-> Self { RegexSet ( exec )}} impl fmt :: Debug for RegexSet { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { write ! ( f , "RegexSet({:?})" , self . 0 . regex_strings ())}}# [ allow ( dead_code )] fn as_bytes_str ( text : & str )-> & [ u8 ]{ text . 
as_bytes ()}# [ allow ( dead_code )] fn as_bytes_bytes ( text : & [ u8 ])-> & [ u8 ]{ text }}}}
+macro_rules! __ra_macro_fixture275 {($($max_len : expr =>$t : ident ),* as $conv_fn : ident )=>{$(impl_IntegerCommon ! ($max_len , $t ); impl IntegerPrivate < [ u8 ; $max_len ]> for $t {# [ allow ( unused_comparisons )]# [ inline ] fn write_to ( self , buf : & mut [ u8 ; $max_len ])-> & [ u8 ]{ let is_nonnegative = self >= 0 ; let mut n = if is_nonnegative { self as $conv_fn } else {(! ( self as $conv_fn )). wrapping_add ( 1 )}; let mut curr = buf . len () as isize ; let buf_ptr = buf . as_mut_ptr (); let lut_ptr = DEC_DIGITS_LUT . as_ptr (); unsafe { if mem :: size_of ::<$t > ()>= 2 { while n >= 10000 { let rem = ( n % 10000 ) as isize ; n /= 10000 ; let d1 = ( rem / 100 )<< 1 ; let d2 = ( rem % 100 )<< 1 ; curr -= 4 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); ptr :: copy_nonoverlapping ( lut_ptr . offset ( d2 ), buf_ptr . offset ( curr + 2 ), 2 ); }} let mut n = n as isize ; if n >= 100 { let d1 = ( n % 100 )<< 1 ; n /= 100 ; curr -= 2 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); } if n < 10 { curr -= 1 ; * buf_ptr . offset ( curr )= ( n as u8 )+ b'0' ; } else { let d1 = n << 1 ; curr -= 2 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); } if ! is_nonnegative { curr -= 1 ; * buf_ptr . offset ( curr )= b'-' ; }} let len = buf . len ()- curr as usize ; unsafe { slice :: from_raw_parts ( buf_ptr . offset ( curr ), len )}}})*}; }
+macro_rules! __ra_macro_fixture276 {($max_len : expr , $t : ident )=>{ impl Integer for $t {# [ inline ] fn write ( self , buf : & mut Buffer )-> & str { unsafe { debug_assert ! ($max_len <= I128_MAX_LEN ); let buf = mem :: transmute ::<& mut [ u8 ; I128_MAX_LEN ], & mut [ u8 ; $max_len ]> (& mut buf . bytes , ); let bytes = self . write_to ( buf ); str :: from_utf8_unchecked ( bytes )}}} impl private :: Sealed for $t {}}; }
+macro_rules! __ra_macro_fixture277 {(($name : ident $($generics : tt )*)=>$item : ty )=>{ impl $($generics )* Iterator for $name $($generics )* { type Item = $item ; # [ inline ] fn next (& mut self )-> Option < Self :: Item > { self . iter . next ()}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()}} impl $($generics )* DoubleEndedIterator for $name $($generics )* {# [ inline ] fn next_back (& mut self )-> Option < Self :: Item > { self . iter . next_back ()}} impl $($generics )* ExactSizeIterator for $name $($generics )* {# [ inline ] fn len (& self )-> usize { self . iter . len ()}} impl $($generics )* FusedIterator for $name $($generics )* {}}}
+macro_rules! __ra_macro_fixture278 {($($ty : ident )*)=>{$(impl From <$ty > for Value { fn from ( n : $ty )-> Self { Value :: Number ( n . into ())}})* }; }
+macro_rules! __ra_macro_fixture279 {($($eq : ident [$($ty : ty )*])*)=>{$($(impl PartialEq <$ty > for Value { fn eq (& self , other : &$ty )-> bool {$eq ( self , * other as _)}} impl PartialEq < Value > for $ty { fn eq (& self , other : & Value )-> bool {$eq ( other , * self as _)}} impl < 'a > PartialEq <$ty > for & 'a Value { fn eq (& self , other : &$ty )-> bool {$eq (* self , * other as _)}} impl < 'a > PartialEq <$ty > for & 'a mut Value { fn eq (& self , other : &$ty )-> bool {$eq (* self , * other as _)}})*)* }}
+macro_rules! __ra_macro_fixture280 {($($ty : ty ),* )=>{$(impl From <$ty > for Number {# [ inline ] fn from ( u : $ty )-> Self { let n = {# [ cfg ( not ( feature = "arbitrary_precision" ))]{ N :: PosInt ( u as u64 )}# [ cfg ( feature = "arbitrary_precision" )]{ itoa :: Buffer :: new (). format ( u ). to_owned ()}}; Number { n }}})* }; }
+macro_rules! __ra_macro_fixture281 {($($ty : ty ),* )=>{$(impl From <$ty > for Number {# [ inline ] fn from ( i : $ty )-> Self { let n = {# [ cfg ( not ( feature = "arbitrary_precision" ))]{ if i < 0 { N :: NegInt ( i as i64 )} else { N :: PosInt ( i as u64 )}}# [ cfg ( feature = "arbitrary_precision" )]{ itoa :: Buffer :: new (). format ( i ). to_owned ()}}; Number { n }}})* }; }
+macro_rules! __ra_macro_fixture282 (($($size : expr ),+)=>{$(unsafe impl < T > Array for [ T ; $size ]{ type Item = T ; fn size ()-> usize {$size }})+ });
+macro_rules! __ra_macro_fixture283 {($($name : ident ( repeats : $repeats : expr , latches : $latches : expr , delay : $delay : expr , threads : $threads : expr , single_unparks : $single_unparks : expr ); )* )=>{$(# [ test ] fn $name (){ let delay = Duration :: from_micros ($delay ); for _ in 0 ..$repeats { run_parking_test ($latches , delay , $threads , $single_unparks ); }})* }; }
+macro_rules! __ra_macro_fixture284 {($C : ident $P : ident ; $A : ident , $($I : ident ),* ; $($X : ident )*)=>(# [ derive ( Clone , Debug )] pub struct $C < I : Iterator > { item : Option < I :: Item >, iter : I , c : $P < I >, } impl < I : Iterator + Clone > From < I > for $C < I > { fn from ( mut iter : I )-> Self {$C { item : iter . next (), iter : iter . clone (), c : $P :: from ( iter ), }}} impl < I : Iterator + Clone > From < I > for $C < Fuse < I >> { fn from ( iter : I )-> Self { let mut iter = iter . fuse (); $C { item : iter . next (), iter : iter . clone (), c : $P :: from ( iter ), }}} impl < I , $A > Iterator for $C < I > where I : Iterator < Item = $A > + Clone , I :: Item : Clone { type Item = ($($I ),*); fn next (& mut self )-> Option < Self :: Item > { if let Some (($($X ),*,))= self . c . next (){ let z = self . item . clone (). unwrap (); Some (( z , $($X ),*))} else { self . item = self . iter . next (); self . item . clone (). and_then (| z | { self . c = $P :: from ( self . iter . clone ()); self . c . next (). map (| ($($X ),*,)| ( z , $($X ),*))})}}} impl < I , $A > HasCombination < I > for ($($I ),*) where I : Iterator < Item = $A > + Clone , I :: Item : Clone { type Combination = $C < Fuse < I >>; })}
+macro_rules! __ra_macro_fixture285 (($_A : ident , $_B : ident , )=>(); ($A : ident , $($B : ident ,)*)=>( impl_cons_iter ! ($($B ,)*); # [ allow ( non_snake_case )] impl < X , Iter , $($B ),*> Iterator for ConsTuples < Iter , (($($B ,)*), X )> where Iter : Iterator < Item = (($($B ,)*), X )>, { type Item = ($($B ,)* X , ); fn next (& mut self )-> Option < Self :: Item > { self . iter . next (). map (| (($($B ,)*), x )| ($($B ,)* x , ))} fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()} fn fold < Acc , Fold > ( self , accum : Acc , mut f : Fold )-> Acc where Fold : FnMut ( Acc , Self :: Item )-> Acc , { self . iter . fold ( accum , move | acc , (($($B ,)*), x )| f ( acc , ($($B ,)* x , )))}}# [ allow ( non_snake_case )] impl < X , Iter , $($B ),*> DoubleEndedIterator for ConsTuples < Iter , (($($B ,)*), X )> where Iter : DoubleEndedIterator < Item = (($($B ,)*), X )>, { fn next_back (& mut self )-> Option < Self :: Item > { self . iter . next (). map (| (($($B ,)*), x )| ($($B ,)* x , ))}}); );
+macro_rules! __ra_macro_fixture286 {($($fmt_trait : ident )*)=>{$(impl < 'a , I > fmt ::$fmt_trait for Format < 'a , I > where I : Iterator , I :: Item : fmt ::$fmt_trait , { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { self . format ( f , fmt ::$fmt_trait :: fmt )}})* }}
+macro_rules! __ra_macro_fixture287 {([$($typarm : tt )*]$type_ : ty )=>{ impl <$($typarm )*> PeekingNext for $type_ { fn peeking_next < F > (& mut self , accept : F )-> Option < Self :: Item > where F : FnOnce (& Self :: Item )-> bool { let saved_state = self . clone (); if let Some ( r )= self . next (){ if ! accept (& r ){* self = saved_state ; } else { return Some ( r )}} None }}}}
+macro_rules! __ra_macro_fixture288 {()=>(); ($N : expr ; $A : ident ; $($X : ident ),* ; $($Y : ident ),* ; $($Y_rev : ident ),*)=>( impl <$A > TupleCollect for ($($X ),*,){ type Item = $A ; type Buffer = [ Option <$A >; $N - 1 ]; # [ allow ( unused_assignments , unused_mut )] fn collect_from_iter < I > ( iter : I , buf : & mut Self :: Buffer )-> Option < Self > where I : IntoIterator < Item = $A > { let mut iter = iter . into_iter (); $(let mut $Y = None ; )* loop {$($Y = iter . next (); if $Y . is_none (){ break })* return Some (($($Y . unwrap ()),*,))} let mut i = 0 ; let mut s = buf . as_mut (); $(if i < s . len (){ s [ i ]= $Y ; i += 1 ; })* return None ; }# [ allow ( unused_assignments )] fn collect_from_iter_no_buf < I > ( iter : I )-> Option < Self > where I : IntoIterator < Item = $A > { let mut iter = iter . into_iter (); loop {$(let $Y = if let Some ($Y )= iter . next (){$Y } else { break ; }; )* return Some (($($Y ),*,))} return None ; } fn num_items ()-> usize {$N } fn left_shift_push (& mut self , item : $A ){ use std :: mem :: replace ; let & mut ($(ref mut $Y ),*,)= self ; let tmp = item ; $(let tmp = replace ($Y_rev , tmp ); )* drop ( tmp ); }})}
+macro_rules! __ra_macro_fixture289 {($($B : ident ),*)=>(# [ allow ( non_snake_case )] impl <$($B : IntoIterator ),*> From < ($($B ,)*)> for Zip < ($($B :: IntoIter ,)*)> { fn from ( t : ($($B ,)*))-> Self { let ($($B ,)*)= t ; Zip { t : ($($B . into_iter (),)*)}}}# [ allow ( non_snake_case )]# [ allow ( unused_assignments )] impl <$($B ),*> Iterator for Zip < ($($B ,)*)> where $($B : Iterator , )* { type Item = ($($B :: Item ,)*); fn next (& mut self )-> Option < Self :: Item > { let ($(ref mut $B ,)*)= self . t ; $(let $B = match $B . next (){ None => return None , Some ( elt )=> elt }; )* Some (($($B ,)*))} fn size_hint (& self )-> ( usize , Option < usize >){ let sh = (:: std :: usize :: MAX , None ); let ($(ref $B ,)*)= self . t ; $(let sh = size_hint :: min ($B . size_hint (), sh ); )* sh }}# [ allow ( non_snake_case )] impl <$($B ),*> ExactSizeIterator for Zip < ($($B ,)*)> where $($B : ExactSizeIterator , )* {}); }
+macro_rules! __ra_macro_fixture290 {( impl $Op : ident for TextRange by fn $f : ident = $op : tt )=>{ impl $Op <& TextSize > for TextRange { type Output = TextRange ; # [ inline ] fn $f ( self , other : & TextSize )-> TextRange { self $op * other }} impl < T > $Op < T > for & TextRange where TextRange : $Op < T , Output = TextRange >, { type Output = TextRange ; # [ inline ] fn $f ( self , other : T )-> TextRange {* self $op other }}}; }
+macro_rules! __ra_macro_fixture291 {( impl $Op : ident for TextSize by fn $f : ident = $op : tt )=>{ impl $Op < TextSize > for TextSize { type Output = TextSize ; # [ inline ] fn $f ( self , other : TextSize )-> TextSize { TextSize { raw : self . raw $op other . raw }}} impl $Op <& TextSize > for TextSize { type Output = TextSize ; # [ inline ] fn $f ( self , other : & TextSize )-> TextSize { self $op * other }} impl < T > $Op < T > for & TextSize where TextSize : $Op < T , Output = TextSize >, { type Output = TextSize ; # [ inline ] fn $f ( self , other : T )-> TextSize {* self $op other }}}; }
+macro_rules! __ra_macro_fixture292 {($expr : expr )=>{ const _: i32 = 0 / $expr as i32 ; }; }
+macro_rules! __ra_macro_fixture293 {($index_type : ty , )=>(); ($index_type : ty , $($len : expr ,)*)=>($(fix_array_impl ! ($index_type , $len );)* ); }
+macro_rules! __ra_macro_fixture294 {($index_type : ty , $len : expr )=>( unsafe impl < T > Array for [ T ; $len ]{ type Item = T ; type Index = $index_type ; const CAPACITY : usize = $len ; # [ doc ( hidden )] fn as_slice (& self )-> & [ Self :: Item ]{ self }# [ doc ( hidden )] fn as_mut_slice (& mut self )-> & mut [ Self :: Item ]{ self }})}
+macro_rules! __ra_macro_fixture295 {($($variant : ident $(($($sub_variant : ident ),*))?),* for $enum : ident )=>{$(impl From <$variant > for $enum { fn from ( it : $variant )-> $enum {$enum ::$variant ( it )}}$($(impl From <$sub_variant > for $enum { fn from ( it : $sub_variant )-> $enum {$enum ::$variant ($variant ::$sub_variant ( it ))}})*)? )* }}
+macro_rules! __ra_macro_fixture296 {($name : ident )=>{ impl $name { pub ( crate ) fn expand_tt (& self , invocation : & str )-> tt :: Subtree { self . try_expand_tt ( invocation ). unwrap ()} fn try_expand_tt (& self , invocation : & str )-> Result < tt :: Subtree , ExpandError > { let source_file = ast :: SourceFile :: parse ( invocation ). tree (); let macro_invocation = source_file . syntax (). descendants (). find_map ( ast :: MacroCall :: cast ). unwrap (); let ( invocation_tt , _)= ast_to_token_tree (& macro_invocation . token_tree (). unwrap ()). ok_or_else (|| ExpandError :: ConversionError )?; self . rules . expand (& invocation_tt ). result ()}# [ allow ( unused )] fn assert_expand_err (& self , invocation : & str , err : & ExpandError ){ assert_eq ! ( self . try_expand_tt ( invocation ). as_ref (), Err ( err )); }# [ allow ( unused )] fn expand_items (& self , invocation : & str )-> SyntaxNode { let expanded = self . expand_tt ( invocation ); token_tree_to_syntax_node (& expanded , FragmentKind :: Items ). unwrap (). 0 . syntax_node ()}# [ allow ( unused )] fn expand_statements (& self , invocation : & str )-> SyntaxNode { let expanded = self . expand_tt ( invocation ); token_tree_to_syntax_node (& expanded , FragmentKind :: Statements ). unwrap (). 0 . syntax_node ()}# [ allow ( unused )] fn expand_expr (& self , invocation : & str )-> SyntaxNode { let expanded = self . expand_tt ( invocation ); token_tree_to_syntax_node (& expanded , FragmentKind :: Expr ). unwrap (). 0 . syntax_node ()}# [ allow ( unused )] fn assert_expand_tt (& self , invocation : & str , expected : & str ){ let expansion = self . expand_tt ( invocation ); assert_eq ! ( expansion . to_string (), expected ); }# [ allow ( unused )] fn assert_expand (& self , invocation : & str , expected : & str ){ let expansion = self . expand_tt ( invocation ); let actual = format ! ( "{:?}" , expansion ); test_utils :: assert_eq_text ! (& expected . trim (), & actual . 
trim ()); } fn assert_expand_items (& self , invocation : & str , expected : & str )-> &$name { self . assert_expansion ( FragmentKind :: Items , invocation , expected ); self }# [ allow ( unused )] fn assert_expand_statements (& self , invocation : & str , expected : & str )-> &$name { self . assert_expansion ( FragmentKind :: Statements , invocation , expected ); self } fn assert_expansion (& self , kind : FragmentKind , invocation : & str , expected : & str ){ let expanded = self . expand_tt ( invocation ); assert_eq ! ( expanded . to_string (), expected ); let expected = expected . replace ( "$crate" , "C_C__C" ); let expected = { let wrapped = format ! ( "wrap_macro!( {} )" , expected ); let wrapped = ast :: SourceFile :: parse (& wrapped ); let wrapped = wrapped . tree (). syntax (). descendants (). find_map ( ast :: TokenTree :: cast ). unwrap (); let mut wrapped = ast_to_token_tree (& wrapped ). unwrap (). 0 ; wrapped . delimiter = None ; wrapped }; let expanded_tree = token_tree_to_syntax_node (& expanded , kind ). unwrap (). 0 . syntax_node (); let expanded_tree = debug_dump_ignore_spaces (& expanded_tree ). trim (). to_string (); let expected_tree = token_tree_to_syntax_node (& expected , kind ). unwrap (). 0 . syntax_node (); let expected_tree = debug_dump_ignore_spaces (& expected_tree ). trim (). to_string (); let expected_tree = expected_tree . replace ( "C_C__C" , "$crate" ); assert_eq ! ( expanded_tree , expected_tree , "\nleft:\n{}\nright:\n{}" , expanded_tree , expected_tree , ); }}}; }
+macro_rules! __ra_macro_fixture297 {($($name : ident ( num_producers : $num_producers : expr , num_consumers : $num_consumers : expr , max_queue_size : $max_queue_size : expr , messages_per_producer : $messages_per_producer : expr , notification_style : $notification_style : expr , timeout : $timeout : expr , delay_seconds : $delay_seconds : expr ); )* )=>{$(# [ test ] fn $name (){ let delay = Duration :: from_secs ($delay_seconds ); run_queue_test ($num_producers , $num_consumers , $max_queue_size , $messages_per_producer , $notification_style , $timeout , delay , ); })* }; }
+macro_rules! __ra_macro_fixture298 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_camel_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture299 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_kebab_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture300 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_mixed_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture301 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_shouty_kebab_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture302 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_shouty_snake_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture303 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_snake_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture304 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_title_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture305 {($($struct_name : ident ),+ $(,)?)=>{$(unsafe impl < E : Endian > Pod for $struct_name < E > {})+ }}
+macro_rules! __ra_macro_fixture306 {($($struct_name : ident ),+ $(,)?)=>{$(unsafe impl Pod for $struct_name {})+ }}
+macro_rules! __ra_macro_fixture307 {($name : ident , {$($in : tt )* })=>{# [ test ] fn $name (){ syn :: parse_file ( stringify ! ($($in )*)). unwrap (); }}}
+macro_rules! __ra_macro_fixture308 {($name : ident , $op : ident )=>{ fn $name ( sets : Vec < Vec <& str >>)-> Vec < String > { let fsts : Vec < Fst <_>> = sets . into_iter (). map ( fst_set ). collect (); let op : OpBuilder = fsts . iter (). collect (); let mut stream = op .$op (). into_stream (); let mut keys = vec ! []; while let Some (( key , _))= stream . next (){ keys . push ( String :: from_utf8 ( key . to_vec ()). unwrap ()); } keys }}; }
+macro_rules! __ra_macro_fixture309 {($name : ident , $op : ident )=>{ fn $name ( sets : Vec < Vec < (& str , u64 )>>)-> Vec < ( String , u64 )> { let fsts : Vec < Fst <_>> = sets . into_iter (). map ( fst_map ). collect (); let op : OpBuilder = fsts . iter (). collect (); let mut stream = op .$op (). into_stream (); let mut keys = vec ! []; while let Some (( key , outs ))= stream . next (){ let merged = outs . iter (). fold ( 0 , | a , b | a + b . value ); let s = String :: from_utf8 ( key . to_vec ()). unwrap (); keys . push (( s , merged )); } keys }}; }
+macro_rules! __ra_macro_fixture310 {($name : ident , $($s : expr ),+)=>{# [ test ] fn $name (){ let mut items = vec ! [$($s ),*]; let fst = fst_set (& items ); let mut rdr = fst . stream (); items . sort (); items . dedup (); for item in & items { assert_eq ! ( rdr . next (). unwrap (). 0 , item . as_bytes ()); } assert_eq ! ( rdr . next (), None ); for item in & items { assert ! ( fst . get ( item ). is_some ()); }}}}
+macro_rules! __ra_macro_fixture311 {($name : ident , $($s : expr ),+)=>{# [ test ]# [ should_panic ] fn $name (){ let mut bfst = Builder :: memory (); $(bfst . add ($s ). unwrap ();)* }}}
+macro_rules! __ra_macro_fixture312 {($name : ident , $($s : expr , $o : expr ),+)=>{# [ test ] fn $name (){ let fst = fst_map ( vec ! [$(($s , $o )),*]); let mut rdr = fst . stream (); $({let ( s , o )= rdr . next (). unwrap (); assert_eq ! (( s , o . value ()), ($s . as_bytes (), $o )); })* assert_eq ! ( rdr . next (), None ); $({assert_eq ! ( fst . get ($s . as_bytes ()), Some ( Output :: new ($o ))); })* }}}
+macro_rules! __ra_macro_fixture313 {($name : ident , $($s : expr , $o : expr ),+)=>{# [ test ]# [ should_panic ] fn $name (){ let mut bfst = Builder :: memory (); $(bfst . insert ($s , $o ). unwrap ();)* }}}
+macro_rules! __ra_macro_fixture314 {($name : ident , min : $min : expr , max : $max : expr , imin : $imin : expr , imax : $imax : expr , $($s : expr ),* )=>{# [ test ] fn $name (){ let items : Vec <& 'static str > = vec ! [$($s ),*]; let items : Vec <_> = items . into_iter (). enumerate (). map (| ( i , k )| ( k , i as u64 )). collect (); let fst = fst_map ( items . clone ()); let mut rdr = Stream :: new ( fst . as_ref (), AlwaysMatch , $min , $max ); for i in $imin ..$imax { assert_eq ! ( rdr . next (). unwrap (), ( items [ i ]. 0 . as_bytes (), Output :: new ( items [ i ]. 1 )), ); } assert_eq ! ( rdr . next (), None ); }}}
+macro_rules! __ra_macro_fixture315 {($ty : ty , $tag : ident )=>{ impl TryFrom < Response > for $ty { type Error = & 'static str ; fn try_from ( value : Response )-> Result < Self , Self :: Error > { match value { Response ::$tag ( res )=> Ok ( res ), _ => Err ( concat ! ( "Failed to convert response to " , stringify ! ($tag ))), }}}}; }
+macro_rules! __ra_macro_fixture316 {( CloneAny )=>{# [ doc = " A type to emulate dynamic typing." ]# [ doc = "" ]# [ doc = " Every type with no non-`\\\'static` references implements `Any`." ] define ! ( CloneAny remainder ); }; ( Any )=>{# [ doc = " A type to emulate dynamic typing with cloning." ]# [ doc = "" ]# [ doc = " Every type with no non-`\\\'static` references that implements `Clone` implements `Any`." ] define ! ( Any remainder ); }; ($t : ident remainder )=>{# [ doc = " See the [`std::any` documentation](https://doc.rust-lang.org/std/any/index.html) for" ]# [ doc = " more details on `Any` in general." ]# [ doc = "" ]# [ doc = " This trait is not `std::any::Any` but rather a type extending that for this library\\u{2019}s" ]# [ doc = " purposes so that it can be combined with marker traits like " ]# [ doc = " <code><a class=trait title=core::marker::Send" ]# [ doc = " href=http://doc.rust-lang.org/std/marker/trait.Send.html>Send</a></code> and" ]# [ doc = " <code><a class=trait title=core::marker::Sync" ]# [ doc = " href=http://doc.rust-lang.org/std/marker/trait.Sync.html>Sync</a></code>." ]# [ doc = "" ] define ! ($t trait ); }; ( CloneAny trait )=>{# [ doc = " See also [`Any`](trait.Any.html) for a version without the `Clone` requirement." ] pub trait CloneAny : Any + CloneToAny {} impl < T : StdAny + Clone > CloneAny for T {}}; ( Any trait )=>{# [ doc = " See also [`CloneAny`](trait.CloneAny.html) for a cloneable version of this trait." ] pub trait Any : StdAny {} impl < T : StdAny > Any for T {}}; }
+macro_rules! __ra_macro_fixture317 {($base : ident , $(+ $bounds : ident )*)=>{ impl fmt :: Debug for $base $(+ $bounds )* {# [ inline ] fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . pad ( stringify ! ($base $(+ $bounds )*))}} impl UncheckedAnyExt for $base $(+ $bounds )* {# [ inline ] unsafe fn downcast_ref_unchecked < T : 'static > (& self )-> & T {&* ( self as * const Self as * const T )}# [ inline ] unsafe fn downcast_mut_unchecked < T : 'static > (& mut self )-> & mut T {& mut * ( self as * mut Self as * mut T )}# [ inline ] unsafe fn downcast_unchecked < T : 'static > ( self : Box < Self >)-> Box < T > { Box :: from_raw ( Box :: into_raw ( self ) as * mut T )}} impl < T : $base $(+ $bounds )*> IntoBox <$base $(+ $bounds )*> for T {# [ inline ] fn into_box ( self )-> Box <$base $(+ $bounds )*> { Box :: new ( self )}}}}
+macro_rules! __ra_macro_fixture318 {($t : ty , $method : ident )=>{ impl Clone for Box <$t > {# [ inline ] fn clone (& self )-> Box <$t > {(** self ).$method ()}}}}
+macro_rules! __ra_macro_fixture319 {( field : $t : ident .$field : ident ; new ()=>$new : expr ; with_capacity ($with_capacity_arg : ident )=>$with_capacity : expr ; )=>{ impl < A : ? Sized + UncheckedAnyExt > $t < A > {# [ doc = " Create an empty collection." ]# [ inline ] pub fn new ()-> $t < A > {$t {$field : $new , }}# [ doc = " Creates an empty collection with the given initial capacity." ]# [ inline ] pub fn with_capacity ($with_capacity_arg : usize )-> $t < A > {$t {$field : $with_capacity , }}# [ doc = " Returns the number of elements the collection can hold without reallocating." ]# [ inline ] pub fn capacity (& self )-> usize { self .$field . capacity ()}# [ doc = " Reserves capacity for at least `additional` more elements to be inserted" ]# [ doc = " in the collection. The collection may reserve more space to avoid" ]# [ doc = " frequent reallocations." ]# [ doc = "" ]# [ doc = " # Panics" ]# [ doc = "" ]# [ doc = " Panics if the new allocation size overflows `usize`." ]# [ inline ] pub fn reserve (& mut self , additional : usize ){ self .$field . reserve ( additional )}# [ doc = " Shrinks the capacity of the collection as much as possible. It will drop" ]# [ doc = " down as much as possible while maintaining the internal rules" ]# [ doc = " and possibly leaving some space in accordance with the resize policy." ]# [ inline ] pub fn shrink_to_fit (& mut self ){ self .$field . shrink_to_fit ()}# [ doc = " Returns the number of items in the collection." ]# [ inline ] pub fn len (& self )-> usize { self .$field . len ()}# [ doc = " Returns true if there are no items in the collection." ]# [ inline ] pub fn is_empty (& self )-> bool { self .$field . is_empty ()}# [ doc = " Removes all items from the collection. Keeps the allocated memory for reuse." ]# [ inline ] pub fn clear (& mut self ){ self .$field . clear ()}}}}
+macro_rules! __ra_macro_fixture320 {($name : ident , $init : ty )=>{# [ test ] fn $name (){ let mut map = <$init >:: new (); assert_eq ! ( map . insert ( A ( 10 )), None ); assert_eq ! ( map . insert ( B ( 20 )), None ); assert_eq ! ( map . insert ( C ( 30 )), None ); assert_eq ! ( map . insert ( D ( 40 )), None ); assert_eq ! ( map . insert ( E ( 50 )), None ); assert_eq ! ( map . insert ( F ( 60 )), None ); match map . entry ::< A > (){ Entry :: Vacant (_)=> unreachable ! (), Entry :: Occupied ( mut view )=>{ assert_eq ! ( view . get (), & A ( 10 )); assert_eq ! ( view . insert ( A ( 100 )), A ( 10 )); }} assert_eq ! ( map . get ::< A > (). unwrap (), & A ( 100 )); assert_eq ! ( map . len (), 6 ); match map . entry ::< B > (){ Entry :: Vacant (_)=> unreachable ! (), Entry :: Occupied ( mut view )=>{ let v = view . get_mut (); let new_v = B ( v . 0 * 10 ); * v = new_v ; }} assert_eq ! ( map . get ::< B > (). unwrap (), & B ( 200 )); assert_eq ! ( map . len (), 6 ); match map . entry ::< C > (){ Entry :: Vacant (_)=> unreachable ! (), Entry :: Occupied ( view )=>{ assert_eq ! ( view . remove (), C ( 30 )); }} assert_eq ! ( map . get ::< C > (), None ); assert_eq ! ( map . len (), 5 ); match map . entry ::< J > (){ Entry :: Occupied (_)=> unreachable ! (), Entry :: Vacant ( view )=>{ assert_eq ! (* view . insert ( J ( 1000 )), J ( 1000 )); }} assert_eq ! ( map . get ::< J > (). unwrap (), & J ( 1000 )); assert_eq ! ( map . len (), 6 ); map . entry ::< B > (). or_insert ( B ( 71 )). 0 += 1 ; assert_eq ! ( map . get ::< B > (). unwrap (), & B ( 201 )); assert_eq ! ( map . len (), 6 ); map . entry ::< C > (). or_insert ( C ( 300 )). 0 += 1 ; assert_eq ! ( map . get ::< C > (). unwrap (), & C ( 301 )); assert_eq ! ( map . len (), 7 ); }}}
+macro_rules! __ra_macro_fixture321 {($(# [$outer : meta ])* pub struct $BitFlags : ident : $T : ty {$($(# [$inner : ident $($args : tt )*])* const $Flag : ident = $value : expr ; )+ })=>{ __bitflags ! {$(# [$outer ])* ( pub )$BitFlags : $T {$($(# [$inner $($args )*])* $Flag = $value ; )+ }}}; ($(# [$outer : meta ])* struct $BitFlags : ident : $T : ty {$($(# [$inner : ident $($args : tt )*])* const $Flag : ident = $value : expr ; )+ })=>{ __bitflags ! {$(# [$outer ])* ()$BitFlags : $T {$($(# [$inner $($args )*])* $Flag = $value ; )+ }}}; ($(# [$outer : meta ])* pub ($($vis : tt )+) struct $BitFlags : ident : $T : ty {$($(# [$inner : ident $($args : tt )*])* const $Flag : ident = $value : expr ; )+ })=>{ __bitflags ! {$(# [$outer ])* ( pub ($($vis )+))$BitFlags : $T {$($(# [$inner $($args )*])* $Flag = $value ; )+ }}}; }
+macro_rules! __ra_macro_fixture322 {($(# [$outer : meta ])* ($($vis : tt )*)$BitFlags : ident : $T : ty {$($(# [$inner : ident $($args : tt )*])* $Flag : ident = $value : expr ; )+ })=>{$(# [$outer ])* # [ derive ( Copy , PartialEq , Eq , Clone , PartialOrd , Ord , Hash )]$($vis )* struct $BitFlags { bits : $T , } __impl_bitflags ! {$BitFlags : $T {$($(# [$inner $($args )*])* $Flag = $value ; )+ }}}; }
+macro_rules! __ra_macro_fixture323 {($BitFlags : ident : $T : ty {$($(# [$attr : ident $($args : tt )*])* $Flag : ident = $value : expr ; )+ })=>{ impl $crate :: _core :: fmt :: Debug for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {# [ allow ( non_snake_case )] trait __BitFlags {$(# [ inline ] fn $Flag (& self )-> bool { false })+ } impl __BitFlags for $BitFlags {$(__impl_bitflags ! {# [ allow ( deprecated )]# [ inline ]$(? # [$attr $($args )*])* fn $Flag (& self )-> bool { if Self ::$Flag . bits == 0 && self . bits != 0 { false } else { self . bits & Self ::$Flag . bits == Self ::$Flag . bits }}})+ } let mut first = true ; $(if <$BitFlags as __BitFlags >::$Flag ( self ){ if ! first { f . write_str ( " | " )?; } first = false ; f . write_str ( __bitflags_stringify ! ($Flag ))?; })+ let extra_bits = self . bits & !$BitFlags :: all (). bits (); if extra_bits != 0 { if ! first { f . write_str ( " | " )?; } first = false ; f . write_str ( "0x" )?; $crate :: _core :: fmt :: LowerHex :: fmt (& extra_bits , f )?; } if first { f . write_str ( "(empty)" )?; } Ok (())}} impl $crate :: _core :: fmt :: Binary for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {$crate :: _core :: fmt :: Binary :: fmt (& self . bits , f )}} impl $crate :: _core :: fmt :: Octal for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {$crate :: _core :: fmt :: Octal :: fmt (& self . bits , f )}} impl $crate :: _core :: fmt :: LowerHex for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {$crate :: _core :: fmt :: LowerHex :: fmt (& self . 
bits , f )}} impl $crate :: _core :: fmt :: UpperHex for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {$crate :: _core :: fmt :: UpperHex :: fmt (& self . bits , f )}}# [ allow ( dead_code )] impl $BitFlags {$($(# [$attr $($args )*])* pub const $Flag : $BitFlags = $BitFlags { bits : $value }; )+ __fn_bitflags ! {# [ doc = " Returns an empty set of flags" ]# [ inline ] pub const fn empty ()-> $BitFlags {$BitFlags { bits : 0 }}} __fn_bitflags ! {# [ doc = " Returns the set containing all flags." ]# [ inline ] pub const fn all ()-> $BitFlags {# [ allow ( non_snake_case )] trait __BitFlags {$(const $Flag : $T = 0 ; )+ } impl __BitFlags for $BitFlags {$(__impl_bitflags ! {# [ allow ( deprecated )]$(? # [$attr $($args )*])* const $Flag : $T = Self ::$Flag . bits ; })+ }$BitFlags { bits : $(<$BitFlags as __BitFlags >::$Flag )|+ }}} __fn_bitflags ! {# [ doc = " Returns the raw value of the flags currently stored." ]# [ inline ] pub const fn bits (& self )-> $T { self . bits }}# [ doc = " Convert from underlying bit representation, unless that" ]# [ doc = " representation contains bits that do not correspond to a flag." ]# [ inline ] pub fn from_bits ( bits : $T )-> $crate :: _core :: option :: Option <$BitFlags > { if ( bits & !$BitFlags :: all (). bits ())== 0 {$crate :: _core :: option :: Option :: Some ($BitFlags { bits })} else {$crate :: _core :: option :: Option :: None }} __fn_bitflags ! {# [ doc = " Convert from underlying bit representation, dropping any bits" ]# [ doc = " that do not correspond to flags." ]# [ inline ] pub const fn from_bits_truncate ( bits : $T )-> $BitFlags {$BitFlags { bits : bits & $BitFlags :: all (). bits }}} __fn_bitflags ! {# [ doc = " Convert from underlying bit representation, preserving all" ]# [ doc = " bits (even those not corresponding to a defined flag)." 
]# [ inline ] pub const unsafe fn from_bits_unchecked ( bits : $T )-> $BitFlags {$BitFlags { bits }}} __fn_bitflags ! {# [ doc = " Returns `true` if no flags are currently stored." ]# [ inline ] pub const fn is_empty (& self )-> bool { self . bits ()== $BitFlags :: empty (). bits ()}} __fn_bitflags ! {# [ doc = " Returns `true` if all flags are currently set." ]# [ inline ] pub const fn is_all (& self )-> bool { self . bits == $BitFlags :: all (). bits }} __fn_bitflags ! {# [ doc = " Returns `true` if there are flags common to both `self` and `other`." ]# [ inline ] pub const fn intersects (& self , other : $BitFlags )-> bool {!$BitFlags { bits : self . bits & other . bits }. is_empty ()}} __fn_bitflags ! {# [ doc = " Returns `true` all of the flags in `other` are contained within `self`." ]# [ inline ] pub const fn contains (& self , other : $BitFlags )-> bool {( self . bits & other . bits )== other . bits }}# [ doc = " Inserts the specified flags in-place." ]# [ inline ] pub fn insert (& mut self , other : $BitFlags ){ self . bits |= other . bits ; }# [ doc = " Removes the specified flags in-place." ]# [ inline ] pub fn remove (& mut self , other : $BitFlags ){ self . bits &= ! other . bits ; }# [ doc = " Toggles the specified flags in-place." ]# [ inline ] pub fn toggle (& mut self , other : $BitFlags ){ self . bits ^= other . bits ; }# [ doc = " Inserts or removes the specified flags depending on the passed value." ]# [ inline ] pub fn set (& mut self , other : $BitFlags , value : bool ){ if value { self . insert ( other ); } else { self . remove ( other ); }}} impl $crate :: _core :: ops :: BitOr for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the union of the two sets of flags." ]# [ inline ] fn bitor ( self , other : $BitFlags )-> $BitFlags {$BitFlags { bits : self . bits | other . bits }}} impl $crate :: _core :: ops :: BitOrAssign for $BitFlags {# [ doc = " Adds the set of flags." 
]# [ inline ] fn bitor_assign (& mut self , other : $BitFlags ){ self . bits |= other . bits ; }} impl $crate :: _core :: ops :: BitXor for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the left flags, but with all the right flags toggled." ]# [ inline ] fn bitxor ( self , other : $BitFlags )-> $BitFlags {$BitFlags { bits : self . bits ^ other . bits }}} impl $crate :: _core :: ops :: BitXorAssign for $BitFlags {# [ doc = " Toggles the set of flags." ]# [ inline ] fn bitxor_assign (& mut self , other : $BitFlags ){ self . bits ^= other . bits ; }} impl $crate :: _core :: ops :: BitAnd for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the intersection between the two sets of flags." ]# [ inline ] fn bitand ( self , other : $BitFlags )-> $BitFlags {$BitFlags { bits : self . bits & other . bits }}} impl $crate :: _core :: ops :: BitAndAssign for $BitFlags {# [ doc = " Disables all flags disabled in the set." ]# [ inline ] fn bitand_assign (& mut self , other : $BitFlags ){ self . bits &= other . bits ; }} impl $crate :: _core :: ops :: Sub for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the set difference of the two sets of flags." ]# [ inline ] fn sub ( self , other : $BitFlags )-> $BitFlags {$BitFlags { bits : self . bits & ! other . bits }}} impl $crate :: _core :: ops :: SubAssign for $BitFlags {# [ doc = " Disables all flags enabled in the set." ]# [ inline ] fn sub_assign (& mut self , other : $BitFlags ){ self . bits &= ! other . bits ; }} impl $crate :: _core :: ops :: Not for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the complement of this set of flags." ]# [ inline ] fn not ( self )-> $BitFlags {$BitFlags { bits : ! self . bits }& $BitFlags :: all ()}} impl $crate :: _core :: iter :: Extend <$BitFlags > for $BitFlags { fn extend < T : $crate :: _core :: iter :: IntoIterator < Item =$BitFlags >> (& mut self , iterator : T ){ for item in iterator { self . 
insert ( item )}}} impl $crate :: _core :: iter :: FromIterator <$BitFlags > for $BitFlags { fn from_iter < T : $crate :: _core :: iter :: IntoIterator < Item =$BitFlags >> ( iterator : T )-> $BitFlags { let mut result = Self :: empty (); result . extend ( iterator ); result }}}; ($(# [$filtered : meta ])* ? # [ cfg $($cfgargs : tt )*]$(? # [$rest : ident $($restargs : tt )*])* fn $($item : tt )* )=>{ __impl_bitflags ! {$(# [$filtered ])* # [ cfg $($cfgargs )*]$(? # [$rest $($restargs )*])* fn $($item )* }}; ($(# [$filtered : meta ])* ? # [$next : ident $($nextargs : tt )*]$(? # [$rest : ident $($restargs : tt )*])* fn $($item : tt )* )=>{ __impl_bitflags ! {$(# [$filtered ])* $(? # [$rest $($restargs )*])* fn $($item )* }}; ($(# [$filtered : meta ])* fn $($item : tt )* )=>{$(# [$filtered ])* fn $($item )* }; ($(# [$filtered : meta ])* ? # [ cfg $($cfgargs : tt )*]$(? # [$rest : ident $($restargs : tt )*])* const $($item : tt )* )=>{ __impl_bitflags ! {$(# [$filtered ])* # [ cfg $($cfgargs )*]$(? # [$rest $($restargs )*])* const $($item )* }}; ($(# [$filtered : meta ])* ? # [$next : ident $($nextargs : tt )*]$(? # [$rest : ident $($restargs : tt )*])* const $($item : tt )* )=>{ __impl_bitflags ! {$(# [$filtered ])* $(? # [$rest $($restargs )*])* const $($item )* }}; ($(# [$filtered : meta ])* const $($item : tt )* )=>{$(# [$filtered ])* const $($item )* }; }
+macro_rules! __ra_macro_fixture324 {($($item : item )*)=>{$(# [ cfg ( feature = "os-poll" )]# [ cfg_attr ( docsrs , doc ( cfg ( feature = "os-poll" )))]$item )* }}
+macro_rules! __ra_macro_fixture325 {($($item : item )*)=>{$(# [ cfg ( not ( feature = "os-poll" ))]$item )* }}
+macro_rules! __ra_macro_fixture326 {($($item : item )*)=>{$(# [ cfg ( any ( feature = "net" , all ( unix , feature = "os-ext" )))]# [ cfg_attr ( docsrs , doc ( any ( feature = "net" , all ( unix , feature = "os-ext" ))))]$item )* }}
+macro_rules! __ra_macro_fixture327 {($($item : item )*)=>{$(# [ cfg ( feature = "net" )]# [ cfg_attr ( docsrs , doc ( cfg ( feature = "net" )))]$item )* }}
+macro_rules! __ra_macro_fixture328 {($($item : item )*)=>{$(# [ cfg ( feature = "os-ext" )]# [ cfg_attr ( docsrs , doc ( cfg ( feature = "os-ext" )))]$item )* }}
+macro_rules! __ra_macro_fixture329 {($name : ident , $read : ident , $bytes : expr , $data : expr )=>{ mod $name { use byteorder :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; use test :: black_box as bb ; use test :: Bencher ; const NITER : usize = 100_000 ; # [ bench ] fn read_big_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( BigEndian ::$read (& buf , $bytes )); }}); }# [ bench ] fn read_little_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( LittleEndian ::$read (& buf , $bytes )); }}); }# [ bench ] fn read_native_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( NativeEndian ::$read (& buf , $bytes )); }}); }}}; ($ty : ident , $max : ident , $read : ident , $write : ident , $size : expr , $data : expr )=>{ mod $ty { use byteorder :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; use std ::$ty ; use test :: black_box as bb ; use test :: Bencher ; const NITER : usize = 100_000 ; # [ bench ] fn read_big_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( BigEndian ::$read (& buf )); }}); }# [ bench ] fn read_little_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( LittleEndian ::$read (& buf )); }}); }# [ bench ] fn read_native_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( NativeEndian ::$read (& buf )); }}); }# [ bench ] fn write_big_endian ( b : & mut Bencher ){ let mut buf = $data ; let n = $ty ::$max ; b . iter (|| { for _ in 0 .. NITER { bb ( BigEndian ::$write (& mut buf , n )); }}); }# [ bench ] fn write_little_endian ( b : & mut Bencher ){ let mut buf = $data ; let n = $ty ::$max ; b . iter (|| { for _ in 0 .. NITER { bb ( LittleEndian ::$write (& mut buf , n )); }}); }# [ bench ] fn write_native_endian ( b : & mut Bencher ){ let mut buf = $data ; let n = $ty ::$max ; b . 
iter (|| { for _ in 0 .. NITER { bb ( NativeEndian ::$write (& mut buf , n )); }}); }}}; }
+macro_rules! __ra_macro_fixture330 {($name : ident , $numty : ty , $read : ident , $write : ident )=>{ mod $name { use std :: mem :: size_of ; use byteorder :: { BigEndian , ByteOrder , LittleEndian }; use rand :: distributions ; use rand :: { self , Rng }; use test :: Bencher ; # [ bench ] fn read_big_endian ( b : & mut Bencher ){ let mut numbers : Vec <$numty > = rand :: thread_rng (). sample_iter (& distributions :: Standard ). take ( 100000 ). collect (); let mut bytes = vec ! [ 0 ; numbers . len ()* size_of ::<$numty > ()]; BigEndian ::$write (& numbers , & mut bytes ); b . bytes = bytes . len () as u64 ; b . iter (|| { BigEndian ::$read (& bytes , & mut numbers ); }); }# [ bench ] fn read_little_endian ( b : & mut Bencher ){ let mut numbers : Vec <$numty > = rand :: thread_rng (). sample_iter (& distributions :: Standard ). take ( 100000 ). collect (); let mut bytes = vec ! [ 0 ; numbers . len ()* size_of ::<$numty > ()]; LittleEndian ::$write (& numbers , & mut bytes ); b . bytes = bytes . len () as u64 ; b . iter (|| { LittleEndian ::$read (& bytes , & mut numbers ); }); }# [ bench ] fn write_big_endian ( b : & mut Bencher ){ let numbers : Vec <$numty > = rand :: thread_rng (). sample_iter (& distributions :: Standard ). take ( 100000 ). collect (); let mut bytes = vec ! [ 0 ; numbers . len ()* size_of ::<$numty > ()]; b . bytes = bytes . len () as u64 ; b . iter (|| { BigEndian ::$write (& numbers , & mut bytes ); }); }# [ bench ] fn write_little_endian ( b : & mut Bencher ){ let numbers : Vec <$numty > = rand :: thread_rng (). sample_iter (& distributions :: Standard ). take ( 100000 ). collect (); let mut bytes = vec ! [ 0 ; numbers . len ()* size_of ::<$numty > ()]; b . bytes = bytes . len () as u64 ; b . iter (|| { LittleEndian ::$write (& numbers , & mut bytes ); }); }}}; }
+macro_rules! __ra_macro_fixture331 {{$($(#$attr : tt )* fn $fn_name : ident ($($arg : tt )*)-> $ret : ty {$($code : tt )* })*}=>($(# [ test ]$(#$attr )* fn $fn_name (){ fn prop ($($arg )*)-> $ret {$($code )* }:: quickcheck :: quickcheck ( quickcheck ! (@ fn prop []$($arg )*)); })* ); (@ fn $f : ident [$($t : tt )*])=>{$f as fn ($($t ),*)-> _ }; (@ fn $f : ident [$($p : tt )*]: $($tail : tt )*)=>{ quickcheck ! (@ fn $f [$($p )* _]$($tail )*)}; (@ fn $f : ident [$($p : tt )*]$t : tt $($tail : tt )*)=>{ quickcheck ! (@ fn $f [$($p )*]$($tail )*)}; }
+macro_rules! __ra_macro_fixture332 {($from : ty =>$to : ty ; $by : ident )=>( impl < 'a > From <$from > for UniCase <$to > { fn from ( s : $from )-> Self { UniCase :: unicode ( s .$by ())}}); ($from : ty =>$to : ty )=>( from_impl ! ($from =>$to ; into ); )}
+macro_rules! __ra_macro_fixture333 {($to : ty )=>( impl < 'a > Into <$to > for UniCase <$to > { fn into ( self )-> $to { self . into_inner ()}}); }
+macro_rules! __ra_macro_fixture334 {($name : ident , $ty : ident )=>{ fn $name ()-> usize { let mut rng = rand_xorshift :: XorShiftRng :: from_seed ([ 123u8 ; 16 ]); let mut mv = MeanAndVariance :: new (); let mut throwaway = 0 ; for _ in 0 .. SAMPLES { let f = loop { let f = $ty :: from_bits ( rng . gen ()); if f . is_finite (){ break f ; }}; let t1 = std :: time :: SystemTime :: now (); for _ in 0 .. ITERATIONS { throwaway += ryu :: Buffer :: new (). format_finite ( f ). len (); } let duration = t1 . elapsed (). unwrap (); let nanos = duration . as_secs ()* 1_000_000_000 + duration . subsec_nanos () as u64 ; mv . update ( nanos as f64 / ITERATIONS as f64 ); } println ! ( "{:12} {:8.3} {:8.3}" , concat ! ( stringify ! ($name ), ":" ), mv . mean , mv . stddev (), ); throwaway }}; }
+macro_rules! __ra_macro_fixture335 {($(# [$doc : meta ])* pub trait $name : ident $($methods : tt )*)=>{ macro_rules ! $name {($m : ident $extra : tt )=>{$m ! {$extra pub trait $name $($methods )* }}} remove_sections ! {[]$(# [$doc ])* pub trait $name $($methods )* }}}
+macro_rules! __ra_macro_fixture336 {($name : ident <$($typarm : tt ),*> where {$($bounds : tt )* } item : $item : ty , iter : $iter : ty , )=>( pub struct $name <$($typarm ),*> where $($bounds )* { iter : $iter , } impl <$($typarm ),*> Iterator for $name <$($typarm ),*> where $($bounds )* { type Item = $item ; # [ inline ] fn next (& mut self )-> Option < Self :: Item > { self . iter . next ()}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()}}); }
+macro_rules! __ra_macro_fixture337 {($($fmt_trait : ident )*)=>{$(impl < 'a , I > fmt ::$fmt_trait for Format < 'a , I > where I : Iterator , I :: Item : fmt ::$fmt_trait , { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { self . format ( f , fmt ::$fmt_trait :: fmt )}})* }}
+macro_rules! __ra_macro_fixture338 {($($t : ty ),*)=>{$(not_zero_impl ! ($t , 0 ); )* }}
+macro_rules! __ra_macro_fixture339 {($name : ident )=>{ impl Clone for $name {# [ inline ] fn clone (& self )-> Self {* self }}}; }
+macro_rules! __ra_macro_fixture340 {([$($stack : tt )*])=>{$($stack )* }; ([$($stack : tt )*]{$($tail : tt )* })=>{$($stack )* { remove_sections_inner ! ([]$($tail )*); }}; ([$($stack : tt )*]$t : tt $($tail : tt )*)=>{ remove_sections ! ([$($stack )* $t ]$($tail )*); }; }
+macro_rules! __ra_macro_fixture341 {($t : ty ,$z : expr )=>{ impl Zero for $t { fn zero ()-> Self {$z as $t } fn is_zero (& self )-> bool { self == & Self :: zero ()}}}; }
+macro_rules! __ra_macro_fixture342 {($($ident : ident ),* $(,)?)=>{$(# [ allow ( bad_style )] pub const $ident : super :: Name = super :: Name :: new_inline ( stringify ! ($ident )); )* }; }
+macro_rules! __ra_macro_fixture343 {($($trait : ident =>$expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinDeriveExpander {$($trait ),* } impl BuiltinDeriveExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> Result < tt :: Subtree , mbe :: ExpandError > { let expander = match * self {$(BuiltinDeriveExpander ::$trait =>$expand , )* }; expander ( db , id , tt )} fn find_by_name ( name : & name :: Name )-> Option < Self > { match name {$(id if id == & name :: name ! [$trait ]=> Some ( BuiltinDeriveExpander ::$trait ), )* _ => None , }}}}; }
+macro_rules! __ra_macro_fixture344 {( LAZY : $(($name : ident , $kind : ident )=>$expand : ident ),* , EAGER : $(($e_name : ident , $e_kind : ident )=>$e_expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinFnLikeExpander {$($kind ),* }# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum EagerExpander {$($e_kind ),* } impl BuiltinFnLikeExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> ExpandResult < tt :: Subtree > { let expander = match * self {$(BuiltinFnLikeExpander ::$kind =>$expand , )* }; expander ( db , id , tt )}} impl EagerExpander { pub fn expand (& self , db : & dyn AstDatabase , arg_id : EagerMacroId , tt : & tt :: Subtree , )-> ExpandResult < Option < ( tt :: Subtree , FragmentKind )>> { let expander = match * self {$(EagerExpander ::$e_kind =>$e_expand , )* }; expander ( db , arg_id , tt )}} fn find_by_name ( ident : & name :: Name )-> Option < Either < BuiltinFnLikeExpander , EagerExpander >> { match ident {$(id if id == & name :: name ! [$name ]=> Some ( Either :: Left ( BuiltinFnLikeExpander ::$kind )), )* $(id if id == & name :: name ! [$e_name ]=> Some ( Either :: Right ( EagerExpander ::$e_kind )), )* _ => return None , }}}; }
+macro_rules! __ra_macro_fixture345 {($($ty : ty =>$this : ident $im : block );*)=>{$(impl ToTokenTree for $ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . into (); leaf . into ()}} impl ToTokenTree for &$ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . clone (). into (); leaf . into ()}})* }}
+macro_rules! __ra_macro_fixture346 {($name : ident )=>{ impl $crate :: salsa :: InternKey for $name { fn from_intern_id ( v : $crate :: salsa :: InternId )-> Self {$name ( v )} fn as_intern_id (& self )-> $crate :: salsa :: InternId { self . 0 }}}; }
+macro_rules! __ra_macro_fixture347 {($($var : ident ($t : ty )),+ )=>{$(impl From <$t > for AttrOwner { fn from ( t : $t )-> AttrOwner { AttrOwner ::$var ( t )}})+ }; }
+macro_rules! __ra_macro_fixture348 {($($typ : ident in $fld : ident -> $ast : ty ),+ $(,)? )=>{# [ derive ( Debug , Copy , Clone , Eq , PartialEq , Hash )] pub enum ModItem {$($typ ( FileItemTreeId <$typ >), )+ }$(impl From < FileItemTreeId <$typ >> for ModItem { fn from ( id : FileItemTreeId <$typ >)-> ModItem { ModItem ::$typ ( id )}})+ $(impl ItemTreeNode for $typ { type Source = $ast ; fn ast_id (& self )-> FileAstId < Self :: Source > { self . ast_id } fn lookup ( tree : & ItemTree , index : Idx < Self >)-> & Self {& tree . data ().$fld [ index ]} fn id_from_mod_item ( mod_item : ModItem )-> Option < FileItemTreeId < Self >> { if let ModItem ::$typ ( id )= mod_item { Some ( id )} else { None }} fn id_to_mod_item ( id : FileItemTreeId < Self >)-> ModItem { ModItem ::$typ ( id )}} impl Index < Idx <$typ >> for ItemTree { type Output = $typ ; fn index (& self , index : Idx <$typ >)-> & Self :: Output {& self . data ().$fld [ index ]}})+ }; }
+macro_rules! __ra_macro_fixture349 {($($fld : ident : $t : ty ),+ $(,)? )=>{$(impl Index < Idx <$t >> for ItemTree { type Output = $t ; fn index (& self , index : Idx <$t >)-> & Self :: Output {& self . data ().$fld [ index ]}})+ }; }
+macro_rules! __ra_macro_fixture350 {($e : ident {$($v : ident ($t : ty )),* $(,)? })=>{$(impl From <$t > for $e { fn from ( it : $t )-> $e {$e ::$v ( it )}})* }}
+macro_rules! __ra_macro_fixture351 {($id : ident , $loc : ident , $intern : ident , $lookup : ident )=>{ impl_intern_key ! ($id ); impl Intern for $loc { type ID = $id ; fn intern ( self , db : & dyn db :: DefDatabase )-> $id { db .$intern ( self )}} impl Lookup for $id { type Data = $loc ; fn lookup (& self , db : & dyn db :: DefDatabase )-> $loc { db .$lookup (* self )}}}; }
+macro_rules! __ra_macro_fixture352 {([$derives : ident $($derive_t : tt )*]=>$(# [$($attrs : tt )*])* $inner : path )=>{# [ proc_macro_derive ($derives $($derive_t )*)]# [ allow ( non_snake_case )]$(# [$($attrs )*])* pub fn $derives ( i : $crate :: macros :: TokenStream )-> $crate :: macros :: TokenStream { match $crate :: macros :: parse ::<$crate :: macros :: DeriveInput > ( i ){ Ok ( p )=>{ match $crate :: Structure :: try_new (& p ){ Ok ( s )=>$crate :: MacroResult :: into_stream ($inner ( s )), Err ( e )=> e . to_compile_error (). into (), }} Err ( e )=> e . to_compile_error (). into (), }}}; }
+macro_rules! __ra_macro_fixture353 {($I : ident =>$t : ty )=>{ impl <$I : Interner > Zip <$I > for $t { fn zip_with < 'i , Z : Zipper < 'i , $I >> ( _zipper : & mut Z , _variance : Variance , a : & Self , b : & Self , )-> Fallible < ()> where I : 'i , { if a != b { return Err ( NoSolution ); } Ok (())}}}; }
+macro_rules! __ra_macro_fixture354 {($($n : ident ),*)=>{ impl <$($n : Fold < I >,)* I : Interner > Fold < I > for ($($n ,)*){ type Result = ($($n :: Result ,)*); fn fold_with < 'i > ( self , folder : & mut dyn Folder < 'i , I >, outer_binder : DebruijnIndex )-> Fallible < Self :: Result > where I : 'i , {# [ allow ( non_snake_case )] let ($($n ),*)= self ; Ok (($($n . fold_with ( folder , outer_binder )?,)*))}}}}
+macro_rules! __ra_macro_fixture355 {($t : ty )=>{ impl < I : Interner > $crate :: fold :: Fold < I > for $t { type Result = Self ; fn fold_with < 'i > ( self , _folder : & mut dyn ($crate :: fold :: Folder < 'i , I >), _outer_binder : DebruijnIndex , )-> :: chalk_ir :: Fallible < Self :: Result > where I : 'i , { Ok ( self )}}}; }
+macro_rules! __ra_macro_fixture356 {($t : ident )=>{ impl < I : Interner > $crate :: fold :: Fold < I > for $t < I > { type Result = $t < I >; fn fold_with < 'i > ( self , _folder : & mut dyn ($crate :: fold :: Folder < 'i , I >), _outer_binder : DebruijnIndex , )-> :: chalk_ir :: Fallible < Self :: Result > where I : 'i , { Ok ( self )}}}; }
+macro_rules! __ra_macro_fixture357 {($($n : ident ),*)=>{ impl <$($n : Visit < I >,)* I : Interner > Visit < I > for ($($n ,)*){ fn visit_with < 'i , BT > (& self , visitor : & mut dyn Visitor < 'i , I , BreakTy = BT >, outer_binder : DebruijnIndex )-> ControlFlow < BT > where I : 'i {# [ allow ( non_snake_case )] let & ($(ref $n ),*)= self ; $(try_break ! ($n . visit_with ( visitor , outer_binder )); )* ControlFlow :: CONTINUE }}}}
+macro_rules! __ra_macro_fixture358 {($t : ty )=>{ impl < I : Interner > $crate :: visit :: Visit < I > for $t { fn visit_with < 'i , B > (& self , _visitor : & mut dyn ($crate :: visit :: Visitor < 'i , I , BreakTy = B >), _outer_binder : DebruijnIndex , )-> ControlFlow < B > where I : 'i , { ControlFlow :: CONTINUE }}}; }
+macro_rules! __ra_macro_fixture359 {($t : ident )=>{ impl < I : Interner > $crate :: visit :: Visit < I > for $t < I > { fn visit_with < 'i , B > (& self , _visitor : & mut dyn ($crate :: visit :: Visitor < 'i , I , BreakTy = B >), _outer_binder : DebruijnIndex , )-> ControlFlow < B > where I : 'i , { ControlFlow :: CONTINUE }}}; }
+macro_rules! __ra_macro_fixture360 {( for ($($t : tt )*)$u : ty )=>{ impl <$($t )*> CastTo <$u > for $u { fn cast_to ( self , _interner : &<$u as HasInterner >:: Interner )-> $u { self }}}; ($u : ty )=>{ impl CastTo <$u > for $u { fn cast_to ( self , interner : &<$u as HasInterner >:: Interner )-> $u { self }}}; }
+macro_rules! __ra_macro_fixture361 {($($id : ident ), *)=>{$(impl < I : Interner > std :: fmt :: Debug for $id < I > { fn fmt (& self , fmt : & mut std :: fmt :: Formatter < '_ >)-> Result < (), std :: fmt :: Error > { write ! ( fmt , "{}({:?})" , stringify ! ($id ), self . 0 )}})* }; }
+macro_rules! __ra_macro_fixture362 {($seq : ident , $data : ident =>$elem : ty , $intern : ident =>$interned : ident )=>{ interned_slice_common ! ($seq , $data =>$elem , $intern =>$interned ); impl < I : Interner > $seq < I > {# [ doc = " Tries to create a sequence using an iterator of element-like things." ] pub fn from_fallible < E > ( interner : & I , elements : impl IntoIterator < Item = Result < impl CastTo <$elem >, E >>, )-> Result < Self , E > { Ok ( Self { interned : I ::$intern ( interner , elements . into_iter (). casted ( interner ))?, })}# [ doc = " Create a sequence from elements" ] pub fn from_iter ( interner : & I , elements : impl IntoIterator < Item = impl CastTo <$elem >>, )-> Self { Self :: from_fallible ( interner , elements . into_iter (). map (| el | -> Result <$elem , ()> { Ok ( el . cast ( interner ))}), ). unwrap ()}# [ doc = " Create a sequence from a single element." ] pub fn from1 ( interner : & I , element : impl CastTo <$elem >)-> Self { Self :: from_iter ( interner , Some ( element ))}}}; }
+macro_rules! __ra_macro_fixture363 {($seq : ident , $data : ident =>$elem : ty , $intern : ident =>$interned : ident )=>{# [ doc = " List of interned elements." ]# [ derive ( Copy , Clone , PartialEq , Eq , Hash , PartialOrd , Ord , HasInterner )] pub struct $seq < I : Interner > { interned : I ::$interned , } impl < I : Interner > $seq < I > {# [ doc = " Get the interned elements." ] pub fn interned (& self )-> & I ::$interned {& self . interned }# [ doc = " Returns a slice containing the elements." ] pub fn as_slice (& self , interner : & I )-> & [$elem ]{ Interner ::$data ( interner , & self . interned )}# [ doc = " Index into the sequence." ] pub fn at (& self , interner : & I , index : usize )-> &$elem {& self . as_slice ( interner )[ index ]}# [ doc = " Create an empty sequence." ] pub fn empty ( interner : & I )-> Self { Self :: from_iter ( interner , None ::<$elem >)}# [ doc = " Check whether this is an empty sequence." ] pub fn is_empty (& self , interner : & I )-> bool { self . as_slice ( interner ). is_empty ()}# [ doc = " Get an iterator over the elements of the sequence." ] pub fn iter (& self , interner : & I )-> std :: slice :: Iter < '_ , $elem > { self . as_slice ( interner ). iter ()}# [ doc = " Get the length of the sequence." ] pub fn len (& self , interner : & I )-> usize { self . as_slice ( interner ). len ()}}}; }
+macro_rules! __ra_macro_fixture364 {($(# [$attrs : meta ])* $vis : vis static $name : ident : $ty : ty )=>($(# [$attrs ])* $vis static $name : $crate :: ScopedKey <$ty > = $crate :: ScopedKey { inner : { thread_local ! ( static FOO : :: std :: cell :: Cell < usize > = {:: std :: cell :: Cell :: new ( 0 )}); & FOO }, _marker : :: std :: marker :: PhantomData , }; )}
+macro_rules! __ra_macro_fixture365 {($(($def : path , $ast : path , $meth : ident )),* ,)=>{$(impl ToDef for $ast { type Def = $def ; fn to_def ( sema : & SemanticsImpl , src : InFile < Self >)-> Option < Self :: Def > { sema . with_ctx (| ctx | ctx .$meth ( src )). map (<$def >:: from )}})*}}
+macro_rules! __ra_macro_fixture366 {($(($id : path , $ty : path )),*)=>{$(impl From <$id > for $ty { fn from ( id : $id )-> $ty {$ty { id }}} impl From <$ty > for $id { fn from ( ty : $ty )-> $id { ty . id }})*}}
+macro_rules! __ra_macro_fixture367 {($(($def : ident , $def_id : ident ),)*)=>{$(impl HasAttrs for $def { fn attrs ( self , db : & dyn HirDatabase )-> Attrs { let def = AttrDefId ::$def_id ( self . into ()); db . attrs ( def )} fn docs ( self , db : & dyn HirDatabase )-> Option < Documentation > { let def = AttrDefId ::$def_id ( self . into ()); db . attrs ( def ). docs ()} fn resolve_doc_path ( self , db : & dyn HirDatabase , link : & str , ns : Option < Namespace >)-> Option < ModuleDef > { let def = AttrDefId ::$def_id ( self . into ()); resolve_doc_path ( db , def , link , ns ). map ( ModuleDef :: from )}})*}; }
+macro_rules! __ra_macro_fixture368 {($($variant : ident ),* for $enum : ident )=>{$(impl HasAttrs for $variant { fn attrs ( self , db : & dyn HirDatabase )-> Attrs {$enum ::$variant ( self ). attrs ( db )} fn docs ( self , db : & dyn HirDatabase )-> Option < Documentation > {$enum ::$variant ( self ). docs ( db )} fn resolve_doc_path ( self , db : & dyn HirDatabase , link : & str , ns : Option < Namespace >)-> Option < ModuleDef > {$enum ::$variant ( self ). resolve_doc_path ( db , link , ns )}})*}; }
+macro_rules! __ra_macro_fixture369 {{$($(#$attr : tt )* fn $fn_name : ident ($($arg : tt )*)-> $ret : ty {$($code : tt )* })*}=>($(# [ test ]$(#$attr )* fn $fn_name (){ fn prop ($($arg )*)-> $ret {$($code )* }:: quickcheck :: quickcheck ( quickcheck ! (@ fn prop []$($arg )*)); })* ); (@ fn $f : ident [$($t : tt )*])=>{$f as fn ($($t ),*)-> _ }; (@ fn $f : ident [$($p : tt )*]: $($tail : tt )*)=>{ quickcheck ! (@ fn $f [$($p )* _]$($tail )*)}; (@ fn $f : ident [$($p : tt )*]$t : tt $($tail : tt )*)=>{ quickcheck ! (@ fn $f [$($p )*]$($tail )*)}; }
+macro_rules! __ra_macro_fixture370 {($($bool : expr , )+)=>{ fn _static_assert (){$(let _ = std :: mem :: transmute ::< [ u8 ; $bool as usize ], u8 >; )+ }}}
+macro_rules! __ra_macro_fixture371 {($ty : ident is $($marker : ident ) and +)=>{# [ test ]# [ allow ( non_snake_case )] fn $ty (){ fn assert_implemented < T : $($marker +)+> (){} assert_implemented ::<$ty > (); }}; ($ty : ident is not $($marker : ident ) or +)=>{# [ test ]# [ allow ( non_snake_case )] fn $ty (){$({trait IsNotImplemented { fn assert_not_implemented (){}} impl < T : $marker > IsNotImplemented for T {} trait IsImplemented { fn assert_not_implemented (){}} impl IsImplemented for $ty {}<$ty >:: assert_not_implemented (); })+ }}; }
+macro_rules! __ra_macro_fixture372 {($($types : ident )*)=>{$(assert_impl ! ($types is UnwindSafe and RefUnwindSafe ); )* }; }
+macro_rules! __ra_macro_fixture373 {($($(# [$attr : meta ])* $name : ident ($value : expr , $expected : expr )),* )=>{$($(# [$attr ])* # [ test ] fn $name (){# [ cfg ( feature = "std" )]{ let mut buf = [ b'\0' ; 40 ]; let len = itoa :: write (& mut buf [..], $value ). unwrap (); assert_eq ! (& buf [ 0 .. len ], $expected . as_bytes ()); } let mut s = String :: new (); itoa :: fmt (& mut s , $value ). unwrap (); assert_eq ! ( s , $expected ); })* }}
+macro_rules! __ra_macro_fixture374 {($($name : ident =>$description : expr ,)+)=>{# [ doc = " Errors that can occur during parsing." ]# [ doc = "" ]# [ doc = " This may be extended in the future so exhaustive matching is" ]# [ doc = " discouraged with an unused variant." ]# [ allow ( clippy :: manual_non_exhaustive )]# [ derive ( PartialEq , Eq , Clone , Copy , Debug )] pub enum ParseError {$($name , )+ # [ doc = " Unused variant enable non-exhaustive matching" ]# [ doc ( hidden )] __FutureProof , } impl fmt :: Display for ParseError { fn fmt (& self , fmt : & mut Formatter < '_ >)-> fmt :: Result { match * self {$(ParseError ::$name => fmt . write_str ($description ), )+ ParseError :: __FutureProof =>{ unreachable ! ( "Don't abuse the FutureProof!" ); }}}}}}
+macro_rules! __ra_macro_fixture375 {($($name : ident =>$description : expr ,)+)=>{# [ doc = " Non-fatal syntax violations that can occur during parsing." ]# [ doc = "" ]# [ doc = " This may be extended in the future so exhaustive matching is" ]# [ doc = " discouraged with an unused variant." ]# [ allow ( clippy :: manual_non_exhaustive )]# [ derive ( PartialEq , Eq , Clone , Copy , Debug )] pub enum SyntaxViolation {$($name , )+ # [ doc = " Unused variant enable non-exhaustive matching" ]# [ doc ( hidden )] __FutureProof , } impl SyntaxViolation { pub fn description (& self )-> & 'static str { match * self {$(SyntaxViolation ::$name =>$description , )+ SyntaxViolation :: __FutureProof =>{ unreachable ! ( "Don't abuse the FutureProof!" ); }}}}}}
+macro_rules! __ra_macro_fixture376 {('owned : $($oty : ident ,)* 'interned : $($ity : ident ,)* )=>{# [ repr ( C )]# [ allow ( non_snake_case )] pub struct HandleCounters {$($oty : AtomicUsize ,)* $($ity : AtomicUsize ,)* } impl HandleCounters { extern "C" fn get ()-> & 'static Self { static COUNTERS : HandleCounters = HandleCounters {$($oty : AtomicUsize :: new ( 1 ),)* $($ity : AtomicUsize :: new ( 1 ),)* }; & COUNTERS }}# [ repr ( C )]# [ allow ( non_snake_case )] pub ( super ) struct HandleStore < S : server :: Types > {$($oty : handle :: OwnedStore < S ::$oty >,)* $($ity : handle :: InternedStore < S ::$ity >,)* } impl < S : server :: Types > HandleStore < S > { pub ( super ) fn new ( handle_counters : & 'static HandleCounters )-> Self { HandleStore {$($oty : handle :: OwnedStore :: new (& handle_counters .$oty ),)* $($ity : handle :: InternedStore :: new (& handle_counters .$ity ),)* }}}$(# [ repr ( C )] pub struct $oty ( pub ( crate ) handle :: Handle ); impl Drop for $oty { fn drop (& mut self ){$oty ( self . 0 ). drop (); }} impl < S > Encode < S > for $oty { fn encode ( self , w : & mut Writer , s : & mut S ){ let handle = self . 0 ; mem :: forget ( self ); handle . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , '_ , HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & mut HandleStore < server :: MarkedTypes < S >>)-> Self { s .$oty . take ( handle :: Handle :: decode ( r , & mut ()))}} impl < S > Encode < S > for &$oty { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . 
encode ( w , s ); }} impl < 's , S : server :: Types ,> Decode < '_ , 's , HandleStore < server :: MarkedTypes < S >>> for & 's Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & 's HandleStore < server :: MarkedTypes < S >>)-> Self {& s .$oty [ handle :: Handle :: decode ( r , & mut ())]}} impl < S > Encode < S > for & mut $oty { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < 's , S : server :: Types > DecodeMut < '_ , 's , HandleStore < server :: MarkedTypes < S >>> for & 's mut Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & 's mut HandleStore < server :: MarkedTypes < S >> )-> Self {& mut s .$oty [ handle :: Handle :: decode ( r , & mut ())]}} impl < S : server :: Types > Encode < HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$oty , $oty > { fn encode ( self , w : & mut Writer , s : & mut HandleStore < server :: MarkedTypes < S >>){ s .$oty . alloc ( self ). encode ( w , s ); }} impl < S > DecodeMut < '_ , '_ , S > for $oty { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$oty ( handle :: Handle :: decode ( r , s ))}})* $(# [ repr ( C )]# [ derive ( Copy , Clone , PartialEq , Eq , Hash )] pub ( crate ) struct $ity ( handle :: Handle ); impl < S > Encode < S > for $ity { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , '_ , HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$ity , $ity > { fn decode ( r : & mut Reader < '_ >, s : & mut HandleStore < server :: MarkedTypes < S >>)-> Self { s .$ity . copy ( handle :: Handle :: decode ( r , & mut ()))}} impl < S : server :: Types > Encode < HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$ity , $ity > { fn encode ( self , w : & mut Writer , s : & mut HandleStore < server :: MarkedTypes < S >>){ s .$ity . alloc ( self ). 
encode ( w , s ); }} impl < S > DecodeMut < '_ , '_ , S > for $ity { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$ity ( handle :: Handle :: decode ( r , s ))}})* }}
+macro_rules! __ra_macro_fixture377 {($S : ident , $self : ident , $m : ident )=>{$m ! { FreeFunctions { fn drop ($self : $S :: FreeFunctions ); fn track_env_var ( var : & str , value : Option <& str >); }, TokenStream { fn drop ($self : $S :: TokenStream ); fn clone ($self : &$S :: TokenStream )-> $S :: TokenStream ; fn new ()-> $S :: TokenStream ; fn is_empty ($self : &$S :: TokenStream )-> bool ; fn from_str ( src : & str )-> $S :: TokenStream ; fn to_string ($self : &$S :: TokenStream )-> String ; fn from_token_tree ( tree : TokenTree <$S :: Group , $S :: Punct , $S :: Ident , $S :: Literal >, )-> $S :: TokenStream ; fn into_iter ($self : $S :: TokenStream )-> $S :: TokenStreamIter ; }, TokenStreamBuilder { fn drop ($self : $S :: TokenStreamBuilder ); fn new ()-> $S :: TokenStreamBuilder ; fn push ($self : & mut $S :: TokenStreamBuilder , stream : $S :: TokenStream ); fn build ($self : $S :: TokenStreamBuilder )-> $S :: TokenStream ; }, TokenStreamIter { fn drop ($self : $S :: TokenStreamIter ); fn clone ($self : &$S :: TokenStreamIter )-> $S :: TokenStreamIter ; fn next ($self : & mut $S :: TokenStreamIter , )-> Option < TokenTree <$S :: Group , $S :: Punct , $S :: Ident , $S :: Literal >>; }, Group { fn drop ($self : $S :: Group ); fn clone ($self : &$S :: Group )-> $S :: Group ; fn new ( delimiter : Delimiter , stream : $S :: TokenStream )-> $S :: Group ; fn delimiter ($self : &$S :: Group )-> Delimiter ; fn stream ($self : &$S :: Group )-> $S :: TokenStream ; fn span ($self : &$S :: Group )-> $S :: Span ; fn span_open ($self : &$S :: Group )-> $S :: Span ; fn span_close ($self : &$S :: Group )-> $S :: Span ; fn set_span ($self : & mut $S :: Group , span : $S :: Span ); }, Punct { fn new ( ch : char , spacing : Spacing )-> $S :: Punct ; fn as_char ($self : $S :: Punct )-> char ; fn spacing ($self : $S :: Punct )-> Spacing ; fn span ($self : $S :: Punct )-> $S :: Span ; fn with_span ($self : $S :: Punct , span : $S :: Span )-> $S :: Punct ; }, Ident { fn new 
( string : & str , span : $S :: Span , is_raw : bool )-> $S :: Ident ; fn span ($self : $S :: Ident )-> $S :: Span ; fn with_span ($self : $S :: Ident , span : $S :: Span )-> $S :: Ident ; }, Literal { fn drop ($self : $S :: Literal ); fn clone ($self : &$S :: Literal )-> $S :: Literal ; fn debug_kind ($self : &$S :: Literal )-> String ; fn symbol ($self : &$S :: Literal )-> String ; fn suffix ($self : &$S :: Literal )-> Option < String >; fn integer ( n : & str )-> $S :: Literal ; fn typed_integer ( n : & str , kind : & str )-> $S :: Literal ; fn float ( n : & str )-> $S :: Literal ; fn f32 ( n : & str )-> $S :: Literal ; fn f64 ( n : & str )-> $S :: Literal ; fn string ( string : & str )-> $S :: Literal ; fn character ( ch : char )-> $S :: Literal ; fn byte_string ( bytes : & [ u8 ])-> $S :: Literal ; fn span ($self : &$S :: Literal )-> $S :: Span ; fn set_span ($self : & mut $S :: Literal , span : $S :: Span ); fn subspan ($self : &$S :: Literal , start : Bound < usize >, end : Bound < usize >, )-> Option <$S :: Span >; }, SourceFile { fn drop ($self : $S :: SourceFile ); fn clone ($self : &$S :: SourceFile )-> $S :: SourceFile ; fn eq ($self : &$S :: SourceFile , other : &$S :: SourceFile )-> bool ; fn path ($self : &$S :: SourceFile )-> String ; fn is_real ($self : &$S :: SourceFile )-> bool ; }, MultiSpan { fn drop ($self : $S :: MultiSpan ); fn new ()-> $S :: MultiSpan ; fn push ($self : & mut $S :: MultiSpan , span : $S :: Span ); }, Diagnostic { fn drop ($self : $S :: Diagnostic ); fn new ( level : Level , msg : & str , span : $S :: MultiSpan )-> $S :: Diagnostic ; fn sub ($self : & mut $S :: Diagnostic , level : Level , msg : & str , span : $S :: MultiSpan , ); fn emit ($self : $S :: Diagnostic ); }, Span { fn debug ($self : $S :: Span )-> String ; fn def_site ()-> $S :: Span ; fn call_site ()-> $S :: Span ; fn mixed_site ()-> $S :: Span ; fn source_file ($self : $S :: Span )-> $S :: SourceFile ; fn parent ($self : $S :: Span )-> Option <$S :: Span >; fn 
source ($self : $S :: Span )-> $S :: Span ; fn start ($self : $S :: Span )-> LineColumn ; fn end ($self : $S :: Span )-> LineColumn ; fn join ($self : $S :: Span , other : $S :: Span )-> Option <$S :: Span >; fn resolved_at ($self : $S :: Span , at : $S :: Span )-> $S :: Span ; fn source_text ($self : $S :: Span )-> Option < String >; }, }}; }
+macro_rules! __ra_macro_fixture378 {( le $ty : ty )=>{ impl < S > Encode < S > for $ty { fn encode ( self , w : & mut Writer , _: & mut S ){ w . write_all (& self . to_le_bytes ()). unwrap (); }} impl < S > DecodeMut < '_ , '_ , S > for $ty { fn decode ( r : & mut Reader < '_ >, _: & mut S )-> Self { const N : usize = :: std :: mem :: size_of ::<$ty > (); let mut bytes = [ 0 ; N ]; bytes . copy_from_slice (& r [.. N ]); * r = & r [ N ..]; Self :: from_le_bytes ( bytes )}}}; ( struct $name : ident {$($field : ident ),* $(,)? })=>{ impl < S > Encode < S > for $name { fn encode ( self , w : & mut Writer , s : & mut S ){$(self .$field . encode ( w , s );)* }} impl < S > DecodeMut < '_ , '_ , S > for $name { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$name {$($field : DecodeMut :: decode ( r , s )),* }}}}; ( enum $name : ident $(<$($T : ident ),+>)? {$($variant : ident $(($field : ident ))*),* $(,)? })=>{ impl < S , $($($T : Encode < S >),+)?> Encode < S > for $name $(<$($T ),+>)? { fn encode ( self , w : & mut Writer , s : & mut S ){# [ allow ( non_upper_case_globals )] mod tag {# [ repr ( u8 )] enum Tag {$($variant ),* }$(pub const $variant : u8 = Tag ::$variant as u8 ;)* } match self {$($name ::$variant $(($field ))* =>{ tag ::$variant . encode ( w , s ); $($field . encode ( w , s );)* })* }}} impl < 'a , S , $($($T : for < 's > DecodeMut < 'a , 's , S >),+)?> DecodeMut < 'a , '_ , S > for $name $(<$($T ),+>)? { fn decode ( r : & mut Reader < 'a >, s : & mut S )-> Self {# [ allow ( non_upper_case_globals )] mod tag {# [ repr ( u8 )] enum Tag {$($variant ),* }$(pub const $variant : u8 = Tag ::$variant as u8 ;)* } match u8 :: decode ( r , s ){$(tag ::$variant =>{$(let $field = DecodeMut :: decode ( r , s );)* $name ::$variant $(($field ))* })* _ => unreachable ! (), }}}}}
+macro_rules! __ra_macro_fixture379 {($($ty : ty ),* $(,)?)=>{$(impl Mark for $ty { type Unmarked = Self ; fn mark ( unmarked : Self :: Unmarked )-> Self { unmarked }} impl Unmark for $ty { type Unmarked = Self ; fn unmark ( self )-> Self :: Unmarked { self }})* }}
+macro_rules! __ra_macro_fixture380 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )*;)* }),* $(,)?)=>{$(impl $name {# [ allow ( unused )]$(pub ( crate ) fn $method ($($arg : $arg_ty ),*)$(-> $ret_ty )* { panic ! ( "hello" ); })* })* }}
+macro_rules! __ra_macro_fixture381 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ pub trait Types {$(associated_item ! ( type $name );)* }$(pub trait $name : Types {$(associated_item ! ( fn $method (& mut self , $($arg : $arg_ty ),*)$(-> $ret_ty )?);)* })* pub trait Server : Types $(+ $name )* {} impl < S : Types $(+ $name )*> Server for S {}}}
+macro_rules! __ra_macro_fixture382 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ impl < S : Types > Types for MarkedTypes < S > {$(type $name = Marked < S ::$name , client ::$name >;)* }$(impl < S : $name > $name for MarkedTypes < S > {$(fn $method (& mut self , $($arg : $arg_ty ),*)$(-> $ret_ty )? {<_>:: mark ($name ::$method (& mut self . 0 , $($arg . unmark ()),*))})* })* }}
+macro_rules! __ra_macro_fixture383 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ pub trait DispatcherTrait {$(type $name ;)* fn dispatch (& mut self , b : Buffer < u8 >)-> Buffer < u8 >; } impl < S : Server > DispatcherTrait for Dispatcher < MarkedTypes < S >> {$(type $name = < MarkedTypes < S > as Types >::$name ;)* fn dispatch (& mut self , mut b : Buffer < u8 >)-> Buffer < u8 > { let Dispatcher { handle_store , server }= self ; let mut reader = & b [..]; match api_tags :: Method :: decode (& mut reader , & mut ()){$(api_tags :: Method ::$name ( m )=> match m {$(api_tags ::$name ::$method =>{ let mut call_method = || { reverse_decode ! ( reader , handle_store ; $($arg : $arg_ty ),*); $name ::$method ( server , $($arg ),*)}; let r = if thread :: panicking (){ Ok ( call_method ())} else { panic :: catch_unwind ( panic :: AssertUnwindSafe ( call_method )). map_err ( PanicMessage :: from )}; b . clear (); r . encode (& mut b , handle_store ); })* }),* } b }}}}
+macro_rules! __ra_macro_fixture384 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )*;)* }),* $(,)?)=>{$(pub ( super ) enum $name {$($method ),* } rpc_encode_decode ! ( enum $name {$($method ),* }); )* pub ( super ) enum Method {$($name ($name )),* } rpc_encode_decode ! ( enum Method {$($name ( m )),* }); }}
+macro_rules! __ra_macro_fixture385 {($(($ident : ident , $string : literal )),*$(,)?)=>{$(pub ( crate ) const $ident : SemanticTokenType = SemanticTokenType :: new ($string );)* pub ( crate ) const SUPPORTED_TYPES : & [ SemanticTokenType ]= & [ SemanticTokenType :: COMMENT , SemanticTokenType :: KEYWORD , SemanticTokenType :: STRING , SemanticTokenType :: NUMBER , SemanticTokenType :: REGEXP , SemanticTokenType :: OPERATOR , SemanticTokenType :: NAMESPACE , SemanticTokenType :: TYPE , SemanticTokenType :: STRUCT , SemanticTokenType :: CLASS , SemanticTokenType :: INTERFACE , SemanticTokenType :: ENUM , SemanticTokenType :: ENUM_MEMBER , SemanticTokenType :: TYPE_PARAMETER , SemanticTokenType :: FUNCTION , SemanticTokenType :: METHOD , SemanticTokenType :: PROPERTY , SemanticTokenType :: MACRO , SemanticTokenType :: VARIABLE , SemanticTokenType :: PARAMETER , $($ident ),* ]; }; }
+macro_rules! __ra_macro_fixture386 {($(($ident : ident , $string : literal )),*$(,)?)=>{$(pub ( crate ) const $ident : SemanticTokenModifier = SemanticTokenModifier :: new ($string );)* pub ( crate ) const SUPPORTED_MODIFIERS : & [ SemanticTokenModifier ]= & [ SemanticTokenModifier :: DOCUMENTATION , SemanticTokenModifier :: DECLARATION , SemanticTokenModifier :: DEFINITION , SemanticTokenModifier :: STATIC , SemanticTokenModifier :: ABSTRACT , SemanticTokenModifier :: DEPRECATED , SemanticTokenModifier :: READONLY , $($ident ),* ]; }; }
+macro_rules! __ra_macro_fixture387 {( struct $name : ident {$($(# [ doc =$doc : literal ])* $field : ident $(| $alias : ident )?: $ty : ty = $default : expr , )* })=>{# [ allow ( non_snake_case )]# [ derive ( Debug , Clone )] struct $name {$($field : $ty ,)* } impl $name { fn from_json ( mut json : serde_json :: Value )-> $name {$name {$($field : get_field (& mut json , stringify ! ($field ), None $(. or ( Some ( stringify ! ($alias ))))?, $default , ), )*}} fn json_schema ()-> serde_json :: Value { schema (& [$({let field = stringify ! ($field ); let ty = stringify ! ($ty ); ( field , ty , & [$($doc ),*], $default )},)* ])}# [ cfg ( test )] fn manual ()-> String { manual (& [$({let field = stringify ! ($field ); let ty = stringify ! ($ty ); ( field , ty , & [$($doc ),*], $default )},)* ])}}}; }
+macro_rules! __ra_macro_fixture388 {($($name : ident ($value : expr ),)*)=>{ mod bench_ryu { use super ::*; $(# [ bench ] fn $name ( b : & mut Bencher ){ let mut buf = ryu :: Buffer :: new (); b . iter ( move || { let value = black_box ($value ); let formatted = buf . format_finite ( value ); black_box ( formatted ); }); })* } mod bench_std_fmt { use super ::*; $(# [ bench ] fn $name ( b : & mut Bencher ){ let mut buf = Vec :: with_capacity ( 20 ); b . iter (|| { buf . clear (); let value = black_box ($value ); write ! (& mut buf , "{}" , value ). unwrap (); black_box ( buf . as_slice ()); }); })* }}; }
+macro_rules! __ra_macro_fixture389 {($($T : ident ),*)=>{$(mod $T { use test :: Bencher ; use num_integer :: { Average , Integer }; use super :: { UncheckedAverage , NaiveAverage , ModuloAverage }; use super :: { bench_ceil , bench_floor , bench_unchecked }; naive_average ! ($T ); unchecked_average ! ($T ); modulo_average ! ($T ); const SIZE : $T = 30 ; fn overflowing ()-> Vec < ($T , $T )> {(($T :: max_value ()- SIZE )..$T :: max_value ()). flat_map (| x | -> Vec <_> {(($T :: max_value ()- 100 ).. ($T :: max_value ()- 100 + SIZE )). map (| y | ( x , y )). collect ()}). collect ()} fn small ()-> Vec < ($T , $T )> {( 0 .. SIZE ). flat_map (| x | -> Vec <_> {( 0 .. SIZE ). map (| y | ( x , y )). collect ()}). collect ()} fn rand ()-> Vec < ($T , $T )> { small (). into_iter (). map (| ( x , y )| ( super :: lcg ( x ), super :: lcg ( y ))). collect ()} mod ceil { use super ::*; mod small { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = small (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . average_ceil ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = small (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . naive_average_ceil ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = small (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_ceil ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = small (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . modulo_average_ceil ( y )); }} mod overflowing { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = overflowing (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . average_ceil ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = overflowing (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . naive_average_ceil ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = overflowing (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . 
unchecked_average_ceil ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = overflowing (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . modulo_average_ceil ( y )); }} mod rand { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = rand (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . average_ceil ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = rand (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . naive_average_ceil ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = rand (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_ceil ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = rand (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . modulo_average_ceil ( y )); }}} mod floor { use super ::*; mod small { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = small (); bench_floor ( b , & v , | x : &$T , y : &$T | x . average_floor ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = small (); bench_floor ( b , & v , | x : &$T , y : &$T | x . naive_average_floor ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = small (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_floor ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = small (); bench_floor ( b , & v , | x : &$T , y : &$T | x . modulo_average_floor ( y )); }} mod overflowing { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = overflowing (); bench_floor ( b , & v , | x : &$T , y : &$T | x . average_floor ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = overflowing (); bench_floor ( b , & v , | x : &$T , y : &$T | x . naive_average_floor ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = overflowing (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . 
unchecked_average_floor ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = overflowing (); bench_floor ( b , & v , | x : &$T , y : &$T | x . modulo_average_floor ( y )); }} mod rand { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = rand (); bench_floor ( b , & v , | x : &$T , y : &$T | x . average_floor ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = rand (); bench_floor ( b , & v , | x : &$T , y : &$T | x . naive_average_floor ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = rand (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_floor ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = rand (); bench_floor ( b , & v , | x : &$T , y : &$T | x . modulo_average_floor ( y )); }}}})*}}
+macro_rules! __ra_macro_fixture390 {($T : ident )=>{ impl super :: NaiveAverage for $T { fn naive_average_floor (& self , other : &$T )-> $T { match self . checked_add (* other ){ Some ( z )=> z . div_floor (& 2 ), None =>{ if self > other { let diff = self - other ; other + diff . div_floor (& 2 )} else { let diff = other - self ; self + diff . div_floor (& 2 )}}}} fn naive_average_ceil (& self , other : &$T )-> $T { match self . checked_add (* other ){ Some ( z )=> z . div_ceil (& 2 ), None =>{ if self > other { let diff = self - other ; self - diff . div_floor (& 2 )} else { let diff = other - self ; other - diff . div_floor (& 2 )}}}}}}; }
+macro_rules! __ra_macro_fixture391 {($T : ident )=>{ impl super :: UncheckedAverage for $T { fn unchecked_average_floor (& self , other : &$T )-> $T { self . wrapping_add (* other )/ 2 } fn unchecked_average_ceil (& self , other : &$T )-> $T {( self . wrapping_add (* other )/ 2 ). wrapping_add ( 1 )}}}; }
+macro_rules! __ra_macro_fixture392 {($T : ident )=>{ impl super :: ModuloAverage for $T { fn modulo_average_ceil (& self , other : &$T )-> $T { let ( q1 , r1 )= self . div_mod_floor (& 2 ); let ( q2 , r2 )= other . div_mod_floor (& 2 ); q1 + q2 + ( r1 | r2 )} fn modulo_average_floor (& self , other : &$T )-> $T { let ( q1 , r1 )= self . div_mod_floor (& 2 ); let ( q2 , r2 )= other . div_mod_floor (& 2 ); q1 + q2 + ( r1 * r2 )}}}; }
+macro_rules! __ra_macro_fixture393 {($N : expr , $FUN : ident , $BENCH_NAME : ident , )=>( mod $BENCH_NAME { use super ::*; pub fn sum ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N ). collect (); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " sum" ), move | b | { b . iter (|| { cloned (& v ).$FUN (| x , y | x + y )})}); } pub fn complex_iter ( c : & mut Criterion ){ let u = ( 3 ..). take ($N / 2 ); let v = ( 5 ..). take ($N / 2 ); let it = u . chain ( v ); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " complex iter" ), move | b | { b . iter (|| { it . clone (). map (| x | x as f32 ).$FUN ( f32 :: atan2 )})}); } pub fn string_format ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. ($N / 4 )). collect (); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " string format" ), move | b | { b . iter (|| { cloned (& v ). map (| x | x . to_string ()).$FUN (| x , y | format ! ( "{} + {}" , x , y ))})}); }} criterion_group ! ($BENCH_NAME , $BENCH_NAME :: sum , $BENCH_NAME :: complex_iter , $BENCH_NAME :: string_format , ); )}
+macro_rules! __ra_macro_fixture394 {($ast : ident , $kind : ident )=>{# [ derive ( PartialEq , Eq , Hash )]# [ repr ( transparent )] struct $ast ( SyntaxNode ); impl $ast {# [ allow ( unused )] fn cast ( node : SyntaxNode )-> Option < Self > { if node . kind ()== $kind { Some ( Self ( node ))} else { None }}}}; }
+macro_rules! __ra_macro_fixture395 {($I : ident , $U : ident )=>{ mod $I { mod ceil { use num_integer :: Average ; # [ test ] fn same_sign (){ assert_eq ! (( 14 as $I ). average_ceil (& 16 ), 15 as $I ); assert_eq ! (( 14 as $I ). average_ceil (& 17 ), 16 as $I ); let max = $crate :: std ::$I :: MAX ; assert_eq ! (( max - 3 ). average_ceil (& ( max - 1 )), max - 2 ); assert_eq ! (( max - 3 ). average_ceil (& ( max - 2 )), max - 2 ); }# [ test ] fn different_sign (){ assert_eq ! (( 14 as $I ). average_ceil (&- 4 ), 5 as $I ); assert_eq ! (( 14 as $I ). average_ceil (&- 5 ), 5 as $I ); let min = $crate :: std ::$I :: MIN ; let max = $crate :: std ::$I :: MAX ; assert_eq ! ( min . average_ceil (& max ), 0 as $I ); }} mod floor { use num_integer :: Average ; # [ test ] fn same_sign (){ assert_eq ! (( 14 as $I ). average_floor (& 16 ), 15 as $I ); assert_eq ! (( 14 as $I ). average_floor (& 17 ), 15 as $I ); let max = $crate :: std ::$I :: MAX ; assert_eq ! (( max - 3 ). average_floor (& ( max - 1 )), max - 2 ); assert_eq ! (( max - 3 ). average_floor (& ( max - 2 )), max - 3 ); }# [ test ] fn different_sign (){ assert_eq ! (( 14 as $I ). average_floor (&- 4 ), 5 as $I ); assert_eq ! (( 14 as $I ). average_floor (&- 5 ), 4 as $I ); let min = $crate :: std ::$I :: MIN ; let max = $crate :: std ::$I :: MAX ; assert_eq ! ( min . average_floor (& max ), - 1 as $I ); }}} mod $U { mod ceil { use num_integer :: Average ; # [ test ] fn bounded (){ assert_eq ! (( 14 as $U ). average_ceil (& 16 ), 15 as $U ); assert_eq ! (( 14 as $U ). average_ceil (& 17 ), 16 as $U ); }# [ test ] fn overflow (){ let max = $crate :: std ::$U :: MAX ; assert_eq ! (( max - 3 ). average_ceil (& ( max - 1 )), max - 2 ); assert_eq ! (( max - 3 ). average_ceil (& ( max - 2 )), max - 2 ); }} mod floor { use num_integer :: Average ; # [ test ] fn bounded (){ assert_eq ! (( 14 as $U ). average_floor (& 16 ), 15 as $U ); assert_eq ! (( 14 as $U ). 
average_floor (& 17 ), 15 as $U ); }# [ test ] fn overflow (){ let max = $crate :: std ::$U :: MAX ; assert_eq ! (( max - 3 ). average_floor (& ( max - 1 )), max - 2 ); assert_eq ! (( max - 3 ). average_floor (& ( max - 2 )), max - 3 ); }}}}; }
+macro_rules! __ra_macro_fixture396 {($N : expr ; $BENCH_GROUP : ident , $TUPLE_FUN : ident , $TUPLES : ident , $TUPLE_WINDOWS : ident ; $SLICE_FUN : ident , $CHUNKS : ident , $WINDOWS : ident ; $FOR_CHUNKS : ident , $FOR_WINDOWS : ident )=>( fn $FOR_CHUNKS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($FOR_CHUNKS ). replace ( '_' , " " ), move | b | { b . iter (|| { let mut j = 0 ; for _ in 0 .. 1_000 { s += $SLICE_FUN (& v [ j .. ( j + $N )]); j += $N ; } s })}); } fn $FOR_WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($FOR_WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for i in 0 .. ( 1_000 - $N ){ s += $SLICE_FUN (& v [ i .. ( i + $N )]); } s })}); } fn $TUPLES ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($TUPLES ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . iter (). tuples (){ s += $TUPLE_FUN (& x ); } s })}); } fn $CHUNKS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($CHUNKS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . chunks ($N ){ s += $SLICE_FUN ( x ); } s })}); } fn $TUPLE_WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($TUPLE_WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . iter (). tuple_windows (){ s += $TUPLE_FUN (& x ); } s })}); } fn $WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . windows ($N ){ s += $SLICE_FUN ( x ); } s })}); } criterion_group ! 
($BENCH_GROUP , $FOR_CHUNKS , $FOR_WINDOWS , $TUPLES , $CHUNKS , $TUPLE_WINDOWS , $WINDOWS , ); )}
+macro_rules! __ra_macro_fixture397 {($N : expr , $FUN : ident , $BENCH_NAME : ident , )=>( mod $BENCH_NAME { use super ::*; pub fn sum ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N ). collect (); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " sum" ), move | b | { b . iter (|| { cloned (& v ).$FUN (| x , y | x + y )})}); } pub fn complex_iter ( c : & mut Criterion ){ let u = ( 3 ..). take ($N / 2 ); let v = ( 5 ..). take ($N / 2 ); let it = u . chain ( v ); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " complex iter" ), move | b | { b . iter (|| { it . clone (). map (| x | x as f32 ).$FUN ( f32 :: atan2 )})}); } pub fn string_format ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. ($N / 4 )). collect (); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " string format" ), move | b | { b . iter (|| { cloned (& v ). map (| x | x . to_string ()).$FUN (| x , y | format ! ( "{} + {}" , x , y ))})}); }} criterion_group ! ($BENCH_NAME , $BENCH_NAME :: sum , $BENCH_NAME :: complex_iter , $BENCH_NAME :: string_format , ); )}
+macro_rules! __ra_macro_fixture398 {($N : expr ; $BENCH_GROUP : ident , $TUPLE_FUN : ident , $TUPLES : ident , $TUPLE_WINDOWS : ident ; $SLICE_FUN : ident , $CHUNKS : ident , $WINDOWS : ident ; $FOR_CHUNKS : ident , $FOR_WINDOWS : ident )=>( fn $FOR_CHUNKS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($FOR_CHUNKS ). replace ( '_' , " " ), move | b | { b . iter (|| { let mut j = 0 ; for _ in 0 .. 1_000 { s += $SLICE_FUN (& v [ j .. ( j + $N )]); j += $N ; } s })}); } fn $FOR_WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($FOR_WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for i in 0 .. ( 1_000 - $N ){ s += $SLICE_FUN (& v [ i .. ( i + $N )]); } s })}); } fn $TUPLES ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($TUPLES ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . iter (). tuples (){ s += $TUPLE_FUN (& x ); } s })}); } fn $CHUNKS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($CHUNKS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . chunks ($N ){ s += $SLICE_FUN ( x ); } s })}); } fn $TUPLE_WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($TUPLE_WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . iter (). tuple_windows (){ s += $TUPLE_FUN (& x ); } s })}); } fn $WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . windows ($N ){ s += $SLICE_FUN ( x ); } s })}); } criterion_group ! 
($BENCH_GROUP , $FOR_CHUNKS , $FOR_WINDOWS , $TUPLES , $CHUNKS , $TUPLE_WINDOWS , $WINDOWS , ); )}
+macro_rules! __ra_macro_fixture399 {($name : ident : $e : expr )=>{# [ cfg_attr ( target_arch = "wasm32" , wasm_bindgen_test :: wasm_bindgen_test )]# [ test ] fn $name (){ let ( subscriber , handle )= subscriber :: mock (). event ( event :: mock (). with_fields ( field :: mock ( "answer" ). with_value (& 42 ). and ( field :: mock ( "to_question" ). with_value (& "life, the universe, and everything" ), ). only (), ), ). done (). run_with_handle (); with_default ( subscriber , || { info ! ( answer = $e , to_question = "life, the universe, and everything" ); }); handle . assert_finished (); }}; }
+macro_rules! __ra_macro_fixture400 {($T : ty )=>{ impl GcdOld for $T {# [ doc = " Calculates the Greatest Common Divisor (GCD) of the number and" ]# [ doc = " `other`. The result is always positive." ]# [ inline ] fn gcd_old (& self , other : & Self )-> Self { let mut m = * self ; let mut n = * other ; if m == 0 || n == 0 { return ( m | n ). abs (); } let shift = ( m | n ). trailing_zeros (); if m == Self :: min_value ()|| n == Self :: min_value (){ return ( 1 << shift ). abs (); } m = m . abs (); n = n . abs (); n >>= n . trailing_zeros (); while m != 0 { m >>= m . trailing_zeros (); if n > m { std :: mem :: swap (& mut n , & mut m )} m -= n ; } n << shift }}}; }
+macro_rules! __ra_macro_fixture401 {($T : ty )=>{ impl GcdOld for $T {# [ doc = " Calculates the Greatest Common Divisor (GCD) of the number and" ]# [ doc = " `other`. The result is always positive." ]# [ inline ] fn gcd_old (& self , other : & Self )-> Self { let mut m = * self ; let mut n = * other ; if m == 0 || n == 0 { return m | n ; } let shift = ( m | n ). trailing_zeros (); n >>= n . trailing_zeros (); while m != 0 { m >>= m . trailing_zeros (); if n > m { std :: mem :: swap (& mut n , & mut m )} m -= n ; } n << shift }}}; }
+macro_rules! __ra_macro_fixture402 {($T : ident )=>{ mod $T { use crate :: { run_bench , GcdOld }; use num_integer :: Integer ; use test :: Bencher ; # [ bench ] fn bench_gcd ( b : & mut Bencher ){ run_bench ( b , $T :: gcd ); }# [ bench ] fn bench_gcd_old ( b : & mut Bencher ){ run_bench ( b , $T :: gcd_old ); }}}; }
+macro_rules! __ra_macro_fixture403 {($f : ident , $($t : ty ),+)=>{$(paste :: item ! { qc :: quickcheck ! { fn [< $f _ $t >]( i : RandIter <$t >, k : u16 )-> (){$f ( i , k )}}})+ }; }
+macro_rules! __ra_macro_fixture404 {($name : ident )=>{# [ derive ( Debug )] struct $name { message : & 'static str , drop : DetectDrop , } impl Display for $name { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . write_str ( self . message )}}}; }
+macro_rules! __ra_macro_fixture405 {($($(# [$attr : meta ])* $name : ident ($value : expr )),* )=>{ mod bench_itoa_write { use test :: { Bencher , black_box }; $($(# [$attr ])* # [ bench ] fn $name ( b : & mut Bencher ){ use itoa ; let mut buf = Vec :: with_capacity ( 40 ); b . iter (|| { buf . clear (); itoa :: write (& mut buf , black_box ($value )). unwrap ()}); })* } mod bench_itoa_fmt { use test :: { Bencher , black_box }; $($(# [$attr ])* # [ bench ] fn $name ( b : & mut Bencher ){ use itoa ; let mut buf = String :: with_capacity ( 40 ); b . iter (|| { buf . clear (); itoa :: fmt (& mut buf , black_box ($value )). unwrap ()}); })* } mod bench_std_fmt { use test :: { Bencher , black_box }; $($(# [$attr ])* # [ bench ] fn $name ( b : & mut Bencher ){ use std :: io :: Write ; let mut buf = Vec :: with_capacity ( 40 ); b . iter (|| { buf . clear (); write ! (& mut buf , "{}" , black_box ($value )). unwrap ()}); })* }}}
+macro_rules! __ra_macro_fixture406 {($typ : ty {$($b_name : ident =>$g_name : ident ($($args : expr ),*),)* })=>{$(# [ bench ] fn $b_name ( b : & mut Bencher ){$g_name ::<$typ > ($($args ,)* b )})* }}
+macro_rules! __ra_macro_fixture407 {($($T : ident ),*)=>{$(mod $T { use test :: Bencher ; use num_integer :: Roots ; # [ bench ] fn sqrt_rand ( b : & mut Bencher ){:: bench_rand_pos ( b , $T :: sqrt , 2 ); }# [ bench ] fn sqrt_small ( b : & mut Bencher ){:: bench_small_pos ( b , $T :: sqrt , 2 ); }# [ bench ] fn cbrt_rand ( b : & mut Bencher ){:: bench_rand ( b , $T :: cbrt , 3 ); }# [ bench ] fn cbrt_small ( b : & mut Bencher ){:: bench_small ( b , $T :: cbrt , 3 ); }# [ bench ] fn fourth_root_rand ( b : & mut Bencher ){:: bench_rand_pos ( b , | x : &$T | x . nth_root ( 4 ), 4 ); }# [ bench ] fn fourth_root_small ( b : & mut Bencher ){:: bench_small_pos ( b , | x : &$T | x . nth_root ( 4 ), 4 ); }# [ bench ] fn fifth_root_rand ( b : & mut Bencher ){:: bench_rand ( b , | x : &$T | x . nth_root ( 5 ), 5 ); }# [ bench ] fn fifth_root_small ( b : & mut Bencher ){:: bench_small ( b , | x : &$T | x . nth_root ( 5 ), 5 ); }})*}}
+macro_rules! __ra_macro_fixture408 {($name : ident , $level : expr )=>{# [ doc = " Creates a new `Diagnostic` with the given `message` at the span" ]# [ doc = " `self`." ] pub fn $name < T : Into < String >> ( self , message : T )-> Diagnostic { Diagnostic :: spanned ( self , $level , message )}}; }
+macro_rules! __ra_macro_fixture409 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new suffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1u32` where the integer" ]# [ doc = " value specified is the first part of the token and the integral is" ]# [ doc = " also suffixed at the end." ]# [ doc = " Literals created from negative numbers may not survive round-trips through" ]# [ doc = " `TokenStream` or strings and may be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ] pub fn $name ( n : $kind )-> Literal { Literal ( bridge :: client :: Literal :: typed_integer (& n . to_string (), stringify ! ($kind )))})*)}
+macro_rules! __ra_macro_fixture410 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new unsuffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1` where the integer" ]# [ doc = " value specified is the first part of the token. No suffix is" ]# [ doc = " specified on this token, meaning that invocations like" ]# [ doc = " `Literal::i8_unsuffixed(1)` are equivalent to" ]# [ doc = " `Literal::u32_unsuffixed(1)`." ]# [ doc = " Literals created from negative numbers may not survive rountrips through" ]# [ doc = " `TokenStream` or strings and may be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ] pub fn $name ( n : $kind )-> Literal { Literal ( bridge :: client :: Literal :: integer (& n . to_string ()))})*)}
+macro_rules! __ra_macro_fixture411 {($spanned : ident , $regular : ident , $level : expr )=>{# [ doc = " Adds a new child diagnostic message to `self` with the level" ]# [ doc = " identified by this method\\\'s name with the given `spans` and" ]# [ doc = " `message`." ] pub fn $spanned < S , T > ( mut self , spans : S , message : T )-> Diagnostic where S : MultiSpan , T : Into < String >, { self . children . push ( Diagnostic :: spanned ( spans , $level , message )); self }# [ doc = " Adds a new child diagnostic message to `self` with the level" ]# [ doc = " identified by this method\\\'s name with the given `message`." ] pub fn $regular < T : Into < String >> ( mut self , message : T )-> Diagnostic { self . children . push ( Diagnostic :: new ($level , message )); self }}; }
+macro_rules! __ra_macro_fixture412 {($($arg : tt )*)=>{{ let res = $crate :: fmt :: format ($crate :: __export :: format_args ! ($($arg )*)); res }}}
+macro_rules! __ra_macro_fixture413 {($dst : expr , $($arg : tt )*)=>($dst . write_fmt ($crate :: format_args ! ($($arg )*)))}
+macro_rules! __ra_macro_fixture414 {($dst : expr $(,)?)=>($crate :: write ! ($dst , "\n" )); ($dst : expr , $($arg : tt )*)=>($dst . write_fmt ($crate :: format_args_nl ! ($($arg )*))); }
+macro_rules! __ra_macro_fixture415 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new suffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1u32` where the integer" ]# [ doc = " value specified is the first part of the token and the integral is" ]# [ doc = " also suffixed at the end. Literals created from negative numbers may" ]# [ doc = " not survive rountrips through `TokenStream` or strings and may be" ]# [ doc = " broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ] pub fn $name ( n : $kind )-> Literal { Literal :: _new ( imp :: Literal ::$name ( n ))})*)}
+macro_rules! __ra_macro_fixture416 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new unsuffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1` where the integer" ]# [ doc = " value specified is the first part of the token. No suffix is" ]# [ doc = " specified on this token, meaning that invocations like" ]# [ doc = " `Literal::i8_unsuffixed(1)` are equivalent to" ]# [ doc = " `Literal::u32_unsuffixed(1)`. Literals created from negative numbers" ]# [ doc = " may not survive rountrips through `TokenStream` or strings and may" ]# [ doc = " be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ] pub fn $name ( n : $kind )-> Literal { Literal :: _new ( imp :: Literal ::$name ( n ))})*)}
+macro_rules! __ra_macro_fixture417 {($($name : ident =>$kind : ident ,)*)=>($(pub fn $name ( n : $kind )-> Literal { Literal :: _new ( format ! ( concat ! ( "{}" , stringify ! ($kind )), n ))})*)}
+macro_rules! __ra_macro_fixture418 {($($name : ident =>$kind : ident ,)*)=>($(pub fn $name ( n : $kind )-> Literal { Literal :: _new ( n . to_string ())})*)}
+macro_rules! __ra_macro_fixture419 {(<$visitor : ident : Visitor <$lifetime : tt >> $($func : ident )*)=>{$(forward_to_deserialize_any_helper ! {$func <$lifetime , $visitor >})* }; ($($func : ident )*)=>{$(forward_to_deserialize_any_helper ! {$func < 'de , V >})* }; }
+macro_rules! __ra_macro_fixture420 {( bool <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_bool <$l , $v > ()}}; ( i8 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_i8 <$l , $v > ()}}; ( i16 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_i16 <$l , $v > ()}}; ( i32 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_i32 <$l , $v > ()}}; ( i64 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_i64 <$l , $v > ()}}; ( i128 <$l : tt , $v : ident >)=>{ serde_if_integer128 ! { forward_to_deserialize_any_method ! { deserialize_i128 <$l , $v > ()}}}; ( u8 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_u8 <$l , $v > ()}}; ( u16 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_u16 <$l , $v > ()}}; ( u32 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_u32 <$l , $v > ()}}; ( u64 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_u64 <$l , $v > ()}}; ( u128 <$l : tt , $v : ident >)=>{ serde_if_integer128 ! { forward_to_deserialize_any_method ! { deserialize_u128 <$l , $v > ()}}}; ( f32 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_f32 <$l , $v > ()}}; ( f64 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_f64 <$l , $v > ()}}; ( char <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_char <$l , $v > ()}}; ( str <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_str <$l , $v > ()}}; ( string <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_string <$l , $v > ()}}; ( bytes <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_bytes <$l , $v > ()}}; ( byte_buf <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! 
{ deserialize_byte_buf <$l , $v > ()}}; ( option <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_option <$l , $v > ()}}; ( unit <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_unit <$l , $v > ()}}; ( unit_struct <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_unit_struct <$l , $v > ( name : & 'static str )}}; ( newtype_struct <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_newtype_struct <$l , $v > ( name : & 'static str )}}; ( seq <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_seq <$l , $v > ()}}; ( tuple <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_tuple <$l , $v > ( len : usize )}}; ( tuple_struct <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_tuple_struct <$l , $v > ( name : & 'static str , len : usize )}}; ( map <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_map <$l , $v > ()}}; ( struct <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_struct <$l , $v > ( name : & 'static str , fields : & 'static [& 'static str ])}}; ( enum <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_enum <$l , $v > ( name : & 'static str , variants : & 'static [& 'static str ])}}; ( identifier <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_identifier <$l , $v > ()}}; ( ignored_any <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_ignored_any <$l , $v > ()}}; }
+macro_rules! __ra_macro_fixture421 {($func : ident <$l : tt , $v : ident > ($($arg : ident : $ty : ty ),*))=>{# [ inline ] fn $func <$v > ( self , $($arg : $ty ,)* visitor : $v )-> $crate :: __private :: Result <$v :: Value , Self :: Error > where $v : $crate :: de :: Visitor <$l >, {$(let _ = $arg ; )* self . deserialize_any ( visitor )}}; }
+macro_rules! __ra_macro_fixture422 {($($f : ident : $t : ty ,)*)=>{$(fn $f ( self , v : $t )-> fmt :: Result { Display :: fmt (& v , self )})* }; }
+macro_rules! __ra_macro_fixture423 {($name : ident , $level : expr )=>{# [ doc = " Creates a new `Diagnostic` with the given `message` at the span" ]# [ doc = " `self`." ]# [ unstable ( feature = "proc_macro_diagnostic" , issue = "54140" )] pub fn $name < T : Into < String >> ( self , message : T )-> Diagnostic { Diagnostic :: spanned ( self , $level , message )}}; }
+macro_rules! __ra_macro_fixture424 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new suffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1u32` where the integer" ]# [ doc = " value specified is the first part of the token and the integral is" ]# [ doc = " also suffixed at the end." ]# [ doc = " Literals created from negative numbers may not survive round-trips through" ]# [ doc = " `TokenStream` or strings and may be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ]# [ stable ( feature = "proc_macro_lib2" , since = "1.29.0" )] pub fn $name ( n : $kind )-> Literal { Literal ( bridge :: client :: Literal :: typed_integer (& n . to_string (), stringify ! ($kind )))})*)}
+macro_rules! __ra_macro_fixture425 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new unsuffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1` where the integer" ]# [ doc = " value specified is the first part of the token. No suffix is" ]# [ doc = " specified on this token, meaning that invocations like" ]# [ doc = " `Literal::i8_unsuffixed(1)` are equivalent to" ]# [ doc = " `Literal::u32_unsuffixed(1)`." ]# [ doc = " Literals created from negative numbers may not survive rountrips through" ]# [ doc = " `TokenStream` or strings and may be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ]# [ stable ( feature = "proc_macro_lib2" , since = "1.29.0" )] pub fn $name ( n : $kind )-> Literal { Literal ( bridge :: client :: Literal :: integer (& n . to_string ()))})*)}
+macro_rules! __ra_macro_fixture426 {( type FreeFunctions )=>( type FreeFunctions : 'static ;); ( type TokenStream )=>( type TokenStream : 'static + Clone ;); ( type TokenStreamBuilder )=>( type TokenStreamBuilder : 'static ;); ( type TokenStreamIter )=>( type TokenStreamIter : 'static + Clone ;); ( type Group )=>( type Group : 'static + Clone ;); ( type Punct )=>( type Punct : 'static + Copy + Eq + Hash ;); ( type Ident )=>( type Ident : 'static + Copy + Eq + Hash ;); ( type Literal )=>( type Literal : 'static + Clone ;); ( type SourceFile )=>( type SourceFile : 'static + Clone ;); ( type MultiSpan )=>( type MultiSpan : 'static ;); ( type Diagnostic )=>( type Diagnostic : 'static ;); ( type Span )=>( type Span : 'static + Copy + Eq + Hash ;); ( fn drop (& mut self , $arg : ident : $arg_ty : ty ))=>( fn drop (& mut self , $arg : $arg_ty ){ mem :: drop ($arg )}); ( fn clone (& mut self , $arg : ident : $arg_ty : ty )-> $ret_ty : ty )=>( fn clone (& mut self , $arg : $arg_ty )-> $ret_ty {$arg . clone ()}); ($($item : tt )*)=>($($item )*;)}
+macro_rules! __ra_macro_fixture427 {($spanned : ident , $regular : ident , $level : expr )=>{# [ doc = " Adds a new child diagnostic message to `self` with the level" ]# [ doc = " identified by this method\\\'s name with the given `spans` and" ]# [ doc = " `message`." ]# [ unstable ( feature = "proc_macro_diagnostic" , issue = "54140" )] pub fn $spanned < S , T > ( mut self , spans : S , message : T )-> Diagnostic where S : MultiSpan , T : Into < String >, { self . children . push ( Diagnostic :: spanned ( spans , $level , message )); self }# [ doc = " Adds a new child diagnostic message to `self` with the level" ]# [ doc = " identified by this method\\\'s name with the given `message`." ]# [ unstable ( feature = "proc_macro_diagnostic" , issue = "54140" )] pub fn $regular < T : Into < String >> ( mut self , message : T )-> Diagnostic { self . children . push ( Diagnostic :: new ($level , message )); self }}; }
+macro_rules! __ra_macro_fixture428 {($SelfT : ty , $ActualT : ident , $UnsignedT : ty , $BITS : expr , $Min : expr , $Max : expr , $Feature : expr , $EndFeature : expr , $rot : expr , $rot_op : expr , $rot_result : expr , $swap_op : expr , $swapped : expr , $reversed : expr , $le_bytes : expr , $be_bytes : expr , $to_xe_bytes_doc : expr , $from_xe_bytes_doc : expr )=>{ doc_comment ! { concat ! ( "The smallest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MIN, " , stringify ! ($Min ), ");" , $EndFeature , "\n```" ), # [ stable ( feature = "assoc_int_consts" , since = "1.43.0" )] pub const MIN : Self = ! 0 ^ ((! 0 as $UnsignedT )>> 1 ) as Self ; } doc_comment ! { concat ! ( "The largest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX, " , stringify ! ($Max ), ");" , $EndFeature , "\n```" ), # [ stable ( feature = "assoc_int_consts" , since = "1.43.0" )] pub const MAX : Self = ! Self :: MIN ; } doc_comment ! { concat ! ( "The size of this integer type in bits.\n\n# Examples\n\n```\n" , $Feature , "#![feature(int_bits_const)]\nassert_eq!(" , stringify ! ($SelfT ), "::BITS, " , stringify ! ($BITS ), ");" , $EndFeature , "\n```" ), # [ unstable ( feature = "int_bits_const" , issue = "76904" )] pub const BITS : u32 = $BITS ; } doc_comment ! { concat ! ( "Converts a string slice in a given base to an integer.\n\nThe string is expected to be an optional `+` or `-` sign followed by digits.\nLeading and trailing whitespace represent an error. Digits are a subset of these characters,\ndepending on `radix`:\n\n * `0-9`\n * `a-z`\n * `A-Z`\n\n# Panics\n\nThis function panics if `radix` is not in the range from 2 to 36.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! 
($SelfT ), "::from_str_radix(\"A\", 16), Ok(10));" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )] pub fn from_str_radix ( src : & str , radix : u32 )-> Result < Self , ParseIntError > { from_str_radix ( src , radix )}} doc_comment ! { concat ! ( "Returns the number of ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0b100_0000" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.count_ones(), 1);" , $EndFeature , "\n```\n" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn count_ones ( self )-> u32 {( self as $UnsignedT ). count_ones ()}} doc_comment ! { concat ! ( "Returns the number of zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX.count_zeros(), 1);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn count_zeros ( self )-> u32 {(! self ). count_ones ()}} doc_comment ! { concat ! ( "Returns the number of leading zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = -1" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.leading_zeros(), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn leading_zeros ( self )-> u32 {( self as $UnsignedT ). leading_zeros ()}} doc_comment ! { concat ! ( "Returns the number of trailing zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = -4" , stringify ! 
($SelfT ), ";\n\nassert_eq!(n.trailing_zeros(), 2);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn trailing_zeros ( self )-> u32 {( self as $UnsignedT ). trailing_zeros ()}} doc_comment ! { concat ! ( "Returns the number of leading ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = -1" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.leading_ones(), " , stringify ! ($BITS ), ");" , $EndFeature , "\n```" ), # [ stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ rustc_const_stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ inline ] pub const fn leading_ones ( self )-> u32 {( self as $UnsignedT ). leading_ones ()}} doc_comment ! { concat ! ( "Returns the number of trailing ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 3" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.trailing_ones(), 2);" , $EndFeature , "\n```" ), # [ stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ rustc_const_stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ inline ] pub const fn trailing_ones ( self )-> u32 {( self as $UnsignedT ). trailing_ones ()}} doc_comment ! { concat ! ( "Shifts the bits to the left by a specified amount, `n`,\nwrapping the truncated bits to the end of the resulting integer.\n\nPlease note this isn't the same operation as the `<<` shifting operator!\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $rot_op , stringify ! 
($SelfT ), ";\nlet m = " , $rot_result , ";\n\nassert_eq!(n.rotate_left(" , $rot , "), m);\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn rotate_left ( self , n : u32 )-> Self {( self as $UnsignedT ). rotate_left ( n ) as Self }} doc_comment ! { concat ! ( "Shifts the bits to the right by a specified amount, `n`,\nwrapping the truncated bits to the beginning of the resulting\ninteger.\n\nPlease note this isn't the same operation as the `>>` shifting operator!\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $rot_result , stringify ! ($SelfT ), ";\nlet m = " , $rot_op , ";\n\nassert_eq!(n.rotate_right(" , $rot , "), m);\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn rotate_right ( self , n : u32 )-> Self {( self as $UnsignedT ). rotate_right ( n ) as Self }} doc_comment ! { concat ! ( "Reverses the byte order of the integer.\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $swap_op , stringify ! ($SelfT ), ";\n\nlet m = n.swap_bytes();\n\nassert_eq!(m, " , $swapped , ");\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn swap_bytes ( self )-> Self {( self as $UnsignedT ). swap_bytes () as Self }} doc_comment ! { concat ! ( "Reverses the order of bits in the integer. The least significant bit becomes the most significant bit,\n second least-significant bit becomes second most-significant bit, etc.\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $swap_op , stringify ! 
($SelfT ), ";\nlet m = n.reverse_bits();\n\nassert_eq!(m, " , $reversed , ");\nassert_eq!(0, 0" , stringify ! ($SelfT ), ".reverse_bits());\n```" ), # [ stable ( feature = "reverse_bits" , since = "1.37.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ]# [ must_use ] pub const fn reverse_bits ( self )-> Self {( self as $UnsignedT ). reverse_bits () as Self }} doc_comment ! { concat ! ( "Converts an integer from big endian to the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are swapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(" , stringify ! ($SelfT ), "::from_be(n), n)\n} else {\n assert_eq!(" , stringify ! ($SelfT ), "::from_be(n), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_conversions" , since = "1.32.0" )]# [ inline ] pub const fn from_be ( x : Self )-> Self {# [ cfg ( target_endian = "big" )]{ x }# [ cfg ( not ( target_endian = "big" ))]{ x . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts an integer from little endian to the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are swapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(" , stringify ! ($SelfT ), "::from_le(n), n)\n} else {\n assert_eq!(" , stringify ! ($SelfT ), "::from_le(n), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_conversions" , since = "1.32.0" )]# [ inline ] pub const fn from_le ( x : Self )-> Self {# [ cfg ( target_endian = "little" )]{ x }# [ cfg ( not ( target_endian = "little" ))]{ x . swap_bytes ()}}} doc_comment ! { concat ! 
( "Converts `self` to big endian from the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are swapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(n.to_be(), n)\n} else {\n assert_eq!(n.to_be(), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_conversions" , since = "1.32.0" )]# [ inline ] pub const fn to_be ( self )-> Self {# [ cfg ( target_endian = "big" )]{ self }# [ cfg ( not ( target_endian = "big" ))]{ self . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts `self` to little endian from the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are swapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(n.to_le(), n)\n} else {\n assert_eq!(n.to_le(), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_conversions" , since = "1.32.0" )]# [ inline ] pub const fn to_le ( self )-> Self {# [ cfg ( target_endian = "little" )]{ self }# [ cfg ( not ( target_endian = "little" ))]{ self . swap_bytes ()}}} doc_comment ! { concat ! ( "Checked integer addition. Computes `self + rhs`, returning `None`\nif overflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((" , stringify ! ($SelfT ), "::MAX - 2).checked_add(1), Some(" , stringify ! ($SelfT ), "::MAX - 1));\nassert_eq!((" , stringify ! 
($SelfT ), "::MAX - 2).checked_add(3), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_add ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_add ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer addition. Computes `self + rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self + rhs > " , stringify ! ($SelfT ), "::MAX` or `self + rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_add ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_add ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer subtraction. Computes `self - rhs`, returning `None` if\noverflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((" , stringify ! ($SelfT ), "::MIN + 2).checked_sub(1), Some(" , stringify ! ($SelfT ), "::MIN + 1));\nassert_eq!((" , stringify ! ($SelfT ), "::MIN + 2).checked_sub(3), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_sub ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_sub ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer subtraction. Computes `self - rhs`, assuming overflow\ncannot occur. 
This results in undefined behavior when `self - rhs > " , stringify ! ($SelfT ), "::MAX` or `self - rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_sub ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_sub ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer multiplication. Computes `self * rhs`, returning `None` if\noverflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_mul(1), Some(" , stringify ! ($SelfT ), "::MAX));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_mul(2), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_mul ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_mul ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer multiplication. Computes `self * rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self * rhs > " , stringify ! ($SelfT ), "::MAX` or `self * rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_mul ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_mul ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer division. 
Computes `self / rhs`, returning `None` if `rhs == 0`\nor the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((" , stringify ! ($SelfT ), "::MIN + 1).checked_div(-1), Some(" , stringify ! ($Max ), "));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_div(-1), None);\nassert_eq!((1" , stringify ! ($SelfT ), ").checked_div(0), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_checked_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_div ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 || ( self == Self :: MIN && rhs == - 1 )){ None } else { Some ( unsafe { intrinsics :: unchecked_div ( self , rhs )})}}} doc_comment ! { concat ! ( "Checked Euclidean division. Computes `self.div_euclid(rhs)`,\nreturning `None` if `rhs == 0` or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!((" , stringify ! ($SelfT ), "::MIN + 1).checked_div_euclid(-1), Some(" , stringify ! ($Max ), "));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_div_euclid(-1), None);\nassert_eq!((1" , stringify ! ($SelfT ), ").checked_div_euclid(0), None);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_div_euclid ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 || ( self == Self :: MIN && rhs == - 1 )){ None } else { Some ( self . div_euclid ( rhs ))}}} doc_comment ! { concat ! ( "Checked integer remainder. 
Computes `self % rhs`, returning `None` if\n`rhs == 0` or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem(2), Some(1));\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem(0), None);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_rem(-1), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_checked_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_rem ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 || ( self == Self :: MIN && rhs == - 1 )){ None } else { Some ( unsafe { intrinsics :: unchecked_rem ( self , rhs )})}}} doc_comment ! { concat ! ( "Checked Euclidean remainder. Computes `self.rem_euclid(rhs)`, returning `None`\nif `rhs == 0` or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem_euclid(2), Some(1));\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem_euclid(0), None);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_rem_euclid(-1), None);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_rem_euclid ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 || ( self == Self :: MIN && rhs == - 1 )){ None } else { Some ( self . rem_euclid ( rhs ))}}} doc_comment ! { concat ! ( "Checked negation. Computes `-self`, returning `None` if `self == MIN`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_neg(), Some(-5));\nassert_eq!(" , stringify ! 
($SelfT ), "::MIN.checked_neg(), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn checked_neg ( self )-> Option < Self > { let ( a , b )= self . overflowing_neg (); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked shift left. Computes `self << rhs`, returning `None` if `rhs` is larger\nthan or equal to the number of bits in `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x1" , stringify ! ($SelfT ), ".checked_shl(4), Some(0x10));\nassert_eq!(0x1" , stringify ! ($SelfT ), ".checked_shl(129), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_shl ( self , rhs : u32 )-> Option < Self > { let ( a , b )= self . overflowing_shl ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked shift right. Computes `self >> rhs`, returning `None` if `rhs` is\nlarger than or equal to the number of bits in `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shr(4), Some(0x1));\nassert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shr(128), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_shr ( self , rhs : u32 )-> Option < Self > { let ( a , b )= self . overflowing_shr ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked absolute value. 
Computes `self.abs()`, returning `None` if\n`self == MIN`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!((-5" , stringify ! ($SelfT ), ").checked_abs(), Some(5));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_abs(), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_abs" , since = "1.13.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn checked_abs ( self )-> Option < Self > { if self . is_negative (){ self . checked_neg ()} else { Some ( self )}}} doc_comment ! { concat ! ( "Checked exponentiation. Computes `self.pow(exp)`, returning `None` if\noverflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(8" , stringify ! ($SelfT ), ".checked_pow(2), Some(64));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_pow(2), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_pow ( self , mut exp : u32 )-> Option < Self > { if exp == 0 { return Some ( 1 ); } let mut base = self ; let mut acc : Self = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = try_opt ! ( acc . checked_mul ( base )); } exp /= 2 ; base = try_opt ! ( base . checked_mul ( base )); } Some ( try_opt ! ( acc . checked_mul ( base )))}} doc_comment ! { concat ! ( "Saturating integer addition. Computes `self + rhs`, saturating at the numeric\nbounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_add(1), 101);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_add(100), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_add(-1), " , stringify ! 
($SelfT ), "::MIN);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_add ( self , rhs : Self )-> Self { intrinsics :: saturating_add ( self , rhs )}} doc_comment ! { concat ! ( "Saturating integer subtraction. Computes `self - rhs`, saturating at the\nnumeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_sub(127), -27);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_sub(100), " , stringify ! ($SelfT ), "::MIN);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_sub(-1), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_sub ( self , rhs : Self )-> Self { intrinsics :: saturating_sub ( self , rhs )}} doc_comment ! { concat ! ( "Saturating integer negation. Computes `-self`, returning `MAX` if `self == MIN`\ninstead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_neg(), -100);\nassert_eq!((-100" , stringify ! ($SelfT ), ").saturating_neg(), 100);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_neg(), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_neg(), " , stringify ! 
($SelfT ), "::MIN + 1);" , $EndFeature , "\n```" ), # [ stable ( feature = "saturating_neg" , since = "1.45.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn saturating_neg ( self )-> Self { intrinsics :: saturating_sub ( 0 , self )}} doc_comment ! { concat ! ( "Saturating absolute value. Computes `self.abs()`, returning `MAX` if `self ==\nMIN` instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_abs(), 100);\nassert_eq!((-100" , stringify ! ($SelfT ), ").saturating_abs(), 100);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_abs(), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!((" , stringify ! ($SelfT ), "::MIN + 1).saturating_abs(), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "saturating_neg" , since = "1.45.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn saturating_abs ( self )-> Self { if self . is_negative (){ self . saturating_neg ()} else { self }}} doc_comment ! { concat ! ( "Saturating integer multiplication. Computes `self * rhs`, saturating at the\nnumeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(10" , stringify ! ($SelfT ), ".saturating_mul(12), 120);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_mul(10), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_mul(10), " , stringify ! ($SelfT ), "::MIN);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_mul ( self , rhs : Self )-> Self { match self . 
checked_mul ( rhs ){ Some ( x )=> x , None => if ( self < 0 )== ( rhs < 0 ){ Self :: MAX } else { Self :: MIN }}}} doc_comment ! { concat ! ( "Saturating integer exponentiation. Computes `self.pow(exp)`,\nsaturating at the numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!((-4" , stringify ! ($SelfT ), ").saturating_pow(3), -64);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_pow(2), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_pow(3), " , stringify ! ($SelfT ), "::MIN);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_pow ( self , exp : u32 )-> Self { match self . checked_pow ( exp ){ Some ( x )=> x , None if self < 0 && exp % 2 == 1 => Self :: MIN , None => Self :: MAX , }}} doc_comment ! { concat ! ( "Wrapping (modular) addition. Computes `self + rhs`, wrapping around at the\nboundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_add(27), 127);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.wrapping_add(2), " , stringify ! ($SelfT ), "::MIN + 1);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_add ( self , rhs : Self )-> Self { intrinsics :: wrapping_add ( self , rhs )}} doc_comment ! { concat ! ( "Wrapping (modular) subtraction. Computes `self - rhs`, wrapping around at the\nboundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0" , stringify ! 
($SelfT ), ".wrapping_sub(127), -127);\nassert_eq!((-2" , stringify ! ($SelfT ), ").wrapping_sub(" , stringify ! ($SelfT ), "::MAX), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_sub ( self , rhs : Self )-> Self { intrinsics :: wrapping_sub ( self , rhs )}} doc_comment ! { concat ! ( "Wrapping (modular) multiplication. Computes `self * rhs`, wrapping around at\nthe boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(10" , stringify ! ($SelfT ), ".wrapping_mul(12), 120);\nassert_eq!(11i8.wrapping_mul(12), -124);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_mul ( self , rhs : Self )-> Self { intrinsics :: wrapping_mul ( self , rhs )}} doc_comment ! { concat ! ( "Wrapping (modular) division. Computes `self / rhs`, wrapping around at the\nboundary of the type.\n\nThe only case where such wrapping can occur is when one divides `MIN / -1` on a signed type (where\n`MIN` is the negative minimal value for the type); this is equivalent to `-MIN`, a positive value\nthat is too large to represent in the type. In such a case, this function returns `MIN` itself.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! 
($SelfT ), ".wrapping_div(10), 10);\nassert_eq!((-128i8).wrapping_div(-1), -128);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_unstable ( feature = "const_wrapping_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_div ( self , rhs : Self )-> Self { self . overflowing_div ( rhs ). 0 }} doc_comment ! { concat ! ( "Wrapping Euclidean division. Computes `self.div_euclid(rhs)`,\nwrapping around at the boundary of the type.\n\nWrapping will only occur in `MIN / -1` on a signed type (where `MIN` is the negative minimal value\nfor the type). This is equivalent to `-MIN`, a positive value that is too large to represent in the\ntype. In this case, this method returns `MIN` itself.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(100" , stringify ! ($SelfT ), ".wrapping_div_euclid(10), 10);\nassert_eq!((-128i8).wrapping_div_euclid(-1), -128);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_div_euclid ( self , rhs : Self )-> Self { self . overflowing_div_euclid ( rhs ). 0 }} doc_comment ! { concat ! ( "Wrapping (modular) remainder. Computes `self % rhs`, wrapping around at the\nboundary of the type.\n\nSuch wrap-around never actually occurs mathematically; implementation artifacts make `x % y`\ninvalid for `MIN / -1` on a signed type (where `MIN` is the negative minimal value). In such a case,\nthis function returns `0`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! 
($SelfT ), ".wrapping_rem(10), 0);\nassert_eq!((-128i8).wrapping_rem(-1), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_unstable ( feature = "const_wrapping_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_rem ( self , rhs : Self )-> Self { self . overflowing_rem ( rhs ). 0 }} doc_comment ! { concat ! ( "Wrapping Euclidean remainder. Computes `self.rem_euclid(rhs)`, wrapping around\nat the boundary of the type.\n\nWrapping will only occur in `MIN % -1` on a signed type (where `MIN` is the negative minimal value\nfor the type). In this case, this method returns 0.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(100" , stringify ! ($SelfT ), ".wrapping_rem_euclid(10), 0);\nassert_eq!((-128i8).wrapping_rem_euclid(-1), 0);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_rem_euclid ( self , rhs : Self )-> Self { self . overflowing_rem_euclid ( rhs ). 0 }} doc_comment ! { concat ! ( "Wrapping (modular) negation. Computes `-self`, wrapping around at the boundary\nof the type.\n\nThe only case where such wrapping can occur is when one negates `MIN` on a signed type (where `MIN`\nis the negative minimal value for the type); this is a positive value that is too large to represent\nin the type. In such a case, this function returns `MIN` itself.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_neg(), -100);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.wrapping_neg(), " , stringify ! 
($SelfT ), "::MIN);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn wrapping_neg ( self )-> Self { self . overflowing_neg (). 0 }} doc_comment ! { concat ! ( "Panic-free bitwise shift-left; yields `self << mask(rhs)`, where `mask` removes\nany high-order bits of `rhs` that would cause the shift to exceed the bitwidth of the type.\n\nNote that this is *not* the same as a rotate-left; the RHS of a wrapping shift-left is restricted to\nthe range of the type, rather than the bits shifted out of the LHS being returned to the other end.\nThe primitive integer types all implement a `[`rotate_left`](#method.rotate_left) function,\nwhich may be what you want instead.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((-1" , stringify ! ($SelfT ), ").wrapping_shl(7), -128);\nassert_eq!((-1" , stringify ! ($SelfT ), ").wrapping_shl(128), -1);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_shl ( self , rhs : u32 )-> Self { unsafe { intrinsics :: unchecked_shl ( self , ( rhs & ($BITS - 1 )) as $SelfT )}}} doc_comment ! { concat ! ( "Panic-free bitwise shift-right; yields `self >> mask(rhs)`, where `mask`\nremoves any high-order bits of `rhs` that would cause the shift to exceed the bitwidth of the type.\n\nNote that this is *not* the same as a rotate-right; the RHS of a wrapping shift-right is restricted\nto the range of the type, rather than the bits shifted out of the LHS being returned to the other\nend. 
The primitive integer types all implement a [`rotate_right`](#method.rotate_right) function,\nwhich may be what you want instead.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((-128" , stringify ! ($SelfT ), ").wrapping_shr(7), -1);\nassert_eq!((-128i16).wrapping_shr(64), -128);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_shr ( self , rhs : u32 )-> Self { unsafe { intrinsics :: unchecked_shr ( self , ( rhs & ($BITS - 1 )) as $SelfT )}}} doc_comment ! { concat ! ( "Wrapping (modular) absolute value. Computes `self.abs()`, wrapping around at\nthe boundary of the type.\n\nThe only case where such wrapping can occur is when one takes the absolute value of the negative\nminimal value for the type; this is a positive value that is too large to represent in the type. In\nsuch a case, this function returns `MIN` itself.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_abs(), 100);\nassert_eq!((-100" , stringify ! ($SelfT ), ").wrapping_abs(), 100);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.wrapping_abs(), " , stringify ! ($SelfT ), "::MIN);\nassert_eq!((-128i8).wrapping_abs() as u8, 128);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_abs" , since = "1.13.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ allow ( unused_attributes )]# [ inline ] pub const fn wrapping_abs ( self )-> Self { if self . is_negative (){ self . wrapping_neg ()} else { self }}} doc_comment ! { concat ! ( "Computes the absolute value of `self` without any wrapping\nor panicking.\n\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "#![feature(unsigned_abs)]\nassert_eq!(100" , stringify ! 
($SelfT ), ".unsigned_abs(), 100" , stringify ! ($UnsignedT ), ");\nassert_eq!((-100" , stringify ! ($SelfT ), ").unsigned_abs(), 100" , stringify ! ($UnsignedT ), ");\nassert_eq!((-128i8).unsigned_abs(), 128u8);" , $EndFeature , "\n```" ), # [ unstable ( feature = "unsigned_abs" , issue = "74913" )]# [ inline ] pub const fn unsigned_abs ( self )-> $UnsignedT { self . wrapping_abs () as $UnsignedT }} doc_comment ! { concat ! ( "Wrapping (modular) exponentiation. Computes `self.pow(exp)`,\nwrapping around at the boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(3" , stringify ! ($SelfT ), ".wrapping_pow(4), 81);\nassert_eq!(3i8.wrapping_pow(5), -13);\nassert_eq!(3i8.wrapping_pow(6), -39);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_pow ( self , mut exp : u32 )-> Self { if exp == 0 { return 1 ; } let mut base = self ; let mut acc : Self = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = acc . wrapping_mul ( base ); } exp /= 2 ; base = base . wrapping_mul ( base ); } acc . wrapping_mul ( base )}} doc_comment ! { concat ! ( "Calculates `self` + `rhs`\n\nReturns a tuple of the addition along with a boolean indicating whether an arithmetic overflow would\noccur. If an overflow would have occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_add(2), (7, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.overflowing_add(1), (" , stringify ! 
($SelfT ), "::MIN, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_add ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: add_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}} doc_comment ! { concat ! ( "Calculates `self` - `rhs`\n\nReturns a tuple of the subtraction along with a boolean indicating whether an arithmetic overflow\nwould occur. If an overflow would have occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_sub(2), (3, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_sub(1), (" , stringify ! ($SelfT ), "::MAX, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_sub ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: sub_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}} doc_comment ! { concat ! ( "Calculates the multiplication of `self` and `rhs`.\n\nReturns a tuple of the multiplication along with a boolean indicating whether an arithmetic overflow\nwould occur. If an overflow would have occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! 
($SelfT ), ".overflowing_mul(2), (10, false));\nassert_eq!(1_000_000_000i32.overflowing_mul(10), (1410065408, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_mul ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: mul_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}} doc_comment ! { concat ! ( "Calculates the divisor when `self` is divided by `rhs`.\n\nReturns a tuple of the divisor along with a boolean indicating whether an arithmetic overflow would\noccur. If an overflow would occur then self is returned.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_div(2), (2, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_div(-1), (" , stringify ! ($SelfT ), "::MIN, true));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_overflowing_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_div ( self , rhs : Self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN && rhs == - 1 ){( self , true )} else {( self / rhs , false )}}} doc_comment ! { concat ! ( "Calculates the quotient of Euclidean division `self.div_euclid(rhs)`.\n\nReturns a tuple of the divisor along with a boolean indicating whether an arithmetic overflow would\noccur. If an overflow would occur then `self` is returned.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(5" , stringify ! 
($SelfT ), ".overflowing_div_euclid(2), (2, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_div_euclid(-1), (" , stringify ! ($SelfT ), "::MIN, true));\n```" ), # [ inline ]# [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_div_euclid ( self , rhs : Self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN && rhs == - 1 ){( self , true )} else {( self . div_euclid ( rhs ), false )}}} doc_comment ! { concat ! ( "Calculates the remainder when `self` is divided by `rhs`.\n\nReturns a tuple of the remainder after dividing along with a boolean indicating whether an\narithmetic overflow would occur. If an overflow would occur then 0 is returned.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_rem(2), (1, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_rem(-1), (0, true));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_overflowing_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_rem ( self , rhs : Self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN && rhs == - 1 ){( 0 , true )} else {( self % rhs , false )}}} doc_comment ! { concat ! ( "Overflowing Euclidean remainder. Calculates `self.rem_euclid(rhs)`.\n\nReturns a tuple of the remainder after dividing along with a boolean indicating whether an\narithmetic overflow would occur. If an overflow would occur then 0 is returned.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(5" , stringify ! 
($SelfT ), ".overflowing_rem_euclid(2), (1, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_rem_euclid(-1), (0, true));\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_rem_euclid ( self , rhs : Self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN && rhs == - 1 ){( 0 , true )} else {( self . rem_euclid ( rhs ), false )}}} doc_comment ! { concat ! ( "Negates self, overflowing if this is equal to the minimum value.\n\nReturns a tuple of the negated version of self along with a boolean indicating whether an overflow\nhappened. If `self` is the minimum value (e.g., `i32::MIN` for values of type `i32`), then the\nminimum value will be returned again and `true` will be returned for an overflow happening.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(2" , stringify ! ($SelfT ), ".overflowing_neg(), (-2, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_neg(), (" , stringify ! ($SelfT ), "::MIN, true));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ allow ( unused_attributes )] pub const fn overflowing_neg ( self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN ){( Self :: MIN , true )} else {(- self , false )}}} doc_comment ! { concat ! ( "Shifts self left by `rhs` bits.\n\nReturns a tuple of the shifted version of self along with a boolean indicating whether the shift\nvalue was larger than or equal to the number of bits. If the shift value is too large, then value is\nmasked (N-1) where N is the number of bits, and this value is then used to perform the shift.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x1" , stringify ! 
($SelfT ), ".overflowing_shl(4), (0x10, false));\nassert_eq!(0x1i32.overflowing_shl(36), (0x10, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_shl ( self , rhs : u32 )-> ( Self , bool ){( self . wrapping_shl ( rhs ), ( rhs > ($BITS - 1 )))}} doc_comment ! { concat ! ( "Shifts self right by `rhs` bits.\n\nReturns a tuple of the shifted version of self along with a boolean indicating whether the shift\nvalue was larger than or equal to the number of bits. If the shift value is too large, then value is\nmasked (N-1) where N is the number of bits, and this value is then used to perform the shift.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x10" , stringify ! ($SelfT ), ".overflowing_shr(4), (0x1, false));\nassert_eq!(0x10i32.overflowing_shr(36), (0x1, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_shr ( self , rhs : u32 )-> ( Self , bool ){( self . wrapping_shr ( rhs ), ( rhs > ($BITS - 1 )))}} doc_comment ! { concat ! ( "Computes the absolute value of `self`.\n\nReturns a tuple of the absolute version of self along with a boolean indicating whether an overflow\nhappened. If self is the minimum value (e.g., " , stringify ! ($SelfT ), "::MIN for values of type\n " , stringify ! ($SelfT ), "), then the minimum value will be returned again and true will be returned\nfor an overflow happening.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(10" , stringify ! ($SelfT ), ".overflowing_abs(), (10, false));\nassert_eq!((-10" , stringify ! 
($SelfT ), ").overflowing_abs(), (10, false));\nassert_eq!((" , stringify ! ($SelfT ), "::MIN).overflowing_abs(), (" , stringify ! ($SelfT ), "::MIN, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_abs" , since = "1.13.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn overflowing_abs ( self )-> ( Self , bool ){( self . wrapping_abs (), self == Self :: MIN )}} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\nReturns a tuple of the exponentiation along with a bool indicating\nwhether an overflow happened.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(3" , stringify ! ($SelfT ), ".overflowing_pow(4), (81, false));\nassert_eq!(3i8.overflowing_pow(5), (-13, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_pow ( self , mut exp : u32 )-> ( Self , bool ){ if exp == 0 { return ( 1 , false ); } let mut base = self ; let mut acc : Self = 1 ; let mut overflown = false ; let mut r ; while exp > 1 { if ( exp & 1 )== 1 { r = acc . overflowing_mul ( base ); acc = r . 0 ; overflown |= r . 1 ; } exp /= 2 ; r = base . overflowing_mul ( base ); base = r . 0 ; overflown |= r . 1 ; } r = acc . overflowing_mul ( base ); r . 1 |= overflown ; r }} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let x: " , stringify ! 
($SelfT ), " = 2; // or any other integer type\n\nassert_eq!(x.pow(5), 32);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn pow ( self , mut exp : u32 )-> Self { if exp == 0 { return 1 ; } let mut base = self ; let mut acc = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = acc * base ; } exp /= 2 ; base = base * base ; } acc * base }} doc_comment ! { concat ! ( "Calculates the quotient of Euclidean division of `self` by `rhs`.\n\nThis computes the integer `n` such that `self = n * rhs + self.rem_euclid(rhs)`,\nwith `0 <= self.rem_euclid(rhs) < rhs`.\n\nIn other words, the result is `self / rhs` rounded to the integer `n`\nsuch that `self >= n * rhs`.\nIf `self > 0`, this is equal to round towards zero (the default in Rust);\nif `self < 0`, this is equal to round towards +/- infinity.\n\n# Panics\n\nThis function will panic if `rhs` is 0 or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\nlet a: " , stringify ! ($SelfT ), " = 7; // or any other integer type\nlet b = 4;\n\nassert_eq!(a.div_euclid(b), 1); // 7 >= 4 * 1\nassert_eq!(a.div_euclid(-b), -1); // 7 >= -4 * -1\nassert_eq!((-a).div_euclid(b), -2); // -7 >= 4 * -2\nassert_eq!((-a).div_euclid(-b), 2); // -7 >= -4 * 2\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn div_euclid ( self , rhs : Self )-> Self { let q = self / rhs ; if self % rhs < 0 { return if rhs > 0 { q - 1 } else { q + 1 }} q }} doc_comment ! { concat ! 
( "Calculates the least nonnegative remainder of `self (mod rhs)`.\n\nThis is done as if by the Euclidean division algorithm -- given\n`r = self.rem_euclid(rhs)`, `self = rhs * self.div_euclid(rhs) + r`, and\n`0 <= r < abs(rhs)`.\n\n# Panics\n\nThis function will panic if `rhs` is 0 or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\nlet a: " , stringify ! ($SelfT ), " = 7; // or any other integer type\nlet b = 4;\n\nassert_eq!(a.rem_euclid(b), 3);\nassert_eq!((-a).rem_euclid(b), 1);\nassert_eq!(a.rem_euclid(-b), 3);\nassert_eq!((-a).rem_euclid(-b), 1);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn rem_euclid ( self , rhs : Self )-> Self { let r = self % rhs ; if r < 0 { if rhs < 0 { r - rhs } else { r + rhs }} else { r }}} doc_comment ! { concat ! ( "Computes the absolute value of `self`.\n\n# Overflow behavior\n\nThe absolute value of `" , stringify ! ($SelfT ), "::MIN` cannot be represented as an\n`" , stringify ! ($SelfT ), "`, and attempting to calculate it will cause an overflow. This means that\ncode in debug mode will trigger a panic on this case and optimized code will return `" , stringify ! ($SelfT ), "::MIN` without a panic.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(10" , stringify ! ($SelfT ), ".abs(), 10);\nassert_eq!((-10" , stringify ! ($SelfT ), ").abs(), 10);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ allow ( unused_attributes )]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn abs ( self )-> Self { if self . is_negative (){- self } else { self }}} doc_comment ! { concat ! 
( "Returns a number representing sign of `self`.\n\n - `0` if the number is zero\n - `1` if the number is positive\n - `-1` if the number is negative\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(10" , stringify ! ($SelfT ), ".signum(), 1);\nassert_eq!(0" , stringify ! ($SelfT ), ".signum(), 0);\nassert_eq!((-10" , stringify ! ($SelfT ), ").signum(), -1);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_sign" , since = "1.47.0" )]# [ inline ] pub const fn signum ( self )-> Self { match self { n if n > 0 => 1 , 0 => 0 , _ =>- 1 , }}} doc_comment ! { concat ! ( "Returns `true` if `self` is positive and `false` if the number is zero or\nnegative.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert!(10" , stringify ! ($SelfT ), ".is_positive());\nassert!(!(-10" , stringify ! ($SelfT ), ").is_positive());" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn is_positive ( self )-> bool { self > 0 }} doc_comment ! { concat ! ( "Returns `true` if `self` is negative and `false` if the number is zero or\npositive.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert!((-10" , stringify ! ($SelfT ), ").is_negative());\nassert!(!10" , stringify ! ($SelfT ), ".is_negative());" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn is_negative ( self )-> bool { self < 0 }} doc_comment ! { concat ! ( "Return the memory representation of this integer as a byte array in\nbig-endian (network) byte order.\n" , $to_xe_bytes_doc , "\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! 
($SelfT ), ".to_be_bytes();\nassert_eq!(bytes, " , $be_bytes , ");\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn to_be_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ self . to_be (). to_ne_bytes ()}} doc_comment ! { concat ! ( "Return the memory representation of this integer as a byte array in\nlittle-endian byte order.\n" , $to_xe_bytes_doc , "\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_le_bytes();\nassert_eq!(bytes, " , $le_bytes , ");\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn to_le_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ self . to_le (). to_ne_bytes ()}} doc_comment ! { concat ! ( "\nReturn the memory representation of this integer as a byte array in\nnative byte order.\n\nAs the target platform's native endianness is used, portable code\nshould use [`to_be_bytes`] or [`to_le_bytes`], as appropriate,\ninstead.\n" , $to_xe_bytes_doc , "\n[`to_be_bytes`]: #method.to_be_bytes\n[`to_le_bytes`]: #method.to_le_bytes\n\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_ne_bytes();\nassert_eq!(\n bytes,\n if cfg!(target_endian = \"big\") {\n " , $be_bytes , "\n } else {\n " , $le_bytes , "\n }\n);\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ cfg_attr ( not ( bootstrap ), rustc_allow_const_fn_unstable ( const_fn_transmute ))]# [ cfg_attr ( bootstrap , allow_internal_unstable ( const_fn_transmute ))]# [ inline ] pub const fn to_ne_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ unsafe { mem :: transmute ( self )}}} doc_comment ! { concat ! 
( "\nReturn the memory representation of this integer as a byte array in\nnative byte order.\n\n[`to_ne_bytes`] should be preferred over this whenever possible.\n\n[`to_ne_bytes`]: #method.to_ne_bytes\n" , "\n# Examples\n\n```\n#![feature(num_as_ne_bytes)]\nlet num = " , $swap_op , stringify ! ($SelfT ), ";\nlet bytes = num.as_ne_bytes();\nassert_eq!(\n bytes,\n if cfg!(target_endian = \"big\") {\n &" , $be_bytes , "\n } else {\n &" , $le_bytes , "\n }\n);\n```" ), # [ unstable ( feature = "num_as_ne_bytes" , issue = "76976" )]# [ inline ] pub fn as_ne_bytes (& self )-> & [ u8 ; mem :: size_of ::< Self > ()]{ unsafe {&* ( self as * const Self as * const _)}}} doc_comment ! { concat ! ( "Create an integer value from its representation as a byte array in\nbig endian.\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_be_bytes(" , $be_bytes , ");\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_be_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_be_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn from_be_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { Self :: from_be ( Self :: from_ne_bytes ( bytes ))}} doc_comment ! { concat ! ( "\nCreate an integer value from its representation as a byte array in\nlittle endian.\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! 
($SelfT ), "::from_le_bytes(" , $le_bytes , ");\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_le_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_le_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn from_le_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { Self :: from_le ( Self :: from_ne_bytes ( bytes ))}} doc_comment ! { concat ! ( "Create an integer value from its memory representation as a byte\narray in native endianness.\n\nAs the target platform's native endianness is used, portable code\nlikely wants to use [`from_be_bytes`] or [`from_le_bytes`], as\nappropriate instead.\n\n[`from_be_bytes`]: #method.from_be_bytes\n[`from_le_bytes`]: #method.from_le_bytes\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_ne_bytes(if cfg!(target_endian = \"big\") {\n " , $be_bytes , "\n} else {\n " , $le_bytes , "\n});\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_ne_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! 
($SelfT ), "::from_ne_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ cfg_attr ( not ( bootstrap ), rustc_allow_const_fn_unstable ( const_fn_transmute ))]# [ cfg_attr ( bootstrap , allow_internal_unstable ( const_fn_transmute ))]# [ inline ] pub const fn from_ne_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { unsafe { mem :: transmute ( bytes )}}} doc_comment ! { concat ! ( "**This method is soft-deprecated.**\n\nAlthough using it won’t cause a compilation warning,\nnew code should use [`" , stringify ! ($SelfT ), "::MIN" , "`](#associatedconstant.MIN) instead.\n\nReturns the smallest value that can be represented by this integer type." ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ inline ( always )]# [ rustc_promotable ]# [ rustc_const_stable ( feature = "const_min_value" , since = "1.32.0" )] pub const fn min_value ()-> Self { Self :: MIN }} doc_comment ! { concat ! ( "**This method is soft-deprecated.**\n\nAlthough using it won’t cause a compilation warning,\nnew code should use [`" , stringify ! ($SelfT ), "::MAX" , "`](#associatedconstant.MAX) instead.\n\nReturns the largest value that can be represented by this integer type." ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ inline ( always )]# [ rustc_promotable ]# [ rustc_const_stable ( feature = "const_max_value" , since = "1.32.0" )] pub const fn max_value ()-> Self { Self :: MAX }}}}
+macro_rules! __ra_macro_fixture429 {($x : expr , $($tt : tt )*)=>{# [ doc = $x ]$($tt )* }; } // Fixture: attaches `#[doc = $x]` to the following item tokens (the `doc_comment!` helper shape used by the surrounding integer-method fixtures). Spacing looks machine-generated token-stream output — keep byte-for-byte; reformatting would change the benchmark input.
+macro_rules! __ra_macro_fixture430 {()=>{ "\n\n**Note**: This function returns an array of length 2, 4 or 8 bytes\ndepending on the target pointer size.\n\n" }; } // Fixture: zero-arg macro expanding to a fixed doc-note string literal; presumably passed as the `$to_xe_bytes_doc` argument of the integer-method fixtures for pointer-sized ints — TODO(review): confirm against the fixture invocations.
+macro_rules! __ra_macro_fixture431 {()=>{ "\n\n**Note**: This function takes an array of length 2, 4 or 8 bytes\ndepending on the target pointer size.\n\n" }; } // Fixture: zero-arg macro expanding to a fixed doc-note string literal; presumably the `$from_xe_bytes_doc` counterpart of the fixture above — TODO(review): confirm against the fixture invocations.
+macro_rules! __ra_macro_fixture432 {($SelfT : ty , $ActualT : ty , $BITS : expr , $MaxV : expr , $Feature : expr , $EndFeature : expr , $rot : expr , $rot_op : expr , $rot_result : expr , $swap_op : expr , $swapped : expr , $reversed : expr , $le_bytes : expr , $be_bytes : expr , $to_xe_bytes_doc : expr , $from_xe_bytes_doc : expr )=>{ doc_comment ! { concat ! ( "The smallest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MIN, 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "assoc_int_consts" , since = "1.43.0" )] pub const MIN : Self = 0 ; } doc_comment ! { concat ! ( "The largest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX, " , stringify ! ($MaxV ), ");" , $EndFeature , "\n```" ), # [ stable ( feature = "assoc_int_consts" , since = "1.43.0" )] pub const MAX : Self = ! 0 ; } doc_comment ! { concat ! ( "The size of this integer type in bits.\n\n# Examples\n\n```\n" , $Feature , "#![feature(int_bits_const)]\nassert_eq!(" , stringify ! ($SelfT ), "::BITS, " , stringify ! ($BITS ), ");" , $EndFeature , "\n```" ), # [ unstable ( feature = "int_bits_const" , issue = "76904" )] pub const BITS : u32 = $BITS ; } doc_comment ! { concat ! ( "Converts a string slice in a given base to an integer.\n\nThe string is expected to be an optional `+` sign\nfollowed by digits.\nLeading and trailing whitespace represent an error.\nDigits are a subset of these characters, depending on `radix`:\n\n* `0-9`\n* `a-z`\n* `A-Z`\n\n# Panics\n\nThis function panics if `radix` is not in the range from 2 to 36.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! 
($SelfT ), "::from_str_radix(\"A\", 16), Ok(10));" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )] pub fn from_str_radix ( src : & str , radix : u32 )-> Result < Self , ParseIntError > { from_str_radix ( src , radix )}} doc_comment ! { concat ! ( "Returns the number of ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0b01001100" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.count_ones(), 3);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn count_ones ( self )-> u32 { intrinsics :: ctpop ( self as $ActualT ) as u32 }} doc_comment ! { concat ! ( "Returns the number of zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX.count_zeros(), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn count_zeros ( self )-> u32 {(! self ). count_ones ()}} doc_comment ! { concat ! ( "Returns the number of leading zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = " , stringify ! ($SelfT ), "::MAX >> 2;\n\nassert_eq!(n.leading_zeros(), 2);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn leading_zeros ( self )-> u32 { intrinsics :: ctlz ( self as $ActualT ) as u32 }} doc_comment ! { concat ! ( "Returns the number of trailing zeros in the binary representation\nof `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0b0101000" , stringify ! 
($SelfT ), ";\n\nassert_eq!(n.trailing_zeros(), 3);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn trailing_zeros ( self )-> u32 { intrinsics :: cttz ( self ) as u32 }} doc_comment ! { concat ! ( "Returns the number of leading ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = !(" , stringify ! ($SelfT ), "::MAX >> 2);\n\nassert_eq!(n.leading_ones(), 2);" , $EndFeature , "\n```" ), # [ stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ rustc_const_stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ inline ] pub const fn leading_ones ( self )-> u32 {(! self ). leading_zeros ()}} doc_comment ! { concat ! ( "Returns the number of trailing ones in the binary representation\nof `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0b1010111" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.trailing_ones(), 3);" , $EndFeature , "\n```" ), # [ stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ rustc_const_stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ inline ] pub const fn trailing_ones ( self )-> u32 {(! self ). trailing_zeros ()}} doc_comment ! { concat ! ( "Shifts the bits to the left by a specified amount, `n`,\nwrapping the truncated bits to the end of the resulting integer.\n\nPlease note this isn't the same operation as the `<<` shifting operator!\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $rot_op , stringify ! 
($SelfT ), ";\nlet m = " , $rot_result , ";\n\nassert_eq!(n.rotate_left(" , $rot , "), m);\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn rotate_left ( self , n : u32 )-> Self { intrinsics :: rotate_left ( self , n as $SelfT )}} doc_comment ! { concat ! ( "Shifts the bits to the right by a specified amount, `n`,\nwrapping the truncated bits to the beginning of the resulting\ninteger.\n\nPlease note this isn't the same operation as the `>>` shifting operator!\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $rot_result , stringify ! ($SelfT ), ";\nlet m = " , $rot_op , ";\n\nassert_eq!(n.rotate_right(" , $rot , "), m);\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn rotate_right ( self , n : u32 )-> Self { intrinsics :: rotate_right ( self , n as $SelfT )}} doc_comment ! { concat ! ( "\nReverses the byte order of the integer.\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $swap_op , stringify ! ($SelfT ), ";\nlet m = n.swap_bytes();\n\nassert_eq!(m, " , $swapped , ");\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn swap_bytes ( self )-> Self { intrinsics :: bswap ( self as $ActualT ) as Self }} doc_comment ! { concat ! ( "Reverses the order of bits in the integer. The least significant bit becomes the most significant bit,\n second least-significant bit becomes second most-significant bit, etc.\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $swap_op , stringify ! 
($SelfT ), ";\nlet m = n.reverse_bits();\n\nassert_eq!(m, " , $reversed , ");\nassert_eq!(0, 0" , stringify ! ($SelfT ), ".reverse_bits());\n```" ), # [ stable ( feature = "reverse_bits" , since = "1.37.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ]# [ must_use ] pub const fn reverse_bits ( self )-> Self { intrinsics :: bitreverse ( self as $ActualT ) as Self }} doc_comment ! { concat ! ( "Converts an integer from big endian to the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(" , stringify ! ($SelfT ), "::from_be(n), n)\n} else {\n assert_eq!(" , stringify ! ($SelfT ), "::from_be(n), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn from_be ( x : Self )-> Self {# [ cfg ( target_endian = "big" )]{ x }# [ cfg ( not ( target_endian = "big" ))]{ x . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts an integer from little endian to the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(" , stringify ! ($SelfT ), "::from_le(n), n)\n} else {\n assert_eq!(" , stringify ! ($SelfT ), "::from_le(n), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn from_le ( x : Self )-> Self {# [ cfg ( target_endian = "little" )]{ x }# [ cfg ( not ( target_endian = "little" ))]{ x . swap_bytes ()}}} doc_comment ! { concat ! 
( "Converts `self` to big endian from the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(n.to_be(), n)\n} else {\n assert_eq!(n.to_be(), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn to_be ( self )-> Self {# [ cfg ( target_endian = "big" )]{ self }# [ cfg ( not ( target_endian = "big" ))]{ self . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts `self` to little endian from the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(n.to_le(), n)\n} else {\n assert_eq!(n.to_le(), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn to_le ( self )-> Self {# [ cfg ( target_endian = "little" )]{ self }# [ cfg ( not ( target_endian = "little" ))]{ self . swap_bytes ()}}} doc_comment ! { concat ! ( "Checked integer addition. Computes `self + rhs`, returning `None`\nif overflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((" , stringify ! ($SelfT ), "::MAX - 2).checked_add(1), " , "Some(" , stringify ! ($SelfT ), "::MAX - 1));\nassert_eq!((" , stringify ! 
($SelfT ), "::MAX - 2).checked_add(3), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_add ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_add ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer addition. Computes `self + rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self + rhs > " , stringify ! ($SelfT ), "::MAX` or `self + rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_add ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_add ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer subtraction. Computes `self - rhs`, returning\n`None` if overflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(1" , stringify ! ($SelfT ), ".checked_sub(1), Some(0));\nassert_eq!(0" , stringify ! ($SelfT ), ".checked_sub(1), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_sub ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_sub ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer subtraction. Computes `self - rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self - rhs > " , stringify ! 
($SelfT ), "::MAX` or `self - rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_sub ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_sub ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer multiplication. Computes `self * rhs`, returning\n`None` if overflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! ($SelfT ), ".checked_mul(1), Some(5));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_mul(2), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_mul ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_mul ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer multiplication. Computes `self * rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self * rhs > " , stringify ! ($SelfT ), "::MAX` or `self * rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_mul ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_mul ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer division. Computes `self / rhs`, returning `None`\nif `rhs == 0`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(128" , stringify ! ($SelfT ), ".checked_div(2), Some(64));\nassert_eq!(1" , stringify ! 
($SelfT ), ".checked_div(0), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_checked_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_div ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 ){ None } else { Some ( unsafe { intrinsics :: unchecked_div ( self , rhs )})}}} doc_comment ! { concat ! ( "Checked Euclidean division. Computes `self.div_euclid(rhs)`, returning `None`\nif `rhs == 0`.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(128" , stringify ! ($SelfT ), ".checked_div_euclid(2), Some(64));\nassert_eq!(1" , stringify ! ($SelfT ), ".checked_div_euclid(0), None);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_div_euclid ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 ){ None } else { Some ( self . div_euclid ( rhs ))}}} doc_comment ! { concat ! ( "Checked integer remainder. Computes `self % rhs`, returning `None`\nif `rhs == 0`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! ($SelfT ), ".checked_rem(2), Some(1));\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem(0), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_checked_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_rem ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 ){ None } else { Some ( unsafe { intrinsics :: unchecked_rem ( self , rhs )})}}} doc_comment ! { concat ! 
( "Checked Euclidean modulo. Computes `self.rem_euclid(rhs)`, returning `None`\nif `rhs == 0`.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem_euclid(2), Some(1));\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem_euclid(0), None);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_rem_euclid ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 ){ None } else { Some ( self . rem_euclid ( rhs ))}}} doc_comment ! { concat ! ( "Checked negation. Computes `-self`, returning `None` unless `self ==\n0`.\n\nNote that negating any positive integer will overflow.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0" , stringify ! ($SelfT ), ".checked_neg(), Some(0));\nassert_eq!(1" , stringify ! ($SelfT ), ".checked_neg(), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn checked_neg ( self )-> Option < Self > { let ( a , b )= self . overflowing_neg (); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked shift left. Computes `self << rhs`, returning `None`\nif `rhs` is larger than or equal to the number of bits in `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x1" , stringify ! ($SelfT ), ".checked_shl(4), Some(0x10));\nassert_eq!(0x10" , stringify ! 
($SelfT ), ".checked_shl(129), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_shl ( self , rhs : u32 )-> Option < Self > { let ( a , b )= self . overflowing_shl ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked shift right. Computes `self >> rhs`, returning `None`\nif `rhs` is larger than or equal to the number of bits in `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shr(4), Some(0x1));\nassert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shr(129), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_shr ( self , rhs : u32 )-> Option < Self > { let ( a , b )= self . overflowing_shr ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked exponentiation. Computes `self.pow(exp)`, returning `None` if\noverflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(2" , stringify ! ($SelfT ), ".checked_pow(5), Some(32));\nassert_eq!(" , stringify ! 
($SelfT ), "::MAX.checked_pow(2), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_pow ( self , mut exp : u32 )-> Option < Self > { if exp == 0 { return Some ( 1 ); } let mut base = self ; let mut acc : Self = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = try_opt ! ( acc . checked_mul ( base )); } exp /= 2 ; base = try_opt ! ( base . checked_mul ( base )); } Some ( try_opt ! ( acc . checked_mul ( base )))}} doc_comment ! { concat ! ( "Saturating integer addition. Computes `self + rhs`, saturating at\nthe numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_add(1), 101);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_add(127), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn saturating_add ( self , rhs : Self )-> Self { intrinsics :: saturating_add ( self , rhs )}} doc_comment ! { concat ! ( "Saturating integer subtraction. Computes `self - rhs`, saturating\nat the numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_sub(27), 73);\nassert_eq!(13" , stringify ! 
($SelfT ), ".saturating_sub(127), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn saturating_sub ( self , rhs : Self )-> Self { intrinsics :: saturating_sub ( self , rhs )}} doc_comment ! { concat ! ( "Saturating integer multiplication. Computes `self * rhs`,\nsaturating at the numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(2" , stringify ! ($SelfT ), ".saturating_mul(10), 20);\nassert_eq!((" , stringify ! ($SelfT ), "::MAX).saturating_mul(10), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_mul ( self , rhs : Self )-> Self { match self . checked_mul ( rhs ){ Some ( x )=> x , None => Self :: MAX , }}} doc_comment ! { concat ! ( "Saturating integer exponentiation. Computes `self.pow(exp)`,\nsaturating at the numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(4" , stringify ! ($SelfT ), ".saturating_pow(3), 64);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_pow(2), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_pow ( self , exp : u32 )-> Self { match self . checked_pow ( exp ){ Some ( x )=> x , None => Self :: MAX , }}} doc_comment ! { concat ! 
( "Wrapping (modular) addition. Computes `self + rhs`,\nwrapping around at the boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(200" , stringify ! ($SelfT ), ".wrapping_add(55), 255);\nassert_eq!(200" , stringify ! ($SelfT ), ".wrapping_add(" , stringify ! ($SelfT ), "::MAX), 199);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_add ( self , rhs : Self )-> Self { intrinsics :: wrapping_add ( self , rhs )}} doc_comment ! { concat ! ( "Wrapping (modular) subtraction. Computes `self - rhs`,\nwrapping around at the boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_sub(100), 0);\nassert_eq!(100" , stringify ! ($SelfT ), ".wrapping_sub(" , stringify ! ($SelfT ), "::MAX), 101);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_sub ( self , rhs : Self )-> Self { intrinsics :: wrapping_sub ( self , rhs )}}# [ doc = " Wrapping (modular) multiplication. Computes `self *" ]# [ doc = " rhs`, wrapping around at the boundary of the type." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " Please note that this example is shared between integer types." ]# [ doc = " Which explains why `u8` is used here." 
]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " assert_eq!(10u8.wrapping_mul(12), 120);" ]# [ doc = " assert_eq!(25u8.wrapping_mul(12), 44);" ]# [ doc = " ```" ]# [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_mul ( self , rhs : Self )-> Self { intrinsics :: wrapping_mul ( self , rhs )} doc_comment ! { concat ! ( "Wrapping (modular) division. Computes `self / rhs`.\nWrapped division on unsigned types is just normal division.\nThere's no way wrapping could ever happen.\nThis function exists, so that all operations\nare accounted for in the wrapping operations.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_div(10), 10);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_unstable ( feature = "const_wrapping_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_div ( self , rhs : Self )-> Self { self / rhs }} doc_comment ! { concat ! ( "Wrapping Euclidean division. Computes `self.div_euclid(rhs)`.\nWrapped division on unsigned types is just normal division.\nThere's no way wrapping could ever happen.\nThis function exists, so that all operations\nare accounted for in the wrapping operations.\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self.wrapping_div(rhs)`.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(100" , stringify ! 
($SelfT ), ".wrapping_div_euclid(10), 10);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_div_euclid ( self , rhs : Self )-> Self { self / rhs }} doc_comment ! { concat ! ( "Wrapping (modular) remainder. Computes `self % rhs`.\nWrapped remainder calculation on unsigned types is\njust the regular remainder calculation.\nThere's no way wrapping could ever happen.\nThis function exists, so that all operations\nare accounted for in the wrapping operations.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_rem(10), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_unstable ( feature = "const_wrapping_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_rem ( self , rhs : Self )-> Self { self % rhs }} doc_comment ! { concat ! ( "Wrapping Euclidean modulo. Computes `self.rem_euclid(rhs)`.\nWrapped modulo calculation on unsigned types is\njust the regular remainder calculation.\nThere's no way wrapping could ever happen.\nThis function exists, so that all operations\nare accounted for in the wrapping operations.\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self.wrapping_rem(rhs)`.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(100" , stringify ! 
($SelfT ), ".wrapping_rem_euclid(10), 0);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_rem_euclid ( self , rhs : Self )-> Self { self % rhs }}# [ doc = " Wrapping (modular) negation. Computes `-self`," ]# [ doc = " wrapping around at the boundary of the type." ]# [ doc = "" ]# [ doc = " Since unsigned types do not have negative equivalents" ]# [ doc = " all applications of this function will wrap (except for `-0`)." ]# [ doc = " For values smaller than the corresponding signed type\\\'s maximum" ]# [ doc = " the result is the same as casting the corresponding signed value." ]# [ doc = " Any larger values are equivalent to `MAX + 1 - (val - MAX - 1)` where" ]# [ doc = " `MAX` is the corresponding signed type\\\'s maximum." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " Please note that this example is shared between integer types." ]# [ doc = " Which explains why `i8` is used here." ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " assert_eq!(100i8.wrapping_neg(), -100);" ]# [ doc = " assert_eq!((-128i8).wrapping_neg(), -128);" ]# [ doc = " ```" ]# [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ inline ] pub const fn wrapping_neg ( self )-> Self { self . overflowing_neg (). 0 } doc_comment ! { concat ! ( "Panic-free bitwise shift-left; yields `self << mask(rhs)`,\nwhere `mask` removes any high-order bits of `rhs` that\nwould cause the shift to exceed the bitwidth of the type.\n\nNote that this is *not* the same as a rotate-left; the\nRHS of a wrapping shift-left is restricted to the range\nof the type, rather than the bits shifted out of the LHS\nbeing returned to the other end. 
The primitive integer\ntypes all implement a [`rotate_left`](#method.rotate_left) function,\nwhich may be what you want instead.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(1" , stringify ! ($SelfT ), ".wrapping_shl(7), 128);\nassert_eq!(1" , stringify ! ($SelfT ), ".wrapping_shl(128), 1);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_shl ( self , rhs : u32 )-> Self { unsafe { intrinsics :: unchecked_shl ( self , ( rhs & ($BITS - 1 )) as $SelfT )}}} doc_comment ! { concat ! ( "Panic-free bitwise shift-right; yields `self >> mask(rhs)`,\nwhere `mask` removes any high-order bits of `rhs` that\nwould cause the shift to exceed the bitwidth of the type.\n\nNote that this is *not* the same as a rotate-right; the\nRHS of a wrapping shift-right is restricted to the range\nof the type, rather than the bits shifted out of the LHS\nbeing returned to the other end. The primitive integer\ntypes all implement a [`rotate_right`](#method.rotate_right) function,\nwhich may be what you want instead.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(128" , stringify ! ($SelfT ), ".wrapping_shr(7), 1);\nassert_eq!(128" , stringify ! ($SelfT ), ".wrapping_shr(128), 128);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_shr ( self , rhs : u32 )-> Self { unsafe { intrinsics :: unchecked_shr ( self , ( rhs & ($BITS - 1 )) as $SelfT )}}} doc_comment ! { concat ! ( "Wrapping (modular) exponentiation. 
Computes `self.pow(exp)`,\nwrapping around at the boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(3" , stringify ! ($SelfT ), ".wrapping_pow(5), 243);\nassert_eq!(3u8.wrapping_pow(6), 217);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_pow ( self , mut exp : u32 )-> Self { if exp == 0 { return 1 ; } let mut base = self ; let mut acc : Self = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = acc . wrapping_mul ( base ); } exp /= 2 ; base = base . wrapping_mul ( base ); } acc . wrapping_mul ( base )}} doc_comment ! { concat ! ( "Calculates `self` + `rhs`\n\nReturns a tuple of the addition along with a boolean indicating\nwhether an arithmetic overflow would occur. If an overflow would\nhave occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_add(2), (7, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.overflowing_add(1), (0, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_add ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: add_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}} doc_comment ! { concat ! ( "Calculates `self` - `rhs`\n\nReturns a tuple of the subtraction along with a boolean indicating\nwhether an arithmetic overflow would occur. If an overflow would\nhave occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! 
($SelfT ), ".overflowing_sub(2), (3, false));\nassert_eq!(0" , stringify ! ($SelfT ), ".overflowing_sub(1), (" , stringify ! ($SelfT ), "::MAX, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_sub ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: sub_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}}# [ doc = " Calculates the multiplication of `self` and `rhs`." ]# [ doc = "" ]# [ doc = " Returns a tuple of the multiplication along with a boolean" ]# [ doc = " indicating whether an arithmetic overflow would occur. If an" ]# [ doc = " overflow would have occurred then the wrapped value is returned." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " Please note that this example is shared between integer types." ]# [ doc = " Which explains why `u32` is used here." ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " assert_eq!(5u32.overflowing_mul(2), (10, false));" ]# [ doc = " assert_eq!(1_000_000_000u32.overflowing_mul(10), (1410065408, true));" ]# [ doc = " ```" ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_mul ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: mul_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )} doc_comment ! { concat ! ( "Calculates the divisor when `self` is divided by `rhs`.\n\nReturns a tuple of the divisor along with a boolean indicating\nwhether an arithmetic overflow would occur. 
Note that for unsigned\nintegers overflow never occurs, so the second value is always\n`false`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! ($SelfT ), ".overflowing_div(2), (2, false));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_overflowing_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_div ( self , rhs : Self )-> ( Self , bool ){( self / rhs , false )}} doc_comment ! { concat ! ( "Calculates the quotient of Euclidean division `self.div_euclid(rhs)`.\n\nReturns a tuple of the divisor along with a boolean indicating\nwhether an arithmetic overflow would occur. Note that for unsigned\nintegers overflow never occurs, so the second value is always\n`false`.\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self.overflowing_div(rhs)`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_div_euclid(2), (2, false));\n```" ), # [ inline ]# [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_div_euclid ( self , rhs : Self )-> ( Self , bool ){( self / rhs , false )}} doc_comment ! { concat ! ( "Calculates the remainder when `self` is divided by `rhs`.\n\nReturns a tuple of the remainder after dividing along with a boolean\nindicating whether an arithmetic overflow would occur. 
Note that for\nunsigned integers overflow never occurs, so the second value is\nalways `false`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! ($SelfT ), ".overflowing_rem(2), (1, false));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_overflowing_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_rem ( self , rhs : Self )-> ( Self , bool ){( self % rhs , false )}} doc_comment ! { concat ! ( "Calculates the remainder `self.rem_euclid(rhs)` as if by Euclidean division.\n\nReturns a tuple of the modulo after dividing along with a boolean\nindicating whether an arithmetic overflow would occur. Note that for\nunsigned integers overflow never occurs, so the second value is\nalways `false`.\nSince, for the positive integers, all common\ndefinitions of division are equal, this operation\nis exactly equal to `self.overflowing_rem(rhs)`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_rem_euclid(2), (1, false));\n```" ), # [ inline ]# [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_rem_euclid ( self , rhs : Self )-> ( Self , bool ){( self % rhs , false )}} doc_comment ! { concat ! ( "Negates self in an overflowing fashion.\n\nReturns `!self + 1` using wrapping operations to return the value\nthat represents the negation of this unsigned value. 
Note that for\npositive unsigned values overflow always occurs, but negating 0 does\nnot overflow.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(0" , stringify ! ($SelfT ), ".overflowing_neg(), (0, false));\nassert_eq!(2" , stringify ! ($SelfT ), ".overflowing_neg(), (-2i32 as " , stringify ! ($SelfT ), ", true));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )] pub const fn overflowing_neg ( self )-> ( Self , bool ){((! self ). wrapping_add ( 1 ), self != 0 )}} doc_comment ! { concat ! ( "Shifts self left by `rhs` bits.\n\nReturns a tuple of the shifted version of self along with a boolean\nindicating whether the shift value was larger than or equal to the\nnumber of bits. If the shift value is too large, then value is\nmasked (N-1) where N is the number of bits, and this value is then\nused to perform the shift.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(0x1" , stringify ! ($SelfT ), ".overflowing_shl(4), (0x10, false));\nassert_eq!(0x1" , stringify ! ($SelfT ), ".overflowing_shl(132), (0x10, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_shl ( self , rhs : u32 )-> ( Self , bool ){( self . wrapping_shl ( rhs ), ( rhs > ($BITS - 1 )))}} doc_comment ! { concat ! ( "Shifts self right by `rhs` bits.\n\nReturns a tuple of the shifted version of self along with a boolean\nindicating whether the shift value was larger than or equal to the\nnumber of bits. 
If the shift value is too large, then value is\nmasked (N-1) where N is the number of bits, and this value is then\nused to perform the shift.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(0x10" , stringify ! ($SelfT ), ".overflowing_shr(4), (0x1, false));\nassert_eq!(0x10" , stringify ! ($SelfT ), ".overflowing_shr(132), (0x1, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_shr ( self , rhs : u32 )-> ( Self , bool ){( self . wrapping_shr ( rhs ), ( rhs > ($BITS - 1 )))}} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\nReturns a tuple of the exponentiation along with a bool indicating\nwhether an overflow happened.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(3" , stringify ! ($SelfT ), ".overflowing_pow(5), (243, false));\nassert_eq!(3u8.overflowing_pow(6), (217, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_pow ( self , mut exp : u32 )-> ( Self , bool ){ if exp == 0 { return ( 1 , false ); } let mut base = self ; let mut acc : Self = 1 ; let mut overflown = false ; let mut r ; while exp > 1 { if ( exp & 1 )== 1 { r = acc . overflowing_mul ( base ); acc = r . 0 ; overflown |= r . 1 ; } exp /= 2 ; r = base . overflowing_mul ( base ); base = r . 0 ; overflown |= r . 1 ; } r = acc . overflowing_mul ( base ); r . 1 |= overflown ; r }} doc_comment ! { concat ! 
( "Raises self to the power of `exp`, using exponentiation by squaring.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(2" , stringify ! ($SelfT ), ".pow(5), 32);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn pow ( self , mut exp : u32 )-> Self { if exp == 0 { return 1 ; } let mut base = self ; let mut acc = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = acc * base ; } exp /= 2 ; base = base * base ; } acc * base }} doc_comment ! { concat ! ( "Performs Euclidean division.\n\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self / rhs`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(7" , stringify ! ($SelfT ), ".div_euclid(4), 1); // or any other integer type\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn div_euclid ( self , rhs : Self )-> Self { self / rhs }} doc_comment ! { concat ! ( "Calculates the least remainder of `self (mod rhs)`.\n\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self % rhs`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(7" , stringify ! 
($SelfT ), ".rem_euclid(4), 3); // or any other integer type\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn rem_euclid ( self , rhs : Self )-> Self { self % rhs }} doc_comment ! { concat ! ( "Returns `true` if and only if `self == 2^k` for some `k`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert!(16" , stringify ! ($SelfT ), ".is_power_of_two());\nassert!(!10" , stringify ! ($SelfT ), ".is_power_of_two());" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_is_power_of_two" , since = "1.32.0" )]# [ inline ] pub const fn is_power_of_two ( self )-> bool { self . count_ones ()== 1 }}# [ inline ]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )] const fn one_less_than_next_power_of_two ( self )-> Self { if self <= 1 { return 0 ; } let p = self - 1 ; let z = unsafe { intrinsics :: ctlz_nonzero ( p )}; <$SelfT >:: MAX >> z } doc_comment ! { concat ! ( "Returns the smallest power of two greater than or equal to `self`.\n\nWhen return value overflows (i.e., `self > (1 << (N-1))` for type\n`uN`), it panics in debug mode and return value is wrapped to 0 in\nrelease mode (the only situation in which method can return 0).\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(2" , stringify ! ($SelfT ), ".next_power_of_two(), 2);\nassert_eq!(3" , stringify ! ($SelfT ), ".next_power_of_two(), 4);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn next_power_of_two ( self )-> Self { self . 
one_less_than_next_power_of_two ()+ 1 }} doc_comment ! { concat ! ( "Returns the smallest power of two greater than or equal to `n`. If\nthe next power of two is greater than the type's maximum value,\n`None` is returned, otherwise the power of two is wrapped in `Some`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(2" , stringify ! ($SelfT ), ".checked_next_power_of_two(), Some(2));\nassert_eq!(3" , stringify ! ($SelfT ), ".checked_next_power_of_two(), Some(4));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_next_power_of_two(), None);" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )] pub const fn checked_next_power_of_two ( self )-> Option < Self > { self . one_less_than_next_power_of_two (). checked_add ( 1 )}} doc_comment ! { concat ! ( "Returns the smallest power of two greater than or equal to `n`. If\nthe next power of two is greater than the type's maximum value,\nthe return value is wrapped to `0`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_next_power_of_two)]\n" , $Feature , "\nassert_eq!(2" , stringify ! ($SelfT ), ".wrapping_next_power_of_two(), 2);\nassert_eq!(3" , stringify ! ($SelfT ), ".wrapping_next_power_of_two(), 4);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.wrapping_next_power_of_two(), 0);" , $EndFeature , "\n```" ), # [ unstable ( feature = "wrapping_next_power_of_two" , issue = "32463" , reason = "needs decision on wrapping behaviour" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )] pub const fn wrapping_next_power_of_two ( self )-> Self { self . one_less_than_next_power_of_two (). wrapping_add ( 1 )}} doc_comment ! { concat ! ( "Return the memory representation of this integer as a byte array in\nbig-endian (network) byte order.\n" , $to_xe_bytes_doc , "\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! 
($SelfT ), ".to_be_bytes();\nassert_eq!(bytes, " , $be_bytes , ");\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn to_be_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ self . to_be (). to_ne_bytes ()}} doc_comment ! { concat ! ( "Return the memory representation of this integer as a byte array in\nlittle-endian byte order.\n" , $to_xe_bytes_doc , "\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_le_bytes();\nassert_eq!(bytes, " , $le_bytes , ");\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn to_le_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ self . to_le (). to_ne_bytes ()}} doc_comment ! { concat ! ( "\nReturn the memory representation of this integer as a byte array in\nnative byte order.\n\nAs the target platform's native endianness is used, portable code\nshould use [`to_be_bytes`] or [`to_le_bytes`], as appropriate,\ninstead.\n" , $to_xe_bytes_doc , "\n[`to_be_bytes`]: #method.to_be_bytes\n[`to_le_bytes`]: #method.to_le_bytes\n\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_ne_bytes();\nassert_eq!(\n bytes,\n if cfg!(target_endian = \"big\") {\n " , $be_bytes , "\n } else {\n " , $le_bytes , "\n }\n);\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ cfg_attr ( not ( bootstrap ), rustc_allow_const_fn_unstable ( const_fn_transmute ))]# [ cfg_attr ( bootstrap , allow_internal_unstable ( const_fn_transmute ))]# [ inline ] pub const fn to_ne_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ unsafe { mem :: transmute ( self )}}} doc_comment ! { concat ! 
( "\nReturn the memory representation of this integer as a byte array in\nnative byte order.\n\n[`to_ne_bytes`] should be preferred over this whenever possible.\n\n[`to_ne_bytes`]: #method.to_ne_bytes\n" , "\n# Examples\n\n```\n#![feature(num_as_ne_bytes)]\nlet num = " , $swap_op , stringify ! ($SelfT ), ";\nlet bytes = num.as_ne_bytes();\nassert_eq!(\n bytes,\n if cfg!(target_endian = \"big\") {\n &" , $be_bytes , "\n } else {\n &" , $le_bytes , "\n }\n);\n```" ), # [ unstable ( feature = "num_as_ne_bytes" , issue = "76976" )]# [ inline ] pub fn as_ne_bytes (& self )-> & [ u8 ; mem :: size_of ::< Self > ()]{ unsafe {&* ( self as * const Self as * const _)}}} doc_comment ! { concat ! ( "Create a native endian integer value from its representation\nas a byte array in big endian.\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_be_bytes(" , $be_bytes , ");\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_be_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_be_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn from_be_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { Self :: from_be ( Self :: from_ne_bytes ( bytes ))}} doc_comment ! { concat ! ( "\nCreate a native endian integer value from its representation\nas a byte array in little endian.\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! 
($SelfT ), "::from_le_bytes(" , $le_bytes , ");\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_le_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_le_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn from_le_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { Self :: from_le ( Self :: from_ne_bytes ( bytes ))}} doc_comment ! { concat ! ( "Create a native endian integer value from its memory representation\nas a byte array in native endianness.\n\nAs the target platform's native endianness is used, portable code\nlikely wants to use [`from_be_bytes`] or [`from_le_bytes`], as\nappropriate instead.\n\n[`from_be_bytes`]: #method.from_be_bytes\n[`from_le_bytes`]: #method.from_le_bytes\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_ne_bytes(if cfg!(target_endian = \"big\") {\n " , $be_bytes , "\n} else {\n " , $le_bytes , "\n});\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_ne_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! 
($SelfT ), "::from_ne_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ cfg_attr ( not ( bootstrap ), rustc_allow_const_fn_unstable ( const_fn_transmute ))]# [ cfg_attr ( bootstrap , allow_internal_unstable ( const_fn_transmute ))]# [ inline ] pub const fn from_ne_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { unsafe { mem :: transmute ( bytes )}}} doc_comment ! { concat ! ( "**This method is soft-deprecated.**\n\nAlthough using it won’t cause compilation warning,\nnew code should use [`" , stringify ! ($SelfT ), "::MIN" , "`](#associatedconstant.MIN) instead.\n\nReturns the smallest value that can be represented by this integer type." ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_promotable ]# [ inline ( always )]# [ rustc_const_stable ( feature = "const_max_value" , since = "1.32.0" )] pub const fn min_value ()-> Self { Self :: MIN }} doc_comment ! { concat ! ( "**This method is soft-deprecated.**\n\nAlthough using it won’t cause compilation warning,\nnew code should use [`" , stringify ! ($SelfT ), "::MAX" , "`](#associatedconstant.MAX) instead.\n\nReturns the largest value that can be represented by this integer type." ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_promotable ]# [ inline ( always )]# [ rustc_const_stable ( feature = "const_max_value" , since = "1.32.0" )] pub const fn max_value ()-> Self { Self :: MAX }}}}
+macro_rules! __ra_macro_fixture433 {($type : ident )=>{ const EXPLICIT_SIG_BITS : u8 = Self :: SIG_BITS - 1 ; const MAX_EXP : i16 = ( 1 << ( Self :: EXP_BITS - 1 ))- 1 ; const MIN_EXP : i16 = -< Self as RawFloat >:: MAX_EXP + 1 ; const MAX_EXP_INT : i16 = < Self as RawFloat >:: MAX_EXP - ( Self :: SIG_BITS as i16 - 1 ); const MAX_ENCODED_EXP : i16 = ( 1 << Self :: EXP_BITS )- 1 ; const MIN_EXP_INT : i16 = < Self as RawFloat >:: MIN_EXP - ( Self :: SIG_BITS as i16 - 1 ); const MAX_SIG : u64 = ( 1 << Self :: SIG_BITS )- 1 ; const MIN_SIG : u64 = 1 << ( Self :: SIG_BITS - 1 ); const INFINITY : Self = $type :: INFINITY ; const NAN : Self = $type :: NAN ; const ZERO : Self = 0.0 ; }; }
+macro_rules! __ra_macro_fixture434 {()=>{# [ inline ] unsafe fn forward_unchecked ( start : Self , n : usize )-> Self { unsafe { start . unchecked_add ( n as Self )}}# [ inline ] unsafe fn backward_unchecked ( start : Self , n : usize )-> Self { unsafe { start . unchecked_sub ( n as Self )}}# [ inline ] fn forward ( start : Self , n : usize )-> Self { if Self :: forward_checked ( start , n ). is_none (){ let _ = Add :: add ( Self :: MAX , 1 ); } start . wrapping_add ( n as Self )}# [ inline ] fn backward ( start : Self , n : usize )-> Self { if Self :: backward_checked ( start , n ). is_none (){ let _ = Sub :: sub ( Self :: MIN , 1 ); } start . wrapping_sub ( n as Self )}}; }
+macro_rules! __ra_macro_fixture435 {( u8 , $($tt : tt )*)=>{ "" }; ( i8 , $($tt : tt )*)=>{ "" }; ($_: ident , $($tt : tt )*)=>{$($tt )* }; }
+macro_rules! __ra_macro_fixture436 {( forward )=>{# [ inline ] fn haystack (& self )-> & 'a str { self . 0 . haystack ()}# [ inline ] fn next (& mut self )-> SearchStep { self . 0 . next ()}# [ inline ] fn next_match (& mut self )-> Option < ( usize , usize )> { self . 0 . next_match ()}# [ inline ] fn next_reject (& mut self )-> Option < ( usize , usize )> { self . 0 . next_reject ()}}; ( reverse )=>{# [ inline ] fn next_back (& mut self )-> SearchStep { self . 0 . next_back ()}# [ inline ] fn next_match_back (& mut self )-> Option < ( usize , usize )> { self . 0 . next_match_back ()}# [ inline ] fn next_reject_back (& mut self )-> Option < ( usize , usize )> { self . 0 . next_reject_back ()}}; }
+macro_rules! __ra_macro_fixture437 {($t : ty , $pmap : expr , $smap : expr )=>{ type Searcher = $t ; # [ inline ] fn into_searcher ( self , haystack : & 'a str )-> $t {($smap )(($pmap )( self ). into_searcher ( haystack ))}# [ inline ] fn is_contained_in ( self , haystack : & 'a str )-> bool {($pmap )( self ). is_contained_in ( haystack )}# [ inline ] fn is_prefix_of ( self , haystack : & 'a str )-> bool {($pmap )( self ). is_prefix_of ( haystack )}# [ inline ] fn strip_prefix_of ( self , haystack : & 'a str )-> Option <& 'a str > {($pmap )( self ). strip_prefix_of ( haystack )}# [ inline ] fn is_suffix_of ( self , haystack : & 'a str )-> bool where $t : ReverseSearcher < 'a >, {($pmap )( self ). is_suffix_of ( haystack )}# [ inline ] fn strip_suffix_of ( self , haystack : & 'a str )-> Option <& 'a str > where $t : ReverseSearcher < 'a >, {($pmap )( self ). strip_suffix_of ( haystack )}}; }
+macro_rules! __ra_macro_fixture438 {()=>{# [ inline ] fn is_ascii (& self )-> bool { self . is_ascii ()}# [ inline ] fn to_ascii_uppercase (& self )-> Self :: Owned { self . to_ascii_uppercase ()}# [ inline ] fn to_ascii_lowercase (& self )-> Self :: Owned { self . to_ascii_lowercase ()}# [ inline ] fn eq_ignore_ascii_case (& self , o : & Self )-> bool { self . eq_ignore_ascii_case ( o )}# [ inline ] fn make_ascii_uppercase (& mut self ){ self . make_ascii_uppercase (); }# [ inline ] fn make_ascii_lowercase (& mut self ){ self . make_ascii_lowercase (); }}; }
+macro_rules! __ra_macro_fixture439 {()=>($crate :: vec :: Vec :: new ()); ($elem : expr ; $n : expr )=>($crate :: vec :: from_elem ($elem , $n )); ($($x : expr ),+ $(,)?)=>(< [_]>:: into_vec ( box [$($x ),+])); }
+macro_rules! __ra_macro_fixture440 {($left : expr , $right : expr $(,)?)=>({ match (&$left , &$right ){( left_val , right_val )=>{ if ! (* left_val == * right_val ){ panic ! ( r#"assertion failed: `(left == right)`\n left: `{:?}`,\n right: `{:?}`"# , &* left_val , &* right_val )}}}}); ($left : expr , $right : expr , $($arg : tt )+)=>({ match (& ($left ), & ($right )){( left_val , right_val )=>{ if ! (* left_val == * right_val ){ panic ! ( r#"assertion failed: `(left == right)`\n left: `{:?}`,\n right: `{:?}`: {}"# , &* left_val , &* right_val , $crate :: format_args ! ($($arg )+))}}}}); }
+macro_rules! __ra_macro_fixture441 {()=>({$crate :: panic ! ( "explicit panic" )}); ($msg : expr $(,)?)=>({$crate :: rt :: begin_panic ($msg )}); ($fmt : expr , $($arg : tt )+)=>({$crate :: rt :: begin_panic_fmt (&$crate :: format_args ! ($fmt , $($arg )+))}); }
+macro_rules! __ra_macro_fixture442 {($expression : expr , $($pattern : pat )|+ $(if $guard : expr )? $(,)?)=>{ match $expression {$($pattern )|+ $(if $guard )? => true , _ => false }}}
+macro_rules! __ra_macro_fixture443 {()=>{# [ inline ] fn load_consume (& self )-> Self :: Val { self . load ( Ordering :: Acquire )}}; }
+macro_rules! __ra_macro_fixture444 {($($tt : tt )*)=>{$($tt )* }}
+macro_rules! __ra_macro_fixture445 {($tyname : ident , $($($field : ident ).+),*)=>{ fn fmt (& self , f : & mut :: std :: fmt :: Formatter )-> :: std :: fmt :: Result { f . debug_struct ( stringify ! ($tyname ))$(. field ( stringify ! ($($field ).+), & self .$($field ).+))* . finish ()}}}
+macro_rules! __ra_macro_fixture446 {($($field : ident ),*)=>{ fn clone (& self )-> Self { Self {$($field : self .$field . clone (),)* }}}}
+macro_rules! __ra_macro_fixture447 {($method : ident )=>{ fn $method < V > ( self , visitor : V )-> Result < V :: Value > where V : de :: Visitor < 'de >, { self . deserialize_number ( visitor )}}; }
+macro_rules! __ra_macro_fixture448 {($method : ident =>$visit : ident )=>{ fn $method < V > ( self , visitor : V )-> Result < V :: Value > where V : de :: Visitor < 'de >, { self . de . eat_char (); self . de . scratch . clear (); let string = tri ! ( self . de . read . parse_str (& mut self . de . scratch )); match ( string . parse (), string ){( Ok ( integer ), _)=> visitor .$visit ( integer ), ( Err (_), Reference :: Borrowed ( s ))=> visitor . visit_borrowed_str ( s ), ( Err (_), Reference :: Copied ( s ))=> visitor . visit_str ( s ), }}}; }
+macro_rules! __ra_macro_fixture449 {($method : ident )=>{# [ cfg ( not ( feature = "arbitrary_precision" ))] fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match self { Value :: Number ( n )=> n . deserialize_any ( visitor ), _ => Err ( self . invalid_type (& visitor )), }}# [ cfg ( feature = "arbitrary_precision" )] fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match self { Value :: Number ( n )=> n .$method ( visitor ), _ => self . deserialize_any ( visitor ), }}}; }
+macro_rules! __ra_macro_fixture450 {($method : ident )=>{# [ cfg ( not ( feature = "arbitrary_precision" ))] fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match * self { Value :: Number ( ref n )=> n . deserialize_any ( visitor ), _ => Err ( self . invalid_type (& visitor )), }}# [ cfg ( feature = "arbitrary_precision" )] fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match * self { Value :: Number ( ref n )=> n .$method ( visitor ), _ => self . deserialize_any ( visitor ), }}}; }
+macro_rules! __ra_macro_fixture451 {($method : ident =>$visit : ident )=>{ fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match ( self . key . parse (), self . key ){( Ok ( integer ), _)=> visitor .$visit ( integer ), ( Err (_), Cow :: Borrowed ( s ))=> visitor . visit_borrowed_str ( s ), # [ cfg ( any ( feature = "std" , feature = "alloc" ))]( Err (_), Cow :: Owned ( s ))=> visitor . visit_string ( s ), }}}; }
+macro_rules! __ra_macro_fixture452 {(@ expand [$($num_string : tt )*])=>{# [ cfg ( not ( feature = "arbitrary_precision" ))]# [ inline ] fn deserialize_any < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match self . n { N :: PosInt ( u )=> visitor . visit_u64 ( u ), N :: NegInt ( i )=> visitor . visit_i64 ( i ), N :: Float ( f )=> visitor . visit_f64 ( f ), }}# [ cfg ( feature = "arbitrary_precision" )]# [ inline ] fn deserialize_any < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de > { if let Some ( u )= self . as_u64 (){ return visitor . visit_u64 ( u ); } else if let Some ( i )= self . as_i64 (){ return visitor . visit_i64 ( i ); } else if let Some ( f )= self . as_f64 (){ if ryu :: Buffer :: new (). format_finite ( f )== self . n || f . to_string ()== self . n { return visitor . visit_f64 ( f ); }} visitor . visit_map ( NumberDeserializer { number : Some ( self .$($num_string )*), })}}; ( owned )=>{ deserialize_any ! (@ expand [ n ]); }; ( ref )=>{ deserialize_any ! (@ expand [ n . clone ()]); }; }
+macro_rules! __ra_macro_fixture453 {($deserialize : ident =>$visit : ident )=>{# [ cfg ( not ( feature = "arbitrary_precision" ))] fn $deserialize < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { self . deserialize_any ( visitor )}# [ cfg ( feature = "arbitrary_precision" )] fn $deserialize < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : de :: Visitor < 'de >, { visitor .$visit ( self . n . parse (). map_err (|_| invalid_number ())?)}}; }
+macro_rules! __ra_macro_fixture454 {()=>{ fn __rayon_private__ (& self )-> crate :: private :: PrivateMarker { crate :: private :: PrivateMarker }}; }
+macro_rules! __ra_macro_fixture455 {()=>{ fn __rayon_private__ (& self )-> crate :: private :: PrivateMarker { crate :: private :: PrivateMarker }}; }
+macro_rules! __ra_macro_fixture456 {($map_elt : expr )=>{ fn next (& mut self )-> Option < Self :: Item > { self . iter . next (). map ($map_elt )} fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()} fn count ( self )-> usize { self . iter . len ()} fn nth (& mut self , n : usize )-> Option < Self :: Item > { self . iter . nth ( n ). map ($map_elt )} fn last ( mut self )-> Option < Self :: Item > { self . next_back ()} fn collect < C > ( self )-> C where C : FromIterator < Self :: Item >, { self . iter . map ($map_elt ). collect ()}}; }
+macro_rules! __ra_macro_fixture457 {($map_elt : expr )=>{ fn next_back (& mut self )-> Option < Self :: Item > { self . iter . next_back (). map ($map_elt )}}; }
+macro_rules! __ra_macro_fixture458 {()=>{# [ doc = " This trait is private; this method exists to make it" ]# [ doc = " impossible to implement outside the crate." ]# [ doc ( hidden )] fn __rayon_private__ (& self )-> crate :: private :: PrivateMarker ; }; }
+macro_rules! __ra_macro_fixture459 {($ident : ident )=>{{# [ cfg ( test )]{ extern "C" {# [ no_mangle ] static $ident : std :: sync :: atomic :: AtomicUsize ; } unsafe {$ident . fetch_add ( 1 , std :: sync :: atomic :: Ordering :: SeqCst ); }}}}; }
+macro_rules! __ra_macro_fixture460 {($ident : ident )=>{# [ no_mangle ] static $ident : std :: sync :: atomic :: AtomicUsize = std :: sync :: atomic :: AtomicUsize :: new ( 0 ); let _checker = $crate :: mark :: MarkChecker :: new (&$ident ); }; }
+macro_rules! __ra_macro_fixture461 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Debug , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Debug , $($arg )+))}
+macro_rules! __ra_macro_fixture462 {()=>($crate :: eprint ! ( "\n" )); ($($arg : tt )*)=>({$crate :: io :: _eprint ($crate :: format_args_nl ! ($($arg )*)); })}
+macro_rules! __ra_macro_fixture463 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Warn , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Warn , $($arg )+))}
+macro_rules! __ra_macro_fixture464 {( target : $target : expr , $lvl : expr , $($arg : tt )+)=>({ let lvl = $lvl ; if lvl <= $crate :: STATIC_MAX_LEVEL && lvl <= $crate :: max_level (){$crate :: __private_api_log ( __log_format_args ! ($($arg )+), lvl , & ($target , __log_module_path ! (), __log_file ! (), __log_line ! ()), ); }}); ($lvl : expr , $($arg : tt )+)=>( log ! ( target : __log_module_path ! (), $lvl , $($arg )+))}
+macro_rules! __ra_macro_fixture465 {($($args : tt )*)=>{ format_args ! ($($args )*)}; }
+macro_rules! __ra_macro_fixture466 {()=>{ module_path ! ()}; }
+macro_rules! __ra_macro_fixture467 {()=>{ file ! ()}; }
+macro_rules! __ra_macro_fixture468 {()=>{ line ! ()}; }
+macro_rules! __ra_macro_fixture469 {($left : expr , $right : expr )=>{ assert_eq_text ! ($left , $right ,)}; ($left : expr , $right : expr , $($tt : tt )*)=>{{ let left = $left ; let right = $right ; if left != right { if left . trim ()== right . trim (){ std :: eprintln ! ( "Left:\n{:?}\n\nRight:\n{:?}\n\nWhitespace difference\n" , left , right ); } else { let diff = $crate :: __diff ( left , right ); std :: eprintln ! ( "Left:\n{}\n\nRight:\n{}\n\nDiff:\n{}\n" , left , right , $crate :: format_diff ( diff )); } std :: eprintln ! ($($tt )*); panic ! ( "text differs" ); }}}; }
+macro_rules! __ra_macro_fixture470 {($($arg : tt )*)=>($crate :: io :: _eprint ($crate :: format_args ! ($($arg )*))); }
+macro_rules! __ra_macro_fixture471 {($left : expr , $right : expr $(,)?)=>({ match (&$left , &$right ){( left_val , right_val )=>{ if * left_val == * right_val { panic ! ( r#"assertion failed: `(left != right)`\n left: `{:?}`,\n right: `{:?}`"# , &* left_val , &* right_val )}}}}); ($left : expr , $right : expr , $($arg : tt )+)=>({ match (& ($left ), & ($right )){( left_val , right_val )=>{ if * left_val == * right_val { panic ! ( r#"assertion failed: `(left != right)`\n left: `{:?}`,\n right: `{:?}`: {}"# , &* left_val , &* right_val , $crate :: format_args ! ($($arg )+))}}}}); }
+macro_rules! __ra_macro_fixture472 {[[$data : literal ]]=>{$crate :: Expect { position : $crate :: Position { file : file ! (), line : line ! (), column : column ! (), }, data : $data , }}; [[]]=>{$crate :: expect ! [[ "" ]]}; }
+macro_rules! __ra_macro_fixture473 {( self )=>{$crate :: name :: known :: SELF_PARAM }; ( Self )=>{$crate :: name :: known :: SELF_TYPE }; ('static )=>{$crate :: name :: known :: STATIC_LIFETIME }; ($ident : ident )=>{$crate :: name :: known ::$ident }; }
+macro_rules! __ra_macro_fixture474 {()=>({ panic ! ( "internal error: entered unreachable code" )}); ($msg : expr $(,)?)=>({$crate :: unreachable ! ( "{}" , $msg )}); ($fmt : expr , $($arg : tt )*)=>({ panic ! ($crate :: concat ! ( "internal error: entered unreachable code: " , $fmt ), $($arg )*)}); }
+macro_rules! __ra_macro_fixture475 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Error , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Error , $($arg )+))}
+macro_rules! __ra_macro_fixture476 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Trace , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Trace , $($arg )+))}
+macro_rules! __ra_macro_fixture477 {($buf : expr )=>(); ($buf : expr , $lit : literal $($arg : tt )*)=>{{ use :: std :: fmt :: Write as _; let _ = :: std :: write ! ($buf , $lit $($arg )*); }}; }
+macro_rules! __ra_macro_fixture478 {( match $node : ident {$($tt : tt )* })=>{ match_ast ! ( match ($node ){$($tt )* })}; ( match ($node : expr ){$(ast ::$ast : ident ($it : ident )=>$res : expr , )* _ =>$catch_all : expr $(,)? })=>{{$(if let Some ($it )= ast ::$ast :: cast ($node . clone ()){$res } else )* {$catch_all }}}; }
+macro_rules! __ra_macro_fixture479 {($start : ident $(:: $seg : ident )*)=>({$crate :: __known_path ! ($start $(:: $seg )*); $crate :: path :: ModPath :: from_segments ($crate :: path :: PathKind :: Abs , vec ! [$crate :: path :: __name ! [$start ], $($crate :: path :: __name ! [$seg ],)* ])}); }
+macro_rules! __ra_macro_fixture480 {( core :: iter :: IntoIterator )=>{}; ( core :: iter :: Iterator )=>{}; ( core :: result :: Result )=>{}; ( core :: option :: Option )=>{}; ( core :: ops :: Range )=>{}; ( core :: ops :: RangeFrom )=>{}; ( core :: ops :: RangeFull )=>{}; ( core :: ops :: RangeTo )=>{}; ( core :: ops :: RangeToInclusive )=>{}; ( core :: ops :: RangeInclusive )=>{}; ( core :: future :: Future )=>{}; ( core :: ops :: Try )=>{}; ($path : path )=>{ compile_error ! ( "Please register your known path in the path module" )}; }
+macro_rules! __ra_macro_fixture481 {($changed : ident , ($this : ident / $def : ident ). $field : ident , $glob_imports : ident [$lookup : ident ], $def_import_type : ident )=>{{ let existing = $this .$field . entry ($lookup . 1 . clone ()); match ( existing , $def .$field ){( Entry :: Vacant ( entry ), Some (_))=>{ match $def_import_type { ImportType :: Glob =>{$glob_imports .$field . insert ($lookup . clone ()); } ImportType :: Named =>{$glob_imports .$field . remove (&$lookup ); }} if let Some ( fld )= $def .$field { entry . insert ( fld ); }$changed = true ; }( Entry :: Occupied ( mut entry ), Some (_)) if $glob_imports .$field . contains (&$lookup )&& matches ! ($def_import_type , ImportType :: Named )=>{ mark :: hit ! ( import_shadowed ); $glob_imports .$field . remove (&$lookup ); if let Some ( fld )= $def .$field { entry . insert ( fld ); }$changed = true ; }_ =>{}}}}; }
+macro_rules! __ra_macro_fixture482 {($(# $attr_args : tt )* const fn $($item : tt )* )=>{$(# $attr_args )* fn $($item )* }; ($(# $attr_args : tt )* pub const fn $($item : tt )* )=>{$(# $attr_args )* pub fn $($item )* }; ($(# $attr_args : tt )* pub const unsafe fn $($item : tt )* )=>{$(# $attr_args )* pub unsafe fn $($item )* }; }
+macro_rules! __ra_macro_fixture483 {{ type Mirror = $tinyname : ident ; $($(# [$attr : meta ])* $v : vis fn $fname : ident ($seif : ident : $seifty : ty $(,$argname : ident : $argtype : ty )*)$(-> $ret : ty )? ; )* }=>{$($(# [$attr ])* # [ inline ( always )]$v fn $fname ($seif : $seifty , $($argname : $argtype ),*)$(-> $ret )? { match $seif {$tinyname :: Inline ( i )=> i .$fname ($($argname ),*), $tinyname :: Heap ( h )=> h .$fname ($($argname ),*), }})* }; }
+macro_rules! __ra_macro_fixture484 {([$($stack : tt )*])=>{$($stack )* }; ([$($stack : tt )*]@ escape $_x : tt $($t : tt )*)=>{ remove_sections_inner ! ([$($stack )*]$($t )*); }; ([$($stack : tt )*]@ section $x : ident $($t : tt )*)=>{ remove_sections_inner ! ([$($stack )*]$($t )*); }; ([$($stack : tt )*]$t : tt $($tail : tt )*)=>{ remove_sections_inner ! ([$($stack )* $t ]$($tail )*); }; }
+macro_rules! __ra_macro_fixture485 {($name : ident , $($field : ident ),+ $(,)*)=>( fn clone (& self )-> Self {$name {$($field : self . $field . clone ()),* }}); }
+macro_rules! __ra_macro_fixture486 {( type FreeFunctions )=>( type FreeFunctions : 'static ;); ( type TokenStream )=>( type TokenStream : 'static + Clone ;); ( type TokenStreamBuilder )=>( type TokenStreamBuilder : 'static ;); ( type TokenStreamIter )=>( type TokenStreamIter : 'static + Clone ;); ( type Group )=>( type Group : 'static + Clone ;); ( type Punct )=>( type Punct : 'static + Copy + Eq + Hash ;); ( type Ident )=>( type Ident : 'static + Copy + Eq + Hash ;); ( type Literal )=>( type Literal : 'static + Clone ;); ( type SourceFile )=>( type SourceFile : 'static + Clone ;); ( type MultiSpan )=>( type MultiSpan : 'static ;); ( type Diagnostic )=>( type Diagnostic : 'static ;); ( type Span )=>( type Span : 'static + Copy + Eq + Hash ;); ( fn drop (& mut self , $arg : ident : $arg_ty : ty ))=>( fn drop (& mut self , $arg : $arg_ty ){ mem :: drop ($arg )}); ( fn clone (& mut self , $arg : ident : $arg_ty : ty )-> $ret_ty : ty )=>( fn clone (& mut self , $arg : $arg_ty )-> $ret_ty {$arg . clone ()}); ($($item : tt )*)=>($($item )*;)}
+macro_rules! __ra_macro_fixture487 {($bit : expr , $is_fn_name : ident , $set_fn_name : ident )=>{ fn $is_fn_name (& self )-> bool { self . bools & ( 0b1 << $bit )> 0 } fn $set_fn_name (& mut self , yes : bool ){ if yes { self . bools |= 1 << $bit ; } else { self . bools &= ! ( 1 << $bit ); }}}; }
+macro_rules! __ra_macro_fixture488 {($($(# [$cfg : meta ])* fn $method : ident -> $i : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$i > {( self . 0 ).$method ()})*}}
+macro_rules! __ra_macro_fixture489 {($($(# [$cfg : meta ])* fn $method : ident ($i : ident ); )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method ( n : $i )-> Option < Self > { T ::$method ( n ). map ( Wrapping )})*}}
+macro_rules! __ra_macro_fixture490 {($SrcT : ident : $($(# [$cfg : meta ])* fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$DstT > { let min = $DstT :: MIN as $SrcT ; let max = $DstT :: MAX as $SrcT ; if size_of ::<$SrcT > ()<= size_of ::<$DstT > ()|| ( min <= * self && * self <= max ){ Some (* self as $DstT )} else { None }})*}}
+macro_rules! __ra_macro_fixture491 {($SrcT : ident : $($(# [$cfg : meta ])* fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$DstT > { let max = $DstT :: MAX as $SrcT ; if 0 <= * self && ( size_of ::<$SrcT > ()<= size_of ::<$DstT > ()|| * self <= max ){ Some (* self as $DstT )} else { None }})*}}
+macro_rules! __ra_macro_fixture492 {($SrcT : ident : $($(# [$cfg : meta ])* fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$DstT > { let max = $DstT :: MAX as $SrcT ; if size_of ::<$SrcT > ()< size_of ::<$DstT > ()|| * self <= max { Some (* self as $DstT )} else { None }})*}}
+macro_rules! __ra_macro_fixture493 {($SrcT : ident : $($(# [$cfg : meta ])* fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$DstT > { let max = $DstT :: MAX as $SrcT ; if size_of ::<$SrcT > ()<= size_of ::<$DstT > ()|| * self <= max { Some (* self as $DstT )} else { None }})*}}
+macro_rules! __ra_macro_fixture494 {($f : ident : $($(# [$cfg : meta ])* fn $method : ident -> $i : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$i > { if size_of ::<$f > ()> size_of ::<$i > (){ const MIN_M1 : $f = $i :: MIN as $f - 1.0 ; const MAX_P1 : $f = $i :: MAX as $f + 1.0 ; if * self > MIN_M1 && * self < MAX_P1 { return Some ( float_to_int_unchecked ! (* self =>$i )); }} else { const MIN : $f = $i :: MIN as $f ; const MAX_P1 : $f = $i :: MAX as $f ; if * self >= MIN && * self < MAX_P1 { return Some ( float_to_int_unchecked ! (* self =>$i )); }} None })*}}
+macro_rules! __ra_macro_fixture495 {($f : ident : $($(# [$cfg : meta ])* fn $method : ident -> $u : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$u > { if size_of ::<$f > ()> size_of ::<$u > (){ const MAX_P1 : $f = $u :: MAX as $f + 1.0 ; if * self > - 1.0 && * self < MAX_P1 { return Some ( float_to_int_unchecked ! (* self =>$u )); }} else { const MAX_P1 : $f = $u :: MAX as $f ; if * self > - 1.0 && * self < MAX_P1 { return Some ( float_to_int_unchecked ! (* self =>$u )); }} None })*}}
+macro_rules! __ra_macro_fixture496 {($SrcT : ident : $(fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ] fn $method (& self )-> Option <$DstT > { Some (* self as $DstT )})*}}
+macro_rules! __ra_macro_fixture497 {($($method : ident ()-> $ret : expr ; )*)=>{$(# [ inline ] fn $method ()-> Self {$ret })*}; }
+macro_rules! __ra_macro_fixture498 {($(Self :: $method : ident ( self $(, $arg : ident : $ty : ty )* )-> $ret : ty ; )*)=>{$(# [ inline ] fn $method ( self $(, $arg : $ty )* )-> $ret { Self ::$method ( self $(, $arg )* )})*}; ($($base : ident :: $method : ident ( self $(, $arg : ident : $ty : ty )* )-> $ret : ty ; )*)=>{$(# [ inline ] fn $method ( self $(, $arg : $ty )* )-> $ret {< Self as $base >::$method ( self $(, $arg )* )})*}; ($($base : ident :: $method : ident ($($arg : ident : $ty : ty ),* )-> $ret : ty ; )*)=>{$(# [ inline ] fn $method ($($arg : $ty ),* )-> $ret {< Self as $base >::$method ($($arg ),* )})*}}
+macro_rules! __ra_macro_fixture499 {($tyname : ident , $($($field : ident ).+),*)=>{ fn fmt (& self , f : & mut :: std :: fmt :: Formatter )-> :: std :: fmt :: Result { f . debug_struct ( stringify ! ($tyname ))$(. field ( stringify ! ($($field ).+), & self .$($field ).+))* . finish ()}}}
+macro_rules! __ra_macro_fixture500 {($($field : ident ),*)=>{ fn clone (& self )-> Self { Self {$($field : self .$field . clone (),)* }}}}
+macro_rules! __ra_macro_fixture501 {($($json : tt )+)=>{ json_internal ! ($($json )+)}; }
+macro_rules! __ra_macro_fixture502 {(@ array [$($elems : expr ,)*])=>{ json_internal_vec ! [$($elems ,)*]}; (@ array [$($elems : expr ),*])=>{ json_internal_vec ! [$($elems ),*]}; (@ array [$($elems : expr ,)*] null $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ( null )]$($rest )*)}; (@ array [$($elems : expr ,)*] true $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ( true )]$($rest )*)}; (@ array [$($elems : expr ,)*] false $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ( false )]$($rest )*)}; (@ array [$($elems : expr ,)*][$($array : tt )*]$($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ([$($array )*])]$($rest )*)}; (@ array [$($elems : expr ,)*]{$($map : tt )*}$($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ({$($map )*})]$($rest )*)}; (@ array [$($elems : expr ,)*]$next : expr , $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ($next ),]$($rest )*)}; (@ array [$($elems : expr ,)*]$last : expr )=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ($last )])}; (@ array [$($elems : expr ),*], $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)*]$($rest )*)}; (@ array [$($elems : expr ),*]$unexpected : tt $($rest : tt )*)=>{ json_unexpected ! ($unexpected )}; (@ object $object : ident ()()())=>{}; (@ object $object : ident [$($key : tt )+]($value : expr ), $($rest : tt )*)=>{ let _ = $object . insert (($($key )+). into (), $value ); json_internal ! (@ object $object ()($($rest )*)($($rest )*)); }; (@ object $object : ident [$($key : tt )+]($value : expr )$unexpected : tt $($rest : tt )*)=>{ json_unexpected ! ($unexpected ); }; (@ object $object : ident [$($key : tt )+]($value : expr ))=>{ let _ = $object . insert (($($key )+). into (), $value ); }; (@ object $object : ident ($($key : tt )+)(: null $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! 
( null ))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: true $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ( true ))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: false $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ( false ))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: [$($array : tt )*]$($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ([$($array )*]))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: {$($map : tt )*}$($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ({$($map )*}))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: $value : expr , $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ($value )), $($rest )*); }; (@ object $object : ident ($($key : tt )+)(: $value : expr )$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ($value ))); }; (@ object $object : ident ($($key : tt )+)(:)$copy : tt )=>{ json_internal ! (); }; (@ object $object : ident ($($key : tt )+)()$copy : tt )=>{ json_internal ! (); }; (@ object $object : ident ()(: $($rest : tt )*)($colon : tt $($copy : tt )*))=>{ json_unexpected ! ($colon ); }; (@ object $object : ident ($($key : tt )*)(, $($rest : tt )*)($comma : tt $($copy : tt )*))=>{ json_unexpected ! ($comma ); }; (@ object $object : ident ()(($key : expr ): $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object ($key )(: $($rest )*)(: $($rest )*)); }; (@ object $object : ident ($($key : tt )*)(: $($unexpected : tt )+)$copy : tt )=>{ json_expect_expr_comma ! ($($unexpected )+); }; (@ object $object : ident ($($key : tt )*)($tt : tt $($rest : tt )*)$copy : tt )=>{ json_internal ! 
(@ object $object ($($key )* $tt )($($rest )*)($($rest )*)); }; ( null )=>{$crate :: Value :: Null }; ( true )=>{$crate :: Value :: Bool ( true )}; ( false )=>{$crate :: Value :: Bool ( false )}; ([])=>{$crate :: Value :: Array ( json_internal_vec ! [])}; ([$($tt : tt )+ ])=>{$crate :: Value :: Array ( json_internal ! (@ array []$($tt )+))}; ({})=>{$crate :: Value :: Object ($crate :: Map :: new ())}; ({$($tt : tt )+ })=>{$crate :: Value :: Object ({ let mut object = $crate :: Map :: new (); json_internal ! (@ object object ()($($tt )+)($($tt )+)); object })}; ($other : expr )=>{$crate :: to_value (&$other ). unwrap ()}; }
+macro_rules! __ra_macro_fixture503 {($($content : tt )*)=>{ vec ! [$($content )*]}; }
+macro_rules! __ra_macro_fixture504 {($($cfg : tt )*)=>{}; }
+macro_rules! __ra_macro_fixture505 {($($tokens : tt )*)=>{$crate :: crossbeam_channel_internal ! ($($tokens )* )}; }
+macro_rules! __ra_macro_fixture506 {(@ list ()($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ case ($($head )*)()())}; (@ list ( default =>$($tail : tt )*)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ( default ()=>$($tail )*)($($head )*))}; (@ list ( default -> $($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `=>` after `default` case, found `->`" )}; (@ list ( default $args : tt -> $($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `=>` after `default` case, found `->`" )}; (@ list ( recv ($($args : tt )*)=>$($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `->` after `recv` case, found `=>`" )}; (@ list ( send ($($args : tt )*)=>$($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `->` after `send` operation, found `=>`" )}; (@ list ($case : ident $args : tt -> $res : tt -> $($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `=>`, found `->`" )}; (@ list ($case : ident $args : tt $(-> $res : pat )* =>$body : block ; $($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "did you mean to put a comma instead of the semicolon after `}`?" )}; (@ list ($case : ident ($($args : tt )*)$(-> $res : pat )* =>$body : expr , $($tail : tt )*)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ($($tail )*)($($head )* $case ($($args )*)$(-> $res )* =>{$body },))}; (@ list ($case : ident ($($args : tt )*)$(-> $res : pat )* =>$body : block $($tail : tt )*)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ($($tail )*)($($head )* $case ($($args )*)$(-> $res )* =>{$body },))}; (@ list ($case : ident ($($args : tt )*)$(-> $res : pat )* =>$body : expr )($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ()($($head )* $case ($($args )*)$(-> $res )* =>{$body },))}; (@ list ($case : ident ($($args : tt )*)$(-> $res : pat )* =>$body : expr ,)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! 
(@ list ()($($head )* $case ($($args )*)$(-> $res )* =>{$body },))}; (@ list ($($tail : tt )*)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list_error1 $($tail )*)}; (@ list_error1 recv $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error2 recv $($tail )*)}; (@ list_error1 send $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error2 send $($tail )*)}; (@ list_error1 default $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error2 default $($tail )*)}; (@ list_error1 $t : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected one of `recv`, `send`, or `default`, found `" , stringify ! ($t ), "`" , ))}; (@ list_error1 $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error2 $($tail )*); }; (@ list_error2 $case : ident )=>{ compile_error ! ( concat ! ( "missing argument list after `" , stringify ! ($case ), "`" , ))}; (@ list_error2 $case : ident =>$($tail : tt )*)=>{ compile_error ! ( concat ! ( "missing argument list after `" , stringify ! ($case ), "`" , ))}; (@ list_error2 $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error3 $($tail )*)}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )*)=>{ compile_error ! ( concat ! ( "missing `=>` after `" , stringify ! ($case ), "` case" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>)=>{ compile_error ! ( "expected expression after `=>`" )}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$body : expr ; $($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma instead of the semicolon after `" , stringify ! ($body ), "`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* => recv ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( "expected an expression after `=>`" )}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* => send ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! 
( "expected an expression after `=>`" )}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* => default ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( "expected an expression after `=>`" )}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$f : ident ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($f ), "(" , stringify ! ($($a )*), ")`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$f : ident ! ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($f ), "!(" , stringify ! ($($a )*), ")`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$f : ident ! [$($a : tt )*]$($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($f ), "![" , stringify ! ($($a )*), "]`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$f : ident ! {$($a : tt )*}$($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($f ), "!{" , stringify ! ($($a )*), "}`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$body : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($body ), "`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)-> =>$($tail : tt )*)=>{ compile_error ! ( "missing pattern after `->`" )}; (@ list_error3 $case : ident ($($args : tt )*)$t : tt $(-> $r : pat )* =>$($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected `->`, found `" , stringify ! ($t ), "`" , ))}; (@ list_error3 $case : ident ($($args : tt )*)-> $t : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected a pattern, found `" , stringify ! ($t ), "`" , ))}; (@ list_error3 recv ($($args : tt )*)$t : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected `->`, found `" , stringify ! 
($t ), "`" , ))}; (@ list_error3 send ($($args : tt )*)$t : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected `->`, found `" , stringify ! ($t ), "`" , ))}; (@ list_error3 recv $args : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected an argument list after `recv`, found `" , stringify ! ($args ), "`" , ))}; (@ list_error3 send $args : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected an argument list after `send`, found `" , stringify ! ($args ), "`" , ))}; (@ list_error3 default $args : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected an argument list or `=>` after `default`, found `" , stringify ! ($args ), "`" , ))}; (@ list_error3 $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error4 $($tail )*)}; (@ list_error4 $($tail : tt )*)=>{ compile_error ! ( "invalid syntax" )}; (@ case ()$cases : tt $default : tt )=>{$crate :: crossbeam_channel_internal ! (@ init $cases $default )}; (@ case ( recv ($r : expr )-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)($($cases )* recv ($r )-> $res =>$body ,)$default )}; (@ case ( recv ($r : expr ,)-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)($($cases )* recv ($r )-> $res =>$body ,)$default )}; (@ case ( recv ($($args : tt )*)-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{ compile_error ! ( concat ! ( "invalid argument list in `recv(" , stringify ! ($($args )*), ")`" , ))}; (@ case ( recv $t : tt $($tail : tt )*)($($cases : tt )*)$default : tt )=>{ compile_error ! ( concat ! ( "expected an argument list after `recv`, found `" , stringify ! ($t ), "`" , ))}; (@ case ( send ($s : expr , $m : expr )-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{$crate :: crossbeam_channel_internal ! 
(@ case ($($tail )*)($($cases )* send ($s , $m )-> $res =>$body ,)$default )}; (@ case ( send ($s : expr , $m : expr ,)-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)($($cases )* send ($s , $m )-> $res =>$body ,)$default )}; (@ case ( send ($($args : tt )*)-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{ compile_error ! ( concat ! ( "invalid argument list in `send(" , stringify ! ($($args )*), ")`" , ))}; (@ case ( send $t : tt $($tail : tt )*)($($cases : tt )*)$default : tt )=>{ compile_error ! ( concat ! ( "expected an argument list after `send`, found `" , stringify ! ($t ), "`" , ))}; (@ case ( default ()=>$body : tt , $($tail : tt )*)$cases : tt ())=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)$cases ( default ()=>$body ,))}; (@ case ( default ($timeout : expr )=>$body : tt , $($tail : tt )*)$cases : tt ())=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)$cases ( default ($timeout )=>$body ,))}; (@ case ( default ($timeout : expr ,)=>$body : tt , $($tail : tt )*)$cases : tt ())=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)$cases ( default ($timeout )=>$body ,))}; (@ case ( default $($tail : tt )*)$cases : tt ($($def : tt )+))=>{ compile_error ! ( "there can be only one `default` case in a `select!` block" )}; (@ case ( default ($($args : tt )*)=>$body : tt , $($tail : tt )*)$cases : tt $default : tt )=>{ compile_error ! ( concat ! ( "invalid argument list in `default(" , stringify ! ($($args )*), ")`" , ))}; (@ case ( default $t : tt $($tail : tt )*)$cases : tt $default : tt )=>{ compile_error ! ( concat ! ( "expected an argument list or `=>` after `default`, found `" , stringify ! ($t ), "`" , ))}; (@ case ($case : ident $($tail : tt )*)$cases : tt $default : tt )=>{ compile_error ! ( concat ! ( "expected one of `recv`, `send`, or `default`, found `" , stringify ! 
($case ), "`" , ))}; (@ init ( recv ($r : expr )-> $res : pat =>$recv_body : tt ,)( default ()=>$default_body : tt ,))=>{{ match $r { ref _r =>{ let _r : &$crate :: Receiver <_> = _r ; match _r . try_recv (){:: std :: result :: Result :: Err ($crate :: TryRecvError :: Empty )=>{$default_body } _res =>{ let _res = _res . map_err (|_| $crate :: RecvError ); let $res = _res ; $recv_body }}}}}}; (@ init ( recv ($r : expr )-> $res : pat =>$body : tt ,)())=>{{ match $r { ref _r =>{ let _r : &$crate :: Receiver <_> = _r ; let _res = _r . recv (); let $res = _res ; $body }}}}; (@ init ( recv ($r : expr )-> $res : pat =>$recv_body : tt ,)( default ($timeout : expr )=>$default_body : tt ,))=>{{ match $r { ref _r =>{ let _r : &$crate :: Receiver <_> = _r ; match _r . recv_timeout ($timeout ){:: std :: result :: Result :: Err ($crate :: RecvTimeoutError :: Timeout )=>{$default_body } _res =>{ let _res = _res . map_err (|_| $crate :: RecvError ); let $res = _res ; $recv_body }}}}}}; (@ init ($($cases : tt )*)$default : tt )=>{{ const _LEN : usize = $crate :: crossbeam_channel_internal ! (@ count ($($cases )*)); let _handle : &$crate :: internal :: SelectHandle = &$crate :: never ::< ()> (); # [ allow ( unused_mut )] let mut _sel = [( _handle , 0 , :: std :: ptr :: null ()); _LEN ]; $crate :: crossbeam_channel_internal ! 
(@ add _sel ($($cases )*)$default (( 0usize _oper0 )( 1usize _oper1 )( 2usize _oper2 )( 3usize _oper3 )( 4usize _oper4 )( 5usize _oper5 )( 6usize _oper6 )( 7usize _oper7 )( 8usize _oper8 )( 9usize _oper9 )( 10usize _oper10 )( 11usize _oper11 )( 12usize _oper12 )( 13usize _oper13 )( 14usize _oper14 )( 15usize _oper15 )( 16usize _oper16 )( 17usize _oper17 )( 18usize _oper18 )( 19usize _oper19 )( 20usize _oper20 )( 21usize _oper21 )( 22usize _oper22 )( 23usize _oper23 )( 24usize _oper24 )( 25usize _oper25 )( 26usize _oper26 )( 27usize _oper27 )( 28usize _oper28 )( 29usize _oper29 )( 30usize _oper30 )( 31usize _oper31 ))())}}; (@ count ())=>{ 0 }; (@ count ($oper : ident $args : tt -> $res : pat =>$body : tt , $($cases : tt )*))=>{ 1 + $crate :: crossbeam_channel_internal ! (@ count ($($cases )*))}; (@ add $sel : ident ()()$labels : tt $cases : tt )=>{{ let _oper : $crate :: SelectedOperation < '_ > = { let _oper = $crate :: internal :: select (& mut $sel ); unsafe {:: std :: mem :: transmute ( _oper )}}; $crate :: crossbeam_channel_internal ! {@ complete $sel _oper $cases }}}; (@ add $sel : ident ()( default ()=>$body : tt ,)$labels : tt $cases : tt )=>{{ let _oper : :: std :: option :: Option <$crate :: SelectedOperation < '_ >> = { let _oper = $crate :: internal :: try_select (& mut $sel ); unsafe {:: std :: mem :: transmute ( _oper )}}; match _oper { None =>{{$sel }; $body } Some ( _oper )=>{$crate :: crossbeam_channel_internal ! {@ complete $sel _oper $cases }}}}}; (@ add $sel : ident ()( default ($timeout : expr )=>$body : tt ,)$labels : tt $cases : tt )=>{{ let _oper : :: std :: option :: Option <$crate :: SelectedOperation < '_ >> = { let _oper = $crate :: internal :: select_timeout (& mut $sel , $timeout ); unsafe {:: std :: mem :: transmute ( _oper )}}; match _oper {:: std :: option :: Option :: None =>{{$sel }; $body }:: std :: option :: Option :: Some ( _oper )=>{$crate :: crossbeam_channel_internal ! 
{@ complete $sel _oper $cases }}}}}; (@ add $sel : ident $input : tt $default : tt ()$cases : tt )=>{ compile_error ! ( "too many operations in a `select!` block" )}; (@ add $sel : ident ( recv ($r : expr )-> $res : pat =>$body : tt , $($tail : tt )*)$default : tt (($i : tt $var : ident )$($labels : tt )*)($($cases : tt )*))=>{{ match $r { ref _r =>{ let $var : &$crate :: Receiver <_> = unsafe { let _r : &$crate :: Receiver <_> = _r ; unsafe fn unbind < 'a , T > ( x : & T )-> & 'a T {:: std :: mem :: transmute ( x )} unbind ( _r )}; $sel [$i ]= ($var , $i , $var as * const $crate :: Receiver <_> as * const u8 ); $crate :: crossbeam_channel_internal ! (@ add $sel ($($tail )*)$default ($($labels )*)($($cases )* [$i ] recv ($var )-> $res =>$body ,))}}}}; (@ add $sel : ident ( send ($s : expr , $m : expr )-> $res : pat =>$body : tt , $($tail : tt )*)$default : tt (($i : tt $var : ident )$($labels : tt )*)($($cases : tt )*))=>{{ match $s { ref _s =>{ let $var : &$crate :: Sender <_> = unsafe { let _s : &$crate :: Sender <_> = _s ; unsafe fn unbind < 'a , T > ( x : & T )-> & 'a T {:: std :: mem :: transmute ( x )} unbind ( _s )}; $sel [$i ]= ($var , $i , $var as * const $crate :: Sender <_> as * const u8 ); $crate :: crossbeam_channel_internal ! (@ add $sel ($($tail )*)$default ($($labels )*)($($cases )* [$i ] send ($var , $m )-> $res =>$body ,))}}}}; (@ complete $sel : ident $oper : ident ([$i : tt ] recv ($r : ident )-> $res : pat =>$body : tt , $($tail : tt )*))=>{{ if $oper . index ()== $i { let _res = $oper . recv ($r ); {$sel }; let $res = _res ; $body } else {$crate :: crossbeam_channel_internal ! {@ complete $sel $oper ($($tail )*)}}}}; (@ complete $sel : ident $oper : ident ([$i : tt ] send ($s : ident , $m : expr )-> $res : pat =>$body : tt , $($tail : tt )*))=>{{ if $oper . index ()== $i { let _res = $oper . send ($s , $m ); {$sel }; let $res = _res ; $body } else {$crate :: crossbeam_channel_internal ! 
{@ complete $sel $oper ($($tail )*)}}}}; (@ complete $sel : ident $oper : ident ())=>{{ unreachable ! ( "internal error in crossbeam-channel: invalid case" )}}; (@$($tokens : tt )*)=>{ compile_error ! ( concat ! ( "internal error in crossbeam-channel: " , stringify ! (@$($tokens )*), ))}; ()=>{ compile_error ! ( "empty `select!` block" )}; ($($case : ident $(($($args : tt )*))* =>$body : expr $(,)*)*)=>{$crate :: crossbeam_channel_internal ! (@ list ($($case $(($($args )*))* =>{$body },)*)())}; ($($tokens : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list ($($tokens )*)())}; }
+macro_rules! __ra_macro_fixture507 {($($tokens : tt )*)=>{ return Err ( crate :: errors :: error ! ($($tokens )*))}}
+macro_rules! __ra_macro_fixture508 {($fmt : expr )=>{$crate :: SsrError :: new ( format ! ($fmt ))}; ($fmt : expr , $($arg : tt )+)=>{$crate :: SsrError :: new ( format ! ($fmt , $($arg )+))}}
+macro_rules! __ra_macro_fixture509 {[;]=>{$crate :: SyntaxKind :: SEMICOLON }; [,]=>{$crate :: SyntaxKind :: COMMA }; [ '(' ]=>{$crate :: SyntaxKind :: L_PAREN }; [ ')' ]=>{$crate :: SyntaxKind :: R_PAREN }; [ '{' ]=>{$crate :: SyntaxKind :: L_CURLY }; [ '}' ]=>{$crate :: SyntaxKind :: R_CURLY }; [ '[' ]=>{$crate :: SyntaxKind :: L_BRACK }; [ ']' ]=>{$crate :: SyntaxKind :: R_BRACK }; [<]=>{$crate :: SyntaxKind :: L_ANGLE }; [>]=>{$crate :: SyntaxKind :: R_ANGLE }; [@]=>{$crate :: SyntaxKind :: AT }; [#]=>{$crate :: SyntaxKind :: POUND }; [~]=>{$crate :: SyntaxKind :: TILDE }; [?]=>{$crate :: SyntaxKind :: QUESTION }; [$]=>{$crate :: SyntaxKind :: DOLLAR }; [&]=>{$crate :: SyntaxKind :: AMP }; [|]=>{$crate :: SyntaxKind :: PIPE }; [+]=>{$crate :: SyntaxKind :: PLUS }; [*]=>{$crate :: SyntaxKind :: STAR }; [/]=>{$crate :: SyntaxKind :: SLASH }; [^]=>{$crate :: SyntaxKind :: CARET }; [%]=>{$crate :: SyntaxKind :: PERCENT }; [_]=>{$crate :: SyntaxKind :: UNDERSCORE }; [.]=>{$crate :: SyntaxKind :: DOT }; [..]=>{$crate :: SyntaxKind :: DOT2 }; [...]=>{$crate :: SyntaxKind :: DOT3 }; [..=]=>{$crate :: SyntaxKind :: DOT2EQ }; [:]=>{$crate :: SyntaxKind :: COLON }; [::]=>{$crate :: SyntaxKind :: COLON2 }; [=]=>{$crate :: SyntaxKind :: EQ }; [==]=>{$crate :: SyntaxKind :: EQ2 }; [=>]=>{$crate :: SyntaxKind :: FAT_ARROW }; [!]=>{$crate :: SyntaxKind :: BANG }; [!=]=>{$crate :: SyntaxKind :: NEQ }; [-]=>{$crate :: SyntaxKind :: MINUS }; [->]=>{$crate :: SyntaxKind :: THIN_ARROW }; [<=]=>{$crate :: SyntaxKind :: LTEQ }; [>=]=>{$crate :: SyntaxKind :: GTEQ }; [+=]=>{$crate :: SyntaxKind :: PLUSEQ }; [-=]=>{$crate :: SyntaxKind :: MINUSEQ }; [|=]=>{$crate :: SyntaxKind :: PIPEEQ }; [&=]=>{$crate :: SyntaxKind :: AMPEQ }; [^=]=>{$crate :: SyntaxKind :: CARETEQ }; [/=]=>{$crate :: SyntaxKind :: SLASHEQ }; [*=]=>{$crate :: SyntaxKind :: STAREQ }; [%=]=>{$crate :: SyntaxKind :: PERCENTEQ }; [&&]=>{$crate :: SyntaxKind :: AMP2 }; [||]=>{$crate :: SyntaxKind :: PIPE2 }; 
[<<]=>{$crate :: SyntaxKind :: SHL }; [>>]=>{$crate :: SyntaxKind :: SHR }; [<<=]=>{$crate :: SyntaxKind :: SHLEQ }; [>>=]=>{$crate :: SyntaxKind :: SHREQ }; [ as ]=>{$crate :: SyntaxKind :: AS_KW }; [ async ]=>{$crate :: SyntaxKind :: ASYNC_KW }; [ await ]=>{$crate :: SyntaxKind :: AWAIT_KW }; [ box ]=>{$crate :: SyntaxKind :: BOX_KW }; [ break ]=>{$crate :: SyntaxKind :: BREAK_KW }; [ const ]=>{$crate :: SyntaxKind :: CONST_KW }; [ continue ]=>{$crate :: SyntaxKind :: CONTINUE_KW }; [ crate ]=>{$crate :: SyntaxKind :: CRATE_KW }; [ dyn ]=>{$crate :: SyntaxKind :: DYN_KW }; [ else ]=>{$crate :: SyntaxKind :: ELSE_KW }; [ enum ]=>{$crate :: SyntaxKind :: ENUM_KW }; [ extern ]=>{$crate :: SyntaxKind :: EXTERN_KW }; [ false ]=>{$crate :: SyntaxKind :: FALSE_KW }; [ fn ]=>{$crate :: SyntaxKind :: FN_KW }; [ for ]=>{$crate :: SyntaxKind :: FOR_KW }; [ if ]=>{$crate :: SyntaxKind :: IF_KW }; [ impl ]=>{$crate :: SyntaxKind :: IMPL_KW }; [ in ]=>{$crate :: SyntaxKind :: IN_KW }; [ let ]=>{$crate :: SyntaxKind :: LET_KW }; [ loop ]=>{$crate :: SyntaxKind :: LOOP_KW }; [ macro ]=>{$crate :: SyntaxKind :: MACRO_KW }; [ match ]=>{$crate :: SyntaxKind :: MATCH_KW }; [ mod ]=>{$crate :: SyntaxKind :: MOD_KW }; [ move ]=>{$crate :: SyntaxKind :: MOVE_KW }; [ mut ]=>{$crate :: SyntaxKind :: MUT_KW }; [ pub ]=>{$crate :: SyntaxKind :: PUB_KW }; [ ref ]=>{$crate :: SyntaxKind :: REF_KW }; [ return ]=>{$crate :: SyntaxKind :: RETURN_KW }; [ self ]=>{$crate :: SyntaxKind :: SELF_KW }; [ static ]=>{$crate :: SyntaxKind :: STATIC_KW }; [ struct ]=>{$crate :: SyntaxKind :: STRUCT_KW }; [ super ]=>{$crate :: SyntaxKind :: SUPER_KW }; [ trait ]=>{$crate :: SyntaxKind :: TRAIT_KW }; [ true ]=>{$crate :: SyntaxKind :: TRUE_KW }; [ try ]=>{$crate :: SyntaxKind :: TRY_KW }; [ type ]=>{$crate :: SyntaxKind :: TYPE_KW }; [ unsafe ]=>{$crate :: SyntaxKind :: UNSAFE_KW }; [ use ]=>{$crate :: SyntaxKind :: USE_KW }; [ where ]=>{$crate :: SyntaxKind :: WHERE_KW }; [ while ]=>{$crate :: SyntaxKind 
:: WHILE_KW }; [ yield ]=>{$crate :: SyntaxKind :: YIELD_KW }; [ auto ]=>{$crate :: SyntaxKind :: AUTO_KW }; [ default ]=>{$crate :: SyntaxKind :: DEFAULT_KW }; [ existential ]=>{$crate :: SyntaxKind :: EXISTENTIAL_KW }; [ union ]=>{$crate :: SyntaxKind :: UNION_KW }; [ raw ]=>{$crate :: SyntaxKind :: RAW_KW }; [ macro_rules ]=>{$crate :: SyntaxKind :: MACRO_RULES_KW }; [ lifetime_ident ]=>{$crate :: SyntaxKind :: LIFETIME_IDENT }; [ ident ]=>{$crate :: SyntaxKind :: IDENT }; [ shebang ]=>{$crate :: SyntaxKind :: SHEBANG }; }
+macro_rules! __ra_macro_fixture510 {($($args : tt )*)=>{ return Err ( match_error ! ($($args )*))}; }
+macro_rules! __ra_macro_fixture511 {($e : expr )=>{{ MatchFailed { reason : if recording_match_fail_reasons (){ Some ( format ! ( "{}" , $e ))} else { None }}}}; ($fmt : expr , $($arg : tt )+)=>{{ MatchFailed { reason : if recording_match_fail_reasons (){ Some ( format ! ($fmt , $($arg )+))} else { None }}}}; }
+macro_rules! __ra_macro_fixture512 {()=>($crate :: print ! ( "\n" )); ($($arg : tt )*)=>({$crate :: io :: _print ($crate :: format_args_nl ! ($($arg )*)); })}
+macro_rules! __ra_macro_fixture513 {($cmd : tt )=>{{# [ cfg ( trick_rust_analyzer_into_highlighting_interpolated_bits )] format_args ! ($cmd ); use $crate :: Cmd as __CMD ; let cmd : $crate :: Cmd = $crate :: __cmd ! ( __CMD $cmd ); cmd }}; }
+macro_rules! __ra_macro_fixture514 {($reader : ident , $s : ident ;)=>{}; ($reader : ident , $s : ident ; $first : ident : $first_ty : ty $(, $rest : ident : $rest_ty : ty )*)=>{ reverse_decode ! ($reader , $s ; $($rest : $rest_ty ),*); let $first = <$first_ty >:: decode (& mut $reader , $s ); }}
+macro_rules! __ra_macro_fixture515 {($kind : ident , $($ty : ty ),*)=>{ match $kind {$(stringify ! ($ty )=>{ let n : $ty = n . parse (). unwrap (); format ! ( concat ! ( "{}" , stringify ! ($ty )), n )})* _ => unimplemented ! ( "unknown args for typed_integer: n {}, kind {}" , n , $kind ), }}}
+macro_rules! __ra_macro_fixture516 {()=>( panic ! ( "not implemented" )); ($($arg : tt )+)=>( panic ! ( "not implemented: {}" , $crate :: format_args ! ($($arg )+))); }
+macro_rules! __ra_macro_fixture517 {($cond : expr )=>{{ let cond = !$crate :: always ! (!$cond ); cond }}; ($cond : expr , $fmt : literal $($arg : tt )*)=>{{ let cond = !$crate :: always ! (!$cond , $fmt $($arg )*); cond }}; }
+macro_rules! __ra_macro_fixture518 {($cond : expr )=>{$crate :: always ! ($cond , "assertion failed: {}" , stringify ! ($cond ))}; ($cond : expr , $fmt : literal $($arg : tt )*)=>{{ let cond = $cond ; if cfg ! ( debug_assertions )|| $crate :: __FORCE { assert ! ( cond , $fmt $($arg )*); } if ! cond {$crate :: __log_error ! ($fmt $($arg )*); } cond }}; }
+macro_rules! __ra_macro_fixture519 {($msg : literal $(,)?)=>{ return $crate :: private :: Err ($crate :: anyhow ! ($msg ))}; ($err : expr $(,)?)=>{ return $crate :: private :: Err ($crate :: anyhow ! ($err ))}; ($fmt : expr , $($arg : tt )*)=>{ return $crate :: private :: Err ($crate :: anyhow ! ($fmt , $($arg )*))}; }
+macro_rules! __ra_macro_fixture520 {($msg : literal $(,)?)=>{$crate :: private :: new_adhoc ($msg )}; ($err : expr $(,)?)=>({ use $crate :: private :: kind ::*; match $err { error =>(& error ). anyhow_kind (). new ( error ), }}); ($fmt : expr , $($arg : tt )*)=>{$crate :: private :: new_adhoc ( format ! ($fmt , $($arg )*))}; }
+macro_rules! __ra_macro_fixture521 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Info , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Info , $($arg )+))}
+macro_rules! __ra_macro_fixture522 {[$($sl : expr , $sc : expr ; $el : expr , $ec : expr =>$text : expr ),+]=>{ vec ! [$(TextDocumentContentChangeEvent { range : Some ( Range { start : Position { line : $sl , character : $sc }, end : Position { line : $el , character : $ec }, }), range_length : None , text : String :: from ($text ), }),+]}; }
+macro_rules! __ra_macro_fixture523 {[$path : expr ]=>{$crate :: ExpectFile { path : std :: path :: PathBuf :: from ($path ), position : file ! (), }}; }
+macro_rules! __ra_macro_fixture524 {($($key : literal : $value : tt ),*$(,)?)=>{{$(map . insert ($key . into (), serde_json :: json ! ($value )); )*}}; }
+macro_rules! __ra_macro_fixture525 {($expr : expr , $or : expr )=>{ try_ ! ($expr ). unwrap_or ($or )}; }
+macro_rules! __ra_macro_fixture526 {($expr : expr )=>{|| -> _ { Some ($expr )}()}; }
+macro_rules! __ra_macro_fixture527 {($($arg : tt )*)=>($crate :: io :: _print ($crate :: format_args ! ($($arg )*))); }
+macro_rules! __ra_macro_fixture528 {($fmt : literal , $($tt : tt ),*)=>{ mbe :: ExpandError :: ProcMacroError ( tt :: ExpansionError :: Unknown ( format ! ($fmt , $($tt ),*)))}; ($fmt : literal )=>{ mbe :: ExpandError :: ProcMacroError ( tt :: ExpansionError :: Unknown ($fmt . to_string ()))}}
+macro_rules! __ra_macro_fixture529 {($($tt : tt )* )=>{$crate :: quote :: IntoTt :: to_subtree ($crate :: __quote ! ($($tt )*))}}
+macro_rules! __ra_macro_fixture530 {()=>{ Vec ::< tt :: TokenTree >:: new ()}; (@ SUBTREE $delim : ident $($tt : tt )* )=>{{ let children = $crate :: __quote ! ($($tt )*); tt :: Subtree { delimiter : Some ( tt :: Delimiter { kind : tt :: DelimiterKind ::$delim , id : tt :: TokenId :: unspecified (), }), token_trees : $crate :: quote :: IntoTt :: to_tokens ( children ), }}}; (@ PUNCT $first : literal )=>{{ vec ! [ tt :: Leaf :: Punct ( tt :: Punct { char : $first , spacing : tt :: Spacing :: Alone , id : tt :: TokenId :: unspecified (), }). into ()]}}; (@ PUNCT $first : literal , $sec : literal )=>{{ vec ! [ tt :: Leaf :: Punct ( tt :: Punct { char : $first , spacing : tt :: Spacing :: Joint , id : tt :: TokenId :: unspecified (), }). into (), tt :: Leaf :: Punct ( tt :: Punct { char : $sec , spacing : tt :: Spacing :: Alone , id : tt :: TokenId :: unspecified (), }). into ()]}}; (# $first : ident $($tail : tt )* )=>{{ let token = $crate :: quote :: ToTokenTree :: to_token ($first ); let mut tokens = vec ! [ token . into ()]; let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; (## $first : ident $($tail : tt )* )=>{{ let mut tokens = $first . into_iter (). map ($crate :: quote :: ToTokenTree :: to_token ). collect ::< Vec < tt :: TokenTree >> (); let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; ({$($tt : tt )* })=>{$crate :: __quote ! (@ SUBTREE Brace $($tt )*)}; ([$($tt : tt )* ])=>{$crate :: __quote ! (@ SUBTREE Bracket $($tt )*)}; (($($tt : tt )* ))=>{$crate :: __quote ! (@ SUBTREE Parenthesis $($tt )*)}; ($tt : literal )=>{ vec ! [$crate :: quote :: ToTokenTree :: to_token ($tt ). into ()]}; ($tt : ident )=>{ vec ! [{ tt :: Leaf :: Ident ( tt :: Ident { text : stringify ! ($tt ). into (), id : tt :: TokenId :: unspecified (), }). into ()}]}; (-> )=>{$crate :: __quote ! 
(@ PUNCT '-' , '>' )}; (& )=>{$crate :: __quote ! (@ PUNCT '&' )}; (, )=>{$crate :: __quote ! (@ PUNCT ',' )}; (: )=>{$crate :: __quote ! (@ PUNCT ':' )}; (; )=>{$crate :: __quote ! (@ PUNCT ';' )}; (:: )=>{$crate :: __quote ! (@ PUNCT ':' , ':' )}; (. )=>{$crate :: __quote ! (@ PUNCT '.' )}; (< )=>{$crate :: __quote ! (@ PUNCT '<' )}; (> )=>{$crate :: __quote ! (@ PUNCT '>' )}; ($first : tt $($tail : tt )+ )=>{{ let mut tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($first )); let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; }
+macro_rules! __ra_macro_fixture531 {($($name : ident )*)=>{$(if let Some ( it )= & self .$name { f . field ( stringify ! ($name ), it ); })*}}
+macro_rules! __ra_macro_fixture532 {($fmt : expr )=>{ RenameError ( format ! ($fmt ))}; ($fmt : expr , $($arg : tt )+)=>{ RenameError ( format ! ($fmt , $($arg )+))}}
+macro_rules! __ra_macro_fixture533 {($($tokens : tt )*)=>{ return Err ( format_err ! ($($tokens )*))}}
+macro_rules! __ra_macro_fixture534 {()=>{$crate :: __private :: TokenStream :: new ()}; ($($tt : tt )*)=>{{ let mut _s = $crate :: __private :: TokenStream :: new (); $crate :: quote_each_token ! ( _s $($tt )*); _s }}; }
+macro_rules! __ra_macro_fixture535 {($tokens : ident $($tts : tt )*)=>{$crate :: quote_tokens_with_context ! ($tokens (@ @ @ @ @ @ $($tts )*)(@ @ @ @ @ $($tts )* @)(@ @ @ @ $($tts )* @ @)(@ @ @ $(($tts ))* @ @ @)(@ @ $($tts )* @ @ @ @)(@ $($tts )* @ @ @ @ @)($($tts )* @ @ @ @ @ @)); }; }
+macro_rules! __ra_macro_fixture536 {($tokens : ident ($($b3 : tt )*)($($b2 : tt )*)($($b1 : tt )*)($($curr : tt )*)($($a1 : tt )*)($($a2 : tt )*)($($a3 : tt )*))=>{$($crate :: quote_token_with_context ! ($tokens $b3 $b2 $b1 $curr $a1 $a2 $a3 ); )* }; }
+macro_rules! __ra_macro_fixture537 {($tokens : ident $b3 : tt $b2 : tt $b1 : tt @ $a1 : tt $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt $b2 : tt $b1 : tt (#)($($inner : tt )* )* $a3 : tt )=>{{ use $crate :: __private :: ext ::*; let has_iter = $crate :: __private :: ThereIsNoIteratorInRepetition ; $crate :: pounded_var_names ! ( quote_bind_into_iter ! ( has_iter )()$($inner )*); let _: $crate :: __private :: HasIterator = has_iter ; while true {$crate :: pounded_var_names ! ( quote_bind_next_or_break ! ()()$($inner )*); $crate :: quote_each_token ! ($tokens $($inner )*); }}}; ($tokens : ident $b3 : tt $b2 : tt # (($($inner : tt )* ))* $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt # ($($inner : tt )* )(*)$a1 : tt $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt $b2 : tt $b1 : tt (#)($($inner : tt )* )$sep : tt *)=>{{ use $crate :: __private :: ext ::*; let mut _i = 0usize ; let has_iter = $crate :: __private :: ThereIsNoIteratorInRepetition ; $crate :: pounded_var_names ! ( quote_bind_into_iter ! ( has_iter )()$($inner )*); let _: $crate :: __private :: HasIterator = has_iter ; while true {$crate :: pounded_var_names ! ( quote_bind_next_or_break ! ()()$($inner )*); if _i > 0 {$crate :: quote_token ! ($tokens $sep ); } _i += 1 ; $crate :: quote_each_token ! ($tokens $($inner )*); }}}; ($tokens : ident $b3 : tt $b2 : tt # (($($inner : tt )* ))$sep : tt * $a3 : tt )=>{}; ($tokens : ident $b3 : tt # ($($inner : tt )* )($sep : tt )* $a2 : tt $a3 : tt )=>{}; ($tokens : ident # ($($inner : tt )* )* (*)$a1 : tt $a2 : tt $a3 : tt )=>{$crate :: quote_token ! 
($tokens *); }; ($tokens : ident # ($($inner : tt )* )$sep : tt (*)$a1 : tt $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt $b2 : tt $b1 : tt (#)$var : ident $a2 : tt $a3 : tt )=>{$crate :: ToTokens :: to_tokens (&$var , & mut $tokens ); }; ($tokens : ident $b3 : tt $b2 : tt # ($var : ident )$a1 : tt $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt $b2 : tt $b1 : tt ($curr : tt )$a1 : tt $a2 : tt $a3 : tt )=>{$crate :: quote_token ! ($tokens $curr ); }; }
+macro_rules! __ra_macro_fixture538 {($tokens : ident ($($inner : tt )* ))=>{$crate :: __private :: push_group (& mut $tokens , $crate :: __private :: Delimiter :: Parenthesis , $crate :: quote ! ($($inner )*), ); }; ($tokens : ident [$($inner : tt )* ])=>{$crate :: __private :: push_group (& mut $tokens , $crate :: __private :: Delimiter :: Bracket , $crate :: quote ! ($($inner )*), ); }; ($tokens : ident {$($inner : tt )* })=>{$crate :: __private :: push_group (& mut $tokens , $crate :: __private :: Delimiter :: Brace , $crate :: quote ! ($($inner )*), ); }; ($tokens : ident +)=>{$crate :: __private :: push_add (& mut $tokens ); }; ($tokens : ident +=)=>{$crate :: __private :: push_add_eq (& mut $tokens ); }; ($tokens : ident &)=>{$crate :: __private :: push_and (& mut $tokens ); }; ($tokens : ident &&)=>{$crate :: __private :: push_and_and (& mut $tokens ); }; ($tokens : ident &=)=>{$crate :: __private :: push_and_eq (& mut $tokens ); }; ($tokens : ident @)=>{$crate :: __private :: push_at (& mut $tokens ); }; ($tokens : ident !)=>{$crate :: __private :: push_bang (& mut $tokens ); }; ($tokens : ident ^)=>{$crate :: __private :: push_caret (& mut $tokens ); }; ($tokens : ident ^=)=>{$crate :: __private :: push_caret_eq (& mut $tokens ); }; ($tokens : ident :)=>{$crate :: __private :: push_colon (& mut $tokens ); }; ($tokens : ident ::)=>{$crate :: __private :: push_colon2 (& mut $tokens ); }; ($tokens : ident ,)=>{$crate :: __private :: push_comma (& mut $tokens ); }; ($tokens : ident /)=>{$crate :: __private :: push_div (& mut $tokens ); }; ($tokens : ident /=)=>{$crate :: __private :: push_div_eq (& mut $tokens ); }; ($tokens : ident .)=>{$crate :: __private :: push_dot (& mut $tokens ); }; ($tokens : ident ..)=>{$crate :: __private :: push_dot2 (& mut $tokens ); }; ($tokens : ident ...)=>{$crate :: __private :: push_dot3 (& mut $tokens ); }; ($tokens : ident ..=)=>{$crate :: __private :: push_dot_dot_eq (& mut $tokens ); }; ($tokens : ident =)=>{$crate :: 
__private :: push_eq (& mut $tokens ); }; ($tokens : ident ==)=>{$crate :: __private :: push_eq_eq (& mut $tokens ); }; ($tokens : ident >=)=>{$crate :: __private :: push_ge (& mut $tokens ); }; ($tokens : ident >)=>{$crate :: __private :: push_gt (& mut $tokens ); }; ($tokens : ident <=)=>{$crate :: __private :: push_le (& mut $tokens ); }; ($tokens : ident <)=>{$crate :: __private :: push_lt (& mut $tokens ); }; ($tokens : ident *=)=>{$crate :: __private :: push_mul_eq (& mut $tokens ); }; ($tokens : ident !=)=>{$crate :: __private :: push_ne (& mut $tokens ); }; ($tokens : ident |)=>{$crate :: __private :: push_or (& mut $tokens ); }; ($tokens : ident |=)=>{$crate :: __private :: push_or_eq (& mut $tokens ); }; ($tokens : ident ||)=>{$crate :: __private :: push_or_or (& mut $tokens ); }; ($tokens : ident #)=>{$crate :: __private :: push_pound (& mut $tokens ); }; ($tokens : ident ?)=>{$crate :: __private :: push_question (& mut $tokens ); }; ($tokens : ident ->)=>{$crate :: __private :: push_rarrow (& mut $tokens ); }; ($tokens : ident <-)=>{$crate :: __private :: push_larrow (& mut $tokens ); }; ($tokens : ident %)=>{$crate :: __private :: push_rem (& mut $tokens ); }; ($tokens : ident %=)=>{$crate :: __private :: push_rem_eq (& mut $tokens ); }; ($tokens : ident =>)=>{$crate :: __private :: push_fat_arrow (& mut $tokens ); }; ($tokens : ident ;)=>{$crate :: __private :: push_semi (& mut $tokens ); }; ($tokens : ident <<)=>{$crate :: __private :: push_shl (& mut $tokens ); }; ($tokens : ident <<=)=>{$crate :: __private :: push_shl_eq (& mut $tokens ); }; ($tokens : ident >>)=>{$crate :: __private :: push_shr (& mut $tokens ); }; ($tokens : ident >>=)=>{$crate :: __private :: push_shr_eq (& mut $tokens ); }; ($tokens : ident *)=>{$crate :: __private :: push_star (& mut $tokens ); }; ($tokens : ident -)=>{$crate :: __private :: push_sub (& mut $tokens ); }; ($tokens : ident -=)=>{$crate :: __private :: push_sub_eq (& mut $tokens ); }; ($tokens : ident $ident : 
ident )=>{$crate :: __private :: push_ident (& mut $tokens , stringify ! ($ident )); }; ($tokens : ident $other : tt )=>{$crate :: __private :: parse (& mut $tokens , stringify ! ($other )); }; }
+macro_rules! __ra_macro_fixture539 {($call : ident ! $extra : tt $($tts : tt )*)=>{$crate :: pounded_var_names_with_context ! ($call ! $extra (@ $($tts )*)($($tts )* @))}; }
+macro_rules! __ra_macro_fixture540 {($call : ident ! $extra : tt ($($b1 : tt )*)($($curr : tt )*))=>{$($crate :: pounded_var_with_context ! ($call ! $extra $b1 $curr ); )* }; }
+macro_rules! __ra_macro_fixture541 {($call : ident ! $extra : tt $b1 : tt ($($inner : tt )* ))=>{$crate :: pounded_var_names ! ($call ! $extra $($inner )*); }; ($call : ident ! $extra : tt $b1 : tt [$($inner : tt )* ])=>{$crate :: pounded_var_names ! ($call ! $extra $($inner )*); }; ($call : ident ! $extra : tt $b1 : tt {$($inner : tt )* })=>{$crate :: pounded_var_names ! ($call ! $extra $($inner )*); }; ($call : ident ! ($($extra : tt )*)# $var : ident )=>{$crate ::$call ! ($($extra )* $var ); }; ($call : ident ! $extra : tt $b1 : tt $curr : tt )=>{}; }
+macro_rules! __ra_macro_fixture542 {($has_iter : ident $var : ident )=>{# [ allow ( unused_mut )] let ( mut $var , i )= $var . quote_into_iter (); let $has_iter = $has_iter | i ; }; }
+macro_rules! __ra_macro_fixture543 {($var : ident )=>{ let $var = match $var . next (){ Some ( _x )=>$crate :: __private :: RepInterp ( _x ), None => break , }; }; }
+macro_rules! __ra_macro_fixture544 {($fmt : expr )=>{$crate :: format_ident_impl ! ([:: std :: option :: Option :: None , $fmt ])}; ($fmt : expr , $($rest : tt )*)=>{$crate :: format_ident_impl ! ([:: std :: option :: Option :: None , $fmt ]$($rest )*)}; }
+macro_rules! __ra_macro_fixture545 {([$span : expr , $($fmt : tt )*])=>{$crate :: __private :: mk_ident (& format ! ($($fmt )*), $span )}; ([$old : expr , $($fmt : tt )*] span = $span : expr )=>{$crate :: format_ident_impl ! ([$old , $($fmt )*] span = $span ,)}; ([$old : expr , $($fmt : tt )*] span = $span : expr , $($rest : tt )*)=>{$crate :: format_ident_impl ! ([:: std :: option :: Option :: Some ::<$crate :: __private :: Span > ($span ), $($fmt )* ]$($rest )*)}; ([$span : expr , $($fmt : tt )*]$name : ident = $arg : expr )=>{$crate :: format_ident_impl ! ([$span , $($fmt )*]$name = $arg ,)}; ([$span : expr , $($fmt : tt )*]$name : ident = $arg : expr , $($rest : tt )*)=>{ match $crate :: __private :: IdentFragmentAdapter (&$arg ){ arg =>$crate :: format_ident_impl ! ([$span . or ( arg . span ()), $($fmt )*, $name = arg ]$($rest )*), }}; ([$span : expr , $($fmt : tt )*]$arg : expr )=>{$crate :: format_ident_impl ! ([$span , $($fmt )*]$arg ,)}; ([$span : expr , $($fmt : tt )*]$arg : expr , $($rest : tt )*)=>{ match $crate :: __private :: IdentFragmentAdapter (&$arg ){ arg =>$crate :: format_ident_impl ! ([$span . or ( arg . span ()), $($fmt )*, arg ]$($rest )*), }}; }
+macro_rules! __ra_macro_fixture546 {()=>( panic ! ( "not yet implemented" )); ($($arg : tt )+)=>( panic ! ( "not yet implemented: {}" , $crate :: format_args ! ($($arg )+))); }
+macro_rules! __ra_macro_fixture547 {($($name : expr ),+ $(,)?)=>{{ let mut v = ArrayVec ::< [ LangItemTarget ; 2 ]>:: new (); $(v . extend ( db . lang_item ( cur_crate , $name . into ())); )+ v }}; }
+macro_rules! __ra_macro_fixture548 {($ctor : pat , $param : pat )=>{ crate :: Ty :: Apply ( crate :: ApplicationTy { ctor : $ctor , parameters : $param })}; ($ctor : pat )=>{ ty_app ! ($ctor , _)}; }
+macro_rules! __ra_macro_fixture549 {(@ one $x : expr )=>( 1usize ); ($elem : expr ; $n : expr )=>({$crate :: SmallVec :: from_elem ($elem , $n )}); ($($x : expr ),*$(,)*)=>({ let count = 0usize $(+ $crate :: smallvec ! (@ one $x ))*; # [ allow ( unused_mut )] let mut vec = $crate :: SmallVec :: new (); if count <= vec . inline_size (){$(vec . push ($x );)* vec } else {$crate :: SmallVec :: from_vec ($crate :: alloc :: vec ! [$($x ,)*])}}); }
+macro_rules! __ra_macro_fixture550 {($($q : path )*)=>{$(let before = memory_usage (). allocated ; $q . in_db ( self ). sweep ( sweep ); let after = memory_usage (). allocated ; let q : $q = Default :: default (); let name = format ! ( "{:?}" , q ); acc . push (( name , before - after )); let before = memory_usage (). allocated ; $q . in_db ( self ). sweep ( sweep . discard_everything ()); let after = memory_usage (). allocated ; let q : $q = Default :: default (); let name = format ! ( "{:?} (deps)" , q ); acc . push (( name , before - after )); let before = memory_usage (). allocated ; $q . in_db ( self ). purge (); let after = memory_usage (). allocated ; let q : $q = Default :: default (); let name = format ! ( "{:?} (purge)" , q ); acc . push (( name , before - after )); )*}}
+macro_rules! __ra_macro_fixture551 {($($arg : tt )*)=>( if $crate :: cfg ! ( debug_assertions ){$crate :: assert ! ($($arg )*); })}
+macro_rules! __ra_macro_fixture552 {()=>{{ let anchor = match self . l_curly_token (){ Some ( it )=> it . into (), None => return self . clone (), }; InsertPosition :: After ( anchor )}}; }
+macro_rules! __ra_macro_fixture553 {($anchor : expr )=>{ if let Some ( comma )= $anchor . syntax (). siblings_with_tokens ( Direction :: Next ). find (| it | it . kind ()== T ! [,]){ InsertPosition :: After ( comma )} else { to_insert . insert ( 0 , make :: token ( T ! [,]). into ()); InsertPosition :: After ($anchor . syntax (). clone (). into ())}}; }
+macro_rules! __ra_macro_fixture554 {($anchor : expr )=>{ if let Some ( comma )= $anchor . syntax (). siblings_with_tokens ( Direction :: Next ). find (| it | it . kind ()== T ! [,]){ InsertPosition :: After ( comma )} else { to_insert . insert ( 0 , make :: token ( T ! [,]). into ()); InsertPosition :: After ($anchor . syntax (). clone (). into ())}}; }
+macro_rules! __ra_macro_fixture555 {()=>{{ let anchor = match self . l_angle_token (){ Some ( it )=> it . into (), None => return self . clone (), }; InsertPosition :: After ( anchor )}}; }
+macro_rules! __ra_macro_fixture556 {()=>{ for _ in 0 .. level { buf . push_str ( " " ); }}; }
+macro_rules! __ra_macro_fixture557 {()=>{ ExpandError :: BindingError ( format ! ( "" ))}; ($($tt : tt )*)=>{ ExpandError :: BindingError ( format ! ($($tt )*))}; }
+macro_rules! __ra_macro_fixture558 {($($tt : tt )*)=>{ return Err ( err ! ($($tt )*))}; }
+macro_rules! __ra_macro_fixture559 {($($tt : tt )*)=>{ ParseError :: UnexpectedToken (($($tt )*). to_string ())}; }
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
new file mode 100644
index 000000000..f02a51ab6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "base-db"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+salsa = "0.17.0-pre.2"
+rustc-hash = "1.1.0"
+
+syntax = { path = "../syntax", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+test-utils = { path = "../test-utils", version = "0.0.0" }
+vfs = { path = "../vfs", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs
new file mode 100644
index 000000000..b57f23457
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs
@@ -0,0 +1,85 @@
+//! Defines a unit of change that can be applied to the database to get the next
+//! state. Changes are transactional.
+
+use std::{fmt, sync::Arc};
+
+use salsa::Durability;
+use vfs::FileId;
+
+use crate::{CrateGraph, SourceDatabaseExt, SourceRoot, SourceRootId};
+
+/// Encapsulate a bunch of raw `.set` calls on the database.
+#[derive(Default)]
+pub struct Change {
+ pub roots: Option<Vec<SourceRoot>>,
+ pub files_changed: Vec<(FileId, Option<Arc<String>>)>,
+ pub crate_graph: Option<CrateGraph>,
+}
+
+impl fmt::Debug for Change {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut d = fmt.debug_struct("Change");
+ if let Some(roots) = &self.roots {
+ d.field("roots", roots);
+ }
+ if !self.files_changed.is_empty() {
+ d.field("files_changed", &self.files_changed.len());
+ }
+ if self.crate_graph.is_some() {
+ d.field("crate_graph", &self.crate_graph);
+ }
+ d.finish()
+ }
+}
+
+impl Change {
+ pub fn new() -> Change {
+ Change::default()
+ }
+
+ pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
+ self.roots = Some(roots);
+ }
+
+ pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<String>>) {
+ self.files_changed.push((file_id, new_text))
+ }
+
+ pub fn set_crate_graph(&mut self, graph: CrateGraph) {
+ self.crate_graph = Some(graph);
+ }
+
+ pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
+ let _p = profile::span("RootDatabase::apply_change");
+ if let Some(roots) = self.roots {
+ for (idx, root) in roots.into_iter().enumerate() {
+ let root_id = SourceRootId(idx as u32);
+ let durability = durability(&root);
+ for file_id in root.iter() {
+ db.set_file_source_root_with_durability(file_id, root_id, durability);
+ }
+ db.set_source_root_with_durability(root_id, Arc::new(root), durability);
+ }
+ }
+
+ for (file_id, text) in self.files_changed {
+ let source_root_id = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root_id);
+ let durability = durability(&source_root);
+ // XXX: can't actually remove the file, just reset the text
+ let text = text.unwrap_or_default();
+ db.set_file_text_with_durability(file_id, text, durability)
+ }
+ if let Some(crate_graph) = self.crate_graph {
+ db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH)
+ }
+ }
+}
+
+fn durability(source_root: &SourceRoot) -> Durability {
+ if source_root.is_library {
+ Durability::HIGH
+ } else {
+ Durability::LOW
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
new file mode 100644
index 000000000..8e6e6a11a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
@@ -0,0 +1,494 @@
+//! A set of high-level utility fixture methods to use in tests.
+use std::{mem, str::FromStr, sync::Arc};
+
+use cfg::CfgOptions;
+use rustc_hash::FxHashMap;
+use test_utils::{
+ extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER,
+};
+use tt::Subtree;
+use vfs::{file_set::FileSet, VfsPath};
+
+use crate::{
+ input::{CrateName, CrateOrigin, LangCrateOrigin},
+ Change, CrateDisplayName, CrateGraph, CrateId, Dependency, Edition, Env, FileId, FilePosition,
+ FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, SourceDatabaseExt,
+ SourceRoot, SourceRootId,
+};
+
+pub const WORKSPACE: SourceRootId = SourceRootId(0);
+
+pub trait WithFixture: Default + SourceDatabaseExt + 'static {
+ fn with_single_file(ra_fixture: &str) -> (Self, FileId) {
+ let fixture = ChangeFixture::parse(ra_fixture);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert_eq!(fixture.files.len(), 1);
+ (db, fixture.files[0])
+ }
+
+ fn with_many_files(ra_fixture: &str) -> (Self, Vec<FileId>) {
+ let fixture = ChangeFixture::parse(ra_fixture);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert!(fixture.file_position.is_none());
+ (db, fixture.files)
+ }
+
+ fn with_files(ra_fixture: &str) -> Self {
+ let fixture = ChangeFixture::parse(ra_fixture);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert!(fixture.file_position.is_none());
+ db
+ }
+
+ fn with_files_extra_proc_macros(
+ ra_fixture: &str,
+ proc_macros: Vec<(String, ProcMacro)>,
+ ) -> Self {
+ let fixture = ChangeFixture::parse_with_proc_macros(ra_fixture, proc_macros);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert!(fixture.file_position.is_none());
+ db
+ }
+
+ fn with_position(ra_fixture: &str) -> (Self, FilePosition) {
+ let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
+ let offset = range_or_offset.expect_offset();
+ (db, FilePosition { file_id, offset })
+ }
+
+ fn with_range(ra_fixture: &str) -> (Self, FileRange) {
+ let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
+ let range = range_or_offset.expect_range();
+ (db, FileRange { file_id, range })
+ }
+
+ fn with_range_or_offset(ra_fixture: &str) -> (Self, FileId, RangeOrOffset) {
+ let fixture = ChangeFixture::parse(ra_fixture);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ let (file_id, range_or_offset) = fixture
+ .file_position
+ .expect("Could not find file position in fixture. Did you forget to add an `$0`?");
+ (db, file_id, range_or_offset)
+ }
+
+ fn test_crate(&self) -> CrateId {
+ let crate_graph = self.crate_graph();
+ let mut it = crate_graph.iter();
+ let res = it.next().unwrap();
+ assert!(it.next().is_none());
+ res
+ }
+}
+
+impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {}
+
+pub struct ChangeFixture {
+ pub file_position: Option<(FileId, RangeOrOffset)>,
+ pub files: Vec<FileId>,
+ pub change: Change,
+}
+
+impl ChangeFixture {
+ pub fn parse(ra_fixture: &str) -> ChangeFixture {
+ Self::parse_with_proc_macros(ra_fixture, Vec::new())
+ }
+
+ pub fn parse_with_proc_macros(
+ ra_fixture: &str,
+ mut proc_macros: Vec<(String, ProcMacro)>,
+ ) -> ChangeFixture {
+ let (mini_core, proc_macro_names, fixture) = Fixture::parse(ra_fixture);
+ let mut change = Change::new();
+
+ let mut files = Vec::new();
+ let mut crate_graph = CrateGraph::default();
+ let mut crates = FxHashMap::default();
+ let mut crate_deps = Vec::new();
+ let mut default_crate_root: Option<FileId> = None;
+ let mut default_cfg = CfgOptions::default();
+
+ let mut file_set = FileSet::default();
+ let mut current_source_root_kind = SourceRootKind::Local;
+ let source_root_prefix = "/".to_string();
+ let mut file_id = FileId(0);
+ let mut roots = Vec::new();
+
+ let mut file_position = None;
+
+ for entry in fixture {
+ let text = if entry.text.contains(CURSOR_MARKER) {
+ if entry.text.contains(ESCAPED_CURSOR_MARKER) {
+ entry.text.replace(ESCAPED_CURSOR_MARKER, CURSOR_MARKER)
+ } else {
+ let (range_or_offset, text) = extract_range_or_offset(&entry.text);
+ assert!(file_position.is_none());
+ file_position = Some((file_id, range_or_offset));
+ text
+ }
+ } else {
+ entry.text.clone()
+ };
+
+ let meta = FileMeta::from(entry);
+ assert!(meta.path.starts_with(&source_root_prefix));
+ if !meta.deps.is_empty() {
+ assert!(meta.krate.is_some(), "can't specify deps without naming the crate")
+ }
+
+ if let Some(kind) = &meta.introduce_new_source_root {
+ let root = match current_source_root_kind {
+ SourceRootKind::Local => SourceRoot::new_local(mem::take(&mut file_set)),
+ SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)),
+ };
+ roots.push(root);
+ current_source_root_kind = *kind;
+ }
+
+ if let Some((krate, origin, version)) = meta.krate {
+ let crate_name = CrateName::normalize_dashes(&krate);
+ let crate_id = crate_graph.add_crate_root(
+ file_id,
+ meta.edition,
+ Some(crate_name.clone().into()),
+ version,
+ meta.cfg.clone(),
+ meta.cfg,
+ meta.env,
+ Ok(Vec::new()),
+ false,
+ origin,
+ );
+ let prev = crates.insert(crate_name.clone(), crate_id);
+ assert!(prev.is_none());
+ for dep in meta.deps {
+ let prelude = meta.extern_prelude.contains(&dep);
+ let dep = CrateName::normalize_dashes(&dep);
+ crate_deps.push((crate_name.clone(), dep, prelude))
+ }
+ } else if meta.path == "/main.rs" || meta.path == "/lib.rs" {
+ assert!(default_crate_root.is_none());
+ default_crate_root = Some(file_id);
+ default_cfg = meta.cfg;
+ }
+
+ change.change_file(file_id, Some(Arc::new(text)));
+ let path = VfsPath::new_virtual_path(meta.path);
+ file_set.insert(file_id, path);
+ files.push(file_id);
+ file_id.0 += 1;
+ }
+
+ if crates.is_empty() {
+ let crate_root = default_crate_root
+ .expect("missing default crate root, specify a main.rs or lib.rs");
+ crate_graph.add_crate_root(
+ crate_root,
+ Edition::CURRENT,
+ Some(CrateName::new("test").unwrap().into()),
+ None,
+ default_cfg.clone(),
+ default_cfg,
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ } else {
+ for (from, to, prelude) in crate_deps {
+ let from_id = crates[&from];
+ let to_id = crates[&to];
+ crate_graph
+ .add_dep(
+ from_id,
+ Dependency::with_prelude(CrateName::new(&to).unwrap(), to_id, prelude),
+ )
+ .unwrap();
+ }
+ }
+
+ if let Some(mini_core) = mini_core {
+ let core_file = file_id;
+ file_id.0 += 1;
+
+ let mut fs = FileSet::default();
+ fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
+ roots.push(SourceRoot::new_library(fs));
+
+ change.change_file(core_file, Some(Arc::new(mini_core.source_code())));
+
+ let all_crates = crate_graph.crates_in_topological_order();
+
+ let core_crate = crate_graph.add_crate_root(
+ core_file,
+ Edition::Edition2021,
+ Some(CrateDisplayName::from_canonical_name("core".to_string())),
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::Lang(LangCrateOrigin::Core),
+ );
+
+ for krate in all_crates {
+ crate_graph
+ .add_dep(krate, Dependency::new(CrateName::new("core").unwrap(), core_crate))
+ .unwrap();
+ }
+ }
+
+ if !proc_macro_names.is_empty() {
+ let proc_lib_file = file_id;
+ file_id.0 += 1;
+
+ proc_macros.extend(default_test_proc_macros());
+ let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macros);
+ let mut fs = FileSet::default();
+ fs.insert(
+ proc_lib_file,
+ VfsPath::new_virtual_path("/sysroot/proc_macros/lib.rs".to_string()),
+ );
+ roots.push(SourceRoot::new_library(fs));
+
+ change.change_file(proc_lib_file, Some(Arc::new(source)));
+
+ let all_crates = crate_graph.crates_in_topological_order();
+
+ let proc_macros_crate = crate_graph.add_crate_root(
+ proc_lib_file,
+ Edition::Edition2021,
+ Some(CrateDisplayName::from_canonical_name("proc_macros".to_string())),
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(proc_macro),
+ true,
+ CrateOrigin::CratesIo { repo: None },
+ );
+
+ for krate in all_crates {
+ crate_graph
+ .add_dep(
+ krate,
+ Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate),
+ )
+ .unwrap();
+ }
+ }
+
+ let root = match current_source_root_kind {
+ SourceRootKind::Local => SourceRoot::new_local(mem::take(&mut file_set)),
+ SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)),
+ };
+ roots.push(root);
+ change.set_roots(roots);
+ change.set_crate_graph(crate_graph);
+
+ ChangeFixture { file_position, files, change }
+ }
+}
+
+fn default_test_proc_macros() -> [(String, ProcMacro); 4] {
+ [
+ (
+ r#"
+#[proc_macro_attribute]
+pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "identity".into(),
+ kind: crate::ProcMacroKind::Attr,
+ expander: Arc::new(IdentityProcMacroExpander),
+ },
+ ),
+ (
+ r#"
+#[proc_macro_derive(DeriveIdentity)]
+pub fn derive_identity(item: TokenStream) -> TokenStream {
+ item
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "DeriveIdentity".into(),
+ kind: crate::ProcMacroKind::CustomDerive,
+ expander: Arc::new(IdentityProcMacroExpander),
+ },
+ ),
+ (
+ r#"
+#[proc_macro_attribute]
+pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream {
+ attr
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "input_replace".into(),
+ kind: crate::ProcMacroKind::Attr,
+ expander: Arc::new(AttributeInputReplaceProcMacroExpander),
+ },
+ ),
+ (
+ r#"
+#[proc_macro]
+pub fn mirror(input: TokenStream) -> TokenStream {
+ input
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "mirror".into(),
+ kind: crate::ProcMacroKind::FuncLike,
+ expander: Arc::new(MirrorProcMacroExpander),
+ },
+ ),
+ ]
+}
+
+fn filter_test_proc_macros(
+ proc_macro_names: &[String],
+ proc_macro_defs: Vec<(String, ProcMacro)>,
+) -> (Vec<ProcMacro>, String) {
+ // The source here is only required so that paths to the macros exist and are resolvable.
+ let mut source = String::new();
+ let mut proc_macros = Vec::new();
+
+ for (c, p) in proc_macro_defs {
+ if !proc_macro_names.iter().any(|name| name == &stdx::to_lower_snake_case(&p.name)) {
+ continue;
+ }
+ proc_macros.push(p);
+ source += &c;
+ }
+
+ (proc_macros, source)
+}
+
+#[derive(Debug, Clone, Copy)]
+enum SourceRootKind {
+ Local,
+ Library,
+}
+
+#[derive(Debug)]
+struct FileMeta {
+ path: String,
+ krate: Option<(String, CrateOrigin, Option<String>)>,
+ deps: Vec<String>,
+ extern_prelude: Vec<String>,
+ cfg: CfgOptions,
+ edition: Edition,
+ env: Env,
+ introduce_new_source_root: Option<SourceRootKind>,
+}
+
+fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) {
+ if let Some((a, b)) = crate_str.split_once('@') {
+ let (version, origin) = match b.split_once(':') {
+ Some(("CratesIo", data)) => match data.split_once(',') {
+ Some((version, url)) => {
+ (version, CrateOrigin::CratesIo { repo: Some(url.to_owned()) })
+ }
+ _ => panic!("Bad crates.io parameter: {}", data),
+ },
+ _ => panic!("Bad string for crate origin: {}", b),
+ };
+ (a.to_owned(), origin, Some(version.to_string()))
+ } else {
+ let crate_origin = match &*crate_str {
+ "std" => CrateOrigin::Lang(LangCrateOrigin::Std),
+ "core" => CrateOrigin::Lang(LangCrateOrigin::Core),
+ _ => CrateOrigin::CratesIo { repo: None },
+ };
+ (crate_str, crate_origin, None)
+ }
+}
+
+impl From<Fixture> for FileMeta {
+ fn from(f: Fixture) -> FileMeta {
+ let mut cfg = CfgOptions::default();
+ f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into()));
+ f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into()));
+ let deps = f.deps;
+ FileMeta {
+ path: f.path,
+ krate: f.krate.map(parse_crate),
+ extern_prelude: f.extern_prelude.unwrap_or_else(|| deps.clone()),
+ deps,
+ cfg,
+ edition: f.edition.as_ref().map_or(Edition::CURRENT, |v| Edition::from_str(v).unwrap()),
+ env: f.env.into_iter().collect(),
+ introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind {
+ "local" => SourceRootKind::Local,
+ "library" => SourceRootKind::Library,
+ invalid => panic!("invalid source root kind '{}'", invalid),
+ }),
+ }
+ }
+}
+
+// Identity mapping
+#[derive(Debug)]
+struct IdentityProcMacroExpander;
+impl ProcMacroExpander for IdentityProcMacroExpander {
+ fn expand(
+ &self,
+ subtree: &Subtree,
+ _: Option<&Subtree>,
+ _: &Env,
+ ) -> Result<Subtree, ProcMacroExpansionError> {
+ Ok(subtree.clone())
+ }
+}
+
+// Pastes the attribute input as its output
+#[derive(Debug)]
+struct AttributeInputReplaceProcMacroExpander;
+impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
+ fn expand(
+ &self,
+ _: &Subtree,
+ attrs: Option<&Subtree>,
+ _: &Env,
+ ) -> Result<Subtree, ProcMacroExpansionError> {
+ attrs
+ .cloned()
+ .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
+ }
+}
+
+#[derive(Debug)]
+struct MirrorProcMacroExpander;
+impl ProcMacroExpander for MirrorProcMacroExpander {
+ fn expand(
+ &self,
+ input: &Subtree,
+ _: Option<&Subtree>,
+ _: &Env,
+ ) -> Result<Subtree, ProcMacroExpansionError> {
+ fn traverse(input: &Subtree) -> Subtree {
+ let mut res = Subtree::default();
+ res.delimiter = input.delimiter;
+ for tt in input.token_trees.iter().rev() {
+ let tt = match tt {
+ tt::TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(leaf.clone()),
+ tt::TokenTree::Subtree(sub) => tt::TokenTree::Subtree(traverse(sub)),
+ };
+ res.token_trees.push(tt);
+ }
+ res
+ }
+ Ok(traverse(input))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
new file mode 100644
index 000000000..9b5a10acf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -0,0 +1,792 @@
+//! This module specifies the input to rust-analyzer. In some sense, this is
+//! **the** most important module, because all other fancy stuff is strictly
+//! derived from this input.
+//!
+//! Note that neither this module, nor any other part of the analyzer's core do
+//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
+//! actual IO is done and lowered to input.
+
+use std::{fmt, iter::FromIterator, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};
+
+use cfg::CfgOptions;
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::SmolStr;
+use tt::Subtree;
+use vfs::{file_set::FileSet, FileId, VfsPath};
+
+/// Files are grouped into source roots. A source root is a directory on the
+/// file systems which is watched for changes. Typically it corresponds to a
+/// Rust crate. Source roots *might* be nested: in this case, a file belongs to
+/// the nearest enclosing source root. Paths to files are always relative to a
+/// source root, and the analyzer does not know the root path of the source root at
+/// all. So, a file from one source root can't refer to a file in another source
+/// root by path.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct SourceRootId(pub u32);
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct SourceRoot {
+ /// Sysroot or crates.io library.
+ ///
+    /// Libraries are considered mostly immutable; this assumption is used to
+ /// optimize salsa's query structure
+ pub is_library: bool,
+ pub(crate) file_set: FileSet,
+}
+
+impl SourceRoot {
+ pub fn new_local(file_set: FileSet) -> SourceRoot {
+ SourceRoot { is_library: false, file_set }
+ }
+ pub fn new_library(file_set: FileSet) -> SourceRoot {
+ SourceRoot { is_library: true, file_set }
+ }
+ pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
+ self.file_set.path_for_file(file)
+ }
+ pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
+ self.file_set.file_for_path(path)
+ }
+ pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
+ self.file_set.iter()
+ }
+}
+
+/// `CrateGraph` is a bit of information which turns a set of text files into a
+/// number of Rust crates.
+///
+/// Each crate is defined by the `FileId` of its root module, the set of enabled
+/// `cfg` flags and the set of dependencies.
+///
+/// Note that, due to cfg's, there might be several crates for a single `FileId`!
+///
+/// For the purposes of analysis, a crate does not have a name. Instead, names
+/// are specified on dependency edges. That is, a crate might be known under
+/// different names in different dependent crates.
+///
+/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust
+/// language proper, not a concept of the build system. In practice, we get
+/// `CrateGraph` by lowering `cargo metadata` output.
+///
+/// `CrateGraph` is `!Serialize` by design, see
+/// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
+#[derive(Debug, Clone, Default /* Serialize, Deserialize */)]
+pub struct CrateGraph {
+ arena: FxHashMap<CrateId, CrateData>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct CrateId(pub u32);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CrateName(SmolStr);
+
+impl CrateName {
+ /// Creates a crate name, checking for dashes in the string provided.
+ /// Dashes are not allowed in the crate names,
+ /// hence the input string is returned as `Err` for those cases.
+ pub fn new(name: &str) -> Result<CrateName, &str> {
+ if name.contains('-') {
+ Err(name)
+ } else {
+ Ok(Self(SmolStr::new(name)))
+ }
+ }
+
+ /// Creates a crate name, unconditionally replacing the dashes with underscores.
+ pub fn normalize_dashes(name: &str) -> CrateName {
+ Self(SmolStr::new(name.replace('-', "_")))
+ }
+
+ pub fn as_smol_str(&self) -> &SmolStr {
+ &self.0
+ }
+}
+
+impl fmt::Display for CrateName {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl ops::Deref for CrateName {
+ type Target = str;
+ fn deref(&self) -> &str {
+ &*self.0
+ }
+}
+
+/// Origin of the crates. It is used in emitting monikers.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum CrateOrigin {
+ /// Crates that are from crates.io official registry,
+ CratesIo { repo: Option<String> },
+ /// Crates that are provided by the language, like std, core, proc-macro, ...
+ Lang(LangCrateOrigin),
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum LangCrateOrigin {
+ Alloc,
+ Core,
+ ProcMacro,
+ Std,
+ Test,
+ Other,
+}
+
+impl From<&str> for LangCrateOrigin {
+ fn from(s: &str) -> Self {
+ match s {
+ "alloc" => LangCrateOrigin::Alloc,
+ "core" => LangCrateOrigin::Core,
+ "proc-macro" => LangCrateOrigin::ProcMacro,
+ "std" => LangCrateOrigin::Std,
+ "test" => LangCrateOrigin::Test,
+ _ => LangCrateOrigin::Other,
+ }
+ }
+}
+
+impl fmt::Display for LangCrateOrigin {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let text = match self {
+ LangCrateOrigin::Alloc => "alloc",
+ LangCrateOrigin::Core => "core",
+ LangCrateOrigin::ProcMacro => "proc_macro",
+ LangCrateOrigin::Std => "std",
+ LangCrateOrigin::Test => "test",
+ LangCrateOrigin::Other => "other",
+ };
+ f.write_str(text)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CrateDisplayName {
+ // The name we use to display various paths (with `_`).
+ crate_name: CrateName,
+ // The name as specified in Cargo.toml (with `-`).
+ canonical_name: String,
+}
+
+impl CrateDisplayName {
+ pub fn canonical_name(&self) -> &str {
+ &self.canonical_name
+ }
+ pub fn crate_name(&self) -> &CrateName {
+ &self.crate_name
+ }
+}
+
+impl From<CrateName> for CrateDisplayName {
+ fn from(crate_name: CrateName) -> CrateDisplayName {
+ let canonical_name = crate_name.to_string();
+ CrateDisplayName { crate_name, canonical_name }
+ }
+}
+
+impl fmt::Display for CrateDisplayName {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.crate_name.fmt(f)
+ }
+}
+
+impl ops::Deref for CrateDisplayName {
+ type Target = str;
+ fn deref(&self) -> &str {
+ &*self.crate_name
+ }
+}
+
+impl CrateDisplayName {
+ pub fn from_canonical_name(canonical_name: String) -> CrateDisplayName {
+ let crate_name = CrateName::normalize_dashes(&canonical_name);
+ CrateDisplayName { crate_name, canonical_name }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct ProcMacroId(pub u32);
+
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
+pub enum ProcMacroKind {
+ CustomDerive,
+ FuncLike,
+ Attr,
+}
+
+pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
+ fn expand(
+ &self,
+ subtree: &Subtree,
+ attrs: Option<&Subtree>,
+ env: &Env,
+ ) -> Result<Subtree, ProcMacroExpansionError>;
+}
+
+pub enum ProcMacroExpansionError {
+ Panic(String),
+ /// Things like "proc macro server was killed by OOM".
+ System(String),
+}
+
+pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>;
+
+#[derive(Debug, Clone)]
+pub struct ProcMacro {
+ pub name: SmolStr,
+ pub kind: ProcMacroKind,
+ pub expander: Arc<dyn ProcMacroExpander>,
+}
+
+#[derive(Debug, Clone)]
+pub struct CrateData {
+ pub root_file_id: FileId,
+ pub edition: Edition,
+ pub version: Option<String>,
+ /// A name used in the package's project declaration: for Cargo projects,
+ /// its `[package].name` can be different for other project types or even
+ /// absent (a dummy crate for the code snippet, for example).
+ ///
+ /// For purposes of analysis, crates are anonymous (only names in
+ /// `Dependency` matters), this name should only be used for UI.
+ pub display_name: Option<CrateDisplayName>,
+ pub cfg_options: CfgOptions,
+ pub potential_cfg_options: CfgOptions,
+ pub env: Env,
+ pub dependencies: Vec<Dependency>,
+ pub proc_macro: ProcMacroLoadResult,
+ pub origin: CrateOrigin,
+ pub is_proc_macro: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum Edition {
+ Edition2015,
+ Edition2018,
+ Edition2021,
+}
+
+impl Edition {
+ pub const CURRENT: Edition = Edition::Edition2018;
+}
+
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct Env {
+ entries: FxHashMap<String, String>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Dependency {
+ pub crate_id: CrateId,
+ pub name: CrateName,
+ prelude: bool,
+}
+
+impl Dependency {
+ pub fn new(name: CrateName, crate_id: CrateId) -> Self {
+ Self { name, crate_id, prelude: true }
+ }
+
+ pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self {
+ Self { name, crate_id, prelude }
+ }
+
+ /// Whether this dependency is to be added to the depending crate's extern prelude.
+ pub fn is_prelude(&self) -> bool {
+ self.prelude
+ }
+}
+
+impl CrateGraph {
+ pub fn add_crate_root(
+ &mut self,
+ root_file_id: FileId,
+ edition: Edition,
+ display_name: Option<CrateDisplayName>,
+ version: Option<String>,
+ cfg_options: CfgOptions,
+ potential_cfg_options: CfgOptions,
+ env: Env,
+ proc_macro: ProcMacroLoadResult,
+ is_proc_macro: bool,
+ origin: CrateOrigin,
+ ) -> CrateId {
+ let data = CrateData {
+ root_file_id,
+ edition,
+ version,
+ display_name,
+ cfg_options,
+ potential_cfg_options,
+ env,
+ proc_macro,
+ dependencies: Vec::new(),
+ origin,
+ is_proc_macro,
+ };
+ let crate_id = CrateId(self.arena.len() as u32);
+ let prev = self.arena.insert(crate_id, data);
+ assert!(prev.is_none());
+ crate_id
+ }
+
+ pub fn add_dep(
+ &mut self,
+ from: CrateId,
+ dep: Dependency,
+ ) -> Result<(), CyclicDependenciesError> {
+ let _p = profile::span("add_dep");
+
+ // Check if adding a dep from `from` to `to` creates a cycle. To figure
+ // that out, look for a path in the *opposite* direction, from `to` to
+ // `from`.
+ if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) {
+ let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
+ let err = CyclicDependenciesError { path };
+ assert!(err.from().0 == from && err.to().0 == dep.crate_id);
+ return Err(err);
+ }
+
+ self.arena.get_mut(&from).unwrap().add_dep(dep);
+ Ok(())
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.arena.is_empty()
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ {
+ self.arena.keys().copied()
+ }
+
+ /// Returns an iterator over all transitive dependencies of the given crate,
+ /// including the crate itself.
+ pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
+ let mut worklist = vec![of];
+ let mut deps = FxHashSet::default();
+
+ while let Some(krate) = worklist.pop() {
+ if !deps.insert(krate) {
+ continue;
+ }
+
+ worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
+ }
+
+ deps.into_iter()
+ }
+
+ /// Returns all transitive reverse dependencies of the given crate,
+ /// including the crate itself.
+ pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
+ let mut worklist = vec![of];
+ let mut rev_deps = FxHashSet::default();
+ rev_deps.insert(of);
+
+ let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
+ self.arena.iter().for_each(|(&krate, data)| {
+ data.dependencies
+ .iter()
+ .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate))
+ });
+
+ while let Some(krate) = worklist.pop() {
+ if let Some(krate_rev_deps) = inverted_graph.get(&krate) {
+ krate_rev_deps
+ .iter()
+ .copied()
+ .filter(|&rev_dep| rev_deps.insert(rev_dep))
+ .for_each(|rev_dep| worklist.push(rev_dep));
+ }
+ }
+
+ rev_deps.into_iter()
+ }
+
+ /// Returns all crates in the graph, sorted in topological order (ie. dependencies of a crate
+ /// come before the crate itself).
+ pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
+ let mut res = Vec::new();
+ let mut visited = FxHashSet::default();
+
+ for krate in self.arena.keys().copied() {
+ go(self, &mut visited, &mut res, krate);
+ }
+
+ return res;
+
+ fn go(
+ graph: &CrateGraph,
+ visited: &mut FxHashSet<CrateId>,
+ res: &mut Vec<CrateId>,
+ source: CrateId,
+ ) {
+ if !visited.insert(source) {
+ return;
+ }
+ for dep in graph[source].dependencies.iter() {
+ go(graph, visited, res, dep.crate_id)
+ }
+ res.push(source)
+ }
+ }
+
+ // FIXME: this only finds one crate with the given root; we could have multiple
+ pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
+ let (&crate_id, _) =
+ self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?;
+ Some(crate_id)
+ }
+
+ /// Extends this crate graph by adding a complete disjoint second crate
+ /// graph.
+ ///
+ /// The ids of the crates in the `other` graph are shifted by the return
+ /// amount.
+ pub fn extend(&mut self, other: CrateGraph) -> u32 {
+ let start = self.arena.len() as u32;
+ self.arena.extend(other.arena.into_iter().map(|(id, mut data)| {
+ let new_id = id.shift(start);
+ for dep in &mut data.dependencies {
+ dep.crate_id = dep.crate_id.shift(start);
+ }
+ (new_id, data)
+ }));
+ start
+ }
+
+ fn find_path(
+ &self,
+ visited: &mut FxHashSet<CrateId>,
+ from: CrateId,
+ to: CrateId,
+ ) -> Option<Vec<CrateId>> {
+ if !visited.insert(from) {
+ return None;
+ }
+
+ if from == to {
+ return Some(vec![to]);
+ }
+
+ for dep in &self[from].dependencies {
+ let crate_id = dep.crate_id;
+ if let Some(mut path) = self.find_path(visited, crate_id, to) {
+ path.push(from);
+ return Some(path);
+ }
+ }
+
+ None
+ }
+
+ // Work around for https://github.com/rust-lang/rust-analyzer/issues/6038.
+ // As hacky as it gets.
+ pub fn patch_cfg_if(&mut self) -> bool {
+ let cfg_if = self.hacky_find_crate("cfg_if");
+ let std = self.hacky_find_crate("std");
+ match (cfg_if, std) {
+ (Some(cfg_if), Some(std)) => {
+ self.arena.get_mut(&cfg_if).unwrap().dependencies.clear();
+ self.arena
+ .get_mut(&std)
+ .unwrap()
+ .dependencies
+ .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if));
+ true
+ }
+ _ => false,
+ }
+ }
+
+ fn hacky_find_crate(&self, display_name: &str) -> Option<CrateId> {
+ self.iter().find(|it| self[*it].display_name.as_deref() == Some(display_name))
+ }
+}
+
+impl ops::Index<CrateId> for CrateGraph {
+ type Output = CrateData;
+ fn index(&self, crate_id: CrateId) -> &CrateData {
+ &self.arena[&crate_id]
+ }
+}
+
+impl CrateId {
+ fn shift(self, amount: u32) -> CrateId {
+ CrateId(self.0 + amount)
+ }
+}
+
+impl CrateData {
+ fn add_dep(&mut self, dep: Dependency) {
+ self.dependencies.push(dep)
+ }
+}
+
+impl FromStr for Edition {
+ type Err = ParseEditionError;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let res = match s {
+ "2015" => Edition::Edition2015,
+ "2018" => Edition::Edition2018,
+ "2021" => Edition::Edition2021,
+ _ => return Err(ParseEditionError { invalid_input: s.to_string() }),
+ };
+ Ok(res)
+ }
+}
+
+impl fmt::Display for Edition {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(match self {
+ Edition::Edition2015 => "2015",
+ Edition::Edition2018 => "2018",
+ Edition::Edition2021 => "2021",
+ })
+ }
+}
+
+impl FromIterator<(String, String)> for Env {
+ fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self {
+ Env { entries: FromIterator::from_iter(iter) }
+ }
+}
+
+impl Env {
+ pub fn set(&mut self, env: &str, value: String) {
+ self.entries.insert(env.to_owned(), value);
+ }
+
+ pub fn get(&self, env: &str) -> Option<String> {
+ self.entries.get(env).cloned()
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = (&str, &str)> {
+ self.entries.iter().map(|(k, v)| (k.as_str(), v.as_str()))
+ }
+}
+
+#[derive(Debug)]
+pub struct ParseEditionError {
+ invalid_input: String,
+}
+
+impl fmt::Display for ParseEditionError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "invalid edition: {:?}", self.invalid_input)
+ }
+}
+
+impl std::error::Error for ParseEditionError {}
+
+#[derive(Debug)]
+pub struct CyclicDependenciesError {
+ path: Vec<(CrateId, Option<CrateDisplayName>)>,
+}
+
+impl CyclicDependenciesError {
+ fn from(&self) -> &(CrateId, Option<CrateDisplayName>) {
+ self.path.first().unwrap()
+ }
+ fn to(&self) -> &(CrateId, Option<CrateDisplayName>) {
+ self.path.last().unwrap()
+ }
+}
+
+impl fmt::Display for CyclicDependenciesError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let render = |(id, name): &(CrateId, Option<CrateDisplayName>)| match name {
+ Some(it) => format!("{}({:?})", it, id),
+ None => format!("{:?}", id),
+ };
+ let path = self.path.iter().rev().map(render).collect::<Vec<String>>().join(" -> ");
+ write!(
+ f,
+ "cyclic deps: {} -> {}, alternative path: {}",
+ render(self.from()),
+ render(self.to()),
+ path
+ )
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::CrateOrigin;
+
+ use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
+
+ #[test]
+ fn detect_cyclic_dependency_indirect() {
+ let mut graph = CrateGraph::default();
+ let crate1 = graph.add_crate_root(
+ FileId(1u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate2 = graph.add_crate_root(
+ FileId(2u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate3 = graph.add_crate_root(
+ FileId(3u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ assert!(graph
+ .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .is_ok());
+ assert!(graph
+ .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
+ .is_ok());
+ assert!(graph
+ .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1))
+ .is_err());
+ }
+
+ #[test]
+ fn detect_cyclic_dependency_direct() {
+ let mut graph = CrateGraph::default();
+ let crate1 = graph.add_crate_root(
+ FileId(1u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate2 = graph.add_crate_root(
+ FileId(2u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ assert!(graph
+ .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .is_ok());
+ assert!(graph
+ .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .is_err());
+ }
+
+ #[test]
+ fn it_works() {
+ let mut graph = CrateGraph::default();
+ let crate1 = graph.add_crate_root(
+ FileId(1u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate2 = graph.add_crate_root(
+ FileId(2u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate3 = graph.add_crate_root(
+ FileId(3u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ assert!(graph
+ .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .is_ok());
+ assert!(graph
+ .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
+ .is_ok());
+ }
+
+ #[test]
+ fn dashes_are_normalized() {
+ let mut graph = CrateGraph::default();
+ let crate1 = graph.add_crate_root(
+ FileId(1u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate2 = graph.add_crate_root(
+ FileId(2u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ assert!(graph
+ .add_dep(
+ crate1,
+ Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
+ )
+ .is_ok());
+ assert_eq!(
+ graph[crate1].dependencies,
+ vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2)]
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
new file mode 100644
index 000000000..2d0a95b09
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -0,0 +1,131 @@
+//! base_db defines basic database traits. The concrete DB is defined by ide.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod input;
+mod change;
+pub mod fixture;
+
+use std::{panic, sync::Arc};
+
+use rustc_hash::FxHashSet;
+use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
+
+pub use crate::{
+ change::Change,
+ input::{
+ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
+ Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
+ ProcMacroId, ProcMacroKind, ProcMacroLoadResult, SourceRoot, SourceRootId,
+ },
+};
+pub use salsa::{self, Cancelled};
+pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
+
+#[macro_export]
+macro_rules! impl_intern_key {
+ ($name:ident) => {
+ impl $crate::salsa::InternKey for $name {
+ fn from_intern_id(v: $crate::salsa::InternId) -> Self {
+ $name(v)
+ }
+ fn as_intern_id(&self) -> $crate::salsa::InternId {
+ self.0
+ }
+ }
+ };
+}
+
+pub trait Upcast<T: ?Sized> {
+ fn upcast(&self) -> &T;
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct FilePosition {
+ pub file_id: FileId,
+ pub offset: TextSize,
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
+pub struct FileRange {
+ pub file_id: FileId,
+ pub range: TextRange,
+}
+
+pub const DEFAULT_LRU_CAP: usize = 128;
+
+pub trait FileLoader {
+ /// Text of the file.
+ fn file_text(&self, file_id: FileId) -> Arc<String>;
+ fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
+}
+
+/// Database which stores all significant input facts: source code and project
+/// model. Everything else in rust-analyzer is derived from these queries.
+#[salsa::query_group(SourceDatabaseStorage)]
+pub trait SourceDatabase: FileLoader + std::fmt::Debug {
+ // Parses the file into the syntax tree.
+ #[salsa::invoke(parse_query)]
+ fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
+
+ /// The crate graph.
+ #[salsa::input]
+ fn crate_graph(&self) -> Arc<CrateGraph>;
+}
+
+fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+ let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
+ let text = db.file_text(file_id);
+ SourceFile::parse(&*text)
+}
+
+/// We don't want to give HIR knowledge of source roots, hence we extract these
+/// methods into a separate DB.
+#[salsa::query_group(SourceDatabaseExtStorage)]
+pub trait SourceDatabaseExt: SourceDatabase {
+ #[salsa::input]
+ fn file_text(&self, file_id: FileId) -> Arc<String>;
+ /// Path to a file, relative to the root of its source root.
+ /// Source root of the file.
+ #[salsa::input]
+ fn file_source_root(&self, file_id: FileId) -> SourceRootId;
+ /// Contents of the source root.
+ #[salsa::input]
+ fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
+
+ fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
+}
+
+fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
+ let graph = db.crate_graph();
+ let res = graph
+ .iter()
+ .filter(|&krate| {
+ let root_file = graph[krate].root_file_id;
+ db.file_source_root(root_file) == id
+ })
+ .collect();
+ Arc::new(res)
+}
+
+/// Silly workaround for cyclic deps between the traits
+pub struct FileLoaderDelegate<T>(pub T);
+
+impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
+ fn file_text(&self, file_id: FileId) -> Arc<String> {
+ SourceDatabaseExt::file_text(self.0, file_id)
+ }
+ fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+ // FIXME: this *somehow* should be platform agnostic...
+ let source_root = self.0.file_source_root(path.anchor);
+ let source_root = self.0.source_root(source_root);
+ source_root.file_set.resolve_path(path)
+ }
+
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ let _p = profile::span("relevant_crates");
+ let source_root = self.0.file_source_root(file_id);
+ self.0.source_root_crates(source_root)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
new file mode 100644
index 000000000..c9664a83a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "cfg"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+
+tt = { path = "../tt", version = "0.0.0" }
+
+[dev-dependencies]
+mbe = { path = "../mbe" }
+syntax = { path = "../syntax" }
+expect-test = "1.4.0"
+oorandom = "11.1.3"
+# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
+# build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
+# supports `arbitrary`. This way, we avoid feature unification.
+arbitrary = "1.1.0"
+derive_arbitrary = "1.1.0"
diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
new file mode 100644
index 000000000..fd9e31ed3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
@@ -0,0 +1,145 @@
+//! The condition expression used in `#[cfg(..)]` attributes.
+//!
+//! See: <https://doc.rust-lang.org/reference/conditional-compilation.html#conditional-compilation>
+
+use std::{fmt, slice::Iter as SliceIter};
+
+use tt::SmolStr;
+
+/// A simple configuration value passed in from the outside.
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub enum CfgAtom {
+ /// eg. `#[cfg(test)]`
+ Flag(SmolStr),
+ /// eg. `#[cfg(target_os = "linux")]`
+ ///
+ /// Note that a key can have multiple values that are all considered "active" at the same time.
+ /// For example, `#[cfg(target_feature = "sse")]` and `#[cfg(target_feature = "sse2")]`.
+ KeyValue { key: SmolStr, value: SmolStr },
+}
+
+impl CfgAtom {
+ /// Returns `true` when the atom comes from the target specification.
+ ///
+ /// If this returns `true`, then changing this atom requires changing the compilation target. If
+ /// it returns `false`, the atom might come from a build script or the build system.
+ pub fn is_target_defined(&self) -> bool {
+ match self {
+ CfgAtom::Flag(flag) => matches!(&**flag, "unix" | "windows"),
+ CfgAtom::KeyValue { key, value: _ } => matches!(
+ &**key,
+ "target_arch"
+ | "target_os"
+ | "target_env"
+ | "target_family"
+ | "target_endian"
+ | "target_pointer_width"
+ | "target_vendor" // NOTE: `target_feature` is left out since it can be configured via `-Ctarget-feature`
+ ),
+ }
+ }
+}
+
+impl fmt::Display for CfgAtom {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ CfgAtom::Flag(name) => name.fmt(f),
+ CfgAtom::KeyValue { key, value } => write!(f, "{} = {:?}", key, value),
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[cfg_attr(test, derive(derive_arbitrary::Arbitrary))]
+pub enum CfgExpr {
+ Invalid,
+ Atom(CfgAtom),
+ All(Vec<CfgExpr>),
+ Any(Vec<CfgExpr>),
+ Not(Box<CfgExpr>),
+}
+
+impl From<CfgAtom> for CfgExpr {
+ fn from(atom: CfgAtom) -> Self {
+ CfgExpr::Atom(atom)
+ }
+}
+
+impl CfgExpr {
+ pub fn parse(tt: &tt::Subtree) -> CfgExpr {
+ next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid)
+ }
+ /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
+ pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
+ match self {
+ CfgExpr::Invalid => None,
+ CfgExpr::Atom(atom) => Some(query(atom)),
+ CfgExpr::All(preds) => {
+ preds.iter().try_fold(true, |s, pred| Some(s && pred.fold(query)?))
+ }
+ CfgExpr::Any(preds) => {
+ preds.iter().try_fold(false, |s, pred| Some(s || pred.fold(query)?))
+ }
+ CfgExpr::Not(pred) => pred.fold(query).map(|s| !s),
+ }
+ }
+}
+
+fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option<CfgExpr> {
+ let name = match it.next() {
+ None => return None,
+ Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(),
+ Some(_) => return Some(CfgExpr::Invalid),
+ };
+
+ // Peek
+ let ret = match it.as_slice().first() {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
+ match it.as_slice().get(1) {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => {
+ it.next();
+ it.next();
+ // FIXME: escape? raw string?
+ let value =
+ SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"'));
+ CfgAtom::KeyValue { key: name, value }.into()
+ }
+ _ => return Some(CfgExpr::Invalid),
+ }
+ }
+ Some(tt::TokenTree::Subtree(subtree)) => {
+ it.next();
+ let mut sub_it = subtree.token_trees.iter();
+ let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)).collect();
+ match name.as_str() {
+ "all" => CfgExpr::All(subs),
+ "any" => CfgExpr::Any(subs),
+ "not" => CfgExpr::Not(Box::new(subs.pop().unwrap_or(CfgExpr::Invalid))),
+ _ => CfgExpr::Invalid,
+ }
+ }
+ _ => CfgAtom::Flag(name).into(),
+ };
+
+ // Eat comma separator
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = it.as_slice().first() {
+ if punct.char == ',' {
+ it.next();
+ }
+ }
+ Some(ret)
+}
+
+#[cfg(test)]
+impl arbitrary::Arbitrary<'_> for CfgAtom {
+ fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> {
+ if u.arbitrary()? {
+ Ok(CfgAtom::Flag(String::arbitrary(u)?.into()))
+ } else {
+ Ok(CfgAtom::KeyValue {
+ key: String::arbitrary(u)?.into(),
+ value: String::arbitrary(u)?.into(),
+ })
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/cfg/src/dnf.rs b/src/tools/rust-analyzer/crates/cfg/src/dnf.rs
new file mode 100644
index 000000000..fd80e1ebe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/src/dnf.rs
@@ -0,0 +1,345 @@
+//! Disjunctive Normal Form construction.
+//!
+//! Algorithm from <https://www.cs.drexel.edu/~jjohnson/2015-16/fall/CS270/Lectures/3/dnf.pdf>,
+//! which would have been much easier to read if it used pattern matching. It's also missing the
+//! entire "distribute ANDs over ORs" part, which is not trivial. Oh well.
+//!
+//! This is currently both messy and inefficient. Feel free to improve, there are unit tests.
+
+use std::fmt::{self, Write};
+
+use rustc_hash::FxHashSet;
+
+use crate::{CfgAtom, CfgDiff, CfgExpr, CfgOptions, InactiveReason};
+
+/// A `#[cfg]` directive in Disjunctive Normal Form (DNF).
+pub struct DnfExpr {
+ conjunctions: Vec<Conjunction>,
+}
+
+struct Conjunction {
+ literals: Vec<Literal>,
+}
+
+struct Literal {
+ negate: bool,
+ var: Option<CfgAtom>, // None = Invalid
+}
+
+impl DnfExpr {
+ pub fn new(expr: CfgExpr) -> Self {
+ let builder = Builder { expr: DnfExpr { conjunctions: Vec::new() } };
+
+ builder.lower(expr)
+ }
+
+ /// Computes a list of present or absent atoms in `opts` that cause this expression to evaluate
+ /// to `false`.
+ ///
+ /// Note that flipping a subset of these atoms might be sufficient to make the whole expression
+ /// evaluate to `true`. For that, see `compute_enable_hints`.
+ ///
+ /// Returns `None` when `self` is already true, or contains errors.
+ pub fn why_inactive(&self, opts: &CfgOptions) -> Option<InactiveReason> {
+ let mut res = InactiveReason { enabled: Vec::new(), disabled: Vec::new() };
+
+ for conj in &self.conjunctions {
+ let mut conj_is_true = true;
+ for lit in &conj.literals {
+ let atom = lit.var.as_ref()?;
+ let enabled = opts.enabled.contains(atom);
+ if lit.negate == enabled {
+ // Literal is false, but needs to be true for this conjunction.
+ conj_is_true = false;
+
+ if enabled {
+ res.enabled.push(atom.clone());
+ } else {
+ res.disabled.push(atom.clone());
+ }
+ }
+ }
+
+ if conj_is_true {
+ // This expression is not actually inactive.
+ return None;
+ }
+ }
+
+ res.enabled.sort_unstable();
+ res.enabled.dedup();
+ res.disabled.sort_unstable();
+ res.disabled.dedup();
+ Some(res)
+ }
+
+ /// Returns `CfgDiff` objects that would enable this directive if applied to `opts`.
+ pub fn compute_enable_hints<'a>(
+ &'a self,
+ opts: &'a CfgOptions,
+ ) -> impl Iterator<Item = CfgDiff> + 'a {
+ // A cfg is enabled if any of `self.conjunctions` evaluate to `true`.
+
+ self.conjunctions.iter().filter_map(move |conj| {
+ let mut enable = FxHashSet::default();
+ let mut disable = FxHashSet::default();
+ for lit in &conj.literals {
+ let atom = lit.var.as_ref()?;
+ let enabled = opts.enabled.contains(atom);
+ if lit.negate && enabled {
+ disable.insert(atom.clone());
+ }
+ if !lit.negate && !enabled {
+ enable.insert(atom.clone());
+ }
+ }
+
+ // Check that this actually makes `conj` true.
+ for lit in &conj.literals {
+ let atom = lit.var.as_ref()?;
+ let enabled = enable.contains(atom)
+ || (opts.enabled.contains(atom) && !disable.contains(atom));
+ if enabled == lit.negate {
+ return None;
+ }
+ }
+
+ if enable.is_empty() && disable.is_empty() {
+ return None;
+ }
+
+ let mut diff = CfgDiff {
+ enable: enable.into_iter().collect(),
+ disable: disable.into_iter().collect(),
+ };
+
+ // Undo the FxHashMap randomization for consistent output.
+ diff.enable.sort_unstable();
+ diff.disable.sort_unstable();
+
+ Some(diff)
+ })
+ }
+}
+
+impl fmt::Display for DnfExpr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.conjunctions.len() != 1 {
+ f.write_str("any(")?;
+ }
+ for (i, conj) in self.conjunctions.iter().enumerate() {
+ if i != 0 {
+ f.write_str(", ")?;
+ }
+
+ conj.fmt(f)?;
+ }
+ if self.conjunctions.len() != 1 {
+ f.write_char(')')?;
+ }
+
+ Ok(())
+ }
+}
+
+impl Conjunction {
+ fn new(parts: Vec<CfgExpr>) -> Self {
+ let mut literals = Vec::new();
+ for part in parts {
+ match part {
+ CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => {
+ literals.push(Literal::new(part));
+ }
+ CfgExpr::All(conj) => {
+ // Flatten.
+ literals.extend(Conjunction::new(conj).literals);
+ }
+ CfgExpr::Any(_) => unreachable!("disjunction in conjunction"),
+ }
+ }
+
+ Self { literals }
+ }
+}
+
+impl fmt::Display for Conjunction {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.literals.len() != 1 {
+ f.write_str("all(")?;
+ }
+ for (i, lit) in self.literals.iter().enumerate() {
+ if i != 0 {
+ f.write_str(", ")?;
+ }
+
+ lit.fmt(f)?;
+ }
+ if self.literals.len() != 1 {
+ f.write_str(")")?;
+ }
+
+ Ok(())
+ }
+}
+
+impl Literal {
+ fn new(expr: CfgExpr) -> Self {
+ match expr {
+ CfgExpr::Invalid => Self { negate: false, var: None },
+ CfgExpr::Atom(atom) => Self { negate: false, var: Some(atom) },
+ CfgExpr::Not(expr) => match *expr {
+ CfgExpr::Invalid => Self { negate: true, var: None },
+ CfgExpr::Atom(atom) => Self { negate: true, var: Some(atom) },
+ _ => unreachable!("non-atom {:?}", expr),
+ },
+ CfgExpr::Any(_) | CfgExpr::All(_) => unreachable!("non-literal {:?}", expr),
+ }
+ }
+}
+
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.negate {
+ write!(f, "not(")?;
+ }
+
+ match &self.var {
+ Some(var) => var.fmt(f)?,
+ None => f.write_str("<invalid>")?,
+ }
+
+ if self.negate {
+ f.write_char(')')?;
+ }
+
+ Ok(())
+ }
+}
+
+struct Builder {
+ expr: DnfExpr,
+}
+
+impl Builder {
+ fn lower(mut self, expr: CfgExpr) -> DnfExpr {
+ let expr = make_nnf(expr);
+ let expr = make_dnf(expr);
+
+ match expr {
+ CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => {
+ self.expr.conjunctions.push(Conjunction::new(vec![expr]));
+ }
+ CfgExpr::All(conj) => {
+ self.expr.conjunctions.push(Conjunction::new(conj));
+ }
+ CfgExpr::Any(mut disj) => {
+ disj.reverse();
+ while let Some(conj) = disj.pop() {
+ match conj {
+ CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::All(_) | CfgExpr::Not(_) => {
+ self.expr.conjunctions.push(Conjunction::new(vec![conj]));
+ }
+ CfgExpr::Any(inner_disj) => {
+ // Flatten.
+ disj.extend(inner_disj.into_iter().rev());
+ }
+ }
+ }
+ }
+ }
+
+ self.expr
+ }
+}
+
+fn make_dnf(expr: CfgExpr) -> CfgExpr {
+ match expr {
+ CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => expr,
+ CfgExpr::Any(e) => flatten(CfgExpr::Any(e.into_iter().map(make_dnf).collect())),
+ CfgExpr::All(e) => {
+ let e = e.into_iter().map(make_dnf).collect::<Vec<_>>();
+
+ flatten(CfgExpr::Any(distribute_conj(&e)))
+ }
+ }
+}
+
+/// Turns a conjunction of expressions into a disjunction of expressions.
+fn distribute_conj(conj: &[CfgExpr]) -> Vec<CfgExpr> {
+ fn go(out: &mut Vec<CfgExpr>, with: &mut Vec<CfgExpr>, rest: &[CfgExpr]) {
+ match rest {
+ [head, tail @ ..] => match head {
+ CfgExpr::Any(disj) => {
+ for part in disj {
+ with.push(part.clone());
+ go(out, with, tail);
+ with.pop();
+ }
+ }
+ _ => {
+ with.push(head.clone());
+ go(out, with, tail);
+ with.pop();
+ }
+ },
+ _ => {
+ // Turn accumulated parts into a new conjunction.
+ out.push(CfgExpr::All(with.clone()));
+ }
+ }
+ }
+
+ let mut out = Vec::new(); // contains only `all()`
+ let mut with = Vec::new();
+
+ go(&mut out, &mut with, conj);
+
+ out
+}
+
+fn make_nnf(expr: CfgExpr) -> CfgExpr {
+ match expr {
+ CfgExpr::Invalid | CfgExpr::Atom(_) => expr,
+ CfgExpr::Any(expr) => CfgExpr::Any(expr.into_iter().map(make_nnf).collect()),
+ CfgExpr::All(expr) => CfgExpr::All(expr.into_iter().map(make_nnf).collect()),
+ CfgExpr::Not(operand) => match *operand {
+ CfgExpr::Invalid | CfgExpr::Atom(_) => CfgExpr::Not(operand.clone()), // Original negated expr
+ CfgExpr::Not(expr) => {
+ // Remove double negation.
+ make_nnf(*expr)
+ }
+ // Convert negated conjunction/disjunction using DeMorgan's Law.
+ CfgExpr::Any(inner) => CfgExpr::All(
+ inner.into_iter().map(|expr| make_nnf(CfgExpr::Not(Box::new(expr)))).collect(),
+ ),
+ CfgExpr::All(inner) => CfgExpr::Any(
+ inner.into_iter().map(|expr| make_nnf(CfgExpr::Not(Box::new(expr)))).collect(),
+ ),
+ },
+ }
+}
+
+/// Collapses nested `any()` and `all()` predicates.
+fn flatten(expr: CfgExpr) -> CfgExpr {
+ match expr {
+ CfgExpr::All(inner) => CfgExpr::All(
+ inner
+ .into_iter()
+ .flat_map(|e| match e {
+ CfgExpr::All(inner) => inner,
+ _ => vec![e],
+ })
+ .collect(),
+ ),
+ CfgExpr::Any(inner) => CfgExpr::Any(
+ inner
+ .into_iter()
+ .flat_map(|e| match e {
+ CfgExpr::Any(inner) => inner,
+ _ => vec![e],
+ })
+ .collect(),
+ ),
+ _ => expr,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
new file mode 100644
index 000000000..d78ef4fb1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
@@ -0,0 +1,202 @@
+//! cfg defines conditional compiling options, `cfg` attribute parser and evaluator
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod cfg_expr;
+mod dnf;
+#[cfg(test)]
+mod tests;
+
+use std::fmt;
+
+use rustc_hash::FxHashSet;
+use tt::SmolStr;
+
+pub use cfg_expr::{CfgAtom, CfgExpr};
+pub use dnf::DnfExpr;
+
+/// Configuration options used for conditional compilation on items with `cfg` attributes.
+/// We have two kind of options in different namespaces: atomic options like `unix`, and
+/// key-value options like `target_arch="x86"`.
+///
+/// Note that for key-value options, one key can have multiple values (but not none).
+/// `feature` is an example. We have both `feature="foo"` and `feature="bar"` if features
+/// `foo` and `bar` are both enabled. And here, we store key-value options as a set of tuple
+/// of key and value in `key_values`.
+///
+/// See: <https://doc.rust-lang.org/reference/conditional-compilation.html#set-configuration-options>
+#[derive(Clone, PartialEq, Eq, Default)]
+pub struct CfgOptions {
+ enabled: FxHashSet<CfgAtom>,
+}
+
+impl fmt::Debug for CfgOptions {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut items = self
+ .enabled
+ .iter()
+ .map(|atom| match atom {
+ CfgAtom::Flag(it) => it.to_string(),
+ CfgAtom::KeyValue { key, value } => format!("{}={}", key, value),
+ })
+ .collect::<Vec<_>>();
+ items.sort();
+ f.debug_tuple("CfgOptions").field(&items).finish()
+ }
+}
+
+impl CfgOptions {
+ pub fn check(&self, cfg: &CfgExpr) -> Option<bool> {
+ cfg.fold(&|atom| self.enabled.contains(atom))
+ }
+
+ pub fn insert_atom(&mut self, key: SmolStr) {
+ self.enabled.insert(CfgAtom::Flag(key));
+ }
+
+ pub fn insert_key_value(&mut self, key: SmolStr, value: SmolStr) {
+ self.enabled.insert(CfgAtom::KeyValue { key, value });
+ }
+
+ pub fn apply_diff(&mut self, diff: CfgDiff) {
+ for atom in diff.enable {
+ self.enabled.insert(atom);
+ }
+
+ for atom in diff.disable {
+ self.enabled.remove(&atom);
+ }
+ }
+
+ pub fn get_cfg_keys(&self) -> impl Iterator<Item = &SmolStr> {
+ self.enabled.iter().map(|x| match x {
+ CfgAtom::Flag(key) => key,
+ CfgAtom::KeyValue { key, .. } => key,
+ })
+ }
+
+ pub fn get_cfg_values<'a>(
+ &'a self,
+ cfg_key: &'a str,
+ ) -> impl Iterator<Item = &'a SmolStr> + 'a {
+ self.enabled.iter().filter_map(move |x| match x {
+ CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value),
+ _ => None,
+ })
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct CfgDiff {
+ // Invariants: No duplicates, no atom that's both in `enable` and `disable`.
+ enable: Vec<CfgAtom>,
+ disable: Vec<CfgAtom>,
+}
+
+impl CfgDiff {
+ /// Create a new CfgDiff. Will return None if the same item appears more than once in the set
+ /// of both.
+ pub fn new(enable: Vec<CfgAtom>, disable: Vec<CfgAtom>) -> Option<CfgDiff> {
+ let mut occupied = FxHashSet::default();
+ for item in enable.iter().chain(disable.iter()) {
+ if !occupied.insert(item) {
+ // was present
+ return None;
+ }
+ }
+
+ Some(CfgDiff { enable, disable })
+ }
+
+ /// Returns the total number of atoms changed by this diff.
+ pub fn len(&self) -> usize {
+ self.enable.len() + self.disable.len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl fmt::Display for CfgDiff {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if !self.enable.is_empty() {
+ f.write_str("enable ")?;
+ for (i, atom) in self.enable.iter().enumerate() {
+ let sep = match i {
+ 0 => "",
+ _ if i == self.enable.len() - 1 => " and ",
+ _ => ", ",
+ };
+ f.write_str(sep)?;
+
+ atom.fmt(f)?;
+ }
+
+ if !self.disable.is_empty() {
+ f.write_str("; ")?;
+ }
+ }
+
+ if !self.disable.is_empty() {
+ f.write_str("disable ")?;
+ for (i, atom) in self.disable.iter().enumerate() {
+ let sep = match i {
+ 0 => "",
+ _ if i == self.enable.len() - 1 => " and ",
+ _ => ", ",
+ };
+ f.write_str(sep)?;
+
+ atom.fmt(f)?;
+ }
+ }
+
+ Ok(())
+ }
+}
+
+pub struct InactiveReason {
+ enabled: Vec<CfgAtom>,
+ disabled: Vec<CfgAtom>,
+}
+
+impl fmt::Display for InactiveReason {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if !self.enabled.is_empty() {
+ for (i, atom) in self.enabled.iter().enumerate() {
+ let sep = match i {
+ 0 => "",
+ _ if i == self.enabled.len() - 1 => " and ",
+ _ => ", ",
+ };
+ f.write_str(sep)?;
+
+ atom.fmt(f)?;
+ }
+ let is_are = if self.enabled.len() == 1 { "is" } else { "are" };
+ write!(f, " {} enabled", is_are)?;
+
+ if !self.disabled.is_empty() {
+ f.write_str(" and ")?;
+ }
+ }
+
+ if !self.disabled.is_empty() {
+ for (i, atom) in self.disabled.iter().enumerate() {
+ let sep = match i {
+ 0 => "",
+ _ if i == self.disabled.len() - 1 => " and ",
+ _ => ", ",
+ };
+ f.write_str(sep)?;
+
+ atom.fmt(f)?;
+ }
+ let is_are = if self.disabled.len() == 1 { "is" } else { "are" };
+ write!(f, " {} disabled", is_are)?;
+ }
+
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
new file mode 100644
index 000000000..bdc3f854e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
@@ -0,0 +1,224 @@
+use arbitrary::{Arbitrary, Unstructured};
+use expect_test::{expect, Expect};
+use mbe::syntax_node_to_token_tree;
+use syntax::{ast, AstNode};
+
+use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
+
+fn assert_parse_result(input: &str, expected: CfgExpr) {
+ let (tt, _) = {
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ syntax_node_to_token_tree(tt.syntax())
+ };
+ let cfg = CfgExpr::parse(&tt);
+ assert_eq!(cfg, expected);
+}
+
+fn check_dnf(input: &str, expect: Expect) {
+ let (tt, _) = {
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ syntax_node_to_token_tree(tt.syntax())
+ };
+ let cfg = CfgExpr::parse(&tt);
+ let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
+ expect.assert_eq(&actual);
+}
+
+fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
+ let (tt, _) = {
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ syntax_node_to_token_tree(tt.syntax())
+ };
+ let cfg = CfgExpr::parse(&tt);
+ let dnf = DnfExpr::new(cfg);
+ let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
+ expect.assert_eq(&why_inactive);
+}
+
+#[track_caller]
+fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
+ let (tt, _) = {
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ syntax_node_to_token_tree(tt.syntax())
+ };
+ let cfg = CfgExpr::parse(&tt);
+ let dnf = DnfExpr::new(cfg);
+ let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
+ assert_eq!(hints, expected_hints);
+}
+
// Exercises `CfgExpr::parse`: atoms, key-value pairs, `not`/`all`/`any`
// combinators, trailing commas, and malformed input (lowered to `Invalid`).
#[test]
fn test_cfg_expr_parser() {
    assert_parse_result("#![cfg(foo)]", CfgAtom::Flag("foo".into()).into());
    assert_parse_result("#![cfg(foo,)]", CfgAtom::Flag("foo".into()).into());
    assert_parse_result(
        "#![cfg(not(foo))]",
        CfgExpr::Not(Box::new(CfgAtom::Flag("foo".into()).into())),
    );
    // `foo(bar)` is not a valid cfg predicate.
    assert_parse_result("#![cfg(foo(bar))]", CfgExpr::Invalid);

    // Only take the first
    assert_parse_result(r#"#![cfg(foo, bar = "baz")]"#, CfgAtom::Flag("foo".into()).into());

    assert_parse_result(
        r#"#![cfg(all(foo, bar = "baz"))]"#,
        CfgExpr::All(vec![
            CfgAtom::Flag("foo".into()).into(),
            CfgAtom::KeyValue { key: "bar".into(), value: "baz".into() }.into(),
        ]),
    );

    // Malformed sub-expressions become `Invalid` without poisoning siblings.
    assert_parse_result(
        r#"#![cfg(any(not(), all(), , bar = "baz",))]"#,
        CfgExpr::Any(vec![
            CfgExpr::Not(Box::new(CfgExpr::Invalid)),
            CfgExpr::All(vec![]),
            CfgExpr::Invalid,
            CfgAtom::KeyValue { key: "bar".into(), value: "baz".into() }.into(),
        ]),
    );
}
+
// Minimal DNF conversions: atoms, negation, and double-negation elimination.
#[test]
fn smoke() {
    check_dnf("#![cfg(test)]", expect![[r#"#![cfg(test)]"#]]);
    check_dnf("#![cfg(not(test))]", expect![[r#"#![cfg(not(test))]"#]]);
    check_dnf("#![cfg(not(not(test)))]", expect![[r#"#![cfg(test)]"#]]);

    check_dnf("#![cfg(all(a, b))]", expect![[r#"#![cfg(all(a, b))]"#]]);
    check_dnf("#![cfg(any(a, b))]", expect![[r#"#![cfg(any(a, b))]"#]]);

    check_dnf("#![cfg(not(a))]", expect![[r#"#![cfg(not(a))]"#]]);
}
+
// Distribution of `all` over `any`, including the cross-product case.
#[test]
fn distribute() {
    check_dnf("#![cfg(all(any(a, b), c))]", expect![[r#"#![cfg(any(all(a, c), all(b, c)))]"#]]);
    check_dnf("#![cfg(all(c, any(a, b)))]", expect![[r#"#![cfg(any(all(c, a), all(c, b)))]"#]]);
    check_dnf(
        "#![cfg(all(any(a, b), any(c, d)))]",
        expect![[r#"#![cfg(any(all(a, c), all(a, d), all(b, c), all(b, d)))]"#]],
    );

    check_dnf(
        "#![cfg(all(any(a, b, c), any(d, e, f), g))]",
        expect![[
            r#"#![cfg(any(all(a, d, g), all(a, e, g), all(a, f, g), all(b, d, g), all(b, e, g), all(b, f, g), all(c, d, g), all(c, e, g), all(c, f, g)))]"#
        ]],
    );
}
+
// De Morgan's laws: negation pushed inward over `all`/`any`.
#[test]
fn demorgan() {
    check_dnf("#![cfg(not(all(a, b)))]", expect![[r#"#![cfg(any(not(a), not(b)))]"#]]);
    check_dnf("#![cfg(not(any(a, b)))]", expect![[r#"#![cfg(all(not(a), not(b)))]"#]]);

    check_dnf("#![cfg(not(all(not(a), b)))]", expect![[r#"#![cfg(any(a, not(b)))]"#]]);
    check_dnf("#![cfg(not(any(a, not(b))))]", expect![[r#"#![cfg(all(not(a), b))]"#]]);
}
+
// Flattening of redundantly nested `all`/`any` combinators.
#[test]
fn nested() {
    check_dnf("#![cfg(all(any(a), not(all(any(b)))))]", expect![[r#"#![cfg(all(a, not(b)))]"#]]);

    check_dnf("#![cfg(any(any(a, b)))]", expect![[r#"#![cfg(any(a, b))]"#]]);
    check_dnf("#![cfg(not(any(any(a, b))))]", expect![[r#"#![cfg(all(not(a), not(b)))]"#]]);
    check_dnf("#![cfg(all(all(a, b)))]", expect![[r#"#![cfg(all(a, b))]"#]]);
    check_dnf("#![cfg(not(all(all(a, b))))]", expect![[r#"#![cfg(any(not(a), not(b)))]"#]]);
}
+
// Regression cases around the always-false `any()` absorbing its context.
#[test]
fn regression() {
    check_dnf("#![cfg(all(not(not(any(any(any()))))))]", expect![[r##"#![cfg(any())]"##]]);
    check_dnf("#![cfg(all(any(all(any()))))]", expect![[r##"#![cfg(any())]"##]]);
    check_dnf("#![cfg(all(all(any())))]", expect![[r##"#![cfg(any())]"##]]);

    check_dnf("#![cfg(all(all(any(), x)))]", expect![[r##"#![cfg(any())]"##]]);
    check_dnf("#![cfg(all(all(any()), x))]", expect![[r##"#![cfg(any())]"##]]);
    check_dnf("#![cfg(all(all(any(x))))]", expect![[r##"#![cfg(x)]"##]]);
    check_dnf("#![cfg(all(all(any(x), x)))]", expect![[r##"#![cfg(all(x, x))]"##]]);
}
+
// Enable/disable hints before and after `test` is turned on in the options.
#[test]
fn hints() {
    let mut opts = CfgOptions::default();

    check_enable_hints("#![cfg(test)]", &opts, &["enable test"]);
    // Already active, so no hint is produced.
    check_enable_hints("#![cfg(not(test))]", &opts, &[]);

    check_enable_hints("#![cfg(any(a, b))]", &opts, &["enable a", "enable b"]);
    check_enable_hints("#![cfg(any(b, a))]", &opts, &["enable b", "enable a"]);

    check_enable_hints("#![cfg(all(a, b))]", &opts, &["enable a and b"]);

    opts.insert_atom("test".into());

    check_enable_hints("#![cfg(test)]", &opts, &[]);
    check_enable_hints("#![cfg(not(test))]", &opts, &["disable test"]);
}
+
/// Tests that we don't suggest hints for cfgs that express an inconsistent formula.
#[test]
fn hints_impossible() {
    let mut opts = CfgOptions::default();

    // `test AND NOT test` can never hold, regardless of the options.
    check_enable_hints("#![cfg(all(test, not(test)))]", &opts, &[]);

    opts.insert_atom("test".into());

    check_enable_hints("#![cfg(all(test, not(test)))]", &opts, &[]);
}
+
// Rendering of `InactiveReason`, including singular/plural agreement and
// the combined "enabled and disabled" sentence.
#[test]
fn why_inactive() {
    let mut opts = CfgOptions::default();
    opts.insert_atom("test".into());
    opts.insert_atom("test2".into());

    check_why_inactive("#![cfg(a)]", &opts, expect![["a is disabled"]]);
    check_why_inactive("#![cfg(not(test))]", &opts, expect![["test is enabled"]]);

    check_why_inactive(
        "#![cfg(all(not(test), not(test2)))]",
        &opts,
        expect![["test and test2 are enabled"]],
    );
    check_why_inactive("#![cfg(all(a, b))]", &opts, expect![["a and b are disabled"]]);
    check_why_inactive(
        "#![cfg(all(not(test), a))]",
        &opts,
        expect![["test is enabled and a is disabled"]],
    );
    // `test2` is both required and enabled, so it is not part of the reason.
    check_why_inactive(
        "#![cfg(all(not(test), test2, a))]",
        &opts,
        expect![["test is enabled and a is disabled"]],
    );
    check_why_inactive(
        "#![cfg(all(not(test), not(test2), a))]",
        &opts,
        expect![["test and test2 are enabled and a is disabled"]],
    );
}
+
/// Fuzz-style smoke test: feeds deterministic pseudo-random bytes into
/// `arbitrary` to build `CfgExpr`s and checks that DNF conversion never
/// panics. The fixed seed keeps the run reproducible.
#[test]
fn proptest() {
    const REPEATS: usize = 512;
    const BUF_LEN: usize = 512;

    let mut rng = oorandom::Rand32::new(123456789);
    let mut buf = Vec::new();
    for _ in 0..REPEATS {
        // Refill the entropy buffer 4 bytes at a time.
        buf.clear();
        while buf.len() < BUF_LEN {
            let word = rng.rand_u32();
            buf.extend_from_slice(&word.to_ne_bytes());
        }

        let mut unstructured = Unstructured::new(&buf);
        let expr = CfgExpr::arbitrary(&mut unstructured).unwrap();
        DnfExpr::new(expr);
    }
}
diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
new file mode 100644
index 000000000..d3d180ece
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
@@ -0,0 +1,22 @@
# Manifest for `flycheck`: runs `cargo check` (or a custom command) in the
# background and turns its JSON output into diagnostics.
[package]
name = "flycheck"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
edition = "2021"
rust-version = "1.57"

[lib]
doctest = false

[dependencies]
crossbeam-channel = "0.5.5"
tracing = "0.1.35"
cargo_metadata = "0.15.0"
serde = { version = "1.0.137", features = ["derive"] }
serde_json = "1.0.81"
jod-thread = "0.1.2"

# Workspace-local crates.
toolchain = { path = "../toolchain", version = "0.0.0" }
stdx = { path = "../stdx", version = "0.0.0" }
paths = { path = "../paths", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
new file mode 100644
index 000000000..4e8bc881a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
@@ -0,0 +1,396 @@
+//! Flycheck provides the functionality needed to run `cargo check` or
+//! another compatible command (f.x. clippy) in a background thread and provide
+//! LSP diagnostics based on the output of the command.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ fmt, io,
+ process::{ChildStderr, ChildStdout, Command, Stdio},
+ time::Duration,
+};
+
+use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use paths::AbsPathBuf;
+use serde::Deserialize;
+use stdx::{process::streaming_output, JodChild};
+
+pub use cargo_metadata::diagnostic::{
+ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
+ DiagnosticSpanMacroExpansion,
+};
+
/// How flycheck should invoke the checker: either a cargo subcommand or a
/// fully custom command line.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FlycheckConfig {
    /// Run a cargo subcommand (e.g. `check` or `clippy`) on the workspace.
    CargoCommand {
        /// The cargo subcommand to invoke.
        command: String,
        /// Value for `--target`, if any.
        target_triple: Option<String>,
        all_targets: bool,
        no_default_features: bool,
        all_features: bool,
        /// Features passed via `--features` (ignored when `all_features` is set).
        features: Vec<String>,
        /// Extra arguments appended verbatim to the command line.
        extra_args: Vec<String>,
    },
    /// Run an arbitrary executable with the given arguments.
    CustomCommand {
        command: String,
        args: Vec<String>,
    },
}
+
+impl fmt::Display for FlycheckConfig {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {}", command),
+ FlycheckConfig::CustomCommand { command, args } => {
+ write!(f, "{} {}", command, args.join(" "))
+ }
+ }
+ }
+}
+
/// Flycheck wraps the shared state and communication machinery used for
/// running `cargo check` (or other compatible command) and providing
/// diagnostics based on the output.
/// The spawned thread is shut down when this struct is dropped.
#[derive(Debug)]
pub struct FlycheckHandle {
    // XXX: drop order is significant: dropping `sender` closes the channel,
    // which makes the actor's `run` loop exit so `_thread` can be joined.
    sender: Sender<Restart>,
    _thread: jod_thread::JoinHandle,
}
+
+impl FlycheckHandle {
+ pub fn spawn(
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ ) -> FlycheckHandle {
+ let actor = FlycheckActor::new(id, sender, config, workspace_root);
+ let (sender, receiver) = unbounded::<Restart>();
+ let thread = jod_thread::Builder::new()
+ .name("Flycheck".to_owned())
+ .spawn(move || actor.run(receiver))
+ .expect("failed to spawn thread");
+ FlycheckHandle { sender, _thread: thread }
+ }
+
+ /// Schedule a re-start of the cargo check worker.
+ pub fn update(&self) {
+ self.sender.send(Restart).unwrap();
+ }
+}
+
/// Messages the flycheck worker sends back to its owner.
pub enum Message {
    /// Request adding a diagnostic with fixes included to a file
    AddDiagnostic { workspace_root: AbsPathBuf, diagnostic: Diagnostic },

    /// Request check progress notification to client
    Progress {
        /// Flycheck instance ID
        id: usize,
        progress: Progress,
    },
}
+
+impl fmt::Debug for Message {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Message::AddDiagnostic { workspace_root, diagnostic } => f
+ .debug_struct("AddDiagnostic")
+ .field("workspace_root", workspace_root)
+ .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
+ .finish(),
+ Message::Progress { id, progress } => {
+ f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
+ }
+ }
+ }
+}
+
/// Lifecycle notifications for one flycheck run.
#[derive(Debug)]
pub enum Progress {
    DidStart,
    /// A crate finished checking; carries the crate name.
    DidCheckCrate(String),
    /// The whole run finished, successfully or not.
    DidFinish(io::Result<()>),
    DidCancel,
}
+
/// Message asking the actor to (re-)run the check command.
struct Restart;
+
/// State of the background worker: the configuration it runs with and the
/// currently running cargo process, if any.
struct FlycheckActor {
    /// Flycheck instance ID, echoed back in progress messages.
    id: usize,
    /// Callback used to deliver [`Message`]s to the owner.
    sender: Box<dyn Fn(Message) + Send>,
    config: FlycheckConfig,
    workspace_root: AbsPathBuf,
    /// CargoHandle exists to wrap around the communication needed to be able to
    /// run `cargo check` without blocking. Currently the Rust standard library
    /// doesn't provide a way to read sub-process output without blocking, so we
    /// have to wrap sub-processes output handling in a thread and pass messages
    /// back over a channel.
    cargo_handle: Option<CargoHandle>,
}
+
/// Unified event the actor loop selects over.
enum Event {
    /// A restart was requested by the owner.
    Restart(Restart),
    /// Output from the running cargo process; `None` means the channel
    /// closed, i.e. the process finished.
    CheckEvent(Option<CargoMessage>),
}
+
+impl FlycheckActor {
+ fn new(
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ ) -> FlycheckActor {
+ FlycheckActor { id, sender, config, workspace_root, cargo_handle: None }
+ }
+ fn progress(&self, progress: Progress) {
+ self.send(Message::Progress { id: self.id, progress });
+ }
+ fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
+ let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
+ select! {
+ recv(inbox) -> msg => msg.ok().map(Event::Restart),
+ recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
+ }
+ }
+ fn run(mut self, inbox: Receiver<Restart>) {
+ while let Some(event) = self.next_event(&inbox) {
+ match event {
+ Event::Restart(Restart) => {
+ // Cancel the previously spawned process
+ self.cancel_check_process();
+ while let Ok(Restart) = inbox.recv_timeout(Duration::from_millis(50)) {}
+
+ let command = self.check_command();
+ tracing::debug!(?command, "will restart flycheck");
+ match CargoHandle::spawn(command) {
+ Ok(cargo_handle) => {
+ tracing::debug!(
+ command = ?self.check_command(),
+ "did restart flycheck"
+ );
+ self.cargo_handle = Some(cargo_handle);
+ self.progress(Progress::DidStart);
+ }
+ Err(error) => {
+ tracing::error!(
+ command = ?self.check_command(),
+ %error, "failed to restart flycheck"
+ );
+ }
+ }
+ }
+ Event::CheckEvent(None) => {
+ tracing::debug!("flycheck finished");
+
+ // Watcher finished
+ let cargo_handle = self.cargo_handle.take().unwrap();
+ let res = cargo_handle.join();
+ if res.is_err() {
+ tracing::error!(
+ "Flycheck failed to run the following command: {:?}",
+ self.check_command()
+ );
+ }
+ self.progress(Progress::DidFinish(res));
+ }
+ Event::CheckEvent(Some(message)) => match message {
+ CargoMessage::CompilerArtifact(msg) => {
+ self.progress(Progress::DidCheckCrate(msg.target.name));
+ }
+
+ CargoMessage::Diagnostic(msg) => {
+ self.send(Message::AddDiagnostic {
+ workspace_root: self.workspace_root.clone(),
+ diagnostic: msg,
+ });
+ }
+ },
+ }
+ }
+ // If we rerun the thread, we need to discard the previous check results first
+ self.cancel_check_process();
+ }
+
+ fn cancel_check_process(&mut self) {
+ if let Some(cargo_handle) = self.cargo_handle.take() {
+ cargo_handle.cancel();
+ self.progress(Progress::DidCancel);
+ }
+ }
+
+ fn check_command(&self) -> Command {
+ let mut cmd = match &self.config {
+ FlycheckConfig::CargoCommand {
+ command,
+ target_triple,
+ no_default_features,
+ all_targets,
+ all_features,
+ extra_args,
+ features,
+ } => {
+ let mut cmd = Command::new(toolchain::cargo());
+ cmd.arg(command);
+ cmd.current_dir(&self.workspace_root);
+ cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
+ .arg(self.workspace_root.join("Cargo.toml").as_os_str());
+
+ if let Some(target) = target_triple {
+ cmd.args(&["--target", target.as_str()]);
+ }
+ if *all_targets {
+ cmd.arg("--all-targets");
+ }
+ if *all_features {
+ cmd.arg("--all-features");
+ } else {
+ if *no_default_features {
+ cmd.arg("--no-default-features");
+ }
+ if !features.is_empty() {
+ cmd.arg("--features");
+ cmd.arg(features.join(" "));
+ }
+ }
+ cmd.args(extra_args);
+ cmd
+ }
+ FlycheckConfig::CustomCommand { command, args } => {
+ let mut cmd = Command::new(command);
+ cmd.args(args);
+ cmd
+ }
+ };
+ cmd.current_dir(&self.workspace_root);
+ cmd
+ }
+
+ fn send(&self, check_task: Message) {
+ (self.sender)(check_task);
+ }
+}
+
/// A handle to a cargo process used for fly-checking.
struct CargoHandle {
    /// The handle to the actual cargo process. As we cannot cancel directly
    /// from within a read syscall, dropping and therefore terminating the
    /// process is our best option.
    child: JodChild,
    /// The reader thread; yields whether at least one message was parsed and
    /// the accumulated stderr output.
    thread: jod_thread::JoinHandle<io::Result<(bool, String)>>,
    /// Channel on which parsed cargo/rustc messages arrive.
    receiver: Receiver<CargoMessage>,
}
+
impl CargoHandle {
    /// Spawns `command` with piped stdout/stderr plus a reader thread that
    /// parses its output into `CargoMessage`s.
    fn spawn(mut command: Command) -> std::io::Result<CargoHandle> {
        command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
        let mut child = JodChild::spawn(command)?;

        let stdout = child.stdout.take().unwrap();
        let stderr = child.stderr.take().unwrap();

        let (sender, receiver) = unbounded();
        let actor = CargoActor::new(sender, stdout, stderr);
        let thread = jod_thread::Builder::new()
            .name("CargoHandle".to_owned())
            .spawn(move || actor.run())
            .expect("failed to spawn thread");
        Ok(CargoHandle { child, thread, receiver })
    }

    /// Terminates the process without inspecting its outcome.
    fn cancel(mut self) {
        let _ = self.child.kill();
        let _ = self.child.wait();
    }

    /// Terminates the process and reports whether the run produced usable
    /// output: Ok if at least one message was parsed or the process exited
    /// successfully, otherwise an error carrying the captured stderr.
    fn join(mut self) -> io::Result<()> {
        let _ = self.child.kill();
        let exit_status = self.child.wait()?;
        let (read_at_least_one_message, error) = self.thread.join()?;
        if read_at_least_one_message || exit_status.success() {
            Ok(())
        } else {
            Err(io::Error::new(io::ErrorKind::Other, format!(
                "Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}",
                exit_status, error
            )))
        }
    }
}
+
/// Reader side of a cargo process: parses stdout lines into messages and
/// collects stderr for error reporting.
struct CargoActor {
    sender: Sender<CargoMessage>,
    stdout: ChildStdout,
    stderr: ChildStderr,
}
+
impl CargoActor {
    fn new(sender: Sender<CargoMessage>, stdout: ChildStdout, stderr: ChildStderr) -> CargoActor {
        CargoActor { sender, stdout, stderr }
    }

    /// Streams the process output, forwarding interesting messages over
    /// `self.sender`. Returns whether at least one message parsed, plus the
    /// collected stderr text.
    fn run(self) -> io::Result<(bool, String)> {
        // We manually read a line at a time, instead of using serde's
        // stream deserializers, because the deserializer cannot recover
        // from an error, resulting in it getting stuck, because we try to
        // be resilient against failures.
        //
        // Because cargo only outputs one JSON object per line, we can
        // simply skip a line if it doesn't parse, which just ignores any
        // erroneous output.

        let mut error = String::new();
        let mut read_at_least_one_message = false;
        let output = streaming_output(
            self.stdout,
            self.stderr,
            &mut |line| {
                read_at_least_one_message = true;

                // Try to deserialize a message from Cargo or Rustc.
                let mut deserializer = serde_json::Deserializer::from_str(line);
                deserializer.disable_recursion_limit();
                if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
                    match message {
                        // Skip certain kinds of messages to only spend time on what's useful
                        JsonMessage::Cargo(message) => match message {
                            // `fresh` artifacts were cached; no need to report them.
                            cargo_metadata::Message::CompilerArtifact(artifact)
                                if !artifact.fresh =>
                            {
                                self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
                            }
                            cargo_metadata::Message::CompilerMessage(msg) => {
                                self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
                            }
                            _ => (),
                        },
                        JsonMessage::Rustc(message) => {
                            self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
                        }
                    }
                }
            },
            &mut |line| {
                error.push_str(line);
                error.push('\n');
            },
        );
        match output {
            Ok(_) => Ok((read_at_least_one_message, error)),
            Err(e) => Err(io::Error::new(e.kind(), format!("{:?}: {}", e, error))),
        }
    }
}
+
/// Messages forwarded from the cargo reader thread to the flycheck actor.
enum CargoMessage {
    /// A (non-fresh) crate finished compiling.
    CompilerArtifact(cargo_metadata::Artifact),
    /// A compiler diagnostic, from either cargo-wrapped or bare rustc output.
    Diagnostic(Diagnostic),
}
+
/// One line of JSON output: either a cargo envelope or a bare rustc
/// diagnostic (`untagged` tries the variants in order).
#[derive(Deserialize)]
#[serde(untagged)]
enum JsonMessage {
    Cargo(cargo_metadata::Message),
    Rustc(Diagnostic),
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
new file mode 100644
index 000000000..e8cff2f3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -0,0 +1,43 @@
# Manifest for `hir-def`: hir-level definitions (items, bodies, attributes).
[package]
name = "hir-def"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
edition = "2021"
rust-version = "1.57"

[lib]
doctest = false

[dependencies]
anymap = "1.0.0-beta.2"
arrayvec = "0.7.2"
bitflags = "1.3.2"
cov-mark = "2.0.0-pre.1"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.3.4", features = ["raw-api"] }
drop_bomb = "0.1.5"
either = "1.7.0"
fst = { version = "0.4.7", default-features = false }
hashbrown = { version = "0.12.1", default-features = false }
indexmap = "1.9.1"
itertools = "0.10.3"
la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
once_cell = "1.12.0"
rustc-hash = "1.1.0"
smallvec = "1.9.0"
tracing = "0.1.35"

# Workspace-local crates.
stdx = { path = "../stdx", version = "0.0.0" }
base-db = { path = "../base-db", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
hir-expand = { path = "../hir-expand", version = "0.0.0" }
mbe = { path = "../mbe", version = "0.0.0" }
cfg = { path = "../cfg", version = "0.0.0" }
tt = { path = "../tt", version = "0.0.0" }
limit = { path = "../limit", version = "0.0.0" }

[dev-dependencies]
test-utils = { path = "../test-utils" }
expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
new file mode 100644
index 000000000..277135d6d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
@@ -0,0 +1,365 @@
+//! Defines hir-level representation of structs, enums and unions
+
+use std::sync::Arc;
+
+use base_db::CrateId;
+use either::Either;
+use hir_expand::{
+ name::{AsName, Name},
+ InFile,
+};
+use la_arena::{Arena, ArenaMap};
+use syntax::ast::{self, HasName, HasVisibility};
+use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
+
+use crate::{
+ body::{CfgExpander, LowerCtx},
+ db::DefDatabase,
+ intern::Interned,
+ item_tree::{AttrOwner, Field, Fields, ItemTree, ModItem, RawVisibilityId},
+ src::HasChildSource,
+ src::HasSource,
+ trace::Trace,
+ type_ref::TypeRef,
+ visibility::RawVisibility,
+ EnumId, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId, VariantId,
+};
+use cfg::CfgOptions;
+
/// Note that we use `StructData` for unions as well!
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StructData {
    pub name: Name,
    /// Fields (record, tuple, or unit), with `cfg`-disabled fields removed.
    pub variant_data: Arc<VariantData>,
    /// Parsed `#[repr(..)]` attribute, if any.
    pub repr: Option<ReprKind>,
    pub visibility: RawVisibility,
}
+
/// Hir-level data of an `enum` definition.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EnumData {
    pub name: Name,
    /// Variants in declaration order, with `cfg`-disabled ones removed.
    pub variants: Arena<EnumVariantData>,
    pub visibility: RawVisibility,
}
+
/// A single variant of an enum.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EnumVariantData {
    pub name: Name,
    /// The variant's fields (record, tuple, or unit).
    pub variant_data: Arc<VariantData>,
}
+
/// The fields of a struct, union, or enum variant, keyed by syntactic shape.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum VariantData {
    /// `struct S { a: u32 }`
    Record(Arena<FieldData>),
    /// `struct S(u32);`
    Tuple(Arena<FieldData>),
    /// `struct S;`
    Unit,
}
+
/// A single field of an enum variant or struct
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FieldData {
    /// For tuple fields this is the positional name (`0`, `1`, ...).
    pub name: Name,
    pub type_ref: Interned<TypeRef>,
    pub visibility: RawVisibility,
}
+
/// Coarse classification of a `#[repr(..)]` attribute: only packed-ness is
/// distinguished; every other recognized repr maps to `Other`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ReprKind {
    Packed,
    Other,
}
+
+fn repr_from_value(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ item_tree: &ItemTree,
+ of: AttrOwner,
+) -> Option<ReprKind> {
+ item_tree.attrs(db, krate, of).by_key("repr").tt_values().find_map(parse_repr_tt)
+}
+
+fn parse_repr_tt(tt: &Subtree) -> Option<ReprKind> {
+ match tt.delimiter {
+ Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {}
+ _ => return None,
+ }
+
+ let mut it = tt.token_trees.iter();
+ match it.next()? {
+ TokenTree::Leaf(Leaf::Ident(ident)) if ident.text == "packed" => Some(ReprKind::Packed),
+ _ => Some(ReprKind::Other),
+ }
+}
+
impl StructData {
    /// Salsa query: computes the `StructData` for a `struct` definition.
    pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> {
        let loc = id.lookup(db);
        let krate = loc.container.krate;
        let item_tree = loc.id.item_tree(db);
        let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
        let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();

        let strukt = &item_tree[loc.id.value];
        // `cfg`-disabled fields are dropped during lowering.
        let variant_data = lower_fields(db, krate, &item_tree, &cfg_options, &strukt.fields, None);
        Arc::new(StructData {
            name: strukt.name.clone(),
            variant_data: Arc::new(variant_data),
            repr,
            visibility: item_tree[strukt.visibility].clone(),
        })
    }
    /// Salsa query: computes the data for a `union` definition (unions reuse
    /// `StructData`).
    pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> {
        let loc = id.lookup(db);
        let krate = loc.container.krate;
        let item_tree = loc.id.item_tree(db);
        let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
        let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();

        let union = &item_tree[loc.id.value];
        let variant_data = lower_fields(db, krate, &item_tree, &cfg_options, &union.fields, None);

        Arc::new(StructData {
            name: union.name.clone(),
            variant_data: Arc::new(variant_data),
            repr,
            visibility: item_tree[union.visibility].clone(),
        })
    }
}
+
impl EnumData {
    /// Salsa query: computes the `EnumData` for an `enum` definition,
    /// skipping `cfg`-disabled variants and fields.
    pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc<EnumData> {
        let loc = e.lookup(db);
        let krate = loc.container.krate;
        let item_tree = loc.id.item_tree(db);
        let cfg_options = db.crate_graph()[krate].cfg_options.clone();

        let enum_ = &item_tree[loc.id.value];
        let mut variants = Arena::new();
        for tree_id in enum_.variants.clone() {
            // Variants disabled by `#[cfg(..)]` are not allocated at all.
            if item_tree.attrs(db, krate, tree_id.into()).is_cfg_enabled(&cfg_options) {
                let var = &item_tree[tree_id];
                // Variant fields inherit the enum's visibility.
                let var_data = lower_fields(
                    db,
                    krate,
                    &item_tree,
                    &cfg_options,
                    &var.fields,
                    Some(enum_.visibility),
                );

                variants.alloc(EnumVariantData {
                    name: var.name.clone(),
                    variant_data: Arc::new(var_data),
                });
            }
        }

        Arc::new(EnumData {
            name: enum_.name.clone(),
            variants,
            visibility: item_tree[enum_.visibility].clone(),
        })
    }

    /// Finds a variant by name.
    pub fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> {
        let (id, _) = self.variants.iter().find(|(_id, data)| &data.name == name)?;
        Some(id)
    }
}
+
+impl HasChildSource<LocalEnumVariantId> for EnumId {
+ type Value = ast::Variant;
+ fn child_source(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> InFile<ArenaMap<LocalEnumVariantId, Self::Value>> {
+ let src = self.lookup(db).source(db);
+ let mut trace = Trace::new_for_map();
+ lower_enum(db, &mut trace, &src, self.lookup(db).container);
+ src.with_value(trace.into_map())
+ }
+}
+
/// Lowers the variants of an AST enum into `EnumVariantData`, recording the
/// AST node of each variant via `trace`. `cfg`-disabled variants are skipped.
fn lower_enum(
    db: &dyn DefDatabase,
    trace: &mut Trace<EnumVariantData, ast::Variant>,
    ast: &InFile<ast::Enum>,
    module_id: ModuleId,
) {
    let expander = CfgExpander::new(db, ast.file_id, module_id.krate);
    let variants = ast
        .value
        .variant_list()
        .into_iter()
        .flat_map(|it| it.variants())
        .filter(|var| expander.is_cfg_enabled(db, var));
    for var in variants {
        // Trace records both the AST node and the lowered data under one id.
        trace.alloc(
            || var.clone(),
            || EnumVariantData {
                name: var.name().map_or_else(Name::missing, |it| it.as_name()),
                variant_data: Arc::new(VariantData::new(db, ast.with_value(var.kind()), module_id)),
            },
        );
    }
}
+
+impl VariantData {
+ fn new(db: &dyn DefDatabase, flavor: InFile<ast::StructKind>, module_id: ModuleId) -> Self {
+ let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate);
+ let mut trace = Trace::new_for_arena();
+ match lower_struct(db, &mut expander, &mut trace, &flavor) {
+ StructKind::Tuple => VariantData::Tuple(trace.into_arena()),
+ StructKind::Record => VariantData::Record(trace.into_arena()),
+ StructKind::Unit => VariantData::Unit,
+ }
+ }
+
+ pub fn fields(&self) -> &Arena<FieldData> {
+ const EMPTY: &Arena<FieldData> = &Arena::new();
+ match &self {
+ VariantData::Record(fields) | VariantData::Tuple(fields) => fields,
+ _ => EMPTY,
+ }
+ }
+
+ pub fn field(&self, name: &Name) -> Option<LocalFieldId> {
+ self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None })
+ }
+
+ pub fn kind(&self) -> StructKind {
+ match self {
+ VariantData::Record(_) => StructKind::Record,
+ VariantData::Tuple(_) => StructKind::Tuple,
+ VariantData::Unit => StructKind::Unit,
+ }
+ }
+}
+
impl HasChildSource<LocalFieldId> for VariantId {
    /// Tuple fields map to `ast::TupleField`, record fields to `ast::RecordField`.
    type Value = Either<ast::TupleField, ast::RecordField>;

    /// Maps each field id back to the AST node it was lowered from.
    fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Self::Value>> {
        let (src, module_id) = match self {
            VariantId::EnumVariantId(it) => {
                // I don't really like the fact that we call into parent source
                // here, this might add to more queries then necessary.
                let src = it.parent.child_source(db);
                (src.map(|map| map[it.local_id].kind()), it.parent.lookup(db).container)
            }
            VariantId::StructId(it) => {
                (it.lookup(db).source(db).map(|it| it.kind()), it.lookup(db).container)
            }
            VariantId::UnionId(it) => (
                // Unions have no tuple form: either a record field list or unit.
                it.lookup(db).source(db).map(|it| {
                    it.record_field_list()
                        .map(ast::StructKind::Record)
                        .unwrap_or(ast::StructKind::Unit)
                }),
                it.lookup(db).container,
            ),
        };
        let mut expander = CfgExpander::new(db, src.file_id, module_id.krate);
        let mut trace = Trace::new_for_map();
        lower_struct(db, &mut expander, &mut trace, &src);
        src.with_value(trace.into_map())
    }
}
+
/// Syntactic shape of a struct, union, or enum variant body.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum StructKind {
    Tuple,
    Record,
    Unit,
}
+
/// Lowers the fields of an AST struct/variant body, recording each field's
/// AST node via `trace`, and returns the body's shape. `cfg`-disabled
/// fields are skipped (note: tuple field indices still count skipped fields,
/// since `enumerate` runs before the cfg check).
fn lower_struct(
    db: &dyn DefDatabase,
    expander: &mut CfgExpander,
    trace: &mut Trace<FieldData, Either<ast::TupleField, ast::RecordField>>,
    ast: &InFile<ast::StructKind>,
) -> StructKind {
    let ctx = LowerCtx::new(db, ast.file_id);

    match &ast.value {
        ast::StructKind::Tuple(fl) => {
            for (i, fd) in fl.fields().enumerate() {
                if !expander.is_cfg_enabled(db, &fd) {
                    continue;
                }

                trace.alloc(
                    || Either::Left(fd.clone()),
                    || FieldData {
                        // Tuple fields are named by their position.
                        name: Name::new_tuple_field(i),
                        type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())),
                        visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
                    },
                );
            }
            StructKind::Tuple
        }
        ast::StructKind::Record(fl) => {
            for fd in fl.fields() {
                if !expander.is_cfg_enabled(db, &fd) {
                    continue;
                }

                trace.alloc(
                    || Either::Right(fd.clone()),
                    || FieldData {
                        name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
                        type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())),
                        visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
                    },
                );
            }
            StructKind::Record
        }
        ast::StructKind::Unit => StructKind::Unit,
    }
}
+
+fn lower_fields(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ item_tree: &ItemTree,
+ cfg_options: &CfgOptions,
+ fields: &Fields,
+ override_visibility: Option<RawVisibilityId>,
+) -> VariantData {
+ match fields {
+ Fields::Record(flds) => {
+ let mut arena = Arena::new();
+ for field_id in flds.clone() {
+ if item_tree.attrs(db, krate, field_id.into()).is_cfg_enabled(cfg_options) {
+ arena.alloc(lower_field(item_tree, &item_tree[field_id], override_visibility));
+ }
+ }
+ VariantData::Record(arena)
+ }
+ Fields::Tuple(flds) => {
+ let mut arena = Arena::new();
+ for field_id in flds.clone() {
+ if item_tree.attrs(db, krate, field_id.into()).is_cfg_enabled(cfg_options) {
+ arena.alloc(lower_field(item_tree, &item_tree[field_id], override_visibility));
+ }
+ }
+ VariantData::Tuple(arena)
+ }
+ Fields::Unit => VariantData::Unit,
+ }
+}
+
+fn lower_field(
+ item_tree: &ItemTree,
+ field: &Field,
+ override_visibility: Option<RawVisibilityId>,
+) -> FieldData {
+ FieldData {
+ name: field.name.clone(),
+ type_ref: field.type_ref.clone(),
+ visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
new file mode 100644
index 000000000..2b39c6f8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -0,0 +1,1002 @@
+//! A higher level attributes based on TokenTree, with also some shortcuts.
+
+use std::{fmt, hash::Hash, ops, sync::Arc};
+
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
+use itertools::Itertools;
+use la_arena::{ArenaMap, Idx, RawIdx};
+use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
+use smallvec::{smallvec, SmallVec};
+use syntax::{
+ ast::{self, AstNode, HasAttrs, IsString},
+ match_ast, AstPtr, AstToken, SmolStr, SyntaxNode, TextRange, TextSize,
+};
+use tt::Subtree;
+
+use crate::{
+ db::DefDatabase,
+ intern::Interned,
+ item_tree::{AttrOwner, Fields, ItemTreeId, ItemTreeNode},
+ nameres::{ModuleOrigin, ModuleSource},
+ path::{ModPath, PathKind},
+ src::{HasChildSource, HasSource},
+ AdtId, AttrDefId, EnumId, GenericParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroId,
+ VariantId,
+};
+
+/// Holds documentation
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Documentation(String);
+
+impl Documentation {
+    /// Wraps an already-assembled documentation string.
+    pub fn new(s: String) -> Self {
+        Documentation(s)
+    }
+
+    /// Borrows the documentation text.
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
+}
+
+impl From<Documentation> for String {
+    fn from(Documentation(string): Documentation) -> Self {
+        string
+    }
+}
+
+/// Syntactical attributes, without filtering of `cfg_attr`s.
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub(crate) struct RawAttrs {
+    // `None` means "no attributes": avoids an allocation for the common
+    // empty case (see `EMPTY` and the `Deref` impl below).
+    entries: Option<Arc<[Attr]>>,
+}
+
+/// Semantic attributes: [`RawAttrs`] after `cfg_attr` expansion
+/// (see [`RawAttrs::filter`]).
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct Attrs(RawAttrs);
+
+/// [`Attrs`] paired with the item they belong to, which enables mapping
+/// attributes back to their source (see [`AttrsWithOwner::source_map`]).
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct AttrsWithOwner {
+    attrs: Attrs,
+    owner: AttrDefId,
+}
+
+impl ops::Deref for RawAttrs {
+    type Target = [Attr];
+
+    fn deref(&self) -> &[Attr] {
+        match &self.entries {
+            Some(it) => &*it,
+            None => &[],
+        }
+    }
+}
+impl Attrs {
+    /// Finds the attribute with the given [`AttrId`] by linear search.
+    pub fn get(&self, id: AttrId) -> Option<&Attr> {
+        (**self).iter().find(|attr| attr.id == id)
+    }
+}
+
+impl ops::Deref for Attrs {
+    type Target = [Attr];
+
+    fn deref(&self) -> &[Attr] {
+        match &self.0.entries {
+            Some(it) => &*it,
+            None => &[],
+        }
+    }
+}
+
+impl ops::Deref for AttrsWithOwner {
+    type Target = Attrs;
+
+    fn deref(&self) -> &Attrs {
+        &self.attrs
+    }
+}
+
+impl RawAttrs {
+    pub(crate) const EMPTY: Self = Self { entries: None };
+
+    /// Collects all attributes and doc comments syntactically attached to
+    /// `owner`, in source order. Doc comments are lowered into
+    /// `#[doc = "..."]`-shaped `Attr`s.
+    pub(crate) fn new(db: &dyn DefDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
+        let entries = collect_attrs(owner)
+            .filter_map(|(id, attr)| match attr {
+                Either::Left(attr) => {
+                    attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
+                }
+                Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+                    id,
+                    input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
+                    path: Interned::new(ModPath::from(hir_expand::name!(doc))),
+                }),
+            })
+            .collect::<Arc<_>>();
+
+        Self { entries: if entries.is_empty() { None } else { Some(entries) } }
+    }
+
+    /// Like [`Self::new`], but derives the hygiene context from the owner's
+    /// file id.
+    fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
+        let hygiene = Hygiene::new(db.upcast(), owner.file_id);
+        Self::new(db, owner.value, &hygiene)
+    }
+
+    /// Concatenates two attribute lists, shifting the `ast_index` of every
+    /// attribute in `other` past the end of `self` so ids stay unique.
+    pub(crate) fn merge(&self, other: Self) -> Self {
+        // FIXME: This needs to fixup `AttrId`s
+        match (&self.entries, other.entries) {
+            (None, None) => Self::EMPTY,
+            (None, entries @ Some(_)) => Self { entries },
+            (Some(entries), None) => Self { entries: Some(entries.clone()) },
+            (Some(a), Some(b)) => {
+                let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1);
+                Self {
+                    entries: Some(
+                        a.iter()
+                            .cloned()
+                            .chain(b.iter().map(|it| {
+                                let mut it = it.clone();
+                                it.id.ast_index += last_ast_index;
+                                it
+                            }))
+                            .collect(),
+                    ),
+                }
+            }
+        }
+    }
+
+    /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
+    pub(crate) fn filter(self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
+        let has_cfg_attrs = self.iter().any(|attr| {
+            attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr])
+        });
+        // Fast path: no `cfg_attr` present, nothing to expand.
+        if !has_cfg_attrs {
+            return Attrs(self);
+        }
+
+        let crate_graph = db.crate_graph();
+        let new_attrs = self
+            .iter()
+            .flat_map(|attr| -> SmallVec<[_; 1]> {
+                let is_cfg_attr =
+                    attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr]);
+                if !is_cfg_attr {
+                    return smallvec![attr.clone()];
+                }
+
+                let subtree = match attr.token_tree_value() {
+                    Some(it) => it,
+                    _ => return smallvec![attr.clone()],
+                };
+
+                // Input subtree is: `(cfg, $(attr),+)`
+                // Split it up into a `cfg` subtree and the `attr` subtrees.
+                // FIXME: There should be a common API for this.
+                let mut parts = subtree.token_trees.split(|tt| {
+                    matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))
+                });
+                let cfg = match parts.next() {
+                    Some(it) => it,
+                    None => return smallvec![],
+                };
+                let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+                let cfg = CfgExpr::parse(&cfg);
+                // Every expanded attribute inherits the id of the `cfg_attr`
+                // it came from.
+                let index = attr.id;
+                let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| {
+                    let tree = Subtree { delimiter: None, token_trees: attr.to_vec() };
+                    // FIXME hygiene
+                    let hygiene = Hygiene::new_unhygienic();
+                    Attr::from_tt(db, &tree, &hygiene, index)
+                });
+
+                let cfg_options = &crate_graph[krate].cfg_options;
+                // A check result of `None` (unknown cfg) is treated as enabled;
+                // only a definite `Some(false)` drops the attributes.
+                if cfg_options.check(&cfg) == Some(false) {
+                    smallvec![]
+                } else {
+                    cov_mark::hit!(cfg_attr_active);
+
+                    attrs.collect()
+                }
+            })
+            .collect();
+
+        Attrs(RawAttrs { entries: Some(new_attrs) })
+    }
+}
+
+impl Attrs {
+    pub const EMPTY: Self = Self(RawAttrs::EMPTY);
+
+    /// Query implementation: computes the attributes of every cfg-enabled
+    /// variant of enum `e`. Variants whose cfg is disabled are excluded,
+    /// so the resulting indices only count enabled variants.
+    pub(crate) fn variants_attrs_query(
+        db: &dyn DefDatabase,
+        e: EnumId,
+    ) -> Arc<ArenaMap<LocalEnumVariantId, Attrs>> {
+        // FIXME: There should be some proper form of mapping between item tree enum variant ids and hir enum variant ids
+        let mut res = ArenaMap::default();
+
+        let loc = e.lookup(db);
+        let krate = loc.container.krate;
+        let item_tree = loc.id.item_tree(db);
+        let enum_ = &item_tree[loc.id.value];
+        let crate_graph = db.crate_graph();
+        let cfg_options = &crate_graph[krate].cfg_options;
+
+        let mut idx = 0;
+        for variant in enum_.variants.clone() {
+            let attrs = item_tree.attrs(db, krate, variant.into());
+            if attrs.is_cfg_enabled(cfg_options) {
+                res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
+                idx += 1;
+            }
+        }
+
+        Arc::new(res)
+    }
+
+    /// Query implementation: computes the attributes of every cfg-enabled
+    /// field of the struct/union/enum-variant `v`.
+    pub(crate) fn fields_attrs_query(
+        db: &dyn DefDatabase,
+        v: VariantId,
+    ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
+        // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
+        let mut res = ArenaMap::default();
+
+        let crate_graph = db.crate_graph();
+        let (fields, item_tree, krate) = match v {
+            VariantId::EnumVariantId(it) => {
+                let e = it.parent;
+                let loc = e.lookup(db);
+                let krate = loc.container.krate;
+                let item_tree = loc.id.item_tree(db);
+                let enum_ = &item_tree[loc.id.value];
+
+                let cfg_options = &crate_graph[krate].cfg_options;
+                // `loop` is used here as a labeled block: it either `break`s
+                // with the matching item-tree variant or returns early — it
+                // never actually iterates twice. The index walk must mirror
+                // `variants_attrs_query`, counting only cfg-enabled variants.
+                let variant = 'tri: loop {
+                    let mut idx = 0;
+                    for variant in enum_.variants.clone() {
+                        let attrs = item_tree.attrs(db, krate, variant.into());
+                        if attrs.is_cfg_enabled(cfg_options) {
+                            if it.local_id == Idx::from_raw(RawIdx::from(idx)) {
+                                break 'tri variant;
+                            }
+                            idx += 1;
+                        }
+                    }
+                    // Variant not found (e.g. it was cfg-disabled): no fields.
+                    return Arc::new(res);
+                };
+                (item_tree[variant].fields.clone(), item_tree, krate)
+            }
+            VariantId::StructId(it) => {
+                let loc = it.lookup(db);
+                let krate = loc.container.krate;
+                let item_tree = loc.id.item_tree(db);
+                let struct_ = &item_tree[loc.id.value];
+                (struct_.fields.clone(), item_tree, krate)
+            }
+            VariantId::UnionId(it) => {
+                let loc = it.lookup(db);
+                let krate = loc.container.krate;
+                let item_tree = loc.id.item_tree(db);
+                let union_ = &item_tree[loc.id.value];
+                (union_.fields.clone(), item_tree, krate)
+            }
+        };
+
+        let fields = match fields {
+            Fields::Record(fields) | Fields::Tuple(fields) => fields,
+            Fields::Unit => return Arc::new(res),
+        };
+
+        let cfg_options = &crate_graph[krate].cfg_options;
+
+        let mut idx = 0;
+        for field in fields {
+            let attrs = item_tree.attrs(db, krate, field.into());
+            if attrs.is_cfg_enabled(cfg_options) {
+                res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
+                idx += 1;
+            }
+        }
+
+        Arc::new(res)
+    }
+
+    /// Starts a query for attributes whose path is the single identifier
+    /// `key`, e.g. `by_key("doc")` for `#[doc = ...]` attributes.
+    pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> {
+        AttrQuery { attrs: self, key }
+    }
+}
+
+impl Attrs {
+    /// Collects all `#[cfg(...)]` attributes into a single expression;
+    /// multiple `cfg` attributes are combined with `all(...)`.
+    pub fn cfg(&self) -> Option<CfgExpr> {
+        let mut cfgs = self.by_key("cfg").tt_values().map(CfgExpr::parse);
+        let first = cfgs.next()?;
+        match cfgs.next() {
+            Some(second) => {
+                let cfgs = [first, second].into_iter().chain(cfgs);
+                Some(CfgExpr::All(cfgs.collect()))
+            }
+            None => Some(first),
+        }
+    }
+    /// Whether the item is enabled under `cfg_options`. An indeterminate
+    /// check (`None`, i.e. unknown cfg keys) counts as enabled.
+    pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
+        match self.cfg() {
+            None => true,
+            Some(cfg) => cfg_options.check(&cfg) != Some(false),
+        }
+    }
+
+    /// The value of the `#[lang = "..."]` attribute, if any.
+    pub fn lang(&self) -> Option<&SmolStr> {
+        self.by_key("lang").string_value()
+    }
+
+    /// Joins all `#[doc = "..."]` attributes (including lowered doc
+    /// comments) into one string, stripping the common indentation
+    /// (see `doc_indent`) and trailing whitespace from each line.
+    pub fn docs(&self) -> Option<Documentation> {
+        let docs = self.by_key("doc").attrs().filter_map(|attr| attr.string_value());
+        let indent = doc_indent(self);
+        let mut buf = String::new();
+        for doc in docs {
+            // str::lines doesn't yield anything for the empty string
+            if !doc.is_empty() {
+                buf.extend(Itertools::intersperse(
+                    doc.lines().map(|line| {
+                        line.char_indices()
+                            .nth(indent)
+                            .map_or(line, |(offset, _)| &line[offset..])
+                            .trim_end()
+                    }),
+                    "\n",
+                ));
+            }
+            buf.push('\n');
+        }
+        // Drop the final separator newline added by the loop.
+        buf.pop();
+        if buf.is_empty() {
+            None
+        } else {
+            Some(Documentation(buf))
+        }
+    }
+
+    /// Whether a `#[doc(hidden)]` attribute is present.
+    pub fn has_doc_hidden(&self) -> bool {
+        self.by_key("doc").tt_values().any(|tt| {
+            tt.delimiter_kind() == Some(DelimiterKind::Parenthesis) &&
+                matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden")
+        })
+    }
+
+    pub fn is_proc_macro(&self) -> bool {
+        self.by_key("proc_macro").exists()
+    }
+
+    pub fn is_proc_macro_attribute(&self) -> bool {
+        self.by_key("proc_macro_attribute").exists()
+    }
+
+    pub fn is_proc_macro_derive(&self) -> bool {
+        self.by_key("proc_macro_derive").exists()
+    }
+}
+
+impl AttrsWithOwner {
+    /// Query implementation: collects the raw attributes for any
+    /// `AttrDefId` (from the item tree, child sources, or precomputed
+    /// field/variant queries) and runs `cfg_attr` filtering on them.
+    pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Self {
+        // FIXME: this should use `Trace` to avoid duplication in `source_map` below
+        let raw_attrs = match def {
+            AttrDefId::ModuleId(module) => {
+                let def_map = module.def_map(db);
+                let mod_data = &def_map[module.local_id];
+
+                match mod_data.origin {
+                    // Out-of-line module: attrs on the `mod x;` declaration
+                    // come first, then the inner attrs of the file itself.
+                    ModuleOrigin::File { definition, declaration_tree_id, .. } => {
+                        let decl_attrs = declaration_tree_id
+                            .item_tree(db)
+                            .raw_attrs(AttrOwner::ModItem(declaration_tree_id.value.into()))
+                            .clone();
+                        let tree = db.file_item_tree(definition.into());
+                        let def_attrs = tree.raw_attrs(AttrOwner::TopLevel).clone();
+                        decl_attrs.merge(def_attrs)
+                    }
+                    ModuleOrigin::CrateRoot { definition } => {
+                        let tree = db.file_item_tree(definition.into());
+                        tree.raw_attrs(AttrOwner::TopLevel).clone()
+                    }
+                    ModuleOrigin::Inline { definition_tree_id, .. } => definition_tree_id
+                        .item_tree(db)
+                        .raw_attrs(AttrOwner::ModItem(definition_tree_id.value.into()))
+                        .clone(),
+                    ModuleOrigin::BlockExpr { block } => RawAttrs::from_attrs_owner(
+                        db,
+                        InFile::new(block.file_id, block.to_node(db.upcast()))
+                            .as_ref()
+                            .map(|it| it as &dyn ast::HasAttrs),
+                    ),
+                }
+            }
+            // Fields and variants reuse the dedicated queries, which have
+            // already been cfg_attr-filtered — return directly.
+            AttrDefId::FieldId(it) => {
+                return Self { attrs: db.fields_attrs(it.parent)[it.local_id].clone(), owner: def };
+            }
+            AttrDefId::EnumVariantId(it) => {
+                return Self {
+                    attrs: db.variants_attrs(it.parent)[it.local_id].clone(),
+                    owner: def,
+                };
+            }
+            AttrDefId::AdtId(it) => match it {
+                AdtId::StructId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+                AdtId::EnumId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+                AdtId::UnionId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            },
+            AttrDefId::TraitId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            AttrDefId::MacroId(it) => match it {
+                MacroId::Macro2Id(it) => attrs_from_item_tree(it.lookup(db).id, db),
+                MacroId::MacroRulesId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+                MacroId::ProcMacroId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            },
+            AttrDefId::ImplId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            AttrDefId::ConstId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            AttrDefId::StaticId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            AttrDefId::FunctionId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            AttrDefId::TypeAliasId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+            // Generic params are not item-tree items; read their attrs from
+            // the parent's child source instead.
+            AttrDefId::GenericParamId(it) => match it {
+                GenericParamId::ConstParamId(it) => {
+                    let src = it.parent().child_source(db);
+                    RawAttrs::from_attrs_owner(
+                        db,
+                        src.with_value(src.value[it.local_id()].as_ref().either(
+                            |it| match it {
+                                ast::TypeOrConstParam::Type(it) => it as _,
+                                ast::TypeOrConstParam::Const(it) => it as _,
+                            },
+                            |it| it as _,
+                        )),
+                    )
+                }
+                GenericParamId::TypeParamId(it) => {
+                    let src = it.parent().child_source(db);
+                    RawAttrs::from_attrs_owner(
+                        db,
+                        src.with_value(src.value[it.local_id()].as_ref().either(
+                            |it| match it {
+                                ast::TypeOrConstParam::Type(it) => it as _,
+                                ast::TypeOrConstParam::Const(it) => it as _,
+                            },
+                            |it| it as _,
+                        )),
+                    )
+                }
+                GenericParamId::LifetimeParamId(it) => {
+                    let src = it.parent.child_source(db);
+                    RawAttrs::from_attrs_owner(db, src.with_value(&src.value[it.local_id]))
+                }
+            },
+            AttrDefId::ExternBlockId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+        };
+
+        let attrs = raw_attrs.filter(db, def.krate(db));
+        Self { attrs, owner: def }
+    }
+
+    /// Builds a map from each lowered [`Attr`] back to the syntax node
+    /// (attribute or doc comment) it originated from.
+    pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
+        let owner = match self.owner {
+            AttrDefId::ModuleId(module) => {
+                // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself).
+
+                let def_map = module.def_map(db);
+                let mod_data = &def_map[module.local_id];
+                match mod_data.declaration_source(db) {
+                    Some(it) => {
+                        let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value));
+                        if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
+                            mod_data.definition_source(db)
+                        {
+                            map.append_module_inline_attrs(AttrSourceMap::new(InFile::new(
+                                file_id, &file,
+                            )));
+                        }
+                        return map;
+                    }
+                    None => {
+                        let InFile { file_id, value } = mod_data.definition_source(db);
+                        let attrs_owner = match &value {
+                            ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs,
+                            ModuleSource::Module(module) => module as &dyn ast::HasAttrs,
+                            ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs,
+                        };
+                        return AttrSourceMap::new(InFile::new(file_id, attrs_owner));
+                    }
+                }
+            }
+            AttrDefId::FieldId(id) => {
+                let map = db.fields_attrs_source_map(id.parent);
+                let file_id = id.parent.file_id(db);
+                let root = db.parse_or_expand(file_id).unwrap();
+                let owner = match &map[id.local_id] {
+                    Either::Left(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
+                    Either::Right(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
+                };
+                InFile::new(file_id, owner)
+            }
+            AttrDefId::AdtId(adt) => match adt {
+                AdtId::StructId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+                AdtId::UnionId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+                AdtId::EnumId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            },
+            AttrDefId::FunctionId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            AttrDefId::EnumVariantId(id) => {
+                let map = db.variants_attrs_source_map(id.parent);
+                let file_id = id.parent.lookup(db).id.file_id();
+                let root = db.parse_or_expand(file_id).unwrap();
+                InFile::new(file_id, ast::AnyHasAttrs::new(map[id.local_id].to_node(&root)))
+            }
+            AttrDefId::StaticId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            AttrDefId::ConstId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            AttrDefId::TraitId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            AttrDefId::TypeAliasId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            AttrDefId::MacroId(id) => match id {
+                MacroId::Macro2Id(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+                MacroId::MacroRulesId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+                MacroId::ProcMacroId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            },
+            AttrDefId::ImplId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+            AttrDefId::GenericParamId(id) => match id {
+                GenericParamId::ConstParamId(id) => {
+                    id.parent().child_source(db).map(|source| match &source[id.local_id()] {
+                        Either::Left(ast::TypeOrConstParam::Type(id)) => {
+                            ast::AnyHasAttrs::new(id.clone())
+                        }
+                        Either::Left(ast::TypeOrConstParam::Const(id)) => {
+                            ast::AnyHasAttrs::new(id.clone())
+                        }
+                        Either::Right(id) => ast::AnyHasAttrs::new(id.clone()),
+                    })
+                }
+                GenericParamId::TypeParamId(id) => {
+                    id.parent().child_source(db).map(|source| match &source[id.local_id()] {
+                        Either::Left(ast::TypeOrConstParam::Type(id)) => {
+                            ast::AnyHasAttrs::new(id.clone())
+                        }
+                        Either::Left(ast::TypeOrConstParam::Const(id)) => {
+                            ast::AnyHasAttrs::new(id.clone())
+                        }
+                        Either::Right(id) => ast::AnyHasAttrs::new(id.clone()),
+                    })
+                }
+                GenericParamId::LifetimeParamId(id) => id
+                    .parent
+                    .child_source(db)
+                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
+            },
+            AttrDefId::ExternBlockId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+        };
+
+        AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
+    }
+
+    /// Like [`Attrs::docs`], but additionally returns a [`DocsRangeMap`]
+    /// that maps ranges in the concatenated doc string back to ranges in
+    /// the original source.
+    pub fn docs_with_rangemap(
+        &self,
+        db: &dyn DefDatabase,
+    ) -> Option<(Documentation, DocsRangeMap)> {
+        let docs =
+            self.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
+        let indent = doc_indent(self);
+        let mut buf = String::new();
+        let mut mapping = Vec::new();
+        for (doc, idx) in docs {
+            if !doc.is_empty() {
+                // `base_offset` tracks the position of the current raw line
+                // within this doc attribute's string.
+                let mut base_offset = 0;
+                for raw_line in doc.split('\n') {
+                    let line = raw_line.trim_end();
+                    let line_len = line.len();
+                    // Strip the common indentation, falling back to the
+                    // whole line when it is shorter than the indent.
+                    let (offset, line) = match line.char_indices().nth(indent) {
+                        Some((offset, _)) => (offset, &line[offset..]),
+                        None => (0, line),
+                    };
+                    let buf_offset = buf.len();
+                    buf.push_str(line);
+                    mapping.push((
+                        TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
+                        idx,
+                        TextRange::at(
+                            (base_offset + offset).try_into().ok()?,
+                            line_len.try_into().ok()?,
+                        ),
+                    ));
+                    buf.push('\n');
+                    base_offset += raw_line.len() + 1;
+                }
+            } else {
+                buf.push('\n');
+            }
+        }
+        buf.pop();
+        if buf.is_empty() {
+            None
+        } else {
+            Some((Documentation(buf), DocsRangeMap { mapping, source_map: self.source_map(db) }))
+        }
+    }
+}
+
+/// Computes the smallest leading-whitespace count over all non-blank lines
+/// of the item's doc attributes; this common indent is stripped when the
+/// docs are rendered (see `Attrs::docs`).
+fn doc_indent(attrs: &Attrs) -> usize {
+    attrs
+        .by_key("doc")
+        .attrs()
+        .filter_map(|attr| attr.string_value())
+        .flat_map(|s| s.lines())
+        .filter(|line| !line.chars().all(|c| c.is_whitespace()))
+        .map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
+        .min()
+        .unwrap_or(0)
+}
+
+/// Returns the inner (`#![...]` / `//!`) attributes and doc comments of
+/// `syntax`, or `None` if this node kind cannot carry inner attributes.
+fn inner_attributes(
+    syntax: &SyntaxNode,
+) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
+    // Inner attributes live on the node's *body* (item list, block, …),
+    // so pick the right child node per kind first.
+    let node = match_ast! {
+        match syntax {
+            ast::SourceFile(_) => syntax.clone(),
+            ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+            ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+            ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+            ast::Module(it) => it.item_list()?.syntax().clone(),
+            ast::BlockExpr(it) => {
+                use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT};
+                // Block expressions accept outer and inner attributes, but only when they are the outer
+                // expression of an expression statement or the final expression of another block expression.
+                let may_carry_attributes = matches!(
+                    it.syntax().parent().map(|it| it.kind()),
+                    Some(BLOCK_EXPR | EXPR_STMT)
+                );
+                if !may_carry_attributes {
+                    return None
+                }
+                syntax.clone()
+            },
+            _ => return None,
+        }
+    };
+
+    let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
+        Either::Left(attr) => attr.kind().is_inner(),
+        Either::Right(comment) => comment.is_inner(),
+    });
+    Some(attrs)
+}
+
+#[derive(Debug)]
+pub struct AttrSourceMap {
+    // Attribute/doc-comment syntax nodes, indexed by `AttrId::ast_index`.
+    source: Vec<Either<ast::Attr, ast::Comment>>,
+    file_id: HirFileId,
+    /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site,
+    /// while `file_id` will be the one of the module declaration site.
+    /// The usize is the index into `source` from which point on the entries reside in the def site
+    /// file.
+    mod_def_site_file_id: Option<(HirFileId, usize)>,
+}
+
+impl AttrSourceMap {
+    /// Builds the map by collecting `owner`'s attributes in the same order
+    /// as `RawAttrs::new`, so `AttrId`s index into it directly.
+    fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self {
+        Self {
+            source: collect_attrs(owner.value).map(|(_, it)| it).collect(),
+            file_id: owner.file_id,
+            mod_def_site_file_id: None,
+        }
+    }
+
+    /// Append a second source map to this one, this is required for modules, whose outline and inline
+    /// attributes can reside in different files
+    fn append_module_inline_attrs(&mut self, other: Self) {
+        assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none());
+        let len = self.source.len();
+        self.source.extend(other.source);
+        if other.file_id != self.file_id {
+            self.mod_def_site_file_id = Some((other.file_id, len));
+        }
+    }
+
+    /// Maps the lowered `Attr` back to its original syntax node.
+    ///
+    /// `attr` must come from the `owner` used for AttrSourceMap
+    ///
+    /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of
+    /// the attribute represented by `Attr`.
+    pub fn source_of(&self, attr: &Attr) -> InFile<&Either<ast::Attr, ast::Comment>> {
+        self.source_of_id(attr.id)
+    }
+
+    fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
+        let ast_idx = id.ast_index as usize;
+        // Attrs past the declaration-site cut belong to the def-site file.
+        let file_id = match self.mod_def_site_file_id {
+            Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
+            _ => self.file_id,
+        };
+
+        self.source
+            .get(ast_idx)
+            .map(|it| InFile::new(file_id, it))
+            .unwrap_or_else(|| panic!("cannot find attr at index {:?}", id))
+    }
+}
+
+/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
+#[derive(Debug)]
+pub struct DocsRangeMap {
+    source_map: AttrSourceMap,
+    // (docstring-line-range, attr_index, attr-string-range)
+    // a mapping from the text range of a line of the [`Documentation`] to the attribute index and
+    // the original (untrimmed) syntax doc line
+    mapping: Vec<(TextRange, AttrId, TextRange)>,
+}
+
+impl DocsRangeMap {
+    /// Maps a [`TextRange`] relative to the documentation string back to its AST range
+    pub fn map(&self, range: TextRange) -> Option<InFile<TextRange>> {
+        // Mapping entries are in docstring order, so a binary search finds
+        // the line containing `range`. Ranges that span multiple doc lines
+        // are rejected below.
+        let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
+        let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
+        if !line_docs_range.contains_range(range) {
+            return None;
+        }
+
+        let relative_range = range - line_docs_range.start();
+
+        let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
+        match source {
+            // `#[doc = "..."]` attribute: offset past the opening quote.
+            Either::Left(attr) => {
+                let string = get_doc_string_in_attr(attr)?;
+                let text_range = string.open_quote_text_range()?;
+                let range = TextRange::at(
+                    text_range.end() + original_line_src_range.start() + relative_range.start(),
+                    string.syntax().text_range().len().min(range.len()),
+                );
+                Some(InFile { file_id, value: range })
+            }
+            // Doc comment: offset past the `///`/`//!` prefix.
+            Either::Right(comment) => {
+                let text_range = comment.syntax().text_range();
+                let range = TextRange::at(
+                    text_range.start()
+                        + TextSize::try_from(comment.prefix().len()).ok()?
+                        + original_line_src_range.start()
+                        + relative_range.start(),
+                    text_range.len().min(range.len()),
+                );
+                Some(InFile { file_id, value: range })
+            }
+        }
+    }
+}
+
+/// Extracts the string literal of a `#[doc = "..."]` attribute, if the
+/// attribute has that shape.
+fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
+    match it.expr() {
+        // #[doc = lit]
+        Some(ast::Expr::Literal(lit)) => match lit.kind() {
+            ast::LiteralKind::String(it) => Some(it),
+            _ => None,
+        },
+        // #[cfg_attr(..., doc = "", ...)]
+        None => {
+            // FIXME: See highlight injection for what to do here
+            None
+        }
+        _ => None,
+    }
+}
+
+/// Identifies an attribute by its index within the owner's attribute list,
+/// in the order produced by `collect_attrs` (outer attrs, then inner).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct AttrId {
+    pub(crate) ast_index: u32,
+}
+
+/// A single lowered attribute: its path (e.g. `doc`, `cfg`) plus an
+/// optional input (literal value or token tree).
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Attr {
+    pub(crate) id: AttrId,
+    pub(crate) path: Interned<ModPath>,
+    pub(crate) input: Option<Interned<AttrInput>>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AttrInput {
+    /// `#[attr = "string"]`
+    Literal(SmolStr),
+    /// `#[attr(subtree)]`
+    TokenTree(tt::Subtree, mbe::TokenMap),
+}
+
+impl fmt::Display for AttrInput {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
+            AttrInput::TokenTree(subtree, _) => subtree.fmt(f),
+        }
+    }
+}
+
+impl Attr {
+    /// Lowers an `ast::Meta` (the part of an attribute after `#[`/`#![`)
+    /// into an [`Attr`]. Returns `None` when the path is missing/unresolvable.
+    fn from_src(
+        db: &dyn DefDatabase,
+        ast: ast::Meta,
+        hygiene: &Hygiene,
+        id: AttrId,
+    ) -> Option<Attr> {
+        let path = Interned::new(ModPath::from_src(db.upcast(), ast.path()?, hygiene)?);
+        let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
+            let value = match lit.kind() {
+                ast::LiteralKind::String(string) => string.value()?.into(),
+                // Non-string literal: fall back to its raw token text.
+                _ => lit.syntax().first_token()?.text().trim_matches('"').into(),
+            };
+            Some(Interned::new(AttrInput::Literal(value)))
+        } else if let Some(tt) = ast.token_tree() {
+            let (tree, map) = syntax_node_to_token_tree(tt.syntax());
+            Some(Interned::new(AttrInput::TokenTree(tree, map)))
+        } else {
+            None
+        };
+        Some(Attr { id, path, input })
+    }
+
+    /// Lowers a token-tree (e.g. an attr split out of `cfg_attr`) by first
+    /// re-parsing it as a meta item, then delegating to [`Self::from_src`].
+    fn from_tt(
+        db: &dyn DefDatabase,
+        tt: &tt::Subtree,
+        hygiene: &Hygiene,
+        id: AttrId,
+    ) -> Option<Attr> {
+        let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+        let ast = ast::Meta::cast(parse.syntax_node())?;
+
+        Self::from_src(db, ast, hygiene, id)
+    }
+
+    /// The attribute's path, e.g. `doc` or `rustfmt::skip`.
+    pub fn path(&self) -> &ModPath {
+        &self.path
+    }
+}
+
+impl Attr {
+    /// #[path = "string"]
+    pub fn string_value(&self) -> Option<&SmolStr> {
+        match self.input.as_deref()? {
+            AttrInput::Literal(it) => Some(it),
+            _ => None,
+        }
+    }
+
+    /// #[path(ident)]
+    pub fn single_ident_value(&self) -> Option<&tt::Ident> {
+        match self.input.as_deref()? {
+            AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees {
+                [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+
+    /// #[path TokenTree]
+    pub fn token_tree_value(&self) -> Option<&Subtree> {
+        match self.input.as_deref()? {
+            AttrInput::TokenTree(subtree, _) => Some(subtree),
+            _ => None,
+        }
+    }
+
+    /// Parses this attribute as a token tree consisting of comma separated paths.
+    pub fn parse_path_comma_token_tree(&self) -> Option<impl Iterator<Item = ModPath> + '_> {
+        let args = self.token_tree_value()?;
+
+        if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) {
+            return None;
+        }
+        let paths = args
+            .token_trees
+            .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
+            .filter_map(|tts| {
+                if tts.is_empty() {
+                    return None;
+                }
+                // Non-ident tokens (e.g. `::` separators) are skipped; only
+                // the identifier segments form the resulting `ModPath`.
+                let segments = tts.iter().filter_map(|tt| match tt {
+                    tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()),
+                    _ => None,
+                });
+                Some(ModPath::from_segments(PathKind::Plain, segments))
+            });
+
+        Some(paths)
+    }
+}
+
+/// A lazy query over the attributes of one item that share a single-ident
+/// path `key` (created via [`Attrs::by_key`]).
+#[derive(Debug, Clone, Copy)]
+pub struct AttrQuery<'attr> {
+    attrs: &'attr Attrs,
+    key: &'static str,
+}
+
+impl<'attr> AttrQuery<'attr> {
+    /// Token-tree inputs of all matching attributes, e.g. the `(...)` of
+    /// `#[key(...)]`.
+    pub fn tt_values(self) -> impl Iterator<Item = &'attr Subtree> {
+        self.attrs().filter_map(|attr| attr.token_tree_value())
+    }
+
+    /// The first literal value among matching attributes (`#[key = "..."]`).
+    pub fn string_value(self) -> Option<&'attr SmolStr> {
+        self.attrs().find_map(|attr| attr.string_value())
+    }
+
+    /// Whether at least one matching attribute exists.
+    pub fn exists(self) -> bool {
+        self.attrs().next().is_some()
+    }
+
+    /// All attributes whose path is exactly the single identifier `key`.
+    pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
+        let key = self.key;
+        self.attrs
+            .iter()
+            .filter(move |attr| attr.path.as_ident().map_or(false, |s| s.to_smol_str() == key))
+    }
+
+    /// Find string value for a specific key inside token tree
+    ///
+    /// ```ignore
+    /// #[doc(html_root_url = "url")]
+    ///       ^^^^^^^^^^^^^ key
+    /// ```
+    pub fn find_string_value_in_tt(self, key: &'attr str) -> Option<&SmolStr> {
+        self.tt_values().find_map(|tt| {
+            // Skip ahead to `key`, then take the token two positions later
+            // (past the `=`), which should be the literal value.
+            let name = tt.token_trees.iter()
+                .skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text, ..} )) if text == key))
+                .nth(2);
+
+            match name {
+                Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ref text, ..}))) => Some(text),
+                _ => None
+            }
+        })
+    }
+}
+
+/// Fetches the raw (unfiltered) attributes of an item-tree node.
+fn attrs_from_item_tree<N: ItemTreeNode>(id: ItemTreeId<N>, db: &dyn DefDatabase) -> RawAttrs {
+    let tree = id.item_tree(db);
+    let mod_item = N::id_to_mod_item(id.value);
+    tree.raw_attrs(mod_item.into()).clone()
+}
+
+/// Enumerates all attributes and doc comments of `owner` — outer ones
+/// first, then inner ones — assigning each its [`AttrId`]. This ordering is
+/// what `AttrId::ast_index` indexes into (see `AttrSourceMap`).
+fn collect_attrs(
+    owner: &dyn ast::HasAttrs,
+) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
+    let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten();
+    let outer_attrs =
+        ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el {
+            Either::Left(attr) => attr.kind().is_outer(),
+            Either::Right(comment) => comment.is_outer(),
+        });
+    outer_attrs
+        .chain(inner_attrs)
+        .enumerate()
+        .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr))
+}
+
+/// Query implementation: maps each enum variant of `def` to an [`AstPtr`]
+/// for its syntax node (used by `AttrsWithOwner::source_map`).
+pub(crate) fn variants_attrs_source_map(
+    db: &dyn DefDatabase,
+    def: EnumId,
+) -> Arc<ArenaMap<LocalEnumVariantId, AstPtr<ast::Variant>>> {
+    let mut res = ArenaMap::default();
+    let child_source = def.child_source(db);
+
+    for (idx, variant) in child_source.value.iter() {
+        res.insert(idx, AstPtr::new(variant));
+    }
+
+    Arc::new(res)
+}
+
+/// Query implementation: maps each field of `def` to an [`AstPtr`] for its
+/// syntax node (tuple field or record field).
+pub(crate) fn fields_attrs_source_map(
+    db: &dyn DefDatabase,
+    def: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>> {
+    let mut res = ArenaMap::default();
+    let child_source = def.child_source(db);
+
+    for (idx, variant) in child_source.value.iter() {
+        res.insert(
+            idx,
+            variant
+                .as_ref()
+                .either(|l| Either::Left(AstPtr::new(l)), |r| Either::Right(AstPtr::new(r))),
+        );
+    }
+
+    Arc::new(res)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
new file mode 100644
index 000000000..080a307b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
@@ -0,0 +1,471 @@
+//! Defines `Body`: a lowered representation of bodies of functions, statics and
+//! consts.
+mod lower;
+#[cfg(test)]
+mod tests;
+pub mod scope;
+
+use std::{ops::Index, sync::Arc};
+
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use drop_bomb::DropBomb;
+use either::Either;
+use hir_expand::{hygiene::Hygiene, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId};
+use la_arena::{Arena, ArenaMap};
+use limit::Limit;
+use profile::Count;
+use rustc_hash::FxHashMap;
+use syntax::{ast, AstPtr, SyntaxNodePtr};
+
+use crate::{
+ attr::{Attrs, RawAttrs},
+ db::DefDatabase,
+ expr::{dummy_expr_id, Expr, ExprId, Label, LabelId, Pat, PatId},
+ item_scope::BuiltinShadowMode,
+ macro_id_to_def_id,
+ nameres::DefMap,
+ path::{ModPath, Path},
+ src::HasSource,
+ AsMacroCall, BlockId, DefWithBodyId, HasModule, LocalModuleId, Lookup, MacroId, ModuleId,
+ UnresolvedMacro,
+};
+
+pub use lower::LowerCtx;
+
+/// A subset of Expander that only deals with cfg attributes. We only need it to
+/// avoid cyclic queries in crate def map during enum processing.
+#[derive(Debug)]
+pub(crate) struct CfgExpander {
+    /// Cfg options of `krate`, copied from the crate graph.
+    cfg_options: CfgOptions,
+    /// Hygiene info for the file currently being processed.
+    hygiene: Hygiene,
+    /// The crate the processed items belong to.
+    krate: CrateId,
+}
+
+/// Expands macro calls while lowering bodies, tracking the current file and
+/// the macro expansion depth.
+#[derive(Debug)]
+pub struct Expander {
+    /// Handles cfg evaluation, hygiene and attribute parsing for the current file.
+    cfg_expander: CfgExpander,
+    /// `DefMap` used to resolve macro paths in `resolve_path_as_macro`.
+    def_map: Arc<DefMap>,
+    /// The file (or macro expansion) currently being processed.
+    current_file_id: HirFileId,
+    module: LocalModuleId,
+    /// Number of nested macro expansions currently entered; checked against the
+    /// crate's recursion limit in `enter_expand`.
+    recursion_limit: usize,
+}
+
+impl CfgExpander {
+    /// Creates an expander for `krate`, with hygiene info for `current_file_id`
+    /// and the crate's cfg options taken from the crate graph.
+    pub(crate) fn new(
+        db: &dyn DefDatabase,
+        current_file_id: HirFileId,
+        krate: CrateId,
+    ) -> CfgExpander {
+        let hygiene = Hygiene::new(db.upcast(), current_file_id);
+        let cfg_options = db.crate_graph()[krate].cfg_options.clone();
+        CfgExpander { cfg_options, hygiene, krate }
+    }
+
+    /// Parses the attributes of `owner` and cfg-filters them for this crate.
+    pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
+        RawAttrs::new(db, owner, &self.hygiene).filter(db, self.krate)
+    }
+
+    /// Returns whether `owner` is enabled under this crate's cfg options.
+    pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool {
+        let attrs = self.parse_attrs(db, owner);
+        attrs.is_cfg_enabled(&self.cfg_options)
+    }
+}
+
+impl Expander {
+    /// Creates an expander rooted at `current_file_id`, resolving macro paths
+    /// relative to `module`.
+    pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
+        let cfg_expander = CfgExpander::new(db, current_file_id, module.krate);
+        let def_map = module.def_map(db);
+        Expander {
+            cfg_expander,
+            def_map,
+            current_file_id,
+            module: module.local_id,
+            recursion_limit: 0,
+        }
+    }
+
+    /// Resolves and expands `macro_call`. On successful expansion the returned
+    /// `Mark` must later be passed back to `exit` to restore the pre-expansion
+    /// state. `Err(UnresolvedMacro)` means the macro path did not resolve.
+    pub fn enter_expand<T: ast::AstNode>(
+        &mut self,
+        db: &dyn DefDatabase,
+        macro_call: ast::MacroCall,
+    ) -> Result<ExpandResult<Option<(Mark, T)>>, UnresolvedMacro> {
+        // Bail out with an error (not a stack overflow) once the recursion
+        // limit would be exceeded.
+        if self.recursion_limit(db).check(self.recursion_limit + 1).is_err() {
+            cov_mark::hit!(your_stack_belongs_to_me);
+            return Ok(ExpandResult::only_err(ExpandError::Other(
+                "reached recursion limit during macro expansion".into(),
+            )));
+        }
+
+        let macro_call = InFile::new(self.current_file_id, &macro_call);
+
+        let resolver =
+            |path| self.resolve_path_as_macro(db, &path).map(|it| macro_id_to_def_id(db, it));
+
+        // Only the first reported resolution error is kept.
+        let mut err = None;
+        let call_id =
+            macro_call.as_call_id_with_errors(db, self.def_map.krate(), resolver, &mut |e| {
+                err.get_or_insert(e);
+            })?;
+        let call_id = match call_id {
+            Ok(it) => it,
+            Err(_) => {
+                return Ok(ExpandResult { value: None, err });
+            }
+        };
+
+        Ok(self.enter_expand_inner(db, call_id, err))
+    }
+
+    /// Like `enter_expand`, but for a macro call that has already been resolved
+    /// to a `MacroCallId`.
+    pub fn enter_expand_id<T: ast::AstNode>(
+        &mut self,
+        db: &dyn DefDatabase,
+        call_id: MacroCallId,
+    ) -> ExpandResult<Option<(Mark, T)>> {
+        self.enter_expand_inner(db, call_id, None)
+    }
+
+    /// Shared tail of `enter_expand`/`enter_expand_id`: parses the expansion,
+    /// casts it to `T`, and on success switches the expander into the
+    /// expansion's file.
+    fn enter_expand_inner<T: ast::AstNode>(
+        &mut self,
+        db: &dyn DefDatabase,
+        call_id: MacroCallId,
+        mut err: Option<ExpandError>,
+    ) -> ExpandResult<Option<(Mark, T)>> {
+        if err.is_none() {
+            err = db.macro_expand_error(call_id);
+        }
+
+        let file_id = call_id.as_file();
+
+        let raw_node = match db.parse_or_expand(file_id) {
+            Some(it) => it,
+            None => {
+                // Only `None` if the macro expansion produced no usable AST.
+                if err.is_none() {
+                    tracing::warn!("no error despite `parse_or_expand` failing");
+                }
+
+                return ExpandResult::only_err(err.unwrap_or_else(|| {
+                    ExpandError::Other("failed to parse macro invocation".into())
+                }));
+            }
+        };
+
+        let node = match T::cast(raw_node) {
+            Some(it) => it,
+            None => {
+                // This can happen without being an error, so only forward previous errors.
+                return ExpandResult { value: None, err };
+            }
+        };
+
+        tracing::debug!("macro expansion {:#?}", node.syntax());
+
+        // Successful expansion: bump the depth and switch hygiene/file id to
+        // the expansion's file until `exit` is called with the returned `Mark`.
+        self.recursion_limit += 1;
+        let mark =
+            Mark { file_id: self.current_file_id, bomb: DropBomb::new("expansion mark dropped") };
+        self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
+        self.current_file_id = file_id;
+
+        ExpandResult { value: Some((mark, node)), err }
+    }
+
+    /// Leaves a macro expansion previously entered via `enter_expand*`,
+    /// restoring the file id and hygiene captured in `mark` and defusing its
+    /// drop bomb.
+    pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
+        self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+        self.current_file_id = mark.file_id;
+        self.recursion_limit -= 1;
+        mark.bomb.defuse();
+    }
+
+    /// Attaches the current file id to `value`.
+    pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
+        InFile { file_id: self.current_file_id, value }
+    }
+
+    /// Parses and cfg-filters the attributes of `owner` for the current file.
+    pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
+        self.cfg_expander.parse_attrs(db, owner)
+    }
+
+    pub(crate) fn cfg_options(&self) -> &CfgOptions {
+        &self.cfg_expander.cfg_options
+    }
+
+    pub fn current_file_id(&self) -> HirFileId {
+        self.current_file_id
+    }
+
+    /// Lowers an AST path using the current hygiene info.
+    fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
+        let ctx = LowerCtx::with_hygiene(db, &self.cfg_expander.hygiene);
+        Path::from_src(path, &ctx)
+    }
+
+    /// Resolves `path` to a macro visible from the current module, if any.
+    fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroId> {
+        self.def_map.resolve_path(db, self.module, path, BuiltinShadowMode::Other).0.take_macros()
+    }
+
+    /// The crate's configured macro recursion limit (capped at 32 under `cfg(test)`).
+    fn recursion_limit(&self, db: &dyn DefDatabase) -> Limit {
+        let limit = db.crate_limits(self.cfg_expander.krate).recursion_limit as _;
+
+        #[cfg(not(test))]
+        return Limit::new(limit);
+
+        // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
+        #[cfg(test)]
+        return Limit::new(std::cmp::min(32, limit));
+    }
+}
+
+/// Token proving that a macro expansion was entered; it must be handed back to
+/// `Expander::exit` to restore the expander's previous file, which is enforced
+/// by the contained `DropBomb`.
+#[derive(Debug)]
+pub struct Mark {
+    file_id: HirFileId,
+    bomb: DropBomb,
+}
+
+/// The body of an item (function, const etc.).
+#[derive(Debug, Eq, PartialEq)]
+pub struct Body {
+    pub exprs: Arena<Expr>,
+    pub pats: Arena<Pat>,
+    /// Maps a pattern in an or-pattern to the group of patterns sharing the
+    /// same ident; see `pattern_representative` and `ident_patterns_for`.
+    pub or_pats: FxHashMap<PatId, Arc<[PatId]>>,
+    pub labels: Arena<Label>,
+    /// The patterns for the function's parameters. While the parameter types are
+    /// part of the function signature, the patterns are not (they don't change
+    /// the external type of the function).
+    ///
+    /// If this `Body` is for the body of a constant, this will just be
+    /// empty.
+    pub params: Vec<PatId>,
+    /// The `ExprId` of the actual body expression.
+    pub body_expr: ExprId,
+    /// Block expressions in this body that may contain inner items.
+    block_scopes: Vec<BlockId>,
+    _c: Count<Self>,
+}
+
+/// Pointer to an expression's syntax node; `ExprSource` pairs it with its file.
+pub type ExprPtr = AstPtr<ast::Expr>;
+pub type ExprSource = InFile<ExprPtr>;
+
+/// A pattern's syntax is either a real pattern (`Left`) or the implicit `self`
+/// parameter (`Right`).
+pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
+pub type PatSource = InFile<PatPtr>;
+
+/// Pointer to a label's syntax node; `LabelSource` pairs it with its file.
+pub type LabelPtr = AstPtr<ast::Label>;
+pub type LabelSource = InFile<LabelPtr>;
+/// An item body together with the mapping from syntax nodes to HIR expression
+/// IDs. This is needed to go from e.g. a position in a file to the HIR
+/// expression containing it; but for type inference etc., we want to operate on
+/// a structure that is agnostic to the actual positions of expressions in the
+/// file, so that we don't recompute types whenever some whitespace is typed.
+///
+/// One complication here is that, due to macro expansion, a single `Body` might
+/// be spread across several files. So, for each ExprId and PatId, we record
+/// both the HirFileId and the position inside the file. However, we only store
+/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle
+/// this properly for macros.
+#[derive(Default, Debug, Eq, PartialEq)]
+pub struct BodySourceMap {
+    expr_map: FxHashMap<ExprSource, ExprId>,
+    expr_map_back: ArenaMap<ExprId, Result<ExprSource, SyntheticSyntax>>,
+
+    pat_map: FxHashMap<PatSource, PatId>,
+    pat_map_back: ArenaMap<PatId, Result<PatSource, SyntheticSyntax>>,
+
+    label_map: FxHashMap<LabelSource, LabelId>,
+    label_map_back: ArenaMap<LabelId, LabelSource>,
+
+    /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
+    /// Instead, we use id of expression (`92`) to identify the field.
+    field_map: FxHashMap<InFile<AstPtr<ast::RecordExprField>>, ExprId>,
+    field_map_back: FxHashMap<ExprId, InFile<AstPtr<ast::RecordExprField>>>,
+
+    /// Maps macro calls in this body to the file id of their expansion.
+    expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
+
+    /// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
+    /// the source map (since they're just as volatile).
+    diagnostics: Vec<BodyDiagnostic>,
+}
+
+/// Marker for HIR nodes that have no corresponding syntax node, i.e. that were
+/// produced by desugaring during lowering.
+#[derive(Default, Debug, Eq, PartialEq, Clone, Copy)]
+pub struct SyntheticSyntax;
+
+/// A problem encountered while lowering a body; accumulated in
+/// `BodySourceMap::diagnostics`.
+#[derive(Debug, Eq, PartialEq)]
+pub enum BodyDiagnostic {
+    InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions },
+    MacroError { node: InFile<AstPtr<ast::MacroCall>>, message: String },
+    UnresolvedProcMacro { node: InFile<AstPtr<ast::MacroCall>>, krate: CrateId },
+    UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath },
+}
+
+impl Body {
+    /// Query implementation: lowers the body of `def` together with its source
+    /// map. Parameters are only present for functions.
+    pub(crate) fn body_with_source_map_query(
+        db: &dyn DefDatabase,
+        def: DefWithBodyId,
+    ) -> (Arc<Body>, Arc<BodySourceMap>) {
+        let _p = profile::span("body_with_source_map_query");
+        let mut params = None;
+
+        let (file_id, module, body) = match def {
+            DefWithBodyId::FunctionId(f) => {
+                let f = f.lookup(db);
+                let src = f.source(db);
+                params = src.value.param_list();
+                (src.file_id, f.module(db), src.value.body().map(ast::Expr::from))
+            }
+            DefWithBodyId::ConstId(c) => {
+                let c = c.lookup(db);
+                let src = c.source(db);
+                (src.file_id, c.module(db), src.value.body())
+            }
+            DefWithBodyId::StaticId(s) => {
+                let s = s.lookup(db);
+                let src = s.source(db);
+                (src.file_id, s.module(db), src.value.body())
+            }
+        };
+        let expander = Expander::new(db, file_id, module);
+        let (mut body, source_map) = Body::new(db, expander, params, body);
+        body.shrink_to_fit();
+        (Arc::new(body), Arc::new(source_map))
+    }
+
+    /// Query implementation: the body alone; delegates to `body_with_source_map`.
+    pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<Body> {
+        db.body_with_source_map(def).0
+    }
+
+    /// Returns an iterator over all block expressions in this body that define inner items.
+    pub fn blocks<'a>(
+        &'a self,
+        db: &'a dyn DefDatabase,
+    ) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + '_ {
+        self.block_scopes
+            .iter()
+            .map(move |&block| (block, db.block_def_map(block).expect("block ID without DefMap")))
+    }
+
+    /// The canonical representative of the or-pattern group `pat` belongs to,
+    /// or `pat` itself if it is not part of one.
+    pub fn pattern_representative(&self, pat: PatId) -> PatId {
+        self.or_pats.get(&pat).and_then(|pats| pats.first().copied()).unwrap_or(pat)
+    }
+
+    /// Retrieves all ident patterns this pattern shares the ident with.
+    pub fn ident_patterns_for<'slf>(&'slf self, pat: &'slf PatId) -> &'slf [PatId] {
+        match self.or_pats.get(pat) {
+            Some(pats) => &**pats,
+            None => std::slice::from_ref(pat),
+        }
+    }
+
+    /// Lowers `params` and `body` into a `Body` plus its source map.
+    fn new(
+        db: &dyn DefDatabase,
+        expander: Expander,
+        params: Option<ast::ParamList>,
+        body: Option<ast::Expr>,
+    ) -> (Body, BodySourceMap) {
+        lower::lower(db, expander, params, body)
+    }
+
+    /// Releases excess capacity in every internal collection. The exhaustive
+    /// destructuring ensures new fields are not forgotten here.
+    fn shrink_to_fit(&mut self) {
+        let Self { _c: _, body_expr: _, block_scopes, or_pats, exprs, labels, params, pats } = self;
+        block_scopes.shrink_to_fit();
+        or_pats.shrink_to_fit();
+        exprs.shrink_to_fit();
+        labels.shrink_to_fit();
+        params.shrink_to_fit();
+        pats.shrink_to_fit();
+    }
+}
+
+impl Default for Body {
+    /// An empty body whose `body_expr` is a dummy id.
+    fn default() -> Self {
+        Self {
+            body_expr: dummy_expr_id(),
+            exprs: Default::default(),
+            pats: Default::default(),
+            or_pats: Default::default(),
+            labels: Default::default(),
+            params: Default::default(),
+            block_scopes: Default::default(),
+            _c: Default::default(),
+        }
+    }
+}
+
+// Allow direct `body[id]` lookup into each of the three arenas.
+impl Index<ExprId> for Body {
+    type Output = Expr;
+
+    fn index(&self, expr: ExprId) -> &Expr {
+        &self.exprs[expr]
+    }
+}
+
+impl Index<PatId> for Body {
+    type Output = Pat;
+
+    fn index(&self, pat: PatId) -> &Pat {
+        &self.pats[pat]
+    }
+}
+
+impl Index<LabelId> for Body {
+    type Output = Label;
+
+    fn index(&self, label: LabelId) -> &Label {
+        &self.labels[label]
+    }
+}
+
+// FIXME: Change `node_` prefix to something more reasonable.
+// Perhaps `expr_syntax` and `expr_id`?
+impl BodySourceMap {
+    /// The syntax this expression was lowered from, or `Err(SyntheticSyntax)`
+    /// if it was produced by desugaring.
+    pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> {
+        self.expr_map_back[expr].clone()
+    }
+
+    /// Maps an AST expression back to its `ExprId`, if one was recorded.
+    pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprId> {
+        let src = node.map(AstPtr::new);
+        self.expr_map.get(&src).cloned()
+    }
+
+    /// File id of the expansion of `node`, if that macro call was expanded
+    /// while lowering this body.
+    pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<HirFileId> {
+        let src = node.map(AstPtr::new);
+        self.expansions.get(&src).cloned()
+    }
+
+    pub fn pat_syntax(&self, pat: PatId) -> Result<PatSource, SyntheticSyntax> {
+        self.pat_map_back[pat].clone()
+    }
+
+    pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
+        let src = node.map(|it| Either::Left(AstPtr::new(it)));
+        self.pat_map.get(&src).cloned()
+    }
+
+    /// The pattern synthesized for a `self` parameter, if any.
+    pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option<PatId> {
+        let src = node.map(|it| Either::Right(AstPtr::new(it)));
+        self.pat_map.get(&src).cloned()
+    }
+
+    pub fn label_syntax(&self, label: LabelId) -> LabelSource {
+        self.label_map_back[label].clone()
+    }
+
+    pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
+        let src = node.map(AstPtr::new);
+        self.label_map.get(&src).cloned()
+    }
+
+    pub fn field_syntax(&self, expr: ExprId) -> InFile<AstPtr<ast::RecordExprField>> {
+        self.field_map_back[&expr].clone()
+    }
+    pub fn node_field(&self, node: InFile<&ast::RecordExprField>) -> Option<ExprId> {
+        let src = node.map(AstPtr::new);
+        self.field_map.get(&src).cloned()
+    }
+
+    /// The expression a macro-call expression expanded to, if recorded.
+    pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprId> {
+        let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::MacroExpr>).map(AstPtr::upcast);
+        self.expr_map.get(&src).copied()
+    }
+
+    /// Get a reference to the body source map's diagnostics.
+    pub fn diagnostics(&self) -> &[BodyDiagnostic] {
+        &self.diagnostics
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
new file mode 100644
index 000000000..66f9c24e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -0,0 +1,1023 @@
+//! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr`
+//! representation.
+
+use std::{mem, sync::Arc};
+
+use either::Either;
+use hir_expand::{
+ ast_id_map::AstIdMap,
+ hygiene::Hygiene,
+ name::{name, AsName, Name},
+ AstId, ExpandError, HirFileId, InFile,
+};
+use la_arena::Arena;
+use once_cell::unsync::OnceCell;
+use profile::Count;
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{
+ self, ArrayExprKind, AstChildren, HasArgList, HasLoopBody, HasName, LiteralKind,
+ SlicePatComponents,
+ },
+ AstNode, AstPtr, SyntaxNodePtr,
+};
+
+use crate::{
+ adt::StructKind,
+ body::{Body, BodySourceMap, Expander, LabelSource, PatPtr, SyntheticSyntax},
+ body::{BodyDiagnostic, ExprSource, PatSource},
+ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
+ db::DefDatabase,
+ expr::{
+ dummy_expr_id, Array, BindingAnnotation, Expr, ExprId, FloatTypeWrapper, Label, LabelId,
+ Literal, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
+ },
+ intern::Interned,
+ item_scope::BuiltinShadowMode,
+ path::{GenericArgs, Path},
+ type_ref::{Mutability, Rawness, TypeRef},
+ AdtId, BlockLoc, ModuleDefId, UnresolvedMacro,
+};
+
+/// Context used while lowering paths and types: the database, hygiene info,
+/// and a lazily-initialized `AstIdMap` (absent when constructed via
+/// `with_hygiene`).
+pub struct LowerCtx<'a> {
+    pub db: &'a dyn DefDatabase,
+    hygiene: Hygiene,
+    ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
+}
+
+impl<'a> LowerCtx<'a> {
+    /// Creates a context for `file_id`; the `AstIdMap` is computed on first use.
+    pub fn new(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
+        LowerCtx {
+            db,
+            hygiene: Hygiene::new(db.upcast(), file_id),
+            ast_id_map: Some((file_id, OnceCell::new())),
+        }
+    }
+
+    /// Creates a context from hygiene info alone; `ast_id` will return `None`.
+    pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self {
+        LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None }
+    }
+
+    pub(crate) fn hygiene(&self) -> &Hygiene {
+        &self.hygiene
+    }
+
+    /// Lowers an AST path using this context's hygiene info.
+    pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
+        Path::from_src(ast, self)
+    }
+
+    /// The `AstId` of `item` in this context's file, or `None` when the
+    /// context was built via `with_hygiene` (no file id available).
+    pub(crate) fn ast_id<N: AstNode>(&self, db: &dyn DefDatabase, item: &N) -> Option<AstId<N>> {
+        let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?;
+        let ast_id_map = ast_id_map.get_or_init(|| db.ast_id_map(file_id));
+        Some(InFile::new(file_id, ast_id_map.ast_id(item)))
+    }
+}
+
+/// Entry point of body lowering: collects `params` and `body` into a `Body`
+/// and its `BodySourceMap` using a fresh `ExprCollector`.
+pub(super) fn lower(
+    db: &dyn DefDatabase,
+    expander: Expander,
+    params: Option<ast::ParamList>,
+    body: Option<ast::Expr>,
+) -> (Body, BodySourceMap) {
+    ExprCollector {
+        db,
+        source_map: BodySourceMap::default(),
+        ast_id_map: db.ast_id_map(expander.current_file_id),
+        body: Body {
+            exprs: Arena::default(),
+            pats: Arena::default(),
+            labels: Arena::default(),
+            params: Vec::new(),
+            // Replaced with the real body expression in `ExprCollector::collect`.
+            body_expr: dummy_expr_id(),
+            block_scopes: Vec::new(),
+            _c: Count::new(),
+            or_pats: Default::default(),
+        },
+        expander,
+        name_to_pat_grouping: Default::default(),
+        is_lowering_inside_or_pat: false,
+        is_lowering_assignee_expr: false,
+    }
+    .collect(params, body)
+}
+
+/// Walks the AST of a body, accumulating the lowered `Body` and its
+/// `BodySourceMap`.
+struct ExprCollector<'a> {
+    db: &'a dyn DefDatabase,
+    /// Expands macro calls encountered while lowering.
+    expander: Expander,
+    /// `AstIdMap` of the file currently being lowered; swapped when entering a
+    /// macro expansion.
+    ast_id_map: Arc<AstIdMap>,
+    body: Body,
+    source_map: BodySourceMap,
+    // a poor-mans union-find?
+    name_to_pat_grouping: FxHashMap<Name, Vec<PatId>>,
+    is_lowering_inside_or_pat: bool,
+    /// True while lowering the left-hand side of a plain `=` assignment.
+    is_lowering_assignee_expr: bool,
+}
+
+impl ExprCollector<'_> {
+ fn collect(
+ mut self,
+ param_list: Option<ast::ParamList>,
+ body: Option<ast::Expr>,
+ ) -> (Body, BodySourceMap) {
+ if let Some(param_list) = param_list {
+ if let Some(self_param) = param_list.self_param() {
+ let ptr = AstPtr::new(&self_param);
+ let param_pat = self.alloc_pat(
+ Pat::Bind {
+ name: name![self],
+ mode: BindingAnnotation::new(
+ self_param.mut_token().is_some() && self_param.amp_token().is_none(),
+ false,
+ ),
+ subpat: None,
+ },
+ Either::Right(ptr),
+ );
+ self.body.params.push(param_pat);
+ }
+
+ for pat in param_list.params().filter_map(|param| param.pat()) {
+ let param_pat = self.collect_pat(pat);
+ self.body.params.push(param_pat);
+ }
+ };
+
+ self.body.body_expr = self.collect_expr_opt(body);
+ (self.body, self.source_map)
+ }
+
+ fn ctx(&self) -> LowerCtx<'_> {
+ LowerCtx::new(self.db, self.expander.current_file_id)
+ }
+
+ fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId {
+ let src = self.expander.to_source(ptr);
+ let id = self.make_expr(expr, Ok(src.clone()));
+ self.source_map.expr_map.insert(src, id);
+ id
+ }
+ // desugared exprs don't have ptr, that's wrong and should be fixed
+ // somehow.
+ fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
+ self.make_expr(expr, Err(SyntheticSyntax))
+ }
+ fn missing_expr(&mut self) -> ExprId {
+ self.alloc_expr_desugared(Expr::Missing)
+ }
+ fn make_expr(&mut self, expr: Expr, src: Result<ExprSource, SyntheticSyntax>) -> ExprId {
+ let id = self.body.exprs.alloc(expr);
+ self.source_map.expr_map_back.insert(id, src);
+ id
+ }
+
+ fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
+ let src = self.expander.to_source(ptr);
+ let id = self.make_pat(pat, Ok(src.clone()));
+ self.source_map.pat_map.insert(src, id);
+ id
+ }
+ fn missing_pat(&mut self) -> PatId {
+ self.make_pat(Pat::Missing, Err(SyntheticSyntax))
+ }
+ fn make_pat(&mut self, pat: Pat, src: Result<PatSource, SyntheticSyntax>) -> PatId {
+ let id = self.body.pats.alloc(pat);
+ self.source_map.pat_map_back.insert(id, src);
+ id
+ }
+
+ fn alloc_label(&mut self, label: Label, ptr: AstPtr<ast::Label>) -> LabelId {
+ let src = self.expander.to_source(ptr);
+ let id = self.make_label(label, src.clone());
+ self.source_map.label_map.insert(src, id);
+ id
+ }
+ fn make_label(&mut self, label: Label, src: LabelSource) -> LabelId {
+ let id = self.body.labels.alloc(label);
+ self.source_map.label_map_back.insert(id, src);
+ id
+ }
+
+ fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
+ self.maybe_collect_expr(expr).unwrap_or_else(|| self.missing_expr())
+ }
+
+ /// Returns `None` if and only if the expression is `#[cfg]`d out.
+ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
+ let syntax_ptr = AstPtr::new(&expr);
+ self.check_cfg(&expr)?;
+
+ Some(match expr {
+ ast::Expr::IfExpr(e) => {
+ let then_branch = self.collect_block_opt(e.then_branch());
+
+ let else_branch = e.else_branch().map(|b| match b {
+ ast::ElseBranch::Block(it) => self.collect_block(it),
+ ast::ElseBranch::IfExpr(elif) => {
+ let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap();
+ self.collect_expr(expr)
+ }
+ });
+
+ let condition = self.collect_expr_opt(e.condition());
+
+ self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr)
+ }
+ ast::Expr::LetExpr(e) => {
+ let pat = self.collect_pat_opt(e.pat());
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Let { pat, expr }, syntax_ptr)
+ }
+ ast::Expr::BlockExpr(e) => match e.modifier() {
+ Some(ast::BlockModifier::Try(_)) => {
+ let body = self.collect_block(e);
+ self.alloc_expr(Expr::TryBlock { body }, syntax_ptr)
+ }
+ Some(ast::BlockModifier::Unsafe(_)) => {
+ let body = self.collect_block(e);
+ self.alloc_expr(Expr::Unsafe { body }, syntax_ptr)
+ }
+ // FIXME: we need to record these effects somewhere...
+ Some(ast::BlockModifier::Label(label)) => {
+ let label = self.collect_label(label);
+ let res = self.collect_block(e);
+ match &mut self.body.exprs[res] {
+ Expr::Block { label: block_label, .. } => {
+ *block_label = Some(label);
+ }
+ _ => unreachable!(),
+ }
+ res
+ }
+ Some(ast::BlockModifier::Async(_)) => {
+ let body = self.collect_block(e);
+ self.alloc_expr(Expr::Async { body }, syntax_ptr)
+ }
+ Some(ast::BlockModifier::Const(_)) => {
+ let body = self.collect_block(e);
+ self.alloc_expr(Expr::Const { body }, syntax_ptr)
+ }
+ None => self.collect_block(e),
+ },
+ ast::Expr::LoopExpr(e) => {
+ let label = e.label().map(|label| self.collect_label(label));
+ let body = self.collect_block_opt(e.loop_body());
+ self.alloc_expr(Expr::Loop { body, label }, syntax_ptr)
+ }
+ ast::Expr::WhileExpr(e) => {
+ let label = e.label().map(|label| self.collect_label(label));
+ let body = self.collect_block_opt(e.loop_body());
+
+ let condition = self.collect_expr_opt(e.condition());
+
+ self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr)
+ }
+ ast::Expr::ForExpr(e) => {
+ let label = e.label().map(|label| self.collect_label(label));
+ let iterable = self.collect_expr_opt(e.iterable());
+ let pat = self.collect_pat_opt(e.pat());
+ let body = self.collect_block_opt(e.loop_body());
+ self.alloc_expr(Expr::For { iterable, pat, body, label }, syntax_ptr)
+ }
+ ast::Expr::CallExpr(e) => {
+ let callee = self.collect_expr_opt(e.expr());
+ let args = if let Some(arg_list) = e.arg_list() {
+ arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect()
+ } else {
+ Box::default()
+ };
+ self.alloc_expr(
+ Expr::Call { callee, args, is_assignee_expr: self.is_lowering_assignee_expr },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::MethodCallExpr(e) => {
+ let receiver = self.collect_expr_opt(e.receiver());
+ let args = if let Some(arg_list) = e.arg_list() {
+ arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect()
+ } else {
+ Box::default()
+ };
+ let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
+ let generic_args = e
+ .generic_arg_list()
+ .and_then(|it| GenericArgs::from_ast(&self.ctx(), it))
+ .map(Box::new);
+ self.alloc_expr(
+ Expr::MethodCall { receiver, method_name, args, generic_args },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::MatchExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ let arms = if let Some(match_arm_list) = e.match_arm_list() {
+ match_arm_list
+ .arms()
+ .filter_map(|arm| {
+ self.check_cfg(&arm).map(|()| MatchArm {
+ pat: self.collect_pat_opt(arm.pat()),
+ expr: self.collect_expr_opt(arm.expr()),
+ guard: arm
+ .guard()
+ .map(|guard| self.collect_expr_opt(guard.condition())),
+ })
+ })
+ .collect()
+ } else {
+ Box::default()
+ };
+ self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr)
+ }
+ ast::Expr::PathExpr(e) => {
+ let path = e
+ .path()
+ .and_then(|path| self.expander.parse_path(self.db, path))
+ .map(Expr::Path)
+ .unwrap_or(Expr::Missing);
+ self.alloc_expr(path, syntax_ptr)
+ }
+ ast::Expr::ContinueExpr(e) => self.alloc_expr(
+ Expr::Continue { label: e.lifetime().map(|l| Name::new_lifetime(&l)) },
+ syntax_ptr,
+ ),
+ ast::Expr::BreakExpr(e) => {
+ let expr = e.expr().map(|e| self.collect_expr(e));
+ self.alloc_expr(
+ Expr::Break { expr, label: e.lifetime().map(|l| Name::new_lifetime(&l)) },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::ParenExpr(e) => {
+ let inner = self.collect_expr_opt(e.expr());
+ // make the paren expr point to the inner expression as well
+ let src = self.expander.to_source(syntax_ptr);
+ self.source_map.expr_map.insert(src, inner);
+ inner
+ }
+ ast::Expr::ReturnExpr(e) => {
+ let expr = e.expr().map(|e| self.collect_expr(e));
+ self.alloc_expr(Expr::Return { expr }, syntax_ptr)
+ }
+ ast::Expr::YieldExpr(e) => {
+ let expr = e.expr().map(|e| self.collect_expr(e));
+ self.alloc_expr(Expr::Yield { expr }, syntax_ptr)
+ }
+ ast::Expr::RecordExpr(e) => {
+ let path =
+ e.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+ let is_assignee_expr = self.is_lowering_assignee_expr;
+ let record_lit = if let Some(nfl) = e.record_expr_field_list() {
+ let fields = nfl
+ .fields()
+ .filter_map(|field| {
+ self.check_cfg(&field)?;
+
+ let name = field.field_name()?.as_name();
+
+ let expr = match field.expr() {
+ Some(e) => self.collect_expr(e),
+ None => self.missing_expr(),
+ };
+ let src = self.expander.to_source(AstPtr::new(&field));
+ self.source_map.field_map.insert(src.clone(), expr);
+ self.source_map.field_map_back.insert(expr, src);
+ Some(RecordLitField { name, expr })
+ })
+ .collect();
+ let spread = nfl.spread().map(|s| self.collect_expr(s));
+ let ellipsis = nfl.dotdot_token().is_some();
+ Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr }
+ } else {
+ Expr::RecordLit {
+ path,
+ fields: Box::default(),
+ spread: None,
+ ellipsis: false,
+ is_assignee_expr,
+ }
+ };
+
+ self.alloc_expr(record_lit, syntax_ptr)
+ }
+ ast::Expr::FieldExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ let name = match e.field_access() {
+ Some(kind) => kind.as_name(),
+ _ => Name::missing(),
+ };
+ self.alloc_expr(Expr::Field { expr, name }, syntax_ptr)
+ }
+ ast::Expr::AwaitExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Await { expr }, syntax_ptr)
+ }
+ ast::Expr::TryExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Try { expr }, syntax_ptr)
+ }
+ ast::Expr::CastExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ let type_ref = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty()));
+ self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr)
+ }
+ ast::Expr::RefExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ let raw_tok = e.raw_token().is_some();
+ let mutability = if raw_tok {
+ if e.mut_token().is_some() {
+ Mutability::Mut
+ } else if e.const_token().is_some() {
+ Mutability::Shared
+ } else {
+ unreachable!("parser only remaps to raw_token() if matching mutability token follows")
+ }
+ } else {
+ Mutability::from_mutable(e.mut_token().is_some())
+ };
+ let rawness = Rawness::from_raw(raw_tok);
+ self.alloc_expr(Expr::Ref { expr, rawness, mutability }, syntax_ptr)
+ }
+ ast::Expr::PrefixExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ match e.op_kind() {
+ Some(op) => self.alloc_expr(Expr::UnaryOp { expr, op }, syntax_ptr),
+ None => self.alloc_expr(Expr::Missing, syntax_ptr),
+ }
+ }
+ ast::Expr::ClosureExpr(e) => {
+ let mut args = Vec::new();
+ let mut arg_types = Vec::new();
+ if let Some(pl) = e.param_list() {
+ for param in pl.params() {
+ let pat = self.collect_pat_opt(param.pat());
+ let type_ref =
+ param.ty().map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it)));
+ args.push(pat);
+ arg_types.push(type_ref);
+ }
+ }
+ let ret_type = e
+ .ret_type()
+ .and_then(|r| r.ty())
+ .map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it)));
+ let body = self.collect_expr_opt(e.body());
+ self.alloc_expr(
+ Expr::Closure {
+ args: args.into(),
+ arg_types: arg_types.into(),
+ ret_type,
+ body,
+ },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::BinExpr(e) => {
+ let op = e.op_kind();
+ if let Some(ast::BinaryOp::Assignment { op: None }) = op {
+ self.is_lowering_assignee_expr = true;
+ }
+ let lhs = self.collect_expr_opt(e.lhs());
+ self.is_lowering_assignee_expr = false;
+ let rhs = self.collect_expr_opt(e.rhs());
+ self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr)
+ }
+ ast::Expr::TupleExpr(e) => {
+ let exprs = e.fields().map(|expr| self.collect_expr(expr)).collect();
+ self.alloc_expr(
+ Expr::Tuple { exprs, is_assignee_expr: self.is_lowering_assignee_expr },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::BoxExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Box { expr }, syntax_ptr)
+ }
+
+ ast::Expr::ArrayExpr(e) => {
+ let kind = e.kind();
+
+ match kind {
+ ArrayExprKind::ElementList(e) => {
+ let elements = e.map(|expr| self.collect_expr(expr)).collect();
+ self.alloc_expr(
+ Expr::Array(Array::ElementList {
+ elements,
+ is_assignee_expr: self.is_lowering_assignee_expr,
+ }),
+ syntax_ptr,
+ )
+ }
+ ArrayExprKind::Repeat { initializer, repeat } => {
+ let initializer = self.collect_expr_opt(initializer);
+ let repeat = self.collect_expr_opt(repeat);
+ self.alloc_expr(
+ Expr::Array(Array::Repeat { initializer, repeat }),
+ syntax_ptr,
+ )
+ }
+ }
+ }
+
+ ast::Expr::Literal(e) => self.alloc_expr(Expr::Literal(e.kind().into()), syntax_ptr),
+ ast::Expr::IndexExpr(e) => {
+ let base = self.collect_expr_opt(e.base());
+ let index = self.collect_expr_opt(e.index());
+ self.alloc_expr(Expr::Index { base, index }, syntax_ptr)
+ }
+ ast::Expr::RangeExpr(e) => {
+ let lhs = e.start().map(|lhs| self.collect_expr(lhs));
+ let rhs = e.end().map(|rhs| self.collect_expr(rhs));
+ match e.op_kind() {
+ Some(range_type) => {
+ self.alloc_expr(Expr::Range { lhs, rhs, range_type }, syntax_ptr)
+ }
+ None => self.alloc_expr(Expr::Missing, syntax_ptr),
+ }
+ }
+ ast::Expr::MacroExpr(e) => {
+ let e = e.macro_call()?;
+ let macro_ptr = AstPtr::new(&e);
+ let id = self.collect_macro_call(e, macro_ptr, true, |this, expansion| {
+ expansion.map(|it| this.collect_expr(it))
+ });
+ match id {
+ Some(id) => {
+ // Make the macro-call point to its expanded expression so we can query
+ // semantics on syntax pointers to the macro
+ let src = self.expander.to_source(syntax_ptr);
+ self.source_map.expr_map.insert(src, id);
+ id
+ }
+ None => self.alloc_expr(Expr::Missing, syntax_ptr),
+ }
+ }
+ ast::Expr::MacroStmts(e) => {
+ let statements = e.statements().filter_map(|s| self.collect_stmt(s)).collect();
+ let tail = e.expr().map(|e| self.collect_expr(e));
+
+ self.alloc_expr(Expr::MacroStmts { tail, statements }, syntax_ptr)
+ }
+ ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr),
+ })
+ }
+
+ fn collect_macro_call<F, T, U>(
+ &mut self,
+ mcall: ast::MacroCall,
+ syntax_ptr: AstPtr<ast::MacroCall>,
+ record_diagnostics: bool,
+ collector: F,
+ ) -> U
+ where
+ F: FnOnce(&mut Self, Option<T>) -> U,
+ T: ast::AstNode,
+ {
+ // File containing the macro call. Expansion errors will be attached here.
+ let outer_file = self.expander.current_file_id;
+
+ let macro_call_ptr = self.expander.to_source(AstPtr::new(&mcall));
+ let res = self.expander.enter_expand(self.db, mcall);
+
+ let res = match res {
+ Ok(res) => res,
+ Err(UnresolvedMacro { path }) => {
+ if record_diagnostics {
+ self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall {
+ node: InFile::new(outer_file, syntax_ptr),
+ path,
+ });
+ }
+ return collector(self, None);
+ }
+ };
+
+ if record_diagnostics {
+ match &res.err {
+ Some(ExpandError::UnresolvedProcMacro(krate)) => {
+ self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro {
+ node: InFile::new(outer_file, syntax_ptr),
+ krate: *krate,
+ });
+ }
+ Some(err) => {
+ self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
+ node: InFile::new(outer_file, syntax_ptr),
+ message: err.to_string(),
+ });
+ }
+ None => {}
+ }
+ }
+
+ match res.value {
+ Some((mark, expansion)) => {
+ self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id);
+ let prev_ast_id_map = mem::replace(
+ &mut self.ast_id_map,
+ self.db.ast_id_map(self.expander.current_file_id),
+ );
+
+ let id = collector(self, Some(expansion));
+ self.ast_id_map = prev_ast_id_map;
+ self.expander.exit(self.db, mark);
+ id
+ }
+ None => collector(self, None),
+ }
+ }
+
+ fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
+ match expr {
+ Some(expr) => self.collect_expr(expr),
+ None => self.missing_expr(),
+ }
+ }
+
+ fn collect_stmt(&mut self, s: ast::Stmt) -> Option<Statement> {
+ match s {
+ ast::Stmt::LetStmt(stmt) => {
+ if self.check_cfg(&stmt).is_none() {
+ return None;
+ }
+ let pat = self.collect_pat_opt(stmt.pat());
+ let type_ref =
+ stmt.ty().map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it)));
+ let initializer = stmt.initializer().map(|e| self.collect_expr(e));
+ let else_branch = stmt
+ .let_else()
+ .and_then(|let_else| let_else.block_expr())
+ .map(|block| self.collect_block(block));
+ Some(Statement::Let { pat, type_ref, initializer, else_branch })
+ }
+ ast::Stmt::ExprStmt(stmt) => {
+ let expr = stmt.expr();
+ if let Some(expr) = &expr {
+ if self.check_cfg(expr).is_none() {
+ return None;
+ }
+ }
+ let has_semi = stmt.semicolon_token().is_some();
+ // Note that macro could be expanded to multiple statements
+ if let Some(expr @ ast::Expr::MacroExpr(mac)) = &expr {
+ let mac_call = mac.macro_call()?;
+ let syntax_ptr = AstPtr::new(expr);
+ let macro_ptr = AstPtr::new(&mac_call);
+ let stmt = self.collect_macro_call(
+ mac_call,
+ macro_ptr,
+ false,
+ |this, expansion: Option<ast::MacroStmts>| match expansion {
+ Some(expansion) => {
+ let statements = expansion
+ .statements()
+ .filter_map(|stmt| this.collect_stmt(stmt))
+ .collect();
+ let tail = expansion.expr().map(|expr| this.collect_expr(expr));
+
+ let mac_stmts = this.alloc_expr(
+ Expr::MacroStmts { tail, statements },
+ AstPtr::new(&ast::Expr::MacroStmts(expansion)),
+ );
+
+ Some(mac_stmts)
+ }
+ None => None,
+ },
+ );
+
+ let expr = match stmt {
+ Some(expr) => {
+ // Make the macro-call point to its expanded expression so we can query
+ // semantics on syntax pointers to the macro
+ let src = self.expander.to_source(syntax_ptr);
+ self.source_map.expr_map.insert(src, expr);
+ expr
+ }
+ None => self.alloc_expr(Expr::Missing, syntax_ptr),
+ };
+ Some(Statement::Expr { expr, has_semi })
+ } else {
+ let expr = self.collect_expr_opt(expr);
+ Some(Statement::Expr { expr, has_semi })
+ }
+ }
+ ast::Stmt::Item(_item) => None,
+ }
+ }
+
+ fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId {
+ let file_local_id = self.ast_id_map.ast_id(&block);
+ let ast_id = AstId::new(self.expander.current_file_id, file_local_id);
+ let block_loc =
+ BlockLoc { ast_id, module: self.expander.def_map.module_id(self.expander.module) };
+ let block_id = self.db.intern_block(block_loc);
+
+ let (module, def_map) = match self.db.block_def_map(block_id) {
+ Some(def_map) => {
+ self.body.block_scopes.push(block_id);
+ (def_map.root(), def_map)
+ }
+ None => (self.expander.module, self.expander.def_map.clone()),
+ };
+ let prev_def_map = mem::replace(&mut self.expander.def_map, def_map);
+ let prev_local_module = mem::replace(&mut self.expander.module, module);
+
+ let mut statements: Vec<_> =
+ block.statements().filter_map(|s| self.collect_stmt(s)).collect();
+ let tail = block.tail_expr().and_then(|e| self.maybe_collect_expr(e));
+ let tail = tail.or_else(|| {
+ let stmt = statements.pop()?;
+ if let Statement::Expr { expr, has_semi: false } = stmt {
+ return Some(expr);
+ }
+ statements.push(stmt);
+ None
+ });
+
+ let syntax_node_ptr = AstPtr::new(&block.into());
+ let expr_id = self.alloc_expr(
+ Expr::Block {
+ id: block_id,
+ statements: statements.into_boxed_slice(),
+ tail,
+ label: None,
+ },
+ syntax_node_ptr,
+ );
+
+ self.expander.def_map = prev_def_map;
+ self.expander.module = prev_local_module;
+ expr_id
+ }
+
+ fn collect_block_opt(&mut self, expr: Option<ast::BlockExpr>) -> ExprId {
+ match expr {
+ Some(block) => self.collect_block(block),
+ None => self.missing_expr(),
+ }
+ }
+
+ fn collect_label(&mut self, ast_label: ast::Label) -> LabelId {
+ let label = Label {
+ name: ast_label.lifetime().as_ref().map_or_else(Name::missing, Name::new_lifetime),
+ };
+ self.alloc_label(label, AstPtr::new(&ast_label))
+ }
+
+ fn collect_pat(&mut self, pat: ast::Pat) -> PatId {
+ let pat_id = self.collect_pat_(pat);
+ for (_, pats) in self.name_to_pat_grouping.drain() {
+ let pats = Arc::<[_]>::from(pats);
+ self.body.or_pats.extend(pats.iter().map(|&pat| (pat, pats.clone())));
+ }
+ self.is_lowering_inside_or_pat = false;
+ pat_id
+ }
+
+ fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {
+ match pat {
+ Some(pat) => self.collect_pat(pat),
+ None => self.missing_pat(),
+ }
+ }
+
+ fn collect_pat_(&mut self, pat: ast::Pat) -> PatId {
+ let pattern = match &pat {
+ ast::Pat::IdentPat(bp) => {
+ let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
+
+ let key = self.is_lowering_inside_or_pat.then(|| name.clone());
+ let annotation =
+ BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some());
+ let subpat = bp.pat().map(|subpat| self.collect_pat_(subpat));
+ let pattern = if annotation == BindingAnnotation::Unannotated && subpat.is_none() {
+ // This could also be a single-segment path pattern. To
+ // decide that, we need to try resolving the name.
+ let (resolved, _) = self.expander.def_map.resolve_path(
+ self.db,
+ self.expander.module,
+ &name.clone().into(),
+ BuiltinShadowMode::Other,
+ );
+ match resolved.take_values() {
+ Some(ModuleDefId::ConstId(_)) => Pat::Path(name.into()),
+ Some(ModuleDefId::EnumVariantId(_)) => {
+ // this is only really valid for unit variants, but
+ // shadowing other enum variants with a pattern is
+ // an error anyway
+ Pat::Path(name.into())
+ }
+ Some(ModuleDefId::AdtId(AdtId::StructId(s)))
+ if self.db.struct_data(s).variant_data.kind() != StructKind::Record =>
+ {
+ // Funnily enough, record structs *can* be shadowed
+ // by pattern bindings (but unit or tuple structs
+ // can't).
+ Pat::Path(name.into())
+ }
+ // shadowing statics is an error as well, so we just ignore that case here
+ _ => Pat::Bind { name, mode: annotation, subpat },
+ }
+ } else {
+ Pat::Bind { name, mode: annotation, subpat }
+ };
+
+ let ptr = AstPtr::new(&pat);
+ let pat = self.alloc_pat(pattern, Either::Left(ptr));
+ if let Some(key) = key {
+ self.name_to_pat_grouping.entry(key).or_default().push(pat);
+ }
+ return pat;
+ }
+ ast::Pat::TupleStructPat(p) => {
+ let path =
+ p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+ let (args, ellipsis) = self.collect_tuple_pat(p.fields());
+ Pat::TupleStruct { path, args, ellipsis }
+ }
+ ast::Pat::RefPat(p) => {
+ let pat = self.collect_pat_opt(p.pat());
+ let mutability = Mutability::from_mutable(p.mut_token().is_some());
+ Pat::Ref { pat, mutability }
+ }
+ ast::Pat::PathPat(p) => {
+ let path =
+ p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+ path.map(Pat::Path).unwrap_or(Pat::Missing)
+ }
+ ast::Pat::OrPat(p) => {
+ self.is_lowering_inside_or_pat = true;
+ let pats = p.pats().map(|p| self.collect_pat_(p)).collect();
+ Pat::Or(pats)
+ }
+ ast::Pat::ParenPat(p) => return self.collect_pat_opt_(p.pat()),
+ ast::Pat::TuplePat(p) => {
+ let (args, ellipsis) = self.collect_tuple_pat(p.fields());
+ Pat::Tuple { args, ellipsis }
+ }
+ ast::Pat::WildcardPat(_) => Pat::Wild,
+ ast::Pat::RecordPat(p) => {
+ let path =
+ p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+ let args = p
+ .record_pat_field_list()
+ .expect("every struct should have a field list")
+ .fields()
+ .filter_map(|f| {
+ let ast_pat = f.pat()?;
+ let pat = self.collect_pat_(ast_pat);
+ let name = f.field_name()?.as_name();
+ Some(RecordFieldPat { name, pat })
+ })
+ .collect();
+
+ let ellipsis = p
+ .record_pat_field_list()
+ .expect("every struct should have a field list")
+ .rest_pat()
+ .is_some();
+
+ Pat::Record { path, args, ellipsis }
+ }
+ ast::Pat::SlicePat(p) => {
+ let SlicePatComponents { prefix, slice, suffix } = p.components();
+
+ // FIXME properly handle `RestPat`
+ Pat::Slice {
+ prefix: prefix.into_iter().map(|p| self.collect_pat_(p)).collect(),
+ slice: slice.map(|p| self.collect_pat_(p)),
+ suffix: suffix.into_iter().map(|p| self.collect_pat_(p)).collect(),
+ }
+ }
+ ast::Pat::LiteralPat(lit) => {
+ if let Some(ast_lit) = lit.literal() {
+ let expr = Expr::Literal(ast_lit.kind().into());
+ let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit));
+ let expr_id = self.alloc_expr(expr, expr_ptr);
+ Pat::Lit(expr_id)
+ } else {
+ Pat::Missing
+ }
+ }
+ ast::Pat::RestPat(_) => {
+ // `RestPat` requires special handling and should not be mapped
+ // to a Pat. Here we are using `Pat::Missing` as a fallback for
+ // when `RestPat` is mapped to `Pat`, which can easily happen
+ // when the source code being analyzed has a malformed pattern
+ // which includes `..` in a place where it isn't valid.
+
+ Pat::Missing
+ }
+ ast::Pat::BoxPat(boxpat) => {
+ let inner = self.collect_pat_opt_(boxpat.pat());
+ Pat::Box { inner }
+ }
+ ast::Pat::ConstBlockPat(const_block_pat) => {
+ if let Some(expr) = const_block_pat.block_expr() {
+ let expr_id = self.collect_block(expr);
+ Pat::ConstBlock(expr_id)
+ } else {
+ Pat::Missing
+ }
+ }
+ ast::Pat::MacroPat(mac) => match mac.macro_call() {
+ Some(call) => {
+ let macro_ptr = AstPtr::new(&call);
+ let src = self.expander.to_source(Either::Left(AstPtr::new(&pat)));
+ let pat =
+ self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
+ this.collect_pat_opt_(expanded_pat)
+ });
+ self.source_map.pat_map.insert(src, pat);
+ return pat;
+ }
+ None => Pat::Missing,
+ },
+ // FIXME: implement
+ ast::Pat::RangePat(_) => Pat::Missing,
+ };
+ let ptr = AstPtr::new(&pat);
+ self.alloc_pat(pattern, Either::Left(ptr))
+ }
+
+ fn collect_pat_opt_(&mut self, pat: Option<ast::Pat>) -> PatId {
+ match pat {
+ Some(pat) => self.collect_pat_(pat),
+ None => self.missing_pat(),
+ }
+ }
+
+ fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Box<[PatId]>, Option<usize>) {
+ // Find the location of the `..`, if there is one. Note that we do not
+ // consider the possibility of there being multiple `..` here.
+ let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_)));
+ // We want to skip the `..` pattern here, since we account for it above.
+ let args = args
+ .filter(|p| !matches!(p, ast::Pat::RestPat(_)))
+ .map(|p| self.collect_pat_(p))
+ .collect();
+
+ (args, ellipsis)
+ }
+
+ /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when
+ /// not.
+ fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> Option<()> {
+ match self.expander.parse_attrs(self.db, owner).cfg() {
+ Some(cfg) => {
+ if self.expander.cfg_options().check(&cfg) != Some(false) {
+ return Some(());
+ }
+
+ self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode {
+ node: InFile::new(
+ self.expander.current_file_id,
+ SyntaxNodePtr::new(owner.syntax()),
+ ),
+ cfg,
+ opts: self.expander.cfg_options().clone(),
+ });
+
+ None
+ }
+ None => Some(()),
+ }
+ }
+}
+
+impl From<ast::LiteralKind> for Literal {
+ fn from(ast_lit_kind: ast::LiteralKind) -> Self {
+ match ast_lit_kind {
+ // FIXME: these should have actual values filled in, but unsure on perf impact
+ LiteralKind::IntNumber(lit) => {
+ if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
+ Literal::Float(
+ FloatTypeWrapper::new(lit.float_value().unwrap_or(Default::default())),
+ builtin,
+ )
+ } else if let builtin @ Some(_) = lit.suffix().and_then(BuiltinInt::from_suffix) {
+ Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
+ } else {
+ let builtin = lit.suffix().and_then(BuiltinUint::from_suffix);
+ Literal::Uint(lit.value().unwrap_or(0), builtin)
+ }
+ }
+ LiteralKind::FloatNumber(lit) => {
+ let ty = lit.suffix().and_then(BuiltinFloat::from_suffix);
+ Literal::Float(FloatTypeWrapper::new(lit.value().unwrap_or(Default::default())), ty)
+ }
+ LiteralKind::ByteString(bs) => {
+ let text = bs.value().map(Box::from).unwrap_or_else(Default::default);
+ Literal::ByteString(text)
+ }
+ LiteralKind::String(s) => {
+ let text = s.value().map(Box::from).unwrap_or_else(Default::default);
+ Literal::String(text)
+ }
+ LiteralKind::Byte(b) => {
+ Literal::Uint(b.value().unwrap_or_default() as u128, Some(BuiltinUint::U8))
+ }
+ LiteralKind::Char(c) => Literal::Char(c.value().unwrap_or_default()),
+ LiteralKind::Bool(val) => Literal::Bool(val),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
new file mode 100644
index 000000000..f4c390dce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
@@ -0,0 +1,571 @@
+//! Name resolution for expressions.
+use std::sync::Arc;
+
+use hir_expand::name::Name;
+use la_arena::{Arena, Idx};
+use rustc_hash::FxHashMap;
+
+use crate::{
+ body::Body,
+ db::DefDatabase,
+ expr::{Expr, ExprId, LabelId, Pat, PatId, Statement},
+ BlockId, DefWithBodyId,
+};
+
+pub type ScopeId = Idx<ScopeData>;
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ExprScopes {
+ scopes: Arena<ScopeData>,
+ scope_by_expr: FxHashMap<ExprId, ScopeId>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ScopeEntry {
+ name: Name,
+ pat: PatId,
+}
+
+impl ScopeEntry {
+ pub fn name(&self) -> &Name {
+ &self.name
+ }
+
+ pub fn pat(&self) -> PatId {
+ self.pat
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ScopeData {
+ parent: Option<ScopeId>,
+ block: Option<BlockId>,
+ label: Option<(LabelId, Name)>,
+ entries: Vec<ScopeEntry>,
+}
+
+impl ExprScopes {
+ pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> {
+ let body = db.body(def);
+ Arc::new(ExprScopes::new(&*body))
+ }
+
+ fn new(body: &Body) -> ExprScopes {
+ let mut scopes =
+ ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() };
+ let mut root = scopes.root_scope();
+ scopes.add_params_bindings(body, root, &body.params);
+ compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root);
+ scopes
+ }
+
+ pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
+ &self.scopes[scope].entries
+ }
+
+ /// If `scope` refers to a block expression scope, returns the corresponding `BlockId`.
+ pub fn block(&self, scope: ScopeId) -> Option<BlockId> {
+ self.scopes[scope].block
+ }
+
+ /// If `scope` refers to a labeled expression scope, returns the corresponding `Label`.
+ pub fn label(&self, scope: ScopeId) -> Option<(LabelId, Name)> {
+ self.scopes[scope].label.clone()
+ }
+
+ pub fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
+ std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
+ }
+
+ pub fn resolve_name_in_scope(&self, scope: ScopeId, name: &Name) -> Option<&ScopeEntry> {
+ self.scope_chain(Some(scope))
+ .find_map(|scope| self.entries(scope).iter().find(|it| it.name == *name))
+ }
+
+ pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
+ self.scope_by_expr.get(&expr).copied()
+ }
+
+ pub fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {
+ &self.scope_by_expr
+ }
+
+ fn root_scope(&mut self) -> ScopeId {
+ self.scopes.alloc(ScopeData { parent: None, block: None, label: None, entries: vec![] })
+ }
+
+ fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
+ self.scopes.alloc(ScopeData {
+ parent: Some(parent),
+ block: None,
+ label: None,
+ entries: vec![],
+ })
+ }
+
+ fn new_labeled_scope(&mut self, parent: ScopeId, label: Option<(LabelId, Name)>) -> ScopeId {
+ self.scopes.alloc(ScopeData { parent: Some(parent), block: None, label, entries: vec![] })
+ }
+
+ fn new_block_scope(
+ &mut self,
+ parent: ScopeId,
+ block: BlockId,
+ label: Option<(LabelId, Name)>,
+ ) -> ScopeId {
+ self.scopes.alloc(ScopeData {
+ parent: Some(parent),
+ block: Some(block),
+ label,
+ entries: vec![],
+ })
+ }
+
+ fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
+ let pattern = &body[pat];
+ if let Pat::Bind { name, .. } = pattern {
+ let entry = ScopeEntry { name: name.clone(), pat };
+ self.scopes[scope].entries.push(entry);
+ }
+
+ pattern.walk_child_pats(|pat| self.add_bindings(body, scope, pat));
+ }
+
+ fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) {
+ params.iter().for_each(|pat| self.add_bindings(body, scope, *pat));
+ }
+
+ fn set_scope(&mut self, node: ExprId, scope: ScopeId) {
+ self.scope_by_expr.insert(node, scope);
+ }
+}
+
+fn compute_block_scopes(
+ statements: &[Statement],
+ tail: Option<ExprId>,
+ body: &Body,
+ scopes: &mut ExprScopes,
+ scope: &mut ScopeId,
+) {
+ for stmt in statements {
+ match stmt {
+ Statement::Let { pat, initializer, else_branch, .. } => {
+ if let Some(expr) = initializer {
+ compute_expr_scopes(*expr, body, scopes, scope);
+ }
+ if let Some(expr) = else_branch {
+ compute_expr_scopes(*expr, body, scopes, scope);
+ }
+
+ *scope = scopes.new_scope(*scope);
+ scopes.add_bindings(body, *scope, *pat);
+ }
+ Statement::Expr { expr, .. } => {
+ compute_expr_scopes(*expr, body, scopes, scope);
+ }
+ }
+ }
+ if let Some(expr) = tail {
+ compute_expr_scopes(expr, body, scopes, scope);
+ }
+}
+
+fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: &mut ScopeId) {
+ let make_label =
+ |label: &Option<LabelId>| label.map(|label| (label, body.labels[label].name.clone()));
+
+ scopes.set_scope(expr, *scope);
+ match &body[expr] {
+ Expr::MacroStmts { statements, tail } => {
+ compute_block_scopes(statements, *tail, body, scopes, scope);
+ }
+ Expr::Block { statements, tail, id, label } => {
+ let mut scope = scopes.new_block_scope(*scope, *id, make_label(label));
+ // Overwrite the old scope for the block expr, so that every block scope can be found
+ // via the block itself (important for blocks that only contain items, no expressions).
+ scopes.set_scope(expr, scope);
+ compute_block_scopes(statements, *tail, body, scopes, &mut scope);
+ }
+ Expr::For { iterable, pat, body: body_expr, label } => {
+ compute_expr_scopes(*iterable, body, scopes, scope);
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
+ scopes.add_bindings(body, scope, *pat);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
+ }
+ Expr::While { condition, body: body_expr, label } => {
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
+ compute_expr_scopes(*condition, body, scopes, &mut scope);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
+ }
+ Expr::Loop { body: body_expr, label } => {
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
+ }
+ Expr::Closure { args, body: body_expr, .. } => {
+ let mut scope = scopes.new_scope(*scope);
+ scopes.add_params_bindings(body, scope, args);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
+ }
+ Expr::Match { expr, arms } => {
+ compute_expr_scopes(*expr, body, scopes, scope);
+ for arm in arms.iter() {
+ let mut scope = scopes.new_scope(*scope);
+ scopes.add_bindings(body, scope, arm.pat);
+ if let Some(guard) = arm.guard {
+ scope = scopes.new_scope(scope);
+ compute_expr_scopes(guard, body, scopes, &mut scope);
+ }
+ compute_expr_scopes(arm.expr, body, scopes, &mut scope);
+ }
+ }
+ &Expr::If { condition, then_branch, else_branch } => {
+ let mut then_branch_scope = scopes.new_scope(*scope);
+ compute_expr_scopes(condition, body, scopes, &mut then_branch_scope);
+ compute_expr_scopes(then_branch, body, scopes, &mut then_branch_scope);
+ if let Some(else_branch) = else_branch {
+ compute_expr_scopes(else_branch, body, scopes, scope);
+ }
+ }
+ &Expr::Let { pat, expr } => {
+ compute_expr_scopes(expr, body, scopes, scope);
+ *scope = scopes.new_scope(*scope);
+ scopes.add_bindings(body, *scope, pat);
+ }
+ e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
+ };
+}
+
+#[cfg(test)]
+mod tests {
+ use base_db::{fixture::WithFixture, FileId, SourceDatabase};
+ use hir_expand::{name::AsName, InFile};
+ use syntax::{algo::find_node_at_offset, ast, AstNode};
+ use test_utils::{assert_eq_text, extract_offset};
+
+ use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId};
+
+ fn find_function(db: &TestDB, file_id: FileId) -> FunctionId {
+ let krate = db.test_crate();
+ let crate_def_map = db.crate_def_map(krate);
+
+ let module = crate_def_map.modules_for_file(file_id).next().unwrap();
+ let (_, def) = crate_def_map[module].scope.entries().next().unwrap();
+ match def.take_values().unwrap() {
+ ModuleDefId::FunctionId(it) => it,
+ _ => panic!(),
+ }
+ }
+
+ fn do_check(ra_fixture: &str, expected: &[&str]) {
+ let (offset, code) = extract_offset(ra_fixture);
+ let code = {
+ let mut buf = String::new();
+ let off: usize = offset.into();
+ buf.push_str(&code[..off]);
+ buf.push_str("$0marker");
+ buf.push_str(&code[off..]);
+ buf
+ };
+
+ let (db, position) = TestDB::with_position(&code);
+ let file_id = position.file_id;
+ let offset = position.offset;
+
+ let file_syntax = db.parse(file_id).syntax_node();
+ let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
+ let function = find_function(&db, file_id);
+
+ let scopes = db.expr_scopes(function.into());
+ let (_body, source_map) = db.body_with_source_map(function.into());
+
+ let expr_id = source_map
+ .node_expr(InFile { file_id: file_id.into(), value: &marker.into() })
+ .unwrap();
+ let scope = scopes.scope_for(expr_id);
+
+ let actual = scopes
+ .scope_chain(scope)
+ .flat_map(|scope| scopes.entries(scope))
+ .map(|it| it.name().to_smol_str())
+ .collect::<Vec<_>>()
+ .join("\n");
+ let expected = expected.join("\n");
+ assert_eq_text!(&expected, &actual);
+ }
+
+ #[test]
+ fn test_lambda_scope() {
+ do_check(
+ r"
+ fn quux(foo: i32) {
+ let f = |bar, baz: i32| {
+ $0
+ };
+ }",
+ &["bar", "baz", "foo"],
+ );
+ }
+
+ #[test]
+ fn test_call_scope() {
+ do_check(
+ r"
+ fn quux() {
+ f(|x| $0 );
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_method_call_scope() {
+ do_check(
+ r"
+ fn quux() {
+ z.f(|x| $0 );
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_loop_scope() {
+ do_check(
+ r"
+ fn quux() {
+ loop {
+ let x = ();
+ $0
+ };
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_match() {
+ do_check(
+ r"
+ fn quux() {
+ match () {
+ Some(x) => {
+ $0
+ }
+ };
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_shadow_variable() {
+ do_check(
+ r"
+ fn foo(x: String) {
+ let x : &str = &x$0;
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_bindings_after_at() {
+ do_check(
+ r"
+fn foo() {
+ match Some(()) {
+ opt @ Some(unit) => {
+ $0
+ }
+ _ => {}
+ }
+}
+",
+ &["opt", "unit"],
+ );
+ }
+
+ #[test]
+ fn macro_inner_item() {
+ do_check(
+ r"
+ macro_rules! mac {
+ () => {{
+ fn inner() {}
+ inner();
+ }};
+ }
+
+ fn foo() {
+ mac!();
+ $0
+ }
+ ",
+ &[],
+ );
+ }
+
+ #[test]
+ fn broken_inner_item() {
+ do_check(
+ r"
+ fn foo() {
+ trait {}
+ $0
+ }
+ ",
+ &[],
+ );
+ }
+
+ fn do_check_local_name(ra_fixture: &str, expected_offset: u32) {
+ let (db, position) = TestDB::with_position(ra_fixture);
+ let file_id = position.file_id;
+ let offset = position.offset;
+
+ let file = db.parse(file_id).ok().unwrap();
+ let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
+ .expect("failed to find a name at the target offset");
+ let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();
+
+ let function = find_function(&db, file_id);
+
+ let scopes = db.expr_scopes(function.into());
+ let (_body, source_map) = db.body_with_source_map(function.into());
+
+ let expr_scope = {
+ let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
+ let expr_id =
+ source_map.node_expr(InFile { file_id: file_id.into(), value: &expr_ast }).unwrap();
+ scopes.scope_for(expr_id).unwrap()
+ };
+
+ let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
+ let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
+
+ let local_name = pat_src.value.either(
+ |it| it.syntax_node_ptr().to_node(file.syntax()),
+ |it| it.syntax_node_ptr().to_node(file.syntax()),
+ );
+ assert_eq!(local_name.text_range(), expected_name.syntax().text_range());
+ }
+
+ #[test]
+ fn test_resolve_local_name() {
+ do_check_local_name(
+ r#"
+fn foo(x: i32, y: u32) {
+ {
+ let z = x * 2;
+ }
+ {
+ let t = x$0 * 3;
+ }
+}
+"#,
+ 7,
+ );
+ }
+
+ #[test]
+ fn test_resolve_local_name_declaration() {
+ do_check_local_name(
+ r#"
+fn foo(x: String) {
+ let x : &str = &x$0;
+}
+"#,
+ 7,
+ );
+ }
+
+ #[test]
+ fn test_resolve_local_name_shadow() {
+ do_check_local_name(
+ r"
+fn foo(x: String) {
+ let x : &str = &x;
+ x$0
+}
+",
+ 28,
+ );
+ }
+
+ #[test]
+ fn ref_patterns_contribute_bindings() {
+ do_check_local_name(
+ r"
+fn foo() {
+ if let Some(&from) = bar() {
+ from$0;
+ }
+}
+",
+ 28,
+ );
+ }
+
+ #[test]
+ fn while_let_adds_binding() {
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<f32> = None;
+ while let Option::Some(spam) = foo {
+ spam$0
+ }
+}
+"#,
+ 75,
+ );
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<f32> = None;
+ while (((let Option::Some(_) = foo))) && let Option::Some(spam) = foo {
+ spam$0
+ }
+}
+"#,
+ 107,
+ );
+ }
+
+ #[test]
+ fn match_guard_if_let() {
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<f32> = None;
+ match foo {
+ _ if let Option::Some(spam) = foo => spam$0,
+ }
+}
+"#,
+ 93,
+ );
+ }
+
+ #[test]
+ fn let_chains_can_reference_previous_lets() {
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<i32> = None;
+ if let Some(spam) = foo && spa$0m > 1 && let Some(spam) = foo && spam > 1 {}
+}
+"#,
+ 61,
+ );
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<i32> = None;
+ if let Some(spam) = foo && spam > 1 && let Some(spam) = foo && sp$0am > 1 {}
+}
+"#,
+ 100,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
new file mode 100644
index 000000000..c9601f855
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
@@ -0,0 +1,127 @@
+mod block;
+
+use base_db::{fixture::WithFixture, SourceDatabase};
+use expect_test::Expect;
+
+use crate::ModuleDefId;
+
+use super::*;
+
+fn lower(ra_fixture: &str) -> Arc<Body> {
+ let db = crate::test_db::TestDB::with_files(ra_fixture);
+
+ let krate = db.crate_graph().iter().next().unwrap();
+ let def_map = db.crate_def_map(krate);
+ let mut fn_def = None;
+ 'outer: for (_, module) in def_map.modules() {
+ for decl in module.scope.declarations() {
+ if let ModuleDefId::FunctionId(it) = decl {
+ fn_def = Some(it);
+ break 'outer;
+ }
+ }
+ }
+
+ db.body(fn_def.unwrap().into())
+}
+
+fn block_def_map_at(ra_fixture: &str) -> String {
+ let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+
+ let module = db.module_at_position(position);
+ module.def_map(&db).dump(&db)
+}
+
+fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
+ let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+
+ let module = db.module_at_position(position);
+ let actual = module.def_map(&db).dump_block_scopes(&db);
+ expect.assert_eq(&actual);
+}
+
+fn check_at(ra_fixture: &str, expect: Expect) {
+ let actual = block_def_map_at(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn your_stack_belongs_to_me() {
+ cov_mark::check!(your_stack_belongs_to_me);
+ lower(
+ r#"
+macro_rules! n_nuple {
+ ($e:tt) => ();
+ ($($rest:tt)*) => {{
+ (n_nuple!($($rest)*)None,)
+ }};
+}
+fn main() { n_nuple!(1,2,3); }
+"#,
+ );
+}
+
+#[test]
+fn recursion_limit() {
+ cov_mark::check!(your_stack_belongs_to_me);
+
+ lower(
+ r#"
+#![recursion_limit = "2"]
+macro_rules! n_nuple {
+ ($e:tt) => ();
+ ($first:tt $($rest:tt)*) => {{
+ n_nuple!($($rest)*)
+ }};
+}
+fn main() { n_nuple!(1,2,3); }
+"#,
+ );
+}
+
+#[test]
+fn issue_3642_bad_macro_stackover() {
+ lower(
+ r#"
+#[macro_export]
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+ (match ($node:expr) {
+ $( ast::$ast:ident($it:ident) => $res:expr, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
+ { $catch_all }
+ }};
+}
+
+fn main() {
+ let anchor = match_ast! {
+ match parent {
+ as => {},
+ _ => return None
+ }
+ };
+}"#,
+ );
+}
+
+#[test]
+fn macro_resolve() {
+ // Regression test for a path resolution bug introduced with inner item handling.
+ lower(
+ r#"
+macro_rules! vec {
+ () => { () };
+ ($elem:expr; $n:expr) => { () };
+ ($($x:expr),+ $(,)?) => { () };
+}
+mod m {
+ fn outer() {
+ let _ = vec![FileSet::default(); self.len()];
+ }
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
new file mode 100644
index 000000000..3bba08cfc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
@@ -0,0 +1,397 @@
+use super::*;
+use expect_test::expect;
+
+#[test]
+fn inner_item_smoke() {
+ check_at(
+ r#"
+struct inner {}
+fn outer() {
+ $0
+ fn inner() {}
+}
+"#,
+ expect![[r#"
+ block scope
+ inner: v
+
+ crate
+ inner: t
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn use_from_crate() {
+ check_at(
+ r#"
+struct Struct {}
+fn outer() {
+ fn Struct() {}
+ use Struct as PlainStruct;
+ use crate::Struct as CrateStruct;
+ use self::Struct as SelfStruct;
+ use super::Struct as SuperStruct;
+ $0
+}
+"#,
+ expect![[r#"
+ block scope
+ CrateStruct: t
+ PlainStruct: t v
+ SelfStruct: t
+ Struct: v
+ SuperStruct: _
+
+ crate
+ Struct: t
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn merge_namespaces() {
+ check_at(
+ r#"
+struct name {}
+fn outer() {
+ fn name() {}
+
+ use name as imported; // should import both `name`s
+
+ $0
+}
+"#,
+ expect![[r#"
+ block scope
+ imported: t v
+ name: v
+
+ crate
+ name: t
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_blocks() {
+ check_at(
+ r#"
+fn outer() {
+ struct inner1 {}
+ fn inner() {
+ use inner1;
+ use outer;
+ fn inner2() {}
+ $0
+ }
+}
+"#,
+ expect![[r#"
+ block scope
+ inner1: t
+ inner2: v
+ outer: v
+
+ block scope
+ inner: v
+ inner1: t
+
+ crate
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn super_imports() {
+ check_at(
+ r#"
+mod module {
+ fn f() {
+ use super::Struct;
+ $0
+ }
+}
+
+struct Struct {}
+"#,
+ expect![[r#"
+ block scope
+ Struct: t
+
+ crate
+ Struct: t
+ module: t
+
+ crate::module
+ f: v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_module_scoping() {
+ check_block_scopes_at(
+ r#"
+fn f() {
+ mod module {
+ struct Struct {}
+ fn f() {
+ use self::Struct;
+ $0
+ }
+ }
+}
+ "#,
+ expect![[r#"
+ BlockId(1) in ModuleId { krate: CrateId(0), block: Some(BlockId(0)), local_id: Idx::<ModuleData>(1) }
+ BlockId(0) in ModuleId { krate: CrateId(0), block: None, local_id: Idx::<ModuleData>(0) }
+ crate scope
+ "#]],
+ );
+}
+
+#[test]
+fn legacy_macro_items() {
+ // Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded
+ // correctly.
+ check_at(
+ r#"
+macro_rules! mark {
+ () => {
+ struct Hit {}
+ }
+}
+
+fn f() {
+ mark!();
+ $0
+}
+"#,
+ expect![[r#"
+ block scope
+ Hit: t
+
+ crate
+ f: v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_resolve() {
+ check_at(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::cov_mark;
+
+fn f() {
+ fn nested() {
+ cov_mark::mark!(Hit);
+ $0
+ }
+}
+//- /core.rs crate:core
+pub mod cov_mark {
+ #[macro_export]
+ macro_rules! _mark {
+ ($name:ident) => {
+ struct $name {}
+ }
+ }
+
+ pub use crate::_mark as mark;
+}
+"#,
+ expect![[r#"
+ block scope
+ Hit: t
+
+ block scope
+ nested: v
+
+ crate
+ cov_mark: t
+ f: v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_resolve_legacy() {
+ check_at(
+ r#"
+//- /lib.rs
+mod module;
+
+//- /module.rs
+macro_rules! m {
+ () => {
+ struct Def {}
+ };
+}
+
+fn f() {
+ {
+ m!();
+ $0
+ }
+}
+ "#,
+ expect![[r#"
+ block scope
+ Def: t
+
+ crate
+ module: t
+
+ crate::module
+ f: v
+ "#]],
+ )
+}
+
+#[test]
+fn super_does_not_resolve_to_block_module() {
+ check_at(
+ r#"
+fn main() {
+ struct Struct {}
+ mod module {
+ use super::Struct;
+
+ $0
+ }
+}
+ "#,
+ expect![[r#"
+ block scope
+ Struct: t
+ module: t
+
+ block scope::module
+ Struct: _
+
+ crate
+ main: v
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_import() {
+ // This used to panic, because the default (private) visibility inside block expressions would
+ // point into the containing `DefMap`, which visibilities should never be able to do.
+ cov_mark::check!(adjust_vis_in_block_def_map);
+ check_at(
+ r#"
+mod m {
+ fn main() {
+ use Tr as _;
+ trait Tr {}
+ $0
+ }
+}
+ "#,
+ expect![[r#"
+ block scope
+ _: t
+ Tr: t
+
+ crate
+ m: t
+
+ crate::m
+ main: v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_macro_item_decl() {
+ cov_mark::check!(macro_call_in_macro_stmts_is_added_to_item_tree);
+ check_at(
+ r#"
+macro_rules! inner_declare {
+ ($ident:ident) => {
+ static $ident: u32 = 0;
+ };
+}
+macro_rules! declare {
+ ($ident:ident) => {
+ inner_declare!($ident);
+ };
+}
+
+fn foo() {
+ declare!(bar);
+ bar;
+ $0
+}
+ "#,
+ expect![[r#"
+ block scope
+ bar: v
+
+ crate
+ foo: v
+ "#]],
+ )
+}
+
+#[test]
+fn is_visible_from_same_def_map() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/9481
+ cov_mark::check!(is_visible_from_same_block_def_map);
+ check_at(
+ r#"
+fn outer() {
+ mod tests {
+ use super::*;
+ }
+ use crate::name;
+ $0
+}
+ "#,
+ expect![[r#"
+ block scope
+ name: _
+ tests: t
+
+ block scope::tests
+ name: _
+ outer: v
+
+ crate
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn stmt_macro_expansion_with_trailing_expr() {
+ cov_mark::check!(macro_stmt_with_trailing_macro_expr);
+ check_at(
+ r#"
+macro_rules! mac {
+ () => { mac!($) };
+ ($x:tt) => { fn inner() {} };
+}
+fn foo() {
+ mac!();
+ $0
+}
+ "#,
+ expect![[r#"
+ block scope
+ inner: v
+
+ crate
+ foo: v
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_attr.rs
new file mode 100644
index 000000000..0e7ce5f85
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_attr.rs
@@ -0,0 +1,654 @@
+//! Builtin attributes resolved by nameres.
+//!
+//! The actual definitions were copied from rustc's `compiler/rustc_feature/src/builtin_attrs.rs`.
+//!
+//! It was last synchronized with upstream commit c1a2db3372a4d6896744919284f3287650a38ab7.
+//!
+//! The macros were adjusted to only expand to the attribute name, since that is all we need to do
+//! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to
+//! ease updating.
+
+use once_cell::sync::OnceCell;
+use rustc_hash::FxHashMap;
+
+/// Ignored attribute namespaces used by tools.
+pub const TOOL_MODULES: &[&str] = &["rustfmt", "clippy"];
+
+pub struct BuiltinAttribute {
+ pub name: &'static str,
+ pub template: AttributeTemplate,
+}
+
+/// A template that the attribute input must match.
+/// Only top-level shape (`#[attr]` vs `#[attr(...)]` vs `#[attr = ...]`) is considered now.
+#[derive(Clone, Copy)]
+pub struct AttributeTemplate {
+ pub word: bool,
+ pub list: Option<&'static str>,
+ pub name_value_str: Option<&'static str>,
+}
+
+pub fn find_builtin_attr_idx(name: &str) -> Option<usize> {
+ static BUILTIN_LOOKUP_TABLE: OnceCell<FxHashMap<&'static str, usize>> = OnceCell::new();
+ BUILTIN_LOOKUP_TABLE
+ .get_or_init(|| {
+ INERT_ATTRIBUTES.iter().map(|attr| attr.name).enumerate().map(|(a, b)| (b, a)).collect()
+ })
+ .get(name)
+ .copied()
+}
+
+// impl AttributeTemplate {
+// const DEFAULT: AttributeTemplate =
+// AttributeTemplate { word: false, list: None, name_value_str: None };
+// }
+
+/// A convenience macro for constructing attribute templates.
+/// E.g., `template!(Word, List: "description")` means that the attribute
+/// supports forms `#[attr]` and `#[attr(description)]`.
+macro_rules! template {
+ (Word) => { template!(@ true, None, None) };
+ (List: $descr: expr) => { template!(@ false, Some($descr), None) };
+ (NameValueStr: $descr: expr) => { template!(@ false, None, Some($descr)) };
+ (Word, List: $descr: expr) => { template!(@ true, Some($descr), None) };
+ (Word, NameValueStr: $descr: expr) => { template!(@ true, None, Some($descr)) };
+ (List: $descr1: expr, NameValueStr: $descr2: expr) => {
+ template!(@ false, Some($descr1), Some($descr2))
+ };
+ (Word, List: $descr1: expr, NameValueStr: $descr2: expr) => {
+ template!(@ true, Some($descr1), Some($descr2))
+ };
+ (@ $word: expr, $list: expr, $name_value_str: expr) => {
+ AttributeTemplate {
+ word: $word, list: $list, name_value_str: $name_value_str
+ }
+ };
+}
+
+macro_rules! ungated {
+ ($attr:ident, $typ:expr, $tpl:expr, $duplicates:expr $(, @only_local: $only_local:expr)? $(,)?) => {
+ BuiltinAttribute { name: stringify!($attr), template: $tpl }
+ };
+}
+
+macro_rules! gated {
+ ($attr:ident, $typ:expr, $tpl:expr, $duplicates:expr $(, @only_local: $only_local:expr)?, $gate:ident, $msg:expr $(,)?) => {
+ BuiltinAttribute { name: stringify!($attr), template: $tpl }
+ };
+ ($attr:ident, $typ:expr, $tpl:expr, $duplicates:expr $(, @only_local: $only_local:expr)?, $msg:expr $(,)?) => {
+ BuiltinAttribute { name: stringify!($attr), template: $tpl }
+ };
+}
+
+macro_rules! rustc_attr {
+ (TEST, $attr:ident, $typ:expr, $tpl:expr, $duplicate:expr $(, @only_local: $only_local:expr)? $(,)?) => {
+ rustc_attr!(
+ $attr,
+ $typ,
+ $tpl,
+ $duplicate,
+ $(@only_local: $only_local,)?
+ concat!(
+ "the `#[",
+ stringify!($attr),
+ "]` attribute is just used for rustc unit tests \
+ and will never be stable",
+ ),
+ )
+ };
+ ($attr:ident, $typ:expr, $tpl:expr, $duplicates:expr $(, @only_local: $only_local:expr)?, $msg:expr $(,)?) => {
+ BuiltinAttribute { name: stringify!($attr), template: $tpl }
+ };
+}
+
+#[allow(unused_macros)]
+macro_rules! experimental {
+ ($attr:ident) => {
+ concat!("the `#[", stringify!($attr), "]` attribute is an experimental feature")
+ };
+}
+
+/// "Inert" built-in attributes that have a special meaning to rustc or rustdoc.
+#[rustfmt::skip]
+pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
+ // ==========================================================================
+ // Stable attributes:
+ // ==========================================================================
+
+ // Conditional compilation:
+ ungated!(cfg, Normal, template!(List: "predicate"), DuplicatesOk),
+ ungated!(cfg_attr, Normal, template!(List: "predicate, attr1, attr2, ..."), DuplicatesOk),
+
+ // Testing:
+ ungated!(ignore, Normal, template!(Word, NameValueStr: "reason"), WarnFollowing),
+ ungated!(
+ should_panic, Normal,
+ template!(Word, List: r#"expected = "reason"#, NameValueStr: "reason"), FutureWarnFollowing,
+ ),
+ // FIXME(Centril): This can be used on stable but shouldn't.
+ ungated!(reexport_test_harness_main, CrateLevel, template!(NameValueStr: "name"), ErrorFollowing),
+
+ // Macros:
+ ungated!(automatically_derived, Normal, template!(Word), WarnFollowing),
+ ungated!(macro_use, Normal, template!(Word, List: "name1, name2, ..."), WarnFollowingWordOnly),
+ ungated!(macro_escape, Normal, template!(Word), WarnFollowing), // Deprecated synonym for `macro_use`.
+ ungated!(macro_export, Normal, template!(Word, List: "local_inner_macros"), WarnFollowing),
+ ungated!(proc_macro, Normal, template!(Word), ErrorFollowing),
+ ungated!(
+ proc_macro_derive, Normal,
+ template!(List: "TraitName, /*opt*/ attributes(name1, name2, ...)"), ErrorFollowing,
+ ),
+ ungated!(proc_macro_attribute, Normal, template!(Word), ErrorFollowing),
+
+ // Lints:
+ ungated!(
+ warn, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk
+ ),
+ ungated!(
+ allow, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk
+ ),
+ gated!(
+ expect, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk,
+ lint_reasons, experimental!(expect)
+ ),
+ ungated!(
+ forbid, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk
+ ),
+ ungated!(
+ deny, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk
+ ),
+ ungated!(must_use, Normal, template!(Word, NameValueStr: "reason"), FutureWarnFollowing),
+ gated!(
+ must_not_suspend, Normal, template!(Word, NameValueStr: "reason"), WarnFollowing,
+ experimental!(must_not_suspend)
+ ),
+ ungated!(
+ deprecated, Normal,
+ template!(
+ Word,
+ List: r#"/*opt*/ since = "version", /*opt*/ note = "reason""#,
+ NameValueStr: "reason"
+ ),
+ ErrorFollowing
+ ),
+
+ // Crate properties:
+ ungated!(crate_name, CrateLevel, template!(NameValueStr: "name"), FutureWarnFollowing),
+ ungated!(crate_type, CrateLevel, template!(NameValueStr: "bin|lib|..."), DuplicatesOk),
+ // crate_id is deprecated
+ ungated!(crate_id, CrateLevel, template!(NameValueStr: "ignored"), FutureWarnFollowing),
+
+ // ABI, linking, symbols, and FFI
+ ungated!(
+ link, Normal,
+ template!(List: r#"name = "...", /*opt*/ kind = "dylib|static|...", /*opt*/ wasm_import_module = "...""#),
+ DuplicatesOk,
+ ),
+ ungated!(link_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
+ ungated!(no_link, Normal, template!(Word), WarnFollowing),
+ ungated!(repr, Normal, template!(List: "C"), DuplicatesOk),
+ ungated!(export_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
+ ungated!(link_section, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
+ ungated!(no_mangle, Normal, template!(Word), WarnFollowing, @only_local: true),
+ ungated!(used, Normal, template!(Word, List: "compiler|linker"), WarnFollowing, @only_local: true),
+
+ // Limits:
+ ungated!(recursion_limit, CrateLevel, template!(NameValueStr: "N"), FutureWarnFollowing),
+ ungated!(type_length_limit, CrateLevel, template!(NameValueStr: "N"), FutureWarnFollowing),
+ gated!(
+ const_eval_limit, CrateLevel, template!(NameValueStr: "N"), ErrorFollowing,
+ const_eval_limit, experimental!(const_eval_limit)
+ ),
+ gated!(
+ move_size_limit, CrateLevel, template!(NameValueStr: "N"), ErrorFollowing,
+ large_assignments, experimental!(move_size_limit)
+ ),
+
+ // Entry point:
+ ungated!(start, Normal, template!(Word), WarnFollowing),
+ ungated!(no_start, CrateLevel, template!(Word), WarnFollowing),
+ ungated!(no_main, CrateLevel, template!(Word), WarnFollowing),
+
+ // Modules, prelude, and resolution:
+ ungated!(path, Normal, template!(NameValueStr: "file"), FutureWarnFollowing),
+ ungated!(no_std, CrateLevel, template!(Word), WarnFollowing),
+ ungated!(no_implicit_prelude, Normal, template!(Word), WarnFollowing),
+ ungated!(non_exhaustive, Normal, template!(Word), WarnFollowing),
+
+ // Runtime
+ ungated!(
+ windows_subsystem, CrateLevel,
+ template!(NameValueStr: "windows|console"), FutureWarnFollowing
+ ),
+ ungated!(panic_handler, Normal, template!(Word), WarnFollowing), // RFC 2070
+
+ // Code generation:
+ ungated!(inline, Normal, template!(Word, List: "always|never"), FutureWarnFollowing, @only_local: true),
+ ungated!(cold, Normal, template!(Word), WarnFollowing, @only_local: true),
+ ungated!(no_builtins, CrateLevel, template!(Word), WarnFollowing),
+ ungated!(target_feature, Normal, template!(List: r#"enable = "name""#), DuplicatesOk),
+ ungated!(track_caller, Normal, template!(Word), WarnFollowing),
+ gated!(
+ no_sanitize, Normal,
+ template!(List: "address, memory, thread"), DuplicatesOk,
+ experimental!(no_sanitize)
+ ),
+ gated!(no_coverage, Normal, template!(Word), WarnFollowing, experimental!(no_coverage)),
+
+ ungated!(
+ doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk
+ ),
+
+ // ==========================================================================
+ // Unstable attributes:
+ // ==========================================================================
+
+ // RFC #3191: #[debugger_visualizer] support
+ gated!(
+ debugger_visualizer, Normal, template!(List: r#"natvis_file = "...", gdb_script_file = "...""#),
+ DuplicatesOk, experimental!(debugger_visualizer)
+ ),
+
+ // Linking:
+ gated!(naked, Normal, template!(Word), WarnFollowing, @only_local: true, naked_functions, experimental!(naked)),
+ gated!(
+ link_ordinal, Normal, template!(List: "ordinal"), ErrorPreceding, raw_dylib,
+ experimental!(link_ordinal)
+ ),
+
+ // Plugins:
+ // XXX Modified for use in rust-analyzer
+ // BuiltinAttribute {
+ // name: sym::plugin,
+ // only_local: false,
+ // type_: CrateLevel,
+ // template: template!(List: "name"),
+ // duplicates: DuplicatesOk,
+ // gate: Gated(
+ // Stability::Deprecated(
+ // "https://github.com/rust-lang/rust/pull/64675",
+ // Some("may be removed in a future compiler version"),
+ // ),
+ // sym::plugin,
+ // "compiler plugins are deprecated",
+ // cfg_fn!(plugin)
+ // ),
+ // },
+ BuiltinAttribute {
+ name: "plugin",
+ template: template!(List: "name"),
+ },
+
+ // Testing:
+ gated!(
+ test_runner, CrateLevel, template!(List: "path"), ErrorFollowing, custom_test_frameworks,
+ "custom test frameworks are an unstable feature",
+ ),
+ // RFC #1268
+ gated!(
+ marker, Normal, template!(Word), WarnFollowing, marker_trait_attr, experimental!(marker)
+ ),
+ gated!(
+ thread_local, Normal, template!(Word), WarnFollowing,
+ "`#[thread_local]` is an experimental feature, and does not currently handle destructors",
+ ),
+ gated!(no_core, CrateLevel, template!(Word), WarnFollowing, experimental!(no_core)),
+ // RFC 2412
+ gated!(
+ optimize, Normal, template!(List: "size|speed"), ErrorPreceding, optimize_attribute,
+ experimental!(optimize),
+ ),
+ // RFC 2867
+ gated!(
+ instruction_set, Normal, template!(List: "set"), ErrorPreceding,
+ isa_attribute, experimental!(instruction_set)
+ ),
+
+ gated!(
+ ffi_returns_twice, Normal, template!(Word), WarnFollowing, experimental!(ffi_returns_twice)
+ ),
+ gated!(ffi_pure, Normal, template!(Word), WarnFollowing, experimental!(ffi_pure)),
+ gated!(ffi_const, Normal, template!(Word), WarnFollowing, experimental!(ffi_const)),
+ gated!(
+ register_attr, CrateLevel, template!(List: "attr1, attr2, ..."), DuplicatesOk,
+ experimental!(register_attr),
+ ),
+ gated!(
+ register_tool, CrateLevel, template!(List: "tool1, tool2, ..."), DuplicatesOk,
+ experimental!(register_tool),
+ ),
+
+ gated!(
+ cmse_nonsecure_entry, Normal, template!(Word), WarnFollowing,
+ experimental!(cmse_nonsecure_entry)
+ ),
+ // RFC 2632
+ gated!(
+ const_trait, Normal, template!(Word), WarnFollowing, const_trait_impl,
+ "`const` is a temporary placeholder for marking a trait that is suitable for `const` \
+ `impls` and all default bodies as `const`, which may be removed or renamed in the \
+ future."
+ ),
+ // lang-team MCP 147
+ gated!(
+ deprecated_safe, Normal, template!(List: r#"since = "version", note = "...""#), ErrorFollowing,
+ experimental!(deprecated_safe),
+ ),
+
+ // ==========================================================================
+ // Internal attributes: Stability, deprecation, and unsafe:
+ // ==========================================================================
+
+ ungated!(feature, CrateLevel, template!(List: "name1, name2, ..."), DuplicatesOk),
+ // DuplicatesOk since it has its own validation
+ ungated!(
+ stable, Normal, template!(List: r#"feature = "name", since = "version""#), DuplicatesOk,
+ ),
+ ungated!(
+ unstable, Normal,
+ template!(List: r#"feature = "name", reason = "...", issue = "N""#), DuplicatesOk,
+ ),
+ ungated!(rustc_const_unstable, Normal, template!(List: r#"feature = "name""#), DuplicatesOk),
+ ungated!(rustc_const_stable, Normal, template!(List: r#"feature = "name""#), DuplicatesOk),
+ gated!(
+ allow_internal_unstable, Normal, template!(Word, List: "feat1, feat2, ..."), DuplicatesOk,
+ "allow_internal_unstable side-steps feature gating and stability checks",
+ ),
+ gated!(
+ rustc_allow_const_fn_unstable, Normal,
+ template!(Word, List: "feat1, feat2, ..."), DuplicatesOk,
+ "rustc_allow_const_fn_unstable side-steps feature gating and stability checks"
+ ),
+ gated!(
+ allow_internal_unsafe, Normal, template!(Word), WarnFollowing,
+ "allow_internal_unsafe side-steps the unsafe_code lint",
+ ),
+
+ // ==========================================================================
+ // Internal attributes: Type system related:
+ // ==========================================================================
+
+ gated!(fundamental, Normal, template!(Word), WarnFollowing, experimental!(fundamental)),
+ gated!(
+ may_dangle, Normal, template!(Word), WarnFollowing, dropck_eyepatch,
+ "`may_dangle` has unstable semantics and may be removed in the future",
+ ),
+
+ // ==========================================================================
+ // Internal attributes: Runtime related:
+ // ==========================================================================
+
+ rustc_attr!(rustc_allocator, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
+ rustc_attr!(rustc_allocator_nounwind, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
+ gated!(
+ alloc_error_handler, Normal, template!(Word), WarnFollowing,
+ experimental!(alloc_error_handler)
+ ),
+ gated!(
+ default_lib_allocator, Normal, template!(Word), WarnFollowing, allocator_internals,
+ experimental!(default_lib_allocator),
+ ),
+ gated!(
+ needs_allocator, Normal, template!(Word), WarnFollowing, allocator_internals,
+ experimental!(needs_allocator),
+ ),
+ gated!(panic_runtime, Normal, template!(Word), WarnFollowing, experimental!(panic_runtime)),
+ gated!(
+ needs_panic_runtime, Normal, template!(Word), WarnFollowing,
+ experimental!(needs_panic_runtime)
+ ),
+ gated!(
+ compiler_builtins, Normal, template!(Word), WarnFollowing,
+ "the `#[compiler_builtins]` attribute is used to identify the `compiler_builtins` crate \
+ which contains compiler-rt intrinsics and will never be stable",
+ ),
+ gated!(
+ profiler_runtime, Normal, template!(Word), WarnFollowing,
+ "the `#[profiler_runtime]` attribute is used to identify the `profiler_builtins` crate \
+ which contains the profiler runtime and will never be stable",
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Linkage:
+ // ==========================================================================
+
+ gated!(
+ linkage, Normal, template!(NameValueStr: "external|internal|..."), ErrorPreceding, @only_local: true,
+ "the `linkage` attribute is experimental and not portable across platforms",
+ ),
+ rustc_attr!(
+ rustc_std_internal_symbol, Normal, template!(Word), WarnFollowing, @only_local: true, INTERNAL_UNSTABLE
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Macro related:
+ // ==========================================================================
+
+ rustc_attr!(
+ rustc_builtin_macro, Normal,
+ template!(Word, List: "name, /*opt*/ attributes(name1, name2, ...)"), ErrorFollowing,
+ IMPL_DETAIL,
+ ),
+ rustc_attr!(rustc_proc_macro_decls, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
+ rustc_attr!(
+ rustc_macro_transparency, Normal,
+ template!(NameValueStr: "transparent|semitransparent|opaque"), ErrorFollowing,
+ "used internally for testing macro hygiene",
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Diagnostics related:
+ // ==========================================================================
+
+ rustc_attr!(
+ rustc_on_unimplemented, Normal,
+ template!(
+ List: r#"/*opt*/ message = "...", /*opt*/ label = "...", /*opt*/ note = "...""#,
+ NameValueStr: "message"
+ ),
+ ErrorFollowing,
+ INTERNAL_UNSTABLE
+ ),
+ // Enumerates "identity-like" conversion methods to suggest on type mismatch.
+ rustc_attr!(
+ rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
+ // Prevents field reads in the marked trait or method to be considered
+ // during dead code analysis.
+ rustc_attr!(
+ rustc_trivial_field_reads, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
+ // Used by the `rustc::potential_query_instability` lint to warn methods which
+ // might not be stable during incremental compilation.
+ rustc_attr!(rustc_lint_query_instability, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
+ // Used by the `rustc::untranslatable_diagnostic` and `rustc::diagnostic_outside_of_impl` lints
+ // to assist in changes to diagnostic APIs.
+ rustc_attr!(rustc_lint_diagnostics, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
+
+ // ==========================================================================
+ // Internal attributes, Const related:
+ // ==========================================================================
+
+ rustc_attr!(rustc_promotable, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
+ rustc_attr!(
+ rustc_legacy_const_generics, Normal, template!(List: "N"), ErrorFollowing,
+ INTERNAL_UNSTABLE
+ ),
+ // Do not const-check this function's body. It will always get replaced during CTFE.
+ rustc_attr!(
+ rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Layout related:
+ // ==========================================================================
+
+ rustc_attr!(
+ rustc_layout_scalar_valid_range_start, Normal, template!(List: "value"), ErrorFollowing,
+ "the `#[rustc_layout_scalar_valid_range_start]` attribute is just used to enable \
+ niche optimizations in libcore and libstd and will never be stable",
+ ),
+ rustc_attr!(
+ rustc_layout_scalar_valid_range_end, Normal, template!(List: "value"), ErrorFollowing,
+ "the `#[rustc_layout_scalar_valid_range_end]` attribute is just used to enable \
+ niche optimizations in libcore and libstd and will never be stable",
+ ),
+ rustc_attr!(
+ rustc_nonnull_optimization_guaranteed, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable \
+ niche optimizations in libcore and libstd and will never be stable",
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Misc:
+ // ==========================================================================
+ gated!(
+ lang, Normal, template!(NameValueStr: "name"), DuplicatesOk, @only_local: true, lang_items,
+ "language items are subject to change",
+ ),
+ rustc_attr!(
+ rustc_pass_by_value, Normal,
+ template!(Word), ErrorFollowing,
+ "#[rustc_pass_by_value] is used to mark types that must be passed by value instead of reference."
+ ),
+ rustc_attr!(
+ rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, @only_local: true,
+ "#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`."
+ ),
+ rustc_attr!(
+ rustc_allow_incoherent_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: true,
+ "#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
+ ),
+ rustc_attr!(
+ rustc_has_incoherent_inherent_impls, AttributeType::Normal, template!(Word), ErrorFollowing,
+ "#[rustc_has_incoherent_inherent_impls] allows the addition of incoherent inherent impls for \
+ the given type by annotating all impl items with #[rustc_allow_incoherent_impl]."
+ ),
+ rustc_attr!(
+ rustc_box, AttributeType::Normal, template!(Word), ErrorFollowing,
+ "#[rustc_box] allows creating boxes \
+ and it is only intended to be used in `alloc`."
+ ),
+
+ // modified for r-a
+ // BuiltinAttribute {
+ // name: sym::rustc_diagnostic_item,
+ // // FIXME: This can be `true` once we always use `tcx.is_diagnostic_item`.
+ // only_local: false,
+ // type_: Normal,
+ // template: template!(NameValueStr: "name"),
+ // duplicates: ErrorFollowing,
+ // gate: Gated(
+ // Stability::Unstable,
+ // sym::rustc_attrs,
+ // "diagnostic items compiler internal support for linting",
+ // cfg_fn!(rustc_attrs),
+ // ),
+ // },
+ BuiltinAttribute {
+ name: "rustc_diagnostic_item",
+ template: template!(NameValueStr: "name"),
+ },
+ gated!(
+ // Used in resolve:
+ prelude_import, Normal, template!(Word), WarnFollowing,
+ "`#[prelude_import]` is for use by rustc only",
+ ),
+ gated!(
+ rustc_paren_sugar, Normal, template!(Word), WarnFollowing, unboxed_closures,
+ "unboxed_closures are still evolving",
+ ),
+ rustc_attr!(
+ rustc_inherit_overflow_checks, Normal, template!(Word), WarnFollowing, @only_local: true,
+ "the `#[rustc_inherit_overflow_checks]` attribute is just used to control \
+ overflow checking behavior of several libcore functions that are inlined \
+ across crates and will never be stable",
+ ),
+ rustc_attr!(
+ rustc_reservation_impl, Normal,
+ template!(NameValueStr: "reservation message"), ErrorFollowing,
+ "the `#[rustc_reservation_impl]` attribute is internally used \
+ for reserving for `for<T> From<!> for T` impl"
+ ),
+ rustc_attr!(
+ rustc_test_marker, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_test_marker]` attribute is used internally to track tests",
+ ),
+ rustc_attr!(
+ rustc_unsafe_specialization_marker, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_unsafe_specialization_marker]` attribute is used to check specializations"
+ ),
+ rustc_attr!(
+ rustc_specialization_trait, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_specialization_trait]` attribute is used to check specializations"
+ ),
+ rustc_attr!(
+ rustc_main, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_main]` attribute is used internally to specify test entry point function",
+ ),
+ rustc_attr!(
+ rustc_skip_array_during_method_dispatch, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_skip_array_during_method_dispatch]` attribute is used to exclude a trait \
+ from method dispatch when the receiver is an array, for compatibility in editions < 2021."
+ ),
+ rustc_attr!(
+ rustc_must_implement_one_of, Normal, template!(List: "function1, function2, ..."), ErrorFollowing,
+ "the `#[rustc_must_implement_one_of]` attribute is used to change minimal complete \
+ definition of a trait, it's currently in experimental form and should be changed before \
+ being exposed outside of the std"
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Testing:
+ // ==========================================================================
+
+ rustc_attr!(TEST, rustc_outlives, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_capture_analysis, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_insignificant_dtor, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
+ rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(
+ TEST, rustc_error, Normal,
+ template!(Word, List: "delay_span_bug_from_inside_query"), WarnFollowingWordOnly
+ ),
+ rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(
+ TEST, rustc_if_this_changed, Normal, template!(Word, List: "DepNode"), DuplicatesOk
+ ),
+ rustc_attr!(
+ TEST, rustc_then_this_would_need, Normal, template!(List: "DepNode"), DuplicatesOk
+ ),
+ rustc_attr!(
+ TEST, rustc_clean, Normal,
+ template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#),
+ DuplicatesOk,
+ ),
+ rustc_attr!(
+ TEST, rustc_partition_reused, Normal,
+ template!(List: r#"cfg = "...", module = "...""#), DuplicatesOk,
+ ),
+ rustc_attr!(
+ TEST, rustc_partition_codegened, Normal,
+ template!(List: r#"cfg = "...", module = "...""#), DuplicatesOk,
+ ),
+ rustc_attr!(
+ TEST, rustc_expected_cgu_reuse, Normal,
+ template!(List: r#"cfg = "...", module = "...", kind = "...""#), DuplicatesOk,
+ ),
+ rustc_attr!(TEST, rustc_symbol_name, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_polymorphize_error, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_def_path, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_mir, Normal, template!(List: "arg1, arg2, ..."), DuplicatesOk),
+ rustc_attr!(TEST, rustc_dump_program_clauses, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dump_env_program_clauses, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_object_lifetime_default, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dump_vtable, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dummy, Normal, template!(Word /* doesn't matter*/), DuplicatesOk),
+ gated!(
+ omit_gdb_pretty_printer_section, Normal, template!(Word), WarnFollowing,
+ "the `#[omit_gdb_pretty_printer_section]` attribute is just used for the Rust test suite",
+ ),
+];
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs
new file mode 100644
index 000000000..25a408036
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs
@@ -0,0 +1,158 @@
+//! This module defines built-in types.
+//!
+//! A peculiarity of built-in types is that they are always available and are
+//! not associated with any particular crate.
+
+use std::fmt;
+
+use hir_expand::name::{name, AsName, Name};
+/// Different signed int types.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum BuiltinInt {
+    /// Pointer-sized signed integer.
+    Isize,
+    I8,
+    I16,
+    I32,
+    I64,
+    I128,
+}
+
+/// Different unsigned int types.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum BuiltinUint {
+    /// Pointer-sized unsigned integer.
+    Usize,
+    U8,
+    U16,
+    U32,
+    U64,
+    U128,
+}
+
+/// The built-in floating point types.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum BuiltinFloat {
+    F32,
+    F64,
+}
+
+/// A type that is always in scope and not defined by any particular crate:
+/// `char`, `bool`, `str`, and the numeric primitives.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum BuiltinType {
+    Char,
+    Bool,
+    Str,
+    Int(BuiltinInt),
+    Uint(BuiltinUint),
+    Float(BuiltinFloat),
+}
+
+impl BuiltinType {
+    /// Every built-in type paired with its canonical name. Being a `const`,
+    /// this table is available without any database access; it is used to
+    /// seed name resolution.
+    #[rustfmt::skip]
+    pub const ALL: &'static [(Name, BuiltinType)] = &[
+        (name![char], BuiltinType::Char),
+        (name![bool], BuiltinType::Bool),
+        (name![str],  BuiltinType::Str),
+
+        (name![isize], BuiltinType::Int(BuiltinInt::Isize)),
+        (name![i8],    BuiltinType::Int(BuiltinInt::I8)),
+        (name![i16],   BuiltinType::Int(BuiltinInt::I16)),
+        (name![i32],   BuiltinType::Int(BuiltinInt::I32)),
+        (name![i64],   BuiltinType::Int(BuiltinInt::I64)),
+        (name![i128],  BuiltinType::Int(BuiltinInt::I128)),
+
+        (name![usize], BuiltinType::Uint(BuiltinUint::Usize)),
+        (name![u8],    BuiltinType::Uint(BuiltinUint::U8)),
+        (name![u16],   BuiltinType::Uint(BuiltinUint::U16)),
+        (name![u32],   BuiltinType::Uint(BuiltinUint::U32)),
+        (name![u64],   BuiltinType::Uint(BuiltinUint::U64)),
+        (name![u128],  BuiltinType::Uint(BuiltinUint::U128)),
+
+        (name![f32], BuiltinType::Float(BuiltinFloat::F32)),
+        (name![f64], BuiltinType::Float(BuiltinFloat::F64)),
+    ];
+
+    /// Looks up a built-in type by name via a linear scan of [`Self::ALL`]
+    /// (the table is tiny, so a map would not pay off).
+    pub fn by_name(name: &Name) -> Option<Self> {
+        Self::ALL.iter().find_map(|(n, ty)| if n == name { Some(*ty) } else { None })
+    }
+}
+
+impl AsName for BuiltinType {
+    /// Returns the canonical name of the type (e.g. `i32`); the inverse of
+    /// [`BuiltinType::by_name`].
+    fn as_name(&self) -> Name {
+        match self {
+            BuiltinType::Char => name![char],
+            BuiltinType::Bool => name![bool],
+            BuiltinType::Str => name![str],
+            BuiltinType::Int(it) => match it {
+                BuiltinInt::Isize => name![isize],
+                BuiltinInt::I8 => name![i8],
+                BuiltinInt::I16 => name![i16],
+                BuiltinInt::I32 => name![i32],
+                BuiltinInt::I64 => name![i64],
+                BuiltinInt::I128 => name![i128],
+            },
+            BuiltinType::Uint(it) => match it {
+                BuiltinUint::Usize => name![usize],
+                BuiltinUint::U8 => name![u8],
+                BuiltinUint::U16 => name![u16],
+                BuiltinUint::U32 => name![u32],
+                BuiltinUint::U64 => name![u64],
+                BuiltinUint::U128 => name![u128],
+            },
+            BuiltinType::Float(it) => match it {
+                BuiltinFloat::F32 => name![f32],
+                BuiltinFloat::F64 => name![f64],
+            },
+        }
+    }
+}
+
+/// Built-in types display as their canonical name (e.g. `u8`).
+impl fmt::Display for BuiltinType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&self.as_name(), f)
+    }
+}
+
+impl BuiltinInt {
+    /// Parses a literal suffix (the `i32` in `1i32`) into a signed int type.
+    pub fn from_suffix(suffix: &str) -> Option<BuiltinInt> {
+        match suffix {
+            "isize" => Some(Self::Isize),
+            "i8" => Some(Self::I8),
+            "i16" => Some(Self::I16),
+            "i32" => Some(Self::I32),
+            "i64" => Some(Self::I64),
+            "i128" => Some(Self::I128),
+            _ => None,
+        }
+    }
+}
+
+impl BuiltinUint {
+    /// Parses a literal suffix (the `u8` in `255u8`) into an unsigned int type.
+    pub fn from_suffix(suffix: &str) -> Option<BuiltinUint> {
+        match suffix {
+            "usize" => Some(Self::Usize),
+            "u8" => Some(Self::U8),
+            "u16" => Some(Self::U16),
+            "u32" => Some(Self::U32),
+            "u64" => Some(Self::U64),
+            "u128" => Some(Self::U128),
+            _ => None,
+        }
+    }
+}
+
+impl BuiltinFloat {
+    /// Parses a literal suffix (the `f64` in `1.0f64`) into a float type.
+    pub fn from_suffix(suffix: &str) -> Option<BuiltinFloat> {
+        match suffix {
+            "f32" => Some(Self::F32),
+            "f64" => Some(Self::F64),
+            _ => None,
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
new file mode 100644
index 000000000..5b1435e8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
@@ -0,0 +1,207 @@
+//! When *constructing* `hir`, we start at some parent syntax node and recursively
+//! lower the children.
+//!
+//! This modules allows one to go in the opposite direction: start with a syntax
+//! node for a *child*, and get its hir.
+
+use either::Either;
+use hir_expand::HirFileId;
+use syntax::ast::HasDocComments;
+
+use crate::{
+ db::DefDatabase,
+ dyn_map::DynMap,
+ item_scope::ItemScope,
+ keys,
+ src::{HasChildSource, HasSource},
+ AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, FieldId, ImplId, Lookup, MacroId,
+ ModuleDefId, ModuleId, TraitId, VariantId,
+};
+
+pub trait ChildBySource {
+    /// Builds a fresh [`DynMap`] from child syntax nodes in `file_id` to
+    /// their hir ids. Convenience wrapper over [`Self::child_by_source_to`].
+    fn child_by_source(&self, db: &dyn DefDatabase, file_id: HirFileId) -> DynMap {
+        let mut res = DynMap::default();
+        self.child_by_source_to(db, &mut res, file_id);
+        res
+    }
+    /// Records this item's children into `map`, restricted to children whose
+    /// syntax originates in `file_id`.
+    fn child_by_source_to(&self, db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId);
+}
+
+impl ChildBySource for TraitId {
+    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+        let data = db.trait_data(*self);
+
+        // Record attribute-macro invocations on trait items, limited to `file_id`.
+        data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
+            |(ast_id, call_id)| {
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+            },
+        );
+        // Record each associated item (the name is not needed here).
+        data.items.iter().for_each(|&(_, item)| {
+            add_assoc_item(db, res, file_id, item);
+        });
+    }
+}
+
+impl ChildBySource for ImplId {
+    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+        let data = db.impl_data(*self);
+        // Record attribute-macro invocations on impl items, limited to `file_id`.
+        for (ast_id, call_id) in data.attribute_calls() {
+            if ast_id.file_id == file_id {
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+            }
+        }
+        // Record every associated item of the impl.
+        for &item in data.items.iter() {
+            add_assoc_item(db, res, file_id, item);
+        }
+    }
+}
+
+/// Inserts `item` into `res` under the key matching its kind, but only when
+/// its syntax originates in `file_id` (items may come from other files via
+/// macro expansion).
+fn add_assoc_item(db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId, item: AssocItemId) {
+    match item {
+        AssocItemId::FunctionId(func) => {
+            let loc = func.lookup(db);
+            if loc.id.file_id() == file_id {
+                res[keys::FUNCTION].insert(loc.source(db).value, func)
+            }
+        }
+        AssocItemId::ConstId(konst) => {
+            let loc = konst.lookup(db);
+            if loc.id.file_id() == file_id {
+                res[keys::CONST].insert(loc.source(db).value, konst)
+            }
+        }
+        AssocItemId::TypeAliasId(ty) => {
+            let loc = ty.lookup(db);
+            if loc.id.file_id() == file_id {
+                res[keys::TYPE_ALIAS].insert(loc.source(db).value, ty)
+            }
+        }
+    }
+}
+
+impl ChildBySource for ModuleId {
+    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+        // A module's children are exactly the contents of its item scope.
+        self.def_map(db)[self.local_id].scope.child_by_source_to(db, res, file_id);
+    }
+}
+
+impl ChildBySource for ItemScope {
+    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+        // Named declarations and impls in this scope.
+        self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
+        self.impls().for_each(|imp| add_impl(db, res, file_id, imp));
+        // `const _: () = ...;` items have no name and are tracked separately.
+        self.unnamed_consts().for_each(|konst| {
+            let loc = konst.lookup(db);
+            if loc.id.file_id() == file_id {
+                res[keys::CONST].insert(loc.source(db).value, konst);
+            }
+        });
+        // Attribute-macro invocations on items of this scope, limited to `file_id`.
+        self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
+            |(ast_id, call_id)| {
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+            },
+        );
+        // `macro_rules!` definitions visible via textual (legacy) scoping.
+        self.legacy_macros().for_each(|(_, ids)| {
+            ids.iter().for_each(|&id| {
+                if let MacroId::MacroRulesId(id) = id {
+                    let loc = id.lookup(db);
+                    if loc.id.file_id() == file_id {
+                        res[keys::MACRO_RULES].insert(loc.source(db).value, id);
+                    }
+                }
+            })
+        });
+        // Derive invocations: relocate each `#[derive(...)]` attribute node by its
+        // index among the item's doc comments and attributes, and map it to its calls.
+        self.derive_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
+            |(ast_id, calls)| {
+                let adt = ast_id.to_node(db.upcast());
+                calls.for_each(|(attr_id, call_id, calls)| {
+                    // `Left` is a real attribute (as opposed to a doc comment).
+                    if let Some(Either::Left(attr)) =
+                        adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize)
+                    {
+                        res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
+                    }
+                });
+            },
+        );
+
+        // Inserts a module-level definition under the key for its kind, when it
+        // comes from `file_id`.
+        fn add_module_def(
+            db: &dyn DefDatabase,
+            map: &mut DynMap,
+            file_id: HirFileId,
+            item: ModuleDefId,
+        ) {
+            // Shorthand for the repeated lookup-check-insert pattern below.
+            macro_rules! insert {
+                ($map:ident[$key:path].$insert:ident($id:ident)) => {{
+                    let loc = $id.lookup(db);
+                    if loc.id.file_id() == file_id {
+                        $map[$key].$insert(loc.source(db).value, $id)
+                    }
+                }};
+            }
+            match item {
+                ModuleDefId::FunctionId(id) => insert!(map[keys::FUNCTION].insert(id)),
+                ModuleDefId::ConstId(id) => insert!(map[keys::CONST].insert(id)),
+                ModuleDefId::StaticId(id) => insert!(map[keys::STATIC].insert(id)),
+                ModuleDefId::TypeAliasId(id) => insert!(map[keys::TYPE_ALIAS].insert(id)),
+                ModuleDefId::TraitId(id) => insert!(map[keys::TRAIT].insert(id)),
+                ModuleDefId::AdtId(adt) => match adt {
+                    AdtId::StructId(id) => insert!(map[keys::STRUCT].insert(id)),
+                    AdtId::UnionId(id) => insert!(map[keys::UNION].insert(id)),
+                    AdtId::EnumId(id) => insert!(map[keys::ENUM].insert(id)),
+                },
+                ModuleDefId::MacroId(id) => match id {
+                    MacroId::Macro2Id(id) => insert!(map[keys::MACRO2].insert(id)),
+                    MacroId::MacroRulesId(id) => insert!(map[keys::MACRO_RULES].insert(id)),
+                    MacroId::ProcMacroId(id) => insert!(map[keys::PROC_MACRO].insert(id)),
+                },
+                // These kinds have no dedicated key in the map.
+                ModuleDefId::ModuleId(_)
+                | ModuleDefId::EnumVariantId(_)
+                | ModuleDefId::BuiltinType(_) => (),
+            }
+        }
+        // Same check-then-insert for impl blocks.
+        fn add_impl(db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId, imp: ImplId) {
+            let loc = imp.lookup(db);
+            if loc.id.file_id() == file_id {
+                map[keys::IMPL].insert(loc.source(db).value, imp)
+            }
+        }
+    }
+}
+
+impl ChildBySource for VariantId {
+    // `file_id` is unused: fields live in the same file as the variant itself.
+    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, _: HirFileId) {
+        let src_map = self.child_source(db);
+        for (local_id, field_src) in src_map.as_ref().value.iter() {
+            let field = FieldId { parent: *self, local_id };
+            match field_src.clone() {
+                Either::Left(tuple_field) => res[keys::TUPLE_FIELD].insert(tuple_field, field),
+                Either::Right(record_field) => res[keys::RECORD_FIELD].insert(record_field, field),
+            }
+        }
+    }
+}
+
+impl ChildBySource for EnumId {
+    // `file_id` is unused: variants live in the same file as the enum itself.
+    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, _: HirFileId) {
+        let src_map = self.child_source(db);
+        for (local_id, variant_src) in src_map.as_ref().value.iter() {
+            let variant = EnumVariantId { parent: *self, local_id };
+            res[keys::VARIANT].insert(variant_src.clone(), variant)
+        }
+    }
+}
+
+impl ChildBySource for DefWithBodyId {
+    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+        let body = db.body(*self);
+        // A body's children are the items declared in its block expressions.
+        for (_, def_map) in body.blocks(db) {
+            // All block expressions are merged into the same map, because they logically all add
+            // inner items to the containing `DefWithBodyId`.
+            def_map[def_map.root()].scope.child_by_source_to(db, res, file_id);
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
new file mode 100644
index 000000000..35c870895
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
@@ -0,0 +1,579 @@
+//! Contains basic data about various HIR declarations.
+
+use std::sync::Arc;
+
+use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, MacroCallId, MacroDefKind};
+use smallvec::SmallVec;
+use syntax::ast;
+
+use crate::{
+ attr::Attrs,
+ body::{Expander, Mark},
+ db::DefDatabase,
+ intern::Interned,
+ item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId},
+ nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap},
+ type_ref::{TraitRef, TypeBound, TypeRef},
+ visibility::RawVisibility,
+ AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
+ Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
+ StaticId, TraitId, TypeAliasId, TypeAliasLoc,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FunctionData {
+    pub name: Name,
+    /// Cfg-enabled parameters: binding name (if any) plus type. `...` varargs
+    /// params are excluded; see [`FunctionData::is_varargs`].
+    pub params: Vec<(Option<Name>, Interned<TypeRef>)>,
+    pub ret_type: Interned<TypeRef>,
+    /// Extra return type recorded for `async` fns — NOTE(review): presumably
+    /// the desugared future type; confirm against item-tree lowering.
+    pub async_ret_type: Option<Interned<TypeRef>>,
+    pub attrs: Attrs,
+    pub visibility: RawVisibility,
+    /// The ABI string, when one is specified.
+    pub abi: Option<Interned<str>>,
+    /// Indices parsed from `#[rustc_legacy_const_generics(...)]`, if present.
+    pub legacy_const_generics_indices: Box<[u32]>,
+    // Bit flags queried through the accessor methods below.
+    flags: FnFlags,
+}
+
+impl FunctionData {
+    /// Query implementation: assembles [`FunctionData`] from the item tree.
+    pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc<FunctionData> {
+        let loc = func.lookup(db);
+        let krate = loc.container.module(db).krate;
+        let crate_graph = db.crate_graph();
+        let cfg_options = &crate_graph[krate].cfg_options;
+        let item_tree = loc.id.item_tree(db);
+        let func = &item_tree[loc.id.value];
+        // Trait items inherit the visibility of the enclosing trait.
+        let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
+            db.trait_data(trait_id).visibility.clone()
+        } else {
+            item_tree[func.visibility].clone()
+        };
+
+        // Drop parameters disabled by `#[cfg(...)]`.
+        let enabled_params = func
+            .params
+            .clone()
+            .filter(|&param| item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options));
+
+        // If last cfg-enabled param is a `...` param, it's a varargs function.
+        let is_varargs = enabled_params
+            .clone()
+            .next_back()
+            .map_or(false, |param| matches!(item_tree[param], Param::Varargs));
+
+        let mut flags = func.flags;
+        if is_varargs {
+            flags |= FnFlags::IS_VARARGS;
+        }
+        if flags.contains(FnFlags::HAS_SELF_PARAM) {
+            // If there's a self param in the syntax, but it is cfg'd out, remove the flag.
+            let is_cfgd_out = match func.params.clone().next() {
+                Some(param) => {
+                    !item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options)
+                }
+                None => {
+                    stdx::never!("fn HAS_SELF_PARAM but no parameters allocated");
+                    true
+                }
+            };
+            if is_cfgd_out {
+                cov_mark::hit!(cfgd_out_self_param);
+                flags.remove(FnFlags::HAS_SELF_PARAM);
+            }
+        }
+
+        // Only the first `#[rustc_legacy_const_generics(...)]` is considered.
+        let legacy_const_generics_indices = item_tree
+            .attrs(db, krate, ModItem::from(loc.id.value).into())
+            .by_key("rustc_legacy_const_generics")
+            .tt_values()
+            .next()
+            .map(parse_rustc_legacy_const_generics)
+            .unwrap_or_default();
+
+        Arc::new(FunctionData {
+            name: func.name.clone(),
+            params: enabled_params
+                .clone()
+                .filter_map(|id| match &item_tree[id] {
+                    Param::Normal(name, ty) => Some((name.clone(), ty.clone())),
+                    Param::Varargs => None,
+                })
+                .collect(),
+            ret_type: func.ret_type.clone(),
+            async_ret_type: func.async_ret_type.clone(),
+            attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()),
+            visibility,
+            abi: func.abi.clone(),
+            legacy_const_generics_indices,
+            flags,
+        })
+    }
+
+    /// True when the function has a body (i.e. is not a bodiless trait method
+    /// or extern declaration).
+    pub fn has_body(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_BODY)
+    }
+
+    /// True if the first param is `self`. This is relevant to decide whether this
+    /// can be called as a method.
+    pub fn has_self_param(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_SELF_PARAM)
+    }
+
+    /// True for `default fn` in an impl.
+    pub fn has_default_kw(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_DEFAULT_KW)
+    }
+
+    /// True for `const fn`.
+    pub fn has_const_kw(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_CONST_KW)
+    }
+
+    /// True for `async fn`.
+    pub fn has_async_kw(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_ASYNC_KW)
+    }
+
+    /// True for `unsafe fn`.
+    pub fn has_unsafe_kw(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_UNSAFE_KW)
+    }
+
+    /// True when the (cfg-enabled) parameter list ends in `...`.
+    pub fn is_varargs(&self) -> bool {
+        self.flags.contains(FnFlags::IS_VARARGS)
+    }
+}
+
+/// Parses the token tree of `#[rustc_legacy_const_generics(...)]` into the
+/// listed indices, stopping at the first malformed token.
+fn parse_rustc_legacy_const_generics(tt: &tt::Subtree) -> Box<[u32]> {
+    let mut indices = Vec::new();
+    // Tokens come in `index , index , ...` pairs; the final pair may lack a comma.
+    for pair in tt.token_trees.chunks(2) {
+        // The first token of every pair must be an integer literal.
+        let index = match &pair[0] {
+            tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.text.parse() {
+                Ok(index) => index,
+                Err(_) => break,
+            },
+            _ => break,
+        };
+        indices.push(index);
+
+        // The second token, when present, must be the separating comma.
+        if let Some(sep) = pair.get(1) {
+            match sep {
+                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
+                _ => break,
+            }
+        }
+    }
+
+    indices.into_boxed_slice()
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TypeAliasData {
+    pub name: Name,
+    /// The aliased type; `None` for a bodiless alias (e.g. in a trait).
+    pub type_ref: Option<Interned<TypeRef>>,
+    pub visibility: RawVisibility,
+    /// True when declared inside an `extern` block.
+    pub is_extern: bool,
+    /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
+    pub bounds: Vec<Interned<TypeBound>>,
+}
+
+impl TypeAliasData {
+    /// Query implementation: reads the alias from the item tree.
+    pub(crate) fn type_alias_data_query(
+        db: &dyn DefDatabase,
+        typ: TypeAliasId,
+    ) -> Arc<TypeAliasData> {
+        let loc = typ.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let typ = &item_tree[loc.id.value];
+        // Trait items inherit the visibility of the enclosing trait.
+        let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
+            db.trait_data(trait_id).visibility.clone()
+        } else {
+            item_tree[typ.visibility].clone()
+        };
+
+        Arc::new(TypeAliasData {
+            name: typ.name.clone(),
+            type_ref: typ.type_ref.clone(),
+            visibility,
+            is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
+            bounds: typ.bounds.to_vec(),
+        })
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TraitData {
+    pub name: Name,
+    /// Associated items, keyed by name, in collection order (including items
+    /// produced by macro expansion).
+    pub items: Vec<(Name, AssocItemId)>,
+    pub is_auto: bool,
+    pub is_unsafe: bool,
+    pub visibility: RawVisibility,
+    /// Whether the trait has `#[rust_skip_array_during_method_dispatch]`. `hir_ty` will ignore
+    /// method calls to this trait's methods when the receiver is an array and the crate edition is
+    /// 2015 or 2018.
+    pub skip_array_during_method_dispatch: bool,
+    // box it as the vec is usually empty anyways
+    pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+}
+
+impl TraitData {
+    /// Query implementation: collects the trait's items (expanding macros in
+    /// its body along the way).
+    pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
+        let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
+        let item_tree = tree_id.item_tree(db);
+        let tr_def = &item_tree[tree_id.value];
+        // Attach context so panics during collection are attributable to this trait.
+        let _cx = stdx::panic_context::enter(format!(
+            "trait_data_query({:?} -> {:?} -> {:?})",
+            tr, tr_loc, tr_def
+        ));
+        let name = tr_def.name.clone();
+        let is_auto = tr_def.is_auto;
+        let is_unsafe = tr_def.is_unsafe;
+        let visibility = item_tree[tr_def.visibility].clone();
+        let skip_array_during_method_dispatch = item_tree
+            .attrs(db, module_id.krate(), ModItem::from(tree_id.value).into())
+            .by_key("rustc_skip_array_during_method_dispatch")
+            .exists();
+
+        let mut collector =
+            AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
+        collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
+        let (items, attribute_calls) = collector.finish();
+
+        Arc::new(TraitData {
+            name,
+            attribute_calls,
+            items,
+            is_auto,
+            is_unsafe,
+            visibility,
+            skip_array_during_method_dispatch,
+        })
+    }
+
+    /// All associated type aliases of the trait.
+    pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
+        self.items.iter().filter_map(|(_name, item)| match item {
+            AssocItemId::TypeAliasId(t) => Some(*t),
+            _ => None,
+        })
+    }
+
+    /// Finds an associated type alias by name (linear scan).
+    pub fn associated_type_by_name(&self, name: &Name) -> Option<TypeAliasId> {
+        self.items.iter().find_map(|(item_name, item)| match item {
+            AssocItemId::TypeAliasId(t) if item_name == name => Some(*t),
+            _ => None,
+        })
+    }
+
+    /// Finds an associated function by name (linear scan).
+    pub fn method_by_name(&self, name: &Name) -> Option<FunctionId> {
+        self.items.iter().find_map(|(item_name, item)| match item {
+            AssocItemId::FunctionId(t) if item_name == name => Some(*t),
+            _ => None,
+        })
+    }
+
+    /// Attribute-macro invocations on the trait's items, flattened.
+    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+        self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ImplData {
+    /// The implemented trait; `None` for inherent impls.
+    pub target_trait: Option<Interned<TraitRef>>,
+    pub self_ty: Interned<TypeRef>,
+    pub items: Vec<AssocItemId>,
+    /// True for negative impls (`impl !Trait for Ty`).
+    pub is_negative: bool,
+    // box it as the vec is usually empty anyways
+    pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+}
+
+impl ImplData {
+    /// Query implementation: collects the impl's items (expanding macros in
+    /// its body along the way).
+    pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
+        let _p = profile::span("impl_data_query");
+        let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
+
+        let item_tree = tree_id.item_tree(db);
+        let impl_def = &item_tree[tree_id.value];
+        let target_trait = impl_def.target_trait.clone();
+        let self_ty = impl_def.self_ty.clone();
+        let is_negative = impl_def.is_negative;
+
+        let mut collector =
+            AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
+        collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);
+
+        let (items, attribute_calls) = collector.finish();
+        // Unlike traits, impls don't key their items by name.
+        let items = items.into_iter().map(|(_, item)| item).collect();
+
+        Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls })
+    }
+
+    /// Attribute-macro invocations on the impl's items, flattened.
+    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+        self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
+    }
+}
+
+/// Data about a macros-2.0 (`macro`) definition.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Macro2Data {
+    pub name: Name,
+    pub visibility: RawVisibility,
+}
+
+impl Macro2Data {
+    /// Query implementation: reads the definition from the item tree.
+    pub(crate) fn macro2_data_query(db: &dyn DefDatabase, makro: Macro2Id) -> Arc<Macro2Data> {
+        let loc = makro.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let def = &item_tree[loc.id.value];
+        let name = def.name.clone();
+        let visibility = item_tree[def.visibility].clone();
+        Arc::new(Macro2Data { name, visibility })
+    }
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct MacroRulesData {
+    pub name: Name,
+    /// Whether the definition carries `#[macro_export]`.
+    pub macro_export: bool,
+}
+
+impl MacroRulesData {
+    /// Query implementation: reads the `macro_rules!` definition from the item
+    /// tree and checks for `#[macro_export]`.
+    pub(crate) fn macro_rules_data_query(
+        db: &dyn DefDatabase,
+        makro: MacroRulesId,
+    ) -> Arc<MacroRulesData> {
+        let loc = makro.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let makro = &item_tree[loc.id.value];
+
+        let macro_export = item_tree
+            .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
+            .by_key("macro_export")
+            .exists();
+
+        Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
+    }
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ProcMacroData {
+    pub name: Name,
+    /// Derive helpers, if this is a derive
+    pub helpers: Option<Box<[Name]>>,
+}
+
+impl ProcMacroData {
+    /// Query implementation: reads the proc-macro declaration from the item
+    /// tree and extracts its exported name and derive helpers.
+    pub(crate) fn proc_macro_data_query(
+        db: &dyn DefDatabase,
+        makro: ProcMacroId,
+    ) -> Arc<ProcMacroData> {
+        let loc = makro.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let makro = &item_tree[loc.id.value];
+
+        let (name, helpers) = if let Some(def) = item_tree
+            .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
+            .parse_proc_macro_decl(&makro.name)
+        {
+            (
+                def.name,
+                match def.kind {
+                    ProcMacroKind::CustomDerive { helpers } => Some(helpers),
+                    ProcMacroKind::FnLike | ProcMacroKind::Attr => None,
+                },
+            )
+        } else {
+            // The item wasn't a recognizable proc-macro declaration; fall back
+            // to the raw function name so callers still get *something*.
+            stdx::never!("proc macro declaration is not a proc macro");
+            (makro.name.clone(), None)
+        };
+        Arc::new(ProcMacroData { name, helpers })
+    }
+}
+
+/// Data about a `const` item.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ConstData {
+    /// `None` for `const _: () = ();`
+    pub name: Option<Name>,
+    pub type_ref: Interned<TypeRef>,
+    pub visibility: RawVisibility,
+}
+
+impl ConstData {
+    /// Query implementation: reads the const from the item tree.
+    pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc<ConstData> {
+        let loc = konst.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let def = &item_tree[loc.id.value];
+        // Trait items inherit the visibility of the enclosing trait.
+        let visibility = match loc.container {
+            ItemContainerId::TraitId(trait_id) => db.trait_data(trait_id).visibility.clone(),
+            _ => item_tree[def.visibility].clone(),
+        };
+        Arc::new(ConstData {
+            name: def.name.clone(),
+            type_ref: def.type_ref.clone(),
+            visibility,
+        })
+    }
+}
+
+/// Data about a `static` item.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct StaticData {
+    pub name: Name,
+    pub type_ref: Interned<TypeRef>,
+    pub visibility: RawVisibility,
+    pub mutable: bool,
+    pub is_extern: bool,
+}
+
+impl StaticData {
+    /// Query implementation: reads the static from the item tree.
+    pub(crate) fn static_data_query(db: &dyn DefDatabase, statik: StaticId) -> Arc<StaticData> {
+        let loc = statik.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let def = &item_tree[loc.id.value];
+        let is_extern = matches!(loc.container, ItemContainerId::ExternBlockId(_));
+        Arc::new(StaticData {
+            name: def.name.clone(),
+            type_ref: def.type_ref.clone(),
+            visibility: item_tree[def.visibility].clone(),
+            mutable: def.mutable,
+            is_extern,
+        })
+    }
+}
+
+/// Walks the associated items of a trait/impl, interning each one and
+/// expanding macros (both fn-like and attribute macros) found among them.
+struct AssocItemCollector<'a> {
+    db: &'a dyn DefDatabase,
+    module_id: ModuleId,
+    def_map: Arc<DefMap>,
+    container: ItemContainerId,
+    // Tracks the current file/expansion while descending into macro output.
+    expander: Expander,
+
+    // Collected items, keyed by name.
+    items: Vec<(Name, AssocItemId)>,
+    // Attribute-macro invocations encountered on items.
+    attr_calls: Vec<(AstId<ast::Item>, MacroCallId)>,
+}
+
+impl<'a> AssocItemCollector<'a> {
+    fn new(
+        db: &'a dyn DefDatabase,
+        module_id: ModuleId,
+        file_id: HirFileId,
+        container: ItemContainerId,
+    ) -> Self {
+        Self {
+            db,
+            module_id,
+            def_map: module_id.def_map(db),
+            container,
+            expander: Expander::new(db, file_id, module_id),
+            items: Vec::new(),
+            attr_calls: Vec::new(),
+        }
+    }
+
+    /// Consumes the collector, yielding the items and (boxed, to keep the
+    /// common empty case small) the attribute-macro calls.
+    fn finish(
+        self,
+    ) -> (Vec<(Name, AssocItemId)>, Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>) {
+        (
+            self.items,
+            if self.attr_calls.is_empty() { None } else { Some(Box::new(self.attr_calls)) },
+        )
+    }
+
+    // FIXME: proc-macro diagnostics
+    /// Collects `assoc_items` (recursing into macro expansions) into `self`.
+    fn collect(&mut self, item_tree: &ItemTree, tree_id: TreeId, assoc_items: &[AssocItem]) {
+        let container = self.container;
+        self.items.reserve(assoc_items.len());
+
+        'items: for &item in assoc_items {
+            // Skip items disabled by `#[cfg(...)]`.
+            let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
+            if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
+                continue;
+            }
+
+            // Try to resolve each attribute as an attribute macro; if one
+            // expands, the expansion replaces the item entirely.
+            'attrs: for attr in &*attrs {
+                let ast_id =
+                    AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast());
+                let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
+
+                if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro(
+                    self.db,
+                    self.module_id.local_id,
+                    ast_id_with_path,
+                    attr,
+                ) {
+                    self.attr_calls.push((ast_id, call_id));
+                    // If proc attribute macro expansion is disabled, skip expanding it here
+                    if !self.db.enable_proc_attr_macros() {
+                        continue 'attrs;
+                    }
+                    let loc = self.db.lookup_intern_macro_call(call_id);
+                    if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
+                        // If there's no expander for the proc macro (e.g. the
+                        // proc macro is ignored, or building the proc macro
+                        // crate failed), skip expansion like we would if it was
+                        // disabled. This is analogous to the handling in
+                        // `DefCollector::collect_macros`.
+                        if exp.is_dummy() {
+                            continue 'attrs;
+                        }
+                    }
+                    match self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id) {
+                        ExpandResult { value: Some((mark, _)), .. } => {
+                            // Collect the expansion's items in place of this item.
+                            self.collect_macro_items(mark);
+                            continue 'items;
+                        }
+                        ExpandResult { .. } => {}
+                    }
+                }
+            }
+
+            // No attribute macro applied: intern the item itself.
+            match item {
+                AssocItem::Function(id) => {
+                    let item = &item_tree[id];
+
+                    let def =
+                        FunctionLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
+                    self.items.push((item.name.clone(), def.into()));
+                }
+                AssocItem::Const(id) => {
+                    let item = &item_tree[id];
+
+                    // Unnamed consts (`const _`) are not recorded here.
+                    let name = match item.name.clone() {
+                        Some(name) => name,
+                        None => continue,
+                    };
+                    let def =
+                        ConstLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
+                    self.items.push((name, def.into()));
+                }
+                AssocItem::TypeAlias(id) => {
+                    let item = &item_tree[id];
+
+                    let def = TypeAliasLoc { container, id: ItemTreeId::new(tree_id, id) }
+                        .intern(self.db);
+                    self.items.push((item.name.clone(), def.into()));
+                }
+                AssocItem::MacroCall(call) => {
+                    // Expand fn-like macro calls and collect their output.
+                    if let Some(root) = self.db.parse_or_expand(self.expander.current_file_id()) {
+                        let call = &item_tree[call];
+
+                        let ast_id_map = self.db.ast_id_map(self.expander.current_file_id());
+                        let call = ast_id_map.get(call.ast_id).to_node(&root);
+                        // Attach context so panics are attributable to this call.
+                        let _cx = stdx::panic_context::enter(format!(
+                            "collect_items MacroCall: {}",
+                            call
+                        ));
+                        let res = self.expander.enter_expand::<ast::MacroItems>(self.db, call);
+
+                        if let Ok(ExpandResult { value: Some((mark, _)), .. }) = res {
+                            self.collect_macro_items(mark);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /// Collects the assoc items from the macro expansion entered via `mark`,
+    /// then leaves the expansion again.
+    fn collect_macro_items(&mut self, mark: Mark) {
+        let tree_id = item_tree::TreeId::new(self.expander.current_file_id(), None);
+        let item_tree = tree_id.item_tree(self.db);
+        let iter: SmallVec<[_; 2]> =
+            item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item).collect();
+
+        self.collect(&item_tree, tree_id, &iter);
+
+        self.expander.exit(self.db, mark);
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
new file mode 100644
index 000000000..df6dcb024
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -0,0 +1,243 @@
+//! Defines database & queries for name resolution.
+use std::sync::Arc;
+
+use base_db::{salsa, CrateId, SourceDatabase, Upcast};
+use either::Either;
+use hir_expand::{db::AstDatabase, HirFileId};
+use la_arena::ArenaMap;
+use syntax::{ast, AstPtr, SmolStr};
+
+use crate::{
+ adt::{EnumData, StructData},
+ attr::{Attrs, AttrsWithOwner},
+ body::{scope::ExprScopes, Body, BodySourceMap},
+ data::{
+ ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData,
+ TraitData, TypeAliasData,
+ },
+ generics::GenericParams,
+ import_map::ImportMap,
+ intern::Interned,
+ item_tree::{AttrOwner, ItemTree},
+ lang_item::{LangItemTarget, LangItems},
+ nameres::DefMap,
+ visibility::{self, Visibility},
+ AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
+ ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId,
+ LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
+ StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc,
+ UnionId, UnionLoc, VariantId,
+};
+
+/// Interning of item locations: each `*Loc` (the place where an item is
+/// defined) is mapped to a compact, stable `*Id` and back, using salsa's
+/// interning mechanism.
+#[salsa::query_group(InternDatabaseStorage)]
+pub trait InternDatabase: SourceDatabase {
+    #[salsa::interned]
+    fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
+    #[salsa::interned]
+    fn intern_struct(&self, loc: StructLoc) -> StructId;
+    #[salsa::interned]
+    fn intern_union(&self, loc: UnionLoc) -> UnionId;
+    #[salsa::interned]
+    fn intern_enum(&self, loc: EnumLoc) -> EnumId;
+    #[salsa::interned]
+    fn intern_const(&self, loc: ConstLoc) -> ConstId;
+    #[salsa::interned]
+    fn intern_static(&self, loc: StaticLoc) -> StaticId;
+    #[salsa::interned]
+    fn intern_trait(&self, loc: TraitLoc) -> TraitId;
+    #[salsa::interned]
+    fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
+    #[salsa::interned]
+    fn intern_impl(&self, loc: ImplLoc) -> ImplId;
+    #[salsa::interned]
+    fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId;
+    #[salsa::interned]
+    fn intern_block(&self, loc: BlockLoc) -> BlockId;
+    #[salsa::interned]
+    fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id;
+    #[salsa::interned]
+    fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
+    #[salsa::interned]
+    fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
+}
+
+/// The bulk of definition-related queries: item trees, `DefMap`s, per-item
+/// data, bodies, attributes and visibilities (see the module docs).
+#[salsa::query_group(DefDatabaseStorage)]
+pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
+    /// Whether expansion of proc-macro attributes is enabled. This is a salsa
+    /// input set by the IDE layer, not a derived query.
+    #[salsa::input]
+    fn enable_proc_attr_macros(&self) -> bool;
+
+    /// The `ItemTree` for a file: a condensed summary of its top-level items.
+    #[salsa::invoke(ItemTree::file_item_tree_query)]
+    fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
+
+    /// Transparent wrapper over [`Self::crate_def_map_query`] that records a
+    /// profiling span while (potentially) waiting for the computation.
+    #[salsa::invoke(crate_def_map_wait)]
+    #[salsa::transparent]
+    fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;
+
+    #[salsa::invoke(DefMap::crate_def_map_query)]
+    fn crate_def_map_query(&self, krate: CrateId) -> Arc<DefMap>;
+
+    /// Computes the block-level `DefMap`, returning `None` when `block` doesn't contain any inner
+    /// items directly.
+    ///
+    /// For example:
+    ///
+    /// ```
+    /// fn f() { // (0)
+    ///     { // (1)
+    ///         fn inner() {}
+    ///     }
+    /// }
+    /// ```
+    ///
+    /// The `block_def_map` for block 0 would return `None`, while `block_def_map` of block 1 would
+    /// return a `DefMap` containing `inner`.
+    #[salsa::invoke(DefMap::block_def_map_query)]
+    fn block_def_map(&self, block: BlockId) -> Option<Arc<DefMap>>;
+
+    #[salsa::invoke(StructData::struct_data_query)]
+    fn struct_data(&self, id: StructId) -> Arc<StructData>;
+
+    // Note: unions reuse `StructData` as their data type.
+    #[salsa::invoke(StructData::union_data_query)]
+    fn union_data(&self, id: UnionId) -> Arc<StructData>;
+
+    #[salsa::invoke(EnumData::enum_data_query)]
+    fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
+
+    #[salsa::invoke(ImplData::impl_data_query)]
+    fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
+
+    #[salsa::invoke(TraitData::trait_data_query)]
+    fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
+
+    #[salsa::invoke(TypeAliasData::type_alias_data_query)]
+    fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
+
+    #[salsa::invoke(FunctionData::fn_data_query)]
+    fn function_data(&self, func: FunctionId) -> Arc<FunctionData>;
+
+    #[salsa::invoke(ConstData::const_data_query)]
+    fn const_data(&self, konst: ConstId) -> Arc<ConstData>;
+
+    #[salsa::invoke(StaticData::static_data_query)]
+    fn static_data(&self, konst: StaticId) -> Arc<StaticData>;
+
+    #[salsa::invoke(Macro2Data::macro2_data_query)]
+    fn macro2_data(&self, makro: Macro2Id) -> Arc<Macro2Data>;
+
+    #[salsa::invoke(MacroRulesData::macro_rules_data_query)]
+    fn macro_rules_data(&self, makro: MacroRulesId) -> Arc<MacroRulesData>;
+
+    #[salsa::invoke(ProcMacroData::proc_macro_data_query)]
+    fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
+
+    #[salsa::invoke(Body::body_with_source_map_query)]
+    fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
+
+    #[salsa::invoke(Body::body_query)]
+    fn body(&self, def: DefWithBodyId) -> Arc<Body>;
+
+    #[salsa::invoke(ExprScopes::expr_scopes_query)]
+    fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
+
+    #[salsa::invoke(GenericParams::generic_params_query)]
+    fn generic_params(&self, def: GenericDefId) -> Interned<GenericParams>;
+
+    #[salsa::invoke(Attrs::variants_attrs_query)]
+    fn variants_attrs(&self, def: EnumId) -> Arc<ArenaMap<LocalEnumVariantId, Attrs>>;
+
+    #[salsa::invoke(Attrs::fields_attrs_query)]
+    fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
+
+    #[salsa::invoke(crate::attr::variants_attrs_source_map)]
+    fn variants_attrs_source_map(
+        &self,
+        def: EnumId,
+    ) -> Arc<ArenaMap<LocalEnumVariantId, AstPtr<ast::Variant>>>;
+
+    #[salsa::invoke(crate::attr::fields_attrs_source_map)]
+    fn fields_attrs_source_map(
+        &self,
+        def: VariantId,
+    ) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>>;
+
+    #[salsa::invoke(AttrsWithOwner::attrs_query)]
+    fn attrs(&self, def: AttrDefId) -> AttrsWithOwner;
+
+    #[salsa::invoke(LangItems::crate_lang_items_query)]
+    fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>;
+
+    #[salsa::invoke(LangItems::lang_item_query)]
+    fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option<LangItemTarget>;
+
+    #[salsa::invoke(ImportMap::import_map_query)]
+    fn import_map(&self, krate: CrateId) -> Arc<ImportMap>;
+
+    #[salsa::invoke(visibility::field_visibilities_query)]
+    fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
+
+    // FIXME: unify function_visibility and const_visibility?
+    #[salsa::invoke(visibility::function_visibility_query)]
+    fn function_visibility(&self, def: FunctionId) -> Visibility;
+
+    #[salsa::invoke(visibility::const_visibility_query)]
+    fn const_visibility(&self, def: ConstId) -> Visibility;
+
+    /// Transparent: derived on the fly from `crate_def_map`, see [`CrateLimits`].
+    #[salsa::transparent]
+    fn crate_limits(&self, crate_id: CrateId) -> CrateLimits;
+
+    fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
+}
+
+/// Thin wrapper around the `crate_def_map_query` salsa query that records the
+/// time spent blocking on it under the `"crate_def_map:wait"` profiling span.
+fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
+    // Keep the guard alive for the duration of the query call.
+    let _guard = profile::span("crate_def_map:wait");
+    db.crate_def_map_query(krate)
+}
+
+/// Per-crate limits, derived from the crate's `DefMap` (e.g. the value of a
+/// `#![recursion_limit = "..."]` attribute, when present).
+pub struct CrateLimits {
+    /// The maximum depth for potentially infinitely-recursive compile-time operations like macro expansion or auto-dereference.
+    pub recursion_limit: u32,
+}
+
+/// Looks up the limits configured for `crate_id`, falling back to rustc's
+/// defaults for anything the crate does not override.
+fn crate_limits(db: &dyn DefDatabase, crate_id: CrateId) -> CrateLimits {
+    // rustc defaults to 128 when no explicit recursion limit is set.
+    const DEFAULT_RECURSION_LIMIT: u32 = 128;
+
+    let def_map = db.crate_def_map(crate_id);
+    let recursion_limit = def_map.recursion_limit().unwrap_or(DEFAULT_RECURSION_LIMIT);
+    CrateLimits { recursion_limit }
+}
+
+/// Returns `true` if the crate's root file carries `#![no_std]`, either
+/// directly or behind a `#![cfg_attr(..., no_std)]`.
+fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
+    let root_file = db.crate_graph()[crate_id].root_file_id;
+    let item_tree = db.file_item_tree(root_file.into());
+    let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
+
+    for attr in &**attrs {
+        let ident = match attr.path().as_ident().and_then(|id| id.as_text()) {
+            Some(ident) => ident,
+            None => continue,
+        };
+        if ident == "no_std" {
+            return true;
+        }
+        if ident != "cfg_attr" {
+            continue;
+        }
+
+        // This is a `cfg_attr`; check whether any of the attributes it may
+        // expand to is `no_std`. Syntax:
+        // `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]`
+        let token_trees = match attr.token_tree_value() {
+            Some(subtree) => &subtree.token_trees,
+            None => continue,
+        };
+
+        let is_comma = |tt: &tt::TokenTree| {
+            matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',')
+        };
+        let mut segments = token_trees.split(is_comma);
+        // The first segment is the cfg condition itself — skip it.
+        segments.next();
+        let expands_to_no_std = segments.any(|output| {
+            matches!(
+                output,
+                [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "no_std"
+            )
+        });
+        if expands_to_no_std {
+            return true;
+        }
+    }
+
+    false
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
new file mode 100644
index 000000000..166aa04da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
@@ -0,0 +1,116 @@
+//! This module defines a `DynMap` -- a container for heterogeneous maps.
+//!
+//! This means that `DynMap` stores a bunch of hash maps inside, and those maps
+//! can be of different types.
+//!
+//! It is used like this:
+//!
+//! ```
+//! // keys define submaps of a `DynMap`
+//! const STRING_TO_U32: Key<String, u32> = Key::new();
+//! const U32_TO_VEC: Key<u32, Vec<bool>> = Key::new();
+//!
+//! // Note: concrete type, no type params!
+//! let mut map = DynMap::new();
+//!
+//! // To access a specific map, index the `DynMap` by `Key`:
+//! map[STRING_TO_U32].insert("hello".to_string(), 92);
+//! let value = map[U32_TO_VEC].get(92);
+//! assert!(value.is_none());
+//! ```
+//!
+//! This is a work of fiction. Any similarities to Kotlin's `BindingContext` are
+//! a coincidence.
+use std::{
+ hash::Hash,
+ marker::PhantomData,
+ ops::{Index, IndexMut},
+};
+
+use anymap::Map;
+use rustc_hash::FxHashMap;
+
+/// A typed key selecting one homogeneous submap of a `DynMap`.
+///
+/// `K`/`V` are the submap's key and value types; `P` is the `Policy` that
+/// implements the storage strategy (defaulting to the plain `(K, V)`
+/// hash-map policy below). The key carries no data — it exists purely to
+/// pick types at compile time.
+pub struct Key<K, V, P = (K, V)> {
+    _phantom: PhantomData<(K, V, P)>,
+}
+
+impl<K, V, P> Key<K, V, P> {
+    pub(crate) const fn new() -> Key<K, V, P> {
+        Key { _phantom: PhantomData }
+    }
+}
+
+// `Copy`/`Clone` are implemented manually: deriving them would add `K: Clone`
+// etc. bounds, while a `Key` is a zero-sized marker that is always freely
+// copyable regardless of its type parameters.
+impl<K, V, P> Copy for Key<K, V, P> {}
+
+impl<K, V, P> Clone for Key<K, V, P> {
+    fn clone(&self) -> Key<K, V, P> {
+        *self
+    }
+}
+
+/// Storage strategy for one submap of a `DynMap`: decides which concrete map
+/// type backs a given key/value pairing and how it is accessed.
+pub trait Policy {
+    type K;
+    type V;
+
+    fn insert(map: &mut DynMap, key: Self::K, value: Self::V);
+    fn get<'a>(map: &'a DynMap, key: &Self::K) -> Option<&'a Self::V>;
+    fn is_empty(map: &DynMap) -> bool;
+}
+
+/// Default policy: back the submap with an `FxHashMap<K, V>` stored in the
+/// anymap.
+impl<K: Hash + Eq + 'static, V: 'static> Policy for (K, V) {
+    type K = K;
+    type V = V;
+    fn insert(map: &mut DynMap, key: K, value: V) {
+        // Lazily create the submap on first insertion.
+        map.map.entry::<FxHashMap<K, V>>().or_insert_with(Default::default).insert(key, value);
+    }
+    fn get<'a>(map: &'a DynMap, key: &K) -> Option<&'a V> {
+        map.map.get::<FxHashMap<K, V>>()?.get(key)
+    }
+    fn is_empty(map: &DynMap) -> bool {
+        // A submap that was never created counts as empty.
+        map.map.get::<FxHashMap<K, V>>().map_or(true, |it| it.is_empty())
+    }
+}
+
+/// A container of heterogeneous maps, indexed by `Key`s. See the module-level
+/// docs for usage.
+pub struct DynMap {
+    pub(crate) map: Map,
+}
+
+impl Default for DynMap {
+    fn default() -> Self {
+        DynMap { map: Map::new() }
+    }
+}
+
+/// A view of a `DynMap` restricted to the submap selected by one `Key`.
+///
+/// `#[repr(transparent)]` guarantees that `KeyMap` has exactly the layout of
+/// `DynMap`, which is what makes the reference transmutes in the
+/// `Index`/`IndexMut` impls for `DynMap` sound.
+#[repr(transparent)]
+pub struct KeyMap<KEY> {
+    map: DynMap,
+    _phantom: PhantomData<KEY>,
+}
+
+impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
+    pub fn insert(&mut self, key: P::K, value: P::V) {
+        P::insert(&mut self.map, key, value)
+    }
+    pub fn get(&self, key: &P::K) -> Option<&P::V> {
+        P::get(&self.map, key)
+    }
+
+    pub fn is_empty(&self) -> bool {
+        P::is_empty(&self.map)
+    }
+}
+
+impl<P: Policy> Index<Key<P::K, P::V, P>> for DynMap {
+    type Output = KeyMap<Key<P::K, P::V, P>>;
+    fn index(&self, _key: Key<P::K, P::V, P>) -> &Self::Output {
+        // SAFETY: `KeyMap` is `#[repr(transparent)]` over `DynMap`, so both
+        // types have identical layout and the reference cast is valid.
+        unsafe { std::mem::transmute::<&DynMap, &KeyMap<Key<P::K, P::V, P>>>(self) }
+    }
+}
+
+impl<P: Policy> IndexMut<Key<P::K, P::V, P>> for DynMap {
+    fn index_mut(&mut self, _key: Key<P::K, P::V, P>) -> &mut Self::Output {
+        // SAFETY: as in `Index::index` — sound due to `#[repr(transparent)]`.
+        unsafe { std::mem::transmute::<&mut DynMap, &mut KeyMap<Key<P::K, P::V, P>>>(self) }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
new file mode 100644
index 000000000..c1b3788ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
@@ -0,0 +1,444 @@
+//! This module describes hir-level representation of expressions.
+//!
+//! This representation is:
+//!
+//! 1. Identity-based. Each expression has an `id`, so we can distinguish
+//! between different `1` in `1 + 1`.
+//! 2. Independent of syntax. Though syntactic provenance information can be
+//! attached separately via id-based side map.
+//! 3. Unresolved. Paths are stored as sequences of names, and not as defs the
+//! names refer to.
+//! 4. Desugared. There's no `if let`.
+//!
+//! See also a neighboring `body` module.
+
+use hir_expand::name::Name;
+use la_arena::{Idx, RawIdx};
+
+use crate::{
+ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
+ intern::Interned,
+ path::{GenericArgs, Path},
+ type_ref::{Mutability, Rawness, TypeRef},
+ BlockId,
+};
+
+pub use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp};
+
+pub type ExprId = Idx<Expr>;
+
+/// FIXME: this is a hacky function which should be removed
+pub(crate) fn dummy_expr_id() -> ExprId {
+    // `u32::MAX` serves as a sentinel index; NOTE(review): assumes no real
+    // body ever allocates this many expressions — confirm before relying on it.
+    ExprId::from_raw(RawIdx::from(u32::MAX))
+}
+
+pub type PatId = Idx<Pat>;
+
+/// A loop/block label, stored by name (e.g. the `'outer` of `'outer: loop`).
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Label {
+    pub name: Name,
+}
+pub type LabelId = Idx<Label>;
+
+// We convert float values into bits and that's how we don't need to deal with f32 and f64.
+// For PartialEq, bits comparison should work, as ordering is not important
+// https://github.com/rust-lang/rust-analyzer/issues/12380#issuecomment-1137284360
+#[derive(Default, Debug, Clone, Eq, PartialEq)]
+pub struct FloatTypeWrapper(u64);
+
+impl FloatTypeWrapper {
+    /// Stores the raw bit pattern of `value` (`f64` itself does not implement
+    /// `Eq`, hence the wrapper).
+    pub fn new(value: f64) -> Self {
+        Self(value.to_bits())
+    }
+}
+
+impl std::fmt::Display for FloatTypeWrapper {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        // `{:?}` keeps integral values recognizable as floats (`1.0`, not `1`).
+        write!(f, "{:?}", f64::from_bits(self.0))
+    }
+}
+
+/// A literal value. For numeric variants, the `Option<Builtin*>` carries the
+/// explicit suffix when one was written (e.g. the `u8` of `1u8`); `None`
+/// presumably leaves the type to inference — confirm at the lowering site.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Literal {
+    String(Box<str>),
+    ByteString(Box<[u8]>),
+    Char(char),
+    Bool(bool),
+    Int(i128, Option<BuiltinInt>),
+    Uint(u128, Option<BuiltinUint>),
+    // Here we are using a wrapper around float because f32 and f64 do not implement Eq, so they
+    // could not be used directly here, to understand how the wrapper works go to definition of
+    // FloatTypeWrapper
+    Float(FloatTypeWrapper, Option<BuiltinFloat>),
+}
+
+/// A HIR expression. Children are referenced by `ExprId`/`PatId` into the
+/// owning body's arenas rather than owned inline; see the module docs for the
+/// design rationale (identity-based, desugared, unresolved).
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Expr {
+    /// This is produced if the syntax tree does not have a required expression piece.
+    Missing,
+    Path(Path),
+    If {
+        condition: ExprId,
+        then_branch: ExprId,
+        else_branch: Option<ExprId>,
+    },
+    Let {
+        pat: PatId,
+        expr: ExprId,
+    },
+    Block {
+        id: BlockId,
+        statements: Box<[Statement]>,
+        tail: Option<ExprId>,
+        label: Option<LabelId>,
+    },
+    Loop {
+        body: ExprId,
+        label: Option<LabelId>,
+    },
+    While {
+        condition: ExprId,
+        body: ExprId,
+        label: Option<LabelId>,
+    },
+    For {
+        iterable: ExprId,
+        pat: PatId,
+        body: ExprId,
+        label: Option<LabelId>,
+    },
+    Call {
+        callee: ExprId,
+        args: Box<[ExprId]>,
+        is_assignee_expr: bool,
+    },
+    MethodCall {
+        receiver: ExprId,
+        method_name: Name,
+        args: Box<[ExprId]>,
+        generic_args: Option<Box<GenericArgs>>,
+    },
+    Match {
+        expr: ExprId,
+        arms: Box<[MatchArm]>,
+    },
+    Continue {
+        label: Option<Name>,
+    },
+    Break {
+        expr: Option<ExprId>,
+        label: Option<Name>,
+    },
+    Return {
+        expr: Option<ExprId>,
+    },
+    Yield {
+        expr: Option<ExprId>,
+    },
+    RecordLit {
+        path: Option<Box<Path>>,
+        fields: Box<[RecordLitField]>,
+        spread: Option<ExprId>,
+        ellipsis: bool,
+        is_assignee_expr: bool,
+    },
+    Field {
+        expr: ExprId,
+        name: Name,
+    },
+    Await {
+        expr: ExprId,
+    },
+    Try {
+        expr: ExprId,
+    },
+    TryBlock {
+        body: ExprId,
+    },
+    Async {
+        body: ExprId,
+    },
+    Const {
+        body: ExprId,
+    },
+    Cast {
+        expr: ExprId,
+        type_ref: Interned<TypeRef>,
+    },
+    Ref {
+        expr: ExprId,
+        rawness: Rawness,
+        mutability: Mutability,
+    },
+    Box {
+        expr: ExprId,
+    },
+    UnaryOp {
+        expr: ExprId,
+        op: UnaryOp,
+    },
+    BinaryOp {
+        lhs: ExprId,
+        rhs: ExprId,
+        op: Option<BinaryOp>,
+    },
+    /// A range expression; either bound may be absent (`a..`, `..b`, `..`).
+    Range {
+        lhs: Option<ExprId>,
+        rhs: Option<ExprId>,
+        range_type: RangeOp,
+    },
+    Index {
+        base: ExprId,
+        index: ExprId,
+    },
+    Closure {
+        args: Box<[PatId]>,
+        arg_types: Box<[Option<Interned<TypeRef>>]>,
+        ret_type: Option<Interned<TypeRef>>,
+        body: ExprId,
+    },
+    Tuple {
+        exprs: Box<[ExprId]>,
+        is_assignee_expr: bool,
+    },
+    Unsafe {
+        body: ExprId,
+    },
+    /// Statements produced by a macro expansion in statement position.
+    MacroStmts {
+        statements: Box<[Statement]>,
+        tail: Option<ExprId>,
+    },
+    Array(Array),
+    Literal(Literal),
+    /// The `_` expression.
+    Underscore,
+}
+
+/// An array literal: either an explicit element list or an `[init; len]`
+/// repeat expression.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Array {
+    ElementList { elements: Box<[ExprId]>, is_assignee_expr: bool },
+    Repeat { initializer: ExprId, repeat: ExprId },
+}
+
+/// One arm of a `match`; `guard` is the optional `if` guard expression.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MatchArm {
+    pub pat: PatId,
+    pub guard: Option<ExprId>,
+    pub expr: ExprId,
+}
+
+/// A `name: expr` field inside a record literal.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct RecordLitField {
+    pub name: Name,
+    pub expr: ExprId,
+}
+
+/// A statement inside a block.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Statement {
+    Let {
+        pat: PatId,
+        type_ref: Option<Interned<TypeRef>>,
+        initializer: Option<ExprId>,
+        // The diverging block of a `let ... else { ... }` statement.
+        else_branch: Option<ExprId>,
+    },
+    Expr {
+        expr: ExprId,
+        // Whether the expression was terminated by a semicolon.
+        has_semi: bool,
+    },
+}
+
+impl Expr {
+    /// Calls `f` once for each expression that is a *direct* child of `self`,
+    /// in source order. Does not recurse into grandchildren — callers drive
+    /// the traversal themselves.
+    pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) {
+        match self {
+            Expr::Missing => {}
+            Expr::Path(_) => {}
+            Expr::If { condition, then_branch, else_branch } => {
+                f(*condition);
+                f(*then_branch);
+                if let &Some(else_branch) = else_branch {
+                    f(else_branch);
+                }
+            }
+            Expr::Let { expr, .. } => {
+                f(*expr);
+            }
+            Expr::MacroStmts { tail, statements } | Expr::Block { statements, tail, .. } => {
+                for stmt in statements.iter() {
+                    match stmt {
+                        Statement::Let { initializer, else_branch, .. } => {
+                            if let &Some(expr) = initializer {
+                                f(expr);
+                            }
+                            // Fix: the `let ... else { ... }` block is a child
+                            // expression too and was previously not visited.
+                            if let &Some(expr) = else_branch {
+                                f(expr);
+                            }
+                        }
+                        Statement::Expr { expr: expression, .. } => f(*expression),
+                    }
+                }
+                if let &Some(expr) = tail {
+                    f(expr);
+                }
+            }
+            Expr::TryBlock { body }
+            | Expr::Unsafe { body }
+            | Expr::Async { body }
+            | Expr::Const { body } => f(*body),
+            Expr::Loop { body, .. } => f(*body),
+            Expr::While { condition, body, .. } => {
+                f(*condition);
+                f(*body);
+            }
+            Expr::For { iterable, body, .. } => {
+                f(*iterable);
+                f(*body);
+            }
+            Expr::Call { callee, args, .. } => {
+                f(*callee);
+                args.iter().copied().for_each(f);
+            }
+            Expr::MethodCall { receiver, args, .. } => {
+                f(*receiver);
+                args.iter().copied().for_each(f);
+            }
+            Expr::Match { expr, arms } => {
+                f(*expr);
+                // FIXME: match guards (`arm.guard`) are not visited here.
+                arms.iter().map(|arm| arm.expr).for_each(f);
+            }
+            Expr::Continue { .. } => {}
+            Expr::Break { expr, .. } | Expr::Return { expr } | Expr::Yield { expr } => {
+                if let &Some(expr) = expr {
+                    f(expr);
+                }
+            }
+            Expr::RecordLit { fields, spread, .. } => {
+                for field in fields.iter() {
+                    f(field.expr);
+                }
+                if let &Some(expr) = spread {
+                    f(expr);
+                }
+            }
+            Expr::Closure { body, .. } => {
+                f(*body);
+            }
+            Expr::BinaryOp { lhs, rhs, .. } => {
+                f(*lhs);
+                f(*rhs);
+            }
+            Expr::Range { lhs, rhs, .. } => {
+                // Fix: the bindings here were swapped (`lhs` bound from `rhs`
+                // and vice versa), so the bounds were visited end-before-start.
+                // Visit start before end, like every other variant.
+                if let &Some(lhs) = lhs {
+                    f(lhs);
+                }
+                if let &Some(rhs) = rhs {
+                    f(rhs);
+                }
+            }
+            Expr::Index { base, index } => {
+                f(*base);
+                f(*index);
+            }
+            Expr::Field { expr, .. }
+            | Expr::Await { expr }
+            | Expr::Try { expr }
+            | Expr::Cast { expr, .. }
+            | Expr::Ref { expr, .. }
+            | Expr::UnaryOp { expr, .. }
+            | Expr::Box { expr } => {
+                f(*expr);
+            }
+            Expr::Tuple { exprs, .. } => exprs.iter().copied().for_each(f),
+            Expr::Array(a) => match a {
+                Array::ElementList { elements, .. } => elements.iter().copied().for_each(f),
+                Array::Repeat { initializer, repeat } => {
+                    f(*initializer);
+                    f(*repeat)
+                }
+            },
+            Expr::Literal(_) => {}
+            Expr::Underscore => {}
+        }
+    }
+}
+
+/// Explicit binding annotations given in the HIR for a binding. Note
+/// that this is not the final binding *mode* that we infer after type
+/// inference.
+#[derive(Clone, PartialEq, Eq, Debug, Copy)]
+pub enum BindingAnnotation {
+ /// No binding annotation given: this means that the final binding mode
+ /// will depend on whether we have skipped through a `&` reference
+ /// when matching. For example, the `x` in `Some(x)` will have binding
+ /// mode `None`; if you do `let Some(x) = &Some(22)`, it will
+ /// ultimately be inferred to be by-reference.
+ Unannotated,
+
+ /// Annotated with `mut x` -- could be either ref or not, similar to `None`.
+ Mutable,
+
+ /// Annotated as `ref`, like `ref x`
+ Ref,
+
+ /// Annotated as `ref mut x`.
+ RefMut,
+}
+
+impl BindingAnnotation {
+    /// Builds the annotation from the presence of the `mut` and `ref` tokens
+    /// on a binding; a plain `x` yields `Unannotated`.
+    pub fn new(is_mutable: bool, is_ref: bool) -> Self {
+        if is_ref {
+            if is_mutable {
+                BindingAnnotation::RefMut
+            } else {
+                BindingAnnotation::Ref
+            }
+        } else if is_mutable {
+            BindingAnnotation::Mutable
+        } else {
+            BindingAnnotation::Unannotated
+        }
+    }
+}
+
+/// A `name: pat` field inside a record pattern.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct RecordFieldPat {
+    pub name: Name,
+    pub pat: PatId,
+}
+
+/// Close relative to rustc's hir::PatKind
+///
+/// `Range`, `Lit` and `ConstBlock` reference expressions, not sub-patterns.
+/// NOTE(review): `ellipsis: Option<usize>` presumably records the position of
+/// a `..` among the arguments — confirm against the lowering code.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Pat {
+    Missing,
+    Wild,
+    Tuple { args: Box<[PatId]>, ellipsis: Option<usize> },
+    Or(Box<[PatId]>),
+    Record { path: Option<Box<Path>>, args: Box<[RecordFieldPat]>, ellipsis: bool },
+    Range { start: ExprId, end: ExprId },
+    Slice { prefix: Box<[PatId]>, slice: Option<PatId>, suffix: Box<[PatId]> },
+    Path(Box<Path>),
+    Lit(ExprId),
+    Bind { mode: BindingAnnotation, name: Name, subpat: Option<PatId> },
+    TupleStruct { path: Option<Box<Path>>, args: Box<[PatId]>, ellipsis: Option<usize> },
+    Ref { pat: PatId, mutability: Mutability },
+    Box { inner: PatId },
+    ConstBlock(ExprId),
+}
+
+impl Pat {
+    /// Calls `f` once for each pattern that is a direct child of `self`.
+    /// Does not recurse into grandchildren.
+    pub fn walk_child_pats(&self, mut f: impl FnMut(PatId)) {
+        match self {
+            // Leaf patterns: nothing to walk into (`Range`, `Lit` and
+            // `ConstBlock` hold expressions, not sub-patterns).
+            Pat::Range { .. }
+            | Pat::Lit(..)
+            | Pat::Path(..)
+            | Pat::ConstBlock(..)
+            | Pat::Wild
+            | Pat::Missing => {}
+            Pat::Bind { subpat, .. } => {
+                subpat.iter().copied().for_each(f);
+            }
+            Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => {
+                args.iter().copied().for_each(f);
+            }
+            Pat::Ref { pat, .. } => f(*pat),
+            Pat::Slice { prefix, slice, suffix } => {
+                // Visit prefix, then the optional mid `..`-bound pattern, then suffix.
+                let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter());
+                total_iter.copied().for_each(f);
+            }
+            Pat::Record { args, .. } => {
+                args.iter().map(|f| f.pat).for_each(f);
+            }
+            Pat::Box { inner } => f(*inner),
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
new file mode 100644
index 000000000..89e961f84
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
@@ -0,0 +1,1134 @@
+//! An algorithm to find a path to refer to a certain item.
+
+use std::iter;
+
+use hir_expand::name::{known, AsName, Name};
+use rustc_hash::FxHashSet;
+
+use crate::{
+ db::DefDatabase,
+ item_scope::ItemInNs,
+ nameres::DefMap,
+ path::{ModPath, PathKind},
+ visibility::Visibility,
+ ModuleDefId, ModuleId,
+};
+
+/// Find a path that can be used to refer to a certain item. This can depend on
+/// *from where* you're referring to the item, hence the `from` parameter.
+///
+/// Returns `None` when no usable path could be found.
+pub fn find_path(db: &dyn DefDatabase, item: ItemInNs, from: ModuleId) -> Option<ModPath> {
+    let _p = profile::span("find_path");
+    find_path_inner(db, item, from, None)
+}
+
+/// Like [`find_path`], but the returned path starts according to the given
+/// `prefix_kind` (`self`/plain/`crate`).
+pub fn find_path_prefixed(
+    db: &dyn DefDatabase,
+    item: ItemInNs,
+    from: ModuleId,
+    prefix_kind: PrefixKind,
+) -> Option<ModPath> {
+    let _p = profile::span("find_path_prefixed");
+    find_path_inner(db, item, from, Some(prefix_kind))
+}
+
+const MAX_PATH_LEN: usize = 15;
+
+/// Helpers for choosing between `std` and `core`/`alloc` flavors of a path.
+trait ModPathExt {
+    fn starts_with_std(&self) -> bool;
+    fn can_start_with_std(&self) -> bool;
+}
+
+impl ModPathExt for ModPath {
+    fn starts_with_std(&self) -> bool {
+        self.segments().first() == Some(&known::std)
+    }
+
+    // Can we replace the first segment with `std::` and still get a valid, identical path?
+    // True when the path starts with `alloc` or `core`, whose items std re-exports.
+    fn can_start_with_std(&self) -> bool {
+        let first_segment = self.segments().first();
+        first_segment == Some(&known::alloc) || first_segment == Some(&known::core)
+    }
+}
+
+/// Returns a `self` or `super` path when `item` is the module `from` itself or
+/// its direct parent; `None` otherwise.
+fn check_self_super(def_map: &DefMap, item: ItemInNs, from: ModuleId) -> Option<ModPath> {
+    // `self` — the item is the module we are currently in.
+    if item == ItemInNs::Types(from.into()) {
+        return Some(ModPath::from_segments(PathKind::Super(0), None));
+    }
+    // `super` — the item is our direct parent module. Deliberately not applied
+    // recursively, since `super::super` paths are considered ugly.
+    let parent_local_id = def_map[from.local_id].parent?;
+    let parent_id = def_map.module_id(parent_local_id);
+    if item == ItemInNs::Types(ModuleDefId::ModuleId(parent_id)) {
+        return Some(ModPath::from_segments(PathKind::Super(1), None));
+    }
+    None
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum PrefixKind {
+    /// Causes paths to always start with either `self`, `super`, `crate` or a crate-name.
+    /// This is the same as plain, just that paths will start with `self` prepended if the path
+    /// starts with an identifier that is not a crate.
+    BySelf,
+    /// Causes paths to ignore imports in the local module.
+    Plain,
+    /// Causes paths to start with `crate` where applicable, effectively forcing paths to be absolute.
+    ByCrate,
+}
+
+impl PrefixKind {
+    /// The `PathKind` to use as the first segment for this prefix style.
+    #[inline]
+    fn prefix(self) -> PathKind {
+        match self {
+            // `Super(0)` renders as `self`.
+            PrefixKind::BySelf => PathKind::Super(0),
+            PrefixKind::Plain => PathKind::Plain,
+            PrefixKind::ByCrate => PathKind::Crate,
+        }
+    }
+
+    /// Whether this prefix forces absolute (`crate`-rooted) paths.
+    #[inline]
+    fn is_absolute(&self) -> bool {
+        self == &PrefixKind::ByCrate
+    }
+}
+/// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId
+fn find_path_inner(
+    db: &dyn DefDatabase,
+    item: ItemInNs,
+    from: ModuleId,
+    prefixed: Option<PrefixKind>,
+) -> Option<ModPath> {
+    // FIXME: Do fast path for std/core libs?
+
+    // Shared across the whole recursion so that mutually re-exporting modules
+    // are only tried once (see the `recursive_imports` mark in the helper).
+    let mut visited_modules = FxHashSet::default();
+    let def_map = from.def_map(db);
+    find_path_inner_(db, &def_map, from, item, MAX_PATH_LEN, prefixed, &mut visited_modules)
+}
+
+/// Recursive work-horse of [`find_path_inner`].
+///
+/// `max_len` bounds the length of the returned path — each recursive hop
+/// shrinks it by one and a budget of 0 aborts the search. `visited_modules`
+/// is shared across the recursion to break cycles between re-exporting
+/// modules.
+fn find_path_inner_(
+    db: &dyn DefDatabase,
+    def_map: &DefMap,
+    from: ModuleId,
+    item: ItemInNs,
+    max_len: usize,
+    mut prefixed: Option<PrefixKind>,
+    visited_modules: &mut FxHashSet<ModuleId>,
+) -> Option<ModPath> {
+    if max_len == 0 {
+        return None;
+    }
+
+    // Base cases:
+
+    // - if the item is already in scope, return the name under which it is
+    let scope_name = def_map.with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
+        def_map[local_id].scope.name_of(item).map(|(name, _)| name.clone())
+    });
+    if prefixed.is_none() {
+        if let Some(scope_name) = scope_name {
+            return Some(ModPath::from_segments(PathKind::Plain, Some(scope_name)));
+        }
+    }
+
+    // - if the item is a builtin, it's in scope
+    if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
+        return Some(ModPath::from_segments(PathKind::Plain, Some(builtin.as_name())));
+    }
+
+    // - if the item is the crate root, return `crate`
+    let crate_root = def_map.crate_root(db);
+    if item == ItemInNs::Types(ModuleDefId::ModuleId(crate_root)) {
+        return Some(ModPath::from_segments(PathKind::Crate, None));
+    }
+
+    // `self`/`super` paths are only usable when the prefix is not forced absolute.
+    if prefixed.filter(PrefixKind::is_absolute).is_none() {
+        if let modpath @ Some(_) = check_self_super(&def_map, item, from) {
+            return modpath;
+        }
+    }
+
+    // - if the item is the crate root of a dependency crate, return the name from the extern prelude
+    let root_def_map = crate_root.def_map(db);
+    if let ItemInNs::Types(ModuleDefId::ModuleId(item)) = item {
+        for (name, &def_id) in root_def_map.extern_prelude() {
+            if item == def_id {
+                let name = scope_name.unwrap_or_else(|| name.clone());
+
+                // If a local item shadows the crate name, fall back to a `::`
+                // (absolute) path to disambiguate.
+                let name_already_occupied_in_type_ns = def_map
+                    .with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
+                        def_map[local_id]
+                            .scope
+                            .type_(&name)
+                            .filter(|&(id, _)| id != ModuleDefId::ModuleId(def_id))
+                    })
+                    .is_some();
+                let kind = if name_already_occupied_in_type_ns {
+                    cov_mark::hit!(ambiguous_crate_start);
+                    PathKind::Abs
+                } else {
+                    PathKind::Plain
+                };
+                return Some(ModPath::from_segments(kind, Some(name)));
+            }
+        }
+    }
+
+    // - if the item is in the prelude, return the name from there
+    if let Some(prelude_module) = root_def_map.prelude() {
+        // Preludes in block DefMaps are ignored, only the crate DefMap is searched
+        let prelude_def_map = prelude_module.def_map(db);
+        let prelude_scope = &prelude_def_map[prelude_module.local_id].scope;
+        if let Some((name, vis)) = prelude_scope.name_of(item) {
+            if vis.is_visible_from(db, from) {
+                return Some(ModPath::from_segments(PathKind::Plain, Some(name.clone())));
+            }
+        }
+    }
+
+    // Recursive case:
+    // - if the item is an enum variant, refer to it via the enum
+    if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() {
+        if let Some(mut path) = find_path(db, ItemInNs::Types(variant.parent.into()), from) {
+            let data = db.enum_data(variant.parent);
+            path.push_segment(data.variants[variant.local_id].name.clone());
+            return Some(path);
+        }
+        // If this doesn't work, it seems we have no way of referring to the
+        // enum; that's very weird, but there might still be a reexport of the
+        // variant somewhere
+    }
+
+    // - otherwise, look for modules containing (reexporting) it and import it from one of those
+    let prefer_no_std = db.crate_supports_no_std(crate_root.krate);
+    let mut best_path = None;
+    let mut best_path_len = max_len;
+
+    if item.krate(db) == Some(from.krate) {
+        // Item was defined in the same crate that wants to import it. It cannot be found in any
+        // dependency in this case.
+        // FIXME: this should have a fast path that doesn't look through the prelude again?
+        for (module_id, name) in find_local_import_locations(db, item, from) {
+            if !visited_modules.insert(module_id) {
+                cov_mark::hit!(recursive_imports);
+                continue;
+            }
+            if let Some(mut path) = find_path_inner_(
+                db,
+                def_map,
+                from,
+                ItemInNs::Types(ModuleDefId::ModuleId(module_id)),
+                best_path_len - 1,
+                prefixed,
+                visited_modules,
+            ) {
+                path.push_segment(name);
+
+                let new_path = match best_path {
+                    Some(best_path) => select_best_path(best_path, path, prefer_no_std),
+                    None => path,
+                };
+                best_path_len = new_path.len();
+                best_path = Some(new_path);
+            }
+        }
+    } else {
+        // Item was defined in some upstream crate. This means that it must be exported from one,
+        // too (unless we can't name it at all). It could *also* be (re)exported by the same crate
+        // that wants to import it here, but we always prefer to use the external path here.
+
+        let crate_graph = db.crate_graph();
+        let extern_paths = crate_graph[from.krate].dependencies.iter().filter_map(|dep| {
+            let import_map = db.import_map(dep.crate_id);
+            import_map.import_info_for(item).and_then(|info| {
+                // Determine best path for containing module and append last segment from `info`.
+                // FIXME: we should guide this to look up the path locally, or from the same crate again?
+                let mut path = find_path_inner_(
+                    db,
+                    def_map,
+                    from,
+                    ItemInNs::Types(ModuleDefId::ModuleId(info.container)),
+                    best_path_len - 1,
+                    prefixed,
+                    visited_modules,
+                )?;
+                cov_mark::hit!(partially_imported);
+                path.push_segment(info.path.segments.last()?.clone());
+                Some(path)
+            })
+        });
+
+        for path in extern_paths {
+            let new_path = match best_path {
+                Some(best_path) => select_best_path(best_path, path, prefer_no_std),
+                None => path,
+            };
+            best_path = Some(new_path);
+        }
+    }
+
+    // If the item is declared inside a block expression, don't use a prefix, as we don't handle
+    // that correctly (FIXME).
+    if let Some(item_module) = item.as_module_def_id().and_then(|did| did.module(db)) {
+        if item_module.def_map(db).block_id().is_some() && prefixed.is_some() {
+            cov_mark::hit!(prefixed_in_block_expression);
+            prefixed = Some(PrefixKind::Plain);
+        }
+    }
+
+    match prefixed.map(PrefixKind::prefix) {
+        Some(prefix) => best_path.or_else(|| {
+            scope_name.map(|scope_name| ModPath::from_segments(prefix, Some(scope_name)))
+        }),
+        None => best_path,
+    }
+}
+
+/// Picks the preferable of two candidate paths to the same item.
+///
+/// When one candidate starts with `std` and the other could equivalently start
+/// with `core`/`alloc`, the crate's `no_std` preference decides; otherwise the
+/// shorter path wins, with ties going to the existing (`old`) path.
+fn select_best_path(old_path: ModPath, new_path: ModPath, prefer_no_std: bool) -> ModPath {
+    if old_path.starts_with_std() && new_path.can_start_with_std() {
+        if prefer_no_std {
+            cov_mark::hit!(prefer_no_std_paths);
+            return new_path;
+        }
+        cov_mark::hit!(prefer_std_paths);
+        return old_path;
+    }
+    if new_path.starts_with_std() && old_path.can_start_with_std() {
+        if prefer_no_std {
+            cov_mark::hit!(prefer_no_std_paths);
+            return old_path;
+        }
+        cov_mark::hit!(prefer_std_paths);
+        return new_path;
+    }
+    if new_path.len() < old_path.len() {
+        new_path
+    } else {
+        old_path
+    }
+}
+
+/// Finds locations in `from.krate` from which `item` can be imported by `from`.
+///
+/// Returns `(module, name)` pairs: `item` is visible in `module`'s scope under
+/// `name`, and `module` is reachable during the traversal below.
+fn find_local_import_locations(
+    db: &dyn DefDatabase,
+    item: ItemInNs,
+    from: ModuleId,
+) -> Vec<(ModuleId, Name)> {
+    let _p = profile::span("find_local_import_locations");
+
+    // `from` can import anything below `from` with visibility of at least `from`, and anything
+    // above `from` with any visibility. That means we do not need to descend into private siblings
+    // of `from` (and similar).
+
+    let def_map = from.def_map(db);
+
+    // Compute the initial worklist. We start with all direct child modules of `from` as well as all
+    // of its (recursive) parent modules.
+    let data = &def_map[from.local_id];
+    let mut worklist =
+        data.children.values().map(|child| def_map.module_id(*child)).collect::<Vec<_>>();
+    // FIXME: do we need to traverse out of block expressions here?
+    for ancestor in iter::successors(from.containing_module(db), |m| m.containing_module(db)) {
+        worklist.push(ancestor);
+    }
+
+    // From here on, look modules up in the crate root's DefMap; block-expression
+    // modules and foreign crates re-query their own DefMap in the loop below.
+    let def_map = def_map.crate_root(db).def_map(db);
+
+    // Worklist traversal; `seen` guards against revisiting modules (cycles
+    // through reexports are possible).
+    let mut seen: FxHashSet<_> = FxHashSet::default();
+
+    let mut locations = Vec::new();
+    while let Some(module) = worklist.pop() {
+        if !seen.insert(module) {
+            continue; // already processed this module
+        }
+
+        // Borrow the right `ModuleData` for this module; `ext_def_map` keeps a
+        // re-queried DefMap alive for the borrow when the root one can't be used.
+        let ext_def_map;
+        let data = if module.krate == from.krate {
+            if module.block.is_some() {
+                // Re-query the block's DefMap
+                ext_def_map = module.def_map(db);
+                &ext_def_map[module.local_id]
+            } else {
+                // Reuse the root DefMap
+                &def_map[module.local_id]
+            }
+        } else {
+            // The crate might reexport a module defined in another crate.
+            ext_def_map = module.def_map(db);
+            &ext_def_map[module.local_id]
+        };
+
+        if let Some((name, vis)) = data.scope.name_of(item) {
+            if vis.is_visible_from(db, from) {
+                let is_private = match vis {
+                    Visibility::Module(private_to) => private_to.local_id == module.local_id,
+                    Visibility::Public => false,
+                };
+                let is_original_def = match item.as_module_def_id() {
+                    Some(module_def_id) => data.scope.declarations().any(|it| it == module_def_id),
+                    None => false,
+                };
+
+                // Ignore private imports. these could be used if we are
+                // in a submodule of this module, but that's usually not
+                // what the user wants; and if this module can import
+                // the item and we're a submodule of it, so can we.
+                // Also this keeps the cached data smaller.
+                if !is_private || is_original_def {
+                    locations.push((module, name.clone()));
+                }
+            }
+        }
+
+        // Descend into all modules visible from `from`.
+        for (ty, vis) in data.scope.types() {
+            if let ModuleDefId::ModuleId(module) = ty {
+                if vis.is_visible_from(db, from) {
+                    worklist.push(module);
+                }
+            }
+        }
+    }
+
+    locations
+}
+
+#[cfg(test)]
+mod tests {
+ use base_db::fixture::WithFixture;
+ use hir_expand::hygiene::Hygiene;
+ use syntax::ast::AstNode;
+
+ use crate::test_db::TestDB;
+
+ use super::*;
+
+    /// `code` needs to contain a cursor marker; checks that `find_path` for the
+    /// item the `path` refers to returns that same path when called from the
+    /// module the cursor is in.
+    fn check_found_path_(ra_fixture: &str, path: &str, prefix_kind: Option<PrefixKind>) {
+        let (db, pos) = TestDB::with_position(ra_fixture);
+        let module = db.module_at_position(pos);
+        // Parse `path` by wrapping it in a `use` item and pulling out the first
+        // AST path node of the resulting file.
+        let parsed_path_file = syntax::SourceFile::parse(&format!("use {};", path));
+        let ast_path =
+            parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
+        let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
+
+        // Resolve the path (in the type namespace) from the cursor's module...
+        let def_map = module.def_map(&db);
+        let resolved = def_map
+            .resolve_path(
+                &db,
+                module.local_id,
+                &mod_path,
+                crate::item_scope::BuiltinShadowMode::Module,
+            )
+            .0
+            .take_types()
+            .unwrap();
+
+        // ...then check that `find_path` round-trips to the exact same path.
+        let found_path = find_path_inner(&db, ItemInNs::Types(resolved), module, prefix_kind);
+        assert_eq!(found_path, Some(mod_path), "{:?}", prefix_kind);
+    }
+
+    /// Runs `check_found_path_` once for each of the four prefix settings,
+    /// with the expected path for each.
+    fn check_found_path(
+        ra_fixture: &str,
+        unprefixed: &str,
+        prefixed: &str,
+        absolute: &str,
+        self_prefixed: &str,
+    ) {
+        let cases = [
+            (unprefixed, None),
+            (prefixed, Some(PrefixKind::Plain)),
+            (absolute, Some(PrefixKind::ByCrate)),
+            (self_prefixed, Some(PrefixKind::BySelf)),
+        ];
+        for (expected, prefix_kind) in cases {
+            check_found_path_(ra_fixture, expected, prefix_kind);
+        }
+    }
+
+ #[test]
+ fn same_module() {
+ check_found_path(
+ r#"
+struct S;
+$0
+ "#,
+ "S",
+ "S",
+ "crate::S",
+ "self::S",
+ );
+ }
+
+ #[test]
+ fn enum_variant() {
+ check_found_path(
+ r#"
+enum E { A }
+$0
+ "#,
+ "E::A",
+ "E::A",
+ "E::A",
+ "E::A",
+ );
+ }
+
+ #[test]
+ fn sub_module() {
+ check_found_path(
+ r#"
+mod foo {
+ pub struct S;
+}
+$0
+ "#,
+ "foo::S",
+ "foo::S",
+ "crate::foo::S",
+ "self::foo::S",
+ );
+ }
+
+ #[test]
+ fn super_module() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+//- /foo.rs
+mod bar;
+struct S;
+//- /foo/bar.rs
+$0
+ "#,
+ "super::S",
+ "super::S",
+ "crate::foo::S",
+ "super::S",
+ );
+ }
+
+ #[test]
+ fn self_module() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+//- /foo.rs
+$0
+ "#,
+ "self",
+ "self",
+ "crate::foo",
+ "self",
+ );
+ }
+
+ #[test]
+ fn crate_root() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+//- /foo.rs
+$0
+ "#,
+ "crate",
+ "crate",
+ "crate",
+ "crate",
+ );
+ }
+
+ #[test]
+ fn same_crate() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+struct S;
+//- /foo.rs
+$0
+ "#,
+ "crate::S",
+ "crate::S",
+ "crate::S",
+ "crate::S",
+ );
+ }
+
+ #[test]
+ fn different_crate() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+$0
+//- /std.rs crate:std
+pub struct S;
+ "#,
+ "std::S",
+ "std::S",
+ "std::S",
+ "std::S",
+ );
+ }
+
+ #[test]
+ fn different_crate_renamed() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as std_renamed;
+$0
+//- /std.rs crate:std
+pub struct S;
+ "#,
+ "std_renamed::S",
+ "std_renamed::S",
+ "std_renamed::S",
+ "std_renamed::S",
+ );
+ }
+
+ #[test]
+ fn partially_imported() {
+ cov_mark::check!(partially_imported);
+ // Tests that short paths are used even for external items, when parts of the path are
+ // already in scope.
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:syntax
+
+use syntax::ast;
+$0
+
+//- /lib.rs crate:syntax
+pub mod ast {
+ pub enum ModuleItem {
+ A, B, C,
+ }
+}
+ "#,
+ "ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ );
+
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:syntax
+$0
+
+//- /lib.rs crate:syntax
+pub mod ast {
+ pub enum ModuleItem {
+ A, B, C,
+ }
+}
+ "#,
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ );
+ }
+
+ #[test]
+ fn same_crate_reexport() {
+ check_found_path(
+ r#"
+mod bar {
+ mod foo { pub(super) struct S; }
+ pub(crate) use foo::*;
+}
+$0
+ "#,
+ "bar::S",
+ "bar::S",
+ "crate::bar::S",
+ "self::bar::S",
+ );
+ }
+
+ #[test]
+ fn same_crate_reexport_rename() {
+ check_found_path(
+ r#"
+mod bar {
+ mod foo { pub(super) struct S; }
+ pub(crate) use foo::S as U;
+}
+$0
+ "#,
+ "bar::U",
+ "bar::U",
+ "crate::bar::U",
+ "self::bar::U",
+ );
+ }
+
+ #[test]
+ fn different_crate_reexport() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+$0
+//- /std.rs crate:std deps:core
+pub use core::S;
+//- /core.rs crate:core
+pub struct S;
+ "#,
+ "std::S",
+ "std::S",
+ "std::S",
+ "std::S",
+ );
+ }
+
+ #[test]
+ fn prelude() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+$0
+//- /std.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct S;
+ }
+}
+ "#,
+ "S",
+ "S",
+ "S",
+ "S",
+ );
+ }
+
+ #[test]
+ fn enum_variant_from_prelude() {
+ let code = r#"
+//- /main.rs crate:main deps:std
+$0
+//- /std.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub enum Option<T> { Some(T), None }
+ pub use Option::*;
+ }
+}
+ "#;
+ check_found_path(code, "None", "None", "None", "None");
+ check_found_path(code, "Some", "Some", "Some", "Some");
+ }
+
+ #[test]
+ fn shortest_path() {
+ check_found_path(
+ r#"
+//- /main.rs
+pub mod foo;
+pub mod baz;
+struct S;
+$0
+//- /foo.rs
+pub mod bar { pub struct S; }
+//- /baz.rs
+pub use crate::foo::bar::S;
+ "#,
+ "baz::S",
+ "baz::S",
+ "crate::baz::S",
+ "self::baz::S",
+ );
+ }
+
+ #[test]
+ fn discount_private_imports() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+pub mod bar { pub struct S; }
+use bar::S;
+//- /foo.rs
+$0
+ "#,
+ // crate::S would be shorter, but using private imports seems wrong
+ "crate::bar::S",
+ "crate::bar::S",
+ "crate::bar::S",
+ "crate::bar::S",
+ );
+ }
+
+ #[test]
+ fn import_cycle() {
+ check_found_path(
+ r#"
+//- /main.rs
+pub mod foo;
+pub mod bar;
+pub mod baz;
+//- /bar.rs
+$0
+//- /foo.rs
+pub use super::baz;
+pub struct S;
+//- /baz.rs
+pub use super::foo;
+ "#,
+ "crate::foo::S",
+ "crate::foo::S",
+ "crate::foo::S",
+ "crate::foo::S",
+ );
+ }
+
+ #[test]
+ fn prefer_std_paths_over_alloc() {
+ cov_mark::check!(prefer_std_paths);
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:alloc,std
+$0
+
+//- /std.rs crate:std deps:alloc
+pub mod sync {
+ pub use alloc::sync::Arc;
+}
+
+//- /zzz.rs crate:alloc
+pub mod sync {
+ pub struct Arc;
+}
+ "#,
+ "std::sync::Arc",
+ "std::sync::Arc",
+ "std::sync::Arc",
+ "std::sync::Arc",
+ );
+ }
+
+ #[test]
+ fn prefer_core_paths_over_std() {
+ cov_mark::check!(prefer_no_std_paths);
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:core,std
+#![no_std]
+
+$0
+
+//- /std.rs crate:std deps:core
+
+pub mod fmt {
+ pub use core::fmt::Error;
+}
+
+//- /zzz.rs crate:core
+
+pub mod fmt {
+ pub struct Error;
+}
+ "#,
+ "core::fmt::Error",
+ "core::fmt::Error",
+ "core::fmt::Error",
+ "core::fmt::Error",
+ );
+
+ // Should also work (on a best-effort basis) if `no_std` is conditional.
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:core,std
+#![cfg_attr(not(test), no_std)]
+
+$0
+
+//- /std.rs crate:std deps:core
+
+pub mod fmt {
+ pub use core::fmt::Error;
+}
+
+//- /zzz.rs crate:core
+
+pub mod fmt {
+ pub struct Error;
+}
+ "#,
+ "core::fmt::Error",
+ "core::fmt::Error",
+ "core::fmt::Error",
+ "core::fmt::Error",
+ );
+ }
+
+ #[test]
+ fn prefer_alloc_paths_over_std() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:alloc,std
+#![no_std]
+
+$0
+
+//- /std.rs crate:std deps:alloc
+
+pub mod sync {
+ pub use alloc::sync::Arc;
+}
+
+//- /zzz.rs crate:alloc
+
+pub mod sync {
+ pub struct Arc;
+}
+ "#,
+ "alloc::sync::Arc",
+ "alloc::sync::Arc",
+ "alloc::sync::Arc",
+ "alloc::sync::Arc",
+ );
+ }
+
+ #[test]
+ fn prefer_shorter_paths_if_not_alloc() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:megaalloc,std
+$0
+
+//- /std.rs crate:std deps:megaalloc
+pub mod sync {
+ pub use megaalloc::sync::Arc;
+}
+
+//- /zzz.rs crate:megaalloc
+pub struct Arc;
+ "#,
+ "megaalloc::Arc",
+ "megaalloc::Arc",
+ "megaalloc::Arc",
+ "megaalloc::Arc",
+ );
+ }
+
+ #[test]
+ fn builtins_are_in_scope() {
+ let code = r#"
+$0
+
+pub mod primitive {
+ pub use u8;
+}
+ "#;
+ check_found_path(code, "u8", "u8", "u8", "u8");
+ check_found_path(code, "u16", "u16", "u16", "u16");
+ }
+
+ #[test]
+ fn inner_items() {
+ check_found_path(
+ r#"
+fn main() {
+ struct Inner {}
+ $0
+}
+ "#,
+ "Inner",
+ "Inner",
+ "Inner",
+ "Inner",
+ );
+ }
+
+ #[test]
+ fn inner_items_from_outer_scope() {
+ check_found_path(
+ r#"
+fn main() {
+ struct Struct {}
+ {
+ $0
+ }
+}
+ "#,
+ "Struct",
+ "Struct",
+ "Struct",
+ "Struct",
+ );
+ }
+
+ #[test]
+ fn inner_items_from_inner_module() {
+ cov_mark::check!(prefixed_in_block_expression);
+ check_found_path(
+ r#"
+fn main() {
+ mod module {
+ struct Struct {}
+ }
+ {
+ $0
+ }
+}
+ "#,
+ "module::Struct",
+ "module::Struct",
+ "module::Struct",
+ "module::Struct",
+ );
+ }
+
+ #[test]
+ fn outer_items_with_inner_items_present() {
+ check_found_path(
+ r#"
+mod module {
+ pub struct CompleteMe;
+}
+
+fn main() {
+ fn inner() {}
+ $0
+}
+ "#,
+ // FIXME: these could use fewer/better prefixes
+ "module::CompleteMe",
+ "crate::module::CompleteMe",
+ "crate::module::CompleteMe",
+ "crate::module::CompleteMe",
+ )
+ }
+
+ #[test]
+ fn from_inside_module() {
+ // This worked correctly, but the test suite logic was broken.
+ cov_mark::check!(submodule_in_testdb);
+ check_found_path(
+ r#"
+mod baz {
+ pub struct Foo {}
+}
+
+mod bar {
+ fn bar() {
+ $0
+ }
+}
+ "#,
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ )
+ }
+
+ #[test]
+ fn from_inside_module_with_inner_items() {
+ check_found_path(
+ r#"
+mod baz {
+ pub struct Foo {}
+}
+
+mod bar {
+ fn bar() {
+ fn inner() {}
+ $0
+ }
+}
+ "#,
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ )
+ }
+
+ #[test]
+ fn recursive_pub_mod_reexport() {
+ cov_mark::check!(recursive_imports);
+ check_found_path(
+ r#"
+fn main() {
+ let _ = 22_i32.as_name$0();
+}
+
+pub mod name {
+ pub trait AsName {
+ fn as_name(&self) -> String;
+ }
+ impl AsName for i32 {
+ fn as_name(&self) -> String {
+ format!("Name: {}", self)
+ }
+ }
+ pub use crate::name;
+}
+"#,
+ "name::AsName",
+ "name::AsName",
+ "crate::name::AsName",
+ "self::name::AsName",
+ );
+ }
+
+ #[test]
+ fn extern_crate() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:dep
+$0
+//- /dep.rs crate:dep
+"#,
+ "dep",
+ "dep",
+ "dep",
+ "dep",
+ );
+
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:dep
+fn f() {
+ fn inner() {}
+ $0
+}
+//- /dep.rs crate:dep
+"#,
+ "dep",
+ "dep",
+ "dep",
+ "dep",
+ );
+ }
+
+ #[test]
+ fn prelude_with_inner_items() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+fn f() {
+ fn inner() {}
+ $0
+}
+//- /std.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub enum Option { None }
+ pub use Option::*;
+ }
+}
+ "#,
+ "None",
+ "None",
+ "None",
+ "None",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
new file mode 100644
index 000000000..2397cf501
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -0,0 +1,522 @@
+//! Many kinds of items or constructs can have generic parameters: functions,
+//! structs, impls, traits, etc. This module provides a common HIR for these
+//! generic parameters. See also the `Generics` type and the `generics_of` query
+//! in rustc.
+
+use base_db::FileId;
+use either::Either;
+use hir_expand::{
+ name::{AsName, Name},
+ ExpandResult, HirFileId, InFile,
+};
+use la_arena::{Arena, ArenaMap, Idx};
+use once_cell::unsync::Lazy;
+use std::ops::DerefMut;
+use stdx::impl_from;
+use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
+
+use crate::{
+ body::{Expander, LowerCtx},
+ child_by_source::ChildBySource,
+ db::DefDatabase,
+ dyn_map::DynMap,
+ intern::Interned,
+ keys,
+ src::{HasChildSource, HasSource},
+ type_ref::{LifetimeRef, TypeBound, TypeRef},
+ AdtId, ConstParamId, GenericDefId, HasModule, LifetimeParamId, LocalLifetimeParamId,
+ LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
+};
+
+/// Data about a generic type parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TypeParamData {
+    /// `None` for implicit parameters lowered from argument-position `impl Trait`.
+    pub name: Option<Name>,
+    /// The declared default type, if any (`T = Foo`).
+    pub default: Option<Interned<TypeRef>>,
+    /// How this parameter came into being (see `TypeParamProvenance`).
+    pub provenance: TypeParamProvenance,
+}
+
+/// Data about a generic lifetime parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct LifetimeParamData {
+    /// The lifetime's name (e.g. `'a`).
+    pub name: Name,
+}
+
+/// Data about a generic const parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ConstParamData {
+    pub name: Name,
+    /// The declared type of the const parameter (`const N: usize`).
+    pub ty: Interned<TypeRef>,
+    /// Whether a default value was written; the value itself is not stored here.
+    pub has_default: bool,
+}
+
+/// Where a type parameter originated.
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeParamProvenance {
+    /// Written explicitly in a generic parameter list (`<T>`).
+    TypeParamList,
+    /// The implicit `Self` parameter of a trait.
+    TraitSelf,
+    /// Lowered from argument-position `impl Trait`.
+    ArgumentImplTrait,
+}
+
+/// A generic parameter from the shared type/const index space
+/// (see `GenericParams::type_or_consts`).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeOrConstParamData {
+    TypeParamData(TypeParamData),
+    ConstParamData(ConstParamData),
+}
+
+impl TypeOrConstParamData {
+    /// The parameter's name; `None` for implicit `impl Trait` type parameters.
+    pub fn name(&self) -> Option<&Name> {
+        match self {
+            TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(),
+            TypeOrConstParamData::ConstParamData(it) => Some(&it.name),
+        }
+    }
+
+    /// Whether the parameter declares a default.
+    pub fn has_default(&self) -> bool {
+        match self {
+            TypeOrConstParamData::TypeParamData(it) => it.default.is_some(),
+            TypeOrConstParamData::ConstParamData(it) => it.has_default,
+        }
+    }
+
+    /// The type-parameter payload, if this is a type parameter.
+    pub fn type_param(&self) -> Option<&TypeParamData> {
+        if let TypeOrConstParamData::TypeParamData(it) = self {
+            Some(it)
+        } else {
+            None
+        }
+    }
+
+    /// The const-parameter payload, if this is a const parameter.
+    pub fn const_param(&self) -> Option<&ConstParamData> {
+        if let TypeOrConstParamData::ConstParamData(it) = self {
+            Some(it)
+        } else {
+            None
+        }
+    }
+
+    /// Whether this is the implicit `Self` parameter of a trait.
+    pub fn is_trait_self(&self) -> bool {
+        matches!(
+            self,
+            TypeOrConstParamData::TypeParamData(TypeParamData {
+                provenance: TypeParamProvenance::TraitSelf,
+                ..
+            })
+        )
+    }
+}
+
+impl_from!(TypeParamData, ConstParamData for TypeOrConstParamData);
+
+/// Data about the generic parameters of a function, struct, impl, etc.
+#[derive(Clone, PartialEq, Eq, Debug, Default, Hash)]
+pub struct GenericParams {
+    /// Type and const parameters, sharing a single arena/index space.
+    pub type_or_consts: Arena<TypeOrConstParamData>,
+    /// Lifetime parameters, indexed separately from `type_or_consts`.
+    pub lifetimes: Arena<LifetimeParamData>,
+    /// Desugared `where` predicates, including inline bounds like `<T: Clone>`.
+    pub where_predicates: Vec<WherePredicate>,
+}
+
+/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
+/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
+/// It might still result in multiple actual predicates though, because of
+/// associated type bindings like `Iterator<Item = u32>`.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum WherePredicate {
+    /// `Target: Bound`
+    TypeBound {
+        target: WherePredicateTypeTarget,
+        bound: Interned<TypeBound>,
+    },
+    /// `'target: 'bound`
+    Lifetime {
+        target: LifetimeRef,
+        bound: LifetimeRef,
+    },
+    /// `for<'a, ...> Target: Bound` — a higher-ranked trait bound.
+    ForLifetime {
+        lifetimes: Box<[Name]>,
+        target: WherePredicateTypeTarget,
+        bound: Interned<TypeBound>,
+    },
+}
+
+/// The left-hand side of a type-bound where predicate.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum WherePredicateTypeTarget {
+    TypeRef(Interned<TypeRef>),
+    /// For desugared where predicates that can directly refer to a type param.
+    TypeOrConstParam(LocalTypeOrConstParamId),
+}
+
+impl GenericParams {
+    /// Iterator of type_or_consts field
+    pub fn iter<'a>(
+        &'a self,
+    ) -> impl DoubleEndedIterator<Item = (Idx<TypeOrConstParamData>, &TypeOrConstParamData)> {
+        self.type_or_consts.iter()
+    }
+
+    /// Query implementation: computes the generic parameters of `def`.
+    ///
+    /// For functions, argument-position `impl Trait` is additionally lowered
+    /// into implicit type parameters; every other item kind takes its
+    /// parameters straight from the `ItemTree`.
+    pub(crate) fn generic_params_query(
+        db: &dyn DefDatabase,
+        def: GenericDefId,
+    ) -> Interned<GenericParams> {
+        let _p = profile::span("generic_params_query");
+
+        macro_rules! id_to_generics {
+            ($id:ident) => {{
+                let id = $id.lookup(db).id;
+                let tree = id.item_tree(db);
+                let item = &tree[id.value];
+                item.generic_params.clone()
+            }};
+        }
+
+        match def {
+            GenericDefId::FunctionId(id) => {
+                let loc = id.lookup(db);
+                let tree = loc.id.item_tree(db);
+                let item = &tree[loc.id.value];
+
+                let mut generic_params = GenericParams::clone(&item.explicit_generic_params);
+
+                let module = loc.container.module(db);
+                let func_data = db.function_data(id);
+
+                // Don't create an `Expander` nor call `loc.source(db)` if not needed since this
+                // causes a reparse after the `ItemTree` has been created.
+                let mut expander = Lazy::new(|| Expander::new(db, loc.source(db).file_id, module));
+                for (_, param) in &func_data.params {
+                    generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
+                }
+
+                Interned::new(generic_params)
+            }
+            GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics!(id),
+            GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics!(id),
+            GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics!(id),
+            GenericDefId::TraitId(id) => id_to_generics!(id),
+            GenericDefId::TypeAliasId(id) => id_to_generics!(id),
+            GenericDefId::ImplId(id) => id_to_generics!(id),
+            GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {
+                Interned::new(GenericParams::default())
+            }
+        }
+    }
+
+    /// Lowers both the `<...>` parameter list and the `where` clause of `node`.
+    pub(crate) fn fill(&mut self, lower_ctx: &LowerCtx<'_>, node: &dyn HasGenericParams) {
+        if let Some(params) = node.generic_param_list() {
+            self.fill_params(lower_ctx, params)
+        }
+        if let Some(where_clause) = node.where_clause() {
+            self.fill_where_predicates(lower_ctx, where_clause);
+        }
+    }
+
+    /// Lowers inline bounds (e.g. the `Clone` in `<T: Clone>`) into where
+    /// predicates on `target`.
+    pub(crate) fn fill_bounds(
+        &mut self,
+        lower_ctx: &LowerCtx<'_>,
+        node: &dyn ast::HasTypeBounds,
+        target: Either<TypeRef, LifetimeRef>,
+    ) {
+        for bound in
+            node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
+        {
+            self.add_where_predicate_from_bound(lower_ctx, bound, None, target.clone());
+        }
+    }
+
+    /// Lowers the parameters of a `<...>` list, allocating each into the
+    /// corresponding arena and recording its inline bounds.
+    fn fill_params(&mut self, lower_ctx: &LowerCtx<'_>, params: ast::GenericParamList) {
+        for type_or_const_param in params.type_or_const_params() {
+            match type_or_const_param {
+                ast::TypeOrConstParam::Type(type_param) => {
+                    let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
+                    // FIXME: Use `Path::from_src`
+                    let default = type_param
+                        .default_type()
+                        .map(|it| Interned::new(TypeRef::from_ast(lower_ctx, it)));
+                    let param = TypeParamData {
+                        name: Some(name.clone()),
+                        default,
+                        provenance: TypeParamProvenance::TypeParamList,
+                    };
+                    self.type_or_consts.alloc(param.into());
+                    let type_ref = TypeRef::Path(name.into());
+                    self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
+                }
+                ast::TypeOrConstParam::Const(const_param) => {
+                    let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
+                    let ty = const_param
+                        .ty()
+                        .map_or(TypeRef::Error, |it| TypeRef::from_ast(lower_ctx, it));
+                    let param = ConstParamData {
+                        name,
+                        ty: Interned::new(ty),
+                        has_default: const_param.default_val().is_some(),
+                    };
+                    self.type_or_consts.alloc(param.into());
+                }
+            }
+        }
+        for lifetime_param in params.lifetime_params() {
+            let name =
+                lifetime_param.lifetime().map_or_else(Name::missing, |lt| Name::new_lifetime(&lt));
+            let param = LifetimeParamData { name: name.clone() };
+            self.lifetimes.alloc(param);
+            let lifetime_ref = LifetimeRef::new_name(name);
+            self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
+        }
+    }
+
+    /// Lowers a `where` clause; each `Bound` of every predicate becomes a
+    /// separate `WherePredicate`.
+    fn fill_where_predicates(&mut self, lower_ctx: &LowerCtx<'_>, where_clause: ast::WhereClause) {
+        for pred in where_clause.predicates() {
+            let target = if let Some(type_ref) = pred.ty() {
+                Either::Left(TypeRef::from_ast(lower_ctx, type_ref))
+            } else if let Some(lifetime) = pred.lifetime() {
+                Either::Right(LifetimeRef::new(&lifetime))
+            } else {
+                continue;
+            };
+
+            let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| {
+                // Higher-Ranked Trait Bounds
+                param_list
+                    .lifetime_params()
+                    .map(|lifetime_param| {
+                        lifetime_param
+                            .lifetime()
+                            .map_or_else(Name::missing, |lt| Name::new_lifetime(&lt))
+                    })
+                    .collect()
+            });
+            for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
+                self.add_where_predicate_from_bound(
+                    lower_ctx,
+                    bound,
+                    lifetimes.as_deref(),
+                    target.clone(),
+                );
+            }
+        }
+    }
+
+    /// Lowers a single `TypeBound` into a `WherePredicate` on `target`;
+    /// `hrtb_lifetimes` carries the `for<'a, ...>` binders, if any.
+    /// Bounds that are not representable (e.g. a trait bound on a lifetime
+    /// target) are silently dropped.
+    fn add_where_predicate_from_bound(
+        &mut self,
+        lower_ctx: &LowerCtx<'_>,
+        bound: ast::TypeBound,
+        hrtb_lifetimes: Option<&[Name]>,
+        target: Either<TypeRef, LifetimeRef>,
+    ) {
+        let bound = TypeBound::from_ast(lower_ctx, bound);
+        let predicate = match (target, bound) {
+            (Either::Left(type_ref), bound) => match hrtb_lifetimes {
+                Some(hrtb_lifetimes) => WherePredicate::ForLifetime {
+                    lifetimes: Box::from(hrtb_lifetimes),
+                    target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)),
+                    bound: Interned::new(bound),
+                },
+                None => WherePredicate::TypeBound {
+                    target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)),
+                    bound: Interned::new(bound),
+                },
+            },
+            (Either::Right(lifetime), TypeBound::Lifetime(bound)) => {
+                WherePredicate::Lifetime { target: lifetime, bound }
+            }
+            _ => return,
+        };
+        self.where_predicates.push(predicate);
+    }
+
+    /// Allocates an implicit type parameter for each argument-position
+    /// `impl Trait` in `type_ref`, expanding macros in type position as needed.
+    pub(crate) fn fill_implicit_impl_trait_args(
+        &mut self,
+        db: &dyn DefDatabase,
+        expander: &mut impl DerefMut<Target = Expander>,
+        type_ref: &TypeRef,
+    ) {
+        type_ref.walk(&mut |type_ref| {
+            if let TypeRef::ImplTrait(bounds) = type_ref {
+                let param = TypeParamData {
+                    name: None,
+                    default: None,
+                    provenance: TypeParamProvenance::ArgumentImplTrait,
+                };
+                let param_id = self.type_or_consts.alloc(param.into());
+                for bound in bounds {
+                    self.where_predicates.push(WherePredicate::TypeBound {
+                        target: WherePredicateTypeTarget::TypeOrConstParam(param_id),
+                        bound: bound.clone(),
+                    });
+                }
+            }
+            if let TypeRef::Macro(mc) = type_ref {
+                // `impl Trait` may be hidden behind a macro in type position;
+                // expand it and recurse into the expansion.
+                let macro_call = mc.to_node(db.upcast());
+                match expander.enter_expand::<ast::Type>(db, macro_call) {
+                    Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
+                        let ctx = LowerCtx::new(db, expander.current_file_id());
+                        let type_ref = TypeRef::from_ast(&ctx, expanded);
+                        self.fill_implicit_impl_trait_args(db, expander, &type_ref);
+                        expander.exit(db, mark);
+                    }
+                    _ => {}
+                }
+            }
+        });
+    }
+
+    /// Releases excess capacity once lowering is complete.
+    pub(crate) fn shrink_to_fit(&mut self) {
+        let Self { lifetimes, type_or_consts: types, where_predicates } = self;
+        lifetimes.shrink_to_fit();
+        types.shrink_to_fit();
+        where_predicates.shrink_to_fit();
+    }
+
+    /// Finds the *type* parameter named `name`, if any.
+    pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> {
+        self.type_or_consts.iter().find_map(|(id, p)| {
+            if p.name() == Some(name) && p.type_param().is_some() {
+                Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+            } else {
+                None
+            }
+        })
+    }
+
+    /// Finds the *const* parameter named `name`, if any.
+    pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> {
+        self.type_or_consts.iter().find_map(|(id, p)| {
+            if p.name() == Some(name) && p.const_param().is_some() {
+                Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+            } else {
+                None
+            }
+        })
+    }
+
+    /// Returns the id of the trait's implicit `Self` parameter, if present.
+    pub fn find_trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
+        self.type_or_consts.iter().find_map(|(id, p)| {
+            matches!(
+                p,
+                TypeOrConstParamData::TypeParamData(TypeParamData {
+                    provenance: TypeParamProvenance::TraitSelf,
+                    ..
+                })
+            )
+            .then(|| id)
+        })
+    }
+}
+
+/// Returns the file `def` was lowered from together with its syntactic generic
+/// parameter list (if any), for mapping parameter ids back to syntax.
+fn file_id_and_params_of(
+    def: GenericDefId,
+    db: &dyn DefDatabase,
+) -> (HirFileId, Option<ast::GenericParamList>) {
+    // Every arm does the same thing, but `src.value` has a different AST type
+    // in each, so deduplicate with a macro (mirrors `id_to_generics!` above)
+    // rather than a helper function.
+    macro_rules! file_id_and_params {
+        ($it:ident) => {{
+            let src = $it.lookup(db).source(db);
+            (src.file_id, src.value.generic_param_list())
+        }};
+    }
+
+    match def {
+        GenericDefId::FunctionId(it) => file_id_and_params!(it),
+        GenericDefId::AdtId(AdtId::StructId(it)) => file_id_and_params!(it),
+        GenericDefId::AdtId(AdtId::UnionId(it)) => file_id_and_params!(it),
+        GenericDefId::AdtId(AdtId::EnumId(it)) => file_id_and_params!(it),
+        GenericDefId::TraitId(it) => file_id_and_params!(it),
+        GenericDefId::TypeAliasId(it) => file_id_and_params!(it),
+        GenericDefId::ImplId(it) => file_id_and_params!(it),
+        // We won't be using this ID anyway
+        GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None),
+    }
+}
+
+impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
+    type Value = Either<ast::TypeOrConstParam, ast::Trait>;
+    /// Maps each type/const parameter id back to the syntax node it was
+    /// lowered from; a trait's implicit `Self` maps to the trait node itself.
+    fn child_source(
+        &self,
+        db: &dyn DefDatabase,
+    ) -> InFile<ArenaMap<LocalTypeOrConstParamId, Self::Value>> {
+        let generic_params = db.generic_params(*self);
+        // Zipping the arena ids with the AST list below relies on ids having
+        // been allocated in source order.
+        let mut idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx);
+
+        let (file_id, generic_params_list) = file_id_and_params_of(*self, db);
+
+        let mut params = ArenaMap::default();
+
+        // For traits the first type index is `Self`, we need to add it before the other params.
+        if let GenericDefId::TraitId(id) = *self {
+            let trait_ref = id.lookup(db).source(db).value;
+            let idx = idx_iter.next().unwrap();
+            params.insert(idx, Either::Right(trait_ref))
+        }
+
+        if let Some(generic_params_list) = generic_params_list {
+            for (idx, ast_param) in idx_iter.zip(generic_params_list.type_or_const_params()) {
+                params.insert(idx, Either::Left(ast_param));
+            }
+        }
+
+        InFile::new(file_id, params)
+    }
+}
+
+impl HasChildSource<LocalLifetimeParamId> for GenericDefId {
+    type Value = ast::LifetimeParam;
+    /// Maps each lifetime parameter id back to the `ast::LifetimeParam` it was
+    /// lowered from (relies on arena ids being in source order).
+    fn child_source(
+        &self,
+        db: &dyn DefDatabase,
+    ) -> InFile<ArenaMap<LocalLifetimeParamId, Self::Value>> {
+        let generic_params = db.generic_params(*self);
+        let idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
+
+        let (file_id, generic_params_list) = file_id_and_params_of(*self, db);
+
+        let mut params = ArenaMap::default();
+
+        if let Some(generic_params_list) = generic_params_list {
+            for (idx, ast_param) in idx_iter.zip(generic_params_list.lifetime_params()) {
+                params.insert(idx, ast_param);
+            }
+        }
+
+        InFile::new(file_id, params)
+    }
+}
+
+impl ChildBySource for GenericDefId {
+    fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+        // Only record children that were lowered from the requested file.
+        let (gfile_id, generic_params_list) = file_id_and_params_of(*self, db);
+        if gfile_id != file_id {
+            return;
+        }
+
+        let generic_params = db.generic_params(*self);
+        let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx);
+        let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
+
+        // For traits the first type index is `Self`, skip it.
+        if let GenericDefId::TraitId(_) = *self {
+            toc_idx_iter.next().unwrap(); // advance_by(1);
+        }
+
+        if let Some(generic_params_list) = generic_params_list {
+            // Zip arena ids with AST params (assumed to be in the same, source,
+            // order) and file each under the key for its parameter kind.
+            for (local_id, ast_param) in
+                toc_idx_iter.zip(generic_params_list.type_or_const_params())
+            {
+                let id = TypeOrConstParamId { parent: *self, local_id };
+                match ast_param {
+                    ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id),
+                    ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id),
+                }
+            }
+            for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
+                let id = LifetimeParamId { parent: *self, local_id };
+                res[keys::LIFETIME_PARAM].insert(ast_param, id);
+            }
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
new file mode 100644
index 000000000..688055e43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -0,0 +1,1108 @@
+//! A map of all publicly exported items in a crate.
+
+use std::{fmt, hash::BuildHasherDefault, sync::Arc};
+
+use base_db::CrateId;
+use fst::{self, Streamer};
+use hir_expand::name::Name;
+use indexmap::{map::Entry, IndexMap};
+use itertools::Itertools;
+use rustc_hash::{FxHashSet, FxHasher};
+
+use crate::{
+ db::DefDatabase, item_scope::ItemInNs, visibility::Visibility, AssocItemId, ModuleDefId,
+ ModuleId, TraitId,
+};
+
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
+
+/// Item import details stored in the `ImportMap`.
/// Item import details stored in the `ImportMap`.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ImportInfo {
    /// A path that can be used to import the item, relative to the crate's root.
    pub path: ImportPath,
    /// The module containing this item.
    pub container: ModuleId,
    /// Whether the import is a trait associated item or not.
    /// Set for trait methods/consts recorded by `collect_trait_assoc_items`.
    pub is_trait_assoc_item: bool,
}
+
/// An import path as a sequence of `Name` segments, without the crate name.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ImportPath {
    pub segments: Vec<Name>,
}

impl fmt::Display for ImportPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Forward to the joined segments' `Display` so formatter flags
        // (width, fill, …) still apply to the whole path.
        fmt::Display::fmt(&self.segments.iter().format("::"), f)
    }
}

impl ImportPath {
    /// Number of path segments; used to prefer shorter import paths.
    fn len(&self) -> usize {
        self.segments.len()
    }
}
+
/// A map from publicly exported items to the path needed to import/name them from a downstream
/// crate.
///
/// Reexports of items are taken into account, ie. if something is exported under multiple
/// names, the one with the shortest import path will be used.
///
/// Note that all paths are relative to the containing crate's root, so the crate name still needs
/// to be prepended to the `ModPath` before the path is valid.
#[derive(Default)]
pub struct ImportMap {
    /// Maps each exported item to its (shortest) import path info.
    map: FxIndexMap<ItemInNs, ImportInfo>,

    /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
    /// values returned by running `fst`.
    ///
    /// Since a path can refer to multiple items due to namespacing, we store all items with the
    /// same path right after each other. This allows us to find all items after the FST gives us
    /// the index of the first one.
    importables: Vec<ItemInNs>,
    /// FST over the lowercased import paths; values index into `importables`.
    fst: fst::Map<Vec<u8>>,
}
+
impl ImportMap {
    /// Builds the `ImportMap` for `krate`: collects all publicly visible items
    /// and indexes their lowercased paths in an FST for fuzzy lookup.
    pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
        let _p = profile::span("import_map_query");

        let mut import_map = collect_import_map(db, krate);

        // Pair each item with its lowercased path; sorting by that path both
        // groups equal-path items together and satisfies the FST builder's
        // requirement that keys are inserted in lexicographic order.
        let mut importables = import_map
            .map
            .iter()
            .map(|(item, info)| (item, fst_path(&info.path)))
            .collect::<Vec<_>>();
        importables.sort_by(|(_, fst_path), (_, fst_path2)| fst_path.cmp(fst_path2));

        // Build the FST, taking care not to insert duplicate values.

        let mut builder = fst::MapBuilder::memory();
        let mut last_batch_start = 0;

        for idx in 0..importables.len() {
            // `last_batch_start` marks the first item of the current run of
            // equal paths; only insert the key once the run ends.
            let key = &importables[last_batch_start].1;
            if let Some((_, fst_path)) = importables.get(idx + 1) {
                if key == fst_path {
                    continue;
                }
            }

            // The FST value is the index of the run's first item in `importables`.
            let _ = builder.insert(key, last_batch_start as u64);

            last_batch_start = idx + 1;
        }

        import_map.fst = builder.into_map();
        import_map.importables = importables.iter().map(|&(&item, _)| item).collect();

        Arc::new(import_map)
    }

    /// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root.
    pub fn path_of(&self, item: ItemInNs) -> Option<&ImportPath> {
        self.import_info_for(item).map(|it| &it.path)
    }

    /// Looks up the full import info recorded for `item`, if it is exported.
    pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> {
        self.map.get(&item)
    }

    /// Records the associated items of trait `tr` under paths derived from
    /// `original_import_info` (the trait's own path plus the item name).
    fn collect_trait_assoc_items(
        &mut self,
        db: &dyn DefDatabase,
        tr: TraitId,
        is_type_in_ns: bool,
        original_import_info: &ImportInfo,
    ) {
        let _p = profile::span("collect_trait_assoc_items");
        for (assoc_item_name, item) in &db.trait_data(tr).items {
            let module_def_id = match item {
                AssocItemId::FunctionId(f) => ModuleDefId::from(*f),
                AssocItemId::ConstId(c) => ModuleDefId::from(*c),
                // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
                // qualifier, ergo no need to store it for imports in import_map
                AssocItemId::TypeAliasId(_) => {
                    cov_mark::hit!(type_aliases_ignored);
                    continue;
                }
            };
            // Associated items live in the same namespace the trait was found in.
            let assoc_item = if is_type_in_ns {
                ItemInNs::Types(module_def_id)
            } else {
                ItemInNs::Values(module_def_id)
            };

            let mut assoc_item_info = original_import_info.clone();
            assoc_item_info.path.segments.push(assoc_item_name.to_owned());
            assoc_item_info.is_trait_assoc_item = true;
            self.map.insert(assoc_item, assoc_item_info);
        }
    }
}
+
/// Walks all publicly visible modules of `krate` (breadth-first from the crate
/// root) and records the shortest import path found for every exported item.
fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap {
    let _p = profile::span("collect_import_map");

    let def_map = db.crate_def_map(krate);
    let mut import_map = ImportMap::default();

    // We look only into modules that are public(ly reexported), starting with the crate root.
    let empty = ImportPath { segments: vec![] };
    let root = def_map.module_id(def_map.root());
    let mut worklist = vec![(root, empty)];
    while let Some((module, mod_path)) = worklist.pop() {
        let ext_def_map;
        let mod_data = if module.krate == krate {
            &def_map[module.local_id]
        } else {
            // The crate might reexport a module defined in another crate.
            ext_def_map = module.def_map(db);
            &ext_def_map[module.local_id]
        };

        // Keep only entries that are `pub` in at least one namespace.
        let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| {
            let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public);
            if per_ns.is_none() { None } else { Some((name, per_ns)) }
        });

        for (name, per_ns) in visible_items {
            let mk_path = || {
                let mut path = mod_path.clone();
                path.segments.push(name.clone());
                path
            };

            for item in per_ns.iter_items() {
                let path = mk_path();
                let path_len = path.len();
                let import_info =
                    ImportInfo { path, container: module, is_trait_assoc_item: false };

                if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
                    import_map.collect_trait_assoc_items(
                        db,
                        tr,
                        matches!(item, ItemInNs::Types(_)),
                        &import_info,
                    );
                }

                match import_map.map.entry(item) {
                    Entry::Vacant(entry) => {
                        entry.insert(import_info);
                    }
                    Entry::Occupied(mut entry) => {
                        // If the new path is shorter, prefer that one.
                        if path_len < entry.get().path.len() {
                            *entry.get_mut() = import_info;
                        } else {
                            continue;
                        }
                    }
                }

                // If we've just added a path to a module, descend into it. We might traverse
                // modules multiple times, but only if the new path to it is shorter than the
                // first (else we `continue` above). This also bounds cyclic reexports.
                if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
                    worklist.push((mod_id, mk_path()));
                }
            }
        }
    }

    import_map
}
+
impl PartialEq for ImportMap {
    fn eq(&self, other: &Self) -> bool {
        // `fst` and `importables` are built from `map`, so we don't need to compare them.
        self.map == other.map
    }
}

impl Eq for ImportMap {}
+
+impl fmt::Debug for ImportMap {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut importable_paths: Vec<_> = self
+ .map
+ .iter()
+ .map(|(item, info)| {
+ let ns = match item {
+ ItemInNs::Types(_) => "t",
+ ItemInNs::Values(_) => "v",
+ ItemInNs::Macros(_) => "m",
+ };
+ format!("- {} ({})", info.path, ns)
+ })
+ .collect();
+
+ importable_paths.sort();
+ f.write_str(&importable_paths.join("\n"))
+ }
+}
+
+fn fst_path(path: &ImportPath) -> String {
+ let _p = profile::span("fst_path");
+ let mut s = path.to_string();
+ s.make_ascii_lowercase();
+ s
+}
+
/// Kind of an importable item, used to filter search results
/// (see `Query::exclude_import_kind`).
#[derive(Debug, Eq, PartialEq, Hash)]
pub enum ImportKind {
    Module,
    Function,
    Adt,
    EnumVariant,
    Const,
    Static,
    Trait,
    TypeAlias,
    BuiltinType,
    AssociatedItem,
    Macro,
}

/// A way to match import map contents against the search query.
#[derive(Debug)]
pub enum SearchMode {
    /// Import map entry should strictly match the query string.
    Equals,
    /// Import map entry should contain the query string.
    Contains,
    /// Import map entry should contain all letters from the query string,
    /// in the same order, but not necessary adjacent.
    Fuzzy,
}
+
/// A search query against an `ImportMap`, built with the builder methods below.
#[derive(Debug)]
pub struct Query {
    /// The query string as provided by the caller.
    query: String,
    /// Lowercased copy of `query`, used for case-insensitive matching.
    lowercased: String,
    /// Match only the last path segment instead of the whole path.
    name_only: bool,
    /// Match only trait associated items.
    assoc_items_only: bool,
    search_mode: SearchMode,
    case_sensitive: bool,
    /// Soft cap on the number of returned items.
    limit: usize,
    /// Import kinds to drop from the results.
    exclude_import_kinds: FxHashSet<ImportKind>,
}
+
+impl Query {
+ pub fn new(query: String) -> Self {
+ let lowercased = query.to_lowercase();
+ Self {
+ query,
+ lowercased,
+ name_only: false,
+ assoc_items_only: false,
+ search_mode: SearchMode::Contains,
+ case_sensitive: false,
+ limit: usize::max_value(),
+ exclude_import_kinds: FxHashSet::default(),
+ }
+ }
+
+ /// Matches entries' names only, ignoring the rest of
+ /// the qualifier.
+ /// Example: for `std::marker::PhantomData`, the name is `PhantomData`.
+ pub fn name_only(self) -> Self {
+ Self { name_only: true, ..self }
+ }
+
+ /// Matches only the entries that are associated items, ignoring the rest.
+ pub fn assoc_items_only(self) -> Self {
+ Self { assoc_items_only: true, ..self }
+ }
+
+ /// Specifies the way to search for the entries using the query.
+ pub fn search_mode(self, search_mode: SearchMode) -> Self {
+ Self { search_mode, ..self }
+ }
+
+ /// Limits the returned number of items to `limit`.
+ pub fn limit(self, limit: usize) -> Self {
+ Self { limit, ..self }
+ }
+
+ /// Respect casing of the query string when matching.
+ pub fn case_sensitive(self) -> Self {
+ Self { case_sensitive: true, ..self }
+ }
+
+ /// Do not include imports of the specified kind in the search results.
+ pub fn exclude_import_kind(mut self, import_kind: ImportKind) -> Self {
+ self.exclude_import_kinds.insert(import_kind);
+ self
+ }
+
+ fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool {
+ let _p = profile::span("import_map::Query::import_matches");
+ if import.is_trait_assoc_item {
+ if self.exclude_import_kinds.contains(&ImportKind::AssociatedItem) {
+ return false;
+ }
+ } else if self.assoc_items_only {
+ return false;
+ }
+
+ let mut input = if import.is_trait_assoc_item || self.name_only {
+ import.path.segments.last().unwrap().to_string()
+ } else {
+ import.path.to_string()
+ };
+ if enforce_lowercase || !self.case_sensitive {
+ input.make_ascii_lowercase();
+ }
+
+ let query_string =
+ if !enforce_lowercase && self.case_sensitive { &self.query } else { &self.lowercased };
+
+ match self.search_mode {
+ SearchMode::Equals => &input == query_string,
+ SearchMode::Contains => input.contains(query_string),
+ SearchMode::Fuzzy => {
+ let mut unchecked_query_chars = query_string.chars();
+ let mut mismatching_query_char = unchecked_query_chars.next();
+
+ for input_char in input.chars() {
+ match mismatching_query_char {
+ None => return true,
+ Some(matching_query_char) if matching_query_char == input_char => {
+ mismatching_query_char = unchecked_query_chars.next();
+ }
+ _ => (),
+ }
+ }
+ mismatching_query_char.is_none()
+ }
+ }
+ }
+}
+
/// Searches dependencies of `krate` for an importable path matching `query`.
///
/// This returns a list of items that could be imported from dependencies of `krate`.
pub fn search_dependencies<'a>(
    db: &'a dyn DefDatabase,
    krate: CrateId,
    query: Query,
) -> FxHashSet<ItemInNs> {
    let _p = profile::span("search_dependencies").detail(|| format!("{:?}", query));

    let graph = db.crate_graph();
    let import_maps: Vec<_> =
        graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();

    // Case-insensitive subsequence matching over the (lowercased) FST keys;
    // exact `SearchMode` filtering happens below via `import_matches`.
    let automaton = fst::automaton::Subsequence::new(&query.lowercased);

    // Run the automaton over the union of all dependencies' FSTs.
    let mut op = fst::map::OpBuilder::new();
    for map in &import_maps {
        op = op.add(map.fst.search(&automaton));
    }

    let mut stream = op.union();

    let mut all_indexed_values = FxHashSet::default();
    while let Some((_, indexed_values)) = stream.next() {
        all_indexed_values.extend(indexed_values.iter().copied());
    }

    let mut res = FxHashSet::default();
    for indexed_value in all_indexed_values {
        let import_map = &import_maps[indexed_value.index];
        // The FST value points at the first item of a group sharing one path.
        let importables = &import_map.importables[indexed_value.value as usize..];

        // Check the query against the group once, case-insensitively; the
        // per-item case-sensitive check (if requested) happens below.
        let common_importable_data = &import_map.map[&importables[0]];
        if !query.import_matches(common_importable_data, true) {
            continue;
        }

        // Path shared by the importable items in this group.
        let common_importables_path_fst = fst_path(&common_importable_data.path);
        // Add the items from this `ModPath` group. Those are all subsequent items in
        // `importables` whose paths match `path`.
        let iter = importables
            .iter()
            .copied()
            .take_while(|item| common_importables_path_fst == fst_path(&import_map.map[item].path))
            .filter(|&item| match item_import_kind(item) {
                Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind),
                None => true,
            })
            .filter(|item| {
                !query.case_sensitive // we've already checked the common importables path case-insensitively
                    || query.import_matches(&import_map.map[item], false)
            });
        res.extend(iter);

        // NOTE: the limit is checked only after adding a whole group, so the
        // result may contain slightly more than `query.limit` items.
        if res.len() >= query.limit {
            return res;
        }
    }

    res
}
+
/// Classifies `item` for `exclude_import_kinds` filtering; `None` for items
/// without a `ModuleDefId` (which are never excluded by kind).
fn item_import_kind(item: ItemInNs) -> Option<ImportKind> {
    Some(match item.as_module_def_id()? {
        ModuleDefId::ModuleId(_) => ImportKind::Module,
        ModuleDefId::FunctionId(_) => ImportKind::Function,
        ModuleDefId::AdtId(_) => ImportKind::Adt,
        ModuleDefId::EnumVariantId(_) => ImportKind::EnumVariant,
        ModuleDefId::ConstId(_) => ImportKind::Const,
        ModuleDefId::StaticId(_) => ImportKind::Static,
        ModuleDefId::TraitId(_) => ImportKind::Trait,
        ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias,
        ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType,
        ModuleDefId::MacroId(_) => ImportKind::Macro,
    })
}
+
+#[cfg(test)]
+mod tests {
+ use base_db::{fixture::WithFixture, SourceDatabase, Upcast};
+ use expect_test::{expect, Expect};
+
+ use crate::{test_db::TestDB, ItemContainerId, Lookup};
+
+ use super::*;
+
    /// Runs `query` against the dependencies of crate `crate_name` in
    /// `ra_fixture` and compares the sorted, rendered results with `expect`.
    /// Each result line is `crate::path (mark)` where mark is `a` for trait
    /// associated items, `f` for functions, and `t`/`v`/`m` per namespace.
    fn check_search(ra_fixture: &str, crate_name: &str, query: Query, expect: Expect) {
        let db = TestDB::with_files(ra_fixture);
        let crate_graph = db.crate_graph();
        let krate = crate_graph
            .iter()
            .find(|krate| {
                crate_graph[*krate].display_name.as_ref().map(|n| n.to_string())
                    == Some(crate_name.to_string())
            })
            .unwrap();

        let actual = search_dependencies(db.upcast(), krate, query)
            .into_iter()
            .filter_map(|dependency| {
                let dependency_krate = dependency.krate(db.upcast())?;
                let dependency_imports = db.import_map(dependency_krate);

                let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
                    Some(assoc_item_path) => (assoc_item_path, "a"),
                    None => (
                        dependency_imports.path_of(dependency)?.to_string(),
                        match dependency {
                            ItemInNs::Types(ModuleDefId::FunctionId(_))
                            | ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
                            ItemInNs::Types(_) => "t",
                            ItemInNs::Values(_) => "v",
                            ItemInNs::Macros(_) => "m",
                        },
                    ),
                };

                Some(format!(
                    "{}::{} ({})\n",
                    crate_graph[dependency_krate].display_name.as_ref()?,
                    path,
                    mark
                ))
            })
            // HashSet iteration order isn't defined - it's different on
            // x86_64 and i686 at the very least
            .sorted()
            .collect::<String>();
        expect.assert_eq(&actual)
    }
+
    /// If `dependency` is a trait associated item, renders it as
    /// `trait_path::item_name`; returns `None` for everything else.
    fn assoc_item_path(
        db: &dyn DefDatabase,
        dependency_imports: &ImportMap,
        dependency: ItemInNs,
    ) -> Option<String> {
        let dependency_assoc_item_id = match dependency {
            ItemInNs::Types(ModuleDefId::FunctionId(id))
            | ItemInNs::Values(ModuleDefId::FunctionId(id)) => AssocItemId::from(id),
            ItemInNs::Types(ModuleDefId::ConstId(id))
            | ItemInNs::Values(ModuleDefId::ConstId(id)) => AssocItemId::from(id),
            ItemInNs::Types(ModuleDefId::TypeAliasId(id))
            | ItemInNs::Values(ModuleDefId::TypeAliasId(id)) => AssocItemId::from(id),
            _ => return None,
        };

        let trait_ = assoc_to_trait(db, dependency)?;
        if let ModuleDefId::TraitId(tr) = trait_.as_module_def_id()? {
            let trait_data = db.trait_data(tr);
            // Recover the item's name by searching the trait's item list for its id.
            let assoc_item_name =
                trait_data.items.iter().find_map(|(assoc_item_name, assoc_item_id)| {
                    if &dependency_assoc_item_id == assoc_item_id {
                        Some(assoc_item_name)
                    } else {
                        None
                    }
                })?;
            return Some(format!("{}::{}", dependency_imports.path_of(trait_)?, assoc_item_name));
        }
        None
    }
+
    /// Resolves `item` to its containing trait (as an `ItemInNs::Types`);
    /// `None` when the item is not an associated item of a trait.
    fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> Option<ItemInNs> {
        let assoc: AssocItemId = match item {
            ItemInNs::Types(it) | ItemInNs::Values(it) => match it {
                ModuleDefId::TypeAliasId(it) => it.into(),
                ModuleDefId::FunctionId(it) => it.into(),
                ModuleDefId::ConstId(it) => it.into(),
                _ => return None,
            },
            _ => return None,
        };

        let container = match assoc {
            AssocItemId::FunctionId(it) => it.lookup(db).container,
            AssocItemId::ConstId(it) => it.lookup(db).container,
            AssocItemId::TypeAliasId(it) => it.lookup(db).container,
        };

        match container {
            // Only trait containers count; impl/module containers yield `None`.
            ItemContainerId::TraitId(it) => Some(ItemInNs::Types(it.into())),
            _ => None,
        }
    }
+
    /// Builds the `ImportMap` of every named crate in `ra_fixture` and
    /// compares the sorted `Debug` renderings with `expect`.
    fn check(ra_fixture: &str, expect: Expect) {
        let db = TestDB::with_files(ra_fixture);
        let crate_graph = db.crate_graph();

        let actual = crate_graph
            .iter()
            .filter_map(|krate| {
                let cdata = &crate_graph[krate];
                let name = cdata.display_name.as_ref()?;

                let map = db.import_map(krate);

                Some(format!("{}:\n{:?}\n", name, map))
            })
            .sorted()
            .collect::<String>();

        expect.assert_eq(&actual)
    }
+
+ #[test]
+ fn smoke() {
+ check(
+ r"
+ //- /main.rs crate:main deps:lib
+
+ mod private {
+ pub use lib::Pub;
+ pub struct InPrivateModule;
+ }
+
+ pub mod publ1 {
+ use lib::Pub;
+ }
+
+ pub mod real_pub {
+ pub use lib::Pub;
+ }
+ pub mod real_pu2 { // same path length as above
+ pub use lib::Pub;
+ }
+
+ //- /lib.rs crate:lib
+ pub struct Pub {}
+ pub struct Pub2; // t + v
+ struct Priv;
+ ",
+ expect![[r#"
+ lib:
+ - Pub (t)
+ - Pub2 (t)
+ - Pub2 (v)
+ main:
+ - publ1 (t)
+ - real_pu2 (t)
+ - real_pub (t)
+ - real_pub::Pub (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn prefers_shortest_path() {
+ check(
+ r"
+ //- /main.rs crate:main
+
+ pub mod sub {
+ pub mod subsub {
+ pub struct Def {}
+ }
+
+ pub use super::sub::subsub::Def;
+ }
+ ",
+ expect![[r#"
+ main:
+ - sub (t)
+ - sub::Def (t)
+ - sub::subsub (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn type_reexport_cross_crate() {
+ // Reexports need to be visible from a crate, even if the original crate exports the item
+ // at a shorter path.
+ check(
+ r"
+ //- /main.rs crate:main deps:lib
+ pub mod m {
+ pub use lib::S;
+ }
+ //- /lib.rs crate:lib
+ pub struct S;
+ ",
+ expect![[r#"
+ lib:
+ - S (t)
+ - S (v)
+ main:
+ - m (t)
+ - m::S (t)
+ - m::S (v)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_reexport() {
+ check(
+ r"
+ //- /main.rs crate:main deps:lib
+ pub mod m {
+ pub use lib::pub_macro;
+ }
+ //- /lib.rs crate:lib
+ #[macro_export]
+ macro_rules! pub_macro {
+ () => {};
+ }
+ ",
+ expect![[r#"
+ lib:
+ - pub_macro (m)
+ main:
+ - m (t)
+ - m::pub_macro (m)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn module_reexport() {
+ // Reexporting modules from a dependency adds all contents to the import map.
+ check(
+ r"
+ //- /main.rs crate:main deps:lib
+ pub use lib::module as reexported_module;
+ //- /lib.rs crate:lib
+ pub mod module {
+ pub struct S;
+ }
+ ",
+ expect![[r#"
+ lib:
+ - module (t)
+ - module::S (t)
+ - module::S (v)
+ main:
+ - reexported_module (t)
+ - reexported_module::S (t)
+ - reexported_module::S (v)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn cyclic_module_reexport() {
+ // A cyclic reexport does not hang.
+ check(
+ r"
+ //- /lib.rs crate:lib
+ pub mod module {
+ pub struct S;
+ pub use super::sub::*;
+ }
+
+ pub mod sub {
+ pub use super::module;
+ }
+ ",
+ expect![[r#"
+ lib:
+ - module (t)
+ - module::S (t)
+ - module::S (v)
+ - sub (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn private_macro() {
+ check(
+ r"
+ //- /lib.rs crate:lib
+ macro_rules! private_macro {
+ () => {};
+ }
+ ",
+ expect![[r#"
+ lib:
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn namespacing() {
+ check(
+ r"
+ //- /lib.rs crate:lib
+ pub struct Thing; // t + v
+ #[macro_export]
+ macro_rules! Thing { // m
+ () => {};
+ }
+ ",
+ expect![[r#"
+ lib:
+ - Thing (m)
+ - Thing (t)
+ - Thing (v)
+ "#]],
+ );
+
+ check(
+ r"
+ //- /lib.rs crate:lib
+ pub mod Thing {} // t
+ #[macro_export]
+ macro_rules! Thing { // m
+ () => {};
+ }
+ ",
+ expect![[r#"
+ lib:
+ - Thing (m)
+ - Thing (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fuzzy_import_trait_and_assoc_items() {
+ cov_mark::check!(type_aliases_ignored);
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+ pub mod fmt {
+ pub trait Display {
+ type FmtTypeAlias;
+ const FMT_CONST: bool;
+
+ fn format_function();
+ fn format_method(&self);
+ }
+ }
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+ expect![[r#"
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ dep::fmt::Display::FMT_CONST (a)
+ dep::fmt::Display::format_function (a)
+ dep::fmt::Display::format_method (a)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn assoc_items_filtering() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+ pub mod fmt {
+ pub trait Display {
+ type FmtTypeAlias;
+ const FMT_CONST: bool;
+
+ fn format_function();
+ fn format_method(&self);
+ }
+ }
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy).assoc_items_only(),
+ expect![[r#"
+ dep::fmt::Display::FMT_CONST (a)
+ dep::fmt::Display::format_function (a)
+ dep::fmt::Display::format_method (a)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string())
+ .search_mode(SearchMode::Fuzzy)
+ .exclude_import_kind(ImportKind::AssociatedItem),
+ expect![[r#"
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string())
+ .search_mode(SearchMode::Fuzzy)
+ .assoc_items_only()
+ .exclude_import_kind(ImportKind::AssociatedItem),
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn search_mode() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep deps:tdep
+ use tdep::fmt as fmt_dep;
+ pub mod fmt {
+ pub trait Display {
+ fn fmt();
+ }
+ }
+ #[macro_export]
+ macro_rules! Fmt {
+ () => {};
+ }
+ pub struct Fmt;
+
+ pub fn format() {}
+ pub fn no() {}
+
+ //- /tdep.rs crate:tdep
+ pub mod fmt {
+ pub struct NotImportableFromMain;
+ }
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ dep::fmt::Display::fmt (a)
+ dep::format (f)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Equals),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display::fmt (a)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Contains),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ dep::fmt::Display::fmt (a)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn name_only() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep deps:tdep
+ use tdep::fmt as fmt_dep;
+ pub mod fmt {
+ pub trait Display {
+ fn fmt();
+ }
+ }
+ #[macro_export]
+ macro_rules! Fmt {
+ () => {};
+ }
+ pub struct Fmt;
+
+ pub fn format() {}
+ pub fn no() {}
+
+ //- /tdep.rs crate:tdep
+ pub mod fmt {
+ pub struct NotImportableFromMain;
+ }
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ dep::fmt::Display::fmt (a)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).name_only(),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display::fmt (a)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_casing() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+
+ pub struct fmt;
+ pub struct FMT;
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("FMT".to_string()),
+ expect![[r#"
+ dep::FMT (t)
+ dep::FMT (v)
+ dep::fmt (t)
+ dep::fmt (v)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("FMT".to_string()).case_sensitive(),
+ expect![[r#"
+ dep::FMT (t)
+ dep::FMT (v)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_limit() {
+ check_search(
+ r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+ pub mod fmt {
+ pub trait Display {
+ fn fmt();
+ }
+ }
+ #[macro_export]
+ macro_rules! Fmt {
+ () => {};
+ }
+ pub struct Fmt;
+
+ pub fn format() {}
+ pub fn no() {}
+ "#,
+ "main",
+ Query::new("".to_string()).limit(2),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_exclusions() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+
+ pub struct fmt;
+ pub struct FMT;
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("FMT".to_string()),
+ expect![[r#"
+ dep::FMT (t)
+ dep::FMT (v)
+ dep::fmt (t)
+ dep::fmt (v)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("FMT".to_string()).exclude_import_kind(ImportKind::Adt),
+ expect![[r#""#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/intern.rs b/src/tools/rust-analyzer/crates/hir-def/src/intern.rs
new file mode 100644
index 000000000..f08521a34
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/intern.rs
@@ -0,0 +1,227 @@
+//! Global `Arc`-based object interning infrastructure.
+//!
+//! Eventually this should probably be replaced with salsa-based interning.
+
+use std::{
+ fmt::{self, Debug, Display},
+ hash::{BuildHasherDefault, Hash, Hasher},
+ ops::Deref,
+ sync::Arc,
+};
+
+use dashmap::{DashMap, SharedValue};
+use hashbrown::HashMap;
+use once_cell::sync::OnceCell;
+use rustc_hash::FxHasher;
+
+use crate::generics::GenericParams;
+
+type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
+type Guard<T> = dashmap::RwLockWriteGuard<
+ 'static,
+ HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>,
+>;
+
+pub struct Interned<T: Internable + ?Sized> {
+ arc: Arc<T>,
+}
+
+impl<T: Internable> Interned<T> {
+ pub fn new(obj: T) -> Self {
+ match Interned::lookup(&obj) {
+ Ok(this) => this,
+ Err(shard) => {
+ let arc = Arc::new(obj);
+ Self::alloc(arc, shard)
+ }
+ }
+ }
+}
+
impl<T: Internable + ?Sized> Interned<T> {
    /// Looks `obj` up in its type's global intern map. On a hit, returns a
    /// clone of the stored handle; on a miss, returns the still-held write
    /// guard of the relevant shard so the caller can insert atomically.
    fn lookup(obj: &T) -> Result<Self, Guard<T>> {
        let storage = T::storage().get();
        let shard_idx = storage.determine_map(obj);
        let shard = &storage.shards()[shard_idx];
        let shard = shard.write();

        // Atomically,
        // - check if `obj` is already in the map
        // - if so, clone its `Arc` and return it
        // - if not, box it up, insert it, and return a clone
        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
        // insert the same object between us looking it up and inserting it.

        // FIXME: avoid double lookup/hashing by using raw entry API (once stable, or when
        // hashbrown can be plugged into dashmap)
        match shard.get_key_value(obj) {
            Some((arc, _)) => Ok(Self { arc: arc.clone() }),
            None => Err(shard),
        }
    }

    /// Inserts `arc` into the shard whose write guard `lookup` handed back,
    /// keeping one clone in the map and returning the other as the handle.
    fn alloc(arc: Arc<T>, mut shard: Guard<T>) -> Self {
        let arc2 = arc.clone();

        shard.insert(arc2, SharedValue::new(()));

        Self { arc }
    }
}
+
+impl Interned<str> {
+ pub fn new_str(s: &str) -> Self {
+ match Interned::lookup(s) {
+ Ok(this) => this,
+ Err(shard) => {
+ let arc = Arc::<str>::from(s);
+ Self::alloc(arc, shard)
+ }
+ }
+ }
+}
+
impl<T: Internable + ?Sized> Drop for Interned<T> {
    #[inline]
    fn drop(&mut self) {
        // When the last `Ref` is dropped, remove the object from the global map.
        if Arc::strong_count(&self.arc) == 2 {
            // Only `self` and the global map point to the object.
            // (This count is racy; `drop_slow` re-checks under the shard lock.)

            self.drop_slow();
        }
    }
}
+
impl<T: Internable + ?Sized> Interned<T> {
    /// Cold path of `drop`: removes the value from the global map once the map
    /// itself holds the only other reference. Re-checks the refcount while
    /// holding the shard's write lock to rule out a racing re-intern.
    #[cold]
    fn drop_slow(&mut self) {
        let storage = T::storage().get();
        let shard_idx = storage.determine_map(&self.arc);
        let shard = &storage.shards()[shard_idx];
        let mut shard = shard.write();

        // FIXME: avoid double lookup
        let (arc, _) = shard.get_key_value(&self.arc).expect("interned value removed prematurely");

        if Arc::strong_count(arc) != 2 {
            // Another thread has interned another copy
            return;
        }

        shard.remove(&self.arc);

        // Shrink the backing storage if the shard is less than 50% occupied.
        if shard.len() * 2 < shard.capacity() {
            shard.shrink_to_fit();
        }
    }
}
+
/// Compares interned `Ref`s using pointer equality.
/// Sound because interning guarantees at most one live allocation per value.
impl<T: Internable> PartialEq for Interned<T> {
    // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects.

    #[inline]
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.arc, &other.arc)
    }
}

impl<T: Internable> Eq for Interned<T> {}

// Separate impl for `str` since the blanket impl above excludes unsized types.
impl PartialEq for Interned<str> {
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.arc, &other.arc)
    }
}

impl Eq for Interned<str> {}

impl<T: Internable + ?Sized> Hash for Interned<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // NOTE: Cast disposes vtable pointer / slice/str length.
        // Hashing the data pointer is consistent with the pointer-equality `Eq`.
        state.write_usize(Arc::as_ptr(&self.arc) as *const () as usize)
    }
}
+
impl<T: Internable + ?Sized> AsRef<T> for Interned<T> {
    #[inline]
    fn as_ref(&self) -> &T {
        &self.arc
    }
}

impl<T: Internable + ?Sized> Deref for Interned<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.arc
    }
}

// Cloning is just a refcount bump on the shared allocation.
impl<T: Internable + ?Sized> Clone for Interned<T> {
    fn clone(&self) -> Self {
        Self { arc: self.arc.clone() }
    }
}

// `Debug`/`Display` delegate to the interned value itself.
impl<T: Debug + Internable + ?Sized> Debug for Interned<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (*self.arc).fmt(f)
    }
}

impl<T: Display + Internable + ?Sized> Display for Interned<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (*self.arc).fmt(f)
    }
}
+
/// Lazily-initialized global intern map for one type; one static instance
/// exists per `Internable` type (see `impl_internable!`).
pub struct InternStorage<T: ?Sized> {
    map: OnceCell<InternMap<T>>,
}

impl<T: ?Sized> InternStorage<T> {
    // `const` so it can initialize a `static` item.
    pub const fn new() -> Self {
        Self { map: OnceCell::new() }
    }
}

impl<T: Internable + ?Sized> InternStorage<T> {
    /// Returns the map, creating it on first use.
    fn get(&self) -> &InternMap<T> {
        self.map.get_or_init(DashMap::default)
    }
}

/// Types that can be interned via `Interned<T>`; provides the per-type
/// global storage.
pub trait Internable: Hash + Eq + 'static {
    fn storage() -> &'static InternStorage<Self>;
}
+
/// Implements `Internable` for a given list of types, making them usable with `Interned`.
///
/// Each expansion declares a dedicated `static` `InternStorage` for the type.
#[macro_export]
#[doc(hidden)]
macro_rules! _impl_internable {
    ( $($t:path),+ $(,)? ) => { $(
        impl Internable for $t {
            fn storage() -> &'static InternStorage<Self> {
                static STORAGE: InternStorage<$t> = InternStorage::new();
                &STORAGE
            }
        }
    )+ };
}

pub use crate::_impl_internable as impl_internable;

impl_internable!(
    crate::type_ref::TypeRef,
    crate::type_ref::TraitRef,
    crate::type_ref::TypeBound,
    crate::path::ModPath,
    crate::path::GenericArgs,
    crate::attr::AttrInput,
    GenericParams,
    str,
);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
new file mode 100644
index 000000000..a11a92204
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -0,0 +1,464 @@
+//! Describes items defined or visible (ie, imported) in a certain scope.
+//! This is shared between modules and blocks.
+
+use std::collections::hash_map::Entry;
+
+use base_db::CrateId;
+use hir_expand::{name::Name, AstId, MacroCallId};
+use itertools::Itertools;
+use once_cell::sync::Lazy;
+use profile::Count;
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
+use stdx::format_to;
+use syntax::ast;
+
+use crate::{
+ attr::AttrId, db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType,
+ ConstId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
+};
+
/// How a name was brought into scope: via a glob (`use m::*`) or an explicit
/// (named) import/definition. Named imports shadow glob imports.
#[derive(Copy, Clone)]
pub(crate) enum ImportType {
    Glob,
    Named,
}

/// Tracks, per namespace, which `(module, name)` pairs were introduced by glob
/// imports, so that a later named import can shadow them.
#[derive(Debug, Default)]
pub struct PerNsGlobImports {
    types: FxHashSet<(LocalModuleId, Name)>,
    values: FxHashSet<(LocalModuleId, Name)>,
    macros: FxHashSet<(LocalModuleId, Name)>,
}
+
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ItemScope {
    _c: Count<Self>,

    /// Defs visible in this scope. This includes `declarations`, but also
    /// imports.
    types: FxHashMap<Name, (ModuleDefId, Visibility)>,
    values: FxHashMap<Name, (ModuleDefId, Visibility)>,
    macros: FxHashMap<Name, (MacroId, Visibility)>,
    /// Names that an import tried to bring in but that could not be resolved.
    unresolved: FxHashSet<Name>,

    /// The defs declared in this scope. Each def has a single scope where it is
    /// declared.
    declarations: Vec<ModuleDefId>,

    impls: Vec<ImplId>,
    unnamed_consts: Vec<ConstId>,
    /// Traits imported via `use Trait as _;`.
    unnamed_trait_imports: FxHashMap<TraitId, Visibility>,
    /// Macros visible in current module in legacy textual scope
    ///
    /// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first.
    /// If it yields no result, then it turns to module scoped `macros`.
    /// If a macro's name is qualified with a path like `crate::foo::bar!()`, `legacy_macros` will be skipped,
    /// and only normal scoped `macros` will be searched in.
    ///
    /// Note that this automatically inherits macros defined textually before the definition of the module itself.
    ///
    /// Module scoped macros will be inserted into `items` instead of here.
    // FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will
    // be all resolved to the last one defined if shadowing happens.
    legacy_macros: FxHashMap<Name, SmallVec<[MacroId; 1]>>,
    /// The attribute macro invocations in this scope, keyed by the item they annotate.
    attr_macros: FxHashMap<AstId<ast::Item>, MacroCallId>,
    /// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes
    /// paired with the derive macro invocations for the specific attribute.
    derive_macros: FxHashMap<AstId<ast::Adt>, SmallVec<[DeriveMacroInvocation; 1]>>,
}
+
/// One `#[derive(...)]` attribute on an ADT: the attribute itself, the macro
/// call expanding the attribute, and one (optional, filled in later) call id
/// per derive listed inside it.
#[derive(Debug, PartialEq, Eq)]
struct DeriveMacroInvocation {
    attr_id: AttrId,
    attr_call_id: MacroCallId,
    // Indexed by the derive's position within the attribute; `None` until the
    // corresponding derive has been resolved (see `set_derive_macro_invoc`).
    derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
}
+
/// The scope containing all builtin types (`i32`, `str`, …), always visible
/// with `Public` visibility; consulted as a fallback during name resolution.
pub(crate) static BUILTIN_SCOPE: Lazy<FxHashMap<Name, PerNs>> = Lazy::new(|| {
    BuiltinType::ALL
        .iter()
        .map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public)))
        .collect()
});

/// Shadow mode for builtin type which can be shadowed by module.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(crate) enum BuiltinShadowMode {
    /// Prefer user-defined modules (or other types) over builtins.
    Module,
    /// Prefer builtins over user-defined modules (but not other types).
    Other,
}
+
/// Legacy macros can only be accessed through special methods like `get_legacy_macros`.
/// Other methods will only resolve values, types and module scoped macros only.
impl ItemScope {
    /// Iterates over every name visible in this scope together with everything
    /// it resolves to (including unresolved imports, which resolve to nothing).
    pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
        // FIXME: shadowing
        self.types
            .keys()
            .chain(self.values.keys())
            .chain(self.macros.keys())
            .chain(self.unresolved.iter())
            .sorted()
            .unique()
            .map(move |name| (name, self.get(name)))
    }

    /// Defs *declared* in this scope (not imports).
    pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
        self.declarations.iter().copied()
    }

    pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
        self.impls.iter().copied()
    }

    pub fn values(
        &self,
    ) -> impl Iterator<Item = (ModuleDefId, Visibility)> + ExactSizeIterator + '_ {
        self.values.values().copied()
    }

    pub fn types(
        &self,
    ) -> impl Iterator<Item = (ModuleDefId, Visibility)> + ExactSizeIterator + '_ {
        self.types.values().copied()
    }

    pub fn unnamed_consts(&self) -> impl Iterator<Item = ConstId> + '_ {
        self.unnamed_consts.iter().copied()
    }

    /// Iterate over all module scoped macros
    pub(crate) fn macros(&self) -> impl Iterator<Item = (&Name, MacroId)> + '_ {
        self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_)))
    }

    /// Iterate over all legacy textual scoped macros visible at the end of the module
    pub fn legacy_macros(&self) -> impl Iterator<Item = (&Name, &[MacroId])> + '_ {
        self.legacy_macros.iter().map(|(name, def)| (name, &**def))
    }

    /// Get a name from current module scope, legacy macros are not included
    pub(crate) fn get(&self, name: &Name) -> PerNs {
        PerNs {
            types: self.types.get(name).copied(),
            values: self.values.get(name).copied(),
            macros: self.macros.get(name).copied(),
        }
    }

    /// Looks `name` up in the type namespace only.
    pub(crate) fn type_(&self, name: &Name) -> Option<(ModuleDefId, Visibility)> {
        self.types.get(name).copied()
    }

    /// XXX: this is O(N) rather than O(1), try to not introduce new usages.
    pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> {
        // Reverse lookup: scan the namespace's map for a matching def.
        let (def, mut iter) = match item {
            ItemInNs::Macros(def) => {
                return self
                    .macros
                    .iter()
                    .find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)));
            }
            ItemInNs::Types(def) => (def, self.types.iter()),
            ItemInNs::Values(def) => (def, self.values.iter()),
        };
        iter.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)))
    }

    /// All traits visible in this scope, whether named or imported as `_`.
    pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {
        self.types
            .values()
            .filter_map(|&(def, _)| match def {
                ModuleDefId::TraitId(t) => Some(t),
                _ => None,
            })
            .chain(self.unnamed_trait_imports.keys().copied())
    }

    pub(crate) fn declare(&mut self, def: ModuleDefId) {
        self.declarations.push(def)
    }

    pub(crate) fn get_legacy_macro(&self, name: &Name) -> Option<&[MacroId]> {
        self.legacy_macros.get(name).map(|it| &**it)
    }

    pub(crate) fn define_impl(&mut self, imp: ImplId) {
        self.impls.push(imp)
    }

    pub(crate) fn define_unnamed_const(&mut self, konst: ConstId) {
        self.unnamed_consts.push(konst);
    }

    pub(crate) fn define_legacy_macro(&mut self, name: Name, mac: MacroId) {
        // Later definitions shadow earlier ones, so push (do not replace).
        self.legacy_macros.entry(name).or_default().push(mac);
    }

    pub(crate) fn add_attr_macro_invoc(&mut self, item: AstId<ast::Item>, call: MacroCallId) {
        self.attr_macros.insert(item, call);
    }

    pub(crate) fn attr_macro_invocs(
        &self,
    ) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
        self.attr_macros.iter().map(|(k, v)| (*k, *v))
    }

    /// Records the resolved call for the `idx`-th derive of attribute `id` on
    /// `adt`. Requires `init_derive_attribute` to have been called first;
    /// silently does nothing otherwise.
    pub(crate) fn set_derive_macro_invoc(
        &mut self,
        adt: AstId<ast::Adt>,
        call: MacroCallId,
        id: AttrId,
        idx: usize,
    ) {
        if let Some(derives) = self.derive_macros.get_mut(&adt) {
            if let Some(DeriveMacroInvocation { derive_call_ids, .. }) =
                derives.iter_mut().find(|&&mut DeriveMacroInvocation { attr_id, .. }| id == attr_id)
            {
                derive_call_ids[idx] = Some(call);
            }
        }
    }

    /// We are required to set this up front as derive invocation recording happens out of order
    /// due to the fixed pointer iteration loop being able to record some derives later than others
    /// independent of their indices.
    pub(crate) fn init_derive_attribute(
        &mut self,
        adt: AstId<ast::Adt>,
        attr_id: AttrId,
        attr_call_id: MacroCallId,
        len: usize,
    ) {
        self.derive_macros.entry(adt).or_default().push(DeriveMacroInvocation {
            attr_id,
            attr_call_id,
            derive_call_ids: smallvec![None; len],
        });
    }

    pub(crate) fn derive_macro_invocs(
        &self,
    ) -> impl Iterator<
        Item = (
            AstId<ast::Adt>,
            impl Iterator<Item = (AttrId, MacroCallId, &[Option<MacroCallId>])>,
        ),
    > + '_ {
        self.derive_macros.iter().map(|(k, v)| {
            (
                *k,
                v.iter().map(|DeriveMacroInvocation { attr_id, attr_call_id, derive_call_ids }| {
                    (*attr_id, *attr_call_id, &**derive_call_ids)
                }),
            )
        })
    }

    pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
        self.unnamed_trait_imports.get(&tr).copied()
    }

    pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) {
        self.unnamed_trait_imports.insert(tr, vis);
    }

    /// Inserts `def` under `lookup`, honoring shadowing rules: a vacant slot is
    /// always filled, and a named import may overwrite a previous glob import
    /// (but never the other way around). Returns whether anything changed,
    /// which drives the def-map fixed-point loop.
    pub(crate) fn push_res_with_import(
        &mut self,
        glob_imports: &mut PerNsGlobImports,
        lookup: (LocalModuleId, Name),
        def: PerNs,
        def_import_type: ImportType,
    ) -> bool {
        let mut changed = false;

        // Expands to the same insert-or-shadow logic once per namespace field
        // (`types` / `values` / `macros`).
        macro_rules! check_changed {
            (
                $changed:ident,
                ( $this:ident / $def:ident ) . $field:ident,
                $glob_imports:ident [ $lookup:ident ],
                $def_import_type:ident
            ) => {{
                if let Some(fld) = $def.$field {
                    let existing = $this.$field.entry($lookup.1.clone());
                    match existing {
                        Entry::Vacant(entry) => {
                            match $def_import_type {
                                ImportType::Glob => {
                                    $glob_imports.$field.insert($lookup.clone());
                                }
                                ImportType::Named => {
                                    $glob_imports.$field.remove(&$lookup);
                                }
                            }

                            entry.insert(fld);
                            $changed = true;
                        }
                        Entry::Occupied(mut entry)
                            if $glob_imports.$field.contains(&$lookup)
                                && matches!($def_import_type, ImportType::Named) =>
                        {
                            // A named import shadows an earlier glob import.
                            cov_mark::hit!(import_shadowed);
                            $glob_imports.$field.remove(&$lookup);
                            entry.insert(fld);
                            $changed = true;
                        }
                        _ => {}
                    }
                }
            }};
        }

        check_changed!(changed, (self / def).types, glob_imports[lookup], def_import_type);
        check_changed!(changed, (self / def).values, glob_imports[lookup], def_import_type);
        check_changed!(changed, (self / def).macros, glob_imports[lookup], def_import_type);

        if def.is_none() && self.unresolved.insert(lookup.1) {
            changed = true;
        }

        changed
    }

    /// Like `entries`, but also yields `use Trait as _;` imports (with no name).
    pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Option<Name>, PerNs)> + 'a {
        self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
            self.unnamed_trait_imports
                .iter()
                .map(|(tr, vis)| (None, PerNs::types(ModuleDefId::TraitId(*tr), *vis))),
        )
    }

    pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, SmallVec<[MacroId; 1]>> {
        self.legacy_macros.clone()
    }

    /// Marks everything that is not a procedural macro as private to `this_module`.
    pub(crate) fn censor_non_proc_macros(&mut self, this_module: ModuleId) {
        self.types
            .values_mut()
            .chain(self.values.values_mut())
            .map(|(_, v)| v)
            .chain(self.unnamed_trait_imports.values_mut())
            .for_each(|vis| *vis = Visibility::Module(this_module));

        for (mac, vis) in self.macros.values_mut() {
            if let MacroId::ProcMacroId(_) = mac {
                // FIXME: Technically this is insufficient since reexports of proc macros are also
                // forbidden. Practically nobody does that.
                continue;
            }

            *vis = Visibility::Module(this_module);
        }
    }

    /// Renders the scope into `buf` for tests/debugging: one `name: t v m _`
    /// line per resolution, sorted by name.
    pub(crate) fn dump(&self, buf: &mut String) {
        let mut entries: Vec<_> = self.resolutions().collect();
        entries.sort_by_key(|(name, _)| name.clone());

        for (name, def) in entries {
            format_to!(buf, "{}:", name.map_or("_".to_string(), |name| name.to_string()));

            if def.types.is_some() {
                buf.push_str(" t");
            }
            if def.values.is_some() {
                buf.push_str(" v");
            }
            if def.macros.is_some() {
                buf.push_str(" m");
            }
            if def.is_none() {
                buf.push_str(" _");
            }

            buf.push('\n');
        }
    }

    pub(crate) fn shrink_to_fit(&mut self) {
        // Exhaustive match to require handling new fields.
        let Self {
            _c: _,
            types,
            values,
            macros,
            unresolved,
            declarations,
            impls,
            unnamed_consts,
            unnamed_trait_imports,
            legacy_macros,
            attr_macros,
            derive_macros,
        } = self;
        types.shrink_to_fit();
        values.shrink_to_fit();
        macros.shrink_to_fit();
        unresolved.shrink_to_fit();
        declarations.shrink_to_fit();
        impls.shrink_to_fit();
        unnamed_consts.shrink_to_fit();
        unnamed_trait_imports.shrink_to_fit();
        legacy_macros.shrink_to_fit();
        attr_macros.shrink_to_fit();
        derive_macros.shrink_to_fit();
    }
}
+
impl PerNs {
    /// Determines which namespace(s) a definition occupies. `has_constructor`
    /// distinguishes tuple/unit structs (whose constructor is also a value)
    /// from record structs (type namespace only).
    pub(crate) fn from_def(def: ModuleDefId, v: Visibility, has_constructor: bool) -> PerNs {
        match def {
            ModuleDefId::ModuleId(_) => PerNs::types(def, v),
            ModuleDefId::FunctionId(_) => PerNs::values(def, v),
            ModuleDefId::AdtId(adt) => match adt {
                AdtId::UnionId(_) => PerNs::types(def, v),
                AdtId::EnumId(_) => PerNs::types(def, v),
                AdtId::StructId(_) => {
                    if has_constructor {
                        PerNs::both(def, def, v)
                    } else {
                        PerNs::types(def, v)
                    }
                }
            },
            // An enum variant is both a type and its constructor value.
            ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v),
            ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => PerNs::values(def, v),
            ModuleDefId::TraitId(_) => PerNs::types(def, v),
            ModuleDefId::TypeAliasId(_) => PerNs::types(def, v),
            ModuleDefId::BuiltinType(_) => PerNs::types(def, v),
            ModuleDefId::MacroId(mac) => PerNs::macros(mac, v),
        }
    }
}
+
/// A definition tagged with the namespace it was found in.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum ItemInNs {
    Types(ModuleDefId),
    Values(ModuleDefId),
    Macros(MacroId),
}

impl ItemInNs {
    /// Returns the underlying def for type/value items; `None` for macros,
    /// which have their own id space.
    pub fn as_module_def_id(self) -> Option<ModuleDefId> {
        match self {
            ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
            ItemInNs::Macros(_) => None,
        }
    }

    /// Returns the crate defining this item (or `None` if `self` is built-in).
    pub fn krate(&self, db: &dyn DefDatabase) -> Option<CrateId> {
        match self {
            ItemInNs::Types(did) | ItemInNs::Values(did) => did.module(db).map(|m| m.krate),
            ItemInNs::Macros(id) => Some(id.module(db).krate),
        }
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
new file mode 100644
index 000000000..375587ee9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -0,0 +1,961 @@
+//! A simplified AST that only contains items.
+//!
+//! This is the primary IR used throughout `hir_def`. It is the input to the name resolution
+//! algorithm, as well as to the queries defined in `adt.rs`, `data.rs`, and most things in
+//! `attr.rs`.
+//!
+//! `ItemTree`s are built per `HirFileId`, from the syntax tree of the parsed file. This means that
+//! they are crate-independent: they don't know which `#[cfg]`s are active or which module they
+//! belong to, since those concepts don't exist at this level (a single `ItemTree` might be part of
+//! multiple crates, or might be included into the same crate twice via `#[path]`).
+//!
+//! One important purpose of this layer is to provide an "invalidation barrier" for incremental
+//! computations: when typing inside an item body, the `ItemTree` of the modified file is typically
+//! unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
+//!
+//! The `ItemTree` for the currently open file can be displayed by using the VS Code command
+//! "Rust Analyzer: Debug ItemTree".
+//!
+//! Compared to rustc's architecture, `ItemTree` has properties from both rustc's AST and HIR: many
+//! syntax-level Rust features are already desugared to simpler forms in the `ItemTree`, but name
+//! resolution has not yet been performed. `ItemTree`s are per-file, while rustc's AST and HIR are
+//! per-crate, because we are interested in incrementally computing it.
+//!
+//! The representation of items in the `ItemTree` should generally mirror the surface syntax: it is
+//! usually a bad idea to desugar a syntax-level construct to something that is structurally
+//! different here. Name resolution needs to be able to process attributes and expand macros
+//! (including attribute macros), and having a 1-to-1 mapping between syntax and the `ItemTree`
+//! avoids introducing subtle bugs.
+//!
+//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
+//! surface syntax.
+
+mod lower;
+mod pretty;
+#[cfg(test)]
+mod tests;
+
+use std::{
+ fmt::{self, Debug},
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+ ops::Index,
+ sync::Arc,
+};
+
+use ast::{AstNode, HasName, StructKind};
+use base_db::CrateId;
+use either::Either;
+use hir_expand::{
+ ast_id_map::FileAstId,
+ hygiene::Hygiene,
+ name::{name, AsName, Name},
+ ExpandTo, HirFileId, InFile,
+};
+use la_arena::{Arena, Idx, IdxRange, RawIdx};
+use profile::Count;
+use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
+use stdx::never;
+use syntax::{ast, match_ast, SyntaxKind};
+
+use crate::{
+ attr::{Attrs, RawAttrs},
+ db::DefDatabase,
+ generics::GenericParams,
+ intern::Interned,
+ path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
+ type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
+ visibility::RawVisibility,
+ BlockId,
+};
+
/// Compact id for a [`RawVisibility`]. The three most common visibilities are
/// encoded as sentinel values at the top of the `u32` range instead of being
/// allocated in the arena (see `ItemVisibilities::alloc` and the
/// `Index<RawVisibilityId>` impl).
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct RawVisibilityId(u32);

impl RawVisibilityId {
    // `u32::MAX` replaces the deprecated `u32::max_value()`; same values,
    // modern spelling. The sentinels must stay above any real arena index.
    pub const PUB: Self = RawVisibilityId(u32::MAX);
    pub const PRIV: Self = RawVisibilityId(u32::MAX - 1);
    pub const PUB_CRATE: Self = RawVisibilityId(u32::MAX - 2);
}
+
+impl fmt::Debug for RawVisibilityId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut f = f.debug_tuple("RawVisibilityId");
+ match *self {
+ Self::PUB => f.field(&"pub"),
+ Self::PRIV => f.field(&"pub(self)"),
+ Self::PUB_CRATE => f.field(&"pub(crate)"),
+ _ => f.field(&self.0),
+ };
+ f.finish()
+ }
+}
+
/// The item tree of a source file.
#[derive(Debug, Default, Eq, PartialEq)]
pub struct ItemTree {
    _c: Count<Self>,

    /// Items appearing directly at the top level of the file.
    top_level: SmallVec<[ModItem; 1]>,
    attrs: FxHashMap<AttrOwner, RawAttrs>,

    // Boxed and optional so that an empty `ItemTree` (common for files with no
    // items) stays tiny; see `data()` / `data_mut()`.
    data: Option<Box<ItemTreeData>>,
}
+
impl ItemTree {
    /// Salsa query: builds the `ItemTree` for a file (or macro expansion) from
    /// its syntax tree.
    pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
        let _p = profile::span("file_item_tree_query").detail(|| format!("{:?}", file_id));
        let syntax = match db.parse_or_expand(file_id) {
            Some(node) => node,
            None => return Default::default(),
        };
        if never!(syntax.kind() == SyntaxKind::ERROR) {
            // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
            return Default::default();
        }

        let ctx = lower::Ctx::new(db, file_id);
        let mut top_attrs = None;
        let mut item_tree = match_ast! {
            match syntax {
                ast::SourceFile(file) => {
                    // Only real source files can carry inner (`#![...]`) attributes.
                    top_attrs = Some(RawAttrs::new(db, &file, ctx.hygiene()));
                    ctx.lower_module_items(&file)
                },
                ast::MacroItems(items) => {
                    ctx.lower_module_items(&items)
                },
                ast::MacroStmts(stmts) => {
                    // The produced statements can include items, which should be added as top-level
                    // items.
                    ctx.lower_macro_stmts(stmts)
                },
                _ => {
                    panic!("cannot create item tree from {:?} {}", syntax, syntax);
                },
            }
        };

        if let Some(attrs) = top_attrs {
            item_tree.attrs.insert(AttrOwner::TopLevel, attrs);
        }
        item_tree.shrink_to_fit();
        Arc::new(item_tree)
    }

    /// Returns an iterator over all items located at the top level of the `HirFileId` this
    /// `ItemTree` was created from.
    pub fn top_level_items(&self) -> &[ModItem] {
        &self.top_level
    }

    /// Returns the inner attributes of the source file.
    pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
        self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone().filter(db, krate)
    }

    pub(crate) fn raw_attrs(&self, of: AttrOwner) -> &RawAttrs {
        self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
    }

    /// Attributes of `of`, filtered by the `#[cfg]`s active in `krate`.
    pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs {
        self.raw_attrs(of).clone().filter(db, krate)
    }

    pub fn pretty_print(&self) -> String {
        pretty::print_item_tree(self)
    }

    // Lowering guarantees `data` is populated before any id into it is handed
    // out, so indexing an empty tree is a bug.
    fn data(&self) -> &ItemTreeData {
        self.data.as_ref().expect("attempted to access data of empty ItemTree")
    }

    fn data_mut(&mut self) -> &mut ItemTreeData {
        self.data.get_or_insert_with(Box::default)
    }

    /// Builds the `ItemTree` for a block expression (items declared in a body).
    fn block_item_tree(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
        let loc = db.lookup_intern_block(block);
        let block = loc.ast_id.to_node(db.upcast());
        let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
        Arc::new(ctx.lower_block(&block))
    }

    /// Releases excess capacity in every arena; trees are immutable once built.
    fn shrink_to_fit(&mut self) {
        if let Some(data) = &mut self.data {
            // Exhaustive destructuring so new fields must be handled here.
            let ItemTreeData {
                imports,
                extern_crates,
                extern_blocks,
                functions,
                params,
                structs,
                fields,
                unions,
                enums,
                variants,
                consts,
                statics,
                traits,
                impls,
                type_aliases,
                mods,
                macro_calls,
                macro_rules,
                macro_defs,
                vis,
            } = &mut **data;

            imports.shrink_to_fit();
            extern_crates.shrink_to_fit();
            extern_blocks.shrink_to_fit();
            functions.shrink_to_fit();
            params.shrink_to_fit();
            structs.shrink_to_fit();
            fields.shrink_to_fit();
            unions.shrink_to_fit();
            enums.shrink_to_fit();
            variants.shrink_to_fit();
            consts.shrink_to_fit();
            statics.shrink_to_fit();
            traits.shrink_to_fit();
            impls.shrink_to_fit();
            type_aliases.shrink_to_fit();
            mods.shrink_to_fit();
            macro_calls.shrink_to_fit();
            macro_rules.shrink_to_fit();
            macro_defs.shrink_to_fit();

            vis.arena.shrink_to_fit();
        }
    }
}
+
/// Arena of the *uncommon* visibilities; the common ones (`pub`, private,
/// `pub(crate)`) are encoded as `RawVisibilityId` sentinels and never stored.
#[derive(Default, Debug, Eq, PartialEq)]
struct ItemVisibilities {
    arena: Arena<RawVisibility>,
}

impl ItemVisibilities {
    /// Returns an id for `vis`, using a sentinel for the three common cases
    /// and allocating in the arena otherwise.
    fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId {
        match &vis {
            RawVisibility::Public => RawVisibilityId::PUB,
            RawVisibility::Module(path) if path.segments().is_empty() => match &path.kind {
                PathKind::Super(0) => RawVisibilityId::PRIV,
                PathKind::Crate => RawVisibilityId::PUB_CRATE,
                _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
            },
            _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
        }
    }
}
+
// Canonical `RawVisibility` values returned when indexing with the three
// sentinel `RawVisibilityId`s (see `Index<RawVisibilityId> for ItemTree`).
static VIS_PUB: RawVisibility = RawVisibility::Public;
static VIS_PRIV: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)));
static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Crate));
+
/// Backing storage of an [`ItemTree`]: one arena per item kind, indexed via
/// the ids produced by the `mod_items!` / `impl_index!` macros below.
#[derive(Default, Debug, Eq, PartialEq)]
struct ItemTreeData {
    imports: Arena<Import>,
    extern_crates: Arena<ExternCrate>,
    extern_blocks: Arena<ExternBlock>,
    functions: Arena<Function>,
    params: Arena<Param>,
    structs: Arena<Struct>,
    fields: Arena<Field>,
    unions: Arena<Union>,
    enums: Arena<Enum>,
    variants: Arena<Variant>,
    consts: Arena<Const>,
    statics: Arena<Static>,
    traits: Arena<Trait>,
    impls: Arena<Impl>,
    type_aliases: Arena<TypeAlias>,
    mods: Arena<Mod>,
    macro_calls: Arena<MacroCall>,
    macro_rules: Arena<MacroRules>,
    macro_defs: Arena<MacroDef>,

    vis: ItemVisibilities,
}
+
/// Anything within the tree that can own attributes; key into `ItemTree::attrs`.
#[derive(Debug, Eq, PartialEq, Hash)]
pub enum AttrOwner {
    /// Attributes on an item.
    ModItem(ModItem),
    /// Inner attributes of the source file.
    TopLevel,

    // Non-item attribute owners (variants, fields, fn params) are addressed by
    // their raw arena index.
    Variant(Idx<Variant>),
    Field(Idx<Field>),
    Param(Idx<Param>),
}

// Generates `From<$t> for AttrOwner` for each listed variant payload, so ids
// can be passed wherever an `AttrOwner` is expected.
macro_rules! from_attrs {
    ( $( $var:ident($t:ty) ),+ ) => {
        $(
            impl From<$t> for AttrOwner {
                fn from(t: $t) -> AttrOwner {
                    AttrOwner::$var(t)
                }
            }
        )+
    };
}

from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Idx<Param>));
+
/// Trait implemented by all item nodes in the item tree.
///
/// Implementations are generated by the `mod_items!` macro below; this trait
/// ties each item type to its AST node and its arena in `ItemTreeData`.
pub trait ItemTreeNode: Clone {
    type Source: AstNode + Into<ast::Item>;

    /// The `AstId` of the syntax node this item was lowered from.
    fn ast_id(&self) -> FileAstId<Self::Source>;

    /// Looks up an instance of `Self` in an item tree.
    fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self;

    /// Downcasts a `ModItem` to a `FileItemTreeId` specific to this type.
    fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>>;

    /// Upcasts a `FileItemTreeId` to a generic `ModItem`.
    fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem;
}
+
/// Typed index of an item within a single `ItemTree` (not globally unique —
/// pair it with a `TreeId` to get an `ItemTreeId`).
pub struct FileItemTreeId<N: ItemTreeNode> {
    index: Idx<N>,
    _p: PhantomData<N>,
}

// The following impls are manual because deriving them would incorrectly
// require `N: Clone/Copy/Eq/Hash/Debug`, even though only the index is used.

impl<N: ItemTreeNode> Clone for FileItemTreeId<N> {
    fn clone(&self) -> Self {
        Self { index: self.index, _p: PhantomData }
    }
}
impl<N: ItemTreeNode> Copy for FileItemTreeId<N> {}

impl<N: ItemTreeNode> PartialEq for FileItemTreeId<N> {
    fn eq(&self, other: &FileItemTreeId<N>) -> bool {
        self.index == other.index
    }
}
impl<N: ItemTreeNode> Eq for FileItemTreeId<N> {}

impl<N: ItemTreeNode> Hash for FileItemTreeId<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.index.hash(state)
    }
}

impl<N: ItemTreeNode> fmt::Debug for FileItemTreeId<N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.index.fmt(f)
    }
}
+
/// Identifies a particular [`ItemTree`].
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct TreeId {
    file: HirFileId,
    /// `Some` when this tree belongs to a block expression inside `file`
    /// rather than to the file itself.
    block: Option<BlockId>,
}

impl TreeId {
    pub(crate) fn new(file: HirFileId, block: Option<BlockId>) -> Self {
        Self { file, block }
    }

    /// Fetches (or computes) the tree this id refers to.
    pub(crate) fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> {
        match self.block {
            Some(block) => ItemTree::block_item_tree(db, block),
            None => db.file_item_tree(self.file),
        }
    }

    pub(crate) fn file_id(self) -> HirFileId {
        self.file
    }

    pub(crate) fn is_block(self) -> bool {
        self.block.is_some()
    }
}
+
/// Globally addressable item: the tree it lives in plus its index inside it.
#[derive(Debug)]
pub struct ItemTreeId<N: ItemTreeNode> {
    tree: TreeId,
    pub value: FileItemTreeId<N>,
}

impl<N: ItemTreeNode> ItemTreeId<N> {
    pub fn new(tree: TreeId, idx: FileItemTreeId<N>) -> Self {
        Self { tree, value: idx }
    }

    pub fn file_id(self) -> HirFileId {
        self.tree.file
    }

    pub fn tree_id(self) -> TreeId {
        self.tree
    }

    pub fn item_tree(self, db: &dyn DefDatabase) -> Arc<ItemTree> {
        self.tree.item_tree(db)
    }
}

// Manual impls to avoid bounding `N` (see `FileItemTreeId` above).

impl<N: ItemTreeNode> Copy for ItemTreeId<N> {}
impl<N: ItemTreeNode> Clone for ItemTreeId<N> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<N: ItemTreeNode> PartialEq for ItemTreeId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.tree == other.tree && self.value == other.value
    }
}

impl<N: ItemTreeNode> Eq for ItemTreeId<N> {}

impl<N: ItemTreeNode> Hash for ItemTreeId<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.tree.hash(state);
        self.value.hash(state);
    }
}
+
// For each `Type in arena_field -> AstNode` triple, generates:
// - a `ModItem` enum variant wrapping `FileItemTreeId<Type>`,
// - `From<FileItemTreeId<Type>> for ModItem`,
// - the `ItemTreeNode` impl tying the type to its AST node and arena,
// - `Index<Idx<Type>> for ItemTree`.
macro_rules! mod_items {
    ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => {
        #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
        pub enum ModItem {
            $(
                $typ(FileItemTreeId<$typ>),
            )+
        }

        $(
            impl From<FileItemTreeId<$typ>> for ModItem {
                fn from(id: FileItemTreeId<$typ>) -> ModItem {
                    ModItem::$typ(id)
                }
            }
        )+

        $(
            impl ItemTreeNode for $typ {
                type Source = $ast;

                fn ast_id(&self) -> FileAstId<Self::Source> {
                    self.ast_id
                }

                fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self {
                    &tree.data().$fld[index]
                }

                fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>> {
                    match mod_item {
                        ModItem::$typ(id) => Some(id),
                        _ => None,
                    }
                }

                fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem {
                    ModItem::$typ(id)
                }
            }

            impl Index<Idx<$typ>> for ItemTree {
                type Output = $typ;

                fn index(&self, index: Idx<$typ>) -> &Self::Output {
                    &self.data().$fld[index]
                }
            }
        )+
    };
}

mod_items! {
    Import in imports -> ast::Use,
    ExternCrate in extern_crates -> ast::ExternCrate,
    ExternBlock in extern_blocks -> ast::ExternBlock,
    Function in functions -> ast::Fn,
    Struct in structs -> ast::Struct,
    Union in unions -> ast::Union,
    Enum in enums -> ast::Enum,
    Const in consts -> ast::Const,
    Static in statics -> ast::Static,
    Trait in traits -> ast::Trait,
    Impl in impls -> ast::Impl,
    TypeAlias in type_aliases -> ast::TypeAlias,
    Mod in mods -> ast::Module,
    MacroCall in macro_calls -> ast::MacroCall,
    MacroRules in macro_rules -> ast::MacroRules,
    MacroDef in macro_defs -> ast::MacroDef,
}
+
// `Index` impls for the non-`ModItem` arenas (fields, variants, params),
// which are not covered by `mod_items!`.
macro_rules! impl_index {
    ( $($fld:ident: $t:ty),+ $(,)? ) => {
        $(
            impl Index<Idx<$t>> for ItemTree {
                type Output = $t;

                fn index(&self, index: Idx<$t>) -> &Self::Output {
                    &self.data().$fld[index]
                }
            }
        )+
    };
}

impl_index!(fields: Field, variants: Variant, params: Param);

impl Index<RawVisibilityId> for ItemTree {
    type Output = RawVisibility;
    fn index(&self, index: RawVisibilityId) -> &Self::Output {
        // The three sentinel ids resolve to shared statics; everything else is
        // a real arena index (see `ItemVisibilities::alloc`).
        match index {
            RawVisibilityId::PRIV => &VIS_PRIV,
            RawVisibilityId::PUB => &VIS_PUB,
            RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE,
            _ => &self.data().vis.arena[Idx::from_raw(index.0.into())],
        }
    }
}

impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
    type Output = N;
    fn index(&self, id: FileItemTreeId<N>) -> &N {
        N::lookup(self, id.index)
    }
}
+
/// A `use` item, lowered to its visibility and (possibly nested) use tree.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Import {
    pub visibility: RawVisibilityId,
    pub ast_id: FileAstId<ast::Use>,
    pub use_tree: UseTree,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct UseTree {
    /// Index of the corresponding `ast::UseTree`; used to map back to the
    /// syntax node (see `Import::use_tree_to_ast`).
    pub index: Idx<ast::UseTree>,
    kind: UseTreeKind,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum UseTreeKind {
    /// ```
    /// use path::to::Item;
    /// use path::to::Item as Renamed;
    /// use path::to::Trait as _;
    /// ```
    Single { path: Interned<ModPath>, alias: Option<ImportAlias> },

    /// ```
    /// use *; // (invalid, but can occur in nested tree)
    /// use path::*;
    /// ```
    Glob { path: Option<Interned<ModPath>> },

    /// ```
    /// use prefix::{self, Item, ...};
    /// ```
    Prefixed { prefix: Option<Interned<ModPath>>, list: Box<[UseTree]> },
}

/// `extern crate name [as alias];`
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ExternCrate {
    pub name: Name,
    pub alias: Option<ImportAlias>,
    pub visibility: RawVisibilityId,
    pub ast_id: FileAstId<ast::ExternCrate>,
}

/// `extern "abi" { ... }`
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ExternBlock {
    pub abi: Option<Interned<str>>,
    pub ast_id: FileAstId<ast::ExternBlock>,
    pub children: Box<[ModItem]>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Function {
    pub name: Name,
    pub visibility: RawVisibilityId,
    pub explicit_generic_params: Interned<GenericParams>,
    pub abi: Option<Interned<str>>,
    pub params: IdxRange<Param>,
    pub ret_type: Interned<TypeRef>,
    /// The inner return type of an `async fn` (before wrapping in a future);
    /// `None` for non-async functions.
    pub async_ret_type: Option<Interned<TypeRef>>,
    pub ast_id: FileAstId<ast::Fn>,
    // Boolean qualifiers (`async`, `unsafe`, has-body, …) packed into one byte.
    pub(crate) flags: FnFlags,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Param {
    /// A regular parameter; the name is `None` for patterns like `_: i32`.
    Normal(Option<Name>, Interned<TypeRef>),
    /// The trailing `...` of a variadic `extern` function.
    Varargs,
}
+
bitflags::bitflags! {
    /// Boolean properties of a [`Function`], packed into a single byte.
    #[derive(Default)]
    pub(crate) struct FnFlags: u8 {
        const HAS_SELF_PARAM = 1 << 0;
        const HAS_BODY = 1 << 1;
        const HAS_DEFAULT_KW = 1 << 2;
        const HAS_CONST_KW = 1 << 3;
        const HAS_ASYNC_KW = 1 << 4;
        const HAS_UNSAFE_KW = 1 << 5;
        const IS_VARARGS = 1 << 6;
    }
}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Struct {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Struct>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Union {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Union>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Enum {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub variants: IdxRange<Variant>,
+ pub ast_id: FileAstId<ast::Enum>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Const {
+ /// `None` for `const _: () = ();`
+ pub name: Option<Name>,
+ pub visibility: RawVisibilityId,
+ pub type_ref: Interned<TypeRef>,
+ pub ast_id: FileAstId<ast::Const>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Static {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub mutable: bool,
+ pub type_ref: Interned<TypeRef>,
+ pub ast_id: FileAstId<ast::Static>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Trait {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub is_auto: bool,
+ pub is_unsafe: bool,
+ pub items: Box<[AssocItem]>,
+ pub ast_id: FileAstId<ast::Trait>,
+}
+
+/// An `impl` block.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Impl {
+ pub generic_params: Interned<GenericParams>,
+ /// `None` for inherent impls — and also when lowering the trait ref failed
+ /// (see the FIXME in `lower_impl`).
+ pub target_trait: Option<Interned<TraitRef>>,
+ pub self_ty: Interned<TypeRef>,
+ /// `true` for negative impls (`impl !Trait for …`).
+ pub is_negative: bool,
+ pub items: Box<[AssocItem]>,
+ pub ast_id: FileAstId<ast::Impl>,
+}
+
+/// A `type` alias (free-standing or associated).
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TypeAlias {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`.
+ pub bounds: Box<[Interned<TypeBound>]>,
+ pub generic_params: Interned<GenericParams>,
+ /// `None` when no `= Type` is given (e.g. an associated type declaration).
+ pub type_ref: Option<Interned<TypeRef>>,
+ pub ast_id: FileAstId<ast::TypeAlias>,
+}
+
+/// A `mod` item; `kind` distinguishes inline (`mod m { … }`) from out-of-line (`mod m;`) modules.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Mod {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub kind: ModKind,
+ pub ast_id: FileAstId<ast::Module>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum ModKind {
+ /// `mod m { ... }`
+ Inline { items: Box<[ModItem]> },
+
+ /// `mod m;`
+ Outline,
+}
+
+/// A macro invocation in item position.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroCall {
+ /// Path to the called macro.
+ pub path: Interned<ModPath>,
+ pub ast_id: FileAstId<ast::MacroCall>,
+ /// What kind of node the call expands to, derived from the call site.
+ pub expand_to: ExpandTo,
+}
+
+/// A `macro_rules!` definition.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroRules {
+ /// The name of the declared macro.
+ pub name: Name,
+ pub ast_id: FileAstId<ast::MacroRules>,
+}
+
+/// "Macros 2.0" macro definition.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroDef {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::MacroDef>,
+}
+
+impl Import {
+ /// Maps a `UseTree` contained in this import back to its AST node.
+ pub fn use_tree_to_ast(
+ &self,
+ db: &dyn DefDatabase,
+ file_id: HirFileId,
+ index: Idx<ast::UseTree>,
+ ) -> ast::UseTree {
+ // Re-lower the AST item and get the source map.
+ // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
+ let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
+ let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
+ let hygiene = Hygiene::new(db.upcast(), file_id);
+ let (_, source_map) =
+ lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
+ source_map[index].clone()
+ }
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum ImportKind {
+ /// The `ModPath` is imported normally.
+ Plain,
+ /// This is a glob-import of all names in the `ModPath`.
+ Glob,
+ /// This is a `some::path::self` import, which imports `some::path` only in type namespace.
+ TypeOnly,
+}
+
+impl UseTree {
+ /// Expands the `UseTree` into individually imported `ModPath`s.
+ ///
+ /// `cb` is invoked once per leaf of the tree, with the leaf's index
+ /// (usable with the lowering source map), the fully-joined path, the
+ /// import kind, and any rename alias.
+ pub fn expand(
+ &self,
+ mut cb: impl FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+ ) {
+ self.expand_impl(None, &mut cb)
+ }
+
+ /// Recursive worker for [`Self::expand`]. `prefix` accumulates the path
+ /// of the enclosing `Prefixed` trees.
+ fn expand_impl(
+ &self,
+ prefix: Option<ModPath>,
+ cb: &mut dyn FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+ ) {
+ // Joins `prefix` and `path` into a single path, returning `None` when
+ // the combination cannot be expressed as one `ModPath`.
+ fn concat_mod_paths(
+ prefix: Option<ModPath>,
+ path: &ModPath,
+ ) -> Option<(ModPath, ImportKind)> {
+ match (prefix, &path.kind) {
+ // No prefix accumulated yet: the path stands on its own.
+ (None, _) => Some((path.clone(), ImportKind::Plain)),
+ // Plain path: append its segments to the prefix.
+ (Some(mut prefix), PathKind::Plain) => {
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ (Some(mut prefix), PathKind::Super(n))
+ if *n > 0 && prefix.segments().is_empty() =>
+ {
+ // `super::super` + `super::rest`
+ match &mut prefix.kind {
+ PathKind::Super(m) => {
+ cov_mark::hit!(concat_super_mod_paths);
+ *m += *n;
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ _ => None,
+ }
+ }
+ (Some(prefix), PathKind::Super(0)) if path.segments().is_empty() => {
+ // `some::path::self` == `some::path`
+ Some((prefix, ImportKind::TypeOnly))
+ }
+ // Any other prefixed-path combination is invalid.
+ (Some(_), _) => None,
+ }
+ }
+
+ match &self.kind {
+ UseTreeKind::Single { path, alias } => {
+ if let Some((path, kind)) = concat_mod_paths(prefix, path) {
+ cb(self.index, path, kind, alias.clone());
+ }
+ }
+ UseTreeKind::Glob { path: Some(path) } => {
+ if let Some((path, _)) = concat_mod_paths(prefix, path) {
+ cb(self.index, path, ImportKind::Glob, None);
+ }
+ }
+ UseTreeKind::Glob { path: None } => {
+ // A bare `*` inside a group: glob over the accumulated prefix.
+ if let Some(prefix) = prefix {
+ cb(self.index, prefix, ImportKind::Glob, None);
+ }
+ }
+ UseTreeKind::Prefixed { prefix: additional_prefix, list } => {
+ let prefix = match additional_prefix {
+ Some(path) => match concat_mod_paths(prefix, path) {
+ Some((path, ImportKind::Plain)) => Some(path),
+ // A non-plain combined prefix can't be prepended to
+ // children, so skip the whole subtree.
+ _ => return,
+ },
+ None => prefix,
+ };
+ for tree in &**list {
+ tree.expand_impl(prefix.clone(), cb);
+ }
+ }
+ }
+ }
+}
+
+/// Generates a `From<$t>` impl for each listed variant of enum `$e`.
+macro_rules! impl_froms {
+ ($e:ident { $($v:ident ($t:ty)),* $(,)? }) => {
+ $(
+ impl From<$t> for $e {
+ fn from(it: $t) -> $e {
+ $e::$v(it)
+ }
+ }
+ )*
+ }
+}
+
+impl ModItem {
+ /// Returns the corresponding `AssocItem` when this item kind is one that
+ /// may appear inside a trait or impl body; `None` for module-only items.
+ pub fn as_assoc_item(&self) -> Option<AssocItem> {
+ match self {
+ ModItem::Import(_)
+ | ModItem::ExternCrate(_)
+ | ModItem::ExternBlock(_)
+ | ModItem::Struct(_)
+ | ModItem::Union(_)
+ | ModItem::Enum(_)
+ | ModItem::Static(_)
+ | ModItem::Trait(_)
+ | ModItem::Impl(_)
+ | ModItem::Mod(_)
+ | ModItem::MacroRules(_)
+ | ModItem::MacroDef(_) => None,
+ ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)),
+ ModItem::Const(konst) => Some(AssocItem::Const(*konst)),
+ ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)),
+ ModItem::Function(func) => Some(AssocItem::Function(*func)),
+ }
+ }
+
+ /// Narrows this item to the id of a concrete item kind `N`, if it is one.
+ pub fn downcast<N: ItemTreeNode>(self) -> Option<FileItemTreeId<N>> {
+ N::id_from_mod_item(self)
+ }
+
+ /// Maps this item back to its (upcast, kind-erased) AST id in the source file.
+ pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
+ match self {
+ ModItem::Import(it) => tree[it.index].ast_id().upcast(),
+ ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
+ ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Function(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Struct(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Union(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Enum(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Const(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Static(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
+ ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroCall(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroRules(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroDef(it) => tree[it.index].ast_id().upcast(),
+ }
+ }
+}
+
+/// An item that can appear inside a trait or impl body, referenced by its item-tree id.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum AssocItem {
+ Function(FileItemTreeId<Function>),
+ TypeAlias(FileItemTreeId<TypeAlias>),
+ Const(FileItemTreeId<Const>),
+ MacroCall(FileItemTreeId<MacroCall>),
+}
+
+impl_froms!(AssocItem {
+ Function(FileItemTreeId<Function>),
+ TypeAlias(FileItemTreeId<TypeAlias>),
+ Const(FileItemTreeId<Const>),
+ MacroCall(FileItemTreeId<MacroCall>),
+});
+
+impl From<AssocItem> for ModItem {
+ fn from(item: AssocItem) -> Self {
+ match item {
+ AssocItem::Function(it) => it.into(),
+ AssocItem::TypeAlias(it) => it.into(),
+ AssocItem::Const(it) => it.into(),
+ AssocItem::MacroCall(it) => it.into(),
+ }
+ }
+}
+
+impl AssocItem {
+ /// Maps this associated item back to its (upcast) AST id in the source file.
+ pub fn ast_id(self, tree: &ItemTree) -> FileAstId<ast::AssocItem> {
+ match self {
+ AssocItem::Function(id) => tree[id].ast_id.upcast(),
+ AssocItem::TypeAlias(id) => tree[id].ast_id.upcast(),
+ AssocItem::Const(id) => tree[id].ast_id.upcast(),
+ AssocItem::MacroCall(id) => tree[id].ast_id.upcast(),
+ }
+ }
+}
+
+/// A single variant of an `Enum`.
+#[derive(Debug, Eq, PartialEq)]
+pub struct Variant {
+ pub name: Name,
+ pub fields: Fields,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Fields {
+ Record(IdxRange<Field>),
+ Tuple(IdxRange<Field>),
+ Unit,
+}
+
+/// A single field of an enum variant or struct
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Field {
+ pub name: Name,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibilityId,
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
new file mode 100644
index 000000000..7f2551e94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -0,0 +1,773 @@
+//! AST -> `ItemTree` lowering code.
+
+use std::{collections::hash_map::Entry, sync::Arc};
+
+use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
+use syntax::ast::{self, HasModuleItem};
+
+use crate::{
+ generics::{GenericParams, TypeParamData, TypeParamProvenance},
+ type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
+};
+
+use super::*;
+
+/// Wraps a raw arena index into a typed `FileItemTreeId`.
+fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> {
+ FileItemTreeId { index, _p: PhantomData }
+}
+
+/// Lowering context: collects the items of one file (or block / macro
+/// expansion) into an `ItemTree`.
+pub(super) struct Ctx<'a> {
+ db: &'a dyn DefDatabase,
+ // The tree being built; handed out by `lower_module_items` & friends.
+ tree: ItemTree,
+ source_ast_id_map: Arc<AstIdMap>,
+ body_ctx: crate::body::LowerCtx<'a>,
+}
+
+impl<'a> Ctx<'a> {
+ pub(super) fn new(db: &'a dyn DefDatabase, file: HirFileId) -> Self {
+ Self {
+ db,
+ tree: ItemTree::default(),
+ source_ast_id_map: db.ast_id_map(file),
+ body_ctx: crate::body::LowerCtx::new(db, file),
+ }
+ }
+
+ pub(super) fn hygiene(&self) -> &Hygiene {
+ self.body_ctx.hygiene()
+ }
+
+ pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
+ self.tree.top_level =
+ item_owner.items().flat_map(|item| self.lower_mod_item(&item)).collect();
+ self.tree
+ }
+
+ pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree {
+ self.tree.top_level = stmts
+ .statements()
+ .filter_map(|stmt| {
+ match stmt {
+ ast::Stmt::Item(item) => Some(item),
+ // Macro calls can be both items and expressions. The syntax library always treats
+ // them as expressions here, so we undo that.
+ ast::Stmt::ExprStmt(es) => match es.expr()? {
+ ast::Expr::MacroExpr(expr) => {
+ cov_mark::hit!(macro_call_in_macro_stmts_is_added_to_item_tree);
+ Some(expr.macro_call()?.into())
+ }
+ _ => None,
+ },
+ _ => None,
+ }
+ })
+ .flat_map(|item| self.lower_mod_item(&item))
+ .collect();
+
+ if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr() {
+ if let Some(call) = tail_macro.macro_call() {
+ cov_mark::hit!(macro_stmt_with_trailing_macro_expr);
+ if let Some(mod_item) = self.lower_mod_item(&call.into()) {
+ self.tree.top_level.push(mod_item);
+ }
+ }
+ }
+
+ self.tree
+ }
+
+ pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
+ self.tree.top_level = block
+ .statements()
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::Item(item) => self.lower_mod_item(&item),
+ // Macro calls can be both items and expressions. The syntax library always treats
+ // them as expressions here, so we undo that.
+ ast::Stmt::ExprStmt(es) => match es.expr()? {
+ ast::Expr::MacroExpr(expr) => self.lower_mod_item(&expr.macro_call()?.into()),
+ _ => None,
+ },
+ _ => None,
+ })
+ .collect();
+
+ self.tree
+ }
+
+ fn data(&mut self) -> &mut ItemTreeData {
+ self.tree.data_mut()
+ }
+
+ /// Lowers one item, allocating it in the tree and recording its
+ /// attributes. Returns `None` when lowering fails (most lowerers bail
+ /// when the item has no name); `Const` and `ExternBlock` are infallible.
+ fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
+ let attrs = RawAttrs::new(self.db, item, self.hygiene());
+ let item: ModItem = match item {
+ ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
+ ast::Item::Union(ast) => self.lower_union(ast)?.into(),
+ ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
+ ast::Item::Fn(ast) => self.lower_function(ast)?.into(),
+ ast::Item::TypeAlias(ast) => self.lower_type_alias(ast)?.into(),
+ ast::Item::Static(ast) => self.lower_static(ast)?.into(),
+ ast::Item::Const(ast) => self.lower_const(ast).into(),
+ ast::Item::Module(ast) => self.lower_module(ast)?.into(),
+ ast::Item::Trait(ast) => self.lower_trait(ast)?.into(),
+ ast::Item::Impl(ast) => self.lower_impl(ast)?.into(),
+ ast::Item::Use(ast) => self.lower_use(ast)?.into(),
+ ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast)?.into(),
+ ast::Item::MacroCall(ast) => self.lower_macro_call(ast)?.into(),
+ ast::Item::MacroRules(ast) => self.lower_macro_rules(ast)?.into(),
+ ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
+ ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
+ };
+
+ self.add_attrs(item.into(), attrs);
+
+ Some(item)
+ }
+
+ /// Records `attrs` for `item`, merging with any attributes already
+ /// stored for the same owner rather than overwriting them.
+ fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
+ match self.tree.attrs.entry(item) {
+ Entry::Occupied(mut entry) => {
+ *entry.get_mut() = entry.get().merge(attrs);
+ }
+ Entry::Vacant(entry) => {
+ entry.insert(attrs);
+ }
+ }
+ }
+
+ fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
+ match item {
+ ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
+ ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
+ ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
+ ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
+ }
+ }
+
+ fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
+ let visibility = self.lower_visibility(strukt);
+ let name = strukt.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Struct, strukt);
+ let fields = self.lower_fields(&strukt.kind());
+ let ast_id = self.source_ast_id_map.ast_id(strukt);
+ let res = Struct { name, visibility, generic_params, fields, ast_id };
+ Some(id(self.data().structs.alloc(res)))
+ }
+
+ fn lower_fields(&mut self, strukt_kind: &ast::StructKind) -> Fields {
+ match strukt_kind {
+ ast::StructKind::Record(it) => {
+ let range = self.lower_record_fields(it);
+ Fields::Record(range)
+ }
+ ast::StructKind::Tuple(it) => {
+ let range = self.lower_tuple_fields(it);
+ Fields::Tuple(range)
+ }
+ ast::StructKind::Unit => Fields::Unit,
+ }
+ }
+
+ fn lower_record_fields(&mut self, fields: &ast::RecordFieldList) -> IdxRange<Field> {
+ let start = self.next_field_idx();
+ for field in fields.fields() {
+ if let Some(data) = self.lower_record_field(&field) {
+ let idx = self.data().fields.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene()));
+ }
+ }
+ let end = self.next_field_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_record_field(&mut self, field: &ast::RecordField) -> Option<Field> {
+ let name = field.name()?.as_name();
+ let visibility = self.lower_visibility(field);
+ let type_ref = self.lower_type_ref_opt(field.ty());
+ let res = Field { name, type_ref, visibility };
+ Some(res)
+ }
+
+ fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldList) -> IdxRange<Field> {
+ let start = self.next_field_idx();
+ for (i, field) in fields.fields().enumerate() {
+ let data = self.lower_tuple_field(i, &field);
+ let idx = self.data().fields.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene()));
+ }
+ let end = self.next_field_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field {
+ let name = Name::new_tuple_field(idx);
+ let visibility = self.lower_visibility(field);
+ let type_ref = self.lower_type_ref_opt(field.ty());
+ Field { name, type_ref, visibility }
+ }
+
+ fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
+ let visibility = self.lower_visibility(union);
+ let name = union.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Union, union);
+ let fields = match union.record_field_list() {
+ Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
+ None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
+ };
+ let ast_id = self.source_ast_id_map.ast_id(union);
+ let res = Union { name, visibility, generic_params, fields, ast_id };
+ Some(id(self.data().unions.alloc(res)))
+ }
+
+ fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
+ let visibility = self.lower_visibility(enum_);
+ let name = enum_.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_);
+ let variants = match &enum_.variant_list() {
+ Some(variant_list) => self.lower_variants(variant_list),
+ None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
+ };
+ let ast_id = self.source_ast_id_map.ast_id(enum_);
+ let res = Enum { name, visibility, generic_params, variants, ast_id };
+ Some(id(self.data().enums.alloc(res)))
+ }
+
+ fn lower_variants(&mut self, variants: &ast::VariantList) -> IdxRange<Variant> {
+ let start = self.next_variant_idx();
+ for variant in variants.variants() {
+ if let Some(data) = self.lower_variant(&variant) {
+ let idx = self.data().variants.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &variant, self.hygiene()));
+ }
+ }
+ let end = self.next_variant_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_variant(&mut self, variant: &ast::Variant) -> Option<Variant> {
+ let name = variant.name()?.as_name();
+ let fields = self.lower_fields(&variant.kind());
+ let res = Variant { name, fields };
+ Some(res)
+ }
+
+ /// Lowers an `fn` item (also used for trait/impl/extern-block fns):
+ /// parameters, return type (with `async fn` desugaring), ABI, and
+ /// keyword flags. Returns `None` when the function has no name.
+ fn lower_function(&mut self, func: &ast::Fn) -> Option<FileItemTreeId<Function>> {
+ let visibility = self.lower_visibility(func);
+ let name = func.name()?.as_name();
+
+ let mut has_self_param = false;
+ let start_param = self.next_param_idx();
+ if let Some(param_list) = func.param_list() {
+ if let Some(self_param) = param_list.self_param() {
+ // An explicitly typed `self: T` is used as-is; otherwise build
+ // `Self` / `&Self` / `&mut Self` from the param kind.
+ let self_type = match self_param.ty() {
+ Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref),
+ None => {
+ let self_type = TypeRef::Path(name![Self].into());
+ match self_param.kind() {
+ ast::SelfParamKind::Owned => self_type,
+ ast::SelfParamKind::Ref => TypeRef::Reference(
+ Box::new(self_type),
+ self_param.lifetime().as_ref().map(LifetimeRef::new),
+ Mutability::Shared,
+ ),
+ ast::SelfParamKind::MutRef => TypeRef::Reference(
+ Box::new(self_type),
+ self_param.lifetime().as_ref().map(LifetimeRef::new),
+ Mutability::Mut,
+ ),
+ }
+ }
+ };
+ let ty = Interned::new(self_type);
+ // `self` is stored as an unnamed first parameter.
+ let idx = self.data().params.alloc(Param::Normal(None, ty));
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &self_param, self.hygiene()));
+ has_self_param = true;
+ }
+ for param in param_list.params() {
+ let idx = match param.dotdotdot_token() {
+ Some(_) => self.data().params.alloc(Param::Varargs),
+ None => {
+ let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
+ let ty = Interned::new(type_ref);
+ let mut pat = param.pat();
+ // FIXME: This really shouldn't be here, in fact FunctionData/ItemTree's function shouldn't know about
+ // pattern names at all
+ // Peel `ref` patterns until we hit a plain ident (or give up).
+ let name = 'name: loop {
+ match pat {
+ Some(ast::Pat::RefPat(ref_pat)) => pat = ref_pat.pat(),
+ Some(ast::Pat::IdentPat(ident)) => {
+ break 'name ident.name().map(|it| it.as_name())
+ }
+ _ => break 'name None,
+ }
+ };
+ self.data().params.alloc(Param::Normal(name, ty))
+ }
+ };
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &param, self.hygiene()));
+ }
+ }
+ let end_param = self.next_param_idx();
+ let params = IdxRange::new(start_param..end_param);
+
+ // `fn f() ->` with a missing type is an error; a missing arrow means `()`.
+ let ret_type = match func.ret_type() {
+ Some(rt) => match rt.ty() {
+ Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref),
+ None if rt.thin_arrow_token().is_some() => TypeRef::Error,
+ None => TypeRef::unit(),
+ },
+ None => TypeRef::unit(),
+ };
+
+ // `async fn` desugaring: the declared return type becomes
+ // `impl Future<Output = T>`, and the original `T` is preserved
+ // separately as `async_ret_type`.
+ let (ret_type, async_ret_type) = if func.async_token().is_some() {
+ let async_ret_type = ret_type.clone();
+ let future_impl = desugar_future_path(ret_type);
+ let ty_bound = Interned::new(TypeBound::Path(future_impl, TraitBoundModifier::None));
+ (TypeRef::ImplTrait(vec![ty_bound]), Some(async_ret_type))
+ } else {
+ (ret_type, None)
+ };
+
+ let abi = func.abi().map(lower_abi);
+
+ let ast_id = self.source_ast_id_map.ast_id(func);
+
+ let mut flags = FnFlags::default();
+ if func.body().is_some() {
+ flags |= FnFlags::HAS_BODY;
+ }
+ if has_self_param {
+ flags |= FnFlags::HAS_SELF_PARAM;
+ }
+ if func.default_token().is_some() {
+ flags |= FnFlags::HAS_DEFAULT_KW;
+ }
+ if func.const_token().is_some() {
+ flags |= FnFlags::HAS_CONST_KW;
+ }
+ if func.async_token().is_some() {
+ flags |= FnFlags::HAS_ASYNC_KW;
+ }
+ if func.unsafe_token().is_some() {
+ flags |= FnFlags::HAS_UNSAFE_KW;
+ }
+
+ let mut res = Function {
+ name,
+ visibility,
+ explicit_generic_params: Interned::new(GenericParams::default()),
+ abi,
+ params,
+ ret_type: Interned::new(ret_type),
+ async_ret_type: async_ret_type.map(Interned::new),
+ ast_id,
+ flags,
+ };
+ // Generic params are lowered last: `GenericsOwner::Function` needs the
+ // partially-built `res` (for `impl Trait` in argument position — see
+ // the `GenericsOwner` docs).
+ res.explicit_generic_params =
+ self.lower_generic_params(GenericsOwner::Function(&res), func);
+
+ Some(id(self.data().functions.alloc(res)))
+ }
+
+ fn lower_type_alias(
+ &mut self,
+ type_alias: &ast::TypeAlias,
+ ) -> Option<FileItemTreeId<TypeAlias>> {
+ let name = type_alias.name()?.as_name();
+ let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
+ let visibility = self.lower_visibility(type_alias);
+ let bounds = self.lower_type_bounds(type_alias);
+ let generic_params = self.lower_generic_params(GenericsOwner::TypeAlias, type_alias);
+ let ast_id = self.source_ast_id_map.ast_id(type_alias);
+ let res = TypeAlias {
+ name,
+ visibility,
+ bounds: bounds.into_boxed_slice(),
+ generic_params,
+ type_ref,
+ ast_id,
+ };
+ Some(id(self.data().type_aliases.alloc(res)))
+ }
+
+ fn lower_static(&mut self, static_: &ast::Static) -> Option<FileItemTreeId<Static>> {
+ let name = static_.name()?.as_name();
+ let type_ref = self.lower_type_ref_opt(static_.ty());
+ let visibility = self.lower_visibility(static_);
+ let mutable = static_.mut_token().is_some();
+ let ast_id = self.source_ast_id_map.ast_id(static_);
+ let res = Static { name, visibility, mutable, type_ref, ast_id };
+ Some(id(self.data().statics.alloc(res)))
+ }
+
+ fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId<Const> {
+ let name = konst.name().map(|it| it.as_name());
+ let type_ref = self.lower_type_ref_opt(konst.ty());
+ let visibility = self.lower_visibility(konst);
+ let ast_id = self.source_ast_id_map.ast_id(konst);
+ let res = Const { name, visibility, type_ref, ast_id };
+ id(self.data().consts.alloc(res))
+ }
+
+ fn lower_module(&mut self, module: &ast::Module) -> Option<FileItemTreeId<Mod>> {
+ let name = module.name()?.as_name();
+ let visibility = self.lower_visibility(module);
+ let kind = if module.semicolon_token().is_some() {
+ ModKind::Outline
+ } else {
+ ModKind::Inline {
+ items: module
+ .item_list()
+ .map(|list| list.items().flat_map(|item| self.lower_mod_item(&item)).collect())
+ .unwrap_or_else(|| {
+ cov_mark::hit!(name_res_works_for_broken_modules);
+ Box::new([]) as Box<[_]>
+ }),
+ }
+ };
+ let ast_id = self.source_ast_id_map.ast_id(module);
+ let res = Mod { name, visibility, kind, ast_id };
+ Some(id(self.data().mods.alloc(res)))
+ }
+
+ fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
+ let name = trait_def.name()?.as_name();
+ let visibility = self.lower_visibility(trait_def);
+ let generic_params = self.lower_generic_params(GenericsOwner::Trait(trait_def), trait_def);
+ let is_auto = trait_def.auto_token().is_some();
+ let is_unsafe = trait_def.unsafe_token().is_some();
+ let items = trait_def.assoc_item_list().map(|list| {
+ list.assoc_items()
+ .filter_map(|item| {
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ self.lower_assoc_item(&item).map(|item| {
+ self.add_attrs(ModItem::from(item).into(), attrs);
+ item
+ })
+ })
+ .collect()
+ });
+ let ast_id = self.source_ast_id_map.ast_id(trait_def);
+ let res = Trait {
+ name,
+ visibility,
+ generic_params,
+ is_auto,
+ is_unsafe,
+ items: items.unwrap_or_default(),
+ ast_id,
+ };
+ Some(id(self.data().traits.alloc(res)))
+ }
+
+ fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
+ let generic_params = self.lower_generic_params(GenericsOwner::Impl, impl_def);
+ // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl
+ // as if it was an non-trait impl. Ideally we want to create a unique missing ref that only
+ // equals itself.
+ let target_trait = impl_def.trait_().and_then(|tr| self.lower_trait_ref(&tr));
+ let self_ty = self.lower_type_ref(&impl_def.self_ty()?);
+ let is_negative = impl_def.excl_token().is_some();
+
+ // We cannot use `assoc_items()` here as that does not include macro calls.
+ let items = impl_def
+ .assoc_item_list()
+ .into_iter()
+ .flat_map(|it| it.assoc_items())
+ .filter_map(|item| {
+ let assoc = self.lower_assoc_item(&item)?;
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ self.add_attrs(ModItem::from(assoc).into(), attrs);
+ Some(assoc)
+ })
+ .collect();
+ let ast_id = self.source_ast_id_map.ast_id(impl_def);
+ let res = Impl { generic_params, target_trait, self_ty, is_negative, items, ast_id };
+ Some(id(self.data().impls.alloc(res)))
+ }
+
+ fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Import>> {
+ let visibility = self.lower_visibility(use_item);
+ let ast_id = self.source_ast_id_map.ast_id(use_item);
+ let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
+
+ let res = Import { visibility, ast_id, use_tree };
+ Some(id(self.data().imports.alloc(res)))
+ }
+
+ fn lower_extern_crate(
+ &mut self,
+ extern_crate: &ast::ExternCrate,
+ ) -> Option<FileItemTreeId<ExternCrate>> {
+ let name = extern_crate.name_ref()?.as_name();
+ let alias = extern_crate.rename().map(|a| {
+ a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
+ });
+ let visibility = self.lower_visibility(extern_crate);
+ let ast_id = self.source_ast_id_map.ast_id(extern_crate);
+
+ let res = ExternCrate { name, alias, visibility, ast_id };
+ Some(id(self.data().extern_crates.alloc(res)))
+ }
+
+ fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
+ let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
+ let ast_id = self.source_ast_id_map.ast_id(m);
+ let expand_to = hir_expand::ExpandTo::from_call_site(m);
+ let res = MacroCall { path, ast_id, expand_to };
+ Some(id(self.data().macro_calls.alloc(res)))
+ }
+
+ fn lower_macro_rules(&mut self, m: &ast::MacroRules) -> Option<FileItemTreeId<MacroRules>> {
+ let name = m.name().map(|it| it.as_name())?;
+ let ast_id = self.source_ast_id_map.ast_id(m);
+
+ let res = MacroRules { name, ast_id };
+ Some(id(self.data().macro_rules.alloc(res)))
+ }
+
+ fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<MacroDef>> {
+ let name = m.name().map(|it| it.as_name())?;
+
+ let ast_id = self.source_ast_id_map.ast_id(m);
+ let visibility = self.lower_visibility(m);
+
+ let res = MacroDef { name, ast_id, visibility };
+ Some(id(self.data().macro_defs.alloc(res)))
+ }
+
+ fn lower_extern_block(&mut self, block: &ast::ExternBlock) -> FileItemTreeId<ExternBlock> {
+ let ast_id = self.source_ast_id_map.ast_id(block);
+ let abi = block.abi().map(lower_abi);
+ let children: Box<[_]> = block.extern_item_list().map_or(Box::new([]), |list| {
+ list.extern_items()
+ .filter_map(|item| {
+ // Note: All items in an `extern` block need to be lowered as if they're outside of one
+ // (in other words, the knowledge that they're in an extern block must not be used).
+ // This is because an extern block can contain macros whose ItemTree's top-level items
+ // should be considered to be in an extern block too.
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ let id: ModItem = match item {
+ ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
+ ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
+ ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
+ ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
+ };
+ self.add_attrs(id.into(), attrs);
+ Some(id)
+ })
+ .collect()
+ });
+
+ let res = ExternBlock { abi, ast_id, children };
+ id(self.data().extern_blocks.alloc(res))
+ }
+
+ /// Lowers the generic parameters and `where` clauses of `node`. Traits
+ /// additionally get an implicit `Self` type parameter plus their
+ /// super-trait bounds as bounds on `Self`; impls add nothing implicit.
+ fn lower_generic_params(
+ &mut self,
+ owner: GenericsOwner<'_>,
+ node: &dyn ast::HasGenericParams,
+ ) -> Interned<GenericParams> {
+ let mut generics = GenericParams::default();
+ match owner {
+ GenericsOwner::Function(_)
+ | GenericsOwner::Struct
+ | GenericsOwner::Enum
+ | GenericsOwner::Union
+ | GenericsOwner::TypeAlias => {
+ generics.fill(&self.body_ctx, node);
+ }
+ GenericsOwner::Trait(trait_def) => {
+ // traits get the Self type as an implicit first type parameter
+ generics.type_or_consts.alloc(
+ TypeParamData {
+ name: Some(name![Self]),
+ default: None,
+ provenance: TypeParamProvenance::TraitSelf,
+ }
+ .into(),
+ );
+ // add super traits as bounds on Self
+ // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
+ let self_param = TypeRef::Path(name![Self].into());
+ generics.fill_bounds(&self.body_ctx, trait_def, Either::Left(self_param));
+ generics.fill(&self.body_ctx, node);
+ }
+ GenericsOwner::Impl => {
+ // Note that we don't add `Self` here: in `impl`s, `Self` is not a
+ // type-parameter, but rather is a type-alias for impl's target
+ // type, so this is handled by the resolver.
+ generics.fill(&self.body_ctx, node);
+ }
+ }
+
+ generics.shrink_to_fit();
+ Interned::new(generics)
+ }
+
+ fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Vec<Interned<TypeBound>> {
+ match node.type_bound_list() {
+ Some(bound_list) => bound_list
+ .bounds()
+ .map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it)))
+ .collect(),
+ None => Vec::new(),
+ }
+ }
+
+ fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
+ let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
+ self.data().vis.alloc(vis)
+ }
+
+ fn lower_trait_ref(&mut self, trait_ref: &ast::Type) -> Option<Interned<TraitRef>> {
+ let trait_ref = TraitRef::from_ast(&self.body_ctx, trait_ref.clone())?;
+ Some(Interned::new(trait_ref))
+ }
+
+ fn lower_type_ref(&mut self, type_ref: &ast::Type) -> Interned<TypeRef> {
+ let tyref = TypeRef::from_ast(&self.body_ctx, type_ref.clone());
+ Interned::new(tyref)
+ }
+
+ fn lower_type_ref_opt(&mut self, type_ref: Option<ast::Type>) -> Interned<TypeRef> {
+ match type_ref.map(|ty| self.lower_type_ref(&ty)) {
+ Some(it) => it,
+ None => Interned::new(TypeRef::Error),
+ }
+ }
+
+ // The index that the next allocation in each arena will receive: the
+ // arena's current length (`data` being `None` means nothing has been
+ // allocated yet, i.e. index 0). Used to build the `IdxRange`s
+ // bracketing a batch of field/variant/param allocations.
+ fn next_field_idx(&self) -> Idx<Field> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.fields.len() as u32),
+ ))
+ }
+ fn next_variant_idx(&self) -> Idx<Variant> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.variants.len() as u32),
+ ))
+ }
+ fn next_param_idx(&self) -> Idx<Param> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.params.len() as u32),
+ ))
+ }
+}
+
+/// Builds the `core::future::Future<Output = orig>` path used to desugar
+/// an `async fn`'s return type into `impl Future` (see `lower_function`).
+fn desugar_future_path(orig: TypeRef) -> Path {
+ let path = path![core::future::Future];
+ // Only the final `Future` segment carries generic args (the
+ // `Output = …` binding); every earlier segment gets `None`.
+ let mut generic_args: Vec<_> =
+ std::iter::repeat(None).take(path.segments().len() - 1).collect();
+ let mut last = GenericArgs::empty();
+ let binding =
+ AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() };
+ last.bindings.push(binding);
+ generic_args.push(Some(Interned::new(last)));
+
+ Path::from_known_path(path, generic_args)
+}
+
+enum GenericsOwner<'a> {
+ /// We need access to the partially-lowered `Function` for lowering `impl Trait` in argument
+ /// position.
+ Function(&'a Function),
+ Struct,
+ Enum,
+ Union,
+ /// The `TraitDef` is needed to fill the source map for the implicit `Self` parameter.
+ Trait(&'a ast::Trait),
+ TypeAlias,
+ Impl,
+}
+
+fn lower_abi(abi: ast::Abi) -> Interned<str> {
+ // FIXME: Abi::abi() -> Option<SyntaxToken>?
+ match abi.syntax().last_token() {
+ Some(tok) if tok.kind() == SyntaxKind::STRING => {
+ // FIXME: Better way to unescape?
+ Interned::new_str(tok.text().trim_matches('"'))
+ }
+ _ => {
+ // `extern` default to be `extern "C"`.
+ Interned::new_str("C")
+ }
+ }
+}
+
+/// State for lowering a single `use` item's tree of paths.
+struct UseTreeLowering<'a> {
+ db: &'a dyn DefDatabase,
+ hygiene: &'a Hygiene,
+ // Source map: each lowered `UseTree` stores an index into this arena
+ // pointing back at its AST node.
+ mapping: Arena<ast::UseTree>,
+}
+
+impl UseTreeLowering<'_> {
+ /// Recursively lowers one AST use tree, registering each lowered node
+ /// in `self.mapping`. Returns `None` for invalid trees (unresolvable
+ /// paths, renamed globs, `{ as Name}`-style leaves).
+ fn lower_use_tree(&mut self, tree: ast::UseTree) -> Option<UseTree> {
+ if let Some(use_tree_list) = tree.use_tree_list() {
+ let prefix = match tree.path() {
+ // E.g. use something::{{{inner}}};
+ None => None,
+ // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
+ // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
+ Some(path) => {
+ match ModPath::from_src(self.db.upcast(), path, self.hygiene) {
+ Some(it) => Some(it),
+ None => return None, // FIXME: report errors somewhere
+ }
+ }
+ };
+
+ let list =
+ use_tree_list.use_trees().filter_map(|tree| self.lower_use_tree(tree)).collect();
+
+ Some(
+ self.use_tree(
+ UseTreeKind::Prefixed { prefix: prefix.map(Interned::new), list },
+ tree,
+ ),
+ )
+ } else {
+ let is_glob = tree.star_token().is_some();
+ let path = match tree.path() {
+ Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?),
+ None => None,
+ };
+ let alias = tree.rename().map(|a| {
+ a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
+ });
+ if alias.is_some() && is_glob {
+ return None;
+ }
+
+ match (path, alias, is_glob) {
+ (path, None, true) => {
+ if path.is_none() {
+ cov_mark::hit!(glob_enum_group);
+ }
+ Some(self.use_tree(UseTreeKind::Glob { path: path.map(Interned::new) }, tree))
+ }
+ // Globs can't be renamed
+ (_, Some(_), true) | (None, None, false) => None,
+ // `bla::{ as Name}` is invalid
+ (None, Some(_), false) => None,
+ (Some(path), alias, false) => Some(
+ self.use_tree(UseTreeKind::Single { path: Interned::new(path), alias }, tree),
+ ),
+ }
+ }
+ }
+
+ fn use_tree(&mut self, kind: UseTreeKind, ast: ast::UseTree) -> UseTree {
+ let index = self.mapping.alloc(ast);
+ UseTree { index, kind }
+ }
+}
+
+/// Lowers a `use` tree, returning it together with the arena mapping each
+/// lowered `UseTree` index back to its AST node (consumed by
+/// `Import::use_tree_to_ast`). `None` when the tree is invalid.
+pub(super) fn lower_use_tree(
+ db: &dyn DefDatabase,
+ hygiene: &Hygiene,
+ tree: ast::UseTree,
+) -> Option<(UseTree, Arena<ast::UseTree>)> {
+ let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
+ let tree = lowering.lower_use_tree(tree)?;
+ Some((tree, lowering.mapping))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
new file mode 100644
index 000000000..f12d9a127
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -0,0 +1,754 @@
+//! `ItemTree` debug printer.
+
+use std::fmt::{self, Write};
+
+use itertools::Itertools;
+
+use crate::{
+ attr::RawAttrs,
+ generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
+ path::GenericArg,
+ type_ref::TraitBoundModifier,
+ visibility::RawVisibility,
+};
+
+use super::*;
+
+/// Renders the whole `ItemTree` as Rust-like pseudo-code for debugging/tests.
+///
+/// Top-level (inner) attributes are printed first, then every top-level item.
+/// The returned string always ends with exactly one trailing newline.
+pub(super) fn print_item_tree(tree: &ItemTree) -> String {
+    let mut p = Printer { tree, buf: String::new(), indent_level: 0, needs_indent: true };
+
+    if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) {
+        p.print_attrs(attrs, true);
+    }
+    p.blank();
+
+    for item in tree.top_level_items() {
+        p.print_mod_item(*item);
+    }
+
+    // Normalize the tail: strip whatever newlines accumulated, keep exactly one.
+    let mut s = p.buf.trim_end_matches('\n').to_string();
+    s.push('\n');
+    s
+}
+
+/// `write!` that discards the `fmt::Result` — writing into the `Printer`'s
+/// `String` buffer cannot fail.
+macro_rules! w {
+    ($dst:expr, $($arg:tt)*) => {
+        { let _ = write!($dst, $($arg)*); }
+    };
+}
+
+/// `writeln!` that discards the `fmt::Result`, mirroring `w!` above.
+macro_rules! wln {
+    ($dst:expr) => {
+        { let _ = writeln!($dst); }
+    };
+    ($dst:expr, $($arg:tt)*) => {
+        { let _ = writeln!($dst, $($arg)*); }
+    };
+}
+
+/// Stateful pretty-printer over an [`ItemTree`].
+struct Printer<'a> {
+    /// The tree being rendered.
+    tree: &'a ItemTree,
+    /// Output accumulated so far.
+    buf: String,
+    /// Current indentation depth; applied by the `Write` impl below.
+    indent_level: usize,
+    /// True when the previous write ended a line, so the next write must
+    /// emit indentation first (maintained by the `Write` impl below).
+    needs_indent: bool,
+}
+
+impl<'a> Printer<'a> {
+    /// Runs `f` with the indentation level bumped by one, starting on a fresh
+    /// line, then strips the trailing newlines `f` left behind so the caller
+    /// decides what follows (e.g. a closing brace on its own line).
+    fn indented(&mut self, f: impl FnOnce(&mut Self)) {
+        self.indent_level += 1;
+        wln!(self);
+        f(self);
+        self.indent_level -= 1;
+        self.buf = self.buf.trim_end_matches('\n').to_string();
+    }
+
+    /// Ensures that a blank line is output before the next text.
+    fn blank(&mut self) {
+        // Inspect the last two characters to decide how many newlines to add.
+        let mut iter = self.buf.chars().rev().fuse();
+        match (iter.next(), iter.next()) {
+            // Already ends in a blank line (or buffer is empty): nothing to do.
+            (Some('\n'), Some('\n') | None) | (None, None) => {}
+            // Ends in a single newline: one more makes the line blank.
+            (Some('\n'), Some(_)) => {
+                self.buf.push('\n');
+            }
+            // Mid-line: terminate the line, then add the blank one.
+            (Some(_), _) => {
+                self.buf.push('\n');
+                self.buf.push('\n');
+            }
+            // A fused iterator cannot yield None then Some.
+            (None, Some(_)) => unreachable!(),
+        }
+    }
+
+    /// Emits a single separating space, unless the buffer is empty or already
+    /// ends in a space or newline.
+    fn whitespace(&mut self) {
+        match self.buf.chars().next_back() {
+            None | Some('\n' | ' ') => {}
+            _ => self.buf.push(' '),
+        }
+    }
+
+    /// Prints each attribute on its own line; `inner` selects the `#![...]`
+    /// form over `#[...]`.
+    fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool) {
+        let inner = if inner { "!" } else { "" };
+        for attr in &**attrs {
+            wln!(
+                self,
+                "#{}[{}{}]",
+                inner,
+                attr.path,
+                attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
+            );
+        }
+    }
+
+    /// Prints the (outer) attributes attached to `of` in the tree, if any.
+    fn print_attrs_of(&mut self, of: impl Into<AttrOwner>) {
+        if let Some(attrs) = self.tree.attrs.get(&of.into()) {
+            self.print_attrs(attrs, false);
+        }
+    }
+
+    /// Prints a visibility followed by a trailing space. Module-scoped
+    /// visibility is rendered as `pub(<path>)` (e.g. `pub(self)`, `pub(super)`).
+    fn print_visibility(&mut self, vis: RawVisibilityId) {
+        match &self.tree[vis] {
+            RawVisibility::Module(path) => w!(self, "pub({}) ", path),
+            RawVisibility::Public => w!(self, "pub "),
+        };
+    }
+
+    /// Prints a field list: `{ ... }` for record fields, `( ... )` for tuple
+    /// fields (one field per line, including tuple indices as names), and
+    /// nothing for unit structs/variants.
+    fn print_fields(&mut self, fields: &Fields) {
+        match fields {
+            Fields::Record(fields) => {
+                self.whitespace();
+                w!(self, "{{");
+                self.indented(|this| {
+                    for field in fields.clone() {
+                        let Field { visibility, name, type_ref } = &this.tree[field];
+                        this.print_attrs_of(field);
+                        this.print_visibility(*visibility);
+                        w!(this, "{}: ", name);
+                        this.print_type_ref(type_ref);
+                        wln!(this, ",");
+                    }
+                });
+                w!(self, "}}");
+            }
+            Fields::Tuple(fields) => {
+                w!(self, "(");
+                self.indented(|this| {
+                    for field in fields.clone() {
+                        let Field { visibility, name, type_ref } = &this.tree[field];
+                        this.print_attrs_of(field);
+                        this.print_visibility(*visibility);
+                        w!(this, "{}: ", name);
+                        this.print_type_ref(type_ref);
+                        wln!(this, ",");
+                    }
+                });
+                w!(self, ")");
+            }
+            Fields::Unit => {}
+        }
+    }
+
+    /// Prints fields and the `where` clause in the order Rust syntax requires:
+    /// record structs put `where` before the braces, tuple structs after the
+    /// parens, and unit structs print `where` alone.
+    fn print_fields_and_where_clause(&mut self, fields: &Fields, params: &GenericParams) {
+        match fields {
+            Fields::Record(_) => {
+                if self.print_where_clause(params) {
+                    wln!(self);
+                }
+                self.print_fields(fields);
+            }
+            Fields::Unit => {
+                self.print_where_clause(params);
+                self.print_fields(fields);
+            }
+            Fields::Tuple(_) => {
+                self.print_fields(fields);
+                self.print_where_clause(params);
+            }
+        }
+    }
+
+    /// Recursively prints a lowered `use` tree: single imports (with optional
+    /// `as` alias), glob imports, and prefixed `{...}` groups.
+    fn print_use_tree(&mut self, use_tree: &UseTree) {
+        match &use_tree.kind {
+            UseTreeKind::Single { path, alias } => {
+                w!(self, "{}", path);
+                if let Some(alias) = alias {
+                    w!(self, " as {}", alias);
+                }
+            }
+            UseTreeKind::Glob { path } => {
+                if let Some(path) = path {
+                    w!(self, "{}::", path);
+                }
+                w!(self, "*");
+            }
+            UseTreeKind::Prefixed { prefix, list } => {
+                if let Some(prefix) = prefix {
+                    w!(self, "{}::", prefix);
+                }
+                w!(self, "{{");
+                for (i, tree) in list.iter().enumerate() {
+                    if i != 0 {
+                        w!(self, ", ");
+                    }
+                    self.print_use_tree(tree);
+                }
+                w!(self, "}}");
+            }
+        }
+    }
+
+    /// Prints a single module-level item (with its attributes), dispatching on
+    /// the item kind. Bodies are elided as `{ ... }`; initializers as `_`.
+    /// Always leaves a blank line after the item.
+    fn print_mod_item(&mut self, item: ModItem) {
+        self.print_attrs_of(item);
+
+        match item {
+            ModItem::Import(it) => {
+                let Import { visibility, use_tree, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "use ");
+                self.print_use_tree(use_tree);
+                wln!(self, ";");
+            }
+            ModItem::ExternCrate(it) => {
+                let ExternCrate { name, alias, visibility, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "extern crate {}", name);
+                if let Some(alias) = alias {
+                    w!(self, " as {}", alias);
+                }
+                wln!(self, ";");
+            }
+            ModItem::ExternBlock(it) => {
+                let ExternBlock { abi, ast_id: _, children } = &self.tree[it];
+                w!(self, "extern ");
+                if let Some(abi) = abi {
+                    w!(self, "\"{}\" ", abi);
+                }
+                w!(self, "{{");
+                self.indented(|this| {
+                    for child in &**children {
+                        this.print_mod_item(*child);
+                    }
+                });
+                wln!(self, "}}");
+            }
+            ModItem::Function(it) => {
+                let Function {
+                    name,
+                    visibility,
+                    explicit_generic_params,
+                    abi,
+                    params,
+                    ret_type,
+                    async_ret_type: _,
+                    ast_id: _,
+                    flags,
+                } = &self.tree[it];
+                self.print_visibility(*visibility);
+                // Qualifiers in declaration order: default, const, async, unsafe, extern.
+                if flags.contains(FnFlags::HAS_DEFAULT_KW) {
+                    w!(self, "default ");
+                }
+                if flags.contains(FnFlags::HAS_CONST_KW) {
+                    w!(self, "const ");
+                }
+                if flags.contains(FnFlags::HAS_ASYNC_KW) {
+                    w!(self, "async ");
+                }
+                if flags.contains(FnFlags::HAS_UNSAFE_KW) {
+                    w!(self, "unsafe ");
+                }
+                if let Some(abi) = abi {
+                    w!(self, "extern \"{}\" ", abi);
+                }
+                w!(self, "fn {}", name);
+                self.print_generic_params(explicit_generic_params);
+                w!(self, "(");
+                if !params.is_empty() {
+                    self.indented(|this| {
+                        for (i, param) in params.clone().enumerate() {
+                            this.print_attrs_of(param);
+                            match &this.tree[param] {
+                                Param::Normal(name, ty) => {
+                                    match name {
+                                        Some(name) => w!(this, "{}: ", name),
+                                        None => w!(this, "_: "),
+                                    }
+                                    this.print_type_ref(ty);
+                                    w!(this, ",");
+                                    // The lowered self parameter is a plain first
+                                    // param; mark it with a comment.
+                                    if flags.contains(FnFlags::HAS_SELF_PARAM) && i == 0 {
+                                        wln!(this, " // self");
+                                    } else {
+                                        wln!(this);
+                                    }
+                                }
+                                Param::Varargs => {
+                                    wln!(this, "...");
+                                }
+                            };
+                        }
+                    });
+                }
+                // Return type is always printed, even when it is `()`.
+                w!(self, ") -> ");
+                self.print_type_ref(ret_type);
+                self.print_where_clause(explicit_generic_params);
+                if flags.contains(FnFlags::HAS_BODY) {
+                    wln!(self, " {{ ... }}");
+                } else {
+                    wln!(self, ";");
+                }
+            }
+            ModItem::Struct(it) => {
+                let Struct { visibility, name, fields, generic_params, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "struct {}", name);
+                self.print_generic_params(generic_params);
+                self.print_fields_and_where_clause(fields, generic_params);
+                if matches!(fields, Fields::Record(_)) {
+                    wln!(self);
+                } else {
+                    wln!(self, ";");
+                }
+            }
+            ModItem::Union(it) => {
+                let Union { name, visibility, fields, generic_params, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "union {}", name);
+                self.print_generic_params(generic_params);
+                self.print_fields_and_where_clause(fields, generic_params);
+                if matches!(fields, Fields::Record(_)) {
+                    wln!(self);
+                } else {
+                    wln!(self, ";");
+                }
+            }
+            ModItem::Enum(it) => {
+                let Enum { name, visibility, variants, generic_params, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "enum {}", name);
+                self.print_generic_params(generic_params);
+                self.print_where_clause_and_opening_brace(generic_params);
+                self.indented(|this| {
+                    for variant in variants.clone() {
+                        let Variant { name, fields } = &this.tree[variant];
+                        this.print_attrs_of(variant);
+                        w!(this, "{}", name);
+                        this.print_fields(fields);
+                        wln!(this, ",");
+                    }
+                });
+                wln!(self, "}}");
+            }
+            ModItem::Const(it) => {
+                let Const { name, visibility, type_ref, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "const ");
+                match name {
+                    Some(name) => w!(self, "{}", name),
+                    None => w!(self, "_"),
+                }
+                w!(self, ": ");
+                self.print_type_ref(type_ref);
+                wln!(self, " = _;");
+            }
+            ModItem::Static(it) => {
+                let Static { name, visibility, mutable, type_ref, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "static ");
+                if *mutable {
+                    w!(self, "mut ");
+                }
+                w!(self, "{}: ", name);
+                self.print_type_ref(type_ref);
+                w!(self, " = _;");
+                wln!(self);
+            }
+            ModItem::Trait(it) => {
+                let Trait {
+                    name,
+                    visibility,
+                    is_auto,
+                    is_unsafe,
+                    items,
+                    generic_params,
+                    ast_id: _,
+                } = &self.tree[it];
+                self.print_visibility(*visibility);
+                if *is_unsafe {
+                    w!(self, "unsafe ");
+                }
+                if *is_auto {
+                    w!(self, "auto ");
+                }
+                w!(self, "trait {}", name);
+                self.print_generic_params(generic_params);
+                self.print_where_clause_and_opening_brace(generic_params);
+                self.indented(|this| {
+                    for item in &**items {
+                        this.print_mod_item((*item).into());
+                    }
+                });
+                wln!(self, "}}");
+            }
+            ModItem::Impl(it) => {
+                let Impl { target_trait, self_ty, is_negative, items, generic_params, ast_id: _ } =
+                    &self.tree[it];
+                w!(self, "impl");
+                self.print_generic_params(generic_params);
+                w!(self, " ");
+                if *is_negative {
+                    w!(self, "!");
+                }
+                if let Some(tr) = target_trait {
+                    self.print_path(&tr.path);
+                    w!(self, " for ");
+                }
+                self.print_type_ref(self_ty);
+                self.print_where_clause_and_opening_brace(generic_params);
+                self.indented(|this| {
+                    for item in &**items {
+                        this.print_mod_item((*item).into());
+                    }
+                });
+                wln!(self, "}}");
+            }
+            ModItem::TypeAlias(it) => {
+                let TypeAlias { name, visibility, bounds, type_ref, generic_params, ast_id: _ } =
+                    &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "type {}", name);
+                self.print_generic_params(generic_params);
+                if !bounds.is_empty() {
+                    w!(self, ": ");
+                    self.print_type_bounds(bounds);
+                }
+                if let Some(ty) = type_ref {
+                    w!(self, " = ");
+                    self.print_type_ref(ty);
+                }
+                self.print_where_clause(generic_params);
+                w!(self, ";");
+                wln!(self);
+            }
+            ModItem::Mod(it) => {
+                let Mod { name, visibility, kind, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                w!(self, "mod {}", name);
+                match kind {
+                    ModKind::Inline { items } => {
+                        w!(self, " {{");
+                        self.indented(|this| {
+                            for item in &**items {
+                                this.print_mod_item(*item);
+                            }
+                        });
+                        wln!(self, "}}");
+                    }
+                    ModKind::Outline => {
+                        wln!(self, ";");
+                    }
+                }
+            }
+            ModItem::MacroCall(it) => {
+                let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
+                wln!(self, "{}!(...);", path);
+            }
+            ModItem::MacroRules(it) => {
+                let MacroRules { name, ast_id: _ } = &self.tree[it];
+                wln!(self, "macro_rules! {} {{ ... }}", name);
+            }
+            ModItem::MacroDef(it) => {
+                let MacroDef { name, visibility, ast_id: _ } = &self.tree[it];
+                self.print_visibility(*visibility);
+                wln!(self, "macro {} {{ ... }}", name);
+            }
+        }
+
+        self.blank();
+    }
+
+    /// Prints a type reference in surface-Rust-like syntax; unresolvable types
+    /// render as `{unknown}` and macro types as `<macro>`.
+    fn print_type_ref(&mut self, type_ref: &TypeRef) {
+        // FIXME: deduplicate with `HirDisplay` impl
+        match type_ref {
+            TypeRef::Never => w!(self, "!"),
+            TypeRef::Placeholder => w!(self, "_"),
+            TypeRef::Tuple(fields) => {
+                w!(self, "(");
+                for (i, field) in fields.iter().enumerate() {
+                    if i != 0 {
+                        w!(self, ", ");
+                    }
+                    self.print_type_ref(field);
+                }
+                w!(self, ")");
+            }
+            TypeRef::Path(path) => self.print_path(path),
+            TypeRef::RawPtr(pointee, mtbl) => {
+                let mtbl = match mtbl {
+                    Mutability::Shared => "*const",
+                    Mutability::Mut => "*mut",
+                };
+                w!(self, "{} ", mtbl);
+                self.print_type_ref(pointee);
+            }
+            TypeRef::Reference(pointee, lt, mtbl) => {
+                let mtbl = match mtbl {
+                    Mutability::Shared => "",
+                    Mutability::Mut => "mut ",
+                };
+                w!(self, "&");
+                if let Some(lt) = lt {
+                    w!(self, "{} ", lt.name);
+                }
+                w!(self, "{}", mtbl);
+                self.print_type_ref(pointee);
+            }
+            TypeRef::Array(elem, len) => {
+                w!(self, "[");
+                self.print_type_ref(elem);
+                w!(self, "; {}]", len);
+            }
+            TypeRef::Slice(elem) => {
+                w!(self, "[");
+                self.print_type_ref(elem);
+                w!(self, "]");
+            }
+            TypeRef::Fn(args_and_ret, varargs) => {
+                // The return type is stored as the last element of the list.
+                let ((_, return_type), args) =
+                    args_and_ret.split_last().expect("TypeRef::Fn is missing return type");
+                w!(self, "fn(");
+                for (i, (_, typeref)) in args.iter().enumerate() {
+                    if i != 0 {
+                        w!(self, ", ");
+                    }
+                    self.print_type_ref(typeref);
+                }
+                if *varargs {
+                    if !args.is_empty() {
+                        w!(self, ", ");
+                    }
+                    w!(self, "...");
+                }
+                w!(self, ") -> ");
+                self.print_type_ref(return_type);
+            }
+            TypeRef::Macro(_ast_id) => {
+                w!(self, "<macro>");
+            }
+            TypeRef::Error => w!(self, "{{unknown}}"),
+            TypeRef::ImplTrait(bounds) => {
+                w!(self, "impl ");
+                self.print_type_bounds(bounds);
+            }
+            TypeRef::DynTrait(bounds) => {
+                w!(self, "dyn ");
+                self.print_type_bounds(bounds);
+            }
+        }
+    }
+
+    /// Prints a `+`-separated list of trait/lifetime bounds, including `?`
+    /// relaxed bounds and `for<'a>` higher-ranked bounds.
+    fn print_type_bounds(&mut self, bounds: &[Interned<TypeBound>]) {
+        for (i, bound) in bounds.iter().enumerate() {
+            if i != 0 {
+                w!(self, " + ");
+            }
+
+            match bound.as_ref() {
+                TypeBound::Path(path, modifier) => {
+                    match modifier {
+                        TraitBoundModifier::None => (),
+                        TraitBoundModifier::Maybe => w!(self, "?"),
+                    }
+                    self.print_path(path)
+                }
+                TypeBound::ForLifetime(lifetimes, path) => {
+                    w!(self, "for<{}> ", lifetimes.iter().format(", "));
+                    self.print_path(path);
+                }
+                TypeBound::Lifetime(lt) => w!(self, "{}", lt.name),
+                TypeBound::Error => w!(self, "{{unknown}}"),
+            }
+        }
+    }
+
+    /// Prints a (possibly generic) path: optional `<Ty>::` type anchor or path
+    /// kind prefix, then `::`-separated segments with their generic arguments
+    /// and associated-type bindings.
+    fn print_path(&mut self, path: &Path) {
+        match path.type_anchor() {
+            Some(anchor) => {
+                w!(self, "<");
+                self.print_type_ref(anchor);
+                w!(self, ">::");
+            }
+            None => match path.kind() {
+                PathKind::Plain => {}
+                // `Super(0)` is how a `self::` path is lowered.
+                PathKind::Super(0) => w!(self, "self::"),
+                PathKind::Super(n) => {
+                    for _ in 0..*n {
+                        w!(self, "super::");
+                    }
+                }
+                PathKind::Crate => w!(self, "crate::"),
+                PathKind::Abs => w!(self, "::"),
+                PathKind::DollarCrate(_) => w!(self, "$crate::"),
+            },
+        }
+
+        for (i, segment) in path.segments().iter().enumerate() {
+            if i != 0 {
+                w!(self, "::");
+            }
+
+            w!(self, "{}", segment.name);
+            if let Some(generics) = segment.args_and_bindings {
+                // NB: these are all in type position, so `::<` turbofish syntax is not necessary
+                w!(self, "<");
+                let mut first = true;
+                let args = if generics.has_self_type {
+                    // A lowered `<T as Trait>::...` stores `T` as the first arg;
+                    // render it as an explicit `Self=` argument.
+                    let (self_ty, args) = generics.args.split_first().unwrap();
+                    w!(self, "Self=");
+                    self.print_generic_arg(self_ty);
+                    first = false;
+                    args
+                } else {
+                    &generics.args
+                };
+                for arg in args {
+                    if !first {
+                        w!(self, ", ");
+                    }
+                    first = false;
+                    self.print_generic_arg(arg);
+                }
+                for binding in &generics.bindings {
+                    if !first {
+                        w!(self, ", ");
+                    }
+                    first = false;
+                    w!(self, "{}", binding.name);
+                    if !binding.bounds.is_empty() {
+                        w!(self, ": ");
+                        self.print_type_bounds(&binding.bounds);
+                    }
+                    if let Some(ty) = &binding.type_ref {
+                        w!(self, " = ");
+                        self.print_type_ref(ty);
+                    }
+                }
+
+                w!(self, ">");
+            }
+        }
+    }
+
+    /// Prints a single generic argument: a type, a const expression, or a
+    /// lifetime.
+    fn print_generic_arg(&mut self, arg: &GenericArg) {
+        match arg {
+            GenericArg::Type(ty) => self.print_type_ref(ty),
+            GenericArg::Const(c) => w!(self, "{}", c),
+            GenericArg::Lifetime(lt) => w!(self, "{}", lt.name),
+        }
+    }
+
+    /// Prints the `<...>` generic parameter list (lifetimes first, then type
+    /// and const parameters). Emits nothing when there are no parameters.
+    /// Unnamed type parameters render as `_anon_<raw-index>`.
+    fn print_generic_params(&mut self, params: &GenericParams) {
+        if params.type_or_consts.is_empty() && params.lifetimes.is_empty() {
+            return;
+        }
+
+        w!(self, "<");
+        let mut first = true;
+        for (_, lt) in params.lifetimes.iter() {
+            if !first {
+                w!(self, ", ");
+            }
+            first = false;
+            w!(self, "{}", lt.name);
+        }
+        for (idx, x) in params.type_or_consts.iter() {
+            if !first {
+                w!(self, ", ");
+            }
+            first = false;
+            match x {
+                TypeOrConstParamData::TypeParamData(ty) => match &ty.name {
+                    Some(name) => w!(self, "{}", name),
+                    None => w!(self, "_anon_{}", idx.into_raw()),
+                },
+                TypeOrConstParamData::ConstParamData(konst) => {
+                    w!(self, "const {}: ", konst.name);
+                    self.print_type_ref(&konst.ty);
+                }
+            }
+        }
+        w!(self, ">");
+    }
+
+    /// Prints the `where` clause (if any) and then the opening `{`: on its own
+    /// line after a `where` clause, otherwise space-separated on the same line.
+    fn print_where_clause_and_opening_brace(&mut self, params: &GenericParams) {
+        if self.print_where_clause(params) {
+            w!(self, "\n{{");
+        } else {
+            self.whitespace();
+            w!(self, "{{");
+        }
+    }
+
+    /// Prints the `where` clause, one predicate per line. Returns `true` if
+    /// anything was printed so the caller can adjust following layout.
+    fn print_where_clause(&mut self, params: &GenericParams) -> bool {
+        if params.where_predicates.is_empty() {
+            return false;
+        }
+
+        w!(self, "\nwhere");
+        self.indented(|this| {
+            for (i, pred) in params.where_predicates.iter().enumerate() {
+                if i != 0 {
+                    wln!(this, ",");
+                }
+
+                let (target, bound) = match pred {
+                    WherePredicate::TypeBound { target, bound } => (target, bound),
+                    // Lifetime predicates are self-contained; print and move on.
+                    WherePredicate::Lifetime { target, bound } => {
+                        wln!(this, "{}: {},", target.name, bound.name);
+                        continue;
+                    }
+                    WherePredicate::ForLifetime { lifetimes, target, bound } => {
+                        w!(this, "for<");
+                        for (i, lt) in lifetimes.iter().enumerate() {
+                            if i != 0 {
+                                w!(this, ", ");
+                            }
+                            w!(this, "{}", lt);
+                        }
+                        w!(this, "> ");
+                        (target, bound)
+                    }
+                };
+
+                match target {
+                    WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty),
+                    WherePredicateTypeTarget::TypeOrConstParam(id) => {
+                        match &params.type_or_consts[*id].name() {
+                            Some(name) => w!(this, "{}", name),
+                            None => w!(this, "_anon_{}", id.into_raw()),
+                        }
+                    }
+                }
+                w!(this, ": ");
+                this.print_type_bounds(std::slice::from_ref(bound));
+            }
+        });
+        true
+    }
+}
+
+/// All printing funnels through this impl, which inserts the current
+/// indentation at the start of every fresh line.
+impl<'a> Write for Printer<'a> {
+    fn write_str(&mut self, s: &str) -> fmt::Result {
+        // `split_inclusive` keeps the trailing `\n` on each piece, so
+        // `needs_indent` can be derived from the piece itself.
+        for line in s.split_inclusive('\n') {
+            if self.needs_indent {
+                match self.buf.chars().last() {
+                    Some('\n') | None => {}
+                    _ => self.buf.push('\n'),
+                }
+                self.buf.push_str(&"    ".repeat(self.indent_level));
+                self.needs_indent = false;
+            }
+
+            self.buf.push_str(line);
+            self.needs_indent = line.ends_with('\n');
+        }
+
+        Ok(())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
new file mode 100644
index 000000000..5cdf36cc6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
@@ -0,0 +1,360 @@
+use base_db::fixture::WithFixture;
+use expect_test::{expect, Expect};
+
+use crate::{db::DefDatabase, test_db::TestDB};
+
+/// Parses the fixture into a single-file test database, pretty-prints its
+/// `ItemTree`, and compares against the `expect![]` snapshot.
+fn check(ra_fixture: &str, expect: Expect) {
+    let (db, file_id) = TestDB::with_single_file(ra_fixture);
+    let item_tree = db.file_item_tree(file_id.into());
+    let pretty = item_tree.pretty_print();
+    expect.assert_eq(&pretty);
+}
+
+#[test]
+// Use/extern-crate printing: doc comments become `#[doc = ...]` attributes and
+// items without explicit visibility print as `pub(self)`.
+fn imports() {
+    check(
+        r#"
+//! file comment
+#![no_std]
+//! another file comment
+
+extern crate self as renamed;
+pub(super) extern crate bli;
+
+pub use crate::path::{nested, items as renamed, Trait as _};
+use globs::*;
+
+/// docs on import
+use crate::{A, B};
+
+use a::{c, d::{e}};
+        "#,
+        expect![[r##"
+            #![doc = " file comment"]
+            #![no_std]
+            #![doc = " another file comment"]
+
+            pub(self) extern crate self as renamed;
+
+            pub(super) extern crate bli;
+
+            pub use crate::path::{nested, items as renamed, Trait as _};
+
+            pub(self) use globs::*;
+
+            #[doc = " docs on import"]
+            pub(self) use crate::{A, B};
+
+            pub(self) use a::{c, d::{e}};
+        "##]],
+    );
+}
+
+#[test]
+// Extern-block printing: attributes on the block and on each foreign item are
+// preserved; foreign fns get an explicit `-> ()` return.
+fn extern_blocks() {
+    check(
+        r#"
+#[on_extern_block]
+extern "C" {
+    #[on_extern_type]
+    type ExType;
+
+    #[on_extern_static]
+    static EX_STATIC: u8;
+
+    #[on_extern_fn]
+    fn ex_fn();
+}
+        "#,
+        expect![[r##"
+            #[on_extern_block]
+            extern "C" {
+                #[on_extern_type]
+                pub(self) type ExType;
+
+                #[on_extern_static]
+                pub(self) static EX_STATIC: u8 = _;
+
+                #[on_extern_fn]
+                pub(self) fn ex_fn() -> ();
+            }
+        "##]],
+    );
+}
+
+#[test]
+// Struct/union/enum printing: tuple fields get numeric "names" (`0: u8`) and
+// per-field visibility; variant field lists are printed like struct fields.
+fn adts() {
+    check(
+        r#"
+struct Unit;
+
+#[derive(Debug)]
+struct Struct {
+    /// fld docs
+    fld: (),
+}
+
+struct Tuple(#[attr] u8);
+
+union Ize {
+    a: (),
+    b: (),
+}
+
+enum E {
+    /// comment on Unit
+    Unit,
+    /// comment on Tuple
+    Tuple(u8),
+    Struct {
+        /// comment on a: u8
+        a: u8,
+    }
+}
+        "#,
+        expect![[r##"
+            pub(self) struct Unit;
+
+            #[derive(Debug)]
+            pub(self) struct Struct {
+                #[doc = " fld docs"]
+                pub(self) fld: (),
+            }
+
+            pub(self) struct Tuple(
+                #[attr]
+                pub(self) 0: u8,
+            );
+
+            pub(self) union Ize {
+                pub(self) a: (),
+                pub(self) b: (),
+            }
+
+            pub(self) enum E {
+                #[doc = " comment on Unit"]
+                Unit,
+                #[doc = " comment on Tuple"]
+                Tuple(
+                    pub(self) 0: u8,
+                ),
+                Struct {
+                    #[doc = " comment on a: u8"]
+                    pub(self) a: u8,
+                },
+            }
+        "##]],
+    );
+}
+
+#[test]
+// Statics, consts, fns and traits: initializers are elided to `_`, bodies to
+// `{ ... }`; the trait's implicit `Self` parameter and its bounds show up in
+// the generic and `where` lists, and `&self` lowers to `_: &Self, // self`.
+fn misc() {
+    check(
+        r#"
+pub static mut ST: () = ();
+
+const _: Anon = ();
+
+#[attr]
+fn f(#[attr] arg: u8, _: ()) {
+    #![inner_attr_in_fn]
+}
+
+trait Tr: SuperTrait + 'lifetime {
+    type Assoc: AssocBound = Default;
+    fn method(&self);
+}
+        "#,
+        expect![[r##"
+            pub static mut ST: () = _;
+
+            pub(self) const _: Anon = _;
+
+            #[attr]
+            #[inner_attr_in_fn]
+            pub(self) fn f(
+                #[attr]
+                arg: u8,
+                _: (),
+            ) -> () { ... }
+
+            pub(self) trait Tr<Self>
+            where
+                Self: SuperTrait,
+                Self: 'lifetime
+            {
+                pub(self) type Assoc: AssocBound = Default;
+
+                pub(self) fn method(
+                    _: &Self, // self
+                ) -> ();
+            }
+        "##]],
+    );
+}
+
+#[test]
+// Module printing: inner doc comments are merged onto the module as outer
+// `#[doc]` attributes; outline modules print as `mod name;`.
+fn modules() {
+    check(
+        r#"
+/// outer
+mod inline {
+    //! inner
+
+    use super::*;
+
+    fn fn_in_module() {}
+}
+
+mod outline;
+        "#,
+        expect![[r##"
+            #[doc = " outer"]
+            #[doc = " inner"]
+            pub(self) mod inline {
+                pub(self) use super::*;
+
+                pub(self) fn fn_in_module() -> () { ... }
+            }
+
+            pub(self) mod outline;
+        "##]],
+    );
+}
+
+#[test]
+// Macro printing: definitions and calls have their token bodies elided.
+fn macros() {
+    check(
+        r#"
+macro_rules! m {
+    () => {};
+}
+
+pub macro m2() {}
+
+m!();
+        "#,
+        expect![[r#"
+            macro_rules! m { ... }
+
+            pub macro m2 { ... }
+
+            m!(...);
+        "#]],
+    );
+}
+
+#[test]
+// Path-kind printing: `self::`, repeated `super::`, absolute `::`, `crate::`
+// and plain paths all round-trip.
+fn mod_paths() {
+    check(
+        r#"
+struct S {
+    a: self::Ty,
+    b: super::SuperTy,
+    c: super::super::SuperSuperTy,
+    d: ::abs::Path,
+    e: crate::Crate,
+    f: plain::path::Ty,
+}
+        "#,
+        expect![[r#"
+            pub(self) struct S {
+                pub(self) a: self::Ty,
+                pub(self) b: super::SuperTy,
+                pub(self) c: super::super::SuperSuperTy,
+                pub(self) d: ::abs::Path,
+                pub(self) e: crate::Crate,
+                pub(self) f: plain::path::Ty,
+            }
+        "#]],
+    )
+}
+
+#[test]
+// Generic-argument printing: associated-type bindings, qualified paths
+// (rendered with an explicit `Self=` argument) and type anchors.
+fn types() {
+    check(
+        r#"
+struct S {
+    a: Mixed<'a, T, Item=(), OtherItem=u8>,
+    b: <Fully as Qualified>::Syntax,
+    c: <TypeAnchored>::Path::<'a>,
+    d: dyn for<'a> Trait<'a>,
+}
+        "#,
+        expect![[r#"
+            pub(self) struct S {
+                pub(self) a: Mixed<'a, T, Item = (), OtherItem = u8>,
+                pub(self) b: Qualified<Self=Fully>::Syntax,
+                pub(self) c: <TypeAnchored>::Path<'a>,
+                pub(self) d: dyn for<'a> Trait<'a>,
+            }
+        "#]],
+    )
+}
+
+#[test]
+// Generic-parameter printing: inline bounds are moved to `where` clauses, and
+// traits gain an explicit `Self` parameter with its supertrait bounds.
+fn generics() {
+    check(
+        r#"
+struct S<'a, 'b: 'a, T: Copy + 'a + 'b, const K: u8 = 0> {
+    field: &'a &'b T,
+}
+
+struct Tuple<T: Copy, U: ?Sized>(T, U);
+
+impl<'a, 'b: 'a, T: Copy + 'a + 'b, const K: u8 = 0> S<'a, 'b, T, K> {
+    fn f<G: 'a>(arg: impl Copy) -> impl Copy {}
+}
+
+enum Enum<'a, T, const U: u8> {}
+union Union<'a, T, const U: u8> {}
+
+trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {}
+        "#,
+        expect![[r#"
+            pub(self) struct S<'a, 'b, T, const K: u8>
+            where
+                T: Copy,
+                T: 'a,
+                T: 'b
+            {
+                pub(self) field: &'a &'b T,
+            }
+
+            pub(self) struct Tuple<T, U>(
+                pub(self) 0: T,
+                pub(self) 1: U,
+            )
+            where
+                T: Copy,
+                U: ?Sized;
+
+            impl<'a, 'b, T, const K: u8> S<'a, 'b, T, K>
+            where
+                T: Copy,
+                T: 'a,
+                T: 'b
+            {
+                pub(self) fn f<G>(
+                    arg: impl Copy,
+                ) -> impl Copy
+                where
+                    G: 'a { ... }
+            }
+
+            pub(self) enum Enum<'a, T, const U: u8> {
+            }
+
+            pub(self) union Union<'a, T, const U: u8> {
+            }
+
+            pub(self) trait Tr<'a, Self, T>
+            where
+                Self: Super,
+                T: 'a,
+                Self: for<'a> Tr<'a, T>
+            {
+            }
+        "#]],
+    )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/keys.rs b/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
new file mode 100644
index 000000000..c5cb9a2af
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
@@ -0,0 +1,70 @@
+//! keys to be used with `DynMap`
+
+use std::marker::PhantomData;
+
+use hir_expand::MacroCallId;
+use rustc_hash::FxHashMap;
+use syntax::{ast, AstNode, AstPtr};
+
+use crate::{
+ attr::AttrId,
+ dyn_map::{DynMap, Policy},
+ ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id,
+ MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId,
+ UnionId,
+};
+
+/// A `DynMap` key from an AST node type to the corresponding definition id,
+/// compared via [`AstPtr`] (see [`AstPtrPolicy`] below).
+pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
+
+// Item-level definitions.
+pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
+pub const CONST: Key<ast::Const, ConstId> = Key::new();
+pub const STATIC: Key<ast::Static, StaticId> = Key::new();
+pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
+pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
+pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
+pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
+pub const UNION: Key<ast::Union, UnionId> = Key::new();
+pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
+
+// Definitions nested inside items (variants, fields, generic params).
+pub const VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
+pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
+pub const RECORD_FIELD: Key<ast::RecordField, FieldId> = Key::new();
+pub const TYPE_PARAM: Key<ast::TypeParam, TypeOrConstParamId> = Key::new();
+pub const CONST_PARAM: Key<ast::ConstParam, TypeOrConstParamId> = Key::new();
+pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
+
+// Macro definitions and expansions.
+pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
+pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
+pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
+pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
+pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
+    Key::new();
+
+/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
+/// equal if they point to exactly the same object.
+///
+/// In general, we do not guarantee that we have exactly one instance of a
+/// syntax tree for each file. We probably should add such guarantee, but, for
+/// the time being, we will use identity-less AstPtr comparison.
+pub struct AstPtrPolicy<AST, ID> {
+    // Zero-sized: the policy only carries its type parameters.
+    _phantom: PhantomData<(AST, ID)>,
+}
+
+/// Stores `(AST, ID)` pairs in the `DynMap` as an `FxHashMap<AstPtr<AST>, ID>`,
+/// keyed by position-based [`AstPtr`]s rather than node identity.
+impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
+    type K = AST;
+    type V = ID;
+    fn insert(map: &mut DynMap, key: AST, value: ID) {
+        let key = AstPtr::new(&key);
+        map.map
+            .entry::<FxHashMap<AstPtr<AST>, ID>>()
+            .or_insert_with(Default::default)
+            .insert(key, value);
+    }
+    fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> {
+        let key = AstPtr::new(key);
+        map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
+    }
+    fn is_empty(map: &DynMap) -> bool {
+        // A missing inner map counts as empty.
+        map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
new file mode 100644
index 000000000..877850184
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -0,0 +1,174 @@
+//! Collects lang items: items marked with `#[lang = "..."]` attribute.
+//!
+//! This attribute to tell the compiler about semi built-in std library
+//! features, such as Fn family of traits.
+use std::sync::Arc;
+
+use rustc_hash::FxHashMap;
+use syntax::SmolStr;
+
+use crate::{
+ db::DefDatabase, AdtId, AttrDefId, CrateId, EnumId, EnumVariantId, FunctionId, ImplId,
+ ModuleDefId, StaticId, StructId, TraitId,
+};
+
+/// The definition a `#[lang = "..."]` attribute can resolve to.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum LangItemTarget {
+    EnumId(EnumId),
+    FunctionId(FunctionId),
+    ImplDefId(ImplId),
+    StaticId(StaticId),
+    StructId(StructId),
+    TraitId(TraitId),
+    EnumVariantId(EnumVariantId),
+}
+
+impl LangItemTarget {
+    /// Returns the contained [`EnumId`] if this target is an enum.
+    pub fn as_enum(self) -> Option<EnumId> {
+        if let LangItemTarget::EnumId(id) = self { Some(id) } else { None }
+    }
+
+    /// Returns the contained [`FunctionId`] if this target is a function.
+    pub fn as_function(self) -> Option<FunctionId> {
+        if let LangItemTarget::FunctionId(id) = self { Some(id) } else { None }
+    }
+
+    /// Returns the contained [`ImplId`] if this target is an impl.
+    pub fn as_impl_def(self) -> Option<ImplId> {
+        if let LangItemTarget::ImplDefId(id) = self { Some(id) } else { None }
+    }
+
+    /// Returns the contained [`StaticId`] if this target is a static.
+    pub fn as_static(self) -> Option<StaticId> {
+        if let LangItemTarget::StaticId(id) = self { Some(id) } else { None }
+    }
+
+    /// Returns the contained [`StructId`] if this target is a struct.
+    pub fn as_struct(self) -> Option<StructId> {
+        if let LangItemTarget::StructId(id) = self { Some(id) } else { None }
+    }
+
+    /// Returns the contained [`TraitId`] if this target is a trait.
+    pub fn as_trait(self) -> Option<TraitId> {
+        if let LangItemTarget::TraitId(id) = self { Some(id) } else { None }
+    }
+
+    /// Returns the contained [`EnumVariantId`] if this target is an enum variant.
+    pub fn as_enum_variant(self) -> Option<EnumVariantId> {
+        if let LangItemTarget::EnumVariantId(id) = self { Some(id) } else { None }
+    }
+}
+
+/// The lang items collected from a single crate, keyed by lang-item name.
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct LangItems {
+    items: FxHashMap<SmolStr, LangItemTarget>,
+}
+
+impl LangItems {
+    /// Looks up the target registered for the lang item `item`, if any.
+    pub fn target(&self, item: &str) -> Option<LangItemTarget> {
+        self.items.get(item).copied()
+    }
+
+    /// Salsa query. This will look for lang items in a specific crate.
+    pub(crate) fn crate_lang_items_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<LangItems> {
+        let _p = profile::span("crate_lang_items_query");
+
+        let mut lang_items = LangItems::default();
+
+        let crate_def_map = db.crate_def_map(krate);
+
+        // Walk every module: impls plus declarations (traits and their
+        // methods, enums and their variants, structs, functions, statics).
+        for (_, module_data) in crate_def_map.modules() {
+            for impl_def in module_data.scope.impls() {
+                lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDefId)
+            }
+
+            for def in module_data.scope.declarations() {
+                match def {
+                    ModuleDefId::TraitId(trait_) => {
+                        lang_items.collect_lang_item(db, trait_, LangItemTarget::TraitId);
+                        db.trait_data(trait_).items.iter().for_each(|&(_, assoc_id)| {
+                            if let crate::AssocItemId::FunctionId(f) = assoc_id {
+                                lang_items.collect_lang_item(db, f, LangItemTarget::FunctionId);
+                            }
+                        });
+                    }
+                    ModuleDefId::AdtId(AdtId::EnumId(e)) => {
+                        lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
+                        db.enum_data(e).variants.iter().for_each(|(local_id, _)| {
+                            lang_items.collect_lang_item(
+                                db,
+                                EnumVariantId { parent: e, local_id },
+                                LangItemTarget::EnumVariantId,
+                            );
+                        });
+                    }
+                    ModuleDefId::AdtId(AdtId::StructId(s)) => {
+                        lang_items.collect_lang_item(db, s, LangItemTarget::StructId);
+                    }
+                    ModuleDefId::FunctionId(f) => {
+                        lang_items.collect_lang_item(db, f, LangItemTarget::FunctionId);
+                    }
+                    ModuleDefId::StaticId(s) => {
+                        lang_items.collect_lang_item(db, s, LangItemTarget::StaticId);
+                    }
+                    _ => {}
+                }
+            }
+        }
+
+        Arc::new(lang_items)
+    }
+
+    /// Salsa query. Look for a lang item, starting from the specified crate and recursively
+    /// traversing its dependencies.
+    pub(crate) fn lang_item_query(
+        db: &dyn DefDatabase,
+        start_crate: CrateId,
+        item: SmolStr,
+    ) -> Option<LangItemTarget> {
+        let _p = profile::span("lang_item_query");
+        // Prefer a definition in the starting crate over its dependencies.
+        let lang_items = db.crate_lang_items(start_crate);
+        let start_crate_target = lang_items.items.get(&item);
+        if let Some(&target) = start_crate_target {
+            return Some(target);
+        }
+        db.crate_graph()[start_crate]
+            .dependencies
+            .iter()
+            .find_map(|dep| db.lang_item(dep.crate_id, item.clone()))
+    }
+
+    /// Registers `item` under its `#[lang = "..."]` name, if it has one.
+    /// The first definition for a given name wins (`or_insert_with`).
+    fn collect_lang_item<T>(
+        &mut self,
+        db: &dyn DefDatabase,
+        item: T,
+        constructor: fn(T) -> LangItemTarget,
+    ) where
+        T: Into<AttrDefId> + Copy,
+    {
+        let _p = profile::span("collect_lang_item");
+        if let Some(lang_item_name) = lang_attr(db, item) {
+            self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
+        }
+    }
+}
+
+/// Returns the string value of the item's `#[lang = "..."]` attribute, if any.
+pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<SmolStr> {
+    db.attrs(item.into()).by_key("lang").string_value().cloned()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
new file mode 100644
index 000000000..56603f4b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -0,0 +1,980 @@
+//! `hir_def` crate contains everything between macro expansion and type
+//! inference.
+//!
+//! It defines various items (structs, enums, traits) which comprise Rust code,
+//! as well as an algorithm for resolving paths to such entities.
+//!
+//! Note that `hir_def` is a work in progress, so not all of the above is
+//! actually true.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+pub mod db;
+
+pub mod attr;
+pub mod path;
+pub mod type_ref;
+pub mod builtin_type;
+pub mod builtin_attr;
+pub mod per_ns;
+pub mod item_scope;
+
+pub mod dyn_map;
+pub mod keys;
+
+pub mod item_tree;
+pub mod intern;
+
+pub mod adt;
+pub mod data;
+pub mod generics;
+pub mod lang_item;
+
+pub mod expr;
+pub mod body;
+pub mod resolver;
+
+mod trace;
+pub mod nameres;
+
+pub mod src;
+pub mod child_by_source;
+
+pub mod visibility;
+pub mod find_path;
+pub mod import_map;
+
+#[cfg(test)]
+mod test_db;
+#[cfg(test)]
+mod macro_expansion_tests;
+
+use std::{
+ hash::{Hash, Hasher},
+ sync::Arc,
+};
+
+use attr::Attr;
+use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
+use hir_expand::{
+ ast_id_map::FileAstId,
+ builtin_attr_macro::BuiltinAttrExpander,
+ builtin_derive_macro::BuiltinDeriveExpander,
+ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
+ eager::{expand_eager_macro, ErrorEmitted, ErrorSink},
+ hygiene::Hygiene,
+ proc_macro::ProcMacroExpander,
+ AstId, ExpandError, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
+ MacroDefKind, UnresolvedMacro,
+};
+use item_tree::ExternBlock;
+use la_arena::Idx;
+use nameres::DefMap;
+use stdx::impl_from;
+use syntax::ast;
+
+use crate::{
+ adt::VariantData,
+ attr::AttrId,
+ builtin_type::BuiltinType,
+ item_tree::{
+ Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, ModItem,
+ Static, Struct, Trait, TypeAlias, Union,
+ },
+};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ModuleId {
+ krate: CrateId,
+ /// If this `ModuleId` was derived from a `DefMap` for a block expression, this stores the
+ /// `BlockId` of that block expression. If `None`, this module is part of the crate-level
+ /// `DefMap` of `krate`.
+ block: Option<BlockId>,
+ /// The module's ID in its originating `DefMap`.
+ pub local_id: LocalModuleId,
+}
+
+impl ModuleId {
+ pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
+ match self.block {
+ Some(block) => {
+ db.block_def_map(block).unwrap_or_else(|| {
+ // NOTE: This should be unreachable - all `ModuleId`s come from their `DefMap`s,
+ // so the `DefMap` here must exist.
+ unreachable!("no `block_def_map` for `ModuleId` {:?}", self);
+ })
+ }
+ None => db.crate_def_map(self.krate),
+ }
+ }
+
+ pub fn krate(&self) -> CrateId {
+ self.krate
+ }
+
+ pub fn containing_module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+ self.def_map(db).containing_module(self.local_id)
+ }
+
+ pub fn containing_block(&self) -> Option<BlockId> {
+ self.block
+ }
+}
+
+/// An ID of a module, **local** to a specific crate
+pub type LocalModuleId = Idx<nameres::ModuleData>;
+
+#[derive(Debug)]
+pub struct ItemLoc<N: ItemTreeNode> {
+ pub container: ModuleId,
+ pub id: ItemTreeId<N>,
+}
+
+impl<N: ItemTreeNode> Clone for ItemLoc<N> {
+ fn clone(&self) -> Self {
+ Self { container: self.container, id: self.id }
+ }
+}
+
+impl<N: ItemTreeNode> Copy for ItemLoc<N> {}
+
+impl<N: ItemTreeNode> PartialEq for ItemLoc<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.container == other.container && self.id == other.id
+ }
+}
+
+impl<N: ItemTreeNode> Eq for ItemLoc<N> {}
+
+impl<N: ItemTreeNode> Hash for ItemLoc<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.container.hash(state);
+ self.id.hash(state);
+ }
+}
+
+#[derive(Debug)]
+pub struct AssocItemLoc<N: ItemTreeNode> {
+ pub container: ItemContainerId,
+ pub id: ItemTreeId<N>,
+}
+
+impl<N: ItemTreeNode> Clone for AssocItemLoc<N> {
+ fn clone(&self) -> Self {
+ Self { container: self.container, id: self.id }
+ }
+}
+
+impl<N: ItemTreeNode> Copy for AssocItemLoc<N> {}
+
+impl<N: ItemTreeNode> PartialEq for AssocItemLoc<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.container == other.container && self.id == other.id
+ }
+}
+
+impl<N: ItemTreeNode> Eq for AssocItemLoc<N> {}
+
+impl<N: ItemTreeNode> Hash for AssocItemLoc<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.container.hash(state);
+ self.id.hash(state);
+ }
+}
+
+macro_rules! impl_intern {
+ ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
+ impl_intern_key!($id);
+
+ impl Intern for $loc {
+ type ID = $id;
+ fn intern(self, db: &dyn db::DefDatabase) -> $id {
+ db.$intern(self)
+ }
+ }
+
+ impl Lookup for $id {
+ type Data = $loc;
+ fn lookup(&self, db: &dyn db::DefDatabase) -> $loc {
+ db.$lookup(*self)
+ }
+ }
+ };
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct FunctionId(salsa::InternId);
+type FunctionLoc = AssocItemLoc<Function>;
+impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct StructId(salsa::InternId);
+type StructLoc = ItemLoc<Struct>;
+impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct UnionId(salsa::InternId);
+pub type UnionLoc = ItemLoc<Union>;
+impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct EnumId(salsa::InternId);
+pub type EnumLoc = ItemLoc<Enum>;
+impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
+
+// FIXME: rename to `VariantId`, only enums can have variants
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct EnumVariantId {
+ pub parent: EnumId,
+ pub local_id: LocalEnumVariantId,
+}
+
+pub type LocalEnumVariantId = Idx<adt::EnumVariantData>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct FieldId {
+ pub parent: VariantId,
+ pub local_id: LocalFieldId,
+}
+
+pub type LocalFieldId = Idx<adt::FieldData>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ConstId(salsa::InternId);
+type ConstLoc = AssocItemLoc<Const>;
+impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct StaticId(salsa::InternId);
+pub type StaticLoc = AssocItemLoc<Static>;
+impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TraitId(salsa::InternId);
+pub type TraitLoc = ItemLoc<Trait>;
+impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeAliasId(salsa::InternId);
+type TypeAliasLoc = AssocItemLoc<TypeAlias>;
+impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ImplId(salsa::InternId);
+type ImplLoc = ItemLoc<Impl>;
+impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ExternBlockId(salsa::InternId);
+type ExternBlockLoc = ItemLoc<ExternBlock>;
+impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroExpander {
+ Declarative,
+ BuiltIn(BuiltinFnLikeExpander),
+ BuiltInAttr(BuiltinAttrExpander),
+ BuiltInDerive(BuiltinDeriveExpander),
+ BuiltInEager(EagerExpander),
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct Macro2Id(salsa::InternId);
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Macro2Loc {
+ pub container: ModuleId,
+ pub id: ItemTreeId<MacroDef>,
+ pub expander: MacroExpander,
+}
+impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct MacroRulesId(salsa::InternId);
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroRulesLoc {
+ pub container: ModuleId,
+ pub id: ItemTreeId<MacroRules>,
+ pub local_inner: bool,
+ pub expander: MacroExpander,
+}
+impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ProcMacroId(salsa::InternId);
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ProcMacroLoc {
+ // FIXME: this should be a crate? or just a crate-root module
+ pub container: ModuleId,
+ pub id: ItemTreeId<Function>,
+ pub expander: ProcMacroExpander,
+ pub kind: ProcMacroKind,
+}
+impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct BlockId(salsa::InternId);
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+pub struct BlockLoc {
+ ast_id: AstId<ast::BlockExpr>,
+ /// The containing module.
+ module: ModuleId,
+}
+impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeOrConstParamId {
+ pub parent: GenericDefId,
+ pub local_id: LocalTypeOrConstParamId,
+}
+
+/// A TypeOrConstParamId with an invariant that it actually belongs to a type
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeParamId(TypeOrConstParamId);
+
+impl TypeParamId {
+ pub fn parent(&self) -> GenericDefId {
+ self.0.parent
+ }
+ pub fn local_id(&self) -> LocalTypeOrConstParamId {
+ self.0.local_id
+ }
+}
+
+impl TypeParamId {
+    /// Caller should check if this type-or-const param id really belongs to a type
+ pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
+ Self(x)
+ }
+}
+
+impl From<TypeParamId> for TypeOrConstParamId {
+ fn from(x: TypeParamId) -> Self {
+ x.0
+ }
+}
+
+/// A TypeOrConstParamId with an invariant that it actually belongs to a const
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ConstParamId(TypeOrConstParamId);
+
+impl ConstParamId {
+ pub fn parent(&self) -> GenericDefId {
+ self.0.parent
+ }
+ pub fn local_id(&self) -> LocalTypeOrConstParamId {
+ self.0.local_id
+ }
+}
+
+impl ConstParamId {
+ /// Caller should check if this toc id really belongs to a const
+ pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
+ Self(x)
+ }
+}
+
+impl From<ConstParamId> for TypeOrConstParamId {
+ fn from(x: ConstParamId) -> Self {
+ x.0
+ }
+}
+
+pub type LocalTypeOrConstParamId = Idx<generics::TypeOrConstParamData>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LifetimeParamId {
+ pub parent: GenericDefId,
+ pub local_id: LocalLifetimeParamId,
+}
+pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ItemContainerId {
+ ExternBlockId(ExternBlockId),
+ ModuleId(ModuleId),
+ ImplId(ImplId),
+ TraitId(TraitId),
+}
+impl_from!(ModuleId for ItemContainerId);
+
+/// A Data Type
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum AdtId {
+ StructId(StructId),
+ UnionId(UnionId),
+ EnumId(EnumId),
+}
+impl_from!(StructId, UnionId, EnumId for AdtId);
+
+/// A macro
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum MacroId {
+ Macro2Id(Macro2Id),
+ MacroRulesId(MacroRulesId),
+ ProcMacroId(ProcMacroId),
+}
+impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId);
+
+impl MacroId {
+ pub fn is_attribute(self, db: &dyn db::DefDatabase) -> bool {
+ match self {
+ MacroId::ProcMacroId(it) => it.lookup(db).kind == ProcMacroKind::Attr,
+ _ => false,
+ }
+ }
+}
+
+/// A generic param
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum GenericParamId {
+ TypeParamId(TypeParamId),
+ ConstParamId(ConstParamId),
+ LifetimeParamId(LifetimeParamId),
+}
+impl_from!(TypeParamId, LifetimeParamId, ConstParamId for GenericParamId);
+
+/// The defs which can be visible in the module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ModuleDefId {
+ ModuleId(ModuleId),
+ FunctionId(FunctionId),
+ AdtId(AdtId),
+ // Can't be directly declared, but can be imported.
+ EnumVariantId(EnumVariantId),
+ ConstId(ConstId),
+ StaticId(StaticId),
+ TraitId(TraitId),
+ TypeAliasId(TypeAliasId),
+ BuiltinType(BuiltinType),
+ MacroId(MacroId),
+}
+impl_from!(
+ MacroId(Macro2Id, MacroRulesId, ProcMacroId),
+ ModuleId,
+ FunctionId,
+ AdtId(StructId, EnumId, UnionId),
+ EnumVariantId,
+ ConstId,
+ StaticId,
+ TraitId,
+ TypeAliasId,
+ BuiltinType
+ for ModuleDefId
+);
+
+/// The defs which have a body.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum DefWithBodyId {
+ FunctionId(FunctionId),
+ StaticId(StaticId),
+ ConstId(ConstId),
+}
+
+impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId);
+
+impl DefWithBodyId {
+ pub fn as_generic_def_id(self) -> Option<GenericDefId> {
+ match self {
+ DefWithBodyId::FunctionId(f) => Some(f.into()),
+ DefWithBodyId::StaticId(_) => None,
+ DefWithBodyId::ConstId(c) => Some(c.into()),
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItemId {
+ FunctionId(FunctionId),
+ ConstId(ConstId),
+ TypeAliasId(TypeAliasId),
+}
+// FIXME: not every function, ... is actually an assoc item. maybe we should make
+// sure that you can only turn actual assoc items into AssocItemIds. This would
+// require not implementing From, and instead having some checked way of
+// casting them, and somehow making the constructors private, which would be annoying.
+impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum GenericDefId {
+ FunctionId(FunctionId),
+ AdtId(AdtId),
+ TraitId(TraitId),
+ TypeAliasId(TypeAliasId),
+ ImplId(ImplId),
+ // enum variants cannot have generics themselves, but their parent enums
+ // can, and this makes some code easier to write
+ EnumVariantId(EnumVariantId),
+ // consts can have type parameters from their parents (i.e. associated consts of traits)
+ ConstId(ConstId),
+}
+impl_from!(
+ FunctionId,
+ AdtId(StructId, EnumId, UnionId),
+ TraitId,
+ TypeAliasId,
+ ImplId,
+ EnumVariantId,
+ ConstId
+ for GenericDefId
+);
+
+impl From<AssocItemId> for GenericDefId {
+ fn from(item: AssocItemId) -> Self {
+ match item {
+ AssocItemId::FunctionId(f) => f.into(),
+ AssocItemId::ConstId(c) => c.into(),
+ AssocItemId::TypeAliasId(t) => t.into(),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum AttrDefId {
+ ModuleId(ModuleId),
+ FieldId(FieldId),
+ AdtId(AdtId),
+ FunctionId(FunctionId),
+ EnumVariantId(EnumVariantId),
+ StaticId(StaticId),
+ ConstId(ConstId),
+ TraitId(TraitId),
+ TypeAliasId(TypeAliasId),
+ MacroId(MacroId),
+ ImplId(ImplId),
+ GenericParamId(GenericParamId),
+ ExternBlockId(ExternBlockId),
+}
+
+impl_from!(
+ ModuleId,
+ FieldId,
+ AdtId(StructId, EnumId, UnionId),
+ EnumVariantId,
+ StaticId,
+ ConstId,
+ FunctionId,
+ TraitId,
+ TypeAliasId,
+ MacroId(Macro2Id, MacroRulesId, ProcMacroId),
+ ImplId,
+ GenericParamId
+ for AttrDefId
+);
+
+impl From<ItemContainerId> for AttrDefId {
+ fn from(acid: ItemContainerId) -> Self {
+ match acid {
+ ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid),
+ ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid),
+ ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid),
+ ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum VariantId {
+ EnumVariantId(EnumVariantId),
+ StructId(StructId),
+ UnionId(UnionId),
+}
+impl_from!(EnumVariantId, StructId, UnionId for VariantId);
+
+impl VariantId {
+ pub fn variant_data(self, db: &dyn db::DefDatabase) -> Arc<VariantData> {
+ match self {
+ VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
+ VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
+ VariantId::EnumVariantId(it) => {
+ db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
+ }
+ }
+ }
+
+ pub fn file_id(self, db: &dyn db::DefDatabase) -> HirFileId {
+ match self {
+ VariantId::EnumVariantId(it) => it.parent.lookup(db).id.file_id(),
+ VariantId::StructId(it) => it.lookup(db).id.file_id(),
+ VariantId::UnionId(it) => it.lookup(db).id.file_id(),
+ }
+ }
+
+ pub fn adt_id(self) -> AdtId {
+ match self {
+ VariantId::EnumVariantId(it) => it.parent.into(),
+ VariantId::StructId(it) => it.into(),
+ VariantId::UnionId(it) => it.into(),
+ }
+ }
+}
+
+trait Intern {
+ type ID;
+ fn intern(self, db: &dyn db::DefDatabase) -> Self::ID;
+}
+
+pub trait Lookup {
+ type Data;
+ fn lookup(&self, db: &dyn db::DefDatabase) -> Self::Data;
+}
+
+pub trait HasModule {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId;
+}
+
+impl HasModule for ItemContainerId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match *self {
+ ItemContainerId::ModuleId(it) => it,
+ ItemContainerId::ImplId(it) => it.lookup(db).container,
+ ItemContainerId::TraitId(it) => it.lookup(db).container,
+ ItemContainerId::ExternBlockId(it) => it.lookup(db).container,
+ }
+ }
+}
+
+impl<N: ItemTreeNode> HasModule for AssocItemLoc<N> {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ self.container.module(db)
+ }
+}
+
+impl HasModule for AdtId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ AdtId::StructId(it) => it.lookup(db).container,
+ AdtId::UnionId(it) => it.lookup(db).container,
+ AdtId::EnumId(it) => it.lookup(db).container,
+ }
+ }
+}
+
+impl HasModule for VariantId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ VariantId::EnumVariantId(it) => it.parent.lookup(db).container,
+ VariantId::StructId(it) => it.lookup(db).container,
+ VariantId::UnionId(it) => it.lookup(db).container,
+ }
+ }
+}
+
+impl HasModule for MacroId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ MacroId::MacroRulesId(it) => it.lookup(db).container,
+ MacroId::Macro2Id(it) => it.lookup(db).container,
+ MacroId::ProcMacroId(it) => it.lookup(db).container,
+ }
+ }
+}
+
+impl HasModule for DefWithBodyId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ DefWithBodyId::FunctionId(it) => it.lookup(db).module(db),
+ DefWithBodyId::StaticId(it) => it.lookup(db).module(db),
+ DefWithBodyId::ConstId(it) => it.lookup(db).module(db),
+ }
+ }
+}
+
+impl DefWithBodyId {
+ pub fn as_mod_item(self, db: &dyn db::DefDatabase) -> ModItem {
+ match self {
+ DefWithBodyId::FunctionId(it) => it.lookup(db).id.value.into(),
+ DefWithBodyId::StaticId(it) => it.lookup(db).id.value.into(),
+ DefWithBodyId::ConstId(it) => it.lookup(db).id.value.into(),
+ }
+ }
+}
+
+impl HasModule for GenericDefId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ GenericDefId::FunctionId(it) => it.lookup(db).module(db),
+ GenericDefId::AdtId(it) => it.module(db),
+ GenericDefId::TraitId(it) => it.lookup(db).container,
+ GenericDefId::TypeAliasId(it) => it.lookup(db).module(db),
+ GenericDefId::ImplId(it) => it.lookup(db).container,
+ GenericDefId::EnumVariantId(it) => it.parent.lookup(db).container,
+ GenericDefId::ConstId(it) => it.lookup(db).module(db),
+ }
+ }
+}
+
+impl HasModule for TypeAliasId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ self.lookup(db).module(db)
+ }
+}
+
+impl HasModule for TraitId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ self.lookup(db).container
+ }
+}
+
+impl ModuleDefId {
+ /// Returns the module containing `self` (or `self`, if `self` is itself a module).
+ ///
+ /// Returns `None` if `self` refers to a primitive type.
+ pub fn module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+ Some(match self {
+ ModuleDefId::ModuleId(id) => *id,
+ ModuleDefId::FunctionId(id) => id.lookup(db).module(db),
+ ModuleDefId::AdtId(id) => id.module(db),
+ ModuleDefId::EnumVariantId(id) => id.parent.lookup(db).container,
+ ModuleDefId::ConstId(id) => id.lookup(db).container.module(db),
+ ModuleDefId::StaticId(id) => id.lookup(db).module(db),
+ ModuleDefId::TraitId(id) => id.lookup(db).container,
+ ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db),
+ ModuleDefId::MacroId(id) => id.module(db),
+ ModuleDefId::BuiltinType(_) => return None,
+ })
+ }
+}
+
+impl AttrDefId {
+ pub fn krate(&self, db: &dyn db::DefDatabase) -> CrateId {
+ match self {
+ AttrDefId::ModuleId(it) => it.krate,
+ AttrDefId::FieldId(it) => it.parent.module(db).krate,
+ AttrDefId::AdtId(it) => it.module(db).krate,
+ AttrDefId::FunctionId(it) => it.lookup(db).module(db).krate,
+ AttrDefId::EnumVariantId(it) => it.parent.lookup(db).container.krate,
+ AttrDefId::StaticId(it) => it.lookup(db).module(db).krate,
+ AttrDefId::ConstId(it) => it.lookup(db).module(db).krate,
+ AttrDefId::TraitId(it) => it.lookup(db).container.krate,
+ AttrDefId::TypeAliasId(it) => it.lookup(db).module(db).krate,
+ AttrDefId::ImplId(it) => it.lookup(db).container.krate,
+ AttrDefId::ExternBlockId(it) => it.lookup(db).container.krate,
+ AttrDefId::GenericParamId(it) => {
+ match it {
+ GenericParamId::TypeParamId(it) => it.parent(),
+ GenericParamId::ConstParamId(it) => it.parent(),
+ GenericParamId::LifetimeParamId(it) => it.parent,
+ }
+ .module(db)
+ .krate
+ }
+ AttrDefId::MacroId(it) => it.module(db).krate,
+ }
+ }
+}
+
+/// A helper trait for converting to MacroCallId
+pub trait AsMacroCall {
+ fn as_call_id(
+ &self,
+ db: &dyn db::DefDatabase,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ ) -> Option<MacroCallId> {
+ self.as_call_id_with_errors(db, krate, resolver, &mut |_| ()).ok()?.ok()
+ }
+
+ fn as_call_id_with_errors(
+ &self,
+ db: &dyn db::DefDatabase,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ error_sink: &mut dyn FnMut(ExpandError),
+ ) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro>;
+}
+
+impl AsMacroCall for InFile<&ast::MacroCall> {
+ fn as_call_id_with_errors(
+ &self,
+ db: &dyn db::DefDatabase,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ mut error_sink: &mut dyn FnMut(ExpandError),
+ ) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
+ let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
+ let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
+ let h = Hygiene::new(db.upcast(), self.file_id);
+ let path =
+ self.value.path().and_then(|path| path::ModPath::from_src(db.upcast(), path, &h));
+
+ let path = match error_sink
+ .option(path, || ExpandError::Other("malformed macro invocation".into()))
+ {
+ Ok(path) => path,
+ Err(error) => {
+ return Ok(Err(error));
+ }
+ };
+
+ macro_call_as_call_id(
+ db,
+ &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
+ expands_to,
+ krate,
+ resolver,
+ error_sink,
+ )
+ }
+}
+
+/// Helper wrapper for `AstId` with `ModPath`
+#[derive(Clone, Debug, Eq, PartialEq)]
+struct AstIdWithPath<T: ast::AstNode> {
+ ast_id: AstId<T>,
+ path: path::ModPath,
+}
+
+impl<T: ast::AstNode> AstIdWithPath<T> {
+ fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
+ AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
+ }
+}
+
+fn macro_call_as_call_id(
+ db: &dyn db::DefDatabase,
+ call: &AstIdWithPath<ast::MacroCall>,
+ expand_to: ExpandTo,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ error_sink: &mut dyn FnMut(ExpandError),
+) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
+ let def =
+ resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
+
+ let res = if let MacroDefKind::BuiltInEager(..) = def.kind {
+ let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db.upcast()));
+
+ expand_eager_macro(db.upcast(), krate, macro_call, def, &resolver, error_sink)?
+ } else {
+ Ok(def.as_lazy_macro(
+ db.upcast(),
+ krate,
+ MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
+ ))
+ };
+ Ok(res)
+}
+
+pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId {
+ match id {
+ MacroId::Macro2Id(it) => {
+ let loc = it.lookup(db);
+
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+ let in_file = |m: FileAstId<ast::MacroDef>| InFile::new(loc.id.file_id(), m.upcast());
+ MacroDefId {
+ krate: loc.container.krate,
+ kind: match loc.expander {
+ MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)),
+ MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)),
+ MacroExpander::BuiltInAttr(it) => {
+ MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id))
+ }
+ MacroExpander::BuiltInDerive(it) => {
+ MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id))
+ }
+ MacroExpander::BuiltInEager(it) => {
+ MacroDefKind::BuiltInEager(it, in_file(makro.ast_id))
+ }
+ },
+ local_inner: false,
+ }
+ }
+ MacroId::MacroRulesId(it) => {
+ let loc = it.lookup(db);
+
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+ let in_file = |m: FileAstId<ast::MacroRules>| InFile::new(loc.id.file_id(), m.upcast());
+ MacroDefId {
+ krate: loc.container.krate,
+ kind: match loc.expander {
+ MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)),
+ MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)),
+ MacroExpander::BuiltInAttr(it) => {
+ MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id))
+ }
+ MacroExpander::BuiltInDerive(it) => {
+ MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id))
+ }
+ MacroExpander::BuiltInEager(it) => {
+ MacroDefKind::BuiltInEager(it, in_file(makro.ast_id))
+ }
+ },
+ local_inner: loc.local_inner,
+ }
+ }
+ MacroId::ProcMacroId(it) => {
+ let loc = it.lookup(db);
+
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+ MacroDefId {
+ krate: loc.container.krate,
+ kind: MacroDefKind::ProcMacro(
+ loc.expander,
+ loc.kind,
+ InFile::new(loc.id.file_id(), makro.ast_id),
+ ),
+ local_inner: false,
+ }
+ }
+ }
+}
+
+fn derive_macro_as_call_id(
+ db: &dyn db::DefDatabase,
+ item_attr: &AstIdWithPath<ast::Adt>,
+ derive_attr: AttrId,
+ derive_pos: u32,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
+) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
+ let (macro_id, def_id) = resolver(item_attr.path.clone())
+ .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
+ let call_id = def_id.as_lazy_macro(
+ db.upcast(),
+ krate,
+ MacroCallKind::Derive {
+ ast_id: item_attr.ast_id,
+ derive_index: derive_pos,
+ derive_attr_index: derive_attr.ast_index,
+ },
+ );
+ Ok((macro_id, def_id, call_id))
+}
+
+fn attr_macro_as_call_id(
+ db: &dyn db::DefDatabase,
+ item_attr: &AstIdWithPath<ast::Item>,
+ macro_attr: &Attr,
+ krate: CrateId,
+ def: MacroDefId,
+ is_derive: bool,
+) -> MacroCallId {
+ let mut arg = match macro_attr.input.as_deref() {
+ Some(attr::AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()),
+ _ => Default::default(),
+ };
+
+    // The parentheses are always stripped here (the delimiter is cleared below).
+ arg.0.delimiter = None;
+
+ let res = def.as_lazy_macro(
+ db.upcast(),
+ krate,
+ MacroCallKind::Attr {
+ ast_id: item_attr.ast_id,
+ attr_args: Arc::new(arg),
+ invoc_attr_index: macro_attr.id.ast_index,
+ is_derive,
+ },
+ );
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
new file mode 100644
index 000000000..81b9c5c4b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
@@ -0,0 +1,354 @@
+//! This module contains tests for macro expansion. Effectively, it covers `tt`,
+//! `mbe`, `proc_macro_api` and `hir_expand` crates. This might seem like a
+//! wrong architecture at the first glance, but is intentional.
+//!
+//! Physically, macro expansion process is intertwined with name resolution. You
+//! cannot expand *just* the syntax. So, to be able to write integration tests
+//! of the "expand this code please" form, we have to do it after name
+//! resolution. That is, in this crate. We *could* fake some dependencies and
+//! write unit-tests (in fact, we used to do that), but that makes tests brittle
+//! and harder to understand.
+
+mod mbe;
+mod builtin_fn_macro;
+mod builtin_derive_macro;
+mod proc_macros;
+
+use std::{iter, ops::Range, sync::Arc};
+
+use ::mbe::TokenMap;
+use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
+use expect_test::Expect;
+use hir_expand::{
+ db::{AstDatabase, TokenExpander},
+ AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
+};
+use stdx::format_to;
+use syntax::{
+ ast::{self, edit::IndentLevel},
+ AstNode, SyntaxElement,
+ SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
+ SyntaxNode, TextRange, T,
+};
+use tt::{Subtree, TokenId};
+
+use crate::{
+ db::DefDatabase, macro_id_to_def_id, nameres::ModuleSource, resolver::HasResolver,
+ src::HasSource, test_db::TestDB, AdtId, AsMacroCall, Lookup, ModuleDefId,
+};
+
+#[track_caller]
+fn check(ra_fixture: &str, mut expect: Expect) {
+ let extra_proc_macros = vec![(
+ r#"
+#[proc_macro_attribute]
+pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "identity_when_valid".into(),
+ kind: base_db::ProcMacroKind::Attr,
+ expander: Arc::new(IdentityWhenValidProcMacroExpander),
+ },
+ )];
+ let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);
+ let krate = db.crate_graph().iter().next().unwrap();
+ let def_map = db.crate_def_map(krate);
+ let local_id = def_map.root();
+ let module = def_map.module_id(local_id);
+ let resolver = module.resolver(&db);
+ let source = def_map[local_id].definition_source(&db);
+ let source_file = match source.value {
+ ModuleSource::SourceFile(it) => it,
+ ModuleSource::Module(_) | ModuleSource::BlockExpr(_) => panic!(),
+ };
+
+ // What we want to do is to replace all macros (fn-like, derive, attr) with
+ // their expansions. Turns out, we don't actually store enough information
+ // to do this precisely though! Specifically, if a macro expands to nothing,
+ // it leaves zero traces in def-map, so we can't get its expansion after the
+ // fact.
+ //
+ // This is the usual
+ // <https://github.com/rust-lang/rust-analyzer/issues/3407>
+ // resolve/record tension!
+ //
+    // So here we try to do a resolve, which is necessarily a heuristic. For macro
+ // calls, we use `as_call_id_with_errors`. For derives, we look at the impls
+    // in the module and assume that, if an impl's source is a different
+    // `HirFileId`, then it came from macro expansion.
+
+ let mut text_edits = Vec::new();
+ let mut expansions = Vec::new();
+
+ for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
+ let mut show_token_ids = false;
+ for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
+ show_token_ids |= comment.to_string().contains("+tokenids");
+ }
+ if !show_token_ids {
+ continue;
+ }
+
+ let call_offset = macro_.syntax().text_range().start().into();
+ let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
+ let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
+ let kind = MacroDefKind::Declarative(ast_id);
+
+ let macro_def = db.macro_def(MacroDefId { krate, kind, local_inner: false }).unwrap();
+ if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
+ let tt = match &macro_ {
+ ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
+ ast::Macro::MacroDef(_) => unimplemented!(""),
+ };
+
+ let tt_start = tt.syntax().text_range().start();
+ tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
+ |token| {
+ let range = token.text_range().checked_sub(tt_start).unwrap();
+ if let Some(id) = def_site_token_map.token_by_range(range) {
+ let offset = (range.end() + tt_start).into();
+ text_edits.push((offset..offset, format!("#{}", id.0)));
+ }
+ },
+ );
+ text_edits.push((
+ call_offset..call_offset,
+ format!("// call ids will be shifted by {:?}\n", mac.shift()),
+ ));
+ }
+ }
+
+ for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
+ let macro_call = InFile::new(source.file_id, &macro_call);
+ let mut error = None;
+ let macro_call_id = macro_call
+ .as_call_id_with_errors(
+ &db,
+ krate,
+ |path| {
+ resolver.resolve_path_as_macro(&db, &path).map(|it| macro_id_to_def_id(&db, it))
+ },
+ &mut |err| error = Some(err),
+ )
+ .unwrap()
+ .unwrap();
+ let macro_file = MacroFile { macro_call_id };
+ let mut expansion_result = db.parse_macro_expansion(macro_file);
+ expansion_result.err = expansion_result.err.or(error);
+ expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
+ }
+
+ for (call, exp, arg) in expansions.into_iter().rev() {
+ let mut tree = false;
+ let mut expect_errors = false;
+ let mut show_token_ids = false;
+ for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
+ tree |= comment.to_string().contains("+tree");
+ expect_errors |= comment.to_string().contains("+errors");
+ show_token_ids |= comment.to_string().contains("+tokenids");
+ }
+
+ let mut expn_text = String::new();
+ if let Some(err) = exp.err {
+ format_to!(expn_text, "/* error: {} */", err);
+ }
+ if let Some((parse, token_map)) = exp.value {
+ if expect_errors {
+ assert!(!parse.errors().is_empty(), "no parse errors in expansion");
+ for e in parse.errors() {
+ format_to!(expn_text, "/* parse error: {} */\n", e);
+ }
+ } else {
+ assert!(
+ parse.errors().is_empty(),
+ "parse errors in expansion: \n{:#?}",
+ parse.errors()
+ );
+ }
+ let pp = pretty_print_macro_expansion(
+ parse.syntax_node(),
+ show_token_ids.then(|| &*token_map),
+ );
+ let indent = IndentLevel::from_node(call.syntax());
+ let pp = reindent(indent, pp);
+ format_to!(expn_text, "{}", pp);
+
+ if tree {
+ let tree = format!("{:#?}", parse.syntax_node())
+ .split_inclusive('\n')
+ .map(|line| format!("// {}", line))
+ .collect::<String>();
+ format_to!(expn_text, "\n{}", tree)
+ }
+ }
+ let range = call.syntax().text_range();
+ let range: Range<usize> = range.into();
+
+ if show_token_ids {
+ if let Some((tree, map, _)) = arg.as_deref() {
+ let tt_range = call.token_tree().unwrap().syntax().text_range();
+ let mut ranges = Vec::new();
+ extract_id_ranges(&mut ranges, map, tree);
+ for (range, id) in ranges {
+ let idx = (tt_range.start() + range.end()).into();
+ text_edits.push((idx..idx, format!("#{}", id.0)));
+ }
+ }
+ text_edits.push((range.start..range.start, "// ".into()));
+ call.to_string().match_indices('\n').for_each(|(offset, _)| {
+ let offset = offset + 1 + range.start;
+ text_edits.push((offset..offset, "// ".into()));
+ });
+ text_edits.push((range.end..range.end, "\n".into()));
+ text_edits.push((range.end..range.end, expn_text));
+ } else {
+ text_edits.push((range, expn_text));
+ }
+ }
+
+ text_edits.sort_by_key(|(range, _)| range.start);
+ text_edits.reverse();
+ let mut expanded_text = source_file.to_string();
+ for (range, text) in text_edits {
+ expanded_text.replace_range(range, &text);
+ }
+
+ for decl_id in def_map[local_id].scope.declarations() {
+ // FIXME: I'm sure there's already better way to do this
+ let src = match decl_id {
+ ModuleDefId::AdtId(AdtId::StructId(struct_id)) => {
+ Some(struct_id.lookup(&db).source(&db).syntax().cloned())
+ }
+ ModuleDefId::FunctionId(function_id) => {
+ Some(function_id.lookup(&db).source(&db).syntax().cloned())
+ }
+ _ => None,
+ };
+ if let Some(src) = src {
+ if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
+ let pp = pretty_print_macro_expansion(src.value, None);
+ format_to!(expanded_text, "\n{}", pp)
+ }
+ }
+ }
+
+ for impl_id in def_map[local_id].scope.impls() {
+ let src = impl_id.lookup(&db).source(&db);
+ if src.file_id.is_builtin_derive(&db).is_some() {
+ let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
+ format_to!(expanded_text, "\n{}", pp)
+ }
+ }
+
+ expect.indent(false);
+ expect.assert_eq(&expanded_text);
+}
+
+fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
+ tree.token_trees.iter().for_each(|tree| match tree {
+ tt::TokenTree::Leaf(leaf) => {
+ let id = match leaf {
+ tt::Leaf::Literal(it) => it.id,
+ tt::Leaf::Punct(it) => it.id,
+ tt::Leaf::Ident(it) => it.id,
+ };
+ ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
+ }
+ tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
+ });
+}
+
+fn reindent(indent: IndentLevel, pp: String) -> String {
+ if !pp.contains('\n') {
+ return pp;
+ }
+ let mut lines = pp.split_inclusive('\n');
+ let mut res = lines.next().unwrap().to_string();
+ for line in lines {
+ if line.trim().is_empty() {
+ res.push_str(line)
+ } else {
+ format_to!(res, "{}{}", indent, line)
+ }
+ }
+ res
+}
+
+fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
+ let mut res = String::new();
+ let mut prev_kind = EOF;
+ let mut indent_level = 0;
+ for token in iter::successors(expn.first_token(), |t| t.next_token()) {
+ let curr_kind = token.kind();
+ let space = match (prev_kind, curr_kind) {
+ _ if prev_kind.is_trivia() || curr_kind.is_trivia() => "",
+ (T!['{'], T!['}']) => "",
+ (T![=], _) | (_, T![=]) => " ",
+ (_, T!['{']) => " ",
+ (T![;] | T!['{'] | T!['}'], _) => "\n",
+ (_, T!['}']) => "\n",
+ (IDENT | LIFETIME_IDENT, IDENT | LIFETIME_IDENT) => " ",
+ _ if prev_kind.is_keyword() && curr_kind.is_keyword() => " ",
+ (IDENT, _) if curr_kind.is_keyword() => " ",
+ (_, IDENT) if prev_kind.is_keyword() => " ",
+ (T![>], IDENT) => " ",
+ (T![>], _) if curr_kind.is_keyword() => " ",
+ (T![->], _) | (_, T![->]) => " ",
+ (T![&&], _) | (_, T![&&]) => " ",
+ (T![,], _) => " ",
+ (T![:], IDENT | T!['(']) => " ",
+ (T![:], _) if curr_kind.is_keyword() => " ",
+ (T![fn], T!['(']) => "",
+ (T![']'], _) if curr_kind.is_keyword() => " ",
+ (T![']'], T![#]) => "\n",
+ (T![Self], T![::]) => "",
+ _ if prev_kind.is_keyword() => " ",
+ _ => "",
+ };
+
+ match prev_kind {
+ T!['{'] => indent_level += 1,
+ T!['}'] => indent_level -= 1,
+ _ => (),
+ }
+
+ res.push_str(space);
+ if space == "\n" {
+ let level = if curr_kind == T!['}'] { indent_level - 1 } else { indent_level };
+ res.push_str(&" ".repeat(level));
+ }
+ prev_kind = curr_kind;
+ format_to!(res, "{}", token);
+ if let Some(map) = map {
+ if let Some(id) = map.token_by_range(token.text_range()) {
+ format_to!(res, "#{}", id.0);
+ }
+ }
+ }
+ res
+}
+
+// Identity mapping, but only works when the input is syntactically valid. This
+// simulates common proc macros that unnecessarily parse their input and return
+// compile errors.
+#[derive(Debug)]
+struct IdentityWhenValidProcMacroExpander;
+impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
+ fn expand(
+ &self,
+ subtree: &Subtree,
+ _: Option<&Subtree>,
+ _: &base_db::Env,
+ ) -> Result<Subtree, base_db::ProcMacroExpansionError> {
+ let (parse, _) =
+ ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
+ if parse.errors().is_empty() {
+ Ok(subtree.clone())
+ } else {
+ panic!("got invalid macro input: {:?}", parse.errors());
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
new file mode 100644
index 000000000..6819e9114
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -0,0 +1,95 @@
+//! Tests for `builtin_derive_macro.rs` from `hir_expand`.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn test_copy_expand_simple() {
+ check(
+ r#"
+//- minicore: derive, copy
+#[derive(Copy)]
+struct Foo;
+"#,
+ expect![[r##"
+#[derive(Copy)]
+struct Foo;
+
+impl < > core::marker::Copy for Foo< > {}"##]],
+ );
+}
+
+#[test]
+fn test_copy_expand_in_core() {
+ cov_mark::check!(test_copy_expand_in_core);
+ check(
+ r#"
+//- /lib.rs crate:core
+#[rustc_builtin_macro]
+macro derive {}
+#[rustc_builtin_macro]
+macro Copy {}
+#[derive(Copy)]
+struct Foo;
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro derive {}
+#[rustc_builtin_macro]
+macro Copy {}
+#[derive(Copy)]
+struct Foo;
+
+impl < > crate ::marker::Copy for Foo< > {}"##]],
+ );
+}
+
+#[test]
+fn test_copy_expand_with_type_params() {
+ check(
+ r#"
+//- minicore: derive, copy
+#[derive(Copy)]
+struct Foo<A, B>;
+"#,
+ expect![[r##"
+#[derive(Copy)]
+struct Foo<A, B>;
+
+impl <T0: core::marker::Copy, T1: core::marker::Copy> core::marker::Copy for Foo<T0, T1> {}"##]],
+ );
+}
+
+#[test]
+fn test_copy_expand_with_lifetimes() {
+ // We currently just ignore lifetimes
+ check(
+ r#"
+//- minicore: derive, copy
+#[derive(Copy)]
+struct Foo<A, B, 'a, 'b>;
+"#,
+ expect![[r##"
+#[derive(Copy)]
+struct Foo<A, B, 'a, 'b>;
+
+impl <T0: core::marker::Copy, T1: core::marker::Copy> core::marker::Copy for Foo<T0, T1> {}"##]],
+ );
+}
+
+#[test]
+fn test_clone_expand() {
+ check(
+ r#"
+//- minicore: derive, clone
+#[derive(Clone)]
+struct Foo<A, B>;
+"#,
+ expect![[r##"
+#[derive(Clone)]
+struct Foo<A, B>;
+
+impl <T0: core::clone::Clone, T1: core::clone::Clone> core::clone::Clone for Foo<T0, T1> {}"##]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
new file mode 100644
index 000000000..92dffa7f3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -0,0 +1,377 @@
+//! Tests for `builtin_fn_macro.rs` from `hir_expand`.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn test_column_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! column {() => {}}
+
+fn main() { column!(); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! column {() => {}}
+
+fn main() { 0; }
+"##]],
+ );
+}
+
+#[test]
+fn test_line_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! line {() => {}}
+
+fn main() { line!() }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! line {() => {}}
+
+fn main() { 0 }
+"##]],
+ );
+}
+
+#[test]
+fn test_stringify_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! stringify {() => {}}
+
+fn main() {
+ stringify!(
+ a
+ b
+ c
+ );
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! stringify {() => {}}
+
+fn main() {
+ "a b c";
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_env_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! env {() => {}}
+
+fn main() { env!("TEST_ENV_VAR"); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! env {() => {}}
+
+fn main() { "__RA_UNIMPLEMENTED__"; }
+"##]],
+ );
+}
+
+#[test]
+fn test_option_env_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! option_env {() => {}}
+
+fn main() { option_env!("TEST_ENV_VAR"); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! option_env {() => {}}
+
+fn main() { std::option::Option::None:: < &str>; }
+"##]],
+ );
+}
+
+#[test]
+fn test_file_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! file {() => {}}
+
+fn main() { file!(); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! file {() => {}}
+
+fn main() { ""; }
+"##]],
+ );
+}
+
+#[test]
+fn test_assert_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! assert {
+ ($cond:expr) => ({ /* compiler built-in */ });
+ ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ assert!(true, "{} {:?}", arg1(a, b, c), arg2);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! assert {
+ ($cond:expr) => ({ /* compiler built-in */ });
+ ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ {
+ if !true {
+ $crate::panic!("{} {:?}", arg1(a, b, c), arg2);
+ }
+ };
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_compile_error_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! compile_error {
+ ($msg:expr) => ({ /* compiler built-in */ });
+ ($msg:expr,) => ({ /* compiler built-in */ })
+}
+
+// This expands to nothing (since it's in item position), but emits an error.
+compile_error!("error!");
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! compile_error {
+ ($msg:expr) => ({ /* compiler built-in */ });
+ ($msg:expr,) => ({ /* compiler built-in */ })
+}
+
+/* error: error! */
+"##]],
+ );
+}
+
+#[test]
+fn test_format_args_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ format_args!("{} {:?}", arg1(a, b, c), arg2);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(arg1(a, b, c)), std::fmt::Display::fmt), std::fmt::ArgumentV1::new(&(arg2), std::fmt::Display::fmt), ]);
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_format_args_expand_with_comma_exprs() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ format_args!("{} {:?}", a::<A,B>(), b);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(a::<A, B>()), std::fmt::Display::fmt), std::fmt::ArgumentV1::new(&(b), std::fmt::Display::fmt), ]);
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_format_args_expand_with_broken_member_access() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ let _ =
+ format_args!/*+errors*/("{} {:?}", a.);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ let _ =
+ /* parse error: expected field name or number */
+std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(a.), std::fmt::Display::fmt), ]);
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_include_bytes_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include_bytes {
+ ($file:expr) => {{ /* compiler built-in */ }};
+ ($file:expr,) => {{ /* compiler built-in */ }};
+}
+
+fn main() { include_bytes("foo"); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! include_bytes {
+ ($file:expr) => {{ /* compiler built-in */ }};
+ ($file:expr,) => {{ /* compiler built-in */ }};
+}
+
+fn main() { include_bytes("foo"); }
+"##]],
+ );
+}
+
+#[test]
+fn test_concat_expand() {
+ check(
+ r##"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false); }
+"##,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+fn main() { "foor0bar\nfalse"; }
+"##]],
+ );
+}
+
+#[test]
+fn test_concat_bytes_expand() {
+ check(
+ r##"
+#[rustc_builtin_macro]
+macro_rules! concat_bytes {}
+
+fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); }
+"##,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! concat_bytes {}
+
+fn main() { [b'A', 66, 67, 68, b'E', 70]; }
+"##]],
+ );
+}
+
+#[test]
+fn test_concat_with_captured_expr() {
+ check(
+ r##"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+macro_rules! surprise {
+ () => { "s" };
+}
+
+macro_rules! stuff {
+ ($string:expr) => { concat!($string) };
+}
+
+fn main() { concat!(surprise!()); }
+"##,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+macro_rules! surprise {
+ () => { "s" };
+}
+
+macro_rules! stuff {
+ ($string:expr) => { concat!($string) };
+}
+
+fn main() { "s"; }
+"##]],
+ );
+}
+
+#[test]
+fn test_concat_idents_expand() {
+ check(
+ r##"
+#[rustc_builtin_macro]
+macro_rules! concat_idents {}
+
+fn main() { concat_idents!(foo, bar); }
+"##,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! concat_idents {}
+
+fn main() { foobar; }
+"##]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
new file mode 100644
index 000000000..30d39d52f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -0,0 +1,1632 @@
+//! Tests specific to declarative macros, aka macros by example. This covers
+//! both stable `macro_rules!` macros as well as unstable `macro` macros.
+
+mod tt_conversion;
+mod matching;
+mod meta_syntax;
+mod regression;
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn token_mapping_smoke_test() {
+ check(
+ r#"
+// +tokenids
+macro_rules! f {
+ ( struct $ident:ident ) => {
+ struct $ident {
+ map: ::std::collections::HashSet<()>,
+ }
+ };
+}
+
+// +tokenids
+f!(struct MyTraitMap2);
+"#,
+ expect![[r##"
+// call ids will be shifted by Shift(30)
+// +tokenids
+macro_rules! f {#0
+ (#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9
+ struct#10 $#11ident#12 {#13
+ map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28
+ }#13
+ }#9;#29
+}#0
+
+// // +tokenids
+// f!(struct#1 MyTraitMap2#2);
+struct#10 MyTraitMap2#32 {#13
+ map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28
+}#13
+"##]],
+ );
+}
+
+#[test]
+fn token_mapping_floats() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12216
+ // (and related issues)
+ check(
+ r#"
+// +tokenids
+macro_rules! f {
+ ($($tt:tt)*) => {
+ $($tt)*
+ };
+}
+
+// +tokenids
+f! {
+ fn main() {
+ 1;
+ 1.0;
+ let x = 1;
+ }
+}
+
+
+"#,
+ expect![[r##"
+// call ids will be shifted by Shift(18)
+// +tokenids
+macro_rules! f {#0
+ (#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11
+ $#12(#13$#14tt#15)#13*#16
+ }#11;#17
+}#0
+
+// // +tokenids
+// f! {
+// fn#1 main#2() {
+// 1#5;#6
+// 1.0#7;#8
+// let#9 x#10 =#11 1#12;#13
+// }
+// }
+fn#19 main#20(#21)#21 {#22
+ 1#23;#24
+ 1.0#25;#26
+ let#27 x#28 =#29 1#30;#31
+}#22
+
+
+"##]],
+ );
+}
+
+#[test]
+fn mbe_smoke_test() {
+ check(
+ r#"
+macro_rules! impl_froms {
+ ($e:ident: $($v:ident),*) => {
+ $(
+ impl From<$v> for $e {
+ fn from(it: $v) -> $e { $e::$v(it) }
+ }
+ )*
+ }
+}
+impl_froms!(TokenTree: Leaf, Subtree);
+"#,
+ expect![[r#"
+macro_rules! impl_froms {
+ ($e:ident: $($v:ident),*) => {
+ $(
+ impl From<$v> for $e {
+ fn from(it: $v) -> $e { $e::$v(it) }
+ }
+ )*
+ }
+}
+impl From<Leaf> for TokenTree {
+ fn from(it: Leaf) -> TokenTree {
+ TokenTree::Leaf(it)
+ }
+}
+impl From<Subtree> for TokenTree {
+ fn from(it: Subtree) -> TokenTree {
+ TokenTree::Subtree(it)
+ }
+}
+"#]],
+ );
+}
+
+#[test]
+fn wrong_nesting_level() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident);*) => ($i)
+}
+m!{a}
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($i:ident);*) => ($i)
+}
+/* error: expected simple binding, found nested binding `i` */
+"#]],
+ );
+}
+
+#[test]
+fn match_by_first_token_literally() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ (= $i:ident) => ( fn $i() {} );
+ (+ $i:ident) => ( struct $i; )
+}
+m! { foo }
+m! { = bar }
+m! { + Baz }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ (= $i:ident) => ( fn $i() {} );
+ (+ $i:ident) => ( struct $i; )
+}
+mod foo {}
+fn bar() {}
+struct Baz;
+"#]],
+ );
+}
+
+#[test]
+fn match_by_last_token_literally() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ ($i:ident =) => ( fn $i() {} );
+ ($i:ident +) => ( struct $i; )
+}
+m! { foo }
+m! { bar = }
+m! { Baz + }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ ($i:ident =) => ( fn $i() {} );
+ ($i:ident +) => ( struct $i; )
+}
+mod foo {}
+fn bar() {}
+struct Baz;
+"#]],
+ );
+}
+
+#[test]
+fn match_by_ident() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ (spam $i:ident) => ( fn $i() {} );
+ (eggs $i:ident) => ( struct $i; )
+}
+m! { foo }
+m! { spam bar }
+m! { eggs Baz }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ (spam $i:ident) => ( fn $i() {} );
+ (eggs $i:ident) => ( struct $i; )
+}
+mod foo {}
+fn bar() {}
+struct Baz;
+"#]],
+ );
+}
+
+#[test]
+fn match_by_separator_token() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident),*) => ($(mod $i {} )*);
+ ($($i:ident)#*) => ($(fn $i() {} )*);
+ ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
+}
+
+m! { foo, bar }
+
+m! { foo# bar }
+
+m! { Foo,# Bar }
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($($i:ident),*) => ($(mod $i {} )*);
+ ($($i:ident)#*) => ($(fn $i() {} )*);
+ ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
+}
+
+mod foo {}
+mod bar {}
+
+fn foo() {}
+fn bar() {}
+
+struct Foo;
+struct Bar;
+"##]],
+ );
+}
+
+#[test]
+fn test_match_group_pattern_with_multiple_defs() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
+}
+impl Bar {
+ fn foo() {}
+ fn bar() {}
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_match_group_pattern_with_multiple_statement() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident),*) => ( fn baz() { $($i ();)* } );
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($i:ident),*) => ( fn baz() { $($i ();)* } );
+}
+fn baz() {
+ foo();
+ bar();
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_match_group_pattern_with_multiple_statement_without_semi() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident),*) => ( fn baz() { $($i() );* } );
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($i:ident),*) => ( fn baz() { $($i() );* } );
+}
+fn baz() {
+ foo();
+ bar()
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_match_group_empty_fixed_token() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident)* #abc) => ( fn baz() { $($i ();)* } );
+}
+m!{#abc}
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($($i:ident)* #abc) => ( fn baz() { $($i ();)* } );
+}
+fn baz() {}
+"##]],
+ )
+}
+
+#[test]
+fn test_match_group_in_subtree() {
+ check(
+ r#"
+macro_rules! m {
+ (fn $name:ident { $($i:ident)* } ) => ( fn $name() { $($i ();)* } );
+}
+m! { fn baz { a b } }
+"#,
+ expect![[r#"
+macro_rules! m {
+ (fn $name:ident { $($i:ident)* } ) => ( fn $name() { $($i ();)* } );
+}
+fn baz() {
+ a();
+ b();
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_expr_order() {
+ check(
+ r#"
+macro_rules! m {
+ ($ i:expr) => { fn bar() { $ i * 3; } }
+}
+// +tree
+m! { 1 + 2 }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($ i:expr) => { fn bar() { $ i * 3; } }
+}
+fn bar() {
+ (1+2)*3;
+}
+// MACRO_ITEMS@0..17
+// FN@0..17
+// FN_KW@0..2 "fn"
+// NAME@2..5
+// IDENT@2..5 "bar"
+// PARAM_LIST@5..7
+// L_PAREN@5..6 "("
+// R_PAREN@6..7 ")"
+// BLOCK_EXPR@7..17
+// STMT_LIST@7..17
+// L_CURLY@7..8 "{"
+// EXPR_STMT@8..16
+// BIN_EXPR@8..15
+// PAREN_EXPR@8..13
+// L_PAREN@8..9 "("
+// BIN_EXPR@9..12
+// LITERAL@9..10
+// INT_NUMBER@9..10 "1"
+// PLUS@10..11 "+"
+// LITERAL@11..12
+// INT_NUMBER@11..12 "2"
+// R_PAREN@12..13 ")"
+// STAR@13..14 "*"
+// LITERAL@14..15
+// INT_NUMBER@14..15 "3"
+// SEMICOLON@15..16 ";"
+// R_CURLY@16..17 "}"
+
+"#]],
+ )
+}
+
+#[test]
+fn test_match_group_with_multichar_sep() {
+ check(
+ r#"
+macro_rules! m {
+ (fn $name:ident { $($i:literal)* }) => ( fn $name() -> bool { $($i)&&* } );
+}
+m! (fn baz { true false } );
+"#,
+ expect![[r#"
+macro_rules! m {
+ (fn $name:ident { $($i:literal)* }) => ( fn $name() -> bool { $($i)&&* } );
+}
+fn baz() -> bool {
+ true && false
+}
+"#]],
+ );
+
+ check(
+ r#"
+macro_rules! m {
+ (fn $name:ident { $($i:literal)&&* }) => ( fn $name() -> bool { $($i)&&* } );
+}
+m! (fn baz { true && false } );
+"#,
+ expect![[r#"
+macro_rules! m {
+ (fn $name:ident { $($i:literal)&&* }) => ( fn $name() -> bool { $($i)&&* } );
+}
+fn baz() -> bool {
+ true && false
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_match_group_zero_match() {
+ check(
+ r#"
+macro_rules! m { ( $($i:ident)* ) => (); }
+m!();
+"#,
+ expect![[r#"
+macro_rules! m { ( $($i:ident)* ) => (); }
+
+"#]],
+ );
+}
+
+#[test]
+fn test_match_group_in_group() {
+ check(
+ r#"
+macro_rules! m {
+ [ $( ( $($i:ident)* ) )* ] => [ ok![$( ( $($i)* ) )*]; ]
+}
+m! ( (a b) );
+"#,
+ expect![[r#"
+macro_rules! m {
+ [ $( ( $($i:ident)* ) )* ] => [ ok![$( ( $($i)* ) )*]; ]
+}
+ok![(a b)];
+"#]],
+ )
+}
+
+#[test]
+fn test_expand_to_item_list() {
+ check(
+ r#"
+macro_rules! structs {
+ ($($i:ident),*) => { $(struct $i { field: u32 } )* }
+}
+
+// +tree
+structs!(Foo, Bar);
+ "#,
+ expect![[r#"
+macro_rules! structs {
+ ($($i:ident),*) => { $(struct $i { field: u32 } )* }
+}
+
+struct Foo {
+ field: u32
+}
+struct Bar {
+ field: u32
+}
+// MACRO_ITEMS@0..40
+// STRUCT@0..20
+// STRUCT_KW@0..6 "struct"
+// NAME@6..9
+// IDENT@6..9 "Foo"
+// RECORD_FIELD_LIST@9..20
+// L_CURLY@9..10 "{"
+// RECORD_FIELD@10..19
+// NAME@10..15
+// IDENT@10..15 "field"
+// COLON@15..16 ":"
+// PATH_TYPE@16..19
+// PATH@16..19
+// PATH_SEGMENT@16..19
+// NAME_REF@16..19
+// IDENT@16..19 "u32"
+// R_CURLY@19..20 "}"
+// STRUCT@20..40
+// STRUCT_KW@20..26 "struct"
+// NAME@26..29
+// IDENT@26..29 "Bar"
+// RECORD_FIELD_LIST@29..40
+// L_CURLY@29..30 "{"
+// RECORD_FIELD@30..39
+// NAME@30..35
+// IDENT@30..35 "field"
+// COLON@35..36 ":"
+// PATH_TYPE@36..39
+// PATH@36..39
+// PATH_SEGMENT@36..39
+// NAME_REF@36..39
+// IDENT@36..39 "u32"
+// R_CURLY@39..40 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_two_idents() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident, $j:ident) => { fn foo() { let a = $i; let b = $j; } }
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident, $j:ident) => { fn foo() { let a = $i; let b = $j; } }
+}
+fn foo() {
+ let a = foo;
+ let b = bar;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_tt_to_stmts() {
+ check(
+ r#"
+macro_rules! m {
+ () => {
+ let a = 0;
+ a = 10 + 1;
+ a
+ }
+}
+
+fn f() -> i32 {
+ m!/*+tree*/{}
+}
+"#,
+ expect![[r#"
+macro_rules! m {
+ () => {
+ let a = 0;
+ a = 10 + 1;
+ a
+ }
+}
+
+fn f() -> i32 {
+ let a = 0;
+ a = 10+1;
+ a
+// MACRO_STMTS@0..15
+// LET_STMT@0..7
+// LET_KW@0..3 "let"
+// IDENT_PAT@3..4
+// NAME@3..4
+// IDENT@3..4 "a"
+// EQ@4..5 "="
+// LITERAL@5..6
+// INT_NUMBER@5..6 "0"
+// SEMICOLON@6..7 ";"
+// EXPR_STMT@7..14
+// BIN_EXPR@7..13
+// PATH_EXPR@7..8
+// PATH@7..8
+// PATH_SEGMENT@7..8
+// NAME_REF@7..8
+// IDENT@7..8 "a"
+// EQ@8..9 "="
+// BIN_EXPR@9..13
+// LITERAL@9..11
+// INT_NUMBER@9..11 "10"
+// PLUS@11..12 "+"
+// LITERAL@12..13
+// INT_NUMBER@12..13 "1"
+// SEMICOLON@13..14 ";"
+// PATH_EXPR@14..15
+// PATH@14..15
+// PATH_SEGMENT@14..15
+// NAME_REF@14..15
+// IDENT@14..15 "a"
+
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_match_literal() {
+ check(
+ r#"
+macro_rules! m {
+ ('(') => { fn l_paren() {} }
+}
+m!['('];
+"#,
+ expect![[r#"
+macro_rules! m {
+ ('(') => { fn l_paren() {} }
+}
+fn l_paren() {}
+"#]],
+ );
+}
+
+#[test]
+fn test_parse_macro_def_simple() {
+ cov_mark::check!(parse_macro_def_simple);
+ check(
+ r#"
+macro m($id:ident) { fn $id() {} }
+m!(bar);
+"#,
+ expect![[r#"
+macro m($id:ident) { fn $id() {} }
+fn bar() {}
+"#]],
+ );
+}
+
+#[test]
+fn test_parse_macro_def_rules() {
+ cov_mark::check!(parse_macro_def_rules);
+
+ check(
+ r#"
+macro m {
+ ($id:ident) => { fn $id() {} }
+}
+m!(bar);
+"#,
+ expect![[r#"
+macro m {
+ ($id:ident) => { fn $id() {} }
+}
+fn bar() {}
+"#]],
+ );
+}
+
+#[test]
+fn test_macro_2_0_panic_2015() {
+ check(
+ r#"
+macro panic_2015 {
+ () => (),
+ (bar) => (),
+}
+panic_2015!(bar);
+"#,
+ expect![[r#"
+macro panic_2015 {
+ () => (),
+ (bar) => (),
+}
+
+"#]],
+ );
+}
+
+#[test]
+fn test_path() {
+ check(
+ r#"
+macro_rules! m {
+ ($p:path) => { fn foo() { let a = $p; } }
+}
+
+m! { foo }
+
+m! { bar::<u8>::baz::<u8> }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($p:path) => { fn foo() { let a = $p; } }
+}
+
+fn foo() {
+ let a = foo;
+}
+
+fn foo() {
+ let a = bar::<u8>::baz::<u8> ;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_two_paths() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:path, $j:path) => { fn foo() { let a = $ i; let b = $j; } }
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:path, $j:path) => { fn foo() { let a = $ i; let b = $j; } }
+}
+fn foo() {
+ let a = foo;
+ let b = bar;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_path_with_path() {
+ check(
+ r#"
+macro_rules! m {
+ ($p:path) => { fn foo() { let a = $p::bar; } }
+}
+m! { foo }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($p:path) => { fn foo() { let a = $p::bar; } }
+}
+fn foo() {
+ let a = foo::bar;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_expr() {
+ check(
+ r#"
+macro_rules! m {
+ ($e:expr) => { fn bar() { $e; } }
+}
+
+m! { 2 + 2 * baz(3).quux() }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($e:expr) => { fn bar() { $e; } }
+}
+
+fn bar() {
+ (2+2*baz(3).quux());
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_last_expr() {
+ check(
+ r#"
+macro_rules! vec {
+ ($($item:expr),*) => {{
+ let mut v = Vec::new();
+ $( v.push($item); )*
+ v
+ }};
+}
+
+fn f() {
+ vec![1,2,3];
+}
+"#,
+ expect![[r#"
+macro_rules! vec {
+ ($($item:expr),*) => {{
+ let mut v = Vec::new();
+ $( v.push($item); )*
+ v
+ }};
+}
+
+fn f() {
+ {
+ let mut v = Vec::new();
+ v.push(1);
+ v.push(2);
+ v.push(3);
+ v
+ };
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_expr_with_attr() {
+ check(
+ r#"
+macro_rules! m { ($a:expr) => { ok!(); } }
+m!(#[allow(a)]());
+"#,
+ expect![[r#"
+macro_rules! m { ($a:expr) => { ok!(); } }
+ok!();
+"#]],
+ )
+}
+
+#[test]
+fn test_ty() {
+ check(
+ r#"
+macro_rules! m {
+ ($t:ty) => ( fn bar() -> $t {} )
+}
+m! { Baz<u8> }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($t:ty) => ( fn bar() -> $t {} )
+}
+fn bar() -> Baz<u8> {}
+"#]],
+ )
+}
+
+#[test]
+fn test_ty_with_complex_type() {
+ check(
+ r#"
+macro_rules! m {
+ ($t:ty) => ( fn bar() -> $ t {} )
+}
+
+m! { &'a Baz<u8> }
+
+m! { extern "Rust" fn() -> Ret }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($t:ty) => ( fn bar() -> $ t {} )
+}
+
+fn bar() -> & 'a Baz<u8> {}
+
+fn bar() -> extern "Rust"fn() -> Ret {}
+"#]],
+ );
+}
+
+#[test]
+fn test_pat_() {
+ check(
+ r#"
+macro_rules! m {
+ ($p:pat) => { fn foo() { let $p; } }
+}
+m! { (a, b) }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($p:pat) => { fn foo() { let $p; } }
+}
+fn foo() {
+ let (a, b);
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_stmt() {
+ check(
+ r#"
+macro_rules! m {
+ ($s:stmt) => ( fn bar() { $s; } )
+}
+m! { 2 }
+m! { let a = 0 }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($s:stmt) => ( fn bar() { $s; } )
+}
+fn bar() {
+ 2;
+}
+fn bar() {
+ let a = 0;
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_single_item() {
+ check(
+ r#"
+macro_rules! m { ($i:item) => ( $i ) }
+m! { mod c {} }
+"#,
+ expect![[r#"
+macro_rules! m { ($i:item) => ( $i ) }
+mod c {}
+"#]],
+ )
+}
+
+#[test]
+fn test_all_items() {
+ check(
+ r#"
+macro_rules! m { ($($i:item)*) => ($($i )*) }
+m! {
+ extern crate a;
+ mod b;
+ mod c {}
+ use d;
+ const E: i32 = 0;
+ static F: i32 = 0;
+ impl G {}
+ struct H;
+ enum I { Foo }
+ trait J {}
+ fn h() {}
+ extern {}
+ type T = u8;
+}
+"#,
+ expect![[r#"
+macro_rules! m { ($($i:item)*) => ($($i )*) }
+extern crate a;
+mod b;
+mod c {}
+use d;
+const E: i32 = 0;
+static F: i32 = 0;
+impl G {}
+struct H;
+enum I {
+ Foo
+}
+trait J {}
+fn h() {}
+extern {}
+type T = u8;
+"#]],
+ );
+}
+
+#[test]
+fn test_block() {
+ check(
+ r#"
+macro_rules! m { ($b:block) => { fn foo() $b } }
+m! { { 1; } }
+"#,
+ expect![[r#"
+macro_rules! m { ($b:block) => { fn foo() $b } }
+fn foo() {
+ 1;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_meta() {
+ check(
+ r#"
+macro_rules! m {
+ ($m:meta) => ( #[$m] fn bar() {} )
+}
+m! { cfg(target_os = "windows") }
+m! { hello::world }
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($m:meta) => ( #[$m] fn bar() {} )
+}
+#[cfg(target_os = "windows")] fn bar() {}
+#[hello::world] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_meta_doc_comments() {
+ cov_mark::check!(test_meta_doc_comments);
+ check(
+ r#"
+macro_rules! m {
+ ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+m! {
+ /// Single Line Doc 1
+ /**
+ MultiLines Doc
+ */
+}
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+#[doc = " Single Line Doc 1"]
+#[doc = "\n MultiLines Doc\n "] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_meta_extended_key_value_attributes() {
+ check(
+ r#"
+macro_rules! m {
+ (#[$m:meta]) => ( #[$m] fn bar() {} )
+}
+m! { #[doc = concat!("The `", "bla", "` lang item.")] }
+"#,
+ expect![[r##"
+macro_rules! m {
+ (#[$m:meta]) => ( #[$m] fn bar() {} )
+}
+#[doc = concat!("The `", "bla", "` lang item.")] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_meta_doc_comments_non_latin() {
+ check(
+ r#"
+macro_rules! m {
+ ($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+m! {
+ /// 錦瑟無端五十弦,一弦一柱思華年。
+ /**
+ 莊生曉夢迷蝴蝶,望帝春心託杜鵑。
+ */
+}
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+#[doc = " 錦瑟無端五十弦,一弦一柱思華年。"]
+#[doc = "\n 莊生曉夢迷蝴蝶,望帝春心託杜鵑。\n "] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_meta_doc_comments_escaped_characters() {
+ check(
+ r#"
+macro_rules! m {
+ ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+m! {
+ /// \ " '
+}
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+#[doc = " \\ \" \'"] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_tt_block() {
+ check(
+ r#"
+macro_rules! m { ($tt:tt) => { fn foo() $tt } }
+m! { { 1; } }
+"#,
+ expect![[r#"
+macro_rules! m { ($tt:tt) => { fn foo() $tt } }
+fn foo() {
+ 1;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_tt_group() {
+ check(
+ r#"
+macro_rules! m { ($($tt:tt)*) => { $($tt)* } }
+m! { fn foo() {} }"
+"#,
+ expect![[r#"
+macro_rules! m { ($($tt:tt)*) => { $($tt)* } }
+fn foo() {}"
+"#]],
+ );
+}
+
+#[test]
+fn test_tt_composite() {
+ check(
+ r#"
+macro_rules! m { ($tt:tt) => { ok!(); } }
+m! { => }
+m! { = > }
+"#,
+ expect![[r#"
+macro_rules! m { ($tt:tt) => { ok!(); } }
+ok!();
+/* error: leftover tokens */ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_tt_composite2() {
+ check(
+ r#"
+macro_rules! m { ($($tt:tt)*) => { abs!(=> $($tt)*); } }
+m! {#}
+"#,
+ expect![[r##"
+macro_rules! m { ($($tt:tt)*) => { abs!(=> $($tt)*); } }
+abs!( = > #);
+"##]],
+ );
+}
+
+#[test]
+fn test_tt_with_composite_without_space() {
+ // Test macro input without any spaces
+ // See https://github.com/rust-lang/rust-analyzer/issues/6692
+ check(
+ r#"
+macro_rules! m { ($ op:tt, $j:path) => ( ok!(); ) }
+m!(==,Foo::Bool)
+"#,
+ expect![[r#"
+macro_rules! m { ($ op:tt, $j:path) => ( ok!(); ) }
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_underscore() {
+ check(
+ r#"
+macro_rules! m { ($_:tt) => { ok!(); } }
+m! { => }
+"#,
+ expect![[r#"
+macro_rules! m { ($_:tt) => { ok!(); } }
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_underscore_not_greedily() {
+ check(
+ r#"
+// `_` overlaps with `$a:ident` but rustc matches it under the `_` token.
+macro_rules! m1 {
+ ($($a:ident)* _) => { ok!(); }
+}
+m1![a b c d _];
+
+// `_ => ou` overlaps with `$a:expr => $b:ident` but rustc matches it under `_ => $c:expr`.
+macro_rules! m2 {
+ ($($a:expr => $b:ident)* _ => $c:expr) => { ok!(); }
+}
+m2![a => b c => d _ => ou]
+"#,
+ expect![[r#"
+// `_` overlaps with `$a:ident` but rustc matches it under the `_` token.
+macro_rules! m1 {
+ ($($a:ident)* _) => { ok!(); }
+}
+ok!();
+
+// `_ => ou` overlaps with `$a:expr => $b:ident` but rustc matches it under `_ => $c:expr`.
+macro_rules! m2 {
+ ($($a:expr => $b:ident)* _ => $c:expr) => { ok!(); }
+}
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_underscore_flavors() {
+ check(
+ r#"
+macro_rules! m1 { ($a:ty) => { ok!(); } }
+m1![_];
+
+macro_rules! m2 { ($a:lifetime) => { ok!(); } }
+m2!['_];
+"#,
+ expect![[r#"
+macro_rules! m1 { ($a:ty) => { ok!(); } }
+ok!();
+
+macro_rules! m2 { ($a:lifetime) => { ok!(); } }
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_vertical_bar_with_pat() {
+ check(
+ r#"
+macro_rules! m { (|$pat:pat| ) => { ok!(); } }
+m! { |x| }
+ "#,
+ expect![[r#"
+macro_rules! m { (|$pat:pat| ) => { ok!(); } }
+ok!();
+ "#]],
+ );
+}
+
+#[test]
+fn test_dollar_crate_lhs_is_not_meta() {
+ check(
+ r#"
+macro_rules! m {
+ ($crate) => { err!(); };
+ () => { ok!(); };
+}
+m!{}
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($crate) => { err!(); };
+ () => { ok!(); };
+}
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_lifetime() {
+ check(
+ r#"
+macro_rules! m {
+ ($lt:lifetime) => { struct Ref<$lt>{ s: &$ lt str } }
+}
+m! {'a}
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($lt:lifetime) => { struct Ref<$lt>{ s: &$ lt str } }
+}
+struct Ref<'a> {
+ s: &'a str
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_literal() {
+ check(
+ r#"
+macro_rules! m {
+ ($type:ty, $lit:literal) => { const VALUE: $type = $ lit; };
+}
+m!(u8, 0);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($type:ty, $lit:literal) => { const VALUE: $type = $ lit; };
+}
+const VALUE: u8 = 0;
+"#]],
+ );
+
+ check(
+ r#"
+macro_rules! m {
+ ($type:ty, $lit:literal) => { const VALUE: $ type = $ lit; };
+}
+m!(i32, -1);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($type:ty, $lit:literal) => { const VALUE: $ type = $ lit; };
+}
+const VALUE: i32 = -1;
+"#]],
+ );
+}
+
+#[test]
+fn test_boolean_is_ident() {
+ check(
+ r#"
+macro_rules! m {
+ ($lit0:literal, $lit1:literal) => { const VALUE: (bool, bool) = ($lit0, $lit1); };
+}
+m!(true, false);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($lit0:literal, $lit1:literal) => { const VALUE: (bool, bool) = ($lit0, $lit1); };
+}
+const VALUE: (bool, bool) = (true , false );
+"#]],
+ );
+}
+
+#[test]
+fn test_vis() {
+ check(
+ r#"
+macro_rules! m {
+ ($vis:vis $name:ident) => { $vis fn $name() {} }
+}
+m!(pub foo);
+m!(foo);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($vis:vis $name:ident) => { $vis fn $name() {} }
+}
+pub fn foo() {}
+fn foo() {}
+"#]],
+ );
+}
+
+#[test]
+fn test_inner_macro_rules() {
+ check(
+ r#"
+macro_rules! m {
+ ($a:ident, $b:ident, $c:tt) => {
+ macro_rules! inner {
+ ($bi:ident) => { fn $bi() -> u8 { $c } }
+ }
+
+ inner!($a);
+ fn $b() -> u8 { $c }
+ }
+}
+m!(x, y, 1);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($a:ident, $b:ident, $c:tt) => {
+ macro_rules! inner {
+ ($bi:ident) => { fn $bi() -> u8 { $c } }
+ }
+
+ inner!($a);
+ fn $b() -> u8 { $c }
+ }
+}
+macro_rules !inner {
+ ($bi: ident) = > {
+ fn $bi()-> u8 {
+ 1
+ }
+ }
+}
+inner!(x);
+fn y() -> u8 {
+ 1
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_expr_after_path_colons() {
+ check(
+ r#"
+macro_rules! m {
+ ($k:expr) => { fn f() { K::$k; } }
+}
+// +tree +errors
+m!(C("0"));
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($k:expr) => { fn f() { K::$k; } }
+}
+/* parse error: expected identifier */
+/* parse error: expected SEMICOLON */
+/* parse error: expected SEMICOLON */
+/* parse error: expected expression */
+fn f() {
+ K::(C("0"));
+}
+// MACRO_ITEMS@0..19
+// FN@0..19
+// FN_KW@0..2 "fn"
+// NAME@2..3
+// IDENT@2..3 "f"
+// PARAM_LIST@3..5
+// L_PAREN@3..4 "("
+// R_PAREN@4..5 ")"
+// BLOCK_EXPR@5..19
+// STMT_LIST@5..19
+// L_CURLY@5..6 "{"
+// EXPR_STMT@6..10
+// PATH_EXPR@6..10
+// PATH@6..10
+// PATH@6..7
+// PATH_SEGMENT@6..7
+// NAME_REF@6..7
+// IDENT@6..7 "K"
+// COLON2@7..9 "::"
+// ERROR@9..10
+// L_PAREN@9..10 "("
+// EXPR_STMT@10..16
+// CALL_EXPR@10..16
+// PATH_EXPR@10..11
+// PATH@10..11
+// PATH_SEGMENT@10..11
+// NAME_REF@10..11
+// IDENT@10..11 "C"
+// ARG_LIST@11..16
+// L_PAREN@11..12 "("
+// LITERAL@12..15
+// STRING@12..15 "\"0\""
+// R_PAREN@15..16 ")"
+// ERROR@16..17
+// R_PAREN@16..17 ")"
+// SEMICOLON@17..18 ";"
+// R_CURLY@18..19 "}"
+
+"#]],
+ );
+}
+
+#[test]
+fn test_match_is_not_greedy() {
+ check(
+ r#"
+macro_rules! foo {
+ ($($i:ident $(,)*),*) => {};
+}
+foo!(a,b);
+"#,
+ expect![[r#"
+macro_rules! foo {
+ ($($i:ident $(,)*),*) => {};
+}
+
+"#]],
+ );
+}
+
+#[test]
+fn expr_interpolation() {
+ check(
+ r#"
+macro_rules! m { ($expr:expr) => { map($expr) } }
+fn f() {
+ let _ = m!(x + foo);
+}
+"#,
+ expect![[r#"
+macro_rules! m { ($expr:expr) => { map($expr) } }
+fn f() {
+ let _ = map((x+foo));
+}
+"#]],
+ )
+}
+
+#[test]
+fn mbe_are_not_attributes() {
+ check(
+ r#"
+macro_rules! error {
+ () => {struct Bar}
+}
+
+#[error]
+struct Foo;
+"#,
+ expect![[r##"
+macro_rules! error {
+ () => {struct Bar}
+}
+
+#[error]
+struct Foo;
+"##]],
+ )
+}
+
+#[test]
+fn test_dollar_dollar() {
+ check(
+ r#"
+macro_rules! register_struct { ($Struct:ident) => {
+ macro_rules! register_methods { ($$($method:ident),*) => {
+ macro_rules! implement_methods { ($$$$($$val:expr),*) => {
+ struct $Struct;
+ impl $Struct { $$(fn $method() -> &'static [u32] { &[$$$$($$$$val),*] })*}
+ }}
+ }}
+}}
+
+register_struct!(Foo);
+register_methods!(alpha, beta);
+implement_methods!(1, 2, 3);
+"#,
+ expect![[r#"
+macro_rules! register_struct { ($Struct:ident) => {
+ macro_rules! register_methods { ($$($method:ident),*) => {
+ macro_rules! implement_methods { ($$$$($$val:expr),*) => {
+ struct $Struct;
+ impl $Struct { $$(fn $method() -> &'static [u32] { &[$$$$($$$$val),*] })*}
+ }}
+ }}
+}}
+
+macro_rules !register_methods {
+ ($($method: ident), *) = > {
+ macro_rules!implement_methods {
+ ($$($val: expr), *) = > {
+ struct Foo;
+ impl Foo {
+ $(fn $method()-> & 'static[u32] {
+ &[$$($$val), *]
+ }
+ )*
+ }
+ }
+ }
+ }
+}
+macro_rules !implement_methods {
+ ($($val: expr), *) = > {
+ struct Foo;
+ impl Foo {
+ fn alpha()-> & 'static[u32] {
+ &[$($val), *]
+ }
+ fn beta()-> & 'static[u32] {
+ &[$($val), *]
+ }
+ }
+ }
+}
+struct Foo;
+impl Foo {
+ fn alpha() -> & 'static[u32] {
+ &[1, 2, 3]
+ }
+ fn beta() -> & 'static[u32] {
+ &[1, 2, 3]
+ }
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_metavar_exprs() {
+ check(
+ r#"
+macro_rules! m {
+ ( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* );
+}
+const _: i32 = m!(a b c);
+ "#,
+ expect![[r#"
+macro_rules! m {
+ ( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* );
+}
+const _: i32 = -0--1--2;
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
new file mode 100644
index 000000000..bc162d0fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
@@ -0,0 +1,138 @@
+//! Test that `$var:expr` captures function correctly.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn unary_minus_is_a_literal() {
+ check(
+ r#"
+macro_rules! m { ($x:literal) => (literal!();); ($x:tt) => (not_a_literal!();); }
+m!(92);
+m!(-92);
+m!(-9.2);
+m!(--92);
+"#,
+ expect![[r#"
+macro_rules! m { ($x:literal) => (literal!();); ($x:tt) => (not_a_literal!();); }
+literal!();
+literal!();
+literal!();
+/* error: leftover tokens */not_a_literal!();
+"#]],
+ )
+}
+
+#[test]
+fn test_expand_bad_literal() {
+ check(
+ r#"
+macro_rules! m { ($i:literal) => {}; }
+m!(&k");
+"#,
+ expect![[r#"
+macro_rules! m { ($i:literal) => {}; }
+/* error: Failed to lower macro args to token tree */"#]],
+ );
+}
+
+#[test]
+fn test_empty_comments() {
+ check(
+ r#"
+macro_rules! m{ ($fmt:expr) => (); }
+m!(/**/);
+"#,
+ expect![[r#"
+macro_rules! m{ ($fmt:expr) => (); }
+/* error: expected Expr */
+"#]],
+ );
+}
+
+#[test]
+fn asi() {
+ // Thanks, Christopher!
+ //
+ // https://internals.rust-lang.org/t/understanding-decisions-behind-semicolons/15181/29
+ check(
+ r#"
+macro_rules! asi { ($($stmt:stmt)*) => ($($stmt)*); }
+
+fn main() {
+ asi! {
+ let a = 2
+ let b = 5
+ drop(b-a)
+ println!("{}", a+b)
+ }
+}
+"#,
+ expect![[r#"
+macro_rules! asi { ($($stmt:stmt)*) => ($($stmt)*); }
+
+fn main() {
+ let a = 2let b = 5drop(b-a)println!("{}", a+b)
+}
+"#]],
+ )
+}
+
+#[test]
+fn stmt_boundaries() {
+ // FIXME: this actually works OK under rustc.
+ check(
+ r#"
+macro_rules! m {
+ ($($s:stmt)*) => (stringify!($($s |)*);)
+}
+m!(;;92;let x = 92; loop {};);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($s:stmt)*) => (stringify!($($s |)*);)
+}
+stringify!(;
+|;
+|92|;
+|let x = 92|;
+|loop {}
+|;
+|);
+"#]],
+ );
+}
+
+#[test]
+fn range_patterns() {
+ // FIXME: rustc thinks there are three patterns here, not one.
+ check(
+ r#"
+macro_rules! m {
+ ($($p:pat)*) => (stringify!($($p |)*);)
+}
+m!(.. .. ..);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($p:pat)*) => (stringify!($($p |)*);)
+}
+stringify!(.. .. ..|);
+"#]],
+ );
+}
+
+#[test]
+fn trailing_vis() {
+ check(
+ r#"
+macro_rules! m { ($($i:ident)? $vis:vis) => () }
+m!(x pub);
+"#,
+ expect![[r#"
+macro_rules! m { ($($i:ident)? $vis:vis) => () }
+
+"#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs
new file mode 100644
index 000000000..8aff78408
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs
@@ -0,0 +1,154 @@
+//! Test for the syntax of macros themselves.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn well_formed_macro_rules() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident) => ();
+ ($(x),*) => ();
+ ($(x)_*) => ();
+ ($(x)i*) => ();
+ ($($i:ident)*) => ($_);
+ ($($true:ident)*) => ($true);
+ ($($false:ident)*) => ($false);
+ (double_dollar) => ($$);
+ ($) => (m!($););
+ ($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*);
+}
+m!($);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident) => ();
+ ($(x),*) => ();
+ ($(x)_*) => ();
+ ($(x)i*) => ();
+ ($($i:ident)*) => ($_);
+ ($($true:ident)*) => ($true);
+ ($($false:ident)*) => ($false);
+ (double_dollar) => ($$);
+ ($) => (m!($););
+ ($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*);
+}
+m!($);
+"#]],
+ )
+}
+
+#[test]
+fn malformed_macro_rules() {
+ check(
+ r#"
+macro_rules! i1 { invalid }
+i1!();
+
+macro_rules! e1 { $i:ident => () }
+e1!();
+macro_rules! e2 { ($i:ident) () }
+e2!();
+macro_rules! e3 { ($(i:ident)_) => () }
+e3!();
+
+macro_rules! f1 { ($i) => ($i) }
+f1!();
+macro_rules! f2 { ($i:) => ($i) }
+f2!();
+macro_rules! f3 { ($i:_) => () }
+f3!();
+
+macro_rules! m1 { ($$i) => () }
+m1!();
+macro_rules! m2 { () => ( ${invalid()} ) }
+m2!();
+"#,
+ expect![[r#"
+macro_rules! i1 { invalid }
+/* error: invalid macro definition: expected subtree */
+
+macro_rules! e1 { $i:ident => () }
+/* error: invalid macro definition: expected subtree */
+macro_rules! e2 { ($i:ident) () }
+/* error: invalid macro definition: expected `=` */
+macro_rules! e3 { ($(i:ident)_) => () }
+/* error: invalid macro definition: invalid repeat */
+
+macro_rules! f1 { ($i) => ($i) }
+/* error: invalid macro definition: missing fragment specifier */
+macro_rules! f2 { ($i:) => ($i) }
+/* error: invalid macro definition: missing fragment specifier */
+macro_rules! f3 { ($i:_) => () }
+/* error: invalid macro definition: missing fragment specifier */
+
+macro_rules! m1 { ($$i) => () }
+/* error: invalid macro definition: `$$` is not allowed on the pattern side */
+macro_rules! m2 { () => ( ${invalid()} ) }
+/* error: invalid macro definition: invalid metavariable expression */
+"#]],
+ )
+}
+
+#[test]
+fn test_rustc_issue_57597() {
+ // <https://github.com/rust-lang/rust/blob/master/src/test/ui/issues/issue-57597.rs>
+ check(
+ r#"
+macro_rules! m0 { ($($($i:ident)?)+) => {}; }
+macro_rules! m1 { ($($($i:ident)?)*) => {}; }
+macro_rules! m2 { ($($($i:ident)?)?) => {}; }
+macro_rules! m3 { ($($($($i:ident)?)?)?) => {}; }
+macro_rules! m4 { ($($($($i:ident)*)?)?) => {}; }
+macro_rules! m5 { ($($($($i:ident)?)*)?) => {}; }
+macro_rules! m6 { ($($($($i:ident)?)?)*) => {}; }
+macro_rules! m7 { ($($($($i:ident)*)*)?) => {}; }
+macro_rules! m8 { ($($($($i:ident)?)*)*) => {}; }
+macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; }
+macro_rules! mA { ($($($($i:ident)+)?)*) => {}; }
+macro_rules! mB { ($($($($i:ident)+)*)?) => {}; }
+
+m0!();
+m1!();
+m2!();
+m3!();
+m4!();
+m5!();
+m6!();
+m7!();
+m8!();
+m9!();
+mA!();
+mB!();
+ "#,
+ expect![[r#"
+macro_rules! m0 { ($($($i:ident)?)+) => {}; }
+macro_rules! m1 { ($($($i:ident)?)*) => {}; }
+macro_rules! m2 { ($($($i:ident)?)?) => {}; }
+macro_rules! m3 { ($($($($i:ident)?)?)?) => {}; }
+macro_rules! m4 { ($($($($i:ident)*)?)?) => {}; }
+macro_rules! m5 { ($($($($i:ident)?)*)?) => {}; }
+macro_rules! m6 { ($($($($i:ident)?)?)*) => {}; }
+macro_rules! m7 { ($($($($i:ident)*)*)?) => {}; }
+macro_rules! m8 { ($($($($i:ident)?)*)*) => {}; }
+macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; }
+macro_rules! mA { ($($($($i:ident)+)?)*) => {}; }
+macro_rules! mB { ($($($($i:ident)+)*)?) => {}; }
+
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
new file mode 100644
index 000000000..2dff4adf2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -0,0 +1,911 @@
+//! Real world regressions and issues, not particularly minimized.
+//!
+//! While it's OK to just dump large macros here, it's preferable to come up
+//! with a minimal example for the program and put a specific test to the parent
+//! directory.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn test_vec() {
+ check(
+ r#"
+macro_rules! vec {
+ ($($item:expr),*) => {{
+ let mut v = Vec::new();
+ $( v.push($item); )*
+ v
+ }};
+}
+fn main() {
+ vec!();
+ vec![1u32,2];
+}
+"#,
+ expect![[r#"
+macro_rules! vec {
+ ($($item:expr),*) => {{
+ let mut v = Vec::new();
+ $( v.push($item); )*
+ v
+ }};
+}
+fn main() {
+ {
+ let mut v = Vec::new();
+ v
+ };
+ {
+ let mut v = Vec::new();
+ v.push(1u32);
+ v.push(2);
+ v
+ };
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_winapi_struct() {
+ // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366
+
+ check(
+ r#"
+macro_rules! STRUCT {
+ ($(#[$attrs:meta])* struct $name:ident {
+ $($field:ident: $ftype:ty,)+
+ }) => (
+ #[repr(C)] #[derive(Copy)] $(#[$attrs])*
+ pub struct $name {
+ $(pub $field: $ftype,)+
+ }
+ impl Clone for $name {
+ #[inline]
+ fn clone(&self) -> $name { *self }
+ }
+ #[cfg(feature = "impl-default")]
+ impl Default for $name {
+ #[inline]
+ fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } }
+ }
+ );
+}
+
+// from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs
+STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}
+
+STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}
+"#,
+ expect![[r##"
+macro_rules! STRUCT {
+ ($(#[$attrs:meta])* struct $name:ident {
+ $($field:ident: $ftype:ty,)+
+ }) => (
+ #[repr(C)] #[derive(Copy)] $(#[$attrs])*
+ pub struct $name {
+ $(pub $field: $ftype,)+
+ }
+ impl Clone for $name {
+ #[inline]
+ fn clone(&self) -> $name { *self }
+ }
+ #[cfg(feature = "impl-default")]
+ impl Default for $name {
+ #[inline]
+ fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } }
+ }
+ );
+}
+
+#[repr(C)]
+#[derive(Copy)] pub struct D3DVSHADERCAPS2_0 {
+ pub Caps: u8,
+}
+impl Clone for D3DVSHADERCAPS2_0 {
+ #[inline] fn clone(&self ) -> D3DVSHADERCAPS2_0 {
+ *self
+ }
+}
+#[cfg(feature = "impl-default")] impl Default for D3DVSHADERCAPS2_0 {
+ #[inline] fn default() -> D3DVSHADERCAPS2_0 {
+ unsafe {
+ $crate::_core::mem::zeroed()
+ }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy)]
+#[cfg_attr(target_arch = "x86", repr(packed))] pub struct D3DCONTENTPROTECTIONCAPS {
+ pub Caps: u8,
+}
+impl Clone for D3DCONTENTPROTECTIONCAPS {
+ #[inline] fn clone(&self ) -> D3DCONTENTPROTECTIONCAPS {
+ *self
+ }
+}
+#[cfg(feature = "impl-default")] impl Default for D3DCONTENTPROTECTIONCAPS {
+ #[inline] fn default() -> D3DCONTENTPROTECTIONCAPS {
+ unsafe {
+ $crate::_core::mem::zeroed()
+ }
+ }
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_int_base() {
+ check(
+ r#"
+macro_rules! int_base {
+ ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl fmt::$Trait for $T {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ $Radix.fmt_int(*self as $U, f)
+ }
+ }
+ }
+}
+int_base!{Binary for isize as usize -> Binary}
+"#,
+ expect![[r##"
+macro_rules! int_base {
+ ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl fmt::$Trait for $T {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ $Radix.fmt_int(*self as $U, f)
+ }
+ }
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")] impl fmt::Binary for isize {
+ fn fmt(&self , f: &mut fmt::Formatter< '_>) -> fmt::Result {
+ Binary.fmt_int(*self as usize, f)
+ }
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_generate_pattern_iterators() {
+ // From <https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs>.
+ check(
+ r#"
+macro_rules! generate_pattern_iterators {
+ { double ended; with $(#[$common_stability_attribute:meta])*,
+ $forward_iterator:ident,
+ $reverse_iterator:ident, $iterty:ty
+ } => { ok!(); }
+}
+generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );
+"#,
+ expect![[r##"
+macro_rules! generate_pattern_iterators {
+ { double ended; with $(#[$common_stability_attribute:meta])*,
+ $forward_iterator:ident,
+ $reverse_iterator:ident, $iterty:ty
+ } => { ok!(); }
+}
+ok!();
+"##]],
+ );
+}
+
+#[test]
+fn test_impl_fn_for_zst() {
+ // From <https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs>.
+ check(
+ r#"
+macro_rules! impl_fn_for_zst {
+ {$( $( #[$attr: meta] )*
+ struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
+ |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty $body: block;
+ )+} => {$(
+ $( #[$attr] )*
+ struct $Name;
+
+ impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ $body
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )? FnMut<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call_mut(
+ &mut self,
+ ($( $arg, )*): ($( $ArgTy, )*)
+ ) -> $ReturnTy {
+ Fn::call(&*self, ($( $arg, )*))
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name {
+ type Output = $ReturnTy;
+
+ #[inline]
+ extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ Fn::call(&self, ($( $arg, )*))
+ }
+ }
+ )+}
+}
+
+impl_fn_for_zst ! {
+ #[derive(Clone)]
+ struct CharEscapeDebugContinue impl Fn = |c: char| -> char::EscapeDebug {
+ c.escape_debug_ext(false)
+ };
+
+ #[derive(Clone)]
+ struct CharEscapeUnicode impl Fn = |c: char| -> char::EscapeUnicode {
+ c.escape_unicode()
+ };
+
+ #[derive(Clone)]
+ struct CharEscapeDefault impl Fn = |c: char| -> char::EscapeDefault {
+ c.escape_default()
+ };
+}
+
+"#,
+ expect![[r##"
+macro_rules! impl_fn_for_zst {
+ {$( $( #[$attr: meta] )*
+ struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
+ |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty $body: block;
+ )+} => {$(
+ $( #[$attr] )*
+ struct $Name;
+
+ impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ $body
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )? FnMut<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call_mut(
+ &mut self,
+ ($( $arg, )*): ($( $ArgTy, )*)
+ ) -> $ReturnTy {
+ Fn::call(&*self, ($( $arg, )*))
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name {
+ type Output = $ReturnTy;
+
+ #[inline]
+ extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ Fn::call(&self, ($( $arg, )*))
+ }
+ }
+ )+}
+}
+
+#[derive(Clone)] struct CharEscapeDebugContinue;
+impl Fn<(char, )> for CharEscapeDebugContinue {
+ #[inline] extern "rust-call"fn call(&self , (c, ): (char, )) -> char::EscapeDebug { {
+ c.escape_debug_ext(false )
+ }
+ }
+}
+impl FnMut<(char, )> for CharEscapeDebugContinue {
+ #[inline] extern "rust-call"fn call_mut(&mut self , (c, ): (char, )) -> char::EscapeDebug {
+ Fn::call(&*self , (c, ))
+ }
+}
+impl FnOnce<(char, )> for CharEscapeDebugContinue {
+ type Output = char::EscapeDebug;
+ #[inline] extern "rust-call"fn call_once(self , (c, ): (char, )) -> char::EscapeDebug {
+ Fn::call(&self , (c, ))
+ }
+}
+#[derive(Clone)] struct CharEscapeUnicode;
+impl Fn<(char, )> for CharEscapeUnicode {
+ #[inline] extern "rust-call"fn call(&self , (c, ): (char, )) -> char::EscapeUnicode { {
+ c.escape_unicode()
+ }
+ }
+}
+impl FnMut<(char, )> for CharEscapeUnicode {
+ #[inline] extern "rust-call"fn call_mut(&mut self , (c, ): (char, )) -> char::EscapeUnicode {
+ Fn::call(&*self , (c, ))
+ }
+}
+impl FnOnce<(char, )> for CharEscapeUnicode {
+ type Output = char::EscapeUnicode;
+ #[inline] extern "rust-call"fn call_once(self , (c, ): (char, )) -> char::EscapeUnicode {
+ Fn::call(&self , (c, ))
+ }
+}
+#[derive(Clone)] struct CharEscapeDefault;
+impl Fn<(char, )> for CharEscapeDefault {
+ #[inline] extern "rust-call"fn call(&self , (c, ): (char, )) -> char::EscapeDefault { {
+ c.escape_default()
+ }
+ }
+}
+impl FnMut<(char, )> for CharEscapeDefault {
+ #[inline] extern "rust-call"fn call_mut(&mut self , (c, ): (char, )) -> char::EscapeDefault {
+ Fn::call(&*self , (c, ))
+ }
+}
+impl FnOnce<(char, )> for CharEscapeDefault {
+ type Output = char::EscapeDefault;
+ #[inline] extern "rust-call"fn call_once(self , (c, ): (char, )) -> char::EscapeDefault {
+ Fn::call(&self , (c, ))
+ }
+}
+
+"##]],
+ );
+}
+
+#[test]
+fn test_impl_nonzero_fmt() {
+ // From <https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12>.
+ check(
+ r#"
+macro_rules! impl_nonzero_fmt {
+ ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => { ok!(); }
+}
+impl_nonzero_fmt! {
+ #[stable(feature= "nonzero",since="1.28.0")]
+ (Debug, Display, Binary, Octal, LowerHex, UpperHex) for NonZeroU8
+}
+"#,
+ expect![[r##"
+macro_rules! impl_nonzero_fmt {
+ ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => { ok!(); }
+}
+ok!();
+"##]],
+ );
+}
+
+#[test]
+fn test_cfg_if_items() {
+ // From <https://github.com/rust-lang/rust/blob/33fe1131cadba69d317156847be9a402b89f11bb/src/libstd/macros.rs#L986>.
+ check(
+ r#"
+macro_rules! __cfg_if_items {
+ (($($not:meta,)*) ; ) => {};
+ (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
+ __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
+ }
+}
+__cfg_if_items! {
+ (rustdoc,);
+ ( () (
+ #[ cfg(any(target_os = "redox", unix))]
+ #[ stable(feature = "rust1", since = "1.0.0")]
+ pub use sys::ext as unix;
+
+ #[cfg(windows)]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub use sys::ext as windows;
+
+ #[cfg(any(target_os = "linux", target_os = "l4re"))]
+ pub mod linux;
+ )),
+}
+"#,
+ expect![[r#"
+macro_rules! __cfg_if_items {
+ (($($not:meta,)*) ; ) => {};
+ (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
+ __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
+ }
+}
+__cfg_if_items! {
+ (rustdoc, );
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_cfg_if_main() {
+ // From <https://github.com/rust-lang/rust/blob/3d211248393686e0f73851fc7548f6605220fbe1/src/libpanic_unwind/macros.rs#L9>.
+ check(
+ r#"
+macro_rules! cfg_if {
+ ($(if #[cfg($($meta:meta),*)] { $($it:item)* } )else* else { $($it2:item)* })
+ => {
+ __cfg_if_items! {
+ () ;
+ $( ( ($($meta),*) ($($it)*) ), )*
+ ( () ($($it2)*) ),
+ }
+ };
+
+ // Internal macro to Apply a cfg attribute to a list of items
+ (@__apply $m:meta, $($it:item)*) => { $(#[$m] $it)* };
+}
+
+cfg_if! {
+ if #[cfg(target_env = "msvc")] {
+ // no extra unwinder support needed
+ } else if #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))] {
+ // no unwinder on the system!
+ } else {
+ mod libunwind;
+ pub use libunwind::*;
+ }
+}
+
+cfg_if! {
+ @__apply cfg(all(not(any(not(any(target_os = "solaris", target_os = "illumos")))))),
+}
+"#,
+ expect![[r##"
+macro_rules! cfg_if {
+ ($(if #[cfg($($meta:meta),*)] { $($it:item)* } )else* else { $($it2:item)* })
+ => {
+ __cfg_if_items! {
+ () ;
+ $( ( ($($meta),*) ($($it)*) ), )*
+ ( () ($($it2)*) ),
+ }
+ };
+
+ // Internal macro to Apply a cfg attribute to a list of items
+ (@__apply $m:meta, $($it:item)*) => { $(#[$m] $it)* };
+}
+
+__cfg_if_items! {
+ ();
+ ((target_env = "msvc")()), ((all(target_arch = "wasm32", not(target_os = "emscripten")))()), (()(mod libunwind;
+ pub use libunwind::*;
+ )),
+}
+
+
+"##]],
+ );
+}
+
+#[test]
+fn test_proptest_arbitrary() {
+ // From <https://github.com/AltSysrq/proptest/blob/d1c4b049337d2f75dd6f49a095115f7c532e5129/proptest/src/arbitrary/macros.rs#L16>.
+ check(
+ r#"
+macro_rules! arbitrary {
+ ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
+ $args: ident => $logic: expr) => {
+ impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
+ type Parameters = $params;
+ type Strategy = $strat;
+ fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
+ $logic
+ }
+ }
+ };
+}
+
+arbitrary!(
+ [A:Arbitrary]
+ Vec<A> ,
+ VecStrategy<A::Strategy>,
+ RangedParams1<A::Parameters>;
+ args => {
+ let product_unpack![range, a] = args;
+ vec(any_with::<A>(a), range)
+ }
+);
+"#,
+ expect![[r#"
+macro_rules! arbitrary {
+ ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
+ $args: ident => $logic: expr) => {
+ impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
+ type Parameters = $params;
+ type Strategy = $strat;
+ fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
+ $logic
+ }
+ }
+ };
+}
+
+impl <A: Arbitrary> $crate::arbitrary::Arbitrary for Vec<A> {
+ type Parameters = RangedParams1<A::Parameters>;
+ type Strategy = VecStrategy<A::Strategy>;
+ fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { {
+ let product_unpack![range, a] = args;
+ vec(any_with::<A>(a), range)
+ }
+ }
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_old_ridl() {
+    // This is from winapi 2.8, which does not have a link on GitHub.
+ check(
+ r#"
+#[macro_export]
+macro_rules! RIDL {
+ (interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
+ {$(
+ fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty
+ ),+}
+ ) => {
+ impl $interface {
+ $(pub unsafe fn $method(&mut self) -> $rtr {
+ ((*self.lpVtbl).$method)(self $(,$p)*)
+ })+
+ }
+ };
+}
+
+RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) {
+ fn GetDataSize(&mut self) -> UINT
+}}
+"#,
+ expect![[r##"
+#[macro_export]
+macro_rules! RIDL {
+ (interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
+ {$(
+ fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty
+ ),+}
+ ) => {
+ impl $interface {
+ $(pub unsafe fn $method(&mut self) -> $rtr {
+ ((*self.lpVtbl).$method)(self $(,$p)*)
+ })+
+ }
+ };
+}
+
+impl ID3D11Asynchronous {
+ pub unsafe fn GetDataSize(&mut self ) -> UINT {
+ ((*self .lpVtbl).GetDataSize)(self )
+ }
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_quick_error() {
+ check(
+ r#"
+macro_rules! quick_error {
+ (SORT [enum $name:ident $( #[$meta:meta] )*]
+ items [$($( #[$imeta:meta] )*
+ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*]
+ {$( $ifuncs:tt )*} )* ]
+ buf [ ]
+ queue [ ]
+ ) => {
+ quick_error!(ENUMINITION [enum $name $( #[$meta] )*]
+ body []
+ queue [$(
+ $( #[$imeta] )*
+ =>
+ $iitem: $imode [$( $ivar: $ityp ),*]
+ )*]
+ );
+ };
+}
+quick_error ! (
+ SORT
+ [enum Wrapped #[derive(Debug)]]
+ items [
+ => One: UNIT [] {}
+ => Two: TUPLE [s :String] {display ("two: {}" , s) from ()} ]
+ buf [ ]
+ queue [ ]
+);
+
+"#,
+ expect![[r##"
+macro_rules! quick_error {
+ (SORT [enum $name:ident $( #[$meta:meta] )*]
+ items [$($( #[$imeta:meta] )*
+ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*]
+ {$( $ifuncs:tt )*} )* ]
+ buf [ ]
+ queue [ ]
+ ) => {
+ quick_error!(ENUMINITION [enum $name $( #[$meta] )*]
+ body []
+ queue [$(
+ $( #[$imeta] )*
+ =>
+ $iitem: $imode [$( $ivar: $ityp ),*]
+ )*]
+ );
+ };
+}
+quick_error!(ENUMINITION[enum Wrapped#[derive(Debug)]]body[]queue[ = > One: UNIT[] = > Two: TUPLE[s: String]]);
+
+"##]],
+ )
+}
+
+#[test]
+fn test_empty_repeat_vars_in_empty_repeat_vars() {
+ check(
+ r#"
+macro_rules! delegate_impl {
+ ([$self_type:ident, $self_wrap:ty, $self_map:ident]
+ pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
+
+ $(
+ @escape [type $assoc_name_ext:ident]
+ )*
+ $(
+ @section type
+ $(
+ $(#[$_assoc_attr:meta])*
+ type $assoc_name:ident $(: $assoc_bound:ty)*;
+ )+
+ )*
+ $(
+ @section self
+ $(
+ $(#[$_method_attr:meta])*
+ fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) -> $mret:ty;
+ )+
+ )*
+ $(
+ @section nodelegate
+ $($tail:tt)*
+ )*
+ }) => {
+ impl<> $name for $self_wrap where $self_type: $name {
+ $(
+ $(
+ fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret {
+ $self_map!(self).$method_name($($marg),*)
+ }
+ )*
+ )*
+ }
+ }
+}
+delegate_impl ! {
+ [G, &'a mut G, deref] pub trait Data: GraphBase {@section type type NodeWeight;}
+}
+"#,
+ expect![[r##"
+macro_rules! delegate_impl {
+ ([$self_type:ident, $self_wrap:ty, $self_map:ident]
+ pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
+
+ $(
+ @escape [type $assoc_name_ext:ident]
+ )*
+ $(
+ @section type
+ $(
+ $(#[$_assoc_attr:meta])*
+ type $assoc_name:ident $(: $assoc_bound:ty)*;
+ )+
+ )*
+ $(
+ @section self
+ $(
+ $(#[$_method_attr:meta])*
+ fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) -> $mret:ty;
+ )+
+ )*
+ $(
+ @section nodelegate
+ $($tail:tt)*
+ )*
+ }) => {
+ impl<> $name for $self_wrap where $self_type: $name {
+ $(
+ $(
+ fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret {
+ $self_map!(self).$method_name($($marg),*)
+ }
+ )*
+ )*
+ }
+ }
+}
+impl <> Data for & 'amut G where G: Data {}
+"##]],
+ );
+}
+
+#[test]
+fn test_issue_2520() {
+ check(
+ r#"
+macro_rules! my_macro {
+ {
+ ( $(
+ $( [] $sname:ident : $stype:ty )?
+ $( [$expr:expr] $nname:ident : $ntype:ty )?
+ ),* )
+ } => {ok!(
+ Test {
+ $(
+ $( $sname, )?
+ )*
+ }
+ );};
+}
+
+my_macro! {
+ ([] p1: u32, [|_| S0K0] s: S0K0, [] k0: i32)
+}
+ "#,
+ expect![[r#"
+macro_rules! my_macro {
+ {
+ ( $(
+ $( [] $sname:ident : $stype:ty )?
+ $( [$expr:expr] $nname:ident : $ntype:ty )?
+ ),* )
+ } => {ok!(
+ Test {
+ $(
+ $( $sname, )?
+ )*
+ }
+ );};
+}
+
+ok!(Test {
+ p1, k0,
+}
+);
+ "#]],
+ );
+}
+
+#[test]
+fn test_repeat_bad_var() {
+ // FIXME: the second rule of the macro should be removed and an error about
+ // `$( $c )+` raised
+ check(
+ r#"
+macro_rules! foo {
+ ($( $b:ident )+) => { ok!($( $c )+); };
+ ($( $b:ident )+) => { ok!($( $b )+); }
+}
+
+foo!(b0 b1);
+"#,
+ expect![[r#"
+macro_rules! foo {
+ ($( $b:ident )+) => { ok!($( $c )+); };
+ ($( $b:ident )+) => { ok!($( $b )+); }
+}
+
+ok!(b0 b1);
+"#]],
+ );
+}
+
+#[test]
+fn test_issue_3861() {
+    // This should (and does) produce a parse error. It used to loop
+    // infinitely instead.
+ check(
+ r#"
+macro_rules! rgb_color {
+ ($p:expr, $t:ty) => {
+ pub fn new() {
+ let _ = 0 as $t << $p;
+ }
+ };
+}
+// +tree +errors
+rgb_color!(8 + 8, u32);
+"#,
+ expect![[r#"
+macro_rules! rgb_color {
+ ($p:expr, $t:ty) => {
+ pub fn new() {
+ let _ = 0 as $t << $p;
+ }
+ };
+}
+/* parse error: expected type */
+/* parse error: expected R_PAREN */
+/* parse error: expected R_ANGLE */
+/* parse error: expected COMMA */
+/* parse error: expected R_ANGLE */
+/* parse error: expected SEMICOLON */
+/* parse error: expected SEMICOLON */
+/* parse error: expected expression */
+pub fn new() {
+ let _ = 0as u32<<(8+8);
+}
+// MACRO_ITEMS@0..31
+// FN@0..31
+// VISIBILITY@0..3
+// PUB_KW@0..3 "pub"
+// FN_KW@3..5 "fn"
+// NAME@5..8
+// IDENT@5..8 "new"
+// PARAM_LIST@8..10
+// L_PAREN@8..9 "("
+// R_PAREN@9..10 ")"
+// BLOCK_EXPR@10..31
+// STMT_LIST@10..31
+// L_CURLY@10..11 "{"
+// LET_STMT@11..27
+// LET_KW@11..14 "let"
+// WILDCARD_PAT@14..15
+// UNDERSCORE@14..15 "_"
+// EQ@15..16 "="
+// CAST_EXPR@16..27
+// LITERAL@16..17
+// INT_NUMBER@16..17 "0"
+// AS_KW@17..19 "as"
+// PATH_TYPE@19..27
+// PATH@19..27
+// PATH_SEGMENT@19..27
+// NAME_REF@19..22
+// IDENT@19..22 "u32"
+// GENERIC_ARG_LIST@22..27
+// L_ANGLE@22..23 "<"
+// TYPE_ARG@23..27
+// DYN_TRAIT_TYPE@23..27
+// TYPE_BOUND_LIST@23..27
+// TYPE_BOUND@23..26
+// PATH_TYPE@23..26
+// PATH@23..26
+// PATH_SEGMENT@23..26
+// L_ANGLE@23..24 "<"
+// PAREN_TYPE@24..26
+// L_PAREN@24..25 "("
+// ERROR@25..26
+// INT_NUMBER@25..26 "8"
+// PLUS@26..27 "+"
+// EXPR_STMT@27..28
+// LITERAL@27..28
+// INT_NUMBER@27..28 "8"
+// ERROR@28..29
+// R_PAREN@28..29 ")"
+// SEMICOLON@29..30 ";"
+// R_CURLY@30..31 "}"
+
+"#]],
+ );
+}
+
+#[test]
+fn test_no_space_after_semi_colon() {
+ check(
+ r#"
+macro_rules! with_std {
+ ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*)
+}
+
+with_std! {mod m;mod f;}
+"#,
+ expect![[r##"
+macro_rules! with_std {
+ ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*)
+}
+
+#[cfg(feature = "std")] mod m;
+#[cfg(feature = "std")] mod f;
+"##]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
new file mode 100644
index 000000000..0710b1ac3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
@@ -0,0 +1,200 @@
+//! Unlike rustc, rust-analyzer's syntax tree are not "made of" token trees.
+//! Rather, token trees are an explicit bridge between the parser and
+//! (procedural or declarative) macros.
+//!
+//! This module tests tt <-> syntax tree conversion specifically. In particular,
+//! it, among other things, checks that we convert `tt` to the right kind of
+//! syntax node depending on the macro call-site.
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn round_trips_compound_tokens() {
+ check(
+ r#"
+macro_rules! m {
+ () => { type qual: ::T = qual::T; }
+}
+m!();
+"#,
+ expect![[r#"
+macro_rules! m {
+ () => { type qual: ::T = qual::T; }
+}
+type qual: ::T = qual::T;
+"#]],
+ )
+}
+
+#[test]
+fn round_trips_literals() {
+ check(
+ r#"
+macro_rules! m {
+ () => {
+ let _ = 'c';
+ let _ = 1000;
+ let _ = 12E+99_f64;
+ let _ = "rust1";
+ let _ = -92;
+ }
+}
+fn f() {
+ m!()
+}
+"#,
+ expect![[r#"
+macro_rules! m {
+ () => {
+ let _ = 'c';
+ let _ = 1000;
+ let _ = 12E+99_f64;
+ let _ = "rust1";
+ let _ = -92;
+ }
+}
+fn f() {
+ let _ = 'c';
+ let _ = 1000;
+ let _ = 12E+99_f64;
+ let _ = "rust1";
+ let _ = -92;
+}
+"#]],
+ );
+}
+
+#[test]
+fn roundtrip_lifetime() {
+ check(
+ r#"
+macro_rules! m {
+ ($($t:tt)*) => { $($t)*}
+}
+m!(static bar: &'static str = "hello";);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($t:tt)*) => { $($t)*}
+}
+static bar: & 'static str = "hello";
+"#]],
+ );
+}
+
+#[test]
+fn broken_parenthesis_sequence() {
+ check(
+ r#"
+macro_rules! m1 { ($x:ident) => { ($x } }
+macro_rules! m2 { ($x:ident) => {} }
+
+m1!();
+m2!(x
+"#,
+ expect![[r#"
+macro_rules! m1 { ($x:ident) => { ($x } }
+macro_rules! m2 { ($x:ident) => {} }
+
+/* error: invalid macro definition: expected subtree */
+/* error: Failed to lower macro args to token tree */
+"#]],
+ )
+}
+
+#[test]
+fn expansion_does_not_parse_as_expression() {
+ check(
+ r#"
+macro_rules! stmts {
+ () => { fn foo() {} }
+}
+
+fn f() { let _ = stmts!/*+errors*/(); }
+"#,
+ expect![[r#"
+macro_rules! stmts {
+ () => { fn foo() {} }
+}
+
+fn f() { let _ = /* parse error: expected expression */
+fn foo() {}; }
+"#]],
+ )
+}
+
+#[test]
+fn broken_pat() {
+ check(
+ r#"
+macro_rules! m1 { () => (Some(x) left overs) }
+macro_rules! m2 { () => ($) }
+
+fn main() {
+ let m1!() = ();
+ let m2!/*+errors*/() = ();
+}
+"#,
+ expect![[r#"
+macro_rules! m1 { () => (Some(x) left overs) }
+macro_rules! m2 { () => ($) }
+
+fn main() {
+ let Some(x)left overs = ();
+ let /* parse error: expected pattern */
+$ = ();
+}
+"#]],
+ )
+}
+
+#[test]
+fn float_literal_in_tt() {
+ check(
+ r#"
+macro_rules! constant {
+ ($( $ret:expr; )*) => {};
+}
+macro_rules! float_const_impl {
+ () => ( constant!(0.3; 3.3;); );
+}
+float_const_impl! {}
+"#,
+ expect![[r#"
+macro_rules! constant {
+ ($( $ret:expr; )*) => {};
+}
+macro_rules! float_const_impl {
+ () => ( constant!(0.3; 3.3;); );
+}
+constant!(0.3;
+3.3;
+);
+"#]],
+ );
+}
+
+#[test]
+fn float_literal_in_output() {
+ check(
+ r#"
+macro_rules! constant {
+ ($e:expr ;) => {$e};
+}
+
+const _: () = constant!(0.0;);
+const _: () = constant!(0.;);
+const _: () = constant!(0e0;);
+"#,
+ expect![[r#"
+macro_rules! constant {
+ ($e:expr ;) => {$e};
+}
+
+const _: () = 0.0;
+const _: () = 0.;
+const _: () = 0e0;
+"#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
new file mode 100644
index 000000000..72c44a0fb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -0,0 +1,130 @@
+//! Tests for user-defined procedural macros.
+//!
+//! Note `//- proc_macros: identity` fixture metas in tests -- we don't use real
+//! proc-macros here, as that would be slow. Instead, we use several hard-coded
+//! in-memory macros.
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn attribute_macro_attr_censoring() {
+ cov_mark::check!(attribute_macro_attr_censoring);
+ check(
+ r#"
+//- proc_macros: identity
+#[attr1] #[proc_macros::identity] #[attr2]
+struct S;
+"#,
+ expect![[r##"
+#[attr1] #[proc_macros::identity] #[attr2]
+struct S;
+
+#[attr1]
+#[attr2] struct S;"##]],
+ );
+}
+
+#[test]
+fn derive_censoring() {
+ cov_mark::check!(derive_censoring);
+ check(
+ r#"
+//- proc_macros: derive_identity
+//- minicore:derive
+#[attr1]
+#[derive(Foo)]
+#[derive(proc_macros::DeriveIdentity)]
+#[derive(Bar)]
+#[attr2]
+struct S;
+"#,
+ expect![[r##"
+#[attr1]
+#[derive(Foo)]
+#[derive(proc_macros::DeriveIdentity)]
+#[derive(Bar)]
+#[attr2]
+struct S;
+
+#[attr1]
+#[derive(Bar)]
+#[attr2] struct S;"##]],
+ );
+}
+
+#[test]
+fn attribute_macro_syntax_completion_1() {
+ // this is just the case where the input is actually valid
+ check(
+ r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+"#,
+ expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+
+fn foo() {
+ bar.baz();
+ blub
+}"##]],
+ );
+}
+
+#[test]
+fn attribute_macro_syntax_completion_2() {
+ // common case of dot completion while typing
+ check(
+ r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+"#,
+ expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+
+fn foo() {
+ bar. ;
+ blub
+}"##]],
+ );
+}
+
+#[test]
+fn float_parsing_panic() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
+ check(
+ r#"
+//- proc_macros: identity
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+id /*+errors*/! {
+ #[proc_macros::identity]
+ impl Foo for WrapBj {
+ async fn foo(&self) {
+ self.0. id().await;
+ }
+ }
+}
+"#,
+ expect![[r##"
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+/* parse error: expected SEMICOLON */
+#[proc_macros::identity] impl Foo for WrapBj {
+ async fn foo(&self ) {
+ self .0.id().await ;
+ }
+}
+"##]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
new file mode 100644
index 000000000..6eb530ecc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -0,0 +1,545 @@
+//! This module implements import-resolution/macro expansion algorithm.
+//!
+//! The result of this module is `DefMap`: a data structure which contains:
+//!
+//! * a tree of modules for the crate
+//! * for each module, a set of items visible in the module (directly declared
+//! or imported)
+//!
+//! Note that `DefMap` contains fully macro expanded code.
+//!
+//! Computing `DefMap` can be partitioned into several logically
+//! independent "phases". The phases are mutually recursive though, there's no
+//! strict ordering.
+//!
+//! ## Collecting RawItems
+//!
+//! This happens in the `raw` module, which parses a single source file into a
+//! set of top-level items. Nested imports are desugared to flat imports in this
+//! phase. Macro calls are represented as a triple of (Path, Option<Name>,
+//! TokenTree).
+//!
+//! ## Collecting Modules
+//!
+//! This happens in the `collector` module. In this phase, we recursively walk
+//! the tree of modules, collect raw items from submodules, populate module scopes
+//! with defined items (so, we assign item ids in this phase) and record the set
+//! of unresolved imports and macros.
+//!
+//! While we walk the tree of modules, we also record macro_rules definitions and
+//! expand calls to macro_rules defined macros.
+//!
+//! ## Resolving Imports
+//!
+//! We maintain a list of currently unresolved imports. On every iteration, we
+//! try to resolve some imports from this list. If the import is resolved, we
+//! record it, by adding an item to current module scope and, if necessary, by
+//! recursively populating glob imports.
+//!
+//! ## Resolving Macros
+//!
+//! macro_rules from the same crate use a global mutable namespace. We expand
+//! them immediately, when we collect modules.
+//!
+//! Macros from other crates (including proc-macros) can be used with
+//! `foo::bar!` syntax. We handle them similarly to imports. There's a list of
+//! unexpanded macros. On every iteration, we try to resolve each macro call
+//! path and, upon success, we run macro expansion and "collect module" phase on
+//! the result.
+
+pub mod attr_resolution;
+pub mod proc_macro;
+pub mod diagnostics;
+mod collector;
+mod mod_resolution;
+mod path_resolution;
+
+#[cfg(test)]
+mod tests;
+
+use std::{cmp::Ord, ops::Deref, sync::Arc};
+
+use base_db::{CrateId, Edition, FileId};
+use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId};
+use itertools::Itertools;
+use la_arena::Arena;
+use profile::Count;
+use rustc_hash::FxHashMap;
+use stdx::format_to;
+use syntax::{ast, SmolStr};
+
+use crate::{
+ db::DefDatabase,
+ item_scope::{BuiltinShadowMode, ItemScope},
+ item_tree::{ItemTreeId, Mod, TreeId},
+ nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode},
+ path::ModPath,
+ per_ns::PerNs,
+ visibility::Visibility,
+ AstId, BlockId, BlockLoc, FunctionId, LocalModuleId, MacroId, ModuleId, ProcMacroId,
+};
+
+/// Contains the results of (early) name resolution.
+///
+/// A `DefMap` stores the module tree and the definitions that are in scope in every module after
+/// item-level macros have been expanded.
+///
+/// Every crate has a primary `DefMap` whose root is the crate's main file (`main.rs`/`lib.rs`),
+/// computed by the `crate_def_map` query. Additionally, every block expression introduces the
+/// opportunity to write arbitrary item and module hierarchies, and thus gets its own `DefMap` that
+/// is computed by the `block_def_map` query.
+#[derive(Debug, PartialEq, Eq)]
+pub struct DefMap {
+ _c: Count<Self>,
+ block: Option<BlockInfo>,
+ root: LocalModuleId,
+ modules: Arena<ModuleData>,
+ krate: CrateId,
+ /// The prelude module for this crate. This either comes from an import
+ /// marked with the `prelude_import` attribute, or (in the normal case) from
+ /// a dependency (`std` or `core`).
+ prelude: Option<ModuleId>,
+ extern_prelude: FxHashMap<Name, ModuleId>,
+
+ /// Side table for resolving derive helpers.
+ exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
+ fn_proc_macro_mapping: FxHashMap<FunctionId, ProcMacroId>,
+ /// The error that occurred when failing to load the proc-macro dll.
+ proc_macro_loading_error: Option<Box<str>>,
+ /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
+ /// attributes.
+ derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
+
+ /// Custom attributes registered with `#![register_attr]`.
+ registered_attrs: Vec<SmolStr>,
+ /// Custom tool modules registered with `#![register_tool]`.
+ registered_tools: Vec<SmolStr>,
+
+ edition: Edition,
+ recursion_limit: Option<u32>,
+ diagnostics: Vec<DefDiagnostic>,
+}
+
+/// For `DefMap`s computed for a block expression, this stores its location in the parent map.
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+struct BlockInfo {
+ /// The `BlockId` this `DefMap` was created from.
+ block: BlockId,
+ /// The containing module.
+ parent: ModuleId,
+}
+
+impl std::ops::Index<LocalModuleId> for DefMap {
+ type Output = ModuleData;
+ fn index(&self, id: LocalModuleId) -> &ModuleData {
+ &self.modules[id]
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub enum ModuleOrigin {
+ CrateRoot {
+ definition: FileId,
+ },
+    /// Note that non-inline modules, by definition, live inside a non-macro file.
+ File {
+ is_mod_rs: bool,
+ declaration: AstId<ast::Module>,
+ declaration_tree_id: ItemTreeId<Mod>,
+ definition: FileId,
+ },
+ Inline {
+ definition_tree_id: ItemTreeId<Mod>,
+ definition: AstId<ast::Module>,
+ },
+ /// Pseudo-module introduced by a block scope (contains only inner items).
+ BlockExpr {
+ block: AstId<ast::BlockExpr>,
+ },
+}
+
+impl ModuleOrigin {
+ pub fn declaration(&self) -> Option<AstId<ast::Module>> {
+ match self {
+ ModuleOrigin::File { declaration: module, .. }
+ | ModuleOrigin::Inline { definition: module, .. } => Some(*module),
+ ModuleOrigin::CrateRoot { .. } | ModuleOrigin::BlockExpr { .. } => None,
+ }
+ }
+
+ pub fn file_id(&self) -> Option<FileId> {
+ match self {
+ ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
+ Some(*definition)
+ }
+ _ => None,
+ }
+ }
+
+ pub fn is_inline(&self) -> bool {
+ match self {
+ ModuleOrigin::Inline { .. } | ModuleOrigin::BlockExpr { .. } => true,
+ ModuleOrigin::CrateRoot { .. } | ModuleOrigin::File { .. } => false,
+ }
+ }
+
+ /// Returns a node which defines this module.
+ /// That is, a file or a `mod foo {}` with items.
+ fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
+ match self {
+ ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
+ let file_id = *definition;
+ let sf = db.parse(file_id).tree();
+ InFile::new(file_id.into(), ModuleSource::SourceFile(sf))
+ }
+ ModuleOrigin::Inline { definition, .. } => InFile::new(
+ definition.file_id,
+ ModuleSource::Module(definition.to_node(db.upcast())),
+ ),
+ ModuleOrigin::BlockExpr { block } => {
+ InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db.upcast())))
+ }
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ModuleData {
+ /// Where does this module come from?
+ pub origin: ModuleOrigin,
+ /// Declared visibility of this module.
+ pub visibility: Visibility,
+
+ pub parent: Option<LocalModuleId>,
+ pub children: FxHashMap<Name, LocalModuleId>,
+ pub scope: ItemScope,
+}
+
+impl DefMap {
+ pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
+ let _p = profile::span("crate_def_map_query").detail(|| {
+ db.crate_graph()[krate].display_name.as_deref().unwrap_or_default().to_string()
+ });
+
+ let crate_graph = db.crate_graph();
+
+ let edition = crate_graph[krate].edition;
+ let origin = ModuleOrigin::CrateRoot { definition: crate_graph[krate].root_file_id };
+ let def_map = DefMap::empty(krate, edition, ModuleData::new(origin, Visibility::Public));
+ let def_map = collector::collect_defs(
+ db,
+ def_map,
+ TreeId::new(crate_graph[krate].root_file_id.into(), None),
+ );
+
+ Arc::new(def_map)
+ }
+
+ pub(crate) fn block_def_map_query(
+ db: &dyn DefDatabase,
+ block_id: BlockId,
+ ) -> Option<Arc<DefMap>> {
+ let block: BlockLoc = db.lookup_intern_block(block_id);
+
+ let tree_id = TreeId::new(block.ast_id.file_id, Some(block_id));
+ let item_tree = tree_id.item_tree(db);
+ if item_tree.top_level_items().is_empty() {
+ return None;
+ }
+
+ let parent_map = block.module.def_map(db);
+ let krate = block.module.krate;
+ let local_id = LocalModuleId::from_raw(la_arena::RawIdx::from(0));
+ // NB: we use `None` as block here, which would be wrong for implicit
+ // modules declared by blocks with items. At the moment, we don't use
+ // this visibility for anything outside IDE, so that's probably OK.
+ let visibility = Visibility::Module(ModuleId { krate, local_id, block: None });
+ let module_data =
+ ModuleData::new(ModuleOrigin::BlockExpr { block: block.ast_id }, visibility);
+
+ let mut def_map = DefMap::empty(krate, parent_map.edition, module_data);
+ def_map.block = Some(BlockInfo { block: block_id, parent: block.module });
+
+ let def_map = collector::collect_defs(db, def_map, tree_id);
+ Some(Arc::new(def_map))
+ }
+
+ fn empty(krate: CrateId, edition: Edition, module_data: ModuleData) -> DefMap {
+ let mut modules: Arena<ModuleData> = Arena::default();
+ let root = modules.alloc(module_data);
+
+ DefMap {
+ _c: Count::new(),
+ block: None,
+ krate,
+ edition,
+ recursion_limit: None,
+ extern_prelude: FxHashMap::default(),
+ exported_derives: FxHashMap::default(),
+ fn_proc_macro_mapping: FxHashMap::default(),
+ proc_macro_loading_error: None,
+ derive_helpers_in_scope: FxHashMap::default(),
+ prelude: None,
+ root,
+ modules,
+ registered_attrs: Vec::new(),
+ registered_tools: Vec::new(),
+ diagnostics: Vec::new(),
+ }
+ }
+
+ pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
+ self.modules
+ .iter()
+ .filter(move |(_id, data)| data.origin.file_id() == Some(file_id))
+ .map(|(id, _data)| id)
+ }
+
+ pub fn modules(&self) -> impl Iterator<Item = (LocalModuleId, &ModuleData)> + '_ {
+ self.modules.iter()
+ }
+
+ pub fn derive_helpers_in_scope(
+ &self,
+ id: AstId<ast::Adt>,
+ ) -> Option<&[(Name, MacroId, MacroCallId)]> {
+ self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref)
+ }
+
+ pub fn registered_tools(&self) -> &[SmolStr] {
+ &self.registered_tools
+ }
+
+ pub fn registered_attrs(&self) -> &[SmolStr] {
+ &self.registered_attrs
+ }
+
+ pub fn root(&self) -> LocalModuleId {
+ self.root
+ }
+
+ pub fn fn_as_proc_macro(&self, id: FunctionId) -> Option<ProcMacroId> {
+ self.fn_proc_macro_mapping.get(&id).copied()
+ }
+
+ pub fn proc_macro_loading_error(&self) -> Option<&str> {
+ self.proc_macro_loading_error.as_deref()
+ }
+
+ pub(crate) fn krate(&self) -> CrateId {
+ self.krate
+ }
+
+ pub(crate) fn block_id(&self) -> Option<BlockId> {
+ self.block.as_ref().map(|block| block.block)
+ }
+
+ pub(crate) fn prelude(&self) -> Option<ModuleId> {
+ self.prelude
+ }
+
+ pub(crate) fn extern_prelude(&self) -> impl Iterator<Item = (&Name, &ModuleId)> + '_ {
+ self.extern_prelude.iter()
+ }
+
+ pub fn module_id(&self, local_id: LocalModuleId) -> ModuleId {
+ let block = self.block.as_ref().map(|b| b.block);
+ ModuleId { krate: self.krate, local_id, block }
+ }
+
+ pub(crate) fn crate_root(&self, db: &dyn DefDatabase) -> ModuleId {
+ self.with_ancestor_maps(db, self.root, &mut |def_map, _module| {
+ if def_map.block.is_none() { Some(def_map.module_id(def_map.root)) } else { None }
+ })
+ .expect("DefMap chain without root")
+ }
+
+ pub(crate) fn resolve_path(
+ &self,
+ db: &dyn DefDatabase,
+ original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> (PerNs, Option<usize>) {
+ let res =
+ self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow);
+ (res.resolved_def, res.segment_index)
+ }
+
+ pub(crate) fn resolve_path_locally(
+ &self,
+ db: &dyn DefDatabase,
+ original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> (PerNs, Option<usize>) {
+ let res = self.resolve_path_fp_with_macro_single(
+ db,
+ ResolveMode::Other,
+ original_module,
+ path,
+ shadow,
+ );
+ (res.resolved_def, res.segment_index)
+ }
+
+ /// Ascends the `DefMap` hierarchy and calls `f` with every `DefMap` and containing module.
+ ///
+ /// If `f` returns `Some(val)`, iteration is stopped and `Some(val)` is returned. If `f` returns
+ /// `None`, iteration continues.
+ pub fn with_ancestor_maps<T>(
+ &self,
+ db: &dyn DefDatabase,
+ local_mod: LocalModuleId,
+ f: &mut dyn FnMut(&DefMap, LocalModuleId) -> Option<T>,
+ ) -> Option<T> {
+ if let Some(it) = f(self, local_mod) {
+ return Some(it);
+ }
+ let mut block = self.block;
+ while let Some(block_info) = block {
+ let parent = block_info.parent.def_map(db);
+ if let Some(it) = f(&parent, block_info.parent.local_id) {
+ return Some(it);
+ }
+ block = parent.block;
+ }
+
+ None
+ }
+
+ /// If this `DefMap` is for a block expression, returns the module containing the block (which
+ /// might again be a block, or a module inside a block).
+ pub fn parent(&self) -> Option<ModuleId> {
+ Some(self.block?.parent)
+ }
+
+ /// Returns the module containing `local_mod`, either the parent `mod`, or the module containing
+ /// the block, if `self` corresponds to a block expression.
+ pub fn containing_module(&self, local_mod: LocalModuleId) -> Option<ModuleId> {
+ match &self[local_mod].parent {
+ Some(parent) => Some(self.module_id(*parent)),
+ None => self.block.as_ref().map(|block| block.parent),
+ }
+ }
+
+ // FIXME: this can use some more human-readable format (ideally, an IR
+ // even), as this should be a great debugging aid.
+ pub fn dump(&self, db: &dyn DefDatabase) -> String {
+ let mut buf = String::new();
+ let mut arc;
+ let mut current_map = self;
+ while let Some(block) = &current_map.block {
+ go(&mut buf, current_map, "block scope", current_map.root);
+ buf.push('\n');
+ arc = block.parent.def_map(db);
+ current_map = &*arc;
+ }
+ go(&mut buf, current_map, "crate", current_map.root);
+ return buf;
+
+ fn go(buf: &mut String, map: &DefMap, path: &str, module: LocalModuleId) {
+ format_to!(buf, "{}\n", path);
+
+ map.modules[module].scope.dump(buf);
+
+ for (name, child) in
+ map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
+ {
+ let path = format!("{}::{}", path, name);
+ buf.push('\n');
+ go(buf, map, &path, *child);
+ }
+ }
+ }
+
+ pub fn dump_block_scopes(&self, db: &dyn DefDatabase) -> String {
+ let mut buf = String::new();
+ let mut arc;
+ let mut current_map = self;
+ while let Some(block) = &current_map.block {
+ format_to!(buf, "{:?} in {:?}\n", block.block, block.parent);
+ arc = block.parent.def_map(db);
+ current_map = &*arc;
+ }
+
+ format_to!(buf, "crate scope\n");
+ buf
+ }
+
+ fn shrink_to_fit(&mut self) {
+ // Exhaustive match to require handling new fields.
+ let Self {
+ _c: _,
+ exported_derives,
+ extern_prelude,
+ diagnostics,
+ modules,
+ registered_attrs,
+ registered_tools,
+ fn_proc_macro_mapping,
+ derive_helpers_in_scope,
+ proc_macro_loading_error: _,
+ block: _,
+ edition: _,
+ recursion_limit: _,
+ krate: _,
+ prelude: _,
+ root: _,
+ } = self;
+
+ extern_prelude.shrink_to_fit();
+ exported_derives.shrink_to_fit();
+ diagnostics.shrink_to_fit();
+ modules.shrink_to_fit();
+ registered_attrs.shrink_to_fit();
+ registered_tools.shrink_to_fit();
+ fn_proc_macro_mapping.shrink_to_fit();
+ derive_helpers_in_scope.shrink_to_fit();
+ for (_, module) in modules.iter_mut() {
+ module.children.shrink_to_fit();
+ module.scope.shrink_to_fit();
+ }
+ }
+
+ /// Get a reference to the def map's diagnostics.
+ pub fn diagnostics(&self) -> &[DefDiagnostic] {
+ self.diagnostics.as_slice()
+ }
+
+ pub fn recursion_limit(&self) -> Option<u32> {
+ self.recursion_limit
+ }
+}
+
+impl ModuleData {
+ pub(crate) fn new(origin: ModuleOrigin, visibility: Visibility) -> Self {
+ ModuleData {
+ origin,
+ visibility,
+ parent: None,
+ children: FxHashMap::default(),
+ scope: ItemScope::default(),
+ }
+ }
+
+ /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
+ pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
+ self.origin.definition_source(db)
+ }
+
+ /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
+ /// `None` for the crate root or block.
+ pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
+ let decl = self.origin.declaration()?;
+ let value = decl.to_node(db.upcast());
+ Some(InFile { file_id: decl.file_id, value })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ModuleSource {
+ SourceFile(ast::SourceFile),
+ Module(ast::Module),
+ BlockExpr(ast::BlockExpr),
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
new file mode 100644
index 000000000..3650204ee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
@@ -0,0 +1,98 @@
+//! Post-nameres attribute resolution.
+
+use hir_expand::MacroCallId;
+use syntax::{ast, SmolStr};
+
+use crate::{
+ attr::Attr,
+ attr_macro_as_call_id, builtin_attr,
+ db::DefDatabase,
+ item_scope::BuiltinShadowMode,
+ macro_id_to_def_id,
+ nameres::path_resolution::ResolveMode,
+ path::{ModPath, PathKind},
+ AstIdWithPath, LocalModuleId, UnresolvedMacro,
+};
+
+use super::DefMap;
+
+pub enum ResolvedAttr {
+ /// Attribute resolved to an attribute macro.
+ Macro(MacroCallId),
+ /// Attribute resolved to something else that does not require expansion.
+ Other,
+}
+
+impl DefMap {
+ pub(crate) fn resolve_attr_macro(
+ &self,
+ db: &dyn DefDatabase,
+ original_module: LocalModuleId,
+ ast_id: AstIdWithPath<ast::Item>,
+ attr: &Attr,
+ ) -> Result<ResolvedAttr, UnresolvedMacro> {
+ // NB: does not currently work for derive helpers as they aren't recorded in the `DefMap`
+
+ if self.is_builtin_or_registered_attr(&ast_id.path) {
+ return Ok(ResolvedAttr::Other);
+ }
+
+ let resolved_res = self.resolve_path_fp_with_macro(
+ db,
+ ResolveMode::Other,
+ original_module,
+ &ast_id.path,
+ BuiltinShadowMode::Module,
+ );
+ let def = match resolved_res.resolved_def.take_macros() {
+ Some(def) => {
+ if def.is_attribute(db) {
+ def
+ } else {
+ return Ok(ResolvedAttr::Other);
+ }
+ }
+ None => return Err(UnresolvedMacro { path: ast_id.path }),
+ };
+
+ Ok(ResolvedAttr::Macro(attr_macro_as_call_id(
+ db,
+ &ast_id,
+ attr,
+ self.krate,
+ macro_id_to_def_id(db, def),
+ false,
+ )))
+ }
+
+ pub(crate) fn is_builtin_or_registered_attr(&self, path: &ModPath) -> bool {
+ if path.kind != PathKind::Plain {
+ return false;
+ }
+
+ let segments = path.segments();
+
+ if let Some(name) = segments.first() {
+ let name = name.to_smol_str();
+ let pred = |n: &_| *n == name;
+
+ let registered = self.registered_tools.iter().map(SmolStr::as_str);
+ let is_tool = builtin_attr::TOOL_MODULES.iter().copied().chain(registered).any(pred);
+ // FIXME: tool modules can be shadowed by actual modules
+ if is_tool {
+ return true;
+ }
+
+ if segments.len() == 1 {
+ let registered = self.registered_attrs.iter().map(SmolStr::as_str);
+ let is_inert = builtin_attr::INERT_ATTRIBUTES
+ .iter()
+ .map(|it| it.name)
+ .chain(registered)
+ .any(pred);
+ return is_inert;
+ }
+ }
+ false
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
new file mode 100644
index 000000000..8a6bb929c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -0,0 +1,2202 @@
+//! The core of the module-level name resolution algorithm.
+//!
+//! `DefCollector::collect` contains the fixed-point iteration loop which
+//! resolves imports and expands macros.
+
+use std::{iter, mem};
+
+use base_db::{CrateId, Edition, FileId};
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{
+ ast_id_map::FileAstId,
+ builtin_attr_macro::find_builtin_attr,
+ builtin_derive_macro::find_builtin_derive,
+ builtin_fn_macro::find_builtin_macro,
+ name::{name, AsName, Name},
+ proc_macro::ProcMacroExpander,
+ ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
+ MacroDefKind,
+};
+use itertools::{izip, Itertools};
+use la_arena::Idx;
+use limit::Limit;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::always;
+use syntax::{ast, SmolStr};
+
+use crate::{
+ attr::{Attr, AttrId, Attrs},
+ attr_macro_as_call_id,
+ db::DefDatabase,
+ derive_macro_as_call_id,
+ item_scope::{ImportType, PerNsGlobImports},
+ item_tree::{
+ self, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, MacroCall,
+ MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId,
+ },
+ macro_call_as_call_id, macro_id_to_def_id,
+ nameres::{
+ diagnostics::DefDiagnostic,
+ mod_resolution::ModDir,
+ path_resolution::ReachedFixedPoint,
+ proc_macro::{ProcMacroDef, ProcMacroKind},
+ BuiltinShadowMode, DefMap, ModuleData, ModuleOrigin, ResolveMode,
+ },
+ path::{ImportAlias, ModPath, PathKind},
+ per_ns::PerNs,
+ visibility::{RawVisibility, Visibility},
+ AdtId, AstId, AstIdWithPath, ConstLoc, EnumLoc, EnumVariantId, ExternBlockLoc, FunctionId,
+ FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc,
+ MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId,
+ ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro,
+};
+
+static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
+static EXPANSION_DEPTH_LIMIT: Limit = Limit::new(128);
+static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
+
+pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: TreeId) -> DefMap {
+ let crate_graph = db.crate_graph();
+
+ let mut deps = FxHashMap::default();
+ // populate external prelude and dependency list
+ let krate = &crate_graph[def_map.krate];
+ for dep in &krate.dependencies {
+ tracing::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id);
+ let dep_def_map = db.crate_def_map(dep.crate_id);
+ let dep_root = dep_def_map.module_id(dep_def_map.root);
+
+ deps.insert(dep.as_name(), dep_root.into());
+
+ if dep.is_prelude() && !tree_id.is_block() {
+ def_map.extern_prelude.insert(dep.as_name(), dep_root);
+ }
+ }
+
+ let cfg_options = &krate.cfg_options;
+ let proc_macros = match &krate.proc_macro {
+ Ok(proc_macros) => {
+ proc_macros
+ .iter()
+ .enumerate()
+ .map(|(idx, it)| {
+ // FIXME: a hacky way to create a Name from string.
+ let name = tt::Ident { text: it.name.clone(), id: tt::TokenId::unspecified() };
+ (
+ name.as_name(),
+ ProcMacroExpander::new(def_map.krate, base_db::ProcMacroId(idx as u32)),
+ )
+ })
+ .collect()
+ }
+ Err(e) => {
+ def_map.proc_macro_loading_error = Some(e.clone().into_boxed_str());
+ Vec::new()
+ }
+ };
+ let is_proc_macro = krate.is_proc_macro;
+
+ let mut collector = DefCollector {
+ db,
+ def_map,
+ deps,
+ glob_imports: FxHashMap::default(),
+ unresolved_imports: Vec::new(),
+ indeterminate_imports: Vec::new(),
+ unresolved_macros: Vec::new(),
+ mod_dirs: FxHashMap::default(),
+ cfg_options,
+ proc_macros,
+ from_glob_import: Default::default(),
+ skip_attrs: Default::default(),
+ is_proc_macro,
+ };
+ if tree_id.is_block() {
+ collector.seed_with_inner(tree_id);
+ } else {
+ collector.seed_with_top_level();
+ }
+ collector.collect();
+ let mut def_map = collector.finish();
+ def_map.shrink_to_fit();
+ def_map
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+enum PartialResolvedImport {
+    /// None of the namespaces were resolved
+ Unresolved,
+    /// At least one of the namespaces was resolved
+ Indeterminate(PerNs),
+ /// All namespaces are resolved, OR it comes from other crate
+ Resolved(PerNs),
+}
+
+impl PartialResolvedImport {
+ fn namespaces(self) -> PerNs {
+ match self {
+ PartialResolvedImport::Unresolved => PerNs::none(),
+ PartialResolvedImport::Indeterminate(ns) | PartialResolvedImport::Resolved(ns) => ns,
+ }
+ }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum ImportSource {
+ Import { id: ItemTreeId<item_tree::Import>, use_tree: Idx<ast::UseTree> },
+ ExternCrate(ItemTreeId<item_tree::ExternCrate>),
+}
+
+#[derive(Debug, Eq, PartialEq)]
+struct Import {
+ path: ModPath,
+ alias: Option<ImportAlias>,
+ visibility: RawVisibility,
+ kind: ImportKind,
+ is_prelude: bool,
+ is_extern_crate: bool,
+ is_macro_use: bool,
+ source: ImportSource,
+}
+
+impl Import {
+ fn from_use(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ tree: &ItemTree,
+ id: ItemTreeId<item_tree::Import>,
+ ) -> Vec<Self> {
+ let it = &tree[id.value];
+ let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
+ let visibility = &tree[it.visibility];
+ let is_prelude = attrs.by_key("prelude_import").exists();
+
+ let mut res = Vec::new();
+ it.use_tree.expand(|idx, path, kind, alias| {
+ res.push(Self {
+ path,
+ alias,
+ visibility: visibility.clone(),
+ kind,
+ is_prelude,
+ is_extern_crate: false,
+ is_macro_use: false,
+ source: ImportSource::Import { id, use_tree: idx },
+ });
+ });
+ res
+ }
+
+ fn from_extern_crate(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ tree: &ItemTree,
+ id: ItemTreeId<item_tree::ExternCrate>,
+ ) -> Self {
+ let it = &tree[id.value];
+ let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
+ let visibility = &tree[it.visibility];
+ Self {
+ path: ModPath::from_segments(PathKind::Plain, iter::once(it.name.clone())),
+ alias: it.alias.clone(),
+ visibility: visibility.clone(),
+ kind: ImportKind::Plain,
+ is_prelude: false,
+ is_extern_crate: true,
+ is_macro_use: attrs.by_key("macro_use").exists(),
+ source: ImportSource::ExternCrate(id),
+ }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+struct ImportDirective {
+ module_id: LocalModuleId,
+ import: Import,
+ status: PartialResolvedImport,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+struct MacroDirective {
+ module_id: LocalModuleId,
+ depth: usize,
+ kind: MacroDirectiveKind,
+ container: ItemContainerId,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum MacroDirectiveKind {
+ FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
+ Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
+ Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
+}
+
+/// Walks the module tree recursively
+struct DefCollector<'a> {
+ db: &'a dyn DefDatabase,
+ def_map: DefMap,
+ deps: FxHashMap<Name, ModuleId>,
+ glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility)>>,
+ unresolved_imports: Vec<ImportDirective>,
+ indeterminate_imports: Vec<ImportDirective>,
+ unresolved_macros: Vec<MacroDirective>,
+ mod_dirs: FxHashMap<LocalModuleId, ModDir>,
+ cfg_options: &'a CfgOptions,
+ /// List of procedural macros defined by this crate. This is read from the dynamic library
+ /// built by the build system, and is the list of proc. macros we can actually expand. It is
+ /// empty when proc. macro support is disabled (in which case we still do name resolution for
+ /// them).
+ proc_macros: Vec<(Name, ProcMacroExpander)>,
+ is_proc_macro: bool,
+ from_glob_import: PerNsGlobImports,
+ /// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute.
+ /// This map is used to skip all attributes up to and including the one that failed to resolve,
+ /// in order to not expand them twice.
+ ///
+ /// This also stores the attributes to skip when we resolve derive helpers and non-macro
+ /// non-builtin attributes in general.
+ skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
+}
+
+impl DefCollector<'_> {
+ fn seed_with_top_level(&mut self) {
+ let _p = profile::span("seed_with_top_level");
+
+ let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
+ let item_tree = self.db.file_item_tree(file_id.into());
+ let module_id = self.def_map.root;
+
+ let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
+ if attrs.cfg().map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false)) {
+ self.inject_prelude(&attrs);
+
+ // Process other crate-level attributes.
+ for attr in &*attrs {
+ let attr_name = match attr.path.as_ident() {
+ Some(name) => name,
+ None => continue,
+ };
+
+ if *attr_name == hir_expand::name![recursion_limit] {
+ if let Some(limit) = attr.string_value() {
+ if let Ok(limit) = limit.parse() {
+ self.def_map.recursion_limit = Some(limit);
+ }
+ }
+ continue;
+ }
+
+ if *attr_name == hir_expand::name![crate_type] {
+ if let Some("proc-macro") = attr.string_value().map(SmolStr::as_str) {
+ self.is_proc_macro = true;
+ }
+ continue;
+ }
+
+ let attr_is_register_like = *attr_name == hir_expand::name![register_attr]
+ || *attr_name == hir_expand::name![register_tool];
+ if !attr_is_register_like {
+ continue;
+ }
+
+ let registered_name = match attr.single_ident_value() {
+ Some(ident) => ident.as_name(),
+ _ => continue,
+ };
+
+ if *attr_name == hir_expand::name![register_attr] {
+ self.def_map.registered_attrs.push(registered_name.to_smol_str());
+ cov_mark::hit!(register_attr);
+ } else {
+ self.def_map.registered_tools.push(registered_name.to_smol_str());
+ cov_mark::hit!(register_tool);
+ }
+ }
+
+ ModCollector {
+ def_collector: self,
+ macro_depth: 0,
+ module_id,
+ tree_id: TreeId::new(file_id.into(), None),
+ item_tree: &item_tree,
+ mod_dir: ModDir::root(),
+ }
+ .collect_in_top_module(item_tree.top_level_items());
+ }
+ }
+
+ fn seed_with_inner(&mut self, tree_id: TreeId) {
+ let item_tree = tree_id.item_tree(self.db);
+ let module_id = self.def_map.root;
+
+ let is_cfg_enabled = item_tree
+ .top_level_attrs(self.db, self.def_map.krate)
+ .cfg()
+ .map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false));
+ if is_cfg_enabled {
+ ModCollector {
+ def_collector: self,
+ macro_depth: 0,
+ module_id,
+ tree_id,
+ item_tree: &item_tree,
+ mod_dir: ModDir::root(),
+ }
+ .collect_in_top_module(item_tree.top_level_items());
+ }
+ }
+
+ fn resolution_loop(&mut self) {
+ let _p = profile::span("DefCollector::resolution_loop");
+
+ // main name resolution fixed-point loop.
+ let mut i = 0;
+ 'resolve_attr: loop {
+ 'resolve_macros: loop {
+ self.db.unwind_if_cancelled();
+
+ {
+ let _p = profile::span("resolve_imports loop");
+
+ 'resolve_imports: loop {
+ if self.resolve_imports() == ReachedFixedPoint::Yes {
+ break 'resolve_imports;
+ }
+ }
+ }
+ if self.resolve_macros() == ReachedFixedPoint::Yes {
+ break 'resolve_macros;
+ }
+
+ i += 1;
+ if FIXED_POINT_LIMIT.check(i).is_err() {
+ tracing::error!("name resolution is stuck");
+ break 'resolve_attr;
+ }
+ }
+
+ if self.reseed_with_unresolved_attribute() == ReachedFixedPoint::Yes {
+ break 'resolve_attr;
+ }
+ }
+ }
+
+ fn collect(&mut self) {
+ let _p = profile::span("DefCollector::collect");
+
+ self.resolution_loop();
+
+ // Resolve all indeterminate resolved imports again
+        // as some of the macros may have expanded to new imports that shadow partially resolved imports
+ // FIXME: We maybe could skip this, if we handle the indeterminate imports in `resolve_imports`
+ // correctly
+ let partial_resolved = self.indeterminate_imports.drain(..).map(|directive| {
+ ImportDirective { status: PartialResolvedImport::Unresolved, ..directive }
+ });
+ self.unresolved_imports.extend(partial_resolved);
+ self.resolve_imports();
+
+ let unresolved_imports = mem::take(&mut self.unresolved_imports);
+ // show unresolved imports in completion, etc
+ for directive in &unresolved_imports {
+ self.record_resolved_import(directive);
+ }
+ self.unresolved_imports = unresolved_imports;
+
+ if self.is_proc_macro {
+ // A crate exporting procedural macros is not allowed to export anything else.
+ //
+ // Additionally, while the proc macro entry points must be `pub`, they are not publicly
+ // exported in type/value namespace. This function reduces the visibility of all items
+ // in the crate root that aren't proc macros.
+ let root = self.def_map.root;
+ let module_id = self.def_map.module_id(root);
+ let root = &mut self.def_map.modules[root];
+ root.scope.censor_non_proc_macros(module_id);
+ }
+ }
+
+ /// When the fixed-point loop reaches a stable state, we might still have
+ /// some unresolved attributes left over. This takes one of them, and feeds
+ /// the item it's applied to back into name resolution.
+ ///
+ /// This effectively ignores the fact that the macro is there and just treats the items as
+ /// normal code.
+ ///
+ /// This improves UX for unresolved attributes, and replicates the
+ /// behavior before we supported proc. attribute macros.
+ fn reseed_with_unresolved_attribute(&mut self) -> ReachedFixedPoint {
+ cov_mark::hit!(unresolved_attribute_fallback);
+
+ let unresolved_attr =
+ self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
+ .kind
+ {
+ MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree } => {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+ directive.module_id,
+ MacroCallKind::Attr {
+ ast_id: ast_id.ast_id,
+ attr_args: Default::default(),
+ invoc_attr_index: attr.id.ast_index,
+ is_derive: false,
+ },
+ attr.path().clone(),
+ ));
+
+ self.skip_attrs.insert(ast_id.ast_id.with_value(*mod_item), attr.id);
+
+ Some((idx, directive, *mod_item, *tree))
+ }
+ _ => None,
+ });
+
+ match unresolved_attr {
+ Some((pos, &MacroDirective { module_id, depth, container, .. }, mod_item, tree_id)) => {
+ let item_tree = &tree_id.item_tree(self.db);
+ let mod_dir = self.mod_dirs[&module_id].clone();
+ ModCollector {
+ def_collector: self,
+ macro_depth: depth,
+ module_id,
+ tree_id,
+ item_tree,
+ mod_dir,
+ }
+ .collect(&[mod_item], container);
+
+ self.unresolved_macros.swap_remove(pos);
+ // Continue name resolution with the new data.
+ ReachedFixedPoint::No
+ }
+ None => ReachedFixedPoint::Yes,
+ }
+ }
+
+ fn inject_prelude(&mut self, crate_attrs: &Attrs) {
+ // See compiler/rustc_builtin_macros/src/standard_library_imports.rs
+
+ if crate_attrs.by_key("no_core").exists() {
+ // libcore does not get a prelude.
+ return;
+ }
+
+ let krate = if crate_attrs.by_key("no_std").exists() {
+ name![core]
+ } else {
+ let std = name![std];
+ if self.def_map.extern_prelude().any(|(name, _)| *name == std) {
+ std
+ } else {
+ // If `std` does not exist for some reason, fall back to core. This mostly helps
+ // keep r-a's own tests minimal.
+ name![core]
+ }
+ };
+
+ let edition = match self.def_map.edition {
+ Edition::Edition2015 => name![rust_2015],
+ Edition::Edition2018 => name![rust_2018],
+ Edition::Edition2021 => name![rust_2021],
+ };
+
+ let path_kind = if self.def_map.edition == Edition::Edition2015 {
+ PathKind::Plain
+ } else {
+ PathKind::Abs
+ };
+ let path =
+ ModPath::from_segments(path_kind, [krate.clone(), name![prelude], edition].into_iter());
+ // Fall back to the older `std::prelude::v1` for compatibility with Rust <1.52.0
+ // FIXME remove this fallback
+ let fallback_path =
+ ModPath::from_segments(path_kind, [krate, name![prelude], name![v1]].into_iter());
+
+ for path in &[path, fallback_path] {
+ let (per_ns, _) = self.def_map.resolve_path(
+ self.db,
+ self.def_map.root,
+ path,
+ BuiltinShadowMode::Other,
+ );
+
+ match per_ns.types {
+ Some((ModuleDefId::ModuleId(m), _)) => {
+ self.def_map.prelude = Some(m);
+ return;
+ }
+ types => {
+ tracing::debug!(
+ "could not resolve prelude path `{}` to module (resolved to {:?})",
+ path,
+ types
+ );
+ }
+ }
+ }
+ }
+
+ /// Adds a definition of procedural macro `name` to the root module.
+ ///
+ /// # Notes on procedural macro resolution
+ ///
+ /// Procedural macro functionality is provided by the build system: It has to build the proc
+ /// macro and pass the resulting dynamic library to rust-analyzer.
+ ///
+ /// When procedural macro support is enabled, the list of proc macros exported by a crate is
+ /// known before we resolve names in the crate. This list is stored in `self.proc_macros` and is
+ /// derived from the dynamic library.
+ ///
+ /// However, we *also* would like to be able to at least *resolve* macros on our own, without
+ /// help by the build system. So, when the macro isn't found in `self.proc_macros`, we instead
+ /// use a dummy expander that always errors. This comes with the drawback of macros potentially
+ /// going out of sync with what the build system sees (since we resolve using VFS state, but
+ /// Cargo builds only on-disk files). We could and probably should add diagnostics for that.
+ fn export_proc_macro(
+ &mut self,
+ def: ProcMacroDef,
+ id: ItemTreeId<item_tree::Function>,
+ fn_id: FunctionId,
+ module_id: ModuleId,
+ ) {
+ let kind = def.kind.to_basedb_kind();
+ let (expander, kind) = match self.proc_macros.iter().find(|(n, _)| n == &def.name) {
+ Some(&(_, expander)) => (expander, kind),
+ None => (ProcMacroExpander::dummy(self.def_map.krate), kind),
+ };
+
+ let proc_macro_id =
+ ProcMacroLoc { container: module_id, id, expander, kind }.intern(self.db);
+ self.define_proc_macro(def.name.clone(), proc_macro_id);
+ if let ProcMacroKind::CustomDerive { helpers } = def.kind {
+ self.def_map
+ .exported_derives
+ .insert(macro_id_to_def_id(self.db, proc_macro_id.into()), helpers);
+ }
+ self.def_map.fn_proc_macro_mapping.insert(fn_id, proc_macro_id);
+ }
+
+ /// Define a macro with `macro_rules`.
+ ///
+ /// It will define the macro in legacy textual scope, and if it has `#[macro_export]`,
+ /// then it is also defined in the root module scope.
+ /// You can `use` or invoke it by `crate::macro_name` anywhere, before or after the definition.
+ ///
+ /// It is surprising that the macro will never be in the current module scope.
+    /// This code fails with "unresolved import/macro",
+ /// ```rust,compile_fail
+ /// mod m { macro_rules! foo { () => {} } }
+ /// use m::foo as bar;
+ /// ```
+ ///
+ /// ```rust,compile_fail
+ /// macro_rules! foo { () => {} }
+ /// self::foo!();
+ /// crate::foo!();
+ /// ```
+ ///
+ /// Well, this code compiles, because the plain path `foo` in `use` is searched
+ /// in the legacy textual scope only.
+ /// ```rust
+ /// macro_rules! foo { () => {} }
+ /// use foo as bar;
+ /// ```
+ fn define_macro_rules(
+ &mut self,
+ module_id: LocalModuleId,
+ name: Name,
+ macro_: MacroRulesId,
+ export: bool,
+ ) {
+ // Textual scoping
+ self.define_legacy_macro(module_id, name.clone(), macro_.into());
+
+ // Module scoping
+ // In Rust, `#[macro_export]` macros are unconditionally visible at the
+    // crate root, even if the parent module is **not** visible.
+ if export {
+ let module_id = self.def_map.root;
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ Visibility::Public,
+ ImportType::Named,
+ );
+ }
+ }
+
+ /// Define a legacy textual scoped macro in module
+ ///
+ /// We use a map `legacy_macros` to store all legacy textual scoped macros visible per module.
+ /// It will clone all macros from parent legacy scope, whose definition is prior to
+ /// the definition of current module.
+    /// Also, `macro_use` on a module will import all legacy macros visible inside it
+    /// into the current legacy scope, with possible shadowing.
+ fn define_legacy_macro(&mut self, module_id: LocalModuleId, name: Name, mac: MacroId) {
+ // Always shadowing
+ self.def_map.modules[module_id].scope.define_legacy_macro(name, mac);
+ }
+
+ /// Define a macro 2.0 macro
+ ///
+    /// The scope of a macro 2.0 macro is equal to that of a normal function
+ fn define_macro_def(
+ &mut self,
+ module_id: LocalModuleId,
+ name: Name,
+ macro_: Macro2Id,
+ vis: &RawVisibility,
+ ) {
+ let vis =
+ self.def_map.resolve_visibility(self.db, module_id, vis).unwrap_or(Visibility::Public);
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ vis,
+ ImportType::Named,
+ );
+ }
+
+ /// Define a proc macro
+ ///
+    /// A proc macro is scoped like a normal macro, but it is not visible in the legacy textual scope.
+    /// It is also unconditionally exported.
+ fn define_proc_macro(&mut self, name: Name, macro_: ProcMacroId) {
+ let module_id = self.def_map.root;
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ Visibility::Public,
+ ImportType::Named,
+ );
+ }
+
+ /// Import macros from `#[macro_use] extern crate`.
+ fn import_macros_from_extern_crate(
+ &mut self,
+ current_module_id: LocalModuleId,
+ extern_crate: &item_tree::ExternCrate,
+ ) {
+ tracing::debug!(
+ "importing macros from extern crate: {:?} ({:?})",
+ extern_crate,
+ self.def_map.edition,
+ );
+
+ if let Some(m) = self.resolve_extern_crate(&extern_crate.name) {
+ if m == self.def_map.module_id(current_module_id) {
+ cov_mark::hit!(ignore_macro_use_extern_crate_self);
+ return;
+ }
+
+ cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
+ self.import_all_macros_exported(current_module_id, m.krate);
+ }
+ }
+
+ /// Import all exported macros from another crate
+ ///
+ /// Exported macros are just all macros in the root module scope.
+ /// Note that it contains not only all `#[macro_export]` macros, but also all aliases
+ /// created by `use` in the root module, ignoring the visibility of `use`.
+ fn import_all_macros_exported(&mut self, current_module_id: LocalModuleId, krate: CrateId) {
+ let def_map = self.db.crate_def_map(krate);
+ for (name, def) in def_map[def_map.root].scope.macros() {
+ // `#[macro_use]` brings macros into legacy scope. Yes, even non-`macro_rules!` macros.
+ self.define_legacy_macro(current_module_id, name.clone(), def);
+ }
+ }
+
+ /// Tries to resolve every currently unresolved import.
+ fn resolve_imports(&mut self) -> ReachedFixedPoint {
+ let mut res = ReachedFixedPoint::Yes;
+ let imports = mem::take(&mut self.unresolved_imports);
+
+ self.unresolved_imports = imports
+ .into_iter()
+ .filter_map(|mut directive| {
+ directive.status = self.resolve_import(directive.module_id, &directive.import);
+ match directive.status {
+ PartialResolvedImport::Indeterminate(_) => {
+ self.record_resolved_import(&directive);
+ self.indeterminate_imports.push(directive);
+ res = ReachedFixedPoint::No;
+ None
+ }
+ PartialResolvedImport::Resolved(_) => {
+ self.record_resolved_import(&directive);
+ res = ReachedFixedPoint::No;
+ None
+ }
+ PartialResolvedImport::Unresolved => Some(directive),
+ }
+ })
+ .collect();
+ res
+ }
+
+ fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
+ let _p = profile::span("resolve_import").detail(|| format!("{}", import.path));
+ tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition);
+ if import.is_extern_crate {
+ let name = import
+ .path
+ .as_ident()
+ .expect("extern crate should have been desugared to one-element path");
+
+ let res = self.resolve_extern_crate(name);
+
+ match res {
+ Some(res) => {
+ PartialResolvedImport::Resolved(PerNs::types(res.into(), Visibility::Public))
+ }
+ None => PartialResolvedImport::Unresolved,
+ }
+ } else {
+ let res = self.def_map.resolve_path_fp_with_macro(
+ self.db,
+ ResolveMode::Import,
+ module_id,
+ &import.path,
+ BuiltinShadowMode::Module,
+ );
+
+ let def = res.resolved_def;
+ if res.reached_fixedpoint == ReachedFixedPoint::No || def.is_none() {
+ return PartialResolvedImport::Unresolved;
+ }
+
+ if let Some(krate) = res.krate {
+ if krate != self.def_map.krate {
+ return PartialResolvedImport::Resolved(
+ def.filter_visibility(|v| matches!(v, Visibility::Public)),
+ );
+ }
+ }
+
+            // Check whether all namespaces are resolved
+ if def.take_types().is_some()
+ && def.take_values().is_some()
+ && def.take_macros().is_some()
+ {
+ PartialResolvedImport::Resolved(def)
+ } else {
+ PartialResolvedImport::Indeterminate(def)
+ }
+ }
+ }
+
+ fn resolve_extern_crate(&self, name: &Name) -> Option<ModuleId> {
+ if *name == name!(self) {
+ cov_mark::hit!(extern_crate_self_as);
+ let root = match self.def_map.block {
+ Some(_) => {
+ let def_map = self.def_map.crate_root(self.db).def_map(self.db);
+ def_map.module_id(def_map.root())
+ }
+ None => self.def_map.module_id(self.def_map.root()),
+ };
+ Some(root)
+ } else {
+ self.deps.get(name).copied()
+ }
+ }
+
+ fn record_resolved_import(&mut self, directive: &ImportDirective) {
+ let _p = profile::span("record_resolved_import");
+
+ let module_id = directive.module_id;
+ let import = &directive.import;
+ let mut def = directive.status.namespaces();
+ let vis = self
+ .def_map
+ .resolve_visibility(self.db, module_id, &directive.import.visibility)
+ .unwrap_or(Visibility::Public);
+
+ match import.kind {
+ ImportKind::Plain | ImportKind::TypeOnly => {
+ let name = match &import.alias {
+ Some(ImportAlias::Alias(name)) => Some(name),
+ Some(ImportAlias::Underscore) => None,
+ None => match import.path.segments().last() {
+ Some(last_segment) => Some(last_segment),
+ None => {
+ cov_mark::hit!(bogus_paths);
+ return;
+ }
+ },
+ };
+
+ if import.kind == ImportKind::TypeOnly {
+ def.values = None;
+ def.macros = None;
+ }
+
+ tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
+
+ // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
+ if import.is_extern_crate && module_id == self.def_map.root {
+ if let (Some(ModuleDefId::ModuleId(def)), Some(name)) = (def.take_types(), name)
+ {
+ self.def_map.extern_prelude.insert(name.clone(), def);
+ }
+ }
+
+ self.update(module_id, &[(name.cloned(), def)], vis, ImportType::Named);
+ }
+ ImportKind::Glob => {
+ tracing::debug!("glob import: {:?}", import);
+ match def.take_types() {
+ Some(ModuleDefId::ModuleId(m)) => {
+ if import.is_prelude {
+ // Note: This dodgily overrides the injected prelude. The rustc
+ // implementation seems to work the same though.
+ cov_mark::hit!(std_prelude);
+ self.def_map.prelude = Some(m);
+ } else if m.krate != self.def_map.krate {
+ cov_mark::hit!(glob_across_crates);
+ // glob import from other crate => we can just import everything once
+ let item_map = m.def_map(self.db);
+ let scope = &item_map[m.local_id].scope;
+
+                        // Module scoped macros are included
+ let items = scope
+ .resolutions()
+ // only keep visible names...
+ .map(|(n, res)| {
+ (n, res.filter_visibility(|v| v.is_visible_from_other_crate()))
+ })
+ .filter(|(_, res)| !res.is_none())
+ .collect::<Vec<_>>();
+
+ self.update(module_id, &items, vis, ImportType::Glob);
+ } else {
+ // glob import from same crate => we do an initial
+ // import, and then need to propagate any further
+ // additions
+ let def_map;
+ let scope = if m.block == self.def_map.block_id() {
+ &self.def_map[m.local_id].scope
+ } else {
+ def_map = m.def_map(self.db);
+ &def_map[m.local_id].scope
+ };
+
+                        // Module scoped macros are included
+ let items = scope
+ .resolutions()
+ // only keep visible names...
+ .map(|(n, res)| {
+ (
+ n,
+ res.filter_visibility(|v| {
+ v.is_visible_from_def_map(
+ self.db,
+ &self.def_map,
+ module_id,
+ )
+ }),
+ )
+ })
+ .filter(|(_, res)| !res.is_none())
+ .collect::<Vec<_>>();
+
+ self.update(module_id, &items, vis, ImportType::Glob);
+ // record the glob import in case we add further items
+ let glob = self.glob_imports.entry(m.local_id).or_default();
+ if !glob.iter().any(|(mid, _)| *mid == module_id) {
+ glob.push((module_id, vis));
+ }
+ }
+ }
+ Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => {
+ cov_mark::hit!(glob_enum);
+ // glob import from enum => just import all the variants
+
+ // XXX: urgh, so this works by accident! Here, we look at
+ // the enum data, and, in theory, this might require us to
+ // look back at the crate_def_map, creating a cycle. For
+ // example, `enum E { crate::some_macro!(); }`. Luckily, the
+ // only kind of macro that is allowed inside enum is a
+ // `cfg_macro`, and we don't need to run name resolution for
+ // it, but this is sheer luck!
+ let enum_data = self.db.enum_data(e);
+ let resolutions = enum_data
+ .variants
+ .iter()
+ .map(|(local_id, variant_data)| {
+ let name = variant_data.name.clone();
+ let variant = EnumVariantId { parent: e, local_id };
+ let res = PerNs::both(variant.into(), variant.into(), vis);
+ (Some(name), res)
+ })
+ .collect::<Vec<_>>();
+ self.update(module_id, &resolutions, vis, ImportType::Glob);
+ }
+ Some(d) => {
+ tracing::debug!("glob import {:?} from non-module/enum {:?}", import, d);
+ }
+ None => {
+ tracing::debug!("glob import {:?} didn't resolve as type", import);
+ }
+ }
+ }
+ }
+ }
+
+ fn update(
+ &mut self,
+ module_id: LocalModuleId,
+ resolutions: &[(Option<Name>, PerNs)],
+ vis: Visibility,
+ import_type: ImportType,
+ ) {
+ self.db.unwind_if_cancelled();
+ self.update_recursive(module_id, resolutions, vis, import_type, 0)
+ }
+
    /// Pushes `resolutions` into `module_id`'s scope and, if anything actually
    /// changed, propagates the same resolutions through recorded glob imports
    /// of this module (`self.glob_imports`), recursing with `depth + 1`.
    ///
    /// The `!changed` early-return is what makes the overall glob-import
    /// fixed-point terminate.
    fn update_recursive(
        &mut self,
        module_id: LocalModuleId,
        resolutions: &[(Option<Name>, PerNs)],
        // All resolutions are imported with this visibility; the visibilities in
        // the `PerNs` values are ignored and overwritten
        vis: Visibility,
        import_type: ImportType,
        depth: usize,
    ) {
        if GLOB_RECURSION_LIMIT.check(depth).is_err() {
            // prevent stack overflows (but this shouldn't be possible)
            panic!("infinite recursion in glob imports!");
        }
        let mut changed = false;

        for (name, res) in resolutions {
            match name {
                Some(name) => {
                    let scope = &mut self.def_map.modules[module_id].scope;
                    changed |= scope.push_res_with_import(
                        &mut self.from_glob_import,
                        (module_id, name.clone()),
                        res.with_visibility(vis),
                        import_type,
                    );
                }
                None => {
                    // A `None` name is a `use Trait as _;` import: only trait
                    // imports are meaningful without a name.
                    let tr = match res.take_types() {
                        Some(ModuleDefId::TraitId(tr)) => tr,
                        Some(other) => {
                            tracing::debug!("non-trait `_` import of {:?}", other);
                            continue;
                        }
                        None => continue,
                    };
                    let old_vis = self.def_map.modules[module_id].scope.unnamed_trait_vis(tr);
                    // Only re-insert the unnamed trait if the new import widens
                    // its visibility (or it wasn't in scope at all).
                    let should_update = match old_vis {
                        None => true,
                        Some(old_vis) => {
                            let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| {
                                panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr);
                            });

                            if max_vis == old_vis {
                                false
                            } else {
                                cov_mark::hit!(upgrade_underscore_visibility);
                                true
                            }
                        }
                    };

                    if should_update {
                        changed = true;
                        self.def_map.modules[module_id].scope.push_unnamed_trait(tr, vis);
                    }
                }
            }
        }

        if !changed {
            return;
        }
        let glob_imports = self
            .glob_imports
            .get(&module_id)
            .into_iter()
            .flatten()
            .filter(|(glob_importing_module, _)| {
                // we know all resolutions have the same visibility (`vis`), so we
                // just need to check that once
                vis.is_visible_from_def_map(self.db, &self.def_map, *glob_importing_module)
            })
            .cloned()
            .collect::<Vec<_>>();

        for (glob_importing_module, glob_import_vis) in glob_imports {
            self.update_recursive(
                glob_importing_module,
                resolutions,
                glob_import_vis,
                ImportType::Glob,
                depth + 1,
            );
        }
    }
+
    /// Tries to resolve every pending macro directive (fn-like calls, derives,
    /// item attributes). Directives that resolve are expanded via
    /// `collect_macro_expansion`; the rest stay in `self.unresolved_macros`.
    ///
    /// Returns `ReachedFixedPoint::Yes` iff no directive made progress, which
    /// tells the caller's loop to stop iterating.
    fn resolve_macros(&mut self) -> ReachedFixedPoint {
        // Take the list out of `self` so the `retain` closure below can borrow
        // `self` mutably; it is merged back at the end.
        let mut macros = mem::take(&mut self.unresolved_macros);
        let mut resolved = Vec::new();
        let mut push_resolved = |directive: &MacroDirective, call_id| {
            resolved.push((directive.module_id, directive.depth, directive.container, call_id));
        };
        let mut res = ReachedFixedPoint::Yes;
        // `retain` keeps the directives that are *still* unresolved; a `false`
        // return means "resolved, remove from the queue".
        macros.retain(|directive| {
            let resolver = |path| {
                let resolved_res = self.def_map.resolve_path_fp_with_macro(
                    self.db,
                    ResolveMode::Other,
                    directive.module_id,
                    &path,
                    BuiltinShadowMode::Module,
                );
                resolved_res
                    .resolved_def
                    .take_macros()
                    .map(|it| (it, macro_id_to_def_id(self.db, it)))
            };
            let resolver_def_id = |path| resolver(path).map(|(_, it)| it);

            match &directive.kind {
                MacroDirectiveKind::FnLike { ast_id, expand_to } => {
                    let call_id = macro_call_as_call_id(
                        self.db,
                        ast_id,
                        *expand_to,
                        self.def_map.krate,
                        &resolver_def_id,
                        &mut |_err| (),
                    );
                    if let Ok(Ok(call_id)) = call_id {
                        push_resolved(directive, call_id);
                        res = ReachedFixedPoint::No;
                        return false;
                    }
                }
                MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
                    let id = derive_macro_as_call_id(
                        self.db,
                        ast_id,
                        *derive_attr,
                        *derive_pos as u32,
                        self.def_map.krate,
                        &resolver,
                    );

                    if let Ok((macro_id, def_id, call_id)) = id {
                        self.def_map.modules[directive.module_id].scope.set_derive_macro_invoc(
                            ast_id.ast_id,
                            call_id,
                            *derive_attr,
                            *derive_pos,
                        );
                        // Record its helper attributes.
                        // NOTE(review): helpers are only looked up cross-crate
                        // here; same-crate exported derives presumably have
                        // their helpers registered elsewhere — confirm.
                        if def_id.krate != self.def_map.krate {
                            let def_map = self.db.crate_def_map(def_id.krate);
                            if let Some(helpers) = def_map.exported_derives.get(&def_id) {
                                self.def_map
                                    .derive_helpers_in_scope
                                    .entry(ast_id.ast_id.map(|it| it.upcast()))
                                    .or_default()
                                    .extend(izip!(
                                        helpers.iter().cloned(),
                                        iter::repeat(macro_id),
                                        iter::repeat(call_id),
                                    ));
                            }
                        }

                        push_resolved(directive, call_id);
                        res = ReachedFixedPoint::No;
                        return false;
                    }
                }
                MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => {
                    let &AstIdWithPath { ast_id, ref path } = file_ast_id;
                    let file_id = ast_id.file_id;

                    // Re-collects the annotated item with this attribute marked
                    // as handled (via `skip_attrs`), so the remaining attributes
                    // on it get another resolution round. Always returns `false`
                    // (= remove this directive from the queue).
                    let mut recollect_without = |collector: &mut Self| {
                        // Remove the original directive since we resolved it.
                        let mod_dir = collector.mod_dirs[&directive.module_id].clone();
                        collector.skip_attrs.insert(InFile::new(file_id, *mod_item), attr.id);

                        let item_tree = tree.item_tree(self.db);
                        ModCollector {
                            def_collector: collector,
                            macro_depth: directive.depth,
                            module_id: directive.module_id,
                            tree_id: *tree,
                            item_tree: &item_tree,
                            mod_dir,
                        }
                        .collect(&[*mod_item], directive.container);
                        res = ReachedFixedPoint::No;
                        false
                    };

                    if let Some(ident) = path.as_ident() {
                        if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) {
                            if helpers.iter().any(|(it, ..)| it == ident) {
                                cov_mark::hit!(resolved_derive_helper);
                                // Resolved to derive helper. Collect the item's attributes again,
                                // starting after the derive helper.
                                return recollect_without(self);
                            }
                        }
                    }

                    let def = match resolver_def_id(path.clone()) {
                        Some(def) if def.is_attribute() => def,
                        // Not (yet) resolvable to an attribute macro: keep the
                        // directive in the queue for the next round.
                        _ => return true,
                    };
                    if matches!(
                        def,
                        MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _), .. }
                        if expander.is_derive()
                    ) {
                        // Resolved to `#[derive]`

                        let item_tree = tree.item_tree(self.db);
                        // `#[derive]` is only valid on ADTs; anything else is a
                        // diagnostic and a plain re-collect.
                        let ast_adt_id: FileAstId<ast::Adt> = match *mod_item {
                            ModItem::Struct(strukt) => item_tree[strukt].ast_id().upcast(),
                            ModItem::Union(union) => item_tree[union].ast_id().upcast(),
                            ModItem::Enum(enum_) => item_tree[enum_].ast_id().upcast(),
                            _ => {
                                let diag = DefDiagnostic::invalid_derive_target(
                                    directive.module_id,
                                    ast_id,
                                    attr.id,
                                );
                                self.def_map.diagnostics.push(diag);
                                return recollect_without(self);
                            }
                        };
                        let ast_id = ast_id.with_value(ast_adt_id);

                        match attr.parse_path_comma_token_tree() {
                            Some(derive_macros) => {
                                // Queue one `Derive` directive per path inside
                                // `#[derive(...)]`. `len` ends up as the last
                                // index, so `len + 1` is the derive count.
                                let mut len = 0;
                                for (idx, path) in derive_macros.enumerate() {
                                    let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
                                    self.unresolved_macros.push(MacroDirective {
                                        module_id: directive.module_id,
                                        depth: directive.depth + 1,
                                        kind: MacroDirectiveKind::Derive {
                                            ast_id,
                                            derive_attr: attr.id,
                                            derive_pos: idx,
                                        },
                                        container: directive.container,
                                    });
                                    len = idx;
                                }

                                // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
                                // This is just a trick to be able to resolve the input to derives as proper paths.
                                // Check the comment in [`builtin_attr_macro`].
                                let call_id = attr_macro_as_call_id(
                                    self.db,
                                    file_ast_id,
                                    attr,
                                    self.def_map.krate,
                                    def,
                                    true,
                                );
                                self.def_map.modules[directive.module_id]
                                    .scope
                                    .init_derive_attribute(ast_id, attr.id, call_id, len + 1);
                            }
                            None => {
                                let diag = DefDiagnostic::malformed_derive(
                                    directive.module_id,
                                    ast_id,
                                    attr.id,
                                );
                                self.def_map.diagnostics.push(diag);
                            }
                        }

                        return recollect_without(self);
                    }

                    // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute.
                    let call_id = attr_macro_as_call_id(
                        self.db,
                        file_ast_id,
                        attr,
                        self.def_map.krate,
                        def,
                        false,
                    );
                    let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id);

                    // If proc attribute macro expansion is disabled, skip expanding it here
                    if !self.db.enable_proc_attr_macros() {
                        self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
                            directive.module_id,
                            loc.kind,
                            loc.def.krate,
                        ));
                        return recollect_without(self);
                    }

                    // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
                    // due to duplicating functions into macro expansions
                    if matches!(
                        loc.def.kind,
                        MacroDefKind::BuiltInAttr(expander, _)
                        if expander.is_test() || expander.is_bench()
                    ) {
                        return recollect_without(self);
                    }

                    if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
                        if exp.is_dummy() {
                            // If there's no expander for the proc macro (e.g.
                            // because proc macros are disabled, or building the
                            // proc macro crate failed), report this and skip
                            // expansion like we would if it was disabled
                            self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
                                directive.module_id,
                                loc.kind,
                                loc.def.krate,
                            ));

                            return recollect_without(self);
                        }
                    }

                    self.def_map.modules[directive.module_id]
                        .scope
                        .add_attr_macro_invoc(ast_id, call_id);

                    push_resolved(directive, call_id);
                    res = ReachedFixedPoint::No;
                    return false;
                }
            }

            // Directive made no progress this round; keep it queued.
            true
        });
        // Attribute resolution can add unresolved macro invocations, so concatenate the lists.
        macros.extend(mem::take(&mut self.unresolved_macros));
        self.unresolved_macros = macros;

        // Expand everything that resolved this round.
        for (module_id, depth, container, macro_call_id) in resolved {
            self.collect_macro_expansion(module_id, macro_call_id, depth, container);
        }

        res
    }
+
    /// Expands a resolved macro call, emits any expansion-error diagnostics,
    /// and collects the items produced by the expansion into `module_id`.
    ///
    /// `depth` guards against runaway recursive expansion via
    /// `EXPANSION_DEPTH_LIMIT`; over-deep calls are silently dropped (with a
    /// warning), not diagnosed.
    fn collect_macro_expansion(
        &mut self,
        module_id: LocalModuleId,
        macro_call_id: MacroCallId,
        depth: usize,
        container: ItemContainerId,
    ) {
        if EXPANSION_DEPTH_LIMIT.check(depth).is_err() {
            cov_mark::hit!(macro_expansion_overflow);
            tracing::warn!("macro expansion is too deep");
            return;
        }
        let file_id = macro_call_id.as_file();

        // First, fetch the raw expansion result for purposes of error reporting. This goes through
        // `macro_expand_error` to avoid depending on the full expansion result (to improve
        // incrementality).
        let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id);
        let err = self.db.macro_expand_error(macro_call_id);
        if let Some(err) = err {
            let diag = match err {
                hir_expand::ExpandError::UnresolvedProcMacro(krate) => {
                    always!(krate == loc.def.krate);
                    // Missing proc macros are non-fatal, so they are handled specially.
                    DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate)
                }
                _ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()),
            };

            self.def_map.diagnostics.push(diag);
        }

        // Then, fetch and process the item tree. This will reuse the expansion result from above.
        let item_tree = self.db.file_item_tree(file_id);
        let mod_dir = self.mod_dirs[&module_id].clone();
        ModCollector {
            def_collector: &mut *self,
            macro_depth: depth,
            tree_id: TreeId::new(file_id, None),
            module_id,
            item_tree: &item_tree,
            mod_dir,
        }
        .collect(item_tree.top_level_items(), container);
    }
+
    /// Finalizes collection: turns everything still unresolved (macros,
    /// extern crates, imports) into diagnostics and returns the finished
    /// `DefMap`.
    fn finish(mut self) -> DefMap {
        // Emit diagnostics for all remaining unexpanded macros.

        let _p = profile::span("DefCollector::finish");

        for directive in &self.unresolved_macros {
            match &directive.kind {
                MacroDirectiveKind::FnLike { ast_id, expand_to } => {
                    // Re-run resolution one last time purely to get a precise
                    // `UnresolvedMacro { path }` for the diagnostic.
                    let macro_call_as_call_id = macro_call_as_call_id(
                        self.db,
                        ast_id,
                        *expand_to,
                        self.def_map.krate,
                        |path| {
                            let resolved_res = self.def_map.resolve_path_fp_with_macro(
                                self.db,
                                ResolveMode::Other,
                                directive.module_id,
                                &path,
                                BuiltinShadowMode::Module,
                            );
                            resolved_res
                                .resolved_def
                                .take_macros()
                                .map(|it| macro_id_to_def_id(self.db, it))
                        },
                        &mut |_| (),
                    );
                    if let Err(UnresolvedMacro { path }) = macro_call_as_call_id {
                        self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                            directive.module_id,
                            MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: *expand_to },
                            path,
                        ));
                    }
                }
                MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
                    self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                        directive.module_id,
                        MacroCallKind::Derive {
                            ast_id: ast_id.ast_id,
                            derive_attr_index: derive_attr.ast_index,
                            derive_index: *derive_pos as u32,
                        },
                        ast_id.path.clone(),
                    ));
                }
                // These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them
                MacroDirectiveKind::Attr { .. } => {}
            }
        }

        // Emit diagnostics for all remaining unresolved imports.

        // We'd like to avoid emitting a diagnostics avalanche when some `extern crate` doesn't
        // resolve. We first emit diagnostics for unresolved extern crates and collect the missing
        // crate names. Then we emit diagnostics for unresolved imports, but only if the import
        // doesn't start with an unresolved crate's name. Due to renaming and reexports, this is a
        // heuristic, but it works in practice.
        let mut diagnosed_extern_crates = FxHashSet::default();
        for directive in &self.unresolved_imports {
            if let ImportSource::ExternCrate(krate) = directive.import.source {
                let item_tree = krate.item_tree(self.db);
                let extern_crate = &item_tree[krate.value];

                diagnosed_extern_crates.insert(extern_crate.name.clone());

                self.def_map.diagnostics.push(DefDiagnostic::unresolved_extern_crate(
                    directive.module_id,
                    InFile::new(krate.file_id(), extern_crate.ast_id),
                ));
            }
        }

        for directive in &self.unresolved_imports {
            if let ImportSource::Import { id: import, use_tree } = directive.import.source {
                // Skip imports whose first segment is a crate we already
                // diagnosed as missing (see heuristic comment above).
                if matches!(
                    (directive.import.path.segments().first(), &directive.import.path.kind),
                    (Some(krate), PathKind::Plain | PathKind::Abs) if diagnosed_extern_crates.contains(krate)
                ) {
                    continue;
                }

                self.def_map.diagnostics.push(DefDiagnostic::unresolved_import(
                    directive.module_id,
                    import,
                    use_tree,
                ));
            }
        }

        self.def_map
    }
+}
+
/// Walks a single module, populating defs, imports and macros
struct ModCollector<'a, 'b> {
    /// Shared collector state for the whole `DefMap` construction.
    def_collector: &'a mut DefCollector<'b>,
    /// Current macro expansion depth; incremented when collecting into
    /// macro-generated items.
    macro_depth: usize,
    /// The module whose items are being collected.
    module_id: LocalModuleId,
    /// Identifies the item tree (file/block) the items come from.
    tree_id: TreeId,
    item_tree: &'a ItemTree,
    /// Filesystem position used to resolve `mod foo;` declarations.
    mod_dir: ModDir,
}
+
+impl ModCollector<'_, '_> {
+ fn collect_in_top_module(&mut self, items: &[ModItem]) {
+ let module = self.def_collector.def_map.module_id(self.module_id);
+ self.collect(items, module.into())
+ }
+
    /// Collects a list of items into `self.module_id`: declares definitions,
    /// queues imports and macro directives, and recurses into nested
    /// modules/extern blocks.
    ///
    /// Items whose `#[cfg]` is disabled get an "unconfigured code" diagnostic
    /// and are skipped; items with unresolved non-builtin attributes are
    /// deferred to the attribute fixed-point (see `resolve_attributes`).
    fn collect(&mut self, items: &[ModItem], container: ItemContainerId) {
        let krate = self.def_collector.def_map.krate;

        // Note: don't assert that inserted value is fresh: it's simply not true
        // for macros.
        self.def_collector.mod_dirs.insert(self.module_id, self.mod_dir.clone());

        // Prelude module is always considered to be `#[macro_use]`.
        if let Some(prelude_module) = self.def_collector.def_map.prelude {
            if prelude_module.krate != krate {
                cov_mark::hit!(prelude_is_macro_use);
                self.def_collector.import_all_macros_exported(self.module_id, prelude_module.krate);
            }
        }

        // This should be processed eagerly instead of deferred to resolving.
        // `#[macro_use] extern crate` is hoisted to imports macros before collecting
        // any other items.
        for &item in items {
            let attrs = self.item_tree.attrs(self.def_collector.db, krate, item.into());
            if attrs.cfg().map_or(true, |cfg| self.is_cfg_enabled(&cfg)) {
                if let ModItem::ExternCrate(id) = item {
                    let import = &self.item_tree[id];
                    let attrs = self.item_tree.attrs(
                        self.def_collector.db,
                        krate,
                        ModItem::from(id).into(),
                    );
                    if attrs.by_key("macro_use").exists() {
                        self.def_collector.import_macros_from_extern_crate(self.module_id, import);
                    }
                }
            }
        }

        for &item in items {
            let attrs = self.item_tree.attrs(self.def_collector.db, krate, item.into());
            if let Some(cfg) = attrs.cfg() {
                if !self.is_cfg_enabled(&cfg) {
                    self.emit_unconfigured_diagnostic(item, &cfg);
                    continue;
                }
            }

            if let Err(()) = self.resolve_attributes(&attrs, item, container) {
                // Do not process the item. It has at least one non-builtin attribute, so the
                // fixed-point algorithm is required to resolve the rest of them.
                continue;
            }

            let db = self.def_collector.db;
            let module = self.def_collector.def_map.module_id(self.module_id);
            let def_map = &mut self.def_collector.def_map;
            // Declares `id` under `name` in the current module and feeds the
            // resolution into `DefCollector::update` (glob propagation).
            let update_def =
                |def_collector: &mut DefCollector<'_>, id, name: &Name, vis, has_constructor| {
                    def_collector.def_map.modules[self.module_id].scope.declare(id);
                    def_collector.update(
                        self.module_id,
                        &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))],
                        vis,
                        ImportType::Named,
                    )
                };
            // Falls back to `Public` when the written visibility can't be
            // resolved, so collection keeps going on erroneous code.
            let resolve_vis = |def_map: &DefMap, visibility| {
                def_map
                    .resolve_visibility(db, self.module_id, visibility)
                    .unwrap_or(Visibility::Public)
            };

            match item {
                ModItem::Mod(m) => self.collect_module(m, &attrs),
                ModItem::Import(import_id) => {
                    let imports = Import::from_use(
                        db,
                        krate,
                        self.item_tree,
                        ItemTreeId::new(self.tree_id, import_id),
                    );
                    self.def_collector.unresolved_imports.extend(imports.into_iter().map(
                        |import| ImportDirective {
                            module_id: self.module_id,
                            import,
                            status: PartialResolvedImport::Unresolved,
                        },
                    ));
                }
                ModItem::ExternCrate(import_id) => {
                    self.def_collector.unresolved_imports.push(ImportDirective {
                        module_id: self.module_id,
                        import: Import::from_extern_crate(
                            db,
                            krate,
                            self.item_tree,
                            ItemTreeId::new(self.tree_id, import_id),
                        ),
                        status: PartialResolvedImport::Unresolved,
                    })
                }
                ModItem::ExternBlock(block) => self.collect(
                    &self.item_tree[block].children,
                    ItemContainerId::ExternBlockId(
                        ExternBlockLoc {
                            container: module,
                            id: ItemTreeId::new(self.tree_id, block),
                        }
                        .intern(db),
                    ),
                ),
                ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container),
                ModItem::MacroRules(id) => self.collect_macro_rules(id, module),
                ModItem::MacroDef(id) => self.collect_macro_def(id, module),
                ModItem::Impl(imp) => {
                    let impl_id =
                        ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) }
                            .intern(db);
                    self.def_collector.def_map.modules[self.module_id].scope.define_impl(impl_id)
                }
                ModItem::Function(id) => {
                    let it = &self.item_tree[id];
                    let fn_id =
                        FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);

                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
                    // Proc-macro definitions only count when declared at the
                    // crate root of a proc-macro crate.
                    if self.def_collector.is_proc_macro {
                        if self.module_id == def_map.root {
                            if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
                                let crate_root = def_map.module_id(def_map.root);
                                self.def_collector.export_proc_macro(
                                    proc_macro,
                                    ItemTreeId::new(self.tree_id, id),
                                    fn_id,
                                    crate_root,
                                );
                            }
                        }
                    }

                    update_def(self.def_collector, fn_id.into(), &it.name, vis, false);
                }
                ModItem::Struct(id) => {
                    let it = &self.item_tree[id];

                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
                    update_def(
                        self.def_collector,
                        StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
                            .intern(db)
                            .into(),
                        &it.name,
                        vis,
                        // Only tuple/unit structs have a value-namespace
                        // constructor; record structs do not.
                        !matches!(it.fields, Fields::Record(_)),
                    );
                }
                ModItem::Union(id) => {
                    let it = &self.item_tree[id];

                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
                    update_def(
                        self.def_collector,
                        UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
                            .intern(db)
                            .into(),
                        &it.name,
                        vis,
                        false,
                    );
                }
                ModItem::Enum(id) => {
                    let it = &self.item_tree[id];

                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
                    update_def(
                        self.def_collector,
                        EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
                            .intern(db)
                            .into(),
                        &it.name,
                        vis,
                        false,
                    );
                }
                ModItem::Const(id) => {
                    let it = &self.item_tree[id];
                    let const_id =
                        ConstLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);

                    match &it.name {
                        Some(name) => {
                            let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
                            update_def(self.def_collector, const_id.into(), name, vis, false);
                        }
                        None => {
                            // const _: T = ...;
                            self.def_collector.def_map.modules[self.module_id]
                                .scope
                                .define_unnamed_const(const_id);
                        }
                    }
                }
                ModItem::Static(id) => {
                    let it = &self.item_tree[id];

                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
                    update_def(
                        self.def_collector,
                        StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) }
                            .intern(db)
                            .into(),
                        &it.name,
                        vis,
                        false,
                    );
                }
                ModItem::Trait(id) => {
                    let it = &self.item_tree[id];

                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
                    update_def(
                        self.def_collector,
                        TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
                            .intern(db)
                            .into(),
                        &it.name,
                        vis,
                        false,
                    );
                }
                ModItem::TypeAlias(id) => {
                    let it = &self.item_tree[id];

                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
                    update_def(
                        self.def_collector,
                        TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) }
                            .intern(db)
                            .into(),
                        &it.name,
                        vis,
                        false,
                    );
                }
            }
        }
    }
+
    /// Collects a `mod` item: recurses directly for inline modules, and
    /// resolves + loads the file for out-of-line (`mod foo;`) modules, pushing
    /// an `unresolved_module` diagnostic if the file can't be found.
    ///
    /// Honors `#[path]` (via `path_attr`) and `#[macro_use]` on the module.
    fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
        let path_attr = attrs.by_key("path").string_value();
        let is_macro_use = attrs.by_key("macro_use").exists();
        let module = &self.item_tree[module_id];
        match &module.kind {
            // inline module, just recurse
            ModKind::Inline { items } => {
                let module_id = self.push_child_module(
                    module.name.clone(),
                    AstId::new(self.file_id(), module.ast_id),
                    None,
                    &self.item_tree[module.visibility],
                    module_id,
                );

                if let Some(mod_dir) = self.mod_dir.descend_into_definition(&module.name, path_attr)
                {
                    ModCollector {
                        def_collector: &mut *self.def_collector,
                        macro_depth: self.macro_depth,
                        module_id,
                        tree_id: self.tree_id,
                        item_tree: self.item_tree,
                        mod_dir,
                    }
                    .collect_in_top_module(&*items);
                    if is_macro_use {
                        self.import_all_legacy_macros(module_id);
                    }
                }
            }
            // out of line module, resolve, parse and recurse
            ModKind::Outline => {
                let ast_id = AstId::new(self.tree_id.file_id(), module.ast_id);
                let db = self.def_collector.db;
                match self.mod_dir.resolve_declaration(db, self.file_id(), &module.name, path_attr)
                {
                    Ok((file_id, is_mod_rs, mod_dir)) => {
                        let item_tree = db.file_item_tree(file_id.into());
                        let krate = self.def_collector.def_map.krate;
                        // A file-level `#![cfg]` can disable the whole module;
                        // in that case nothing is collected (and no child
                        // module is pushed for the Ok path).
                        let is_enabled = item_tree
                            .top_level_attrs(db, krate)
                            .cfg()
                            .map_or(true, |cfg| self.is_cfg_enabled(&cfg));
                        if is_enabled {
                            let module_id = self.push_child_module(
                                module.name.clone(),
                                ast_id,
                                Some((file_id, is_mod_rs)),
                                &self.item_tree[module.visibility],
                                module_id,
                            );
                            ModCollector {
                                def_collector: self.def_collector,
                                macro_depth: self.macro_depth,
                                module_id,
                                tree_id: TreeId::new(file_id.into(), None),
                                item_tree: &item_tree,
                                mod_dir,
                            }
                            .collect_in_top_module(item_tree.top_level_items());
                            // `#[macro_use]` may sit on the declaration or as
                            // `#![macro_use]` inside the file itself.
                            let is_macro_use = is_macro_use
                                || item_tree
                                    .top_level_attrs(db, krate)
                                    .by_key("macro_use")
                                    .exists();
                            if is_macro_use {
                                self.import_all_legacy_macros(module_id);
                            }
                        }
                    }
                    Err(candidates) => {
                        // Still declare the (empty) module so the name
                        // resolves, then report the missing file.
                        self.push_child_module(
                            module.name.clone(),
                            ast_id,
                            None,
                            &self.item_tree[module.visibility],
                            module_id,
                        );
                        self.def_collector.def_map.diagnostics.push(
                            DefDiagnostic::unresolved_module(self.module_id, ast_id, candidates),
                        );
                    }
                };
            }
        }
    }
+
    /// Allocates a new child module of `self.module_id` named `name`, wires up
    /// parent/child links, inherits the parent's legacy (`macro_rules!`)
    /// macros, and declares the module in the parent's scope.
    ///
    /// `definition` is `Some((file, is_mod_rs))` for out-of-line modules and
    /// `None` for inline ones. Returns the new module's local id.
    fn push_child_module(
        &mut self,
        name: Name,
        declaration: AstId<ast::Module>,
        definition: Option<(FileId, bool)>,
        visibility: &crate::visibility::RawVisibility,
        mod_tree_id: FileItemTreeId<Mod>,
    ) -> LocalModuleId {
        let def_map = &mut self.def_collector.def_map;
        let vis = def_map
            .resolve_visibility(self.def_collector.db, self.module_id, visibility)
            .unwrap_or(Visibility::Public);
        let modules = &mut def_map.modules;
        let origin = match definition {
            None => ModuleOrigin::Inline {
                definition: declaration,
                definition_tree_id: ItemTreeId::new(self.tree_id, mod_tree_id),
            },
            Some((definition, is_mod_rs)) => ModuleOrigin::File {
                declaration,
                definition,
                is_mod_rs,
                declaration_tree_id: ItemTreeId::new(self.tree_id, mod_tree_id),
            },
        };

        let res = modules.alloc(ModuleData::new(origin, vis));
        modules[res].parent = Some(self.module_id);
        // Legacy macros are textually scoped: the child starts with everything
        // visible in the parent at this point.
        for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() {
            for &mac in &mac {
                modules[res].scope.define_legacy_macro(name.clone(), mac);
            }
        }
        modules[self.module_id].children.insert(name.clone(), res);

        let module = def_map.module_id(res);
        let def = ModuleDefId::from(module);

        def_map.modules[self.module_id].scope.declare(def);
        self.def_collector.update(
            self.module_id,
            &[(Some(name), PerNs::from_def(def, vis, false))],
            vis,
            ImportType::Named,
        );
        res
    }
+
    /// Resolves attributes on an item.
    ///
    /// Returns `Err` when some attributes could not be resolved to builtins and have been
    /// registered as unresolved.
    ///
    /// If `ignore_up_to` is `Some`, attributes preceding and including that attribute will be
    /// assumed to be resolved already.
    fn resolve_attributes(
        &mut self,
        attrs: &Attrs,
        mod_item: ModItem,
        container: ItemContainerId,
    ) -> Result<(), ()> {
        // `skip_attrs` records the last attribute already handled on this item
        // during a previous collection round (see `recollect_without`).
        let mut ignore_up_to =
            self.def_collector.skip_attrs.get(&InFile::new(self.file_id(), mod_item)).copied();
        let iter = attrs
            .iter()
            .dedup_by(|a, b| {
                // FIXME: this should not be required, all attributes on an item should have a
                // unique ID!
                // Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes:
                //     #[cfg_attr(not(off), unresolved, unresolved)]
                //     struct S;
                // We should come up with a different way to ID attributes.
                a.id == b.id
            })
            .skip_while(|attr| match ignore_up_to {
                Some(id) if attr.id == id => {
                    // This is the last already-handled attribute; skip it too,
                    // then stop skipping.
                    ignore_up_to = None;
                    true
                }
                Some(_) => true,
                None => false,
            });

        for attr in iter {
            if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) {
                continue;
            }
            tracing::debug!("non-builtin attribute {}", attr.path);

            // Defer this item: queue the first unknown attribute as a macro
            // directive and bail; the remaining attributes are revisited when
            // the item is re-collected.
            let ast_id = AstIdWithPath::new(
                self.file_id(),
                mod_item.ast_id(self.item_tree),
                attr.path.as_ref().clone(),
            );
            self.def_collector.unresolved_macros.push(MacroDirective {
                module_id: self.module_id,
                depth: self.macro_depth + 1,
                kind: MacroDirectiveKind::Attr {
                    ast_id,
                    attr: attr.clone(),
                    mod_item,
                    tree: self.tree_id,
                },
                container,
            });

            return Err(());
        }

        Ok(())
    }
+
    /// Collects a `macro_rules!` definition: determines its expander (builtin
    /// via `#[rustc_builtin_macro]`, or declarative), whether it is
    /// `#[macro_export]`ed (optionally with `local_inner_macros`), and
    /// registers it with the `DefCollector`.
    fn collect_macro_rules(&mut self, id: FileItemTreeId<MacroRules>, module: ModuleId) {
        let krate = self.def_collector.def_map.krate;
        let mac = &self.item_tree[id];
        let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
        let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());

        let export_attr = attrs.by_key("macro_export");

        let is_export = export_attr.exists();
        // `#[macro_export(local_inner_macros)]` changes how paths inside the
        // macro body resolve; only the flag is recorded here.
        let local_inner = if is_export {
            export_attr.tt_values().flat_map(|it| &it.token_trees).any(|it| match it {
                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
                    ident.text.contains("local_inner_macros")
                }
                _ => false,
            })
        } else {
            false
        };

        // Case 1: builtin macros
        let expander = if attrs.by_key("rustc_builtin_macro").exists() {
            // `#[rustc_builtin_macro = "builtin_name"]` overrides the `macro_rules!` name.
            // `name` (the binding) outlives the match so `&name` can be
            // returned from its arms.
            let name;
            let name = match attrs.by_key("rustc_builtin_macro").string_value() {
                Some(it) => {
                    // FIXME: a hacky way to create a Name from string.
                    name = tt::Ident { text: it.clone(), id: tt::TokenId::unspecified() }.as_name();
                    &name
                }
                None => {
                    // Also accept the `#[rustc_builtin_macro(name)]` form.
                    let explicit_name =
                        attrs.by_key("rustc_builtin_macro").tt_values().next().and_then(|tt| {
                            match tt.token_trees.first() {
                                Some(tt::TokenTree::Leaf(tt::Leaf::Ident(name))) => Some(name),
                                _ => None,
                            }
                        });
                    match explicit_name {
                        Some(ident) => {
                            name = ident.as_name();
                            &name
                        }
                        None => &mac.name,
                    }
                }
            };
            match find_builtin_macro(name) {
                Some(Either::Left(it)) => MacroExpander::BuiltIn(it),
                Some(Either::Right(it)) => MacroExpander::BuiltInEager(it),
                None => {
                    self.def_collector
                        .def_map
                        .diagnostics
                        .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, ast_id));
                    return;
                }
            }
        } else {
            // Case 2: normal `macro_rules!` macro
            MacroExpander::Declarative
        };

        let macro_id = MacroRulesLoc {
            container: module,
            id: ItemTreeId::new(self.tree_id, id),
            local_inner,
            expander,
        }
        .intern(self.def_collector.db);
        self.def_collector.define_macro_rules(
            self.module_id,
            mac.name.clone(),
            macro_id,
            is_export,
        );
    }
+
    /// Collects a macros-2.0 `macro` definition: picks the expander (builtin
    /// fn-like/derive/attr for `#[rustc_builtin_macro]`, otherwise
    /// declarative) and registers it with its written visibility.
    fn collect_macro_def(&mut self, id: FileItemTreeId<MacroDef>, module: ModuleId) {
        let krate = self.def_collector.def_map.krate;
        let mac = &self.item_tree[id];
        let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());

        // Case 1: builtin macros
        let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
        let expander = if attrs.by_key("rustc_builtin_macro").exists() {
            // Unlike `collect_macro_rules`, the builtin is looked up by the
            // macro's own name only (no explicit-name override here).
            if let Some(expander) = find_builtin_macro(&mac.name) {
                match expander {
                    Either::Left(it) => MacroExpander::BuiltIn(it),
                    Either::Right(it) => MacroExpander::BuiltInEager(it),
                }
            } else if let Some(expander) = find_builtin_derive(&mac.name) {
                MacroExpander::BuiltInDerive(expander)
            } else if let Some(expander) = find_builtin_attr(&mac.name) {
                MacroExpander::BuiltInAttr(expander)
            } else {
                self.def_collector
                    .def_map
                    .diagnostics
                    .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, ast_id));
                return;
            }
        } else {
            // Case 2: normal `macro`
            MacroExpander::Declarative
        };

        let macro_id =
            Macro2Loc { container: module, id: ItemTreeId::new(self.tree_id, id), expander }
                .intern(self.def_collector.db);
        self.def_collector.define_macro_def(
            self.module_id,
            mac.name.clone(),
            macro_id,
            &self.item_tree[mac.visibility],
        );
    }
+
    /// Collects a fn-like macro call. Legacy (textual-scope) macros are
    /// resolved and expanded eagerly; anything else is queued as a
    /// `MacroDirective` for the module-scope fixed-point.
    fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
        let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));

        // Case 1: try to resolve in legacy scope and expand macro_rules
        let mut error = None;
        match macro_call_as_call_id(
            self.def_collector.db,
            &ast_id,
            mac.expand_to,
            self.def_collector.def_map.krate,
            |path| {
                // Legacy macros can only be referred to by a single identifier;
                // look it up through this module and its ancestors. `last()`
                // picks the most recent definition of the name.
                path.as_ident().and_then(|name| {
                    self.def_collector.def_map.with_ancestor_maps(
                        self.def_collector.db,
                        self.module_id,
                        &mut |map, module| {
                            map[module]
                                .scope
                                .get_legacy_macro(name)
                                .and_then(|it| it.last())
                                .map(|&it| macro_id_to_def_id(self.def_collector.db, it.into()))
                        },
                    )
                })
            },
            &mut |err| {
                error.get_or_insert(err);
            },
        ) {
            Ok(Ok(macro_call_id)) => {
                // Legacy macros need to be expanded immediately, so that any macros they produce
                // are in scope.
                self.def_collector.collect_macro_expansion(
                    self.module_id,
                    macro_call_id,
                    self.macro_depth + 1,
                    container,
                );

                // Expansion succeeded but may still have produced a
                // recoverable error — surface it as a diagnostic.
                if let Some(err) = error {
                    self.def_collector.def_map.diagnostics.push(DefDiagnostic::macro_error(
                        self.module_id,
                        MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: mac.expand_to },
                        err.to_string(),
                    ));
                }

                return;
            }
            Ok(Err(_)) => {
                // Built-in macro failed eager expansion.

                self.def_collector.def_map.diagnostics.push(DefDiagnostic::macro_error(
                    self.module_id,
                    MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: mac.expand_to },
                    error.unwrap().to_string(),
                ));
                return;
            }
            Err(UnresolvedMacro { .. }) => (),
        }

        // Case 2: resolve in module scope, expand during name resolution.
        self.def_collector.unresolved_macros.push(MacroDirective {
            module_id: self.module_id,
            depth: self.macro_depth + 1,
            kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
            container,
        });
    }
+
+ fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
+ let macros = self.def_collector.def_map[module_id].scope.collect_legacy_macros();
+ for (name, macs) in macros {
+ macs.last().map(|&mac| {
+ self.def_collector.define_legacy_macro(self.module_id, name.clone(), mac)
+ });
+ }
+ }
+
+ fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
+ self.def_collector.cfg_options.check(cfg) != Some(false)
+ }
+
+ fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) {
+ let ast_id = item.ast_id(self.item_tree);
+
+ let ast_id = InFile::new(self.file_id(), ast_id);
+ self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
+ self.module_id,
+ ast_id,
+ cfg.clone(),
+ self.def_collector.cfg_options.clone(),
+ ));
+ }
+
    /// The file this collector's item tree belongs to.
    fn file_id(&self) -> HirFileId {
        self.tree_id.file_id()
    }
+}
+
#[cfg(test)]
mod tests {
    use crate::{db::DefDatabase, test_db::TestDB};
    use base_db::{fixture::WithFixture, SourceDatabase};

    use super::*;

    /// Runs collection on `def_map` with a minimal, default-configured
    /// `DefCollector` and returns the resulting map.
    fn do_collect_defs(db: &dyn DefDatabase, def_map: DefMap) -> DefMap {
        let mut collector = DefCollector {
            db,
            def_map,
            deps: FxHashMap::default(),
            glob_imports: FxHashMap::default(),
            unresolved_imports: Vec::new(),
            indeterminate_imports: Vec::new(),
            unresolved_macros: Vec::new(),
            mod_dirs: FxHashMap::default(),
            cfg_options: &CfgOptions::default(),
            proc_macros: Default::default(),
            from_glob_import: Default::default(),
            skip_attrs: Default::default(),
            is_proc_macro: false,
        };
        collector.seed_with_top_level();
        collector.collect();
        collector.def_map
    }

    /// Builds a single-file test crate and resolves it from an empty `DefMap`.
    /// The argument is deliberately *not* an `ra_fixture` (see the note in
    /// `test_macro_expand_will_stop_2`).
    fn do_resolve(not_ra_fixture: &str) -> DefMap {
        let (db, file_id) = TestDB::with_single_file(not_ra_fixture);
        let krate = db.test_crate();

        let edition = db.crate_graph()[krate].edition;
        let module_origin = ModuleOrigin::CrateRoot { definition: file_id };
        let def_map =
            DefMap::empty(krate, edition, ModuleData::new(module_origin, Visibility::Public));
        do_collect_defs(&db, def_map)
    }

    // Self-recursive macros must hit the expansion limit instead of looping.
    #[test]
    fn test_macro_expand_will_stop_1() {
        do_resolve(
            r#"
macro_rules! foo {
    ($($ty:ty)*) => { foo!($($ty)*); }
}
foo!(KABOOM);
"#,
        );
        do_resolve(
            r#"
macro_rules! foo {
    ($($ty:ty)*) => { foo!(() $($ty)*); }
}
foo!(KABOOM);
"#,
        );
    }

    #[ignore]
    #[test]
    fn test_macro_expand_will_stop_2() {
        // FIXME: this test does succeed, but takes quite a while: 90 seconds in
        // the release mode. That's why the argument is not an ra_fixture --
        // otherwise injection highlighting gets stuck.
        //
        // We need to find a way to fail this faster.
        do_resolve(
            r#"
macro_rules! foo {
    ($($ty:ty)*) => { foo!($($ty)* $($ty)*); }
}
foo!(KABOOM);
"#,
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
new file mode 100644
index 000000000..0d01f6d0a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
@@ -0,0 +1,137 @@
+//! Diagnostics emitted during DefMap construction.
+
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use hir_expand::MacroCallKind;
+use la_arena::Idx;
+use syntax::ast;
+
+use crate::{
+ attr::AttrId,
+ item_tree::{self, ItemTreeId},
+ nameres::LocalModuleId,
+ path::ModPath,
+ AstId,
+};
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum DefDiagnosticKind {
+ UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
+
+ UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
+
+ UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> },
+
+ UnconfiguredCode { ast: AstId<ast::Item>, cfg: CfgExpr, opts: CfgOptions },
+
+ UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
+
+ UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
+
+ MacroError { ast: MacroCallKind, message: String },
+
+ UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
+
+ InvalidDeriveTarget { ast: AstId<ast::Item>, id: u32 },
+
+ MalformedDerive { ast: AstId<ast::Adt>, id: u32 },
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct DefDiagnostic {
+ pub in_module: LocalModuleId,
+ pub kind: DefDiagnosticKind,
+}
+
+impl DefDiagnostic {
+ pub(super) fn unresolved_module(
+ container: LocalModuleId,
+ declaration: AstId<ast::Module>,
+ candidates: Box<[String]>,
+ ) -> Self {
+ Self {
+ in_module: container,
+ kind: DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates },
+ }
+ }
+
+ pub(super) fn unresolved_extern_crate(
+ container: LocalModuleId,
+ declaration: AstId<ast::ExternCrate>,
+ ) -> Self {
+ Self {
+ in_module: container,
+ kind: DefDiagnosticKind::UnresolvedExternCrate { ast: declaration },
+ }
+ }
+
+ pub(super) fn unresolved_import(
+ container: LocalModuleId,
+ id: ItemTreeId<item_tree::Import>,
+ index: Idx<ast::UseTree>,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
+ }
+
+ pub(super) fn unconfigured_code(
+ container: LocalModuleId,
+ ast: AstId<ast::Item>,
+ cfg: CfgExpr,
+ opts: CfgOptions,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } }
+ }
+
+ pub(super) fn unresolved_proc_macro(
+ container: LocalModuleId,
+ ast: MacroCallKind,
+ krate: CrateId,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnresolvedProcMacro { ast, krate } }
+ }
+
+ pub(super) fn macro_error(
+ container: LocalModuleId,
+ ast: MacroCallKind,
+ message: String,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, message } }
+ }
+
+ pub(super) fn unresolved_macro_call(
+ container: LocalModuleId,
+ ast: MacroCallKind,
+ path: ModPath,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnresolvedMacroCall { ast, path } }
+ }
+
+ pub(super) fn unimplemented_builtin_macro(
+ container: LocalModuleId,
+ ast: AstId<ast::Macro>,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnimplementedBuiltinMacro { ast } }
+ }
+
+ pub(super) fn invalid_derive_target(
+ container: LocalModuleId,
+ ast: AstId<ast::Item>,
+ id: AttrId,
+ ) -> Self {
+ Self {
+ in_module: container,
+ kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index },
+ }
+ }
+
+ pub(super) fn malformed_derive(
+ container: LocalModuleId,
+ ast: AstId<ast::Adt>,
+ id: AttrId,
+ ) -> Self {
+ Self {
+ in_module: container,
+ kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index },
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
new file mode 100644
index 000000000..52a620fe2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -0,0 +1,161 @@
+//! This module resolves `mod foo;` declaration to file.
+use arrayvec::ArrayVec;
+use base_db::{AnchoredPath, FileId};
+use hir_expand::name::Name;
+use limit::Limit;
+use syntax::SmolStr;
+
+use crate::{db::DefDatabase, HirFileId};
+
+const MOD_DEPTH_LIMIT: Limit = Limit::new(32);
+
+#[derive(Clone, Debug)]
+pub(super) struct ModDir {
+ /// `` for `mod.rs`, `lib.rs`
+ /// `foo/` for `foo.rs`
+ /// `foo/bar/` for `mod bar { mod x; }` nested in `foo.rs`
+ /// Invariant: path.is_empty() || path.ends_with('/')
+ dir_path: DirPath,
+ /// inside `./foo.rs`, mods with `#[path]` should *not* be relative to `./foo/`
+ root_non_dir_owner: bool,
+ depth: u32,
+}
+
+impl ModDir {
+ pub(super) fn root() -> ModDir {
+ ModDir { dir_path: DirPath::empty(), root_non_dir_owner: false, depth: 0 }
+ }
+
+ pub(super) fn descend_into_definition(
+ &self,
+ name: &Name,
+ attr_path: Option<&SmolStr>,
+ ) -> Option<ModDir> {
+ let path = match attr_path.map(SmolStr::as_str) {
+ None => {
+ let mut path = self.dir_path.clone();
+ path.push(&name.to_smol_str());
+ path
+ }
+ Some(attr_path) => {
+ let mut path = self.dir_path.join_attr(attr_path, self.root_non_dir_owner);
+ if !(path.is_empty() || path.ends_with('/')) {
+ path.push('/')
+ }
+ DirPath::new(path)
+ }
+ };
+ self.child(path, false)
+ }
+
+ fn child(&self, dir_path: DirPath, root_non_dir_owner: bool) -> Option<ModDir> {
+ let depth = self.depth + 1;
+ if MOD_DEPTH_LIMIT.check(depth as usize).is_err() {
+ tracing::error!("MOD_DEPTH_LIMIT exceeded");
+ cov_mark::hit!(circular_mods);
+ return None;
+ }
+ Some(ModDir { dir_path, root_non_dir_owner, depth })
+ }
+
+ pub(super) fn resolve_declaration(
+ &self,
+ db: &dyn DefDatabase,
+ file_id: HirFileId,
+ name: &Name,
+ attr_path: Option<&SmolStr>,
+ ) -> Result<(FileId, bool, ModDir), Box<[String]>> {
+ let orig_file_id = file_id.original_file(db.upcast());
+
+ let mut candidate_files = ArrayVec::<_, 2>::new();
+ match attr_path {
+ Some(attr_path) => {
+ candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
+ }
+ None if file_id.is_include_macro(db.upcast()) => {
+ candidate_files.push(format!("{}.rs", name));
+ candidate_files.push(format!("{}/mod.rs", name));
+ }
+ None => {
+ candidate_files.push(format!("{}{}.rs", self.dir_path.0, name));
+ candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name));
+ }
+ };
+
+ for candidate in candidate_files.iter() {
+ let path = AnchoredPath { anchor: orig_file_id, path: candidate.as_str() };
+ if let Some(file_id) = db.resolve_path(path) {
+ let is_mod_rs = candidate.ends_with("/mod.rs");
+
+ let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() {
+ (DirPath::empty(), false)
+ } else {
+ (DirPath::new(format!("{}/", name)), true)
+ };
+ if let Some(mod_dir) = self.child(dir_path, root_non_dir_owner) {
+ return Ok((file_id, is_mod_rs, mod_dir));
+ }
+ }
+ }
+ Err(candidate_files.into_iter().collect())
+ }
+}
+
+#[derive(Clone, Debug)]
+struct DirPath(String);
+
+impl DirPath {
+ fn assert_invariant(&self) {
+ assert!(self.0.is_empty() || self.0.ends_with('/'));
+ }
+ fn new(repr: String) -> DirPath {
+ let res = DirPath(repr);
+ res.assert_invariant();
+ res
+ }
+ fn empty() -> DirPath {
+ DirPath::new(String::new())
+ }
+ fn push(&mut self, name: &str) {
+ self.0.push_str(name);
+ self.0.push('/');
+ self.assert_invariant();
+ }
+ fn parent(&self) -> Option<&str> {
+ if self.0.is_empty() {
+ return None;
+ };
+ let idx =
+ self.0[..self.0.len() - '/'.len_utf8()].rfind('/').map_or(0, |it| it + '/'.len_utf8());
+ Some(&self.0[..idx])
+ }
+    /// This is the case which I think doesn't really work if we try to be
+    /// 100% platform agnostic:
+ ///
+ /// ```
+ /// mod a {
+ /// #[path="C://sad/face"]
+ /// mod b { mod c; }
+ /// }
+ /// ```
+ ///
+ /// Here, we need to join logical dir path to a string path from an
+ /// attribute. Ideally, we should somehow losslessly communicate the whole
+ /// construction to `FileLoader`.
+ fn join_attr(&self, mut attr: &str, relative_to_parent: bool) -> String {
+ let base = if relative_to_parent { self.parent().unwrap() } else { &self.0 };
+
+ if attr.starts_with("./") {
+ attr = &attr["./".len()..];
+ }
+ let tmp;
+ let attr = if attr.contains('\\') {
+ tmp = attr.replace('\\', "/");
+ &tmp
+ } else {
+ attr
+ };
+ let res = format!("{}{}", base, attr);
+ res
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
new file mode 100644
index 000000000..c579bc919
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
@@ -0,0 +1,448 @@
+//! This modules implements a function to resolve a path `foo::bar::baz` to a
+//! def, which is used within the name resolution.
+//!
+//! When name resolution is finished, the result of resolving a path is either
+//! `Some(def)` or `None`. However, when we are in process of resolving imports
+//! or macros, there's a third possibility:
+//!
+//! I can't resolve this path right now, but I might be able to resolve this path
+//! later, when more macros are expanded.
+//!
+//! `ReachedFixedPoint` signals about this.
+
+use base_db::Edition;
+use hir_expand::name::Name;
+
+use crate::{
+ db::DefDatabase,
+ item_scope::BUILTIN_SCOPE,
+ nameres::{BuiltinShadowMode, DefMap},
+ path::{ModPath, PathKind},
+ per_ns::PerNs,
+ visibility::{RawVisibility, Visibility},
+ AdtId, CrateId, EnumVariantId, LocalModuleId, ModuleDefId, ModuleId,
+};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(super) enum ResolveMode {
+ Import,
+ Other,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(super) enum ReachedFixedPoint {
+ Yes,
+ No,
+}
+
+#[derive(Debug, Clone)]
+pub(super) struct ResolvePathResult {
+ pub(super) resolved_def: PerNs,
+ pub(super) segment_index: Option<usize>,
+ pub(super) reached_fixedpoint: ReachedFixedPoint,
+ pub(super) krate: Option<CrateId>,
+}
+
+impl ResolvePathResult {
+ fn empty(reached_fixedpoint: ReachedFixedPoint) -> ResolvePathResult {
+ ResolvePathResult::with(PerNs::none(), reached_fixedpoint, None, None)
+ }
+
+ fn with(
+ resolved_def: PerNs,
+ reached_fixedpoint: ReachedFixedPoint,
+ segment_index: Option<usize>,
+ krate: Option<CrateId>,
+ ) -> ResolvePathResult {
+ ResolvePathResult { resolved_def, segment_index, reached_fixedpoint, krate }
+ }
+}
+
+impl DefMap {
+ pub(super) fn resolve_name_in_extern_prelude(
+ &self,
+ db: &dyn DefDatabase,
+ name: &Name,
+ ) -> Option<ModuleId> {
+ match self.block {
+ Some(_) => self.crate_root(db).def_map(db).extern_prelude.get(name).copied(),
+ None => self.extern_prelude.get(name).copied(),
+ }
+ }
+
+ pub(crate) fn resolve_visibility(
+ &self,
+ db: &dyn DefDatabase,
+ original_module: LocalModuleId,
+ visibility: &RawVisibility,
+ ) -> Option<Visibility> {
+ let mut vis = match visibility {
+ RawVisibility::Module(path) => {
+ let (result, remaining) =
+ self.resolve_path(db, original_module, path, BuiltinShadowMode::Module);
+ if remaining.is_some() {
+ return None;
+ }
+ let types = result.take_types()?;
+ match types {
+ ModuleDefId::ModuleId(m) => Visibility::Module(m),
+ _ => {
+ // error: visibility needs to refer to module
+ return None;
+ }
+ }
+ }
+ RawVisibility::Public => Visibility::Public,
+ };
+
+ // In block expressions, `self` normally refers to the containing non-block module, and
+ // `super` to its parent (etc.). However, visibilities must only refer to a module in the
+ // DefMap they're written in, so we restrict them when that happens.
+ if let Visibility::Module(m) = vis {
+ if self.block_id() != m.block {
+ cov_mark::hit!(adjust_vis_in_block_def_map);
+ vis = Visibility::Module(self.module_id(self.root()));
+ tracing::debug!("visibility {:?} points outside DefMap, adjusting to {:?}", m, vis);
+ }
+ }
+
+ Some(vis)
+ }
+
+ // Returns Yes if we are sure that additions to `ItemMap` wouldn't change
+ // the result.
+ pub(super) fn resolve_path_fp_with_macro(
+ &self,
+ db: &dyn DefDatabase,
+ mode: ResolveMode,
+ mut original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> ResolvePathResult {
+ let mut result = ResolvePathResult::empty(ReachedFixedPoint::No);
+
+ let mut arc;
+ let mut current_map = self;
+ loop {
+ let new = current_map.resolve_path_fp_with_macro_single(
+ db,
+ mode,
+ original_module,
+ path,
+ shadow,
+ );
+
+ // Merge `new` into `result`.
+ result.resolved_def = result.resolved_def.or(new.resolved_def);
+ if result.reached_fixedpoint == ReachedFixedPoint::No {
+ result.reached_fixedpoint = new.reached_fixedpoint;
+ }
+ // FIXME: this doesn't seem right; what if the different namespace resolutions come from different crates?
+ result.krate = result.krate.or(new.krate);
+ result.segment_index = match (result.segment_index, new.segment_index) {
+ (Some(idx), None) => Some(idx),
+ (Some(old), Some(new)) => Some(old.max(new)),
+ (None, new) => new,
+ };
+
+ match &current_map.block {
+ Some(block) => {
+ original_module = block.parent.local_id;
+ arc = block.parent.def_map(db);
+ current_map = &*arc;
+ }
+ None => return result,
+ }
+ }
+ }
+
+ pub(super) fn resolve_path_fp_with_macro_single(
+ &self,
+ db: &dyn DefDatabase,
+ mode: ResolveMode,
+ original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> ResolvePathResult {
+ let graph = db.crate_graph();
+ let _cx = stdx::panic_context::enter(format!(
+ "DefMap {:?} crate_name={:?} block={:?} path={}",
+ self.krate, graph[self.krate].display_name, self.block, path
+ ));
+
+ let mut segments = path.segments().iter().enumerate();
+ let mut curr_per_ns: PerNs = match path.kind {
+ PathKind::DollarCrate(krate) => {
+ if krate == self.krate {
+ cov_mark::hit!(macro_dollar_crate_self);
+ PerNs::types(self.crate_root(db).into(), Visibility::Public)
+ } else {
+ let def_map = db.crate_def_map(krate);
+ let module = def_map.module_id(def_map.root);
+ cov_mark::hit!(macro_dollar_crate_other);
+ PerNs::types(module.into(), Visibility::Public)
+ }
+ }
+ PathKind::Crate => PerNs::types(self.crate_root(db).into(), Visibility::Public),
+ // plain import or absolute path in 2015: crate-relative with
+ // fallback to extern prelude (with the simplification in
+ // rust-lang/rust#57745)
+ // FIXME there must be a nicer way to write this condition
+ PathKind::Plain | PathKind::Abs
+ if self.edition == Edition::Edition2015
+ && (path.kind == PathKind::Abs || mode == ResolveMode::Import) =>
+ {
+ let (_, segment) = match segments.next() {
+ Some((idx, segment)) => (idx, segment),
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
+ self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
+ }
+ PathKind::Plain => {
+ let (_, segment) = match segments.next() {
+ Some((idx, segment)) => (idx, segment),
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ // The first segment may be a builtin type. If the path has more
+ // than one segment, we first try resolving it as a module
+ // anyway.
+ // FIXME: If the next segment doesn't resolve in the module and
+ // BuiltinShadowMode wasn't Module, then we need to try
+ // resolving it as a builtin.
+ let prefer_module =
+ if path.segments().len() == 1 { shadow } else { BuiltinShadowMode::Module };
+
+ tracing::debug!("resolving {:?} in module", segment);
+ self.resolve_name_in_module(db, original_module, segment, prefer_module)
+ }
+ PathKind::Super(lvl) => {
+ let mut module = original_module;
+ for i in 0..lvl {
+ match self.modules[module].parent {
+ Some(it) => module = it,
+ None => match &self.block {
+ Some(block) => {
+ // Look up remaining path in parent `DefMap`
+ let new_path = ModPath::from_segments(
+ PathKind::Super(lvl - i),
+ path.segments().to_vec(),
+ );
+ tracing::debug!(
+ "`super` path: {} -> {} in parent map",
+ path,
+ new_path
+ );
+ return block.parent.def_map(db).resolve_path_fp_with_macro(
+ db,
+ mode,
+ block.parent.local_id,
+ &new_path,
+ shadow,
+ );
+ }
+ None => {
+ tracing::debug!("super path in root module");
+ return ResolvePathResult::empty(ReachedFixedPoint::Yes);
+ }
+ },
+ }
+ }
+
+ // Resolve `self` to the containing crate-rooted module if we're a block
+ self.with_ancestor_maps(db, module, &mut |def_map, module| {
+ if def_map.block.is_some() {
+ None // keep ascending
+ } else {
+ Some(PerNs::types(def_map.module_id(module).into(), Visibility::Public))
+ }
+ })
+ .expect("block DefMap not rooted in crate DefMap")
+ }
+ PathKind::Abs => {
+ // 2018-style absolute path -- only extern prelude
+ let segment = match segments.next() {
+ Some((_, segment)) => segment,
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ if let Some(&def) = self.extern_prelude.get(segment) {
+ tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def);
+ PerNs::types(def.into(), Visibility::Public)
+ } else {
+ return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude
+ }
+ }
+ };
+
+ for (i, segment) in segments {
+ let (curr, vis) = match curr_per_ns.take_types_vis() {
+ Some(r) => r,
+ None => {
+ // we still have path segments left, but the path so far
+ // didn't resolve in the types namespace => no resolution
+ // (don't break here because `curr_per_ns` might contain
+ // something in the value namespace, and it would be wrong
+ // to return that)
+ return ResolvePathResult::empty(ReachedFixedPoint::No);
+ }
+ };
+ // resolve segment in curr
+
+ curr_per_ns = match curr {
+ ModuleDefId::ModuleId(module) => {
+ if module.krate != self.krate {
+ let path = ModPath::from_segments(
+ PathKind::Super(0),
+ path.segments()[i..].iter().cloned(),
+ );
+ tracing::debug!("resolving {:?} in other crate", path);
+ let defp_map = module.def_map(db);
+ let (def, s) = defp_map.resolve_path(db, module.local_id, &path, shadow);
+ return ResolvePathResult::with(
+ def,
+ ReachedFixedPoint::Yes,
+ s.map(|s| s + i),
+ Some(module.krate),
+ );
+ }
+
+ let def_map;
+ let module_data = if module.block == self.block_id() {
+ &self[module.local_id]
+ } else {
+ def_map = module.def_map(db);
+ &def_map[module.local_id]
+ };
+
+                    // Since it is a qualified path here, it should not contain legacy macros
+ module_data.scope.get(segment)
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(e)) => {
+ // enum variant
+ cov_mark::hit!(can_import_enum_variant);
+ let enum_data = db.enum_data(e);
+ match enum_data.variant(segment) {
+ Some(local_id) => {
+ let variant = EnumVariantId { parent: e, local_id };
+ match &*enum_data.variants[local_id].variant_data {
+ crate::adt::VariantData::Record(_) => {
+ PerNs::types(variant.into(), Visibility::Public)
+ }
+ crate::adt::VariantData::Tuple(_)
+ | crate::adt::VariantData::Unit => {
+ PerNs::both(variant.into(), variant.into(), Visibility::Public)
+ }
+ }
+ }
+ None => {
+ return ResolvePathResult::with(
+ PerNs::types(e.into(), vis),
+ ReachedFixedPoint::Yes,
+ Some(i),
+ Some(self.krate),
+ );
+ }
+ }
+ }
+ s => {
+ // could be an inherent method call in UFCS form
+ // (`Struct::method`), or some other kind of associated item
+ tracing::debug!(
+ "path segment {:?} resolved to non-module {:?}, but is not last",
+ segment,
+ curr,
+ );
+
+ return ResolvePathResult::with(
+ PerNs::types(s, vis),
+ ReachedFixedPoint::Yes,
+ Some(i),
+ Some(self.krate),
+ );
+ }
+ };
+ }
+
+ ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None, Some(self.krate))
+ }
+
+ fn resolve_name_in_module(
+ &self,
+ db: &dyn DefDatabase,
+ module: LocalModuleId,
+ name: &Name,
+ shadow: BuiltinShadowMode,
+ ) -> PerNs {
+ // Resolve in:
+ // - legacy scope of macro
+ // - current module / scope
+ // - extern prelude
+ // - std prelude
+ let from_legacy_macro = self[module]
+ .scope
+ .get_legacy_macro(name)
+ // FIXME: shadowing
+ .and_then(|it| it.last())
+ .map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public));
+ let from_scope = self[module].scope.get(name);
+ let from_builtin = match self.block {
+ Some(_) => {
+ // Only resolve to builtins in the root `DefMap`.
+ PerNs::none()
+ }
+ None => BUILTIN_SCOPE.get(name).copied().unwrap_or_else(PerNs::none),
+ };
+ let from_scope_or_builtin = match shadow {
+ BuiltinShadowMode::Module => from_scope.or(from_builtin),
+ BuiltinShadowMode::Other => match from_scope.take_types() {
+ Some(ModuleDefId::ModuleId(_)) => from_builtin.or(from_scope),
+ Some(_) | None => from_scope.or(from_builtin),
+ },
+ };
+ let from_extern_prelude = self
+ .extern_prelude
+ .get(name)
+ .map_or(PerNs::none(), |&it| PerNs::types(it.into(), Visibility::Public));
+
+ let from_prelude = self.resolve_in_prelude(db, name);
+
+ from_legacy_macro.or(from_scope_or_builtin).or(from_extern_prelude).or(from_prelude)
+ }
+
+ fn resolve_name_in_crate_root_or_extern_prelude(
+ &self,
+ db: &dyn DefDatabase,
+ name: &Name,
+ ) -> PerNs {
+ let arc;
+ let crate_def_map = match self.block {
+ Some(_) => {
+ arc = self.crate_root(db).def_map(db);
+ &arc
+ }
+ None => self,
+ };
+ let from_crate_root = crate_def_map[crate_def_map.root].scope.get(name);
+ let from_extern_prelude = self
+ .resolve_name_in_extern_prelude(db, name)
+ .map_or(PerNs::none(), |it| PerNs::types(it.into(), Visibility::Public));
+
+ from_crate_root.or(from_extern_prelude)
+ }
+
+ fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs {
+ if let Some(prelude) = self.prelude {
+ let keep;
+ let def_map = if prelude.krate == self.krate {
+ self
+ } else {
+ // Extend lifetime
+ keep = prelude.def_map(db);
+ &keep
+ };
+ def_map[prelude.local_id].scope.get(name)
+ } else {
+ PerNs::none()
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
new file mode 100644
index 000000000..5089ef2d8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
@@ -0,0 +1,81 @@
+//! Nameres-specific procedural macro data and helpers.
+
+use hir_expand::name::{AsName, Name};
+use tt::{Leaf, TokenTree};
+
+use crate::attr::Attrs;
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ProcMacroDef {
+ pub name: Name,
+ pub kind: ProcMacroKind,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum ProcMacroKind {
+ CustomDerive { helpers: Box<[Name]> },
+ FnLike,
+ Attr,
+}
+
+impl ProcMacroKind {
+ pub(super) fn to_basedb_kind(&self) -> base_db::ProcMacroKind {
+ match self {
+ ProcMacroKind::CustomDerive { .. } => base_db::ProcMacroKind::CustomDerive,
+ ProcMacroKind::FnLike => base_db::ProcMacroKind::FuncLike,
+ ProcMacroKind::Attr => base_db::ProcMacroKind::Attr,
+ }
+ }
+}
+
+impl Attrs {
+ #[rustfmt::skip]
+ pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
+ if self.is_proc_macro() {
+ Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::FnLike })
+ } else if self.is_proc_macro_attribute() {
+ Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr })
+ } else if self.by_key("proc_macro_derive").exists() {
+ let derive = self.by_key("proc_macro_derive").tt_values().next()?;
+
+ match &*derive.token_trees {
+ // `#[proc_macro_derive(Trait)]`
+ [TokenTree::Leaf(Leaf::Ident(trait_name))] => Some(ProcMacroDef {
+ name: trait_name.as_name(),
+ kind: ProcMacroKind::CustomDerive { helpers: Box::new([]) },
+ }),
+
+            // `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]`
+ [
+ TokenTree::Leaf(Leaf::Ident(trait_name)),
+ TokenTree::Leaf(Leaf::Punct(comma)),
+ TokenTree::Leaf(Leaf::Ident(attributes)),
+ TokenTree::Subtree(helpers)
+ ] if comma.char == ',' && attributes.text == "attributes" =>
+ {
+ let helpers = helpers.token_trees.iter()
+ .filter(|tt| !matches!(tt, TokenTree::Leaf(Leaf::Punct(comma)) if comma.char == ','))
+ .map(|tt| {
+ match tt {
+ TokenTree::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
+ _ => None
+ }
+ })
+ .collect::<Option<Box<[_]>>>()?;
+
+ Some(ProcMacroDef {
+ name: trait_name.as_name(),
+ kind: ProcMacroKind::CustomDerive { helpers },
+ })
+ }
+
+ _ => {
+ tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive);
+ None
+ }
+ }
+ } else {
+ None
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
new file mode 100644
index 000000000..70dd2eb3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
@@ -0,0 +1,933 @@
+mod globs;
+mod incremental;
+mod macros;
+mod mod_resolution;
+mod primitives;
+
+use std::sync::Arc;
+
+use base_db::{fixture::WithFixture, SourceDatabase};
+use expect_test::{expect, Expect};
+
+use crate::{db::DefDatabase, test_db::TestDB};
+
+use super::DefMap;
+
+fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
+ let db = TestDB::with_files(ra_fixture);
+ let krate = db.crate_graph().iter().next().unwrap();
+ db.crate_def_map(krate)
+}
+
+fn render_crate_def_map(ra_fixture: &str) -> String {
+ let db = TestDB::with_files(ra_fixture);
+ let krate = db.crate_graph().iter().next().unwrap();
+ db.crate_def_map(krate).dump(&db)
+}
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = render_crate_def_map(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn crate_def_map_smoke_test() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+struct S;
+use crate::foo::bar::E;
+use self::E::V;
+
+//- /foo/mod.rs
+pub mod bar;
+fn f() {}
+
+//- /foo/bar.rs
+pub struct Baz;
+
+union U { to_be: bool, not_to_be: u8 }
+enum E { V }
+
+extern {
+ type Ext;
+ static EXT: u8;
+ fn ext();
+}
+"#,
+ expect![[r#"
+ crate
+ E: t
+ S: t v
+ V: t v
+ foo: t
+
+ crate::foo
+ bar: t
+ f: v
+
+ crate::foo::bar
+ Baz: t v
+ E: t
+ EXT: v
+ Ext: t
+ U: t
+ ext: v
+ "#]],
+ );
+}
+
+#[test]
+fn crate_def_map_super_super() {
+ check(
+ r#"
+mod a {
+ const A: usize = 0;
+ mod b {
+ const B: usize = 0;
+ mod c {
+ use super::super::*;
+ }
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ A: v
+ b: t
+
+ crate::a::b
+ B: v
+ c: t
+
+ crate::a::b::c
+ A: v
+ b: t
+ "#]],
+ );
+}
+
+#[test]
+fn crate_def_map_fn_mod_same_name() {
+ check(
+ r#"
+mod m {
+ pub mod z {}
+ pub fn z() {}
+}
+"#,
+ expect![[r#"
+ crate
+ m: t
+
+ crate::m
+ z: t v
+
+ crate::m::z
+ "#]],
+ );
+}
+
+#[test]
+fn bogus_paths() {
+ cov_mark::check!(bogus_paths);
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+struct S;
+use self;
+
+//- /foo/mod.rs
+use super;
+use crate;
+"#,
+ expect![[r#"
+ crate
+ S: t v
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn use_as() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::Baz as Foo;
+
+//- /foo/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Foo: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn use_trees() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::bar::{Baz, Quux};
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+pub struct Baz;
+pub enum Quux {};
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Quux: t
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ Quux: t
+ "#]],
+ );
+}
+
+#[test]
+fn re_exports() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use self::foo::Baz;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn std_prelude() {
+ cov_mark::check!(std_prelude);
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+#[prelude_import]
+use ::test_crate::prelude::*;
+
+use Foo::*;
+
+//- /lib.rs crate:test_crate
+pub mod prelude;
+
+//- /prelude.rs
+pub enum Foo { Bar, Baz }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn can_import_enum_variant() {
+ cov_mark::check!(can_import_enum_variant);
+ check(
+ r#"
+enum E { V }
+use self::E::V;
+"#,
+ expect![[r#"
+ crate
+ E: t
+ V: t v
+ "#]],
+ );
+}
+
+#[test]
+fn edition_2015_imports() {
+ check(
+ r#"
+//- /main.rs crate:main deps:other_crate edition:2015
+mod foo;
+mod bar;
+
+//- /bar.rs
+struct Bar;
+
+//- /foo.rs
+use bar::Bar;
+use other_crate::FromLib;
+
+//- /lib.rs crate:other_crate edition:2018
+pub struct FromLib;
+"#,
+ expect![[r#"
+ crate
+ bar: t
+ foo: t
+
+ crate::bar
+ Bar: t v
+
+ crate::foo
+ Bar: t v
+ FromLib: t v
+ "#]],
+ );
+}
+
+#[test]
+fn item_map_using_self() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::bar::Baz::{self};
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn item_map_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::Baz;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn extern_crate_rename() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc
+extern crate alloc as alloc_crate;
+mod alloc;
+mod sync;
+
+//- /sync.rs
+use alloc_crate::Arc;
+
+//- /lib.rs crate:alloc
+pub struct Arc;
+"#,
+ expect![[r#"
+ crate
+ alloc: t
+ alloc_crate: t
+ sync: t
+
+ crate::alloc
+
+ crate::sync
+ Arc: t v
+ "#]],
+ );
+}
+
+#[test]
+fn extern_crate_rename_2015_edition() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc edition:2015
+extern crate alloc as alloc_crate;
+mod alloc;
+mod sync;
+
+//- /sync.rs
+use alloc_crate::Arc;
+
+//- /lib.rs crate:alloc
+pub struct Arc;
+"#,
+ expect![[r#"
+ crate
+ alloc: t
+ alloc_crate: t
+ sync: t
+
+ crate::alloc
+
+ crate::sync
+ Arc: t v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_use_extern_crate_self() {
+ cov_mark::check!(ignore_macro_use_extern_crate_self);
+ check(
+ r#"
+//- /main.rs crate:main
+#[macro_use]
+extern crate self as bla;
+"#,
+ expect![[r#"
+ crate
+ bla: t
+ "#]],
+ );
+}
+
+#[test]
+fn reexport_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::Baz;
+
+//- /lib.rs crate:test_crate
+pub use foo::Baz;
+mod foo;
+
+//- /foo.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn values_dont_shadow_extern_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+fn foo() {}
+use foo::Bar;
+
+//- /foo/lib.rs crate:foo
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: v
+ "#]],
+ );
+}
+
+#[test]
+fn no_std_prelude() {
+ check(
+ r#"
+ //- /main.rs crate:main deps:core,std
+ #![cfg_attr(not(never), no_std)]
+ use Rust;
+
+ //- /core.rs crate:core
+ pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Rust;
+ }
+ }
+ //- /std.rs crate:std deps:core
+ pub mod prelude {
+ pub mod rust_2018 {
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust: t v
+ "#]],
+ );
+}
+
+#[test]
+fn edition_specific_preludes() {
+ // We can't test the 2015 prelude here since you can't reexport its contents with 2015's
+ // absolute paths.
+
+ check(
+ r#"
+ //- /main.rs edition:2018 crate:main deps:std
+ use Rust2018;
+
+ //- /std.rs crate:std
+ pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Rust2018;
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust2018: t v
+ "#]],
+ );
+ check(
+ r#"
+ //- /main.rs edition:2021 crate:main deps:std
+ use Rust2021;
+
+ //- /std.rs crate:std
+ pub mod prelude {
+ pub mod rust_2021 {
+ pub struct Rust2021;
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust2021: t v
+ "#]],
+ );
+}
+
+#[test]
+fn std_prelude_takes_precedence_above_core_prelude() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core,std
+use {Foo, Bar};
+
+//- /std.rs crate:std deps:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Foo;
+ pub use core::prelude::rust_2018::Bar;
+ }
+}
+
+//- /core.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Bar;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_not_test() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+use {Foo, Bar, Baz};
+
+//- /lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ #[cfg(test)]
+ pub struct Foo;
+ #[cfg(not(test))]
+ pub struct Bar;
+ #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))]
+ pub struct Baz;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: _
+ Foo: _
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_test() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+use {Foo, Bar, Baz};
+
+//- /lib.rs crate:std cfg:test,feature=foo,feature=bar,opt=42
+pub mod prelude {
+ pub mod rust_2018 {
+ #[cfg(test)]
+ pub struct Foo;
+ #[cfg(not(test))]
+ pub struct Bar;
+ #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))]
+ pub struct Baz;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: _
+ Baz: t v
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn infer_multiple_namespace() {
+ check(
+ r#"
+//- /main.rs
+mod a {
+ pub type T = ();
+ pub use crate::b::*;
+}
+
+use crate::a::T;
+
+mod b {
+ pub const T: () = ();
+}
+"#,
+ expect![[r#"
+ crate
+ T: t v
+ a: t
+ b: t
+
+ crate::a
+ T: t v
+
+ crate::b
+ T: v
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_import() {
+ check(
+ r#"
+//- /main.rs
+use tr::Tr as _;
+use tr::Tr2 as _;
+
+mod tr {
+ pub trait Tr {}
+ pub trait Tr2 {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ _: t
+ tr: t
+
+ crate::tr
+ Tr: t
+ Tr2: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_reexport() {
+ check(
+ r#"
+//- /main.rs
+mod tr {
+ pub trait PubTr {}
+ pub trait PrivTr {}
+}
+mod reex {
+ use crate::tr::PrivTr as _;
+ pub use crate::tr::PubTr as _;
+}
+use crate::reex::*;
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ reex: t
+ tr: t
+
+ crate::reex
+ _: t
+ _: t
+
+ crate::tr
+ PrivTr: t
+ PubTr: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_pub_crate_reexport() {
+ cov_mark::check!(upgrade_underscore_visibility);
+ check(
+ r#"
+//- /main.rs crate:main deps:lib
+use lib::*;
+
+//- /lib.rs crate:lib
+use tr::Tr as _;
+pub use tr::Tr as _;
+
+mod tr {
+ pub trait Tr {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_nontrait() {
+ check(
+ r#"
+//- /main.rs
+mod m {
+ pub struct Struct;
+ pub enum Enum {}
+ pub const CONST: () = ();
+}
+use crate::m::{Struct as _, Enum as _, CONST as _};
+ "#,
+ expect![[r#"
+ crate
+ m: t
+
+ crate::m
+ CONST: v
+ Enum: t
+ Struct: t v
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_name_conflict() {
+ check(
+ r#"
+//- /main.rs
+struct Tr;
+
+use tr::Tr as _;
+
+mod tr {
+ pub trait Tr {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ Tr: t v
+ tr: t
+
+ crate::tr
+ Tr: t
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_the_entire_crate() {
+ check(
+ r#"
+//- /main.rs
+#![cfg(never)]
+
+pub struct S;
+pub enum E {}
+pub fn f() {}
+ "#,
+ expect![[r#"
+ crate
+ "#]],
+ );
+}
+
+#[test]
+fn use_crate_as() {
+ check(
+ r#"
+use crate as foo;
+
+use foo::bar as baz;
+
+fn bar() {}
+ "#,
+ expect![[r#"
+ crate
+ bar: v
+ baz: v
+ foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn self_imports_only_types() {
+ check(
+ r#"
+//- /main.rs
+mod m {
+ pub macro S() {}
+ pub struct S;
+}
+
+use self::m::S::{self};
+ "#,
+ expect![[r#"
+ crate
+ S: t
+ m: t
+
+ crate::m
+ S: t v m
+ "#]],
+ );
+}
+
+#[test]
+fn import_from_extern_crate_only_imports_public_items() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:settings,macros
+use macros::settings;
+use settings::Settings;
+//- /settings.rs crate:settings
+pub struct Settings;
+//- /macros.rs crate:macros
+mod settings {}
+pub const settings: () = ();
+ "#,
+ expect![[r#"
+ crate
+ Settings: t v
+ settings: v
+ "#]],
+ )
+}
+
+#[test]
+fn non_prelude_deps() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep extern-prelude:
+use dep::Struct;
+//- /dep.rs crate:dep
+pub struct Struct;
+ "#,
+ expect![[r#"
+ crate
+ Struct: _
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep extern-prelude:
+extern crate dep;
+use dep::Struct;
+//- /dep.rs crate:dep
+pub struct Struct;
+ "#,
+ expect![[r#"
+ crate
+ Struct: t v
+ dep: t
+ "#]],
+ );
+}
+
+#[test]
+fn braced_supers_in_use_tree() {
+ cov_mark::check!(concat_super_mod_paths);
+ check(
+ r#"
+mod some_module {
+ pub fn unknown_func() {}
+}
+
+mod other_module {
+ mod some_submodule {
+ use { super::{ super::unknown_func, }, };
+ }
+}
+
+use some_module::unknown_func;
+ "#,
+ expect![[r#"
+ crate
+ other_module: t
+ some_module: t
+ unknown_func: v
+
+ crate::other_module
+ some_submodule: t
+
+ crate::other_module::some_submodule
+ unknown_func: v
+
+ crate::some_module
+ unknown_func: v
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs
new file mode 100644
index 000000000..b2a6a592c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs
@@ -0,0 +1,338 @@
+use super::*;
+
+#[test]
+fn glob_1() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+pub struct Foo;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Foo: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_2() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::*;
+pub struct Foo;
+
+//- /foo/bar.rs
+pub struct Baz;
+pub use super::*;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Foo: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ Foo: t v
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_1() {
+ check(
+ r"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::*;
+struct PrivateStructFoo;
+
+//- /foo/bar.rs
+pub struct Baz;
+struct PrivateStructBar;
+pub use super::*;
+",
+ expect![[r#"
+ crate
+ Baz: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ PrivateStructFoo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ PrivateStructBar: t v
+ PrivateStructFoo: t v
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_2() {
+ check(
+ r"
+//- /lib.rs
+mod foo;
+use foo::*;
+use foo::bar::*;
+
+//- /foo/mod.rs
+mod bar;
+fn Foo() {};
+pub struct Foo {};
+
+//- /foo/bar.rs
+pub(super) struct PrivateBaz;
+struct PrivateBar;
+pub(crate) struct PubCrateStruct;
+",
+ expect![[r#"
+ crate
+ Foo: t
+ PubCrateStruct: t v
+ foo: t
+
+ crate::foo
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ PrivateBar: t v
+ PrivateBaz: t v
+ PubCrateStruct: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_across_crates() {
+ cov_mark::check!(glob_across_crates);
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::*;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::*;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+struct Foo;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_enum() {
+ cov_mark::check!(glob_enum);
+ check(
+ r#"
+enum Foo { Bar, Baz }
+use self::Foo::*;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ Foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_enum_group() {
+ cov_mark::check!(glob_enum_group);
+ check(
+ r#"
+enum Foo { Bar, Baz }
+use self::Foo::{*};
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ Foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def() {
+ cov_mark::check!(import_shadowed);
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+mod bar;
+use foo::*;
+use bar::baz;
+use baz::Bar;
+
+//- /foo.rs
+pub mod baz { pub struct Foo; }
+
+//- /bar.rs
+pub mod baz { pub struct Bar; }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ bar: t
+ baz: t
+ foo: t
+
+ crate::bar
+ baz: t
+
+ crate::bar::baz
+ Bar: t v
+
+ crate::foo
+ baz: t
+
+ crate::foo::baz
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def_reversed() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+mod bar;
+use bar::baz;
+use foo::*;
+use baz::Bar;
+
+//- /foo.rs
+pub mod baz { pub struct Foo; }
+
+//- /bar.rs
+pub mod baz { pub struct Bar; }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ bar: t
+ baz: t
+ foo: t
+
+ crate::bar
+ baz: t
+
+ crate::bar::baz
+ Bar: t v
+
+ crate::foo
+ baz: t
+
+ crate::foo::baz
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def_dependencies() {
+ check(
+ r#"
+mod a { pub mod foo { pub struct X; } }
+mod b { pub use super::a::foo; }
+mod c { pub mod foo { pub struct Y; } }
+mod d {
+ use super::c::foo;
+ use super::b::*;
+ use foo::Y;
+}
+"#,
+ expect![[r#"
+ crate
+ a: t
+ b: t
+ c: t
+ d: t
+
+ crate::a
+ foo: t
+
+ crate::a::foo
+ X: t v
+
+ crate::b
+ foo: t
+
+ crate::c
+ foo: t
+
+ crate::c::foo
+ Y: t v
+
+ crate::d
+ Y: t v
+ foo: t
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
new file mode 100644
index 000000000..2e8cb3621
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -0,0 +1,237 @@
+use std::sync::Arc;
+
+use base_db::SourceDatabaseExt;
+
+use crate::{AdtId, ModuleDefId};
+
+use super::*;
+
+fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
+ let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
+ let krate = db.test_crate();
+ {
+ let events = db.log_executed(|| {
+ db.crate_def_map(krate);
+ });
+ assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ }
+ db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string()));
+
+ {
+ let events = db.log_executed(|| {
+ db.crate_def_map(krate);
+ });
+ assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ }
+}
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+ //- /lib.rs
+ mod foo;$0
+
+ use crate::foo::bar::Baz;
+
+ enum E { A, B }
+ use E::*;
+
+ fn foo() -> i32 {
+ 1 + 1
+ }
+
+ #[cfg(never)]
+ fn no() {}
+ //- /foo/mod.rs
+ pub mod bar;
+
+ //- /foo/bar.rs
+ pub struct Baz;
+ ",
+ r"
+ mod foo;
+
+ use crate::foo::bar::Baz;
+
+ enum E { A, B }
+ use E::*;
+
+ fn foo() -> i32 { 92 }
+
+ #[cfg(never)]
+ fn no() {}
+ ",
+ );
+}
+
+#[test]
+fn typing_inside_a_macro_should_not_invalidate_def_map() {
+ let (mut db, pos) = TestDB::with_position(
+ r"
+ //- /lib.rs
+ macro_rules! m {
+ ($ident:ident) => {
+ fn f() {
+ $ident + $ident;
+ };
+ }
+ }
+ mod foo;
+
+ //- /foo/mod.rs
+ pub mod bar;
+
+ //- /foo/bar.rs
+ $0
+ m!(X);
+ ",
+ );
+ let krate = db.test_crate();
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 1);
+ });
+ assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ }
+ db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string()));
+
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 1);
+ });
+ assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ }
+}
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_expansions() {
+ let (mut db, pos) = TestDB::with_position(
+ r#"
+//- /lib.rs
+macro_rules! m {
+ ($ident:ident) => {
+ fn $ident() { };
+ }
+}
+mod foo;
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+m!(X);
+fn quux() { 1$0 }
+m!(Y);
+m!(Z);
+"#,
+ );
+ let krate = db.test_crate();
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 4);
+ });
+ let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
+ assert_eq!(n_recalculated_item_trees, 6);
+ let n_reparsed_macros =
+ events.iter().filter(|it| it.contains("parse_macro_expansion")).count();
+ assert_eq!(n_reparsed_macros, 3);
+ }
+
+ let new_text = r#"
+m!(X);
+fn quux() { 92 }
+m!(Y);
+m!(Z);
+"#;
+ db.set_file_text(pos.file_id, Arc::new(new_text.to_string()));
+
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 4);
+ });
+ let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
+ assert_eq!(n_recalculated_item_trees, 1);
+ let n_reparsed_macros =
+ events.iter().filter(|it| it.contains("parse_macro_expansion")).count();
+ assert_eq!(n_reparsed_macros, 0);
+ }
+}
+
+#[test]
+fn item_tree_prevents_reparsing() {
+ // The `ItemTree` is used by both name resolution and the various queries in `adt.rs` and
+ // `data.rs`. After computing the `ItemTree` and deleting the parse tree, we should be able to
+ // run those other queries without triggering a reparse.
+
+ let (db, pos) = TestDB::with_position(
+ r#"
+pub struct S;
+pub union U {}
+pub enum E {
+ Variant,
+}
+pub fn f(_: S) { $0 }
+pub trait Tr {}
+impl Tr for () {}
+pub const C: u8 = 0;
+pub static ST: u8 = 0;
+pub type Ty = ();
+"#,
+ );
+ let krate = db.test_crate();
+ {
+ let events = db.log_executed(|| {
+ db.file_item_tree(pos.file_id.into());
+ });
+ let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
+ assert_eq!(n_calculated_item_trees, 1);
+ let n_parsed_files = events.iter().filter(|it| it.contains("parse(")).count();
+ assert_eq!(n_parsed_files, 1);
+ }
+
+ // Delete the parse tree.
+ base_db::ParseQuery.in_db(&db).purge();
+
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 8);
+ assert_eq!(module_data.scope.impls().count(), 1);
+
+ for imp in module_data.scope.impls() {
+ db.impl_data(imp);
+ }
+
+ for (_, res) in module_data.scope.resolutions() {
+ match res.values.or(res.types).unwrap().0 {
+ ModuleDefId::FunctionId(f) => drop(db.function_data(f)),
+ ModuleDefId::AdtId(adt) => match adt {
+ AdtId::StructId(it) => drop(db.struct_data(it)),
+ AdtId::UnionId(it) => drop(db.union_data(it)),
+ AdtId::EnumId(it) => drop(db.enum_data(it)),
+ },
+ ModuleDefId::ConstId(it) => drop(db.const_data(it)),
+ ModuleDefId::StaticId(it) => drop(db.static_data(it)),
+ ModuleDefId::TraitId(it) => drop(db.trait_data(it)),
+ ModuleDefId::TypeAliasId(it) => drop(db.type_alias_data(it)),
+ ModuleDefId::EnumVariantId(_)
+ | ModuleDefId::ModuleId(_)
+ | ModuleDefId::MacroId(_)
+ | ModuleDefId::BuiltinType(_) => unreachable!(),
+ }
+ }
+ });
+ let n_reparsed_files = events.iter().filter(|it| it.contains("parse(")).count();
+ assert_eq!(n_reparsed_files, 0);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
new file mode 100644
index 000000000..3ece1379a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
@@ -0,0 +1,1187 @@
+use super::*;
+use itertools::Itertools;
+
+#[test]
+fn macro_rules_are_globally_visible() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! structs {
+ ($($i:ident),*) => {
+ $(struct $i { field: u32 } )*
+ }
+}
+structs!(Foo);
+mod nested;
+
+//- /nested.rs
+structs!(Bar, Baz);
+"#,
+ expect![[r#"
+ crate
+ Foo: t
+ nested: t
+
+ crate::nested
+ Bar: t
+ Baz: t
+ "#]],
+ );
+}
+
+#[test]
+fn macro_rules_can_define_modules() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! m {
+ ($name:ident) => { mod $name; }
+}
+m!(n1);
+mod m { m!(n3) }
+
+//- /n1.rs
+m!(n2)
+//- /n1/n2.rs
+struct X;
+//- /m/n3.rs
+struct Y;
+"#,
+ expect![[r#"
+ crate
+ m: t
+ n1: t
+
+ crate::m
+ n3: t
+
+ crate::m::n3
+ Y: t v
+
+ crate::n1
+ n2: t
+
+ crate::n1::n2
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_rules_from_other_crates_are_visible() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+foo::structs!(Foo, Bar)
+mod bar;
+
+//- /bar.rs
+use crate::*;
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! structs {
+ ($($i:ident),*) => {
+ $(struct $i { field: u32 } )*
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t
+ Foo: t
+ bar: t
+
+ crate::bar
+ Bar: t
+ Foo: t
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn macro_rules_export_with_local_inner_macros_are_visible() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+foo::structs!(Foo, Bar)
+mod bar;
+
+//- /bar.rs
+use crate::*;
+
+//- /lib.rs crate:foo
+#[macro_export(local_inner_macros)]
+macro_rules! structs {
+ ($($i:ident),*) => {
+ $(struct $i { field: u32 } )*
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t
+ Foo: t
+ bar: t
+
+ crate::bar
+ Bar: t
+ Foo: t
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn local_inner_macros_makes_local_macros_usable() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+foo::structs!(Foo, Bar);
+mod bar;
+
+//- /bar.rs
+use crate::*;
+
+//- /lib.rs crate:foo
+#[macro_export(local_inner_macros)]
+macro_rules! structs {
+ ($($i:ident),*) => {
+ inner!($($i),*);
+ }
+}
+#[macro_export]
+macro_rules! inner {
+ ($($i:ident),*) => {
+ $(struct $i { field: u32 } )*
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t
+ Foo: t
+ bar: t
+
+ crate::bar
+ Bar: t
+ Foo: t
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn unexpanded_macro_should_expand_by_fixedpoint_loop() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+macro_rules! baz {
+ () => {
+ use foo::bar;
+ }
+}
+foo!();
+bar!();
+baz!();
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! foo {
+ () => {
+ struct Foo { field: u32 }
+ }
+}
+#[macro_export]
+macro_rules! bar {
+ () => {
+ use foo::foo;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Foo: t
+ bar: m
+ foo: m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_rules_from_other_crates_are_visible_with_macro_use() {
+ cov_mark::check!(macro_rules_from_other_crates_are_visible_with_macro_use);
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+structs!(Foo);
+structs_priv!(Bar);
+structs_not_exported!(MacroNotResolved1);
+crate::structs!(MacroNotResolved2);
+
+mod bar;
+
+#[macro_use]
+extern crate foo;
+
+//- /bar.rs
+structs!(Baz);
+crate::structs!(MacroNotResolved3);
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! structs {
+ ($i:ident) => { struct $i; }
+}
+
+macro_rules! structs_not_exported {
+ ($i:ident) => { struct $i; }
+}
+
+mod priv_mod {
+ #[macro_export]
+ macro_rules! structs_priv {
+ ($i:ident) => { struct $i; }
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Foo: t v
+ bar: t
+ foo: t
+
+ crate::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn prelude_is_macro_use() {
+ cov_mark::check!(prelude_is_macro_use);
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+structs!(Foo);
+structs_priv!(Bar);
+structs_outside!(Out);
+crate::structs!(MacroNotResolved2);
+
+mod bar;
+
+//- /bar.rs
+structs!(Baz);
+crate::structs!(MacroNotResolved3);
+
+//- /lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ #[macro_export]
+ macro_rules! structs {
+ ($i:ident) => { struct $i; }
+ }
+
+ mod priv_mod {
+ #[macro_export]
+ macro_rules! structs_priv {
+ ($i:ident) => { struct $i; }
+ }
+ }
+ }
+}
+
+#[macro_export]
+macro_rules! structs_outside {
+ ($i:ident) => { struct $i; }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Foo: t v
+ Out: t v
+ bar: t
+
+ crate::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn prelude_cycle() {
+ check(
+ r#"
+#[prelude_import]
+use self::prelude::*;
+
+declare_mod!();
+
+mod prelude {
+ macro_rules! declare_mod {
+ () => (mod foo {})
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ prelude: t
+
+ crate::prelude
+ "#]],
+ );
+}
+
+#[test]
+fn legacy_macro_use_before_def() {
+ check(
+ r#"
+m!();
+
+macro_rules! m {
+ () => {
+ struct S;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ S: t v
+ "#]],
+ );
+ // FIXME: should not expand. legacy macro scoping is not implemented.
+}
+
+#[test]
+fn plain_macros_are_legacy_textual_scoped() {
+ check(
+ r#"
+//- /main.rs
+mod m1;
+bar!(NotFoundNotMacroUse);
+
+mod m2 { foo!(NotFoundBeforeInside2); }
+
+macro_rules! foo {
+ ($x:ident) => { struct $x; }
+}
+foo!(Ok);
+
+mod m3;
+foo!(OkShadowStop);
+bar!(NotFoundMacroUseStop);
+
+#[macro_use]
+mod m5 {
+ #[macro_use]
+ mod m6 {
+ macro_rules! foo {
+ ($x:ident) => { fn $x() {} }
+ }
+ }
+}
+foo!(ok_double_macro_use_shadow);
+
+baz!(NotFoundBefore);
+#[macro_use]
+mod m7 {
+ macro_rules! baz {
+ ($x:ident) => { struct $x; }
+ }
+}
+baz!(OkAfter);
+
+//- /m1.rs
+foo!(NotFoundBeforeInside1);
+macro_rules! bar {
+ ($x:ident) => { struct $x; }
+}
+
+//- /m3/mod.rs
+foo!(OkAfterInside);
+macro_rules! foo {
+ ($x:ident) => { fn $x() {} }
+}
+foo!(ok_shadow);
+
+#[macro_use]
+mod m4;
+bar!(OkMacroUse);
+
+mod m5;
+baz!(OkMacroUseInner);
+
+//- /m3/m4.rs
+foo!(ok_shadow_deep);
+macro_rules! bar {
+ ($x:ident) => { struct $x; }
+}
+//- /m3/m5.rs
+#![macro_use]
+macro_rules! baz {
+ ($x:ident) => { struct $x; }
+}
+
+
+"#,
+ expect![[r#"
+ crate
+ NotFoundBefore: t v
+ Ok: t v
+ OkAfter: t v
+ OkShadowStop: t v
+ m1: t
+ m2: t
+ m3: t
+ m5: t
+ m7: t
+ ok_double_macro_use_shadow: v
+
+ crate::m1
+
+ crate::m2
+
+ crate::m3
+ OkAfterInside: t v
+ OkMacroUse: t v
+ OkMacroUseInner: t v
+ m4: t
+ m5: t
+ ok_shadow: v
+
+ crate::m3::m4
+ ok_shadow_deep: v
+
+ crate::m3::m5
+
+ crate::m5
+ m6: t
+
+ crate::m5::m6
+
+ crate::m7
+ "#]],
+ );
+ // FIXME: should not see `NotFoundBefore`
+}
+
+#[test]
+fn type_value_macro_live_in_different_scopes() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! foo {
+ ($x:ident) => { type $x = (); }
+}
+
+foo!(foo);
+use foo as bar;
+
+use self::foo as baz;
+fn baz() {}
+"#,
+ expect![[r#"
+ crate
+ bar: t m
+ baz: t v m
+ foo: t m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_use_can_be_aliased() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+#[macro_use]
+extern crate foo;
+
+foo!(Direct);
+bar!(Alias);
+
+//- /lib.rs crate:foo
+use crate::foo as bar;
+
+mod m {
+ #[macro_export]
+ macro_rules! foo {
+ ($x:ident) => { struct $x; }
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Alias: t v
+ Direct: t v
+ foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn path_qualified_macros() {
+ check(
+ r#"
+macro_rules! foo {
+ ($x:ident) => { struct $x; }
+}
+
+crate::foo!(NotResolved);
+
+crate::bar!(OkCrate);
+bar!(OkPlain);
+alias1!(NotHere);
+m::alias1!(OkAliasPlain);
+m::alias2!(OkAliasSuper);
+m::alias3!(OkAliasCrate);
+not_found!(NotFound);
+
+mod m {
+ #[macro_export]
+ macro_rules! bar {
+ ($x:ident) => { struct $x; }
+ }
+ pub use bar as alias1;
+ pub use super::bar as alias2;
+ pub use crate::bar as alias3;
+ pub use self::bar as not_found;
+}
+"#,
+ expect![[r#"
+ crate
+ OkAliasCrate: t v
+ OkAliasPlain: t v
+ OkAliasSuper: t v
+ OkCrate: t v
+ OkPlain: t v
+ bar: m
+ m: t
+
+ crate::m
+ alias1: m
+ alias2: m
+ alias3: m
+ not_found: _
+ "#]],
+ );
+}
+
+#[test]
+fn macro_dollar_crate_is_correct_in_item() {
+ cov_mark::check!(macro_dollar_crate_self);
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+#[macro_use]
+extern crate foo;
+
+#[macro_use]
+mod m {
+ macro_rules! current {
+ () => {
+ use $crate::Foo as FooSelf;
+ }
+ }
+}
+
+struct Foo;
+
+current!();
+not_current1!();
+foo::not_current2!();
+
+//- /lib.rs crate:foo
+mod m {
+ #[macro_export]
+ macro_rules! not_current1 {
+ () => {
+ use $crate::Bar;
+ }
+ }
+}
+
+#[macro_export]
+macro_rules! not_current2 {
+ () => {
+ use $crate::Baz;
+ }
+}
+
+pub struct Bar;
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ Foo: t v
+ FooSelf: t v
+ foo: t
+ m: t
+
+ crate::m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_dollar_crate_is_correct_in_indirect_deps() {
+ cov_mark::check!(macro_dollar_crate_other);
+ // From std
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+foo!();
+
+//- /std.rs crate:std deps:core
+pub use core::foo;
+
+pub mod prelude {
+ pub mod rust_2018 {}
+}
+
+#[macro_use]
+mod std_macros;
+
+//- /core.rs crate:core
+#[macro_export]
+macro_rules! foo {
+ () => {
+ use $crate::bar;
+ }
+}
+
+pub struct bar;
+"#,
+ expect![[r#"
+ crate
+ bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn expand_derive() {
+ let map = compute_crate_def_map(
+ r#"
+//- /main.rs crate:main deps:core
+use core::Copy;
+
+#[core::derive(Copy, core::Clone)]
+struct Foo;
+
+//- /core.rs crate:core
+#[rustc_builtin_macro]
+pub macro derive($item:item) {}
+#[rustc_builtin_macro]
+pub macro Copy {}
+#[rustc_builtin_macro]
+pub macro Clone {}
+"#,
+ );
+ assert_eq!(map.modules[map.root].scope.impls().len(), 2);
+}
+
+#[test]
+fn resolve_builtin_derive() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core
+use core::*;
+
+//- /core.rs crate:core
+#[rustc_builtin_macro]
+pub macro Clone {}
+
+pub trait Clone {}
+"#,
+ expect![[r#"
+ crate
+ Clone: t m
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_derive_with_unresolved_attributes_fall_back() {
+ // Tests that we still resolve derives after ignoring an unresolved attribute.
+ cov_mark::check!(unresolved_attribute_fallback);
+ let map = compute_crate_def_map(
+ r#"
+//- /main.rs crate:main deps:core
+use core::{Clone, derive};
+
+#[derive(Clone)]
+#[unresolved]
+struct Foo;
+
+//- /core.rs crate:core
+#[rustc_builtin_macro]
+pub macro derive($item:item) {}
+#[rustc_builtin_macro]
+pub macro Clone {}
+"#,
+ );
+ assert_eq!(map.modules[map.root].scope.impls().len(), 1);
+}
+
+#[test]
+fn unresolved_attributes_fall_back_track_per_file_moditems() {
+ // Tests that we track per-file ModItems when ignoring an unresolved attribute.
+ // Just tracking the `ModItem` leads to `Foo` getting ignored.
+
+ check(
+ r#"
+ //- /main.rs crate:main
+
+ mod submod;
+
+ #[unresolved]
+ struct Foo;
+
+ //- /submod.rs
+ #[unresolved]
+ struct Bar;
+ "#,
+ expect![[r#"
+ crate
+ Foo: t v
+ submod: t
+
+ crate::submod
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn unresolved_attrs_extern_block_hang() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/8905
+ check(
+ r#"
+#[unresolved]
+extern "C" {
+ #[unresolved]
+ fn f();
+}
+ "#,
+ expect![[r#"
+ crate
+ f: v
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_extern_block() {
+ check(
+ r#"
+macro_rules! m {
+ () => { static S: u8; };
+}
+
+extern {
+ m!();
+}
+ "#,
+ expect![[r#"
+ crate
+ S: v
+ "#]],
+ );
+}
+
+#[test]
+fn resolves_derive_helper() {
+ cov_mark::check!(resolved_derive_helper);
+ check(
+ r#"
+//- /main.rs crate:main deps:proc
+#[rustc_builtin_macro]
+pub macro derive($item:item) {}
+
+#[derive(proc::Derive)]
+#[helper]
+#[unresolved]
+struct S;
+
+//- /proc.rs crate:proc
+#![crate_type="proc-macro"]
+#[proc_macro_derive(Derive, attributes(helper))]
+fn derive() {}
+ "#,
+ expect![[r#"
+ crate
+ S: t v
+ derive: m
+ "#]],
+ );
+}
+
+#[test]
+fn unresolved_attr_with_cfg_attr_hang() {
+ // Another regression test for https://github.com/rust-lang/rust-analyzer/issues/8905
+ check(
+ r#"
+#[cfg_attr(not(off), unresolved, unresolved)]
+struct S;
+ "#,
+ expect![[r#"
+ crate
+ S: t v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_expansion_overflow() {
+ cov_mark::check!(macro_expansion_overflow);
+ check(
+ r#"
+macro_rules! a {
+ ($e:expr; $($t:tt)*) => {
+ b!(static = (); $($t)*);
+ };
+ () => {};
+}
+
+macro_rules! b {
+ (static = $e:expr; $($t:tt)*) => {
+ a!($e; $($t)*);
+ };
+ () => {};
+}
+
+b! { static = #[] ();}
+"#,
+ expect![[r#"
+ crate
+ "#]],
+ );
+}
+
+#[test]
+fn macros_defining_macros() {
+ check(
+ r#"
+macro_rules! item {
+ ($item:item) => { $item }
+}
+
+item! {
+ macro_rules! indirect_macro { () => { struct S {} } }
+}
+
+indirect_macro!();
+ "#,
+ expect![[r#"
+ crate
+ S: t
+ "#]],
+ );
+}
+
+#[test]
+fn resolves_proc_macros() {
+ check(
+ r#"
+#![crate_type="proc-macro"]
+struct TokenStream;
+
+#[proc_macro]
+pub fn function_like_macro(args: TokenStream) -> TokenStream {
+ args
+}
+
+#[proc_macro_attribute]
+pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+
+#[proc_macro_derive(DummyTrait)]
+pub fn derive_macro(_item: TokenStream) -> TokenStream {
+ TokenStream
+}
+
+#[proc_macro_derive(AnotherTrait, attributes(helper_attr))]
+pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
+ TokenStream
+}
+"#,
+ expect![[r#"
+ crate
+ AnotherTrait: m
+ DummyTrait: m
+ TokenStream: t v
+ attribute_macro: v m
+ derive_macro: v
+ derive_macro_2: v
+ function_like_macro: v m
+ "#]],
+ );
+}
+
+#[test]
+fn proc_macro_censoring() {
+ // Make sure that only proc macros are publicly exported from proc-macro crates.
+
+ check(
+ r#"
+//- /main.rs crate:main deps:macros
+pub use macros::*;
+
+//- /macros.rs crate:macros
+#![crate_type="proc-macro"]
+pub struct TokenStream;
+
+#[proc_macro]
+pub fn function_like_macro(args: TokenStream) -> TokenStream {
+ args
+}
+
+#[proc_macro_attribute]
+pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+
+#[proc_macro_derive(DummyTrait)]
+pub fn derive_macro(_item: TokenStream) -> TokenStream {
+ TokenStream
+}
+
+#[macro_export]
+macro_rules! mbe {
+ () => {};
+}
+"#,
+ expect![[r#"
+ crate
+ DummyTrait: m
+ attribute_macro: m
+ function_like_macro: m
+ "#]],
+ );
+}
+
+#[test]
+fn collects_derive_helpers() {
+ let def_map = compute_crate_def_map(
+ r#"
+#![crate_type="proc-macro"]
+struct TokenStream;
+
+#[proc_macro_derive(AnotherTrait, attributes(helper_attr))]
+pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
+ TokenStream
+}
+"#,
+ );
+
+ assert_eq!(def_map.exported_derives.len(), 1);
+ match def_map.exported_derives.values().next() {
+ Some(helpers) => match &**helpers {
+ [attr] => assert_eq!(attr.to_string(), "helper_attr"),
+ _ => unreachable!(),
+ },
+ _ => unreachable!(),
+ }
+}
+
+#[test]
+fn resolve_macro_def() {
+ check(
+ r#"
+pub macro structs($($i:ident),*) {
+ $(struct $i { field: u32 } )*
+}
+structs!(Foo);
+"#,
+ expect![[r#"
+ crate
+ Foo: t
+ structs: m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_in_prelude() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:std
+global_asm!();
+
+//- /std.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub macro global_asm() {
+ pub struct S;
+ }
+ }
+}
+ "#,
+ expect![[r#"
+ crate
+ S: t v
+ "#]],
+ )
+}
+
+#[test]
+fn issue9358_bad_macro_stack_overflow() {
+ cov_mark::check!(issue9358_bad_macro_stack_overflow);
+ check(
+ r#"
+macro_rules! m {
+ ($cond:expr) => { m!($cond, stringify!($cond)) };
+ ($cond:expr, $($arg:tt)*) => { $cond };
+}
+m!(
+"#,
+ expect![[r#"
+ crate
+ "#]],
+ )
+}
+
+#[test]
+fn eager_macro_correctly_resolves_contents() {
+ // Eager macros resolve any contained macros when expanded. This should work correctly with the
+ // usual name resolution rules, so both of these `include!`s should include the right file.
+
+ check(
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+include!(inner_a!());
+include!(crate::inner_b!());
+
+#[macro_export]
+macro_rules! inner_a {
+ () => { "inc_a.rs" };
+}
+#[macro_export]
+macro_rules! inner_b {
+ () => { "inc_b.rs" };
+}
+//- /inc_a.rs
+struct A;
+//- /inc_b.rs
+struct B;
+"#,
+ expect![[r#"
+ crate
+ A: t v
+ B: t v
+ inner_a: m
+ inner_b: m
+ "#]],
+ );
+}
+
+#[test]
+fn eager_macro_correctly_resolves_dollar_crate() {
+ // MBE -> eager -> $crate::mbe
+ check(
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+#[macro_export]
+macro_rules! inner {
+ () => { "inc.rs" };
+}
+
+macro_rules! m {
+ () => { include!($crate::inner!()); };
+}
+
+m!();
+
+//- /inc.rs
+struct A;
+"#,
+ expect![[r#"
+ crate
+ A: t v
+ inner: m
+ "#]],
+ );
+ // eager -> MBE -> $crate::mbe
+ check(
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+#[macro_export]
+macro_rules! inner {
+ () => { "inc.rs" };
+}
+
+macro_rules! n {
+ () => {
+ $crate::inner!()
+ };
+}
+
+include!(n!());
+
+//- /inc.rs
+struct A;
+"#,
+ expect![[r#"
+ crate
+ A: t v
+ inner: m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_use_imports_all_macro_types() {
+ let def_map = compute_crate_def_map(
+ r#"
+//- /main.rs crate:main deps:lib
+#[macro_use]
+extern crate lib;
+
+//- /lib.rs crate:lib deps:proc
+pub use proc::*;
+
+#[macro_export]
+macro_rules! legacy { () => () }
+
+pub macro macro20 {}
+
+//- /proc.rs crate:proc
+#![crate_type="proc-macro"]
+
+struct TokenStream;
+
+#[proc_macro_attribute]
+fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a }
+ "#,
+ );
+
+ let root = &def_map[def_map.root()].scope;
+ let actual = root
+ .legacy_macros()
+ .sorted_by(|a, b| std::cmp::Ord::cmp(&a.0, &b.0))
+ .map(|(name, _)| format!("{name}\n"))
+ .collect::<String>();
+
+ expect![[r#"
+ legacy
+ macro20
+ proc_attr
+ "#]]
+ .assert_eq(&actual);
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs
new file mode 100644
index 000000000..79a74873b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs
@@ -0,0 +1,843 @@
+use super::*;
+
+#[test]
+fn name_res_works_for_broken_modules() {
+ cov_mark::check!(name_res_works_for_broken_modules);
+ check(
+ r"
+//- /lib.rs
+mod foo // no `;`, no body
+use self::foo::Baz;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+
+//- /foo/bar.rs
+pub struct Baz;
+",
+ expect![[r#"
+ crate
+ Baz: _
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn nested_module_resolution() {
+ check(
+ r#"
+//- /lib.rs
+mod n1;
+
+//- /n1.rs
+mod n2;
+
+//- /n1/n2.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ n1: t
+
+ crate::n1
+ n2: t
+
+ crate::n1::n2
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_module_resolution_2() {
+ check(
+ r#"
+//- /lib.rs
+mod prelude;
+mod iter;
+
+//- /prelude.rs
+pub use crate::iter::Iterator;
+
+//- /iter.rs
+pub use self::traits::Iterator;
+mod traits;
+
+//- /iter/traits.rs
+pub use self::iterator::Iterator;
+mod iterator;
+
+//- /iter/traits/iterator.rs
+pub trait Iterator;
+"#,
+ expect![[r#"
+ crate
+ iter: t
+ prelude: t
+
+ crate::iter
+ Iterator: t
+ traits: t
+
+ crate::iter::traits
+ Iterator: t
+ iterator: t
+
+ crate::iter::traits::iterator
+ Iterator: t
+
+ crate::prelude
+ Iterator: t
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_works_for_non_standard_filenames() {
+ check(
+ r#"
+//- /my_library.rs crate:my_library
+mod foo;
+use self::foo::Bar;
+
+//- /foo/mod.rs
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: t
+
+ crate::foo
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_works_for_raw_modules() {
+ check(
+ r#"
+//- /lib.rs
+mod r#async;
+use self::r#async::Bar;
+
+//- /async.rs
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ async: t
+
+ crate::async
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_path() {
+ check(
+ r#"
+//- /lib.rs
+#[path = "bar/baz/foo.rs"]
+mod foo;
+use self::foo::Bar;
+
+//- /bar/baz/foo.rs
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: t
+
+ crate::foo
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_with_path_in_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+#[path = "baz.rs"]
+pub mod bar;
+use self::bar::Baz;
+
+//- /foo/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_with_path_non_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "baz.rs"]
+pub mod bar;
+use self::bar::Baz;
+
+//- /baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_decl_path_super() {
+ check(
+ r#"
+//- /main.rs
+#[path = "bar/baz/module.rs"]
+mod foo;
+pub struct Baz;
+
+//- /bar/baz/module.rs
+use super::Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+#[path = "module/mod.rs"]
+mod foo;
+
+//- /module/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "./sub.rs"]
+pub mod foo_bar;
+
+//- /sub.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ foo_bar: t
+
+ crate::foo::foo_bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path_2() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+#[path="../sub.rs"]
+pub mod foo_bar;
+
+//- /sub.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ foo_bar: t
+
+ crate::foo::foo_bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path_outside_root() {
+ check(
+ r#"
+//- /a/b/c/d/e/main.rs crate:main
+#[path="../../../../../outside.rs"]
+mod foo;
+
+//- /outside.rs
+mod bar;
+
+//- /bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+"#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs_2() {
+ check(
+ r#"
+//- /main.rs
+#[path = "module/bar/mod.rs"]
+mod foo;
+
+//- /module/bar/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs_with_win_separator() {
+ check(
+ r#"
+//- /main.rs
+#[path = r"module\bar\mod.rs"]
+mod foo;
+
+//- /module/bar/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_with_path_attribute() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models"]
+mod foo { mod bar; }
+
+//- /models/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module() {
+ check(
+ r#"
+//- /main.rs
+mod foo { mod bar; }
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_2_with_path_attribute() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models/db"]
+mod foo { mod bar; }
+
+//- /models/db/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_3() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models/db"]
+mod foo {
+ #[path = "users.rs"]
+ mod bar;
+}
+
+//- /models/db/users.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_empty_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = ""]
+mod foo {
+ #[path = "users.rs"]
+ mod bar;
+}
+
+//- /users.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_empty_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = ""] // Should try to read `/` (a directory)
+mod foo;
+
+//- /foo.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_relative_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = "./models"]
+mod foo { mod bar; }
+
+//- /models/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo {
+ #[path = "baz.rs"]
+ mod bar;
+}
+use self::foo::bar::Baz;
+
+//- /foo/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+mod bar {
+ #[path = "qwe.rs"]
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /foo/bar/qwe.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_non_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+mod bar {
+ #[path = "qwe.rs"]
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /foo/bar/qwe.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_non_crate_root_2() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "bar"]
+mod bar {
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /bar/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_module_in_non_crate_root_2() {
+ check(
+ r#"
+//- /main.rs
+#[path="module/m2.rs"]
+mod module;
+
+//- /module/m2.rs
+pub mod submod;
+
+//- /module/submod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ module: t
+
+ crate::module
+ submod: t
+
+ crate::module::submod
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_out_of_line_module() {
+ check(
+ r#"
+//- /lib.rs
+mod a {
+ mod b {
+ mod c;
+ }
+}
+
+//- /a/b/c.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ b: t
+
+ crate::a::b
+ c: t
+
+ crate::a::b::c
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_out_of_line_module_with_path() {
+ check(
+ r#"
+//- /lib.rs
+mod a {
+ #[path = "d/e"]
+ mod b {
+ mod c;
+ }
+}
+
+//- /a/d/e/c.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ b: t
+
+ crate::a::b
+ c: t
+
+ crate::a::b::c
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn circular_mods() {
+ cov_mark::check!(circular_mods);
+ compute_crate_def_map(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+#[path = "./foo.rs"]
+mod foo;
+"#,
+ );
+
+ compute_crate_def_map(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+#[path = "./bar.rs"]
+mod bar;
+//- /bar.rs
+#[path = "./foo.rs"]
+mod foo;
+"#,
+ );
+}
+
+#[test]
+fn abs_path_ignores_local() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core
+pub use ::core::hash::Hash;
+pub mod core {}
+
+//- /lib.rs crate:core
+pub mod hash { pub trait Hash {} }
+"#,
+ expect![[r#"
+ crate
+ Hash: t
+ core: t
+
+ crate::core
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_in_module_file() {
+ // Inner `#![cfg]` in a module file makes the whole module disappear.
+ check(
+ r#"
+//- /main.rs
+mod module;
+
+//- /module.rs
+#![cfg(NEVER)]
+
+struct AlsoShoulntAppear;
+ "#,
+ expect![[r#"
+ crate
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs
new file mode 100644
index 000000000..215e8952d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs
@@ -0,0 +1,23 @@
+use super::*;
+
+#[test]
+fn primitive_reexport() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::int;
+
+//- /foo.rs
+pub use i32 as int;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+ int: t
+
+ crate::foo
+ int: t
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
new file mode 100644
index 000000000..2f13a9fbf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
@@ -0,0 +1,222 @@
+//! A desugared representation of paths like `crate::foo` or `<Type as Trait>::bar`.
+mod lower;
+
+use std::{
+ fmt::{self, Display},
+ iter,
+};
+
+use crate::{
+ body::LowerCtx,
+ intern::Interned,
+ type_ref::{ConstScalarOrPath, LifetimeRef},
+};
+use hir_expand::name::Name;
+use syntax::ast;
+
+use crate::type_ref::{TypeBound, TypeRef};
+
+pub use hir_expand::mod_path::{path, ModPath, PathKind};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ImportAlias {
+ /// Unnamed alias, as in `use Foo as _;`
+ Underscore,
+ /// Named alias
+ Alias(Name),
+}
+
+impl Display for ImportAlias {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ImportAlias::Underscore => f.write_str("_"),
+ ImportAlias::Alias(name) => f.write_str(&name.to_smol_str()),
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Path {
+ /// Type based path like `<T>::foo`.
+ /// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`.
+ type_anchor: Option<Interned<TypeRef>>,
+ mod_path: Interned<ModPath>,
+ /// Invariant: the same len as `self.mod_path.segments`
+ generic_args: Box<[Option<Interned<GenericArgs>>]>,
+}
+
+/// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
+/// also includes bindings of associated types, like in `Iterator<Item = Foo>`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericArgs {
+ pub args: Vec<GenericArg>,
+ /// This specifies whether the args contain a Self type as the first
+ /// element. This is the case for path segments like `<T as Trait>`, where
+ /// `T` is actually a type parameter for the path `Trait` specifying the
+ /// Self type. Otherwise, when we have a path `Trait<X, Y>`, the Self type
+ /// is left out.
+ pub has_self_type: bool,
+ /// Associated type bindings like in `Iterator<Item = T>`.
+ pub bindings: Vec<AssociatedTypeBinding>,
+ /// Whether these generic args were desugared from `Trait(Arg) -> Output`
+ /// parenthesis notation typically used for the `Fn` traits.
+ pub desugared_from_fn: bool,
+}
+
+/// An associated type binding like in `Iterator<Item = T>`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssociatedTypeBinding {
+ /// The name of the associated type.
+ pub name: Name,
+ /// The type bound to this associated type (in `Item = T`, this would be the
+ /// `T`). This can be `None` if there are bounds instead.
+ pub type_ref: Option<TypeRef>,
+ /// Bounds for the associated type, like in `Iterator<Item:
+ /// SomeOtherTrait>`. (This is the unstable `associated_type_bounds`
+ /// feature.)
+ pub bounds: Vec<Interned<TypeBound>>,
+}
+
+/// A single generic argument.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+ Type(TypeRef),
+ Lifetime(LifetimeRef),
+ Const(ConstScalarOrPath),
+}
+
+impl Path {
+ /// Converts an `ast::Path` to `Path`. Works with use trees.
+ /// It correctly handles `$crate`-based paths from macro calls.
+ pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
+ lower::lower_path(path, ctx)
+ }
+
+ /// Converts a known mod path to `Path`.
+ pub fn from_known_path(
+ path: ModPath,
+ generic_args: impl Into<Box<[Option<Interned<GenericArgs>>]>>,
+ ) -> Path {
+ let generic_args = generic_args.into();
+ assert_eq!(path.len(), generic_args.len());
+ Path { type_anchor: None, mod_path: Interned::new(path), generic_args }
+ }
+
+ pub fn kind(&self) -> &PathKind {
+ &self.mod_path.kind
+ }
+
+ pub fn type_anchor(&self) -> Option<&TypeRef> {
+ self.type_anchor.as_deref()
+ }
+
+ pub fn segments(&self) -> PathSegments<'_> {
+ PathSegments { segments: self.mod_path.segments(), generic_args: &self.generic_args }
+ }
+
+ pub fn mod_path(&self) -> &ModPath {
+ &self.mod_path
+ }
+
+ pub fn qualifier(&self) -> Option<Path> {
+ if self.mod_path.is_ident() {
+ return None;
+ }
+ let res = Path {
+ type_anchor: self.type_anchor.clone(),
+ mod_path: Interned::new(ModPath::from_segments(
+ self.mod_path.kind,
+ self.mod_path.segments()[..self.mod_path.segments().len() - 1].iter().cloned(),
+ )),
+ generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec().into(),
+ };
+ Some(res)
+ }
+
+ pub fn is_self_type(&self) -> bool {
+ self.type_anchor.is_none() && *self.generic_args == [None] && self.mod_path.is_Self()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathSegment<'a> {
+ pub name: &'a Name,
+ pub args_and_bindings: Option<&'a GenericArgs>,
+}
+
+pub struct PathSegments<'a> {
+ segments: &'a [Name],
+ generic_args: &'a [Option<Interned<GenericArgs>>],
+}
+
+impl<'a> PathSegments<'a> {
+ pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: &[] };
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+ pub fn len(&self) -> usize {
+ self.segments.len()
+ }
+ pub fn first(&self) -> Option<PathSegment<'a>> {
+ self.get(0)
+ }
+ pub fn last(&self) -> Option<PathSegment<'a>> {
+ self.get(self.len().checked_sub(1)?)
+ }
+ pub fn get(&self, idx: usize) -> Option<PathSegment<'a>> {
+ assert_eq!(self.segments.len(), self.generic_args.len());
+ let res = PathSegment {
+ name: self.segments.get(idx)?,
+ args_and_bindings: self.generic_args.get(idx).unwrap().as_ref().map(|it| &**it),
+ };
+ Some(res)
+ }
+ pub fn skip(&self, len: usize) -> PathSegments<'a> {
+ assert_eq!(self.segments.len(), self.generic_args.len());
+ PathSegments { segments: &self.segments[len..], generic_args: &self.generic_args[len..] }
+ }
+ pub fn take(&self, len: usize) -> PathSegments<'a> {
+ assert_eq!(self.segments.len(), self.generic_args.len());
+ PathSegments { segments: &self.segments[..len], generic_args: &self.generic_args[..len] }
+ }
+ pub fn iter(&self) -> impl Iterator<Item = PathSegment<'a>> {
+ self.segments.iter().zip(self.generic_args.iter()).map(|(name, args)| PathSegment {
+ name,
+ args_and_bindings: args.as_ref().map(|it| &**it),
+ })
+ }
+}
+
+impl GenericArgs {
+ pub(crate) fn from_ast(
+ lower_ctx: &LowerCtx<'_>,
+ node: ast::GenericArgList,
+ ) -> Option<GenericArgs> {
+ lower::lower_generic_args(lower_ctx, node)
+ }
+
+ pub(crate) fn empty() -> GenericArgs {
+ GenericArgs {
+ args: Vec::new(),
+ has_self_type: false,
+ bindings: Vec::new(),
+ desugared_from_fn: false,
+ }
+ }
+}
+
+impl From<Name> for Path {
+ fn from(name: Name) -> Path {
+ Path {
+ type_anchor: None,
+ mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))),
+ generic_args: Box::new([None]),
+ }
+ }
+}
+
+impl From<Name> for Box<Path> {
+ fn from(name: Name) -> Box<Path> {
+ Box::new(Path::from(name))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
new file mode 100644
index 000000000..0428f1a39
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -0,0 +1,230 @@
+//! Transforms syntax into `Path` objects, ideally accounting for hygiene
+
+use crate::{intern::Interned, type_ref::ConstScalarOrPath};
+
+use either::Either;
+use hir_expand::name::{name, AsName};
+use syntax::ast::{self, AstNode, HasTypeBounds};
+
+use super::AssociatedTypeBinding;
+use crate::{
+ body::LowerCtx,
+ path::{GenericArg, GenericArgs, ModPath, Path, PathKind},
+ type_ref::{LifetimeRef, TypeBound, TypeRef},
+};
+
+/// Converts an `ast::Path` to `Path`. Works with use trees.
+/// It correctly handles `$crate`-based paths from macro calls.
+pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
+ let mut kind = PathKind::Plain;
+ let mut type_anchor = None;
+ let mut segments = Vec::new();
+ let mut generic_args = Vec::new();
+ let hygiene = ctx.hygiene();
+ loop {
+ let segment = path.segment()?;
+
+ if segment.coloncolon_token().is_some() {
+ kind = PathKind::Abs;
+ }
+
+ match segment.kind()? {
+ ast::PathSegmentKind::Name(name_ref) => {
+ // FIXME: this should just return name
+ match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) {
+ Either::Left(name) => {
+ let args = segment
+ .generic_arg_list()
+ .and_then(|it| lower_generic_args(ctx, it))
+ .or_else(|| {
+ lower_generic_args_from_fn_path(
+ ctx,
+ segment.param_list(),
+ segment.ret_type(),
+ )
+ })
+ .map(Interned::new);
+ segments.push(name);
+ generic_args.push(args)
+ }
+ Either::Right(crate_id) => {
+ kind = PathKind::DollarCrate(crate_id);
+ break;
+ }
+ }
+ }
+ ast::PathSegmentKind::SelfTypeKw => {
+ segments.push(name![Self]);
+ generic_args.push(None)
+ }
+ ast::PathSegmentKind::Type { type_ref, trait_ref } => {
+ assert!(path.qualifier().is_none()); // this can only occur at the first segment
+
+ let self_type = TypeRef::from_ast(ctx, type_ref?);
+
+ match trait_ref {
+ // <T>::foo
+ None => {
+ type_anchor = Some(Interned::new(self_type));
+ kind = PathKind::Plain;
+ }
+ // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
+ Some(trait_ref) => {
+ let Path { mod_path, generic_args: path_generic_args, .. } =
+ Path::from_src(trait_ref.path()?, ctx)?;
+ let num_segments = mod_path.segments().len();
+ kind = mod_path.kind;
+
+ segments.extend(mod_path.segments().iter().cloned().rev());
+ generic_args.extend(Vec::from(path_generic_args).into_iter().rev());
+
+ // Insert the type reference (T in the above example) as Self parameter for the trait
+ let last_segment =
+ generic_args.iter_mut().rev().nth(num_segments.saturating_sub(1))?;
+ let mut args_inner = match last_segment {
+ Some(it) => it.as_ref().clone(),
+ None => GenericArgs::empty(),
+ };
+ args_inner.has_self_type = true;
+ args_inner.args.insert(0, GenericArg::Type(self_type));
+ *last_segment = Some(Interned::new(args_inner));
+ }
+ }
+ }
+ ast::PathSegmentKind::CrateKw => {
+ kind = PathKind::Crate;
+ break;
+ }
+ ast::PathSegmentKind::SelfKw => {
+ // don't break out if `self` is the last segment of a path, this means we got a
+ // use tree like `foo::{self}` which we want to resolve as `foo`
+ if !segments.is_empty() {
+ kind = PathKind::Super(0);
+ break;
+ }
+ }
+ ast::PathSegmentKind::SuperKw => {
+ let nested_super_count = if let PathKind::Super(n) = kind { n } else { 0 };
+ kind = PathKind::Super(nested_super_count + 1);
+ }
+ }
+ path = match qualifier(&path) {
+ Some(it) => it,
+ None => break,
+ };
+ }
+ segments.reverse();
+ generic_args.reverse();
+
+ if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() {
+ // plain empty paths don't exist, this means we got a single `self` segment as our path
+ kind = PathKind::Super(0);
+ }
+
+ // handle local_inner_macros:
+ // Basically, even in rustc it is quite hacky:
+ // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
+ // We follow what it did anyway :)
+ if segments.len() == 1 && kind == PathKind::Plain {
+ if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
+ if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) {
+ kind = PathKind::DollarCrate(crate_id);
+ }
+ }
+ }
+
+ let mod_path = Interned::new(ModPath::from_segments(kind, segments));
+ return Some(Path { type_anchor, mod_path, generic_args: generic_args.into() });
+
+ fn qualifier(path: &ast::Path) -> Option<ast::Path> {
+ if let Some(q) = path.qualifier() {
+ return Some(q);
+ }
+ // FIXME: this bottom up traversal is not too precise.
+ // Should we instead do a top-down analysis, recording results?
+ let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
+ let use_tree = use_tree_list.parent_use_tree();
+ use_tree.path()
+ }
+}
+
+pub(super) fn lower_generic_args(
+ lower_ctx: &LowerCtx<'_>,
+ node: ast::GenericArgList,
+) -> Option<GenericArgs> {
+ let mut args = Vec::new();
+ let mut bindings = Vec::new();
+ for generic_arg in node.generic_args() {
+ match generic_arg {
+ ast::GenericArg::TypeArg(type_arg) => {
+ let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty());
+ args.push(GenericArg::Type(type_ref));
+ }
+ ast::GenericArg::AssocTypeArg(assoc_type_arg) => {
+ if let Some(name_ref) = assoc_type_arg.name_ref() {
+ let name = name_ref.as_name();
+ let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it));
+ let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
+ l.bounds()
+ .map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it)))
+ .collect()
+ } else {
+ Vec::new()
+ };
+ bindings.push(AssociatedTypeBinding { name, type_ref, bounds });
+ }
+ }
+ ast::GenericArg::LifetimeArg(lifetime_arg) => {
+ if let Some(lifetime) = lifetime_arg.lifetime() {
+ let lifetime_ref = LifetimeRef::new(&lifetime);
+ args.push(GenericArg::Lifetime(lifetime_ref))
+ }
+ }
+ ast::GenericArg::ConstArg(arg) => {
+ let arg = ConstScalarOrPath::from_expr_opt(arg.expr());
+ args.push(GenericArg::Const(arg))
+ }
+ }
+ }
+
+ if args.is_empty() && bindings.is_empty() {
+ return None;
+ }
+ Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: false })
+}
+
+/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
+/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
+fn lower_generic_args_from_fn_path(
+ ctx: &LowerCtx<'_>,
+ params: Option<ast::ParamList>,
+ ret_type: Option<ast::RetType>,
+) -> Option<GenericArgs> {
+ let mut args = Vec::new();
+ let mut bindings = Vec::new();
+ let params = params?;
+ let mut param_types = Vec::new();
+ for param in params.params() {
+ let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
+ param_types.push(type_ref);
+ }
+ let arg = GenericArg::Type(TypeRef::Tuple(param_types));
+ args.push(arg);
+ if let Some(ret_type) = ret_type {
+ let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
+ bindings.push(AssociatedTypeBinding {
+ name: name![Output],
+ type_ref: Some(type_ref),
+ bounds: Vec::new(),
+ });
+ } else {
+ // -> ()
+ let type_ref = TypeRef::Tuple(Vec::new());
+ bindings.push(AssociatedTypeBinding {
+ name: name![Output],
+ type_ref: Some(type_ref),
+ bounds: Vec::new(),
+ });
+ }
+ Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: true })
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs
new file mode 100644
index 000000000..bf5bf10c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs
@@ -0,0 +1,95 @@
+//! In Rust, it is possible to have a value, a type and a macro with the same
+//! name without conflicts.
+//!
+//! `PerNs` (per namespace) captures this.
+
+use crate::{item_scope::ItemInNs, visibility::Visibility, MacroId, ModuleDefId};
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct PerNs {
+ pub types: Option<(ModuleDefId, Visibility)>,
+ pub values: Option<(ModuleDefId, Visibility)>,
+ pub macros: Option<(MacroId, Visibility)>,
+}
+
+impl Default for PerNs {
+ fn default() -> Self {
+ PerNs { types: None, values: None, macros: None }
+ }
+}
+
+impl PerNs {
+ pub fn none() -> PerNs {
+ PerNs { types: None, values: None, macros: None }
+ }
+
+ pub fn values(t: ModuleDefId, v: Visibility) -> PerNs {
+ PerNs { types: None, values: Some((t, v)), macros: None }
+ }
+
+ pub fn types(t: ModuleDefId, v: Visibility) -> PerNs {
+ PerNs { types: Some((t, v)), values: None, macros: None }
+ }
+
+ pub fn both(types: ModuleDefId, values: ModuleDefId, v: Visibility) -> PerNs {
+ PerNs { types: Some((types, v)), values: Some((values, v)), macros: None }
+ }
+
+ pub fn macros(macro_: MacroId, v: Visibility) -> PerNs {
+ PerNs { types: None, values: None, macros: Some((macro_, v)) }
+ }
+
+ pub fn is_none(&self) -> bool {
+ self.types.is_none() && self.values.is_none() && self.macros.is_none()
+ }
+
+ pub fn take_types(self) -> Option<ModuleDefId> {
+ self.types.map(|it| it.0)
+ }
+
+ pub fn take_types_vis(self) -> Option<(ModuleDefId, Visibility)> {
+ self.types
+ }
+
+ pub fn take_values(self) -> Option<ModuleDefId> {
+ self.values.map(|it| it.0)
+ }
+
+ pub fn take_macros(self) -> Option<MacroId> {
+ self.macros.map(|it| it.0)
+ }
+
+ pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
+ let _p = profile::span("PerNs::filter_visibility");
+ PerNs {
+ types: self.types.filter(|(_, v)| f(*v)),
+ values: self.values.filter(|(_, v)| f(*v)),
+ macros: self.macros.filter(|(_, v)| f(*v)),
+ }
+ }
+
+ pub fn with_visibility(self, vis: Visibility) -> PerNs {
+ PerNs {
+ types: self.types.map(|(it, _)| (it, vis)),
+ values: self.values.map(|(it, _)| (it, vis)),
+ macros: self.macros.map(|(it, _)| (it, vis)),
+ }
+ }
+
+ pub fn or(self, other: PerNs) -> PerNs {
+ PerNs {
+ types: self.types.or(other.types),
+ values: self.values.or(other.values),
+ macros: self.macros.or(other.macros),
+ }
+ }
+
+ pub fn iter_items(self) -> impl Iterator<Item = ItemInNs> {
+ let _p = profile::span("PerNs::iter_items");
+ self.types
+ .map(|it| ItemInNs::Types(it.0))
+ .into_iter()
+ .chain(self.values.map(|it| ItemInNs::Values(it.0)).into_iter())
+ .chain(self.macros.map(|it| ItemInNs::Macros(it.0)).into_iter())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
new file mode 100644
index 000000000..3163fa0f9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -0,0 +1,912 @@
+//! Name resolution façade.
+use std::{hash::BuildHasherDefault, sync::Arc};
+
+use base_db::CrateId;
+use hir_expand::name::{name, Name};
+use indexmap::IndexMap;
+use rustc_hash::FxHashSet;
+use smallvec::{smallvec, SmallVec};
+
+use crate::{
+ body::scope::{ExprScopes, ScopeId},
+ builtin_type::BuiltinType,
+ db::DefDatabase,
+ expr::{ExprId, LabelId, PatId},
+ generics::{GenericParams, TypeOrConstParamData},
+ intern::Interned,
+ item_scope::{BuiltinShadowMode, BUILTIN_SCOPE},
+ nameres::DefMap,
+ path::{ModPath, PathKind},
+ per_ns::PerNs,
+ visibility::{RawVisibility, Visibility},
+ AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
+ FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+ LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId,
+ StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, VariantId,
+};
+
+/// Lexical name-resolution context: the stack of scopes visible at some point in the program.
+#[derive(Debug, Clone)]
+pub struct Resolver {
+ /// The stack of scopes, where the inner-most scope is the last item.
+ ///
+ /// When using, you generally want to process the scopes in reverse order,
+ /// there's `scopes` *method* for that.
+ ///
+ /// Invariant: There exists at least one Scope::ModuleScope at the start of the vec.
+ scopes: Vec<Scope>,
+}
+
+// FIXME how to store these best
+/// A module scope entry: a `DefMap` plus the local module inside it to resolve from.
+#[derive(Debug, Clone)]
+struct ModuleItemMap {
+ def_map: Arc<DefMap>,
+ module_id: LocalModuleId,
+}
+
+/// An expression scope entry: a specific scope within a body's `ExprScopes` tree.
+#[derive(Debug, Clone)]
+struct ExprScope {
+ owner: DefWithBodyId,
+ expr_scopes: Arc<ExprScopes>,
+ scope_id: ScopeId,
+}
+
+/// One frame of the resolver's scope stack.
+#[derive(Debug, Clone)]
+enum Scope {
+ /// All the items and imported names of a module
+ ModuleScope(ModuleItemMap),
+ /// Brings the generic parameters of an item into scope
+ GenericParams { def: GenericDefId, params: Interned<GenericParams> },
+ /// Brings `Self` in `impl` block into scope
+ ImplDefScope(ImplId),
+ /// Brings `Self` in enum, struct and union definitions into scope
+ AdtScope(AdtId),
+ /// Local bindings
+ ExprScope(ExprScope),
+}
+
+/// A resolution in the type namespace.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TypeNs {
+ SelfType(ImplId),
+ GenericParam(TypeParamId),
+ AdtId(AdtId),
+ AdtSelfType(AdtId),
+ // Yup, enum variants are added to the types ns, but any usage of variant as
+ // type is an error.
+ EnumVariantId(EnumVariantId),
+ TypeAliasId(TypeAliasId),
+ BuiltinType(BuiltinType),
+ TraitId(TraitId),
+ // Module belong to type ns, but the resolver is used when all module paths
+ // are fully resolved.
+ // ModuleId(ModuleId)
+}
+
+/// Result of a value-namespace resolution: either fully resolved to a value, or
+/// partially resolved to a type with the index of the first unresolved segment.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ResolveValueResult {
+ ValueNs(ValueNs),
+ Partial(TypeNs, usize),
+}
+
+/// A resolution in the value namespace.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ValueNs {
+ ImplSelf(ImplId),
+ LocalBinding(PatId),
+ FunctionId(FunctionId),
+ ConstId(ConstId),
+ StaticId(StaticId),
+ StructId(StructId),
+ EnumVariantId(EnumVariantId),
+ GenericParam(ConstParamId),
+}
+
+impl Resolver {
+ /// Resolve known trait from std, like `std::futures::Future`.
+ /// Returns `None` if the path does not resolve, or resolves to a non-trait.
+ pub fn resolve_known_trait(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<TraitId> {
+ let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
+ match res {
+ ModuleDefId::TraitId(it) => Some(it),
+ _ => None,
+ }
+ }
+
+ /// Resolve known struct from std, like `std::boxed::Box`.
+ /// Returns `None` if the path does not resolve, or resolves to a non-struct.
+ pub fn resolve_known_struct(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<StructId> {
+ let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
+ match res {
+ ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it),
+ _ => None,
+ }
+ }
+
+ /// Resolve known enum from std, like `std::result::Result`.
+ /// Returns `None` if the path does not resolve, or resolves to a non-enum.
+ pub fn resolve_known_enum(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<EnumId> {
+ let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
+ match res {
+ ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it),
+ _ => None,
+ }
+ }
+
+ /// Iterates the scope stack innermost-first (i.e. in reverse of `self.scopes`).
+ fn scopes(&self) -> impl Iterator<Item = &Scope> {
+ self.scopes.iter().rev()
+ }
+
+ /// Resolves `path` in the innermost module scope, requiring it to resolve fully:
+ /// if any trailing segments are left unresolved, returns `PerNs::none()`.
+ fn resolve_module_path(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> PerNs {
+ let (item_map, module) = self.module_scope();
+ let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow);
+ if segment_index.is_some() {
+ return PerNs::none();
+ }
+ module_res
+ }
+
+ /// Like `resolve_module_path`, with module shadowing (modules win over builtins).
+ pub fn resolve_module_path_in_items(&self, db: &dyn DefDatabase, path: &ModPath) -> PerNs {
+ self.resolve_module_path(db, path, BuiltinShadowMode::Module)
+ }
+
+ // FIXME: This shouldn't exist
+ /// Resolves a path of the form `some::trait::Path::AssocItem`: resolves the prefix
+ /// to a trait, then looks up exactly one remaining segment among that trait's
+ /// associated items. Associated items are reported with `Visibility::Public`.
+ pub fn resolve_module_path_in_trait_assoc_items(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<PerNs> {
+ let (item_map, module) = self.module_scope();
+ let (module_res, idx) = item_map.resolve_path(db, module, path, BuiltinShadowMode::Module);
+ match module_res.take_types()? {
+ ModuleDefId::TraitId(it) => {
+ let idx = idx?;
+ let unresolved = &path.segments()[idx..];
+ // Only a single unresolved segment (the assoc item name) is supported.
+ let assoc = match unresolved {
+ [it] => it,
+ _ => return None,
+ };
+ let &(_, assoc) = db.trait_data(it).items.iter().find(|(n, _)| n == assoc)?;
+ Some(match assoc {
+ AssocItemId::FunctionId(it) => PerNs::values(it.into(), Visibility::Public),
+ AssocItemId::ConstId(it) => PerNs::values(it.into(), Visibility::Public),
+ AssocItemId::TypeAliasId(it) => PerNs::types(it.into(), Visibility::Public),
+ })
+ }
+ _ => None,
+ }
+ }
+
+ /// Resolves the first segment of `path` in the type namespace by walking the scope
+ /// stack innermost-first. Returns the resolution and, when more segments remain,
+ /// `Some(1)` as the index of the first unresolved segment.
+ pub fn resolve_path_in_type_ns(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<(TypeNs, Option<usize>)> {
+ let first_name = path.segments().first()?;
+ // Non-plain paths (`::`, `self::`, `crate::`, ...) can only resolve in module scope.
+ let skip_to_mod = path.kind != PathKind::Plain;
+ for scope in self.scopes() {
+ match scope {
+ Scope::ExprScope(_) => continue,
+ Scope::GenericParams { .. } | Scope::ImplDefScope(_) if skip_to_mod => continue,
+
+ Scope::GenericParams { params, def } => {
+ if let Some(id) = params.find_type_by_name(first_name, *def) {
+ let idx = if path.segments().len() == 1 { None } else { Some(1) };
+ return Some((TypeNs::GenericParam(id), idx));
+ }
+ }
+ Scope::ImplDefScope(impl_) => {
+ if first_name == &name![Self] {
+ let idx = if path.segments().len() == 1 { None } else { Some(1) };
+ return Some((TypeNs::SelfType(*impl_), idx));
+ }
+ }
+ Scope::AdtScope(adt) => {
+ if first_name == &name![Self] {
+ let idx = if path.segments().len() == 1 { None } else { Some(1) };
+ return Some((TypeNs::AdtSelfType(*adt), idx));
+ }
+ }
+ Scope::ModuleScope(m) => {
+ if let Some(res) = m.resolve_path_in_type_ns(db, path) {
+ return Some(res);
+ }
+ }
+ }
+ }
+ None
+ }
+
+ /// Like `resolve_path_in_type_ns`, but only succeeds when *all* segments resolve.
+ pub fn resolve_path_in_type_ns_fully(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<TypeNs> {
+ let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?;
+ if unresolved.is_some() {
+ return None;
+ }
+ Some(res)
+ }
+
+ /// Resolves a raw (syntactic) visibility to a concrete one. `pub` needs no lookup;
+ /// `pub(in path)` is resolved against the innermost module scope.
+ pub fn resolve_visibility(
+ &self,
+ db: &dyn DefDatabase,
+ visibility: &RawVisibility,
+ ) -> Option<Visibility> {
+ match visibility {
+ RawVisibility::Module(_) => {
+ let (item_map, module) = self.module_scope();
+ item_map.resolve_visibility(db, module, visibility)
+ }
+ RawVisibility::Public => Some(Visibility::Public),
+ }
+ }
+
+ /// Resolves `path` in the value namespace by walking the scope stack innermost-first.
+ /// Single-segment paths can hit locals, const generic params, and `Self`; multi-segment
+ /// paths may resolve partially to a type (`ResolveValueResult::Partial`). As a final
+ /// fallback, `u16::xyz`-style paths resolve their head to the builtin primitive type.
+ pub fn resolve_path_in_value_ns(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<ResolveValueResult> {
+ let n_segments = path.segments().len();
+ // A bare `self` path has no segments; substitute the `self` name for lookup.
+ let tmp = name![self];
+ let first_name = if path.is_self() { &tmp } else { path.segments().first()? };
+ let skip_to_mod = path.kind != PathKind::Plain && !path.is_self();
+ for scope in self.scopes() {
+ match scope {
+ Scope::AdtScope(_)
+ | Scope::ExprScope(_)
+ | Scope::GenericParams { .. }
+ | Scope::ImplDefScope(_)
+ if skip_to_mod =>
+ {
+ continue
+ }
+
+ Scope::ExprScope(scope) if n_segments <= 1 => {
+ let entry = scope
+ .expr_scopes
+ .entries(scope.scope_id)
+ .iter()
+ .find(|entry| entry.name() == first_name);
+
+ if let Some(e) = entry {
+ return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.pat())));
+ }
+ }
+ Scope::ExprScope(_) => continue,
+
+ Scope::GenericParams { params, def } if n_segments > 1 => {
+ if let Some(id) = params.find_type_by_name(first_name, *def) {
+ let ty = TypeNs::GenericParam(id);
+ return Some(ResolveValueResult::Partial(ty, 1));
+ }
+ }
+ Scope::GenericParams { params, def } if n_segments == 1 => {
+ if let Some(id) = params.find_const_by_name(first_name, *def) {
+ let val = ValueNs::GenericParam(id);
+ return Some(ResolveValueResult::ValueNs(val));
+ }
+ }
+ Scope::GenericParams { .. } => continue,
+
+ Scope::ImplDefScope(impl_) => {
+ if first_name == &name![Self] {
+ if n_segments > 1 {
+ let ty = TypeNs::SelfType(*impl_);
+ return Some(ResolveValueResult::Partial(ty, 1));
+ } else {
+ return Some(ResolveValueResult::ValueNs(ValueNs::ImplSelf(*impl_)));
+ }
+ }
+ }
+ Scope::AdtScope(adt) => {
+ if n_segments == 1 {
+ // bare `Self` doesn't work in the value namespace in a struct/enum definition
+ continue;
+ }
+ if first_name == &name![Self] {
+ let ty = TypeNs::AdtSelfType(*adt);
+ return Some(ResolveValueResult::Partial(ty, 1));
+ }
+ }
+
+ Scope::ModuleScope(m) => {
+ if let Some(def) = m.resolve_path_in_value_ns(db, path) {
+ return Some(def);
+ }
+ }
+ }
+ }
+
+ // If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back
+ // to resolving to the primitive type, to allow this to still work in the presence of
+ // `use core::u16;`.
+ // NOTE(review): `match ... None => {}` could be an `if let Some(builtin)` — cleanup only.
+ if path.kind == PathKind::Plain && path.segments().len() > 1 {
+ match BuiltinType::by_name(&path.segments()[0]) {
+ Some(builtin) => {
+ return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1));
+ }
+ None => {}
+ }
+ }
+
+ None
+ }
+
+ /// Like `resolve_path_in_value_ns`, but only succeeds on a full (non-partial) resolution.
+ pub fn resolve_path_in_value_ns_fully(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<ValueNs> {
+ match self.resolve_path_in_value_ns(db, path)? {
+ ResolveValueResult::ValueNs(it) => Some(it),
+ ResolveValueResult::Partial(..) => None,
+ }
+ }
+
+ /// Resolves `path` to a macro via the innermost module scope's macro namespace.
+ pub fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroId> {
+ let (item_map, module) = self.module_scope();
+ item_map.resolve_path(db, module, path, BuiltinShadowMode::Other).0.take_macros()
+ }
+
+ /// Returns a set of names available in the current scope.
+ ///
+ /// Note that this is a somewhat fuzzy concept -- internally, the compiler
+ /// doesn't necessary follow a strict scoping discipline. Rather, it just
+ /// tells for each ident what it resolves to.
+ ///
+ /// A good example is something like `str::from_utf8`. From scopes point of
+ /// view, this code is erroneous -- both `str` module and `str` type occupy
+ /// the same type namespace.
+ ///
+ /// We don't try to model that super-correctly -- this functionality is
+ /// primarily exposed for completions.
+ ///
+ /// Note that in Rust one name can be bound to several items:
+ ///
+ /// ```
+ /// macro_rules! t { () => (()) }
+ /// type t = t!();
+ /// const t: t = t!()
+ /// ```
+ ///
+ /// That's why we return a multimap.
+ ///
+ /// The shadowing is accounted for: in
+ ///
+ /// ```
+ /// let x = 92;
+ /// {
+ /// let x = 92;
+ /// $0
+ /// }
+ /// ```
+ ///
+ /// there will be only one entry for `x` in the result.
+ ///
+ /// The result is ordered *roughly* from the innermost scope to the
+ /// outermost: when the name is introduced in two namespaces in two scopes,
+ /// we use the position of the first scope.
+ pub fn names_in_scope(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> FxIndexMap<Name, SmallVec<[ScopeDef; 1]>> {
+ // Walk innermost-first so inner bindings are recorded (and thus ordered) first.
+ let mut res = ScopeNames::default();
+ for scope in self.scopes() {
+ scope.process_names(&mut res, db);
+ }
+ res.map
+ }
+
+ /// Collects every trait in scope: traits from each module scope (including its
+ /// prelude and ancestor block `DefMap`s), plus, inside a trait impl, the trait
+ /// being implemented.
+ pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> {
+ let mut traits = FxHashSet::default();
+ for scope in self.scopes() {
+ match scope {
+ Scope::ModuleScope(m) => {
+ if let Some(prelude) = m.def_map.prelude() {
+ let prelude_def_map = prelude.def_map(db);
+ traits.extend(prelude_def_map[prelude.local_id].scope.traits());
+ }
+ traits.extend(m.def_map[m.module_id].scope.traits());
+
+ // Add all traits that are in scope because of the containing DefMaps
+ m.def_map.with_ancestor_maps(db, m.module_id, &mut |def_map, module| {
+ if let Some(prelude) = def_map.prelude() {
+ let prelude_def_map = prelude.def_map(db);
+ traits.extend(prelude_def_map[prelude.local_id].scope.traits());
+ }
+ traits.extend(def_map[module].scope.traits());
+ None::<()>
+ });
+ }
+ &Scope::ImplDefScope(impl_) => {
+ if let Some(target_trait) = &db.impl_data(impl_).target_trait {
+ if let Some(TypeNs::TraitId(trait_)) =
+ self.resolve_path_in_type_ns_fully(db, target_trait.path.mod_path())
+ {
+ traits.insert(trait_);
+ }
+ }
+ }
+ _ => (),
+ }
+ }
+ traits
+ }
+
+ /// Returns the innermost module scope. Panics if the `Resolver` invariant
+ /// (at least one `Scope::ModuleScope` on the stack) is violated.
+ fn module_scope(&self) -> (&DefMap, LocalModuleId) {
+ self.scopes()
+ .find_map(|scope| match scope {
+ Scope::ModuleScope(m) => Some((&*m.def_map, m.module_id)),
+ _ => None,
+ })
+ .expect("module scope invariant violated")
+ }
+
+ /// The innermost module this resolver is positioned in.
+ pub fn module(&self) -> ModuleId {
+ let (def_map, local_id) = self.module_scope();
+ def_map.module_id(local_id)
+ }
+
+ /// The crate this resolver belongs to (from the outermost `DefMap`).
+ pub fn krate(&self) -> CrateId {
+ self.def_map().krate()
+ }
+
+ /// The *outermost* `DefMap` (first stack entry). Panics if the module-scope
+ /// invariant is violated.
+ pub fn def_map(&self) -> &DefMap {
+ self.scopes
+ .get(0)
+ .and_then(|scope| match scope {
+ Scope::ModuleScope(m) => Some(&m.def_map),
+ _ => None,
+ })
+ .expect("module scope invariant violated")
+ }
+
+ /// All `where` predicates from every generic-params scope on the stack.
+ pub fn where_predicates_in_scope(
+ &self,
+ ) -> impl Iterator<Item = &crate::generics::WherePredicate> {
+ self.scopes()
+ .filter_map(|scope| match scope {
+ Scope::GenericParams { params, .. } => Some(params),
+ _ => None,
+ })
+ .flat_map(|params| params.where_predicates.iter())
+ }
+
+ /// The innermost item whose generic parameters are in scope, if any.
+ pub fn generic_def(&self) -> Option<GenericDefId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::GenericParams { def, .. } => Some(*def),
+ _ => None,
+ })
+ }
+
+ /// The innermost body (fn/const/static) whose expression scopes are on the stack, if any.
+ pub fn body_owner(&self) -> Option<DefWithBodyId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::ExprScope(it) => Some(it.owner),
+ _ => None,
+ })
+ }
+}
+
+/// What a name in scope can refer to, as reported by `names_in_scope`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ScopeDef {
+ ModuleDef(ModuleDefId),
+ /// Name is present but resolves to nothing in any namespace.
+ Unknown,
+ ImplSelfType(ImplId),
+ AdtSelfType(AdtId),
+ GenericParam(GenericParamId),
+ Local(PatId),
+ Label(LabelId),
+}
+
+impl Scope {
+ /// Feeds every name introduced by this single scope frame into `acc`.
+ fn process_names(&self, acc: &mut ScopeNames, db: &dyn DefDatabase) {
+ match self {
+ Scope::ModuleScope(m) => {
+ // FIXME: should we provide `self` here?
+ // f(
+ // Name::self_param(),
+ // PerNs::types(Resolution::Def {
+ // def: m.module.into(),
+ // }),
+ // );
+ // Module items/imports, then legacy (textual-scope) macros, then extern
+ // prelude, builtin types, and finally the prelude.
+ m.def_map[m.module_id].scope.entries().for_each(|(name, def)| {
+ acc.add_per_ns(name, def);
+ });
+ m.def_map[m.module_id].scope.legacy_macros().for_each(|(name, macs)| {
+ macs.iter().for_each(|&mac| {
+ acc.add(
+ name,
+ ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))),
+ );
+ })
+ });
+ m.def_map.extern_prelude().for_each(|(name, &def)| {
+ acc.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def)));
+ });
+ BUILTIN_SCOPE.iter().for_each(|(name, &def)| {
+ acc.add_per_ns(name, def);
+ });
+ if let Some(prelude) = m.def_map.prelude() {
+ let prelude_def_map = prelude.def_map(db);
+ for (name, def) in prelude_def_map[prelude.local_id].scope.entries() {
+ acc.add_per_ns(name, def)
+ }
+ }
+ }
+ Scope::GenericParams { params, def: parent } => {
+ let parent = *parent;
+ for (local_id, param) in params.type_or_consts.iter() {
+ if let Some(name) = &param.name() {
+ let id = TypeOrConstParamId { parent, local_id };
+ // Re-query to learn whether this id is a type or a const param.
+ let data = &db.generic_params(parent).type_or_consts[local_id];
+ acc.add(
+ name,
+ ScopeDef::GenericParam(match data {
+ TypeOrConstParamData::TypeParamData(_) => {
+ GenericParamId::TypeParamId(TypeParamId::from_unchecked(id))
+ }
+ TypeOrConstParamData::ConstParamData(_) => {
+ GenericParamId::ConstParamId(ConstParamId::from_unchecked(id))
+ }
+ }),
+ );
+ }
+ }
+ for (local_id, param) in params.lifetimes.iter() {
+ let id = LifetimeParamId { parent, local_id };
+ acc.add(&param.name, ScopeDef::GenericParam(id.into()))
+ }
+ }
+ Scope::ImplDefScope(i) => {
+ acc.add(&name![Self], ScopeDef::ImplSelfType(*i));
+ }
+ Scope::AdtScope(i) => {
+ acc.add(&name![Self], ScopeDef::AdtSelfType(*i));
+ }
+ Scope::ExprScope(scope) => {
+ if let Some((label, name)) = scope.expr_scopes.label(scope.scope_id) {
+ acc.add(&name, ScopeDef::Label(label))
+ }
+ scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| {
+ acc.add_local(e.name(), e.pat());
+ });
+ }
+ }
+ }
+}
+
+// needs arbitrary_self_types to be a method... or maybe move to the def?
+/// Builds a resolver positioned at a particular expression within `owner`'s body.
+pub fn resolver_for_expr(db: &dyn DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver {
+ let scopes = db.expr_scopes(owner);
+ resolver_for_scope(db, owner, scopes.scope_for(expr_id))
+}
+
+/// Builds a resolver positioned at `scope_id` within `owner`'s body: starts from the
+/// owner's item-level resolver, then pushes each enclosing expression scope outermost-first,
+/// interleaving the `DefMap` of any block (`{ ... }` with items) along the chain.
+pub fn resolver_for_scope(
+ db: &dyn DefDatabase,
+ owner: DefWithBodyId,
+ scope_id: Option<ScopeId>,
+) -> Resolver {
+ let mut r = owner.resolver(db);
+ let scopes = db.expr_scopes(owner);
+ let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
+ r.scopes.reserve(scope_chain.len());
+
+ for scope in scope_chain.into_iter().rev() {
+ if let Some(block) = scopes.block(scope) {
+ if let Some(def_map) = db.block_def_map(block) {
+ let root = def_map.root();
+ r = r.push_module_scope(def_map, root);
+ // FIXME: This adds as many module scopes as there are blocks, but resolving in each
+ // already traverses all parents, so this is O(n²). I think we could only store the
+ // innermost module scope instead?
+ }
+ }
+
+ r = r.push_expr_scope(owner, Arc::clone(&scopes), scope)
+ }
+ r
+}
+
+// Private scope-stack builders, used by the `HasResolver` impls below.
+impl Resolver {
+ /// Pushes `scope` onto the stack; consumes and returns `self` for chaining.
+ fn push_scope(mut self, scope: Scope) -> Resolver {
+ self.scopes.push(scope);
+ self
+ }
+
+ /// Brings `def`'s generic parameters into scope.
+ fn push_generic_params_scope(self, db: &dyn DefDatabase, def: GenericDefId) -> Resolver {
+ let params = db.generic_params(def);
+ self.push_scope(Scope::GenericParams { def, params })
+ }
+
+ /// Brings `Self` of an impl block into scope.
+ fn push_impl_def_scope(self, impl_def: ImplId) -> Resolver {
+ self.push_scope(Scope::ImplDefScope(impl_def))
+ }
+
+ /// Brings a module's items into scope.
+ fn push_module_scope(self, def_map: Arc<DefMap>, module_id: LocalModuleId) -> Resolver {
+ self.push_scope(Scope::ModuleScope(ModuleItemMap { def_map, module_id }))
+ }
+
+ /// Brings one expression scope's local bindings into scope.
+ fn push_expr_scope(
+ self,
+ owner: DefWithBodyId,
+ expr_scopes: Arc<ExprScopes>,
+ scope_id: ScopeId,
+ ) -> Resolver {
+ self.push_scope(Scope::ExprScope(ExprScope { owner, expr_scopes, scope_id }))
+ }
+}
+
+impl ModuleItemMap {
+ /// Resolves `path` locally in this module, mapping the result into the value
+ /// namespace; a partially-resolved path yields `Partial` with the type prefix.
+ fn resolve_path_in_value_ns(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<ResolveValueResult> {
+ let (module_def, idx) =
+ self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
+ match idx {
+ None => {
+ let value = to_value_ns(module_def)?;
+ Some(ResolveValueResult::ValueNs(value))
+ }
+ Some(idx) => {
+ // Unresolved tail remains: the resolved prefix must be a type.
+ let ty = match module_def.take_types()? {
+ ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
+ ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
+ ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it),
+ ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
+
+ ModuleDefId::ModuleId(_)
+ | ModuleDefId::FunctionId(_)
+ | ModuleDefId::EnumVariantId(_)
+ | ModuleDefId::ConstId(_)
+ | ModuleDefId::MacroId(_)
+ | ModuleDefId::StaticId(_) => return None,
+ };
+ Some(ResolveValueResult::Partial(ty, idx))
+ }
+ }
+ }
+
+ /// Resolves `path` locally in this module, mapping the result into the type
+ /// namespace; `idx` is the first unresolved segment, if any.
+ fn resolve_path_in_type_ns(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<(TypeNs, Option<usize>)> {
+ let (module_def, idx) =
+ self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
+ let res = to_type_ns(module_def)?;
+ Some((res, idx))
+ }
+}
+
+/// Maps a `PerNs` value-namespace entry to `ValueNs`; defs with no value meaning yield `None`.
+fn to_value_ns(per_ns: PerNs) -> Option<ValueNs> {
+ let res = match per_ns.take_values()? {
+ ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it),
+ ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it),
+ ModuleDefId::EnumVariantId(it) => ValueNs::EnumVariantId(it),
+ ModuleDefId::ConstId(it) => ValueNs::ConstId(it),
+ ModuleDefId::StaticId(it) => ValueNs::StaticId(it),
+
+ ModuleDefId::AdtId(AdtId::EnumId(_) | AdtId::UnionId(_))
+ | ModuleDefId::TraitId(_)
+ | ModuleDefId::TypeAliasId(_)
+ | ModuleDefId::BuiltinType(_)
+ | ModuleDefId::MacroId(_)
+ | ModuleDefId::ModuleId(_) => return None,
+ };
+ Some(res)
+}
+
+/// Maps a `PerNs` type-namespace entry to `TypeNs`; defs with no type meaning yield `None`.
+fn to_type_ns(per_ns: PerNs) -> Option<TypeNs> {
+ let res = match per_ns.take_types()? {
+ ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
+ ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it),
+
+ ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it),
+ ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
+
+ ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
+
+ ModuleDefId::FunctionId(_)
+ | ModuleDefId::ConstId(_)
+ | ModuleDefId::MacroId(_)
+ | ModuleDefId::StaticId(_)
+ | ModuleDefId::ModuleId(_) => return None,
+ };
+ Some(res)
+}
+
+/// Insertion-ordered map with the fast (non-DoS-resistant) FxHasher — ordering matters
+/// for `names_in_scope`, which promises roughly innermost-first results.
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
+/// Accumulator for `names_in_scope`: a name → defs multimap preserving insertion order.
+#[derive(Default)]
+struct ScopeNames {
+ map: FxIndexMap<Name, SmallVec<[ScopeDef; 1]>>,
+}
+
+impl ScopeNames {
+ /// Records `def` under `name`, deduplicating identical entries.
+ fn add(&mut self, name: &Name, def: ScopeDef) {
+ let set = self.map.entry(name.clone()).or_default();
+ if !set.contains(&def) {
+ set.push(def)
+ }
+ }
+ /// Records each occupied namespace of `def` under `name`; an entirely empty
+ /// `PerNs` is recorded as `ScopeDef::Unknown` so the name still shows up.
+ fn add_per_ns(&mut self, name: &Name, def: PerNs) {
+ if let &Some((ty, _)) = &def.types {
+ self.add(name, ScopeDef::ModuleDef(ty))
+ }
+ if let &Some((def, _)) = &def.values {
+ self.add(name, ScopeDef::ModuleDef(def))
+ }
+ if let &Some((mac, _)) = &def.macros {
+ self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)))
+ }
+ if def.is_none() {
+ self.add(name, ScopeDef::Unknown)
+ }
+ }
+ /// Records a local binding, suppressing shadowed locals of the same name.
+ fn add_local(&mut self, name: &Name, pat: PatId) {
+ let set = self.map.entry(name.clone()).or_default();
+ // XXX: hack, account for local (and only local) shadowing.
+ //
+ // This should be somewhat more principled and take namespaces into
+ // accounts, but, alas, scoping rules are a hoax. `str` type and `str`
+ // module can be both available in the same scope.
+ if set.iter().any(|it| matches!(it, &ScopeDef::Local(_))) {
+ cov_mark::hit!(shadowing_shows_single_completion);
+ return;
+ }
+ set.push(ScopeDef::Local(pat))
+ }
+}
+
+/// Anything that can produce a `Resolver` positioned at itself.
+pub trait HasResolver: Copy {
+ /// Builds a resolver for type references inside this def.
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver;
+}
+
+impl HasResolver for ModuleId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ // Collect this module's DefMap and every ancestor (block) DefMap, then push
+ // them outermost-first so the innermost module ends up on top of the stack.
+ let mut def_map = self.def_map(db);
+ let mut modules: SmallVec<[_; 2]> = smallvec![(def_map.clone(), self.local_id)];
+ while let Some(parent) = def_map.parent() {
+ def_map = parent.def_map(db);
+ modules.push((def_map.clone(), parent.local_id));
+ }
+ let mut resolver = Resolver { scopes: Vec::with_capacity(modules.len()) };
+ for (def_map, module) in modules.into_iter().rev() {
+ resolver = resolver.push_module_scope(def_map, module);
+ }
+ resolver
+ }
+}
+
+// Container's resolver plus the trait's own generic parameters.
+impl HasResolver for TraitId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
+ }
+}
+
+// Blanket impl for all ADT ids (struct/enum/union): module resolver plus the ADT's
+// generics plus a `Self` scope.
+impl<T: Into<AdtId> + Copy> HasResolver for T {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ let def = self.into();
+ def.module(db)
+ .resolver(db)
+ .push_generic_params_scope(db, def.into())
+ .push_scope(Scope::AdtScope(def))
+ }
+}
+
+// Container's resolver plus the function's generic parameters.
+impl HasResolver for FunctionId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
+ }
+}
+
+// Consts have no generics of their own here: just the container's resolver.
+impl HasResolver for ConstId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+// Statics have no generics: just the container's resolver.
+impl HasResolver for StaticId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+// Container's resolver plus the type alias's generic parameters.
+impl HasResolver for TypeAliasId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
+ }
+}
+
+// Container's resolver plus the impl's generics and a `Self` (impl) scope.
+impl HasResolver for ImplId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db)
+ .container
+ .resolver(db)
+ .push_generic_params_scope(db, self.into())
+ .push_impl_def_scope(self)
+ }
+}
+
+impl HasResolver for ExternBlockId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ // Same as parent's — an extern block introduces no scopes of its own.
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+// Dispatch to the concrete body owner's resolver.
+impl HasResolver for DefWithBodyId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ DefWithBodyId::ConstId(c) => c.resolver(db),
+ DefWithBodyId::FunctionId(f) => f.resolver(db),
+ DefWithBodyId::StaticId(s) => s.resolver(db),
+ }
+ }
+}
+
+// Dispatch to the concrete container's resolver.
+impl HasResolver for ItemContainerId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ ItemContainerId::ModuleId(it) => it.resolver(db),
+ ItemContainerId::TraitId(it) => it.resolver(db),
+ ItemContainerId::ImplId(it) => it.resolver(db),
+ ItemContainerId::ExternBlockId(it) => it.resolver(db),
+ }
+ }
+}
+
+// Dispatch to the concrete generic def's resolver; enum variants use their parent enum.
+impl HasResolver for GenericDefId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ GenericDefId::FunctionId(inner) => inner.resolver(db),
+ GenericDefId::AdtId(adt) => adt.resolver(db),
+ GenericDefId::TraitId(inner) => inner.resolver(db),
+ GenericDefId::TypeAliasId(inner) => inner.resolver(db),
+ GenericDefId::ImplId(inner) => inner.resolver(db),
+ GenericDefId::EnumVariantId(inner) => inner.parent.resolver(db),
+ GenericDefId::ConstId(inner) => inner.resolver(db),
+ }
+ }
+}
+
+// Dispatch to the concrete variant's resolver; enum variants use their parent enum.
+impl HasResolver for VariantId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ VariantId::EnumVariantId(it) => it.parent.resolver(db),
+ VariantId::StructId(it) => it.resolver(db),
+ VariantId::UnionId(it) => it.resolver(db),
+ }
+ }
+}
+
+// Dispatch to the concrete macro kind's resolver.
+impl HasResolver for MacroId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ MacroId::Macro2Id(it) => it.resolver(db),
+ MacroId::MacroRulesId(it) => it.resolver(db),
+ MacroId::ProcMacroId(it) => it.resolver(db),
+ }
+ }
+}
+
+// Macros introduce no scopes of their own: just the container's resolver.
+impl HasResolver for Macro2Id {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+// Proc macros introduce no scopes of their own: just the container's resolver.
+impl HasResolver for ProcMacroId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+// `macro_rules!` macros introduce no scopes of their own: just the container's resolver.
+impl HasResolver for MacroRulesId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
new file mode 100644
index 000000000..f69356cac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
@@ -0,0 +1,85 @@
+//! Utilities for mapping between hir IDs and the surface syntax.
+
+use hir_expand::InFile;
+use la_arena::ArenaMap;
+use syntax::ast;
+
+use crate::{
+ db::DefDatabase, item_tree::ItemTreeNode, AssocItemLoc, ItemLoc, Macro2Loc, MacroRulesLoc,
+ ProcMacroLoc,
+};
+
+/// Maps a def's location back to its syntax node, wrapped in the file it came from.
+pub trait HasSource {
+ type Value;
+ fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value>;
+}
+
+// NOTE(review): this body is duplicated verbatim across all five `HasSource` impls
+// below; a shared helper taking the item-tree id would remove the repetition.
+impl<N: ItemTreeNode> HasSource for AssocItemLoc<N> {
+ type Value = N::Source;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<N::Source> {
+ // Item tree node -> AST id -> syntax node in the (re-)parsed/expanded file.
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+// Same item-tree -> AST-id -> syntax-node mapping as `AssocItemLoc` above.
+impl<N: ItemTreeNode> HasSource for ItemLoc<N> {
+ type Value = N::Source;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<N::Source> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+// `macro foo { ... }` (macros 2.0) — same mapping, concrete `ast::MacroDef` source.
+impl HasSource for Macro2Loc {
+ type Value = ast::MacroDef;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+// `macro_rules!` — same mapping, concrete `ast::MacroRules` source.
+impl HasSource for MacroRulesLoc {
+ type Value = ast::MacroRules;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+// Proc macros are defined as functions, hence the `ast::Fn` source.
+impl HasSource for ProcMacroLoc {
+ type Value = ast::Fn;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+/// Maps a parent def to the syntax nodes of all its children, keyed by `ChildId`.
+pub trait HasChildSource<ChildId> {
+ type Value;
+ fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<ChildId, Self::Value>>;
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
new file mode 100644
index 000000000..9cdc18d6b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -0,0 +1,245 @@
+//! Database used for testing `hir_def`.
+
+use std::{
+ fmt, panic,
+ sync::{Arc, Mutex},
+};
+
+use base_db::{
+ salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition,
+ SourceDatabase, Upcast,
+};
+use hir_expand::{db::AstDatabase, InFile};
+use rustc_hash::FxHashSet;
+use syntax::{algo, ast, AstNode};
+
+use crate::{
+ db::DefDatabase,
+ nameres::{DefMap, ModuleSource},
+ src::HasSource,
+ LocalModuleId, Lookup, ModuleDefId, ModuleId,
+};
+
/// In-memory salsa database used by `hir_def` tests. The listed storage
/// groups pull in every query these tests exercise (source files, macro
/// expansion, interning, and def-map computation).
#[salsa::database(
    base_db::SourceDatabaseExtStorage,
    base_db::SourceDatabaseStorage,
    hir_expand::db::AstDatabaseStorage,
    crate::db::InternDatabaseStorage,
    crate::db::DefDatabaseStorage
)]
pub(crate) struct TestDB {
    storage: salsa::Storage<TestDB>,
    // When `Some`, salsa events are recorded here; see `log`/`log_executed`.
    events: Mutex<Option<Vec<salsa::Event>>>,
}
+
+impl Default for TestDB {
+ fn default() -> Self {
+ let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.set_enable_proc_attr_macros(true);
+ this
+ }
+}
+
// `Upcast` lets code that only needs a narrower database view borrow `TestDB`
// as that trait object.
impl Upcast<dyn AstDatabase> for TestDB {
    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
        &*self
    }
}

impl Upcast<dyn DefDatabase> for TestDB {
    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
        &*self
    }
}
+
+impl salsa::Database for TestDB {
+ fn salsa_event(&self, event: salsa::Event) {
+ let mut events = self.events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+}
+
impl fmt::Debug for TestDB {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Deliberately opaque: the salsa storage is not useful in test output.
        f.debug_struct("TestDB").finish()
    }
}

// NOTE(review): manually asserts unwind-safety — presumably fine because a
// test database is never reused after a caught panic; confirm before relying
// on it outside tests.
impl panic::RefUnwindSafe for TestDB {}
+
// All file access is forwarded to `FileLoaderDelegate`, the standard
// `SourceDatabase`-backed implementation.
impl FileLoader for TestDB {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}
+
impl TestDB {
    /// Returns the module whose definition source is the given file.
    ///
    /// Panics if no module in any crate containing `file_id` originates from it.
    pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
        for &krate in self.relevant_crates(file_id).iter() {
            let crate_def_map = self.crate_def_map(krate);
            for (local_id, data) in crate_def_map.modules() {
                if data.origin.file_id() == Some(file_id) {
                    return crate_def_map.module_id(local_id);
                }
            }
        }
        panic!("Can't find module for file")
    }

    /// Returns the innermost module containing `position`, descending into
    /// block expressions that carry their own `DefMap`.
    pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
        let file_module = self.module_for_file(position.file_id);
        let mut def_map = file_module.def_map(self);
        let module = self.mod_at_position(&def_map, position);

        def_map = match self.block_at_position(&def_map, position) {
            Some(it) => it,
            // No block at the cursor: the file-level module is the answer.
            None => return def_map.module_id(module),
        };
        // Keep descending while a strictly deeper block `DefMap` is found.
        loop {
            let new_map = self.block_at_position(&def_map, position);
            match new_map {
                Some(new_block) if !Arc::ptr_eq(&new_block, &def_map) => {
                    def_map = new_block;
                }
                _ => {
                    // FIXME: handle `mod` inside block expression
                    return def_map.module_id(def_map.root());
                }
            }
        }
    }

    /// Finds the smallest/innermost module in `def_map` containing `position`.
    fn mod_at_position(&self, def_map: &DefMap, position: FilePosition) -> LocalModuleId {
        let mut size = None;
        let mut res = def_map.root();
        for (module, data) in def_map.modules() {
            let src = data.definition_source(self);
            if src.file_id != position.file_id.into() {
                continue;
            }

            let range = match src.value {
                ModuleSource::SourceFile(it) => it.syntax().text_range(),
                ModuleSource::Module(it) => it.syntax().text_range(),
                ModuleSource::BlockExpr(it) => it.syntax().text_range(),
            };

            if !range.contains(position.offset) {
                continue;
            }

            // Track the smallest covering range seen so far; a smaller range
            // means a more deeply nested (inner) module.
            let new_size = match size {
                None => range.len(),
                Some(size) => {
                    if range.len() < size {
                        range.len()
                    } else {
                        size
                    }
                }
            };

            if size != Some(new_size) {
                cov_mark::hit!(submodule_in_testdb);
                size = Some(new_size);
                res = module;
            }
        }

        res
    }

    /// Returns the `DefMap` of the innermost item-carrying block expression
    /// containing `position`, or `None` if the cursor is not inside one.
    fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<Arc<DefMap>> {
        // Find the smallest (innermost) function in `def_map` containing the cursor.
        let mut size = None;
        let mut fn_def = None;
        for (_, module) in def_map.modules() {
            let file_id = module.definition_source(self).file_id;
            if file_id != position.file_id.into() {
                continue;
            }
            for decl in module.scope.declarations() {
                if let ModuleDefId::FunctionId(it) = decl {
                    let range = it.lookup(self).source(self).value.syntax().text_range();

                    if !range.contains(position.offset) {
                        continue;
                    }

                    // Same smallest-covering-range selection as in
                    // `mod_at_position`, applied to functions.
                    let new_size = match size {
                        None => range.len(),
                        Some(size) => {
                            if range.len() < size {
                                range.len()
                            } else {
                                size
                            }
                        }
                    };
                    if size != Some(new_size) {
                        size = Some(new_size);
                        fn_def = Some(it);
                    }
                }
            }
        }

        // Find the innermost block expression that has a `DefMap`.
        let def_with_body = fn_def?.into();
        let (_, source_map) = self.body_with_source_map(def_with_body);
        let scopes = self.expr_scopes(def_with_body);
        let root = self.parse(position.file_id);

        // Walk syntax ancestors at the cursor (innermost first), mapping each
        // block expression back to its expression scope.
        let scope_iter = algo::ancestors_at_offset(&root.syntax_node(), position.offset)
            .filter_map(|node| {
                let block = ast::BlockExpr::cast(node)?;
                let expr = ast::Expr::from(block);
                let expr_id = source_map.node_expr(InFile::new(position.file_id.into(), &expr))?;
                let scope = scopes.scope_for(expr_id).unwrap();
                Some(scope)
            });

        for scope in scope_iter {
            let containing_blocks =
                scopes.scope_chain(Some(scope)).filter_map(|scope| scopes.block(scope));

            for block in containing_blocks {
                if let Some(def_map) = self.block_def_map(block) {
                    return Some(def_map);
                }
            }
        }

        None
    }

    /// Runs `f` while recording salsa events; returns everything recorded.
    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
        *self.events.lock().unwrap() = Some(Vec::new());
        f();
        self.events.lock().unwrap().take().unwrap()
    }

    /// Like `log`, but keeps only queries that actually executed
    /// (`WillExecute`), rendered through their `Debug` representation.
    pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
        let events = self.log(f);
        events
            .into_iter()
            .filter_map(|e| match e.kind {
                // This is pretty horrible, but `Debug` is the only way to inspect
                // QueryDescriptor at the moment.
                salsa::EventKind::WillExecute { database_key } => {
                    Some(format!("{:?}", database_key.debug(self)))
                }
                _ => None,
            })
            .collect()
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/trace.rs b/src/tools/rust-analyzer/crates/hir-def/src/trace.rs
new file mode 100644
index 000000000..6e6ceb8e4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/trace.rs
@@ -0,0 +1,51 @@
+//! Trace is a pretty niche data structure which is used when lowering a CST
+//! into HIR.
+//!
+//! Lowering process calculates two bits of information:
+//! * the lowered syntax itself
+//! * a mapping between lowered syntax and original syntax
+//!
+//! Due to the way salsa works, the mapping is usually hot lava, as it contains
+//! absolute offsets. The `Trace` structure (inspired, at least in name, by
+//! Kotlin's `BindingTrace`) allows use the same code to compute both
+//! projections.
+use la_arena::{Arena, ArenaMap, Idx, RawIdx};
+
pub(crate) struct Trace<T, V> {
    // Exactly one of `arena`/`map` is `Some`, depending on which constructor
    // (`new_for_arena` / `new_for_map`) was used.
    arena: Option<Arena<T>>,
    map: Option<ArenaMap<Idx<T>, V>>,
    // Running id counter, used to synthesize ids when there is no arena.
    len: u32,
}
+
+impl<T, V> Trace<T, V> {
+ pub(crate) fn new_for_arena() -> Trace<T, V> {
+ Trace { arena: Some(Arena::default()), map: None, len: 0 }
+ }
+
+ pub(crate) fn new_for_map() -> Trace<T, V> {
+ Trace { arena: None, map: Some(ArenaMap::default()), len: 0 }
+ }
+
+ pub(crate) fn alloc(&mut self, value: impl FnOnce() -> V, data: impl FnOnce() -> T) -> Idx<T> {
+ let id = if let Some(arena) = &mut self.arena {
+ arena.alloc(data())
+ } else {
+ let id = Idx::<T>::from_raw(RawIdx::from(self.len));
+ self.len += 1;
+ id
+ };
+
+ if let Some(map) = &mut self.map {
+ map.insert(id, value());
+ }
+ id
+ }
+
+ pub(crate) fn into_arena(mut self) -> Arena<T> {
+ self.arena.take().unwrap()
+ }
+
+ pub(crate) fn into_map(mut self) -> ArenaMap<Idx<T>, V> {
+ self.map.take().unwrap()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
new file mode 100644
index 000000000..924805962
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
@@ -0,0 +1,486 @@
+//! HIR for references to types. Paths in these are not yet resolved. They can
+//! be directly created from an ast::TypeRef, without further queries.
+
+use std::fmt::Write;
+
+use hir_expand::{
+ name::{AsName, Name},
+ AstId,
+};
+use syntax::ast::{self, HasName};
+
+use crate::{
+ body::LowerCtx,
+ builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
+ expr::Literal,
+ intern::Interned,
+ path::Path,
+};
+
/// Whether a reference or pointer type is mutable.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Mutability {
    Shared,
    Mut,
}

impl Mutability {
    /// Maps an AST-level `mut` flag to a `Mutability`.
    pub fn from_mutable(mutable: bool) -> Mutability {
        match mutable {
            true => Mutability::Mut,
            false => Mutability::Shared,
        }
    }

    /// The keyword to render inside a reference type: `""` or `"mut "`.
    pub fn as_keyword_for_ref(self) -> &'static str {
        if self.is_mut() {
            "mut "
        } else {
            ""
        }
    }

    /// The keyword to render inside a raw-pointer type: `"const "` or `"mut "`.
    pub fn as_keyword_for_ptr(self) -> &'static str {
        if self.is_mut() {
            "mut "
        } else {
            "const "
        }
    }

    /// Returns `true` if the mutability is [`Mut`].
    ///
    /// [`Mut`]: Mutability::Mut
    #[must_use]
    pub fn is_mut(&self) -> bool {
        *self == Mutability::Mut
    }

    /// Returns `true` if the mutability is [`Shared`].
    ///
    /// [`Shared`]: Mutability::Shared
    #[must_use]
    pub fn is_shared(&self) -> bool {
        *self == Mutability::Shared
    }
}
+
/// Distinguishes raw pointers (`*`) from references (`&`).
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Rawness {
    RawPtr,
    Ref,
}

impl Rawness {
    /// `true` maps to a raw pointer, `false` to a reference.
    pub fn from_raw(is_raw: bool) -> Rawness {
        match is_raw {
            true => Rawness::RawPtr,
            false => Rawness::Ref,
        }
    }
}
+
/// A reference to a trait, written as a (not yet resolved) path.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct TraitRef {
    pub path: Path,
}

impl TraitRef {
    /// Converts an `ast::PathType` to a `hir::TraitRef`.
    pub(crate) fn from_ast(ctx: &LowerCtx<'_>, node: ast::Type) -> Option<Self> {
        // FIXME: Use `Path::from_src`
        match node {
            ast::Type::PathType(path) => {
                path.path().and_then(|it| ctx.lower_path(it)).map(|path| TraitRef { path })
            }
            // Only plain path types can name a trait.
            _ => None,
        }
    }
}
+
/// HIR representation of a type as written in source (compare rustc's `ty::Ty`).
///
/// Note: Most users of `TypeRef` that end up in the salsa database intern it using
/// `Interned<TypeRef>` to save space. But notably, nested `TypeRef`s are not interned, since that
/// does not seem to save any noticeable amount of memory.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum TypeRef {
    /// The never type, `!`.
    Never,
    /// The inferred type, `_`.
    Placeholder,
    Tuple(Vec<TypeRef>),
    Path(Path),
    /// `*const T` / `*mut T`.
    RawPtr(Box<TypeRef>, Mutability),
    /// `&T` / `&'a mut T`.
    Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability),
    // FIXME: for full const generics, the latter element (length) here is going to have to be an
    // expression that is further lowered later in hir_ty.
    Array(Box<TypeRef>, ConstScalarOrPath),
    Slice(Box<TypeRef>),
    /// A fn pointer. Last element of the vector is the return type.
    Fn(Vec<(Option<Name>, TypeRef)>, bool /*varargs*/),
    ImplTrait(Vec<Interned<TypeBound>>),
    DynTrait(Vec<Interned<TypeBound>>),
    /// A macro call in type position, e.g. `m!()`.
    Macro(AstId<ast::MacroCall>),
    /// A type that failed to lower.
    Error,
}
+
/// A lifetime as written in source, stored by name.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct LifetimeRef {
    pub name: Name,
}

impl LifetimeRef {
    /// Wraps an already-constructed lifetime name.
    pub(crate) fn new_name(name: Name) -> Self {
        LifetimeRef { name }
    }

    /// Lowers an AST lifetime to a `LifetimeRef`.
    pub(crate) fn new(lifetime: &ast::Lifetime) -> Self {
        LifetimeRef { name: Name::new_lifetime(lifetime) }
    }

    /// Placeholder for a lifetime that was not written in source.
    pub fn missing() -> LifetimeRef {
        LifetimeRef { name: Name::missing() }
    }
}
+
/// A single bound from a bound list, e.g. the `Clone` in `T: Clone + 'a`.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum TypeBound {
    /// A trait bound written as a path, with its modifier (`Trait` / `?Trait`).
    Path(Path, TraitBoundModifier),
    /// A higher-ranked bound: `for<'a, ...> Path`.
    ForLifetime(Box<[Name]>, Path),
    /// A lifetime bound, e.g. `'a`.
    Lifetime(LifetimeRef),
    /// A bound that failed to lower.
    Error,
}

/// A modifier on a bound, currently this is only used for `?Sized`, where the
/// modifier is `Maybe`.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum TraitBoundModifier {
    None,
    Maybe,
}
+
impl TypeRef {
    /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
    pub fn from_ast(ctx: &LowerCtx<'_>, node: ast::Type) -> Self {
        match node {
            // `(T)` lowers to plain `T`; parens are not represented in HIR.
            ast::Type::ParenType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
            ast::Type::TupleType(inner) => {
                TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect())
            }
            ast::Type::NeverType(..) => TypeRef::Never,
            ast::Type::PathType(inner) => {
                // FIXME: Use `Path::from_src`
                inner
                    .path()
                    .and_then(|it| ctx.lower_path(it))
                    .map(TypeRef::Path)
                    .unwrap_or(TypeRef::Error)
            }
            ast::Type::PtrType(inner) => {
                let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
                let mutability = Mutability::from_mutable(inner.mut_token().is_some());
                TypeRef::RawPtr(Box::new(inner_ty), mutability)
            }
            ast::Type::ArrayType(inner) => {
                // FIXME: This is a hack. We should probably reuse the machinery of
                // `hir_def::body::lower` to lower this into an `Expr` and then evaluate it at the
                // `hir_ty` level, which would allow knowing the type of:
                // let v: [u8; 2 + 2] = [0u8; 4];
                let len = ConstScalarOrPath::from_expr_opt(inner.expr());
                TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
            }
            ast::Type::SliceType(inner) => {
                TypeRef::Slice(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())))
            }
            ast::Type::RefType(inner) => {
                let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
                let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(&lt));
                let mutability = Mutability::from_mutable(inner.mut_token().is_some());
                TypeRef::Reference(Box::new(inner_ty), lifetime, mutability)
            }
            ast::Type::InferType(_inner) => TypeRef::Placeholder,
            ast::Type::FnPtrType(inner) => {
                // A missing return type means unit, `()`.
                let ret_ty = inner
                    .ret_type()
                    .and_then(|rt| rt.ty())
                    .map(|it| TypeRef::from_ast(ctx, it))
                    .unwrap_or_else(|| TypeRef::Tuple(Vec::new()));
                let mut is_varargs = false;
                let mut params = if let Some(pl) = inner.param_list() {
                    // Only the last parameter can be `...`.
                    if let Some(param) = pl.params().last() {
                        is_varargs = param.dotdotdot_token().is_some();
                    }

                    pl.params()
                        .map(|it| {
                            let type_ref = TypeRef::from_ast_opt(ctx, it.ty());
                            let name = match it.pat() {
                                Some(ast::Pat::IdentPat(it)) => Some(
                                    it.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing),
                                ),
                                _ => None,
                            };
                            (name, type_ref)
                        })
                        .collect()
                } else {
                    Vec::new()
                };
                // By convention (see `TypeRef::Fn`), the return type is stored
                // as the last element.
                params.push((None, ret_ty));
                TypeRef::Fn(params, is_varargs)
            }
            // for types are close enough for our purposes to the inner type for now...
            ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
            ast::Type::ImplTraitType(inner) => {
                TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
            }
            ast::Type::DynTraitType(inner) => {
                TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
            }
            ast::Type::MacroType(mt) => match mt.macro_call() {
                Some(mc) => ctx.ast_id(ctx.db, &mc).map(TypeRef::Macro).unwrap_or(TypeRef::Error),
                None => TypeRef::Error,
            },
        }
    }

    /// Like `from_ast`, mapping a missing node to `TypeRef::Error`.
    pub(crate) fn from_ast_opt(ctx: &LowerCtx<'_>, node: Option<ast::Type>) -> Self {
        match node {
            Some(node) => TypeRef::from_ast(ctx, node),
            None => TypeRef::Error,
        }
    }

    /// The unit type, `()`.
    pub(crate) fn unit() -> TypeRef {
        TypeRef::Tuple(Vec::new())
    }

    /// Calls `f` on `self` and every `TypeRef` nested inside it (pre-order),
    /// including types inside generic args and associated-type bindings of paths.
    pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) {
        go(self, f);

        fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) {
            f(type_ref);
            match type_ref {
                TypeRef::Fn(params, _) => {
                    params.iter().for_each(|(_, param_type)| go(param_type, f))
                }
                TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)),
                TypeRef::RawPtr(type_ref, _)
                | TypeRef::Reference(type_ref, ..)
                | TypeRef::Array(type_ref, _)
                | TypeRef::Slice(type_ref) => go(type_ref, f),
                TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
                    for bound in bounds {
                        match bound.as_ref() {
                            TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => {
                                go_path(path, f)
                            }
                            TypeBound::Lifetime(_) | TypeBound::Error => (),
                        }
                    }
                }
                TypeRef::Path(path) => go_path(path, f),
                // Leaves: nothing nested to visit.
                TypeRef::Never | TypeRef::Placeholder | TypeRef::Macro(_) | TypeRef::Error => {}
            };
        }

        fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef)) {
            // `<T as Trait>::Assoc` — visit the anchor type `T`.
            if let Some(type_ref) = path.type_anchor() {
                go(type_ref, f);
            }
            for segment in path.segments().iter() {
                if let Some(args_and_bindings) = segment.args_and_bindings {
                    for arg in &args_and_bindings.args {
                        match arg {
                            crate::path::GenericArg::Type(type_ref) => {
                                go(type_ref, f);
                            }
                            crate::path::GenericArg::Const(_)
                            | crate::path::GenericArg::Lifetime(_) => {}
                        }
                    }
                    for binding in &args_and_bindings.bindings {
                        if let Some(type_ref) = &binding.type_ref {
                            go(type_ref, f);
                        }
                        for bound in &binding.bounds {
                            match bound.as_ref() {
                                TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => {
                                    go_path(path, f)
                                }
                                TypeBound::Lifetime(_) | TypeBound::Error => (),
                            }
                        }
                    }
                }
            }
        }
    }
}
+
+pub(crate) fn type_bounds_from_ast(
+ lower_ctx: &LowerCtx<'_>,
+ type_bounds_opt: Option<ast::TypeBoundList>,
+) -> Vec<Interned<TypeBound>> {
+ if let Some(type_bounds) = type_bounds_opt {
+ type_bounds.bounds().map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it))).collect()
+ } else {
+ vec![]
+ }
+}
+
impl TypeBound {
    /// Lowers an `ast::TypeBound` to HIR.
    pub(crate) fn from_ast(ctx: &LowerCtx<'_>, node: ast::TypeBound) -> Self {
        let lower_path_type = |path_type: ast::PathType| ctx.lower_path(path_type.path()?);

        match node.kind() {
            ast::TypeBoundKind::PathType(path_type) => {
                // A leading `?` (as in `?Sized`) becomes the `Maybe` modifier.
                let m = match node.question_mark_token() {
                    Some(_) => TraitBoundModifier::Maybe,
                    None => TraitBoundModifier::None,
                };
                lower_path_type(path_type)
                    .map(|p| TypeBound::Path(p, m))
                    .unwrap_or(TypeBound::Error)
            }
            ast::TypeBoundKind::ForType(for_type) => {
                let lt_refs = match for_type.generic_param_list() {
                    Some(gpl) => gpl
                        .lifetime_params()
                        .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(&lt)))
                        .collect(),
                    None => Box::default(),
                };
                // Only a path type is meaningful after `for<...>` in a bound.
                let path = for_type.ty().and_then(|ty| match ty {
                    ast::Type::PathType(path_type) => lower_path_type(path_type),
                    _ => None,
                });
                match path {
                    Some(p) => TypeBound::ForLifetime(lt_refs, p),
                    None => TypeBound::Error,
                }
            }
            ast::TypeBoundKind::Lifetime(lifetime) => {
                TypeBound::Lifetime(LifetimeRef::new(&lifetime))
            }
        }
    }

    /// Returns the bound's trait path and modifier, if it is a trait bound.
    /// `ForLifetime` bounds report the modifier as `None`.
    pub fn as_path(&self) -> Option<(&Path, &TraitBoundModifier)> {
        match self {
            TypeBound::Path(p, m) => Some((p, m)),
            TypeBound::ForLifetime(_, p) => Some((p, &TraitBoundModifier::None)),
            TypeBound::Lifetime(_) | TypeBound::Error => None,
        }
    }
}
+
/// A const argument (currently used for array lengths): either a concrete
/// scalar value, or a single-segment path naming a const parameter.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ConstScalarOrPath {
    Scalar(ConstScalar),
    Path(Name),
}
+
+impl std::fmt::Display for ConstScalarOrPath {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ ConstScalarOrPath::Scalar(s) => s.fmt(f),
+ ConstScalarOrPath::Path(n) => n.fmt(f),
+ }
+ }
+}
+
+impl ConstScalarOrPath {
+ pub(crate) fn from_expr_opt(expr: Option<ast::Expr>) -> Self {
+ match expr {
+ Some(x) => Self::from_expr(x),
+ None => Self::Scalar(ConstScalar::Unknown),
+ }
+ }
+
+ // FIXME: as per the comments on `TypeRef::Array`, this evaluation should not happen at this
+ // parse stage.
+ fn from_expr(expr: ast::Expr) -> Self {
+ match expr {
+ ast::Expr::PathExpr(p) => {
+ match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
+ Some(x) => Self::Path(x.as_name()),
+ None => Self::Scalar(ConstScalar::Unknown),
+ }
+ }
+ ast::Expr::PrefixExpr(prefix_expr) => match prefix_expr.op_kind() {
+ Some(ast::UnaryOp::Neg) => {
+ let unsigned = Self::from_expr_opt(prefix_expr.expr());
+ // Add sign
+ match unsigned {
+ Self::Scalar(ConstScalar::UInt(num)) => {
+ Self::Scalar(ConstScalar::Int(-(num as i128)))
+ }
+ other => other,
+ }
+ }
+ _ => Self::from_expr_opt(prefix_expr.expr()),
+ },
+ ast::Expr::Literal(literal) => Self::Scalar(match literal.kind() {
+ ast::LiteralKind::IntNumber(num) => {
+ num.value().map(ConstScalar::UInt).unwrap_or(ConstScalar::Unknown)
+ }
+ ast::LiteralKind::Char(c) => {
+ c.value().map(ConstScalar::Char).unwrap_or(ConstScalar::Unknown)
+ }
+ ast::LiteralKind::Bool(f) => ConstScalar::Bool(f),
+ _ => ConstScalar::Unknown,
+ }),
+ _ => Self::Scalar(ConstScalar::Unknown),
+ }
+ }
+}
+
/// A concrete constant value
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ConstScalar {
    Int(i128),
    UInt(u128),
    Bool(bool),
    Char(char),

    /// Case of an unknown value that rustc might know but we don't
    // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
    // constants
    // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177
    // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
    Unknown,
}
+
impl ConstScalar {
    /// The builtin type this scalar is treated as.
    ///
    /// NOTE(review): integer widths are pessimistic — unsigned values (and
    /// `Unknown`) map to `u128`, signed values to `i128`; the literal's own
    /// suffix is not stored on `ConstScalar` (see `From<Literal>` below).
    pub fn builtin_type(&self) -> BuiltinType {
        match self {
            ConstScalar::UInt(_) | ConstScalar::Unknown => BuiltinType::Uint(BuiltinUint::U128),
            ConstScalar::Int(_) => BuiltinType::Int(BuiltinInt::I128),
            ConstScalar::Char(_) => BuiltinType::Char,
            ConstScalar::Bool(_) => BuiltinType::Bool,
        }
    }
}
+
impl From<Literal> for ConstScalar {
    fn from(literal: Literal) -> Self {
        match literal {
            Literal::Char(c) => Self::Char(c),
            Literal::Bool(flag) => Self::Bool(flag),
            // The integer type suffix (second tuple field) is discarded.
            Literal::Int(num, _) => Self::Int(num),
            Literal::Uint(num, _) => Self::UInt(num),
            // Strings, floats, byte strings, etc. are not representable here.
            _ => Self::Unknown,
        }
    }
}
+
+impl std::fmt::Display for ConstScalar {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ match self {
+ ConstScalar::Int(num) => num.fmt(f),
+ ConstScalar::UInt(num) => num.fmt(f),
+ ConstScalar::Bool(flag) => flag.fmt(f),
+ ConstScalar::Char(c) => write!(f, "'{c}'"),
+ ConstScalar::Unknown => f.write_char('_'),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
new file mode 100644
index 000000000..6e22a877a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
@@ -0,0 +1,242 @@
+//! Defines hir-level representation of visibility (e.g. `pub` and `pub(crate)`).
+
+use std::{iter, sync::Arc};
+
+use hir_expand::{hygiene::Hygiene, InFile};
+use la_arena::ArenaMap;
+use syntax::ast;
+
+use crate::{
+ db::DefDatabase,
+ nameres::DefMap,
+ path::{ModPath, PathKind},
+ resolver::HasResolver,
+ ConstId, FunctionId, HasModule, LocalFieldId, ModuleId, VariantId,
+};
+
/// Visibility of an item, not yet resolved (paths are still `ModPath`s).
/// Resolve to a [`Visibility`] with `RawVisibility::resolve`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RawVisibility {
    /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
    /// equivalent to `pub(self)`.
    Module(ModPath),
    /// `pub`.
    Public,
}
+
impl RawVisibility {
    /// Private visibility, i.e. `pub(self)`, encoded as a `super` path of
    /// length 0.
    pub(crate) const fn private() -> RawVisibility {
        RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)))
    }

    /// Lowers an optional `ast::Visibility`, deriving the hygiene context
    /// from the node's file.
    pub(crate) fn from_ast(
        db: &dyn DefDatabase,
        node: InFile<Option<ast::Visibility>>,
    ) -> RawVisibility {
        Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
    }

    /// Like `from_ast`, with an explicit hygiene; a missing visibility node
    /// defaults to private.
    pub(crate) fn from_ast_with_hygiene(
        db: &dyn DefDatabase,
        node: Option<ast::Visibility>,
        hygiene: &Hygiene,
    ) -> RawVisibility {
        Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
    }

    /// Like `from_ast_with_hygiene`, but a missing visibility node falls back
    /// to the caller-supplied `default` instead of private.
    pub(crate) fn from_ast_with_hygiene_and_default(
        db: &dyn DefDatabase,
        node: Option<ast::Visibility>,
        default: RawVisibility,
        hygiene: &Hygiene,
    ) -> RawVisibility {
        let node = match node {
            None => return default,
            Some(node) => node,
        };
        match node.kind() {
            ast::VisibilityKind::In(path) => {
                let path = ModPath::from_src(db.upcast(), path, hygiene);
                let path = match path {
                    // A path that fails to lower degrades to private.
                    None => return RawVisibility::private(),
                    Some(path) => path,
                };
                RawVisibility::Module(path)
            }
            ast::VisibilityKind::PubCrate => {
                let path = ModPath::from_kind(PathKind::Crate);
                RawVisibility::Module(path)
            }
            ast::VisibilityKind::PubSuper => {
                let path = ModPath::from_kind(PathKind::Super(1));
                RawVisibility::Module(path)
            }
            ast::VisibilityKind::PubSelf => {
                let path = ModPath::from_kind(PathKind::Plain);
                RawVisibility::Module(path)
            }
            ast::VisibilityKind::Pub => RawVisibility::Public,
        }
    }

    /// Resolves the visibility path against `resolver`, yielding a concrete
    /// [`Visibility`].
    pub fn resolve(
        &self,
        db: &dyn DefDatabase,
        resolver: &crate::resolver::Resolver,
    ) -> Visibility {
        // we fall back to public visibility (i.e. fail open) if the path can't be resolved
        resolver.resolve_visibility(db, self).unwrap_or(Visibility::Public)
    }
}
+
/// Visibility of an item, with the path resolved (produced from a
/// [`RawVisibility`] by `RawVisibility::resolve`).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Visibility {
    /// Visibility is restricted to a certain module.
    Module(ModuleId),
    /// Visibility is unrestricted.
    Public,
}
+
impl Visibility {
    /// Whether an item with visibility `self` is visible from `from_module`.
    pub fn is_visible_from(self, db: &dyn DefDatabase, from_module: ModuleId) -> bool {
        let to_module = match self {
            Visibility::Module(m) => m,
            Visibility::Public => return true,
        };
        // if they're not in the same crate, it can't be visible
        if from_module.krate != to_module.krate {
            return false;
        }
        let def_map = from_module.def_map(db);
        self.is_visible_from_def_map(db, &def_map, from_module.local_id)
    }

    /// From another crate, only `pub` items are visible.
    pub(crate) fn is_visible_from_other_crate(self) -> bool {
        matches!(self, Visibility::Public)
    }

    /// Like `is_visible_from`, but starts from a module inside the given
    /// `def_map` — which may be a block `DefMap` that is still being computed,
    /// so it must not be re-queried through salsa.
    pub(crate) fn is_visible_from_def_map(
        self,
        db: &dyn DefDatabase,
        def_map: &DefMap,
        mut from_module: crate::LocalModuleId,
    ) -> bool {
        let mut to_module = match self {
            Visibility::Module(m) => m,
            Visibility::Public => return true,
        };

        // `to_module` might be the root module of a block expression. Those have the same
        // visibility as the containing module (even though no items are directly nameable from
        // there, getting this right is important for method resolution).
        // In that case, we adjust the visibility of `to_module` to point to the containing module.
        // Additional complication: `to_module` might be in `from_module`'s `DefMap`, which we're
        // currently computing, so we must not call the `def_map` query for it.
        let arc;
        let to_module_def_map =
            if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() {
                cov_mark::hit!(is_visible_from_same_block_def_map);
                def_map
            } else {
                arc = to_module.def_map(db);
                &arc
            };
        let is_block_root = matches!(to_module.block, Some(_) if to_module_def_map[to_module.local_id].parent.is_none());
        if is_block_root {
            to_module = to_module_def_map.containing_module(to_module.local_id).unwrap();
        }

        // from_module needs to be a descendant of to_module
        let mut def_map = def_map;
        let mut parent_arc;
        loop {
            if def_map.module_id(from_module) == to_module {
                return true;
            }
            match def_map[from_module].parent {
                Some(parent) => from_module = parent,
                None => {
                    // Ran out of parents inside this `DefMap`; block `DefMap`s
                    // chain up to the `DefMap` containing the block.
                    match def_map.parent() {
                        Some(module) => {
                            parent_arc = module.def_map(db);
                            def_map = &*parent_arc;
                            from_module = module.local_id;
                        }
                        // Reached the root module, nothing left to check.
                        None => return false,
                    }
                }
            }
        }
    }

    /// Returns the most permissive visibility of `self` and `other`.
    ///
    /// If there is no subset relation between `self` and `other`, returns `None` (ie. they're only
    /// visible in unrelated modules).
    pub(crate) fn max(self, other: Visibility, def_map: &DefMap) -> Option<Visibility> {
        match (self, other) {
            (Visibility::Module(_) | Visibility::Public, Visibility::Public)
            | (Visibility::Public, Visibility::Module(_)) => Some(Visibility::Public),
            (Visibility::Module(mod_a), Visibility::Module(mod_b)) => {
                if mod_a.krate != mod_b.krate {
                    return None;
                }

                // Walk each module's parent chain; whichever module is an
                // ancestor of the other is the more permissive visibility.
                let mut a_ancestors = iter::successors(Some(mod_a.local_id), |&m| {
                    let parent_id = def_map[m].parent?;
                    Some(parent_id)
                });
                let mut b_ancestors = iter::successors(Some(mod_b.local_id), |&m| {
                    let parent_id = def_map[m].parent?;
                    Some(parent_id)
                });

                if a_ancestors.any(|m| m == mod_b.local_id) {
                    // B is above A
                    return Some(Visibility::Module(mod_b));
                }

                if b_ancestors.any(|m| m == mod_a.local_id) {
                    // A is above B
                    return Some(Visibility::Module(mod_a));
                }

                None
            }
        }
    }
}
+
+/// Resolve visibility of all specific fields of a struct or union variant.
+pub(crate) fn field_visibilities_query(
+ db: &dyn DefDatabase,
+ variant_id: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Visibility>> {
+ let var_data = match variant_id {
+ VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
+ VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
+ VariantId::EnumVariantId(it) => {
+ db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
+ }
+ };
+ let resolver = variant_id.module(db).resolver(db);
+ let mut res = ArenaMap::default();
+ for (field_id, field_data) in var_data.fields().iter() {
+ res.insert(field_id, field_data.visibility.resolve(db, &resolver))
+ }
+ Arc::new(res)
+}
+
+/// Resolve visibility of a function.
+pub(crate) fn function_visibility_query(db: &dyn DefDatabase, def: FunctionId) -> Visibility {
+ let resolver = def.resolver(db);
+ db.function_data(def).visibility.resolve(db, &resolver)
+}
+
+/// Resolve visibility of a const.
+pub(crate) fn const_visibility_query(db: &dyn DefDatabase, def: ConstId) -> Visibility {
+ let resolver = def.resolver(db);
+ db.const_data(def).visibility.resolve(db, &resolver)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
new file mode 100644
index 000000000..dfd470ffc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "hir-expand"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+tracing = "0.1.35"
+either = "1.7.0"
+rustc-hash = "1.1.0"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+itertools = "0.10.3"
+hashbrown = { version = "0.12.1", features = [
+ "inline-more",
+], default-features = false }
+smallvec = { version = "1.9.0", features = ["const_new"] }
+
+stdx = { path = "../stdx", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
new file mode 100644
index 000000000..c1ddef03b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
@@ -0,0 +1,181 @@
+//! `AstIdMap` allows to create stable IDs for "large" syntax nodes like items
+//! and macro calls.
+//!
+//! Specifically, it enumerates all items in a file and uses the position of an
+//! item as an ID. That way, IDs don't change unless the set of items itself
+//! changes.
+
+use std::{
+ any::type_name,
+ fmt,
+ hash::{BuildHasher, BuildHasherDefault, Hash, Hasher},
+ marker::PhantomData,
+};
+
+use la_arena::{Arena, Idx};
+use profile::Count;
+use rustc_hash::FxHasher;
+use syntax::{ast, match_ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+
+/// `AstId` points to an AST node in a specific file.
+pub struct FileAstId<N: AstNode> {
+ raw: ErasedFileAstId,
+ _ty: PhantomData<fn() -> N>,
+}
+
+impl<N: AstNode> Clone for FileAstId<N> {
+ fn clone(&self) -> FileAstId<N> {
+ *self
+ }
+}
+impl<N: AstNode> Copy for FileAstId<N> {}
+
+impl<N: AstNode> PartialEq for FileAstId<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.raw == other.raw
+ }
+}
+impl<N: AstNode> Eq for FileAstId<N> {}
+impl<N: AstNode> Hash for FileAstId<N> {
+ fn hash<H: Hasher>(&self, hasher: &mut H) {
+ self.raw.hash(hasher);
+ }
+}
+
+impl<N: AstNode> fmt::Debug for FileAstId<N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
+ }
+}
+
+impl<N: AstNode> FileAstId<N> {
+ // Can't make this a From implementation because of coherence
+ pub fn upcast<M: AstNode>(self) -> FileAstId<M>
+ where
+ N: Into<M>,
+ {
+ FileAstId { raw: self.raw, _ty: PhantomData }
+ }
+}
+
+type ErasedFileAstId = Idx<SyntaxNodePtr>;
+
+/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
+#[derive(Default)]
+pub struct AstIdMap {
+ /// Maps stable id to unstable ptr.
+ arena: Arena<SyntaxNodePtr>,
+ /// Reverse: map ptr to id.
+ map: hashbrown::HashMap<Idx<SyntaxNodePtr>, (), ()>,
+ _c: Count<Self>,
+}
+
+impl fmt::Debug for AstIdMap {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("AstIdMap").field("arena", &self.arena).finish()
+ }
+}
+
+impl PartialEq for AstIdMap {
+ fn eq(&self, other: &Self) -> bool {
+ self.arena == other.arena
+ }
+}
+impl Eq for AstIdMap {}
+
+impl AstIdMap {
+ pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
+ assert!(node.parent().is_none());
+ let mut res = AstIdMap::default();
+ // By walking the tree in breadth-first order we make sure that parents
+    // get lower ids than children. That is, adding a new child does not
+ // change parent's id. This means that, say, adding a new function to a
+ // trait does not change ids of top-level items, which helps caching.
+ bdfs(node, |it| {
+ match_ast! {
+ match it {
+ ast::Item(module_item) => {
+ res.alloc(module_item.syntax());
+ true
+ },
+ ast::BlockExpr(block) => {
+ res.alloc(block.syntax());
+ true
+ },
+ _ => false,
+ }
+ }
+ });
+ res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
+ for (idx, ptr) in res.arena.iter() {
+ let hash = hash_ptr(ptr);
+ match res.map.raw_entry_mut().from_hash(hash, |idx2| *idx2 == idx) {
+ hashbrown::hash_map::RawEntryMut::Occupied(_) => unreachable!(),
+ hashbrown::hash_map::RawEntryMut::Vacant(entry) => {
+ entry.insert_with_hasher(hash, idx, (), |&idx| hash_ptr(&res.arena[idx]));
+ }
+ }
+ }
+ res
+ }
+
+ pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
+ let raw = self.erased_ast_id(item.syntax());
+ FileAstId { raw, _ty: PhantomData }
+ }
+ fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
+ let ptr = SyntaxNodePtr::new(item);
+ let hash = hash_ptr(&ptr);
+ match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
+ Some((&idx, &())) => idx,
+ None => panic!(
+ "Can't find {:?} in AstIdMap:\n{:?}",
+ item,
+ self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
+ ),
+ }
+ }
+
+ pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+ AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
+ }
+
+ fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
+ self.arena.alloc(SyntaxNodePtr::new(item))
+ }
+}
+
+fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
+ let mut hasher = BuildHasherDefault::<FxHasher>::default().build_hasher();
+ ptr.hash(&mut hasher);
+ hasher.finish()
+}
+
+/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
+/// order? It is a mix of breadth-first and depth first orders. Nodes for which
+/// `f` returns true are visited breadth-first, all the other nodes are explored
+/// depth-first.
+///
+/// In other words, the size of the bfs queue is bound by the number of "true"
+/// nodes.
+fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
+ let mut curr_layer = vec![node.clone()];
+ let mut next_layer = vec![];
+ while !curr_layer.is_empty() {
+ curr_layer.drain(..).for_each(|node| {
+ let mut preorder = node.preorder();
+ while let Some(event) = preorder.next() {
+ match event {
+ syntax::WalkEvent::Enter(node) => {
+ if f(node.clone()) {
+ next_layer.extend(node.children());
+ preorder.skip_subtree();
+ }
+ }
+ syntax::WalkEvent::Leave(_) => {}
+ }
+ }
+ });
+ std::mem::swap(&mut curr_layer, &mut next_layer);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
new file mode 100644
index 000000000..0c886ac4d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -0,0 +1,130 @@
+//! Builtin attributes.
+
+use crate::{db::AstDatabase, name, ExpandResult, MacroCallId, MacroCallKind};
+
+macro_rules! register_builtin {
+ ( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum BuiltinAttrExpander {
+ $($variant),*
+ }
+
+ impl BuiltinAttrExpander {
+ pub fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<tt::Subtree> {
+ let expander = match *self {
+ $( BuiltinAttrExpander::$variant => $expand, )*
+ };
+ expander(db, id, tt)
+ }
+
+ fn find_by_name(name: &name::Name) -> Option<Self> {
+ match name {
+ $( id if id == &name::name![$name] => Some(BuiltinAttrExpander::$variant), )*
+ _ => None,
+ }
+ }
+ }
+
+ };
+}
+
+impl BuiltinAttrExpander {
+ pub fn is_derive(self) -> bool {
+ matches!(self, BuiltinAttrExpander::Derive)
+ }
+ pub fn is_test(self) -> bool {
+ matches!(self, BuiltinAttrExpander::Test)
+ }
+ pub fn is_bench(self) -> bool {
+ matches!(self, BuiltinAttrExpander::Bench)
+ }
+}
+
+register_builtin! {
+ (bench, Bench) => dummy_attr_expand,
+ (cfg_accessible, CfgAccessible) => dummy_attr_expand,
+ (cfg_eval, CfgEval) => dummy_attr_expand,
+ (derive, Derive) => derive_attr_expand,
+ (global_allocator, GlobalAllocator) => dummy_attr_expand,
+ (test, Test) => dummy_attr_expand,
+ (test_case, TestCase) => dummy_attr_expand
+}
+
+pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> {
+ BuiltinAttrExpander::find_by_name(ident)
+}
+
+fn dummy_attr_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ ExpandResult::ok(tt.clone())
+}
+
+/// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute
+/// itself in name res, but we do want to expand it to something for the IDE layer, so that the input
+/// derive attributes can be downmapped, and resolved as proper paths.
+/// This is basically a hack, that simplifies the hacks we need in a lot of ide layer places to
+/// somewhat inconsistently resolve derive attributes.
+///
+/// As such, we expand `#[derive(Foo, bar::Bar)]` into
+/// ```
+/// #[Foo]
+/// #[bar::Bar]
+/// ();
+/// ```
+/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
+/// Since we do not expand the attribute in nameres though, we keep the original item.
+///
+/// The ideal expansion here would be for the `#[derive]` to re-emit the annotated item and somehow
+/// use the input paths in its output as well.
+/// But that would bring two problems with it, for one every derive would duplicate the item token tree
+/// wasting a lot of memory, and it would also require some way to use a path in a way that makes it
+/// always resolve as a derive without nameres recollecting them.
+/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
+/// [`hir::Semantics`] to make this work.
+fn derive_attr_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc = db.lookup_intern_macro_call(id);
+ let derives = match &loc.kind {
+ MacroCallKind::Attr { attr_args, is_derive: true, .. } => &attr_args.0,
+ _ => return ExpandResult::ok(Default::default()),
+ };
+ pseudo_derive_attr_expansion(tt, derives)
+}
+
+pub fn pseudo_derive_attr_expansion(
+ tt: &tt::Subtree,
+ args: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let mk_leaf = |char| {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ char,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))
+ };
+
+ let mut token_trees = Vec::new();
+ for tt in (&args.token_trees)
+ .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
+ {
+ token_trees.push(mk_leaf('#'));
+ token_trees.push(mk_leaf('['));
+ token_trees.extend(tt.iter().cloned());
+ token_trees.push(mk_leaf(']'));
+ }
+ token_trees.push(mk_leaf('('));
+ token_trees.push(mk_leaf(')'));
+ token_trees.push(mk_leaf(';'));
+ ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
new file mode 100644
index 000000000..79989bc2e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -0,0 +1,249 @@
+//! Builtin derives.
+
+use base_db::{CrateOrigin, LangCrateOrigin};
+use tracing::debug;
+
+use syntax::{
+ ast::{self, AstNode, HasGenericParams, HasModuleItem, HasName},
+ match_ast,
+};
+use tt::TokenId;
+
+use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
+
+macro_rules! register_builtin {
+ ( $($trait:ident => $expand:ident),* ) => {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum BuiltinDeriveExpander {
+ $($trait),*
+ }
+
+ impl BuiltinDeriveExpander {
+ pub fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<tt::Subtree> {
+ let expander = match *self {
+ $( BuiltinDeriveExpander::$trait => $expand, )*
+ };
+ expander(db, id, tt)
+ }
+
+ fn find_by_name(name: &name::Name) -> Option<Self> {
+ match name {
+ $( id if id == &name::name![$trait] => Some(BuiltinDeriveExpander::$trait), )*
+ _ => None,
+ }
+ }
+ }
+
+ };
+}
+
+register_builtin! {
+ Copy => copy_expand,
+ Clone => clone_expand,
+ Default => default_expand,
+ Debug => debug_expand,
+ Hash => hash_expand,
+ Ord => ord_expand,
+ PartialOrd => partial_ord_expand,
+ Eq => eq_expand,
+ PartialEq => partial_eq_expand
+}
+
+pub fn find_builtin_derive(ident: &name::Name) -> Option<BuiltinDeriveExpander> {
+ BuiltinDeriveExpander::find_by_name(ident)
+}
+
+struct BasicAdtInfo {
+ name: tt::Ident,
+ type_or_const_params: usize,
+}
+
+fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
+ let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
+ let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
+ debug!("derive node didn't parse");
+ ExpandError::Other("invalid item definition".into())
+ })?;
+ let item = macro_items.items().next().ok_or_else(|| {
+ debug!("no module item parsed");
+ ExpandError::Other("no item found".into())
+ })?;
+ let node = item.syntax();
+ let (name, params) = match_ast! {
+ match node {
+ ast::Struct(it) => (it.name(), it.generic_param_list()),
+ ast::Enum(it) => (it.name(), it.generic_param_list()),
+ ast::Union(it) => (it.name(), it.generic_param_list()),
+ _ => {
+ debug!("unexpected node is {:?}", node);
+ return Err(ExpandError::Other("expected struct, enum or union".into()))
+ },
+ }
+ };
+ let name = name.ok_or_else(|| {
+ debug!("parsed item has no name");
+ ExpandError::Other("missing name".into())
+ })?;
+ let name_token_id =
+ token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
+ let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
+ let type_or_const_params =
+ params.map_or(0, |type_param_list| type_param_list.type_or_const_params().count());
+ Ok(BasicAdtInfo { name: name_token, type_or_const_params })
+}
+
+fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
+ let mut result = Vec::<tt::TokenTree>::with_capacity(n * 2);
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: '<',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ for i in 0..n {
+ if i > 0 {
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: ',',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ }
+ result.push(
+ tt::Leaf::Ident(tt::Ident {
+ id: tt::TokenId::unspecified(),
+ text: format!("T{}", i).into(),
+ })
+ .into(),
+ );
+ result.extend(bound.iter().cloned());
+ }
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: '>',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ result
+}
+
+fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ let info = match parse_adt(tt) {
+ Ok(info) => info,
+ Err(e) => return ExpandResult::only_err(e),
+ };
+ let name = info.name;
+ let trait_path_clone = trait_path.token_trees.clone();
+ let bound = (quote! { : ##trait_path_clone }).token_trees;
+ let type_params = make_type_args(info.type_or_const_params, bound);
+ let type_args = make_type_args(info.type_or_const_params, Vec::new());
+ let trait_path = trait_path.token_trees;
+ let expanded = quote! {
+ impl ##type_params ##trait_path for #name ##type_args {}
+ };
+ ExpandResult::ok(expanded)
+}
+
+fn find_builtin_crate(db: &dyn AstDatabase, id: MacroCallId) -> tt::TokenTree {
+    // FIXME: make hygiene work for builtin derive macros
+ // such that $crate can be used here.
+ let cg = db.crate_graph();
+ let krate = db.lookup_intern_macro_call(id).krate;
+
+ let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
+ cov_mark::hit!(test_copy_expand_in_core);
+ quote! { crate }
+ } else {
+ quote! { core }
+ };
+
+ tt.token_trees[0].clone()
+}
+
+fn copy_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::marker::Copy })
+}
+
+fn clone_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::clone::Clone })
+}
+
+fn default_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::default::Default })
+}
+
+fn debug_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::fmt::Debug })
+}
+
+fn hash_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::hash::Hash })
+}
+
+fn eq_expand(db: &dyn AstDatabase, id: MacroCallId, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::Eq })
+}
+
+fn partial_eq_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::PartialEq })
+}
+
+fn ord_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::Ord })
+}
+
+fn partial_ord_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd })
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
new file mode 100644
index 000000000..76da7c9f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -0,0 +1,669 @@
+//! Builtin macro
+
+use base_db::{AnchoredPath, Edition, FileId};
+use cfg::CfgExpr;
+use either::Either;
+use mbe::{parse_exprs_with_sep, parse_to_token_tree};
+use syntax::{
+ ast::{self, AstToken},
+ SmolStr,
+};
+
+use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId, MacroCallLoc};
+
+macro_rules! register_builtin {
+ ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum BuiltinFnLikeExpander {
+ $($kind),*
+ }
+
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum EagerExpander {
+ $($e_kind),*
+ }
+
+ impl BuiltinFnLikeExpander {
+ pub fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<tt::Subtree> {
+ let expander = match *self {
+ $( BuiltinFnLikeExpander::$kind => $expand, )*
+ };
+ expander(db, id, tt)
+ }
+ }
+
+ impl EagerExpander {
+ pub fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<ExpandedEager> {
+ let expander = match *self {
+ $( EagerExpander::$e_kind => $e_expand, )*
+ };
+ expander(db, arg_id, tt)
+ }
+ }
+
+ fn find_by_name(ident: &name::Name) -> Option<Either<BuiltinFnLikeExpander, EagerExpander>> {
+ match ident {
+ $( id if id == &name::name![$name] => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )*
+ $( id if id == &name::name![$e_name] => Some(Either::Right(EagerExpander::$e_kind)), )*
+ _ => return None,
+ }
+ }
+ };
+}
+
+#[derive(Debug, Default)]
+pub struct ExpandedEager {
+ pub(crate) subtree: tt::Subtree,
+ /// The included file ID of the include macro.
+ pub(crate) included_file: Option<FileId>,
+}
+
+impl ExpandedEager {
+ fn new(subtree: tt::Subtree) -> Self {
+ ExpandedEager { subtree, included_file: None }
+ }
+}
+
+pub fn find_builtin_macro(
+ ident: &name::Name,
+) -> Option<Either<BuiltinFnLikeExpander, EagerExpander>> {
+ find_by_name(ident)
+}
+
+register_builtin! {
+ LAZY:
+ (column, Column) => column_expand,
+ (file, File) => file_expand,
+ (line, Line) => line_expand,
+ (module_path, ModulePath) => module_path_expand,
+ (assert, Assert) => assert_expand,
+ (stringify, Stringify) => stringify_expand,
+ (format_args, FormatArgs) => format_args_expand,
+ (const_format_args, ConstFormatArgs) => format_args_expand,
+ // format_args_nl only differs in that it adds a newline in the end,
+ // so we use the same stub expansion for now
+ (format_args_nl, FormatArgsNl) => format_args_expand,
+ (llvm_asm, LlvmAsm) => asm_expand,
+ (asm, Asm) => asm_expand,
+ (global_asm, GlobalAsm) => global_asm_expand,
+ (cfg, Cfg) => cfg_expand,
+ (core_panic, CorePanic) => panic_expand,
+ (std_panic, StdPanic) => panic_expand,
+ (unreachable, Unreachable) => unreachable_expand,
+ (log_syntax, LogSyntax) => log_syntax_expand,
+ (trace_macros, TraceMacros) => trace_macros_expand,
+
+ EAGER:
+ (compile_error, CompileError) => compile_error_expand,
+ (concat, Concat) => concat_expand,
+ (concat_idents, ConcatIdents) => concat_idents_expand,
+ (concat_bytes, ConcatBytes) => concat_bytes_expand,
+ (include, Include) => include_expand,
+ (include_bytes, IncludeBytes) => include_bytes_expand,
+ (include_str, IncludeStr) => include_str_expand,
+ (env, Env) => env_expand,
+ (option_env, OptionEnv) => option_env_expand
+}
+
+const DOLLAR_CRATE: tt::Ident =
+ tt::Ident { text: SmolStr::new_inline("$crate"), id: tt::TokenId::unspecified() };
+
+fn module_path_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // Just return a dummy result.
+ ExpandResult::ok(quote! { "module::path" })
+}
+
+fn line_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // dummy implementation for type-checking purposes
+ let line_num = 0;
+ let expanded = quote! {
+ #line_num
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn log_syntax_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ ExpandResult::ok(quote! {})
+}
+
+fn trace_macros_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ ExpandResult::ok(quote! {})
+}
+
+fn stringify_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let pretty = tt::pretty(&tt.token_trees);
+
+ let expanded = quote! {
+ #pretty
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn column_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // dummy implementation for type-checking purposes
+ let col_num = 0;
+ let expanded = quote! {
+ #col_num
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn assert_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let args = parse_exprs_with_sep(tt, ',');
+ let expanded = match &*args {
+ [cond, panic_args @ ..] => {
+ let comma = tt::Subtree {
+ delimiter: None,
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ char: ',',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))],
+ };
+ let cond = cond.clone();
+ let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
+ quote! {{
+ if !#cond {
+ #DOLLAR_CRATE::panic!(##panic_args);
+ }
+ }}
+ }
+ [] => quote! {{}},
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn file_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // FIXME: RA purposefully lacks knowledge of absolute file names
+ // so just return "".
+ let file_name = "";
+
+ let expanded = quote! {
+ #file_name
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn format_args_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // We expand `format_args!("", a1, a2)` to
+ // ```
+ // std::fmt::Arguments::new_v1(&[], &[
+ // std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt),
+ // std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt),
+ // ])
+ // ```,
+ // which is still not really correct, but close enough for now
+ let mut args = parse_exprs_with_sep(tt, ',');
+
+ if args.is_empty() {
+ return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule.into());
+ }
+ for arg in &mut args {
+ // Remove `key =`.
+ if matches!(arg.token_trees.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=' && p.spacing != tt::Spacing::Joint)
+ {
+ arg.token_trees.drain(..2);
+ }
+ }
+ let _format_string = args.remove(0);
+ let arg_tts = args.into_iter().flat_map(|arg| {
+ quote! { std::fmt::ArgumentV1::new(&(#arg), std::fmt::Display::fmt), }
+ }.token_trees);
+ let expanded = quote! {
+ std::fmt::Arguments::new_v1(&[], &[##arg_tts])
+ };
+ ExpandResult::ok(expanded)
+}
+
+fn asm_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // We expand all assembly snippets to `format_args!` invocations to get format syntax
+ // highlighting for them.
+
+ let mut literals = Vec::new();
+ for tt in tt.token_trees.chunks(2) {
+ match tt {
+ [tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
+ | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', id: _, spacing: _ }))] =>
+ {
+ let krate = DOLLAR_CRATE.clone();
+ literals.push(quote!(#krate::format_args!(#lit);));
+ }
+ _ => break,
+ }
+ }
+
+ let expanded = quote! {{
+ ##literals
+ loop {}
+ }};
+ ExpandResult::ok(expanded)
+}
+
+fn global_asm_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // Expand to nothing (at item-level)
+ ExpandResult::ok(quote! {})
+}
+
+fn cfg_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc = db.lookup_intern_macro_call(id);
+ let expr = CfgExpr::parse(tt);
+ let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
+ let expanded = if enabled { quote!(true) } else { quote!(false) };
+ ExpandResult::ok(expanded)
+}
+
+fn panic_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ // Expand to a macro call `$crate::panic::panic_{edition}`
+ let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
+ quote!(#DOLLAR_CRATE::panic::panic_2021!)
+ } else {
+ quote!(#DOLLAR_CRATE::panic::panic_2015!)
+ };
+
+ // Pass the original arguments
+ call.token_trees.push(tt::TokenTree::Subtree(tt.clone()));
+ ExpandResult::ok(call)
+}
+
+fn unreachable_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ // Expand to a macro call `$crate::panic::unreachable_{edition}`
+ let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
+ quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
+ } else {
+ quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
+ };
+
+ // Pass the original arguments
+ call.token_trees.push(tt::TokenTree::Subtree(tt.clone()));
+ ExpandResult::ok(call)
+}
+
+fn unquote_str(lit: &tt::Literal) -> Option<String> {
+ let lit = ast::make::tokens::literal(&lit.to_string());
+ let token = ast::String::cast(lit)?;
+ token.value().map(|it| it.into_owned())
+}
+
+fn unquote_byte_string(lit: &tt::Literal) -> Option<Vec<u8>> {
+ let lit = ast::make::tokens::literal(&lit.to_string());
+ let token = ast::ByteString::cast(lit)?;
+ token.value().map(|it| it.into_owned())
+}
+
+fn compile_error_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let err = match &*tt.token_trees {
+ [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => {
+ let text = it.text.as_str();
+ if text.starts_with('"') && text.ends_with('"') {
+ // FIXME: does not handle raw strings
+ ExpandError::Other(text[1..text.len() - 1].into())
+ } else {
+ ExpandError::Other("`compile_error!` argument must be a string".into())
+ }
+ }
+ _ => ExpandError::Other("`compile_error!` argument must be a string".into()),
+ };
+
+ ExpandResult { value: ExpandedEager::new(quote! {}), err: Some(err) }
+}
+
+fn concat_expand(
+ _db: &dyn AstDatabase,
+ _arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let mut err = None;
+ let mut text = String::new();
+ for (i, mut t) in tt.token_trees.iter().enumerate() {
+ // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
+ // to ensure the right parsing order, so skip the parentheses here. Ideally we'd
+ // implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623
+ if let tt::TokenTree::Subtree(tt::Subtree { delimiter: Some(delim), token_trees }) = t {
+ if let [tt] = &**token_trees {
+ if delim.kind == tt::DelimiterKind::Parenthesis {
+ t = tt;
+ }
+ }
+ }
+
+ match t {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {
+ // concat works with string and char literals, so remove any quotes.
+ // It also works with integer, float and boolean literals, so just use the rest
+ // as-is.
+ let component = unquote_str(it).unwrap_or_else(|| it.text.to_string());
+ text.push_str(&component);
+ }
+ // handle boolean literals
+ tt::TokenTree::Leaf(tt::Leaf::Ident(id))
+ if i % 2 == 0 && (id.text == "true" || id.text == "false") =>
+ {
+ text.push_str(id.text.as_str());
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+ _ => {
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+ }
+ }
+ }
+ ExpandResult { value: ExpandedEager::new(quote!(#text)), err }
+}
+
+fn concat_bytes_expand(
+ _db: &dyn AstDatabase,
+ _arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let mut bytes = Vec::new();
+ let mut err = None;
+ for (i, t) in tt.token_trees.iter().enumerate() {
+ match t {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ let token = ast::make::tokens::literal(&lit.to_string());
+ match token.kind() {
+ syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
+ syntax::SyntaxKind::BYTE_STRING => {
+ let components = unquote_byte_string(lit).unwrap_or_else(Vec::new);
+ components.into_iter().for_each(|x| bytes.push(x.to_string()));
+ }
+ _ => {
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+ break;
+ }
+ }
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+ tt::TokenTree::Subtree(tree)
+ if tree.delimiter_kind() == Some(tt::DelimiterKind::Bracket) =>
+ {
+ if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) {
+ err.get_or_insert(e);
+ break;
+ }
+ }
+ _ => {
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+ break;
+ }
+ }
+ }
+ let ident = tt::Ident { text: bytes.join(", ").into(), id: tt::TokenId::unspecified() };
+ ExpandResult { value: ExpandedEager::new(quote!([#ident])), err }
+}
+
+fn concat_bytes_expand_subtree(
+ tree: &tt::Subtree,
+ bytes: &mut Vec<String>,
+) -> Result<(), ExpandError> {
+ for (ti, tt) in tree.token_trees.iter().enumerate() {
+ match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ let lit = ast::make::tokens::literal(&lit.to_string());
+ match lit.kind() {
+ syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => {
+ bytes.push(lit.text().to_string())
+ }
+ _ => {
+ return Err(mbe::ExpandError::UnexpectedToken.into());
+ }
+ }
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (),
+ _ => {
+ return Err(mbe::ExpandError::UnexpectedToken.into());
+ }
+ }
+ }
+ Ok(())
+}
+
+fn concat_idents_expand(
+ _db: &dyn AstDatabase,
+ _arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let mut err = None;
+ let mut ident = String::new();
+ for (i, t) in tt.token_trees.iter().enumerate() {
+ match t {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => {
+ ident.push_str(id.text.as_str());
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+ _ => {
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+ }
+ }
+ }
+ let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() };
+ ExpandResult { value: ExpandedEager::new(quote!(#ident)), err }
+}
+
+fn relative_file(
+ db: &dyn AstDatabase,
+ call_id: MacroCallId,
+ path_str: &str,
+ allow_recursion: bool,
+) -> Result<FileId, ExpandError> {
+ let call_site = call_id.as_file().original_file(db);
+ let path = AnchoredPath { anchor: call_site, path: path_str };
+ let res = db
+ .resolve_path(path)
+ .ok_or_else(|| ExpandError::Other(format!("failed to load file `{path_str}`").into()))?;
+ // Prevent include itself
+ if res == call_site && !allow_recursion {
+ Err(ExpandError::Other(format!("recursive inclusion of `{path_str}`").into()))
+ } else {
+ Ok(res)
+ }
+}
+
+fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
+ tt.token_trees
+ .get(0)
+ .and_then(|tt| match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it),
+ _ => None,
+ })
+ .ok_or(mbe::ExpandError::ConversionError.into())
+}
+
+fn include_expand(
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let res = (|| {
+ let path = parse_string(tt)?;
+ let file_id = relative_file(db, arg_id, &path, false)?;
+
+ let subtree =
+ parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?.0;
+ Ok((subtree, file_id))
+ })();
+
+ match res {
+ Ok((subtree, file_id)) => {
+ ExpandResult::ok(ExpandedEager { subtree, included_file: Some(file_id) })
+ }
+ Err(e) => ExpandResult::only_err(e),
+ }
+}
+
+fn include_bytes_expand(
+ _db: &dyn AstDatabase,
+ _arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ if let Err(e) = parse_string(tt) {
+ return ExpandResult::only_err(e);
+ }
+
+ // FIXME: actually read the file here if the user asked for macro expansion
+ let res = tt::Subtree {
+ delimiter: None,
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+ text: r#"b"""#.into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ };
+ ExpandResult::ok(ExpandedEager::new(res))
+}
+
+fn include_str_expand(
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let path = match parse_string(tt) {
+ Ok(it) => it,
+ Err(e) => return ExpandResult::only_err(e),
+ };
+
+ // FIXME: we're not able to read excluded files (which is most of them because
+ // it's unusual to `include_str!` a Rust file), but we can return an empty string.
+ // Ideally, we'd be able to offer a precise expansion if the user asks for macro
+ // expansion.
+ let file_id = match relative_file(db, arg_id, &path, true) {
+ Ok(file_id) => file_id,
+ Err(_) => {
+ return ExpandResult::ok(ExpandedEager::new(quote!("")));
+ }
+ };
+
+ let text = db.file_text(file_id);
+ let text = &*text;
+
+ ExpandResult::ok(ExpandedEager::new(quote!(#text)))
+}
+
+fn get_env_inner(db: &dyn AstDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
+ let krate = db.lookup_intern_macro_call(arg_id).krate;
+ db.crate_graph()[krate].env.get(key)
+}
+
+fn env_expand(
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let key = match parse_string(tt) {
+ Ok(it) => it,
+ Err(e) => return ExpandResult::only_err(e),
+ };
+
+ let mut err = None;
+ let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| {
+ // The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
+ // unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
+ if key == "OUT_DIR" {
+ err = Some(ExpandError::Other(
+ r#"`OUT_DIR` not set, enable "build scripts" to fix"#.into(),
+ ));
+ }
+
+ // If the variable is unset, still return a dummy string to help type inference along.
+ // We cannot use an empty string here, because for
+ // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become
+        // `include!("foo.rs")`, which might go into an infinite loop
+ "__RA_UNIMPLEMENTED__".to_string()
+ });
+ let expanded = quote! { #s };
+
+ ExpandResult { value: ExpandedEager::new(expanded), err }
+}
+
+fn option_env_expand(
+    db: &dyn AstDatabase,
+    arg_id: MacroCallId,
+    tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+    let key = match parse_string(tt) {
+        Ok(it) => it,
+        Err(e) => return ExpandResult::only_err(e),
+    };
+
+    // Use the fully-qualified `Option` path in both arms; `Some` is not
+    // importable at `std::option::Some`, only via `std::option::Option::Some`.
+    let expanded = match get_env_inner(db, arg_id, &key) {
+        None => quote! { std::option::Option::None::<&str> },
+        Some(s) => quote! { std::option::Option::Some(#s) },
+    };
+
+    ExpandResult::ok(ExpandedEager::new(expanded))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
new file mode 100644
index 000000000..bd60c3d26
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -0,0 +1,509 @@
+//! Defines database & queries for macro expansion.
+
+use std::sync::Arc;
+
+use base_db::{salsa, SourceDatabase};
+use either::Either;
+use limit::Limit;
+use mbe::syntax_node_to_token_tree;
+use rustc_hash::FxHashSet;
+use syntax::{
+ ast::{self, HasAttrs, HasDocComments},
+ AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, T,
+};
+
+use crate::{
+ ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, fixup,
+ hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
+ ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
+ MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
+};
+
+/// Total limit on the number of tokens produced by any macro invocation.
+///
+/// If an invocation produces more tokens than this limit, it will not be stored in the database and
+/// an error will be emitted.
+///
+/// Actual max for `analysis-stats .` at some point: 30672.
+static TOKEN_LIMIT: Limit = Limit::new(524_288);
+
+/// A macro definition in "compiled" form, ready to produce an expansion for a
+/// macro call.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum TokenExpander {
+    /// Old-style `macro_rules` or the new macros 2.0
+    DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
+    /// Stuff like `line!` and `file!`.
+    Builtin(BuiltinFnLikeExpander),
+    /// `global_allocator` and such.
+    BuiltinAttr(BuiltinAttrExpander),
+    /// `derive(Copy)` and such.
+    BuiltinDerive(BuiltinDeriveExpander),
+    /// The thing we love the most here in rust-analyzer -- procedural macros.
+    ProcMacro(ProcMacroExpander),
+}
+
+impl TokenExpander {
+    /// Expands the argument token tree of call `id` with this expander.
+    fn expand(
+        &self,
+        db: &dyn AstDatabase,
+        id: MacroCallId,
+        tt: &tt::Subtree,
+    ) -> ExpandResult<tt::Subtree> {
+        match self {
+            TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
+            TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
+            TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
+            TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
+            TokenExpander::ProcMacro(_) => {
+                // We store the result in salsa db to prevent non-deterministic behavior in
+                // some proc-macro implementations.
+                // See #4315 for details
+                db.expand_proc_macro(id)
+            }
+        }
+    }
+
+    /// Maps a token id through the expansion. Only declarative macros remap
+    /// token ids; all other expanders pass them through unchanged.
+    pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+        match self {
+            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
+            TokenExpander::Builtin(..)
+            | TokenExpander::BuiltinAttr(..)
+            | TokenExpander::BuiltinDerive(..)
+            | TokenExpander::ProcMacro(..) => id,
+        }
+    }
+
+    /// Maps a token id out of the expansion, together with whether it came from
+    /// the call site or the definition site (always the call site for
+    /// non-declarative expanders).
+    pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+        match self {
+            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
+            TokenExpander::Builtin(..)
+            | TokenExpander::BuiltinAttr(..)
+            | TokenExpander::BuiltinDerive(..)
+            | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
+        }
+    }
+}
+
+// FIXME: rename to ExpandDatabase
+#[salsa::query_group(AstDatabaseStorage)]
+pub trait AstDatabase: SourceDatabase {
+    /// Maps syntax nodes of a file to stable ids (and back), so other queries
+    /// can refer to syntax without depending on the whole parse tree.
+    fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
+
+    /// Main public API -- parses a hir file, not caring whether it's a real
+    /// file or a macro expansion.
+    #[salsa::transparent]
+    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
+    /// Implementation for the macro case.
+    fn parse_macro_expansion(
+        &self,
+        macro_file: MacroFile,
+    ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;
+
+    /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
+    /// reason why we use salsa at all.
+    ///
+    /// We encode macro definitions into ids of macro calls, this is what allows
+    /// us to be incremental.
+    #[salsa::interned]
+    fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
+
+    /// Lowers syntactic macro call to a token tree representation.
+    #[salsa::transparent]
+    fn macro_arg(
+        &self,
+        id: MacroCallId,
+    ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+    /// Extracts syntax node, corresponding to a macro call. That's a firewall
+    /// query, only typing in the macro call itself changes the returned
+    /// subtree.
+    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
+    /// Gets the expander for this macro. This compiles declarative macros, and
+    /// just fetches procedural ones.
+    fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
+
+    /// Expand macro call to a token tree. This query is LRUed (we keep 128 or so results in memory)
+    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
+    /// Special case of the previous query for procedural macros. We can't LRU
+    /// proc macros, since they are not deterministic in general, and
+    /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
+    /// heroically debugged this once!
+    fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
+    /// Firewall query that returns the error from the `macro_expand` query.
+    fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;
+
+    /// Computes hygiene information (notably `$crate` resolution) for a file.
+    fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
+}
+
+/// This expands the given macro call, but with different arguments. This is
+/// used for completion, where we want to see what 'would happen' if we insert a
+/// token. The `token_to_map` is mapped down into the expansion, and the mapped
+/// token is returned.
+pub fn expand_speculative(
+    db: &dyn AstDatabase,
+    actual_macro_call: MacroCallId,
+    speculative_args: &SyntaxNode,
+    token_to_map: SyntaxToken,
+) -> Option<(SyntaxNode, SyntaxToken)> {
+    let loc = db.lookup_intern_macro_call(actual_macro_call);
+    let macro_def = db.macro_def(loc.def).ok()?;
+    let token_range = token_to_map.text_range();
+
+    // Build the subtree and token mapping for the speculative args
+    let censor = censor_for_macro_input(&loc, speculative_args);
+    let mut fixups = fixup::fixup_syntax(speculative_args);
+    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+    let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+        speculative_args,
+        fixups.token_map,
+        fixups.next_id,
+        fixups.replace,
+        fixups.append,
+    );
+
+    // For attribute macro calls, also lower the attribute's own input token
+    // tree (if any) and try to find `token_to_map` inside it.
+    let (attr_arg, token_id) = match loc.kind {
+        MacroCallKind::Attr { invoc_attr_index, is_derive, .. } => {
+            let attr = if is_derive {
+                // for pseudo-derive expansion we actually pass the attribute itself only
+                ast::Attr::cast(speculative_args.clone())
+            } else {
+                // Attributes may have an input token tree, build the subtree and map for this as well
+                // then try finding a token id for our token if it is inside this input subtree.
+                let item = ast::Item::cast(speculative_args.clone())?;
+                item.doc_comments_and_attrs().nth(invoc_attr_index as usize).and_then(Either::left)
+            }?;
+            match attr.token_tree() {
+                Some(token_tree) => {
+                    let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
+                    tree.delimiter = None;
+
+                    let shift = mbe::Shift::new(&tt);
+                    shift.shift_all(&mut tree);
+
+                    let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
+                        let attr_input_start =
+                            token_tree.left_delimiter_token()?.text_range().start();
+                        let range = token_range.checked_sub(attr_input_start)?;
+                        let token_id = shift.shift(map.token_by_range(range)?);
+                        Some(token_id)
+                    } else {
+                        None
+                    };
+                    (Some(tree), token_id)
+                }
+                _ => (None, None),
+            }
+        }
+        _ => (None, None),
+    };
+    let token_id = match token_id {
+        Some(token_id) => token_id,
+        // token wasn't inside an attribute input so it has to be in the general macro input
+        None => {
+            let range = token_range.checked_sub(speculative_args.text_range().start())?;
+            let token_id = spec_args_tmap.token_by_range(range)?;
+            macro_def.map_id_down(token_id)
+        }
+    };
+
+    // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
+    // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
+    let mut speculative_expansion = match loc.def.kind {
+        MacroDefKind::ProcMacro(expander, ..) => {
+            tt.delimiter = None;
+            expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
+        }
+        MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
+            pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
+        }
+        _ => macro_def.expand(db, actual_macro_call, &tt),
+    };
+
+    let expand_to = macro_expand_to(db, actual_macro_call);
+    fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
+    let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
+
+    // Map the token id back into the parsed expansion to find the resulting token.
+    let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?;
+    let token = node.syntax_node().covering_element(range).into_token()?;
+    Some((node.syntax_node(), token))
+}
+
+/// Computes the `AstIdMap` for a file; an empty map if the file can't be parsed/expanded.
+fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
+    let map = match db.parse_or_expand(file_id) {
+        Some(node) => AstIdMap::from_source(&node),
+        None => AstIdMap::default(),
+    };
+    Arc::new(map)
+}
+
+fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
+    match file_id.0 {
+        HirFileIdRepr::FileId(file_id) => {
+            let source_file = db.parse(file_id);
+            Some(source_file.tree().syntax().clone())
+        }
+        HirFileIdRepr::MacroFile(macro_file) => {
+            // FIXME: Note how we convert from `Parse` to `SyntaxNode` here,
+            // forgetting about parse errors.
+            let (parse, _token_map) = db.parse_macro_expansion(macro_file).value?;
+            Some(parse.syntax_node())
+        }
+    }
+}
+
+/// Parses the expansion of `macro_file` into a syntax tree, together with a
+/// token map pointing back into the expansion's token tree.
+fn parse_macro_expansion(
+    db: &dyn AstDatabase,
+    macro_file: MacroFile,
+) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
+    let _p = profile::span("parse_macro_expansion");
+    let result = db.macro_expand(macro_file.macro_call_id);
+
+    if let Some(err) = &result.err {
+        // Note:
+        // The long-term goal is for every macro expansion to succeed, at which
+        // point the debug log below will never fire.
+        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+        let node = loc.kind.to_node(db);
+
+        // collect parent information for warning log
+        let parents =
+            std::iter::successors(loc.kind.file_id().call_node(db), |it| it.file_id.call_node(db))
+                .map(|n| format!("{:#}", n.value))
+                .collect::<Vec<_>>()
+                .join("\n");
+
+        tracing::debug!(
+            "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
+            err,
+            node.value,
+            parents
+        );
+    }
+    let tt = match result.value {
+        Some(tt) => tt,
+        None => return ExpandResult { value: None, err: result.err },
+    };
+
+    let expand_to = macro_expand_to(db, macro_file.macro_call_id);
+
+    tracing::debug!("expanded = {}", tt.as_debug_string());
+    tracing::debug!("kind = {:?}", expand_to);
+
+    let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
+
+    ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: result.err }
+}
+
+/// Lowers the macro call's argument text to a token tree, applying syntax
+/// fixups and input censoring; also returns the token map and the fixup undo
+/// info needed to reverse the fixups after expansion.
+fn macro_arg(
+    db: &dyn AstDatabase,
+    id: MacroCallId,
+) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+    let arg = db.macro_arg_text(id)?;
+    let loc = db.lookup_intern_macro_call(id);
+
+    let node = SyntaxNode::new_root(arg);
+    let censor = censor_for_macro_input(&loc, &node);
+    let mut fixups = fixup::fixup_syntax(&node);
+    // Censored nodes are replaced with nothing at all.
+    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+    let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+        &node,
+        fixups.token_map,
+        fixups.next_id,
+        fixups.replace,
+        fixups.append,
+    );
+
+    if loc.def.is_proc_macro() {
+        // proc macros expect their inputs without parentheses, MBEs expect it with them included
+        tt.delimiter = None;
+    }
+
+    Some(Arc::new((tt, tmap, fixups.undo_info)))
+}
+
+/// Computes the set of nodes to strip from a macro's input before expansion:
+/// for derives, every `derive` attribute up to and including the invoking one;
+/// for attribute macros, the invoking attribute itself.
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
+    fn censor(loc: &MacroCallLoc, node: &SyntaxNode) -> Option<FxHashSet<SyntaxNode>> {
+        match loc.kind {
+            MacroCallKind::FnLike { .. } => None,
+            MacroCallKind::Derive { derive_attr_index, .. } => {
+                cov_mark::hit!(derive_censoring);
+                let item = ast::Item::cast(node.clone())?;
+                let censored = item
+                    .attrs()
+                    .take(derive_attr_index as usize + 1)
+                    // FIXME
+                    .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
+                    .map(|it| it.syntax().clone())
+                    .collect();
+                Some(censored)
+            }
+            MacroCallKind::Attr { is_derive: true, .. } => None,
+            MacroCallKind::Attr { invoc_attr_index, .. } => {
+                cov_mark::hit!(attribute_macro_attr_censoring);
+                let item = ast::Item::cast(node.clone())?;
+                let invoking = item
+                    .doc_comments_and_attrs()
+                    .nth(invoc_attr_index as usize)
+                    .and_then(Either::left);
+                Some(invoking.map(|attr| attr.syntax().clone()).into_iter().collect())
+            }
+        }
+    }
+    censor(loc, node).unwrap_or_default()
+}
+
+/// Fetches the raw (green-tree) argument of a macro call, refusing unbalanced
+/// fn-like invocations.
+fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
+    // Does the argument start and end with a matching delimiter pair?
+    fn is_balanced_tt(arg: &SyntaxNode) -> bool {
+        let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
+        let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
+        matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']))
+    }
+
+    let loc = db.lookup_intern_macro_call(id);
+    let arg = loc.kind.arg(db)?;
+    if matches!(loc.kind, MacroCallKind::FnLike { .. }) && !is_balanced_tt(&arg) {
+        // Don't expand malformed (unbalanced) macro invocations. This is
+        // less than ideal, but trying to expand unbalanced macro calls
+        // sometimes produces pathological, deeply nested code which breaks
+        // all kinds of things.
+        //
+        // Some day, we'll have explicit recursion counters for all
+        // recursive things, at which point this code might be removed.
+        cov_mark::hit!(issue9358_bad_macro_stack_overflow);
+        return None;
+    }
+    Some(arg.green().into())
+}
+
+/// Builds the `TokenExpander` for a macro definition: declarative macros are
+/// compiled from their token tree; built-in and procedural expanders are
+/// simply wrapped.
+fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError> {
+    match id.kind {
+        MacroDefKind::Declarative(ast_id) => {
+            let (mac, def_site_token_map) = match ast_id.to_node(db) {
+                ast::Macro::MacroRules(macro_rules) => {
+                    let arg = macro_rules
+                        .token_tree()
+                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
+                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                    let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt)?;
+                    (mac, def_site_token_map)
+                }
+                ast::Macro::MacroDef(macro_def) => {
+                    let arg = macro_def
+                        .body()
+                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
+                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                    let mac = mbe::DeclarativeMacro::parse_macro2(&tt)?;
+                    (mac, def_site_token_map)
+                }
+            };
+            Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
+        }
+        MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
+        MacroDefKind::BuiltInAttr(expander, _) => {
+            Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
+        }
+        MacroDefKind::BuiltInDerive(expander, _) => {
+            Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
+        }
+        MacroDefKind::BuiltInEager(..) => {
+            // FIXME: Return a random error here just to make the types align.
+            // This obviously should do something real instead.
+            Err(mbe::ParseError::UnexpectedToken("unexpected eager macro".into()))
+        }
+        MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
+    }
+}
+
+/// Expands macro call `id` to a token tree, enforcing `TOKEN_LIMIT` on the
+/// output and reversing any syntax fixups that were applied to the input.
+fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> {
+    let _p = profile::span("macro_expand");
+    let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+    if let Some(eager) = &loc.eager {
+        // Eager macros were already expanded when the call was interned.
+        return ExpandResult {
+            value: Some(eager.arg_or_expansion.clone()),
+            // FIXME: There could be errors here!
+            err: None,
+        };
+    }
+
+    let macro_arg = match db.macro_arg(id) {
+        Some(it) => it,
+        None => {
+            return ExpandResult::only_err(ExpandError::Other(
+                "Failed to lower macro args to token tree".into(),
+            ))
+        }
+    };
+
+    let expander = match db.macro_def(loc.def) {
+        Ok(it) => it,
+        // FIXME: This is weird -- we effectively report macro *definition*
+        // errors lazily, when we try to expand the macro. Instead, they should
+        // be reported at the definition site (when we construct a def map).
+        Err(err) => {
+            return ExpandResult::only_err(ExpandError::Other(
+                format!("invalid macro definition: {}", err).into(),
+            ))
+        }
+    };
+    let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0);
+    // Set a hard limit for the expanded tt
+    let count = tt.count();
+    if TOKEN_LIMIT.check(count).is_err() {
+        return ExpandResult::only_err(ExpandError::Other(
+            format!(
+                "macro invocation exceeds token limit: produced {} tokens, limit is {}",
+                count,
+                TOKEN_LIMIT.inner(),
+            )
+            .into(),
+        ));
+    }
+
+    // Undo the fixups applied in `macro_arg` so they don't leak into the output.
+    fixup::reverse_fixups(&mut tt, &macro_arg.1, &macro_arg.2);
+
+    ExpandResult { value: Some(Arc::new(tt)), err }
+}
+
+fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
+ db.macro_expand(macro_call).err
+}
+
+/// Expands a procedural macro call. This is its own salsa query (never LRUed)
+/// because proc-macro output may be non-deterministic; see the trait docs.
+fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
+    let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+    let macro_arg = match db.macro_arg(id) {
+        Some(it) => it,
+        None => {
+            return ExpandResult::only_err(ExpandError::Other("No arguments for proc-macro".into()))
+        }
+    };
+
+    let expander = match loc.def.kind {
+        MacroDefKind::ProcMacro(expander, ..) => expander,
+        _ => unreachable!(),
+    };
+
+    let attr_arg = match &loc.kind {
+        MacroCallKind::Attr { attr_args, .. } => {
+            let mut attr_args = attr_args.0.clone();
+            // NOTE(review): the attribute args' token ids are shifted relative to the
+            // main input -- presumably to keep the two id ranges disjoint; confirm.
+            mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
+            Some(attr_args)
+        }
+        _ => None,
+    };
+
+    expander.expand(db, loc.krate, &macro_arg.0, attr_arg.as_ref())
+}
+
+/// Query implementation: builds the hygiene frame for a file.
+fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
+    let frame = HygieneFrame::new(db, file_id);
+    Arc::new(frame)
+}
+
+/// Returns what kind of syntax the given macro call is expected to expand to.
+fn macro_expand_to(db: &dyn AstDatabase, id: MacroCallId) -> ExpandTo {
+    db.lookup_intern_macro_call(id).kind.expand_to()
+}
+
+/// Parses a token tree with the parser entry point matching `expand_to`.
+fn token_tree_to_syntax_node(
+    tt: &tt::Subtree,
+    expand_to: ExpandTo,
+) -> (Parse<SyntaxNode>, mbe::TokenMap) {
+    use mbe::TopEntryPoint;
+    let entry_point = match expand_to {
+        ExpandTo::Statements => TopEntryPoint::MacroStmts,
+        ExpandTo::Items => TopEntryPoint::MacroItems,
+        ExpandTo::Pattern => TopEntryPoint::Pattern,
+        ExpandTo::Type => TopEntryPoint::Type,
+        ExpandTo::Expr => TopEntryPoint::Expr,
+    };
+    mbe::token_tree_to_syntax_node(tt, entry_point)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
new file mode 100644
index 000000000..5fd099aea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -0,0 +1,266 @@
+//! Eager expansion related utils
+//!
+//! Here is a dump of a discussion from Vadim Petrochenkov about Eager Expansion and
+//! Its name resolution :
+//!
+//! > Eagerly expanded macros (and also macros eagerly expanded by eagerly expanded macros,
+//! > which actually happens in practice too!) are resolved at the location of the "root" macro
+//! > that performs the eager expansion on its arguments.
+//! > If some name cannot be resolved at the eager expansion time it's considered unresolved,
+//! > even if becomes available later (e.g. from a glob import or other macro).
+//!
+//! > Eagerly expanded macros don't add anything to the module structure of the crate and
+//! > don't build any speculative module structures, i.e. they are expanded in a "flat"
+//! > way even if tokens in them look like modules.
+//!
+//! > In other words, it kinda works for simple cases for which it was originally intended,
+//! > and we need to live with it because it's available on stable and widely relied upon.
+//!
+//!
+//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
+use std::sync::Arc;
+
+use base_db::CrateId;
+use syntax::{ted, SyntaxNode};
+
+use crate::{
+ ast::{self, AstNode},
+ db::AstDatabase,
+ hygiene::Hygiene,
+ mod_path::ModPath,
+ EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
+ MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
+};
+
+/// Proof token that an error has already been reported to a diagnostic sink.
+/// The private field prevents construction outside this module.
+#[derive(Debug)]
+pub struct ErrorEmitted {
+    _private: (),
+}
+
+/// A sink for expansion errors, with helpers that report an error and hand
+/// back an [`ErrorEmitted`] proof token in one step.
+pub trait ErrorSink {
+    /// Reports a single expansion error.
+    fn emit(&mut self, err: ExpandError);
+
+    /// Unwraps `opt`; if it is `None`, reports `error()` and returns the proof.
+    fn option<T>(
+        &mut self,
+        opt: Option<T>,
+        error: impl FnOnce() -> ExpandError,
+    ) -> Result<T, ErrorEmitted> {
+        match opt {
+            Some(it) => Ok(it),
+            None => {
+                self.emit(error());
+                Err(ErrorEmitted { _private: () })
+            }
+        }
+    }
+
+    /// Like [`ErrorSink::option`], but with a lazily-computed option.
+    fn option_with<T>(
+        &mut self,
+        opt: impl FnOnce() -> Option<T>,
+        error: impl FnOnce() -> ExpandError,
+    ) -> Result<T, ErrorEmitted> {
+        self.option(opt(), error)
+    }
+
+    /// Unwraps `res`; an `Err` is reported to the sink.
+    fn result<T>(&mut self, res: Result<T, ExpandError>) -> Result<T, ErrorEmitted> {
+        match res {
+            Ok(it) => Ok(it),
+            Err(e) => {
+                self.emit(e);
+                Err(ErrorEmitted { _private: () })
+            }
+        }
+    }
+
+    /// Unwraps an `ExpandResult`, reporting its error (if any) even when a
+    /// value is also present.
+    fn expand_result_option<T>(&mut self, res: ExpandResult<Option<T>>) -> Result<T, ErrorEmitted> {
+        match (res.value, res.err) {
+            (None, Some(err)) => {
+                self.emit(err);
+                Err(ErrorEmitted { _private: () })
+            }
+            (Some(value), opt_err) => {
+                if let Some(err) = opt_err {
+                    self.emit(err);
+                }
+                Ok(value)
+            }
+            (None, None) => unreachable!("`ExpandResult` without value or error"),
+        }
+    }
+}
+
+/// Any `FnMut(ExpandError)` closure can serve as an error sink.
+impl ErrorSink for &'_ mut dyn FnMut(ExpandError) {
+    fn emit(&mut self, err: ExpandError) {
+        self(err);
+    }
+}
+
+/// Expands an eager macro call: the argument is first fully expanded itself
+/// (via `eager_macro_recur`), then the eager macro is run on the result, and
+/// the final expansion is interned as a new macro call.
+pub fn expand_eager_macro(
+    db: &dyn AstDatabase,
+    krate: CrateId,
+    macro_call: InFile<ast::MacroCall>,
+    def: MacroDefId,
+    resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
+    diagnostic_sink: &mut dyn FnMut(ExpandError),
+) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
+    let hygiene = Hygiene::new(db, macro_call.file_id);
+    let parsed_args = macro_call
+        .value
+        .token_tree()
+        .map(|tt| mbe::syntax_node_to_token_tree(tt.syntax()).0)
+        .unwrap_or_default();
+
+    let ast_map = db.ast_id_map(macro_call.file_id);
+    let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
+    let expand_to = ExpandTo::from_call_site(&macro_call.value);
+
+    // Note:
+    // When `lazy_expand` is called, its *parent* file must already exist.
+    // Here we store an eager macro id for the argument expanded subtree
+    // for that purpose.
+    let arg_id = db.intern_macro_call(MacroCallLoc {
+        def,
+        krate,
+        eager: Some(EagerCallInfo {
+            arg_or_expansion: Arc::new(parsed_args.clone()),
+            included_file: None,
+        }),
+        kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
+    });
+
+    // Recursively expand any macro calls nested inside the argument.
+    let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, mbe::TopEntryPoint::Expr).0;
+    let result = match eager_macro_recur(
+        db,
+        &hygiene,
+        InFile::new(arg_id.as_file(), parsed_args.syntax_node()),
+        krate,
+        resolver,
+        diagnostic_sink,
+    ) {
+        Ok(Ok(it)) => it,
+        Ok(Err(err)) => return Ok(Err(err)),
+        Err(err) => return Err(err),
+    };
+    let subtree = to_subtree(&result);
+
+    if let MacroDefKind::BuiltInEager(eager, _) = def.kind {
+        let res = eager.expand(db, arg_id, &subtree);
+        if let Some(err) = res.err {
+            diagnostic_sink(err);
+        }
+
+        let loc = MacroCallLoc {
+            def,
+            krate,
+            eager: Some(EagerCallInfo {
+                arg_or_expansion: Arc::new(res.value.subtree),
+                included_file: res.value.included_file,
+            }),
+            kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
+        };
+
+        Ok(Ok(db.intern_macro_call(loc)))
+    } else {
+        panic!("called `expand_eager_macro` on non-eager macro def {:?}", def);
+    }
+}
+
+/// Converts a syntax node to a token tree, stripping the outer delimiter.
+fn to_subtree(node: &SyntaxNode) -> tt::Subtree {
+    let (mut subtree, _token_map) = mbe::syntax_node_to_token_tree(node);
+    subtree.delimiter = None;
+    subtree
+}
+
+/// Expands a (non-eager) macro call through the regular lazy machinery,
+/// returning the parsed expansion together with any expansion error.
+fn lazy_expand(
+    db: &dyn AstDatabase,
+    def: &MacroDefId,
+    macro_call: InFile<ast::MacroCall>,
+    krate: CrateId,
+) -> ExpandResult<Option<InFile<SyntaxNode>>> {
+    let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
+    let expand_to = ExpandTo::from_call_site(&macro_call.value);
+
+    let kind = MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to };
+    let id = def.as_lazy_macro(db, krate, kind);
+
+    let err = db.macro_expand_error(id);
+    let file = id.as_file();
+    let value = db.parse_or_expand(file).map(|node| InFile::new(file, node));
+
+    ExpandResult { value, err }
+}
+
+/// Recursively expands every macro call found inside `curr`, splicing each
+/// expansion back into a mutable copy of the tree.
+fn eager_macro_recur(
+    db: &dyn AstDatabase,
+    hygiene: &Hygiene,
+    curr: InFile<SyntaxNode>,
+    krate: CrateId,
+    macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
+    // `mut` so the `ErrorSink` methods taking `&mut self` can be called on the reference.
+    mut diagnostic_sink: &mut dyn FnMut(ExpandError),
+) -> Result<Result<SyntaxNode, ErrorEmitted>, UnresolvedMacro> {
+    let original = curr.value.clone_for_update();
+
+    let children = original.descendants().filter_map(ast::MacroCall::cast);
+    let mut replacements = Vec::new();
+
+    // Collect replacement
+    for child in children {
+        let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+            Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?,
+            None => {
+                diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
+                continue;
+            }
+        };
+        let insert = match def.kind {
+            // Nested eager macros are expanded eagerly as well.
+            MacroDefKind::BuiltInEager(..) => {
+                let id = match expand_eager_macro(
+                    db,
+                    krate,
+                    curr.with_value(child.clone()),
+                    def,
+                    macro_resolver,
+                    diagnostic_sink,
+                ) {
+                    Ok(Ok(it)) => it,
+                    Ok(Err(err)) => return Ok(Err(err)),
+                    Err(err) => return Err(err),
+                };
+                db.parse_or_expand(id.as_file())
+                    .expect("successful macro expansion should be parseable")
+                    .clone_for_update()
+            }
+            // All other macros go through the lazy expansion path, then are
+            // themselves scanned for further macro calls.
+            MacroDefKind::Declarative(_)
+            | MacroDefKind::BuiltIn(..)
+            | MacroDefKind::BuiltInAttr(..)
+            | MacroDefKind::BuiltInDerive(..)
+            | MacroDefKind::ProcMacro(..) => {
+                let res = lazy_expand(db, &def, curr.with_value(child.clone()), krate);
+                let val = match diagnostic_sink.expand_result_option(res) {
+                    Ok(it) => it,
+                    Err(err) => return Ok(Err(err)),
+                };
+
+                // replace macro inside
+                let hygiene = Hygiene::new(db, val.file_id);
+                match eager_macro_recur(db, &hygiene, val, krate, macro_resolver, diagnostic_sink) {
+                    Ok(Ok(it)) => it,
+                    Ok(Err(err)) => return Ok(Err(err)),
+                    Err(err) => return Err(err),
+                }
+            }
+        };
+
+        // check if the whole original syntax is replaced
+        if child.syntax() == &original {
+            return Ok(Ok(insert));
+        }
+
+        replacements.push((child, insert));
+    }
+
+    // NOTE(review): replacements are applied in reverse collection order —
+    // presumably so that earlier edits don't invalidate later nodes; confirm.
+    replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
+    Ok(Ok(original))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
new file mode 100644
index 000000000..9999790fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -0,0 +1,382 @@
+//! To make attribute macros work reliably when typing, we need to take care to
+//! fix up syntax errors in the code we're passing to them.
+use std::mem;
+
+use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{self, AstNode},
+ match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+};
+use tt::Subtree;
+
+/// The result of calculating fixes for a syntax node -- a bunch of changes
+/// (appending to and replacing nodes), the information that is needed to
+/// reverse those changes afterwards, and a token map.
+#[derive(Debug)]
+pub(crate) struct SyntaxFixups {
+    // Synthetic tokens to insert after the given element.
+    pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    // Synthetic tokens that replace the given element entirely.
+    pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    // What `reverse_fixups` needs to undo the replacements.
+    pub(crate) undo_info: SyntaxFixupUndoInfo,
+    pub(crate) token_map: TokenMap,
+    // Next free synthetic-token id, for further lowering passes.
+    pub(crate) next_id: u32,
+}
+
+/// This is the information needed to reverse the fixups.
+#[derive(Debug, PartialEq, Eq)]
+pub struct SyntaxFixupUndoInfo {
+    // Original token trees of replaced nodes, indexed by synthetic-token id.
+    original: Vec<Subtree>,
+}
+
+/// Sentinel id for synthetic tokens that have no original counterpart; such
+/// tokens are simply dropped again by `reverse_fixups`.
+const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+
+/// Scans `node` and records fixups: expressions containing syntax errors are
+/// replaced wholesale by a `__ra_fixup` identifier (keeping the original
+/// tokens so the change can be reversed), and a few common incomplete
+/// constructs get missing tokens appended.
+pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+    let mut append = FxHashMap::<SyntaxElement, _>::default();
+    let mut replace = FxHashMap::<SyntaxElement, _>::default();
+    let mut preorder = node.preorder();
+    let mut original = Vec::new();
+    let mut token_map = TokenMap::default();
+    let mut next_id = 0;
+    while let Some(event) = preorder.next() {
+        let node = match event {
+            syntax::WalkEvent::Enter(node) => node,
+            syntax::WalkEvent::Leave(_) => continue,
+        };
+
+        if can_handle_error(&node) && has_error_to_handle(&node) {
+            // the node contains an error node, we have to completely replace it by something valid
+            let (original_tree, new_tmap, new_next_id) =
+                mbe::syntax_node_to_token_tree_with_modifications(
+                    &node,
+                    mem::take(&mut token_map),
+                    next_id,
+                    Default::default(),
+                    Default::default(),
+                );
+            token_map = new_tmap;
+            next_id = new_next_id;
+            // The synthetic token's id is the index of the saved original tree,
+            // which is how `reverse_fixups` finds it again.
+            let idx = original.len() as u32;
+            original.push(original_tree);
+            let replacement = SyntheticToken {
+                kind: SyntaxKind::IDENT,
+                text: "__ra_fixup".into(),
+                range: node.text_range(),
+                id: SyntheticTokenId(idx),
+            };
+            replace.insert(node.clone().into(), vec![replacement]);
+            preorder.skip_subtree();
+            continue;
+        }
+
+        // In some other situations, we can fix things by just appending some tokens.
+        let end_range = TextRange::empty(node.text_range().end());
+        match_ast! {
+            match node {
+                ast::FieldExpr(it) => {
+                    if it.name_ref().is_none() {
+                        // incomplete field access: some_expr.|
+                        append.insert(node.clone().into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::IDENT,
+                                text: "__ra_fixup".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                },
+                ast::ExprStmt(it) => {
+                    // expression statement without trailing semicolon
+                    if it.semicolon_token().is_none() {
+                        append.insert(node.clone().into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::SEMICOLON,
+                                text: ";".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                },
+                ast::LetStmt(it) => {
+                    // let statement without trailing semicolon
+                    if it.semicolon_token().is_none() {
+                        append.insert(node.clone().into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::SEMICOLON,
+                                text: ";".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                },
+                ast::IfExpr(it) => {
+                    if it.condition().is_none() {
+                        // insert placeholder token after the if token
+                        let if_token = match it.if_token() {
+                            Some(t) => t,
+                            None => continue,
+                        };
+                        append.insert(if_token.into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::IDENT,
+                                text: "__ra_fixup".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                    if it.then_branch().is_none() {
+                        // missing `{ … }` then-branch: append an empty block
+                        append.insert(node.clone().into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::L_CURLY,
+                                text: "{".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                            SyntheticToken {
+                                kind: SyntaxKind::R_CURLY,
+                                text: "}".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                },
+                // FIXME: foo::
+                // FIXME: for, loop, match etc.
+                _ => (),
+            }
+        }
+    }
+    SyntaxFixups {
+        append,
+        replace,
+        token_map,
+        next_id,
+        undo_info: SyntaxFixupUndoInfo { original },
+    }
+}
+
+fn has_error(node: &SyntaxNode) -> bool {
+ node.children().any(|c| c.kind() == SyntaxKind::ERROR)
+}
+
+/// Only expression nodes can currently be replaced wholesale by a fixup.
+fn can_handle_error(node: &SyntaxNode) -> bool {
+    let kind = node.kind();
+    ast::Expr::can_cast(kind)
+}
+
+fn has_error_to_handle(node: &SyntaxNode) -> bool {
+ has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
+}
+
+/// Undoes the changes recorded by `fixup_syntax` on an expanded token tree:
+/// purely synthetic tokens are dropped, and replacement tokens are swapped
+/// back for the original (error-containing) subtrees they stood in for.
+pub(crate) fn reverse_fixups(
+    tt: &mut Subtree,
+    token_map: &TokenMap,
+    undo_info: &SyntaxFixupUndoInfo,
+) {
+    // Drop tokens tagged with `EMPTY_ID` — they were only inserted to make the
+    // tree parse. The previous `is_none() || != Some(EMPTY_ID)` form was
+    // redundant (`None != Some(EMPTY_ID)` already holds) and queried the token
+    // map twice per element; a single comparison is equivalent.
+    tt.token_trees.retain(|tt| match tt {
+        tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID),
+        tt::TokenTree::Subtree(st) => {
+            st.delimiter.map_or(true, |d| token_map.synthetic_token_id(d.id) != Some(EMPTY_ID))
+        }
+    });
+    // Recurse into subtrees and restore replaced nodes from `undo_info`.
+    tt.token_trees.iter_mut().for_each(|tt| match tt {
+        tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
+        tt::TokenTree::Leaf(leaf) => {
+            if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
+                let original = &undo_info.original[id.0 as usize];
+                *tt = tt::TokenTree::Subtree(original.clone());
+            }
+        }
+    });
+}
+
+// Tests: each fixture is fixed up, its token stream compared against the
+// expectation, re-parsed to check the fixups produce valid syntax, and then
+// the fixups are reversed and compared against the original input.
+#[cfg(test)]
+mod tests {
+    use expect_test::{expect, Expect};
+
+    use super::reverse_fixups;
+
+    #[track_caller]
+    fn check(ra_fixture: &str, mut expect: Expect) {
+        let parsed = syntax::SourceFile::parse(ra_fixture);
+        let fixups = super::fixup_syntax(&parsed.syntax_node());
+        let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+            &parsed.syntax_node(),
+            fixups.token_map,
+            fixups.next_id,
+            fixups.replace,
+            fixups.append,
+        );
+
+        let mut actual = tt.to_string();
+        actual.push('\n');
+
+        expect.indent(false);
+        expect.assert_eq(&actual);
+
+        // the fixed-up tree should be syntactically valid
+        let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
+        assert_eq!(
+            parse.errors(),
+            &[],
+            "parse has syntax errors. parse tree:\n{:#?}",
+            parse.syntax_node()
+        );
+
+        reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+
+        // the fixed-up + reversed version should be equivalent to the original input
+        // (but token IDs don't matter)
+        let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+        assert_eq!(tt.to_string(), original_as_tt.to_string());
+    }
+
+    #[test]
+    fn incomplete_field_expr_1() {
+        check(
+            r#"
+fn foo() {
+    a.
+}
+"#,
+            expect![[r#"
+fn foo () {a . __ra_fixup}
+"#]],
+        )
+    }
+
+    #[test]
+    fn incomplete_field_expr_2() {
+        check(
+            r#"
+fn foo() {
+    a. ;
+}
+"#,
+            expect![[r#"
+fn foo () {a . __ra_fixup ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn incomplete_field_expr_3() {
+        check(
+            r#"
+fn foo() {
+    a. ;
+    bar();
+}
+"#,
+            expect![[r#"
+fn foo () {a . __ra_fixup ; bar () ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn incomplete_let() {
+        check(
+            r#"
+fn foo() {
+    let x = a
+}
+"#,
+            expect![[r#"
+fn foo () {let x = a ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn incomplete_field_expr_in_let() {
+        check(
+            r#"
+fn foo() {
+    let x = a.
+}
+"#,
+            expect![[r#"
+fn foo () {let x = a . __ra_fixup ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn field_expr_before_call() {
+        // another case that easily happens while typing
+        check(
+            r#"
+fn foo() {
+    a.b
+    bar();
+}
+"#,
+            expect![[r#"
+fn foo () {a . b ; bar () ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn extraneous_comma() {
+        check(
+            r#"
+fn foo() {
+    bar(,);
+}
+"#,
+            expect![[r#"
+fn foo () {__ra_fixup ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_1() {
+        check(
+            r#"
+fn foo() {
+    if a
+}
+"#,
+            expect![[r#"
+fn foo () {if a {}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_2() {
+        check(
+            r#"
+fn foo() {
+    if
+}
+"#,
+            expect![[r#"
+fn foo () {if __ra_fixup {}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_3() {
+        check(
+            r#"
+fn foo() {
+    if {}
+}
+"#,
+            // the {} gets parsed as the condition, I think?
+            expect![[r#"
+fn foo () {if {} {}}
+"#]],
+        )
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
new file mode 100644
index 000000000..d60734372
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -0,0 +1,256 @@
+//! This module handles hygiene information.
+//!
+//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
+//! this moment, this is horribly incomplete and handles only `$crate`.
+use std::sync::Arc;
+
+use base_db::CrateId;
+use db::TokenExpander;
+use either::Either;
+use mbe::Origin;
+use syntax::{
+ ast::{self, HasDocComments},
+ AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ db::{self, AstDatabase},
+ fixup,
+ name::{AsName, Name},
+ HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
+};
+
+#[derive(Clone, Debug)]
+pub struct Hygiene {
+ frames: Option<HygieneFrames>,
+}
+
+impl Hygiene {
+ pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
+ Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
+ }
+
+ pub fn new_unhygienic() -> Hygiene {
+ Hygiene { frames: None }
+ }
+
+ // FIXME: this should just return name
+ pub fn name_ref_to_name(
+ &self,
+ db: &dyn AstDatabase,
+ name_ref: ast::NameRef,
+ ) -> Either<Name, CrateId> {
+ if let Some(frames) = &self.frames {
+ if name_ref.text() == "$crate" {
+ if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
+ return Either::Right(krate);
+ }
+ }
+ }
+
+ Either::Left(name_ref.as_name())
+ }
+
+ pub fn local_inner_macros(&self, db: &dyn AstDatabase, path: ast::Path) -> Option<CrateId> {
+ let mut token = path.syntax().first_token()?.text_range();
+ let frames = self.frames.as_ref()?;
+ let mut current = &frames.0;
+
+ loop {
+ let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
+ if origin == Origin::Def {
+ return if current.local_inner {
+ frames.root_crate(db, path.syntax())
+ } else {
+ None
+ };
+ }
+ current = current.call_site.as_ref()?;
+ token = mapped.value;
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+struct HygieneFrames(Arc<HygieneFrame>);
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct HygieneFrame {
+ expansion: Option<HygieneInfo>,
+
+ // Indicate this is a local inner macro
+ local_inner: bool,
+ krate: Option<CrateId>,
+
+ call_site: Option<Arc<HygieneFrame>>,
+ def_site: Option<Arc<HygieneFrame>>,
+}
+
+impl HygieneFrames {
+ fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Self {
+ // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
+ // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
+ HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
+ }
+
+ fn root_crate(&self, db: &dyn AstDatabase, node: &SyntaxNode) -> Option<CrateId> {
+ let mut token = node.first_token()?.text_range();
+ let mut result = self.0.krate;
+ let mut current = self.0.clone();
+
+ while let Some((mapped, origin)) =
+ current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
+ {
+ result = current.krate;
+
+ let site = match origin {
+ Origin::Def => &current.def_site,
+ Origin::Call => &current.call_site,
+ };
+
+ let site = match site {
+ None => break,
+ Some(it) => it,
+ };
+
+ current = site.clone();
+ token = mapped.value;
+ }
+
+ result
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct HygieneInfo {
+ file: MacroFile,
+ /// The start offset of the `macro_rules!` arguments or attribute input.
+ attr_input_or_mac_def_start: Option<InFile<TextSize>>,
+
+ macro_def: Arc<TokenExpander>,
+ macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
+ macro_arg_shift: mbe::Shift,
+ exp_map: Arc<mbe::TokenMap>,
+}
+
+impl HygieneInfo {
+ fn map_ident_up(
+ &self,
+ db: &dyn AstDatabase,
+ token: TextRange,
+ ) -> Option<(InFile<TextRange>, Origin)> {
+ let token_id = self.exp_map.token_by_range(token)?;
+ let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
+
+ let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
+
+ let (token_map, tt) = match &loc.kind {
+ MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
+ Some(unshifted) => {
+ token_id = unshifted;
+ (&attr_args.1, self.attr_input_or_mac_def_start?)
+ }
+ None => (
+ &self.macro_arg.1,
+ InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
+ ),
+ },
+ _ => match origin {
+ mbe::Origin::Call => (
+ &self.macro_arg.1,
+ InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
+ ),
+ mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
+ (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
+ (def_site_token_map, *tt)
+ }
+ _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+ },
+ },
+ };
+
+ let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
+ Some((tt.with_value(range + tt.value), origin))
+ }
+}
+
+fn make_hygiene_info(
+ db: &dyn AstDatabase,
+ macro_file: MacroFile,
+ loc: &MacroCallLoc,
+) -> Option<HygieneInfo> {
+ let def = loc.def.ast_id().left().and_then(|id| {
+ let def_tt = match id.to_node(db) {
+ ast::Macro::MacroRules(mac) => mac.token_tree()?,
+ ast::Macro::MacroDef(mac) => mac.body()?,
+ };
+ Some(InFile::new(id.file_id, def_tt))
+ });
+ let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ let tt = ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .and_then(Either::left)?
+ .token_tree()?;
+ Some(InFile::new(ast_id.file_id, tt))
+ }
+ _ => None,
+ });
+
+ let macro_def = db.macro_def(loc.def).ok()?;
+ let (_, exp_map) = db.parse_macro_expansion(macro_file).value?;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+
+ Some(HygieneInfo {
+ file: macro_file,
+ attr_input_or_mac_def_start: attr_input_or_mac_def
+ .map(|it| it.map(|tt| tt.syntax().text_range().start())),
+ macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+ macro_arg,
+ macro_def,
+ exp_map,
+ })
+}
+
+impl HygieneFrame {
+ pub(crate) fn new(db: &dyn AstDatabase, file_id: HirFileId) -> HygieneFrame {
+ let (info, krate, local_inner) = match file_id.0 {
+ HirFileIdRepr::FileId(_) => (None, None, false),
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let info =
+ make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info));
+ match loc.def.kind {
+ MacroDefKind::Declarative(_) => {
+ (info, Some(loc.def.krate), loc.def.local_inner)
+ }
+ MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
+ MacroDefKind::BuiltInAttr(..) => (info, None, false),
+ MacroDefKind::BuiltInDerive(..) => (info, None, false),
+ MacroDefKind::BuiltInEager(..) => (info, None, false),
+ MacroDefKind::ProcMacro(..) => (info, None, false),
+ }
+ }
+ };
+
+ let (calling_file, info) = match info {
+ None => {
+ return HygieneFrame {
+ expansion: None,
+ local_inner,
+ krate,
+ call_site: None,
+ def_site: None,
+ };
+ }
+ Some(it) => it,
+ };
+
+ let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
+ let call_site = Some(db.hygiene_frame(calling_file));
+
+ HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
new file mode 100644
index 000000000..252293090
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -0,0 +1,1000 @@
+//! `hir_expand` deals with macro expansion.
+//!
+//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax
+//! tree originates not from the text of some `FileId`, but from some macro
+//! expansion.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub mod db;
+pub mod ast_id_map;
+pub mod name;
+pub mod hygiene;
+pub mod builtin_attr_macro;
+pub mod builtin_derive_macro;
+pub mod builtin_fn_macro;
+pub mod proc_macro;
+pub mod quote;
+pub mod eager;
+pub mod mod_path;
+mod fixup;
+
+pub use mbe::{Origin, ValueResult};
+
+use std::{fmt, hash::Hash, iter, sync::Arc};
+
+use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, ProcMacroKind};
+use either::Either;
+use syntax::{
+ algo::{self, skip_trivia_token},
+ ast::{self, AstNode, HasDocComments},
+ Direction, SyntaxNode, SyntaxToken,
+};
+
+use crate::{
+ ast_id_map::FileAstId,
+ builtin_attr_macro::BuiltinAttrExpander,
+ builtin_derive_macro::BuiltinDeriveExpander,
+ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
+ db::TokenExpander,
+ mod_path::ModPath,
+ proc_macro::ProcMacroExpander,
+};
+
+pub type ExpandResult<T> = ValueResult<T, ExpandError>;
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ExpandError {
+ UnresolvedProcMacro(CrateId),
+ Mbe(mbe::ExpandError),
+ Other(Box<str>),
+}
+
+impl From<mbe::ExpandError> for ExpandError {
+ fn from(mbe: mbe::ExpandError) -> Self {
+ Self::Mbe(mbe)
+ }
+}
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"),
+ ExpandError::Mbe(it) => it.fmt(f),
+ ExpandError::Other(it) => f.write_str(it),
+ }
+ }
+}
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust are macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by user), or a `MacroCallId` (source code produced by macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size_of of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// <https://en.wikipedia.org/wiki/String_interning>).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct HirFileId(HirFileIdRepr);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+enum HirFileIdRepr {
+ FileId(FileId),
+ MacroFile(MacroFile),
+}
+
+impl From<FileId> for HirFileId {
+ fn from(id: FileId) -> Self {
+ HirFileId(HirFileIdRepr::FileId(id))
+ }
+}
+
+impl From<MacroFile> for HirFileId {
+ fn from(id: MacroFile) -> Self {
+ HirFileId(HirFileIdRepr::MacroFile(id))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroFile {
+ pub macro_call_id: MacroCallId,
+}
+
+/// `MacroCallId` identifies a particular macro invocation, like
+/// `println!("Hello, {}", world)`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroCallId(salsa::InternId);
+impl_intern_key!(MacroCallId);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCallLoc {
+ pub def: MacroDefId,
+ pub(crate) krate: CrateId,
+ eager: Option<EagerCallInfo>,
+ pub kind: MacroCallKind,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroDefId {
+ pub krate: CrateId,
+ pub kind: MacroDefKind,
+ pub local_inner: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroDefKind {
+ Declarative(AstId<ast::Macro>),
+ BuiltIn(BuiltinFnLikeExpander, AstId<ast::Macro>),
+ // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
+ BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>),
+ BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>),
+ BuiltInEager(EagerExpander, AstId<ast::Macro>),
+ ProcMacro(ProcMacroExpander, ProcMacroKind, AstId<ast::Fn>),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+struct EagerCallInfo {
+ /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
+ arg_or_expansion: Arc<tt::Subtree>,
+ included_file: Option<FileId>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum MacroCallKind {
+ FnLike {
+ ast_id: AstId<ast::MacroCall>,
+ expand_to: ExpandTo,
+ },
+ Derive {
+ ast_id: AstId<ast::Adt>,
+ /// Syntactical index of the invoking `#[derive]` attribute.
+ ///
+ /// Outer attributes are counted first, then inner attributes. This does not support
+ /// out-of-line modules, which may have attributes spread across 2 files!
+ derive_attr_index: u32,
+ /// Index of the derive macro in the derive attribute
+ derive_index: u32,
+ },
+ Attr {
+ ast_id: AstId<ast::Item>,
+ attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
+ /// Syntactical index of the invoking `#[attribute]`.
+ ///
+ /// Outer attributes are counted first, then inner attributes. This does not support
+ /// out-of-line modules, which may have attributes spread across 2 files!
+ invoc_attr_index: u32,
+ /// Whether this attribute is the `#[derive]` attribute.
+ is_derive: bool,
+ },
+}
+
+impl HirFileId {
+ /// For macro-expansion files, returns the original source file the
+ /// expansion originated from.
+ pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
+ let mut file_id = self;
+ loop {
+ match file_id.0 {
+ HirFileIdRepr::FileId(id) => break id,
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
+ file_id = match loc.eager {
+ Some(EagerCallInfo { included_file: Some(file), .. }) => file.into(),
+ _ => loc.kind.file_id(),
+ };
+ }
+ }
+ }
+ }
+
+ pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 {
+ let mut level = 0;
+ let mut curr = self;
+ while let HirFileIdRepr::MacroFile(macro_file) = curr.0 {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+ level += 1;
+ curr = loc.kind.file_id();
+ }
+ level
+ }
+
+ /// If this is a macro call, returns the syntax node of the call.
+ pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ Some(loc.kind.to_node(db))
+ }
+ }
+ }
+
+ /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
+ pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> {
+ let mut call = match self.0 {
+ HirFileIdRepr::FileId(_) => return None,
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ db.lookup_intern_macro_call(macro_call_id).kind.to_node(db)
+ }
+ };
+ loop {
+ match call.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)),
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ call = db.lookup_intern_macro_call(macro_call_id).kind.to_node(db);
+ }
+ }
+ }
+ }
+
+ /// Return expansion information if it is a macro-expansion file
+ pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+ let arg_tt = loc.kind.arg(db)?;
+
+ let macro_def = db.macro_def(loc.def).ok()?;
+ let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+
+ let def = loc.def.ast_id().left().and_then(|id| {
+ let def_tt = match id.to_node(db) {
+ ast::Macro::MacroRules(mac) => mac.token_tree()?,
+ ast::Macro::MacroDef(_)
+ if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) =>
+ {
+ return None
+ }
+ ast::Macro::MacroDef(mac) => mac.body()?,
+ };
+ Some(InFile::new(id.file_id, def_tt))
+ });
+ let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ let tt = ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .and_then(Either::left)?
+ .token_tree()?;
+ Some(InFile::new(ast_id.file_id, tt))
+ }
+ _ => None,
+ });
+
+ Some(ExpansionInfo {
+ expanded: InFile::new(self, parse.syntax_node()),
+ arg: InFile::new(loc.kind.file_id(), arg_tt),
+ attr_input_or_mac_def,
+ macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+ macro_arg,
+ macro_def,
+ exp_map,
+ })
+ }
+ }
+ }
+
+ /// Indicate it is macro file generated for builtin derive
+ pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Attr>> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let attr = match loc.def.kind {
+ MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db),
+ _ => return None,
+ };
+ Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
+ }
+ }
+ }
+
+ pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => false,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _))
+ }
+ }
+ }
+
+ /// Return whether this file is an include macro
+ pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.eager, Some(EagerCallInfo { included_file: Some(_), .. }))
+ }
+ _ => false,
+ }
+ }
+
+ /// Return whether this file is an attr macro
+ pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { .. })
+ }
+ _ => false,
+ }
+ }
+
+ /// Return whether this file is the pseudo expansion of the derive attribute.
+ /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+ pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. })
+ }
+ _ => false,
+ }
+ }
+
+ pub fn is_macro(self) -> bool {
+ matches!(self.0, HirFileIdRepr::MacroFile(_))
+ }
+
+ pub fn macro_file(self) -> Option<MacroFile> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(m) => Some(m),
+ }
+ }
+}
+
+impl MacroDefId {
+ pub fn as_lazy_macro(
+ self,
+ db: &dyn db::AstDatabase,
+ krate: CrateId,
+ kind: MacroCallKind,
+ ) -> MacroCallId {
+ db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind })
+ }
+
+ pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
+ let id = match self.kind {
+ MacroDefKind::ProcMacro(.., id) => return Either::Right(id),
+ MacroDefKind::Declarative(id)
+ | MacroDefKind::BuiltIn(_, id)
+ | MacroDefKind::BuiltInAttr(_, id)
+ | MacroDefKind::BuiltInDerive(_, id)
+ | MacroDefKind::BuiltInEager(_, id) => id,
+ };
+ Either::Left(id)
+ }
+
+ pub fn is_proc_macro(&self) -> bool {
+ matches!(self.kind, MacroDefKind::ProcMacro(..))
+ }
+
+ pub fn is_attribute(&self) -> bool {
+ matches!(
+ self.kind,
+ MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
+ )
+ }
+}
+
+// FIXME: attribute indices do not account for `cfg_attr`, which means that we'll strip the whole
+// `cfg_attr` instead of just one of the attributes it expands to
+
+impl MacroCallKind {
+ /// Returns the file containing the macro invocation.
+ fn file_id(&self) -> HirFileId {
+ match *self {
+ MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. }
+ | MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. }
+ | MacroCallKind::Attr { ast_id: InFile { file_id, .. }, .. } => file_id,
+ }
+ }
+
+ pub fn to_node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
+ match self {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
+ MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(*derive_attr_index as usize)
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
+ }
+ MacroCallKind::Attr { ast_id, is_derive: true, invoc_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(*invoc_attr_index as usize)
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
+ }
+ MacroCallKind::Attr { ast_id, .. } => {
+ ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
+ }
+ }
+
+ /// Returns the original file range that best describes the location of this macro call.
+ ///
+ /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
+ pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange {
+ let mut kind = self;
+ let file_id = loop {
+ match kind.file_id().0 {
+ HirFileIdRepr::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ }
+ HirFileIdRepr::FileId(file_id) => break file_id,
+ }
+ };
+
+ let range = match kind {
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Derive { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Attr { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ };
+
+ FileRange { range, file_id }
+ }
+
+ /// Returns the original file range that best describes the location of this macro call.
+ ///
+ /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
+ /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
+ /// get only the specific derive that is being referred to.
+ pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ let mut kind = self;
+ let file_id = loop {
+ match kind.file_id().0 {
+ HirFileIdRepr::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ }
+ HirFileIdRepr::FileId(file_id) => break file_id,
+ }
+ };
+
+ let range = match kind {
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+ // FIXME: should be the range of the macro name, not the whole derive
+ ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(derive_attr_index as usize)
+ .expect("missing derive")
+ .expect_left("derive is a doc comment?")
+ .syntax()
+ .text_range()
+ }
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .expect("missing attribute")
+ .expect_left("attribute macro is a doc comment?")
+ .syntax()
+ .text_range(),
+ };
+
+ FileRange { range, file_id }
+ }
+
+ fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
+ match self {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ Some(ast_id.to_node(db).token_tree()?.syntax().clone())
+ }
+ MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
+ MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
+ }
+ }
+
+ fn expand_to(&self) -> ExpandTo {
+ match self {
+ MacroCallKind::FnLike { expand_to, .. } => *expand_to,
+ MacroCallKind::Derive { .. } => ExpandTo::Items,
+ MacroCallKind::Attr { is_derive: true, .. } => ExpandTo::Statements,
+ MacroCallKind::Attr { .. } => ExpandTo::Items, // is this always correct?
+ }
+ }
+}
+
+impl MacroCallId {
+ pub fn as_file(self) -> HirFileId {
+ MacroFile { macro_call_id: self }.into()
+ }
+}
+
+/// ExpansionInfo mainly describes how to map text range between src and expanded macro
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ExpansionInfo {
+ expanded: InFile<SyntaxNode>,
+ /// The argument TokenTree or item for attributes
+ arg: InFile<SyntaxNode>,
+ /// The `macro_rules!` or attribute input.
+ attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
+
+ macro_def: Arc<TokenExpander>,
+ macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
+ /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
+ /// and as such we need to shift tokens if they are part of an attribute's input instead of their item.
+ macro_arg_shift: mbe::Shift,
+ exp_map: Arc<mbe::TokenMap>,
+}
+
+impl ExpansionInfo {
+ pub fn expanded(&self) -> InFile<SyntaxNode> {
+ self.expanded.clone()
+ }
+
+ pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
+ Some(self.arg.with_value(self.arg.value.parent()?))
+ }
+
+ /// Map a token down from macro input into the macro expansion.
+ ///
+ /// The inner workings of this function differ slightly depending on the type of macro we are dealing with:
+ /// - declarative:
+ /// For declarative macros, we need to accommodate the macro definition site (which acts as a second unchanging input),
+ /// as tokens can be mapped in and out of it.
+ /// To do this we shift all ids in the expansion by the maximum id of the definition site giving us an easy
+ /// way to map all the tokens.
+ /// - attribute:
+ /// Attributes have two different inputs, the input tokentree in the attribute node and the item
+ /// the attribute is annotating. Similarly as for declarative macros we need to do a shift here
+ /// as well. Currently this is done by shifting the attribute input by the maximum id of the item.
+ /// - function-like and derives:
+ /// Both of these only have one simple call site input so no special handling is required here.
+ pub fn map_token_down(
+ &self,
+ db: &dyn db::AstDatabase,
+ item: Option<ast::Item>,
+ token: InFile<&SyntaxToken>,
+ ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
+ assert_eq!(token.file_id, self.arg.file_id);
+ let token_id_in_attr_input = if let Some(item) = item {
+ // check if we are mapping down in an attribute input
+ // this is a special case as attributes can have two inputs
+ let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let loc = db.lookup_intern_macro_call(call_id);
+
+ let token_range = token.value.text_range();
+ match &loc.kind {
+ MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. } => {
+ let attr = item
+ .doc_comments_and_attrs()
+ .nth(*invoc_attr_index as usize)
+ .and_then(Either::left)?;
+ match attr.token_tree() {
+ Some(token_tree)
+ if token_tree.syntax().text_range().contains_range(token_range) =>
+ {
+ let attr_input_start =
+ token_tree.left_delimiter_token()?.text_range().start();
+ let relative_range =
+ token.value.text_range().checked_sub(attr_input_start)?;
+ // shift by the item's tree's max id
+ let token_id = attr_args.1.token_by_range(relative_range)?;
+ let token_id = if *is_derive {
+ // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
+ token_id
+ } else {
+ self.macro_arg_shift.shift(token_id)
+ };
+ Some(token_id)
+ }
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ } else {
+ None
+ };
+
+ let token_id = match token_id_in_attr_input {
+ Some(token_id) => token_id,
+ // the token is not inside an attribute's input so do the lookup in the macro_arg as usual
+ None => {
+ let relative_range =
+ token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
+ let token_id = self.macro_arg.1.token_by_range(relative_range)?;
+ // conditionally shift the id by a declarative macro's definition
+ self.macro_def.map_id_down(token_id)
+ }
+ };
+
+ let tokens = self
+ .exp_map
+ .ranges_by_token(token_id, token.value.kind())
+ .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+
+ Some(tokens.map(move |token| self.expanded.with_value(token)))
+ }
+
+ /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
+ pub fn map_token_up(
+ &self,
+ db: &dyn db::AstDatabase,
+ token: InFile<&SyntaxToken>,
+ ) -> Option<(InFile<SyntaxToken>, Origin)> {
+ // Fetch the id through its text range,
+ let token_id = self.exp_map.token_by_range(token.value.text_range())?;
+ // conditionally unshifting the id to accommodate the macro-rules def site
+ let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
+
+ let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let loc = db.lookup_intern_macro_call(call_id);
+
+ // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
+ let (token_map, tt) = match &loc.kind {
+ MacroCallKind::Attr { attr_args, is_derive: true, .. } => {
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ }
+ MacroCallKind::Attr { attr_args, .. } => {
+ // try unshifting the token id; if unshifting fails, the token resides in the non-item attribute input
+ // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
+ match self.macro_arg_shift.unshift(token_id) {
+ Some(unshifted) => {
+ token_id = unshifted;
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ }
+ None => (&self.macro_arg.1, self.arg.clone()),
+ }
+ }
+ _ => match origin {
+ mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
+ mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
+ (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
+ (def_site_token_map, tt.syntax().cloned())
+ }
+ _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+ },
+ },
+ };
+
+ let range = token_map.first_range_by_token(token_id, token.value.kind())?;
+ let token =
+ tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
+ Some((tt.with_value(token), origin))
+ }
+}
+
+/// `AstId` points to an AST node in any file.
+///
+/// It is stable across reparses, and can be used as salsa key/value.
+pub type AstId<N> = InFile<FileAstId<N>>;
+
+impl<N: AstNode> AstId<N> {
+ pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
+ let root = db.parse_or_expand(self.file_id).unwrap();
+ db.ast_id_map(self.file_id).get(self.value).to_node(&root)
+ }
+}
+
+/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
+///
+/// Typical usages are:
+///
+/// * `InFile<SyntaxNode>` -- syntax node in a file
+/// * `InFile<ast::FnDef>` -- ast node in a file
+/// * `InFile<TextSize>` -- offset in a file
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InFile<T> {
+ pub file_id: HirFileId,
+ pub value: T,
+}
+
+impl<T> InFile<T> {
+ pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
+ InFile { file_id, value }
+ }
+
+ pub fn with_value<U>(&self, value: U) -> InFile<U> {
+ InFile::new(self.file_id, value)
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
+ InFile::new(self.file_id, f(self.value))
+ }
+
+ pub fn as_ref(&self) -> InFile<&T> {
+ self.with_value(&self.value)
+ }
+
+ pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode {
+ db.parse_or_expand(self.file_id).expect("source created from invalid file")
+ }
+}
+
+impl<T: Clone> InFile<&T> {
+ pub fn cloned(&self) -> InFile<T> {
+ self.with_value(self.value.clone())
+ }
+}
+
+impl<T> InFile<Option<T>> {
+ pub fn transpose(self) -> Option<InFile<T>> {
+ let value = self.value?;
+ Some(InFile::new(self.file_id, value))
+ }
+}
+
+impl<'a> InFile<&'a SyntaxNode> {
+ /// Walks up the ancestor chain, crossing macro-expansion boundaries:
+ /// once the root of an expansion's syntax tree is reached, continues from
+ /// the macro call node in the calling file (if any).
+ pub fn ancestors_with_macros(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
+ iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => node.file_id.call_node(db),
+ })
+ }
+
+ /// Skips the attributed item that caused the macro invocation we are climbing up
+ pub fn ancestors_with_macros_skip_attr_item(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => {
+ let parent_node = node.file_id.call_node(db)?;
+ if node.file_id.is_attr_macro(db) {
+ // macro call was an attributed item, skip it
+ // FIXME: does this fail if this is a direct expansion of another macro?
+ parent_node.map(|node| node.parent()).transpose()
+ } else {
+ Some(parent_node)
+ }
+ }
+ };
+ // Note: the first yielded element is already the parent (successor of
+ // `self`), not `self` itself — unlike `ancestors_with_macros` above.
+ iter::successors(succ(&self.cloned()), succ)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ ///
+ /// For attributes and derives, this will point back to the attribute only.
+ /// For the entire item `InFile::use original_file_range_full`.
+ pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some(res) = self.original_file_range_opt(db) {
+ return res;
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
+ match ascend_node_border_tokens(db, self) {
+ Some(InFile { file_id, value: (first, last) }) => {
+ let original_file = file_id.original_file(db);
+ // The covering range of the mapped-up border tokens approximates
+ // the node's range in the original file.
+ let range = first.text_range().cover(last.text_range());
+ if file_id != original_file.into() {
+ // Tokens only ascended into an intermediate macro file, not
+ // all the way to real source — give up rather than report a
+ // range in the wrong file.
+ tracing::error!("Failed mapping up more for {:?}", range);
+ return None;
+ }
+ Some(FileRange { file_id: original_file, range })
+ }
+ // No expansion info: if we are already in a real file, the node's own
+ // range is the answer.
+ _ if !self.file_id.is_macro() => Some(FileRange {
+ file_id: self.file_id.original_file(db),
+ range: self.value.text_range(),
+ }),
+ _ => None,
+ }
+ }
+}
+
+impl InFile<SyntaxToken> {
+ /// Maps this token one step up, out of the current expansion and into its
+ /// call site. Returns `None` if this file has no expansion info.
+ pub fn upmap(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxToken>> {
+ let expansion = self.file_id.expansion_info(db)?;
+ expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some(res) = self.original_file_range_opt(db) {
+ return res;
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
+ match self.file_id.0 {
+ // Already in a real file: the token's own range is the answer.
+ HirFileIdRepr::FileId(file_id) => {
+ Some(FileRange { file_id, range: self.value.text_range() })
+ }
+ HirFileIdRepr::MacroFile(_) => {
+ let expansion = self.file_id.expansion_info(db)?;
+ let InFile { file_id, value } = ascend_call_token(db, &expansion, self)?;
+ let original_file = file_id.original_file(db);
+ // Only succeed if we ascended all the way out of macro files.
+ if file_id != original_file.into() {
+ return None;
+ }
+ Some(FileRange { file_id: original_file, range: value.text_range() })
+ }
+ }
+ }
+
+ /// Ancestors of this token's parent node, crossing macro boundaries.
+ /// Yields nothing if the token has no parent.
+ pub fn ancestors_with_macros(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ self.value.parent().into_iter().flat_map({
+ let file_id = self.file_id;
+ move |parent| InFile::new(file_id, &parent).ancestors_with_macros(db)
+ })
+ }
+}
+
+/// Maps a node's first and last non-trivia tokens up through the macro call
+/// chain. Succeeds only if both border tokens land in the *same* file; the
+/// pair then approximates the node's location at the call site.
+fn ascend_node_border_tokens(
+ db: &dyn db::AstDatabase,
+ InFile { file_id, value: node }: InFile<&SyntaxNode>,
+) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
+ let expansion = file_id.expansion_info(db)?;
+
+ // Skip leading/trailing trivia (whitespace, comments) so the borders are
+ // tokens that actually exist in the expansion's token map.
+ let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next);
+ let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev);
+
+ let first = first_token(node)?;
+ let last = last_token(node)?;
+ let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
+ let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?;
+ (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value)))
+}
+
+/// Repeatedly maps `token` up through nested expansions, one call site at a
+/// time, until it reaches a file with no further expansion info.
+///
+/// Returns `None` either when a mapping step fails, or when the token's
+/// origin stops being `Origin::Call` — i.e. it came from the macro
+/// *definition* rather than the call site, so there is nothing sensible to
+/// point at in the original file.
+fn ascend_call_token(
+ db: &dyn db::AstDatabase,
+ expansion: &ExpansionInfo,
+ token: InFile<SyntaxToken>,
+) -> Option<InFile<SyntaxToken>> {
+ let mut mapping = expansion.map_token_up(db, token.as_ref())?;
+ while let (mapped, Origin::Call) = mapping {
+ match mapped.file_id.expansion_info(db) {
+ // Still inside a macro file: keep ascending.
+ Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
+ // Reached a real file: done.
+ None => return Some(mapped),
+ }
+ }
+ // Origin was `Def` at some step — token originates in the macro definition.
+ None
+}
+
+impl<N: AstNode> InFile<N> {
+ /// All descendant nodes of type `T`, each paired with this value's file id.
+ pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
+ self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
+ }
+
+ /// Tries to find the corresponding `N` node in the file the macro
+ /// expansion was invoked from (i.e. maps an expanded node back to its
+ /// source). Identity for non-macro files.
+ pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option<InFile<N>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
+ // as we don't have node inputs otherwise and therefor can't find an `N` node in the input
+ if !self.file_id.is_macro() {
+ return Some(self);
+ } else if !self.file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ if let Some(InFile { file_id, value: (first, last) }) =
+ ascend_node_border_tokens(db, self.syntax())
+ {
+ // The border tokens must have ascended all the way to a real file.
+ if file_id.is_macro() {
+ let range = first.text_range().cover(last.text_range());
+ tracing::error!("Failed mapping out of macro file for {:?}", range);
+ return None;
+ }
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
+ let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
+ let value = anc.ancestors().find_map(N::cast)?;
+ return Some(InFile::new(file_id, value));
+ }
+ None
+ }
+
+ /// Erases the concrete node type: `InFile<N>` -> `InFile<&SyntaxNode>`.
+ pub fn syntax(&self) -> InFile<&SyntaxNode> {
+ self.with_value(self.value.syntax())
+ }
+}
+
+/// In Rust, macros expand token trees to token trees. When we want to turn a
+/// token tree into an AST node, we need to figure out what kind of AST node we
+/// want: something like `foo` can be a type, an expression, or a pattern.
+///
+/// Naively, one would think that "what this expands to" is a property of a
+/// particular macro: macro `m1` returns an item, while macro `m2` returns an
+/// expression, etc. That's not the case -- macros are polymorphic in the
+/// result, and can expand to any type of the AST node.
+///
+/// What defines the actual AST node is the syntactic context of the macro
+/// invocation. As a contrived example, in `let T![*] = T![*];` the first `T`
+/// expands to a pattern, while the second one expands to an expression.
+///
+/// `ExpandTo` captures this bit of information about a particular macro call
+/// site.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ExpandTo {
+ /// Expansion is parsed as a sequence of statements.
+ Statements,
+ /// Expansion is parsed as a sequence of items.
+ Items,
+ /// Expansion is parsed as a pattern.
+ Pattern,
+ /// Expansion is parsed as a type.
+ Type,
+ /// Expansion is parsed as an expression.
+ Expr,
+}
+
+impl ExpandTo {
+ /// Determines what a macro call expands to purely from the syntactic
+ /// position of the call — i.e. from the kind of its parent (and, for
+ /// statement position, grandparent) node.
+ pub fn from_call_site(call: &ast::MacroCall) -> ExpandTo {
+ use syntax::SyntaxKind::*;
+
+ let syn = call.syntax();
+
+ let parent = match syn.parent() {
+ Some(it) => it,
+ None => return ExpandTo::Statements,
+ };
+
+ // FIXME: macros in statement position are treated as expression statements, they should
+ // probably be their own statement kind. The *grand*parent indicates what's valid.
+ if parent.kind() == MACRO_EXPR
+ && parent
+ .parent()
+ .map_or(true, |p| matches!(p.kind(), EXPR_STMT | STMT_LIST | MACRO_STMTS))
+ {
+ return ExpandTo::Statements;
+ }
+
+ match parent.kind() {
+ MACRO_ITEMS | SOURCE_FILE | ITEM_LIST => ExpandTo::Items,
+ MACRO_STMTS | EXPR_STMT | STMT_LIST => ExpandTo::Statements,
+ MACRO_PAT => ExpandTo::Pattern,
+ MACRO_TYPE => ExpandTo::Type,
+
+ // Any expression context means the call is itself an expression.
+ ARG_LIST | ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BREAK_EXPR | CALL_EXPR | CAST_EXPR
+ | CLOSURE_EXPR | FIELD_EXPR | FOR_EXPR | IF_EXPR | INDEX_EXPR | LET_EXPR
+ | MATCH_ARM | MATCH_EXPR | MATCH_GUARD | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR
+ | PREFIX_EXPR | RANGE_EXPR | RECORD_EXPR_FIELD | REF_EXPR | RETURN_EXPR | TRY_EXPR
+ | TUPLE_EXPR | WHILE_EXPR | MACRO_EXPR => ExpandTo::Expr,
+ _ => {
+ // Unknown context — fall back to guessing `Items`.
+ ExpandTo::Items
+ }
+ }
+ }
+}
+
+/// Error produced when a macro invocation's path fails to resolve to a macro
+/// definition; carries the path that could not be resolved.
+#[derive(Debug)]
+pub struct UnresolvedMacro {
+ pub path: ModPath,
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
new file mode 100644
index 000000000..fea09521e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -0,0 +1,276 @@
+//! A lowering for `use`-paths (more generally, paths without angle-bracketed segments).
+
+use std::{
+ fmt::{self, Display},
+ iter,
+};
+
+use crate::{
+ db::AstDatabase,
+ hygiene::Hygiene,
+ name::{known, Name},
+};
+use base_db::CrateId;
+use either::Either;
+use smallvec::SmallVec;
+use syntax::{ast, AstNode};
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct ModPath {
+ pub kind: PathKind,
+ segments: SmallVec<[Name; 1]>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct EscapedModPath<'a>(&'a ModPath);
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum PathKind {
+ Plain,
+ /// `self::` is `Super(0)`
+ Super(u8),
+ Crate,
+ /// Absolute path (::foo)
+ Abs,
+ /// `$crate` from macro expansion
+ DollarCrate(CrateId),
+}
+
+impl ModPath {
+ /// Lowers an AST path to a `ModPath`, resolving `$crate` and raw tokens
+ /// through `hygiene`. Returns `None` for paths that are not valid in
+ /// `use`-like position (e.g. with generic arguments).
+ pub fn from_src(db: &dyn AstDatabase, path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
+ convert_path(db, None, path, hygiene)
+ }
+
+ /// Builds a path from an explicit kind plus plain-name segments.
+ pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
+ let segments = segments.into_iter().collect();
+ ModPath { kind, segments }
+ }
+
+ /// Creates a `ModPath` from a `PathKind`, with no extra path segments.
+ pub const fn from_kind(kind: PathKind) -> ModPath {
+ ModPath { kind, segments: SmolStr::new_const() }
+ }
+
+ pub fn segments(&self) -> &[Name] {
+ &self.segments
+ }
+
+ pub fn push_segment(&mut self, segment: Name) {
+ self.segments.push(segment);
+ }
+
+ pub fn pop_segment(&mut self) -> Option<Name> {
+ self.segments.pop()
+ }
+
+ /// Returns the number of segments in the path (counting special segments like `$crate` and
+ /// `super`).
+ pub fn len(&self) -> usize {
+ self.segments.len()
+ + match self.kind {
+ PathKind::Plain => 0,
+ // Each `super` is a segment; `self` is `Super(0)` and adds none.
+ PathKind::Super(i) => i as usize,
+ PathKind::Crate => 1,
+ PathKind::Abs => 0,
+ PathKind::DollarCrate(_) => 1,
+ }
+ }
+
+ /// `true` for a plain single-identifier path like `foo`.
+ pub fn is_ident(&self) -> bool {
+ self.as_ident().is_some()
+ }
+
+ /// `true` for the bare path `self` (encoded as `Super(0)` with no segments).
+ pub fn is_self(&self) -> bool {
+ self.kind == PathKind::Super(0) && self.segments.is_empty()
+ }
+
+ /// `true` for the bare path `Self`.
+ #[allow(non_snake_case)]
+ pub fn is_Self(&self) -> bool {
+ self.kind == PathKind::Plain
+ && matches!(&*self.segments, [name] if *name == known::SELF_TYPE)
+ }
+
+ /// If this path is a single identifier, like `foo`, return its name.
+ pub fn as_ident(&self) -> Option<&Name> {
+ if self.kind != PathKind::Plain {
+ return None;
+ }
+
+ match &*self.segments {
+ [name] => Some(name),
+ _ => None,
+ }
+ }
+
+ /// Display adaptor that prefixes raw-identifier segments with `r#`.
+ pub fn escaped(&self) -> EscapedModPath<'_> {
+ EscapedModPath(self)
+ }
+
+ /// Shared `Display` implementation for the plain and escaped renderings;
+ /// `escaped` selects whether segments are printed with `r#` prefixes.
+ fn _fmt(&self, f: &mut fmt::Formatter<'_>, escaped: bool) -> fmt::Result {
+ let mut first_segment = true;
+ let mut add_segment = |s| -> fmt::Result {
+ if !first_segment {
+ f.write_str("::")?;
+ }
+ first_segment = false;
+ f.write_str(s)?;
+ Ok(())
+ };
+ match self.kind {
+ PathKind::Plain => {}
+ PathKind::Super(0) => add_segment("self")?,
+ PathKind::Super(n) => {
+ for _ in 0..n {
+ add_segment("super")?;
+ }
+ }
+ PathKind::Crate => add_segment("crate")?,
+ // Empty first segment makes the next `::` render as a leading `::`.
+ PathKind::Abs => add_segment("")?,
+ PathKind::DollarCrate(_) => add_segment("$crate")?,
+ }
+ for segment in &self.segments {
+ if !first_segment {
+ f.write_str("::")?;
+ }
+ first_segment = false;
+ if escaped {
+ segment.escaped().fmt(f)?
+ } else {
+ segment.fmt(f)?
+ };
+ }
+ Ok(())
+ }
+}
+
+impl Display for ModPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self._fmt(f, false)
+ }
+}
+
+impl<'a> Display for EscapedModPath<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0._fmt(f, true)
+ }
+}
+
+impl From<Name> for ModPath {
+ fn from(name: Name) -> ModPath {
+ ModPath::from_segments(PathKind::Plain, iter::once(name))
+ }
+}
+
+/// Recursively lowers an AST path (qualifier-first) into a `ModPath`.
+///
+/// `prefix` accumulates the already-lowered qualifier. Returns `None` for
+/// path shapes that are invalid in import position (generic args, or special
+/// segments like `crate`/`self` appearing after a qualifier).
+fn convert_path(
+ db: &dyn AstDatabase,
+ prefix: Option<ModPath>,
+ path: ast::Path,
+ hygiene: &Hygiene,
+) -> Option<ModPath> {
+ // Lower the qualifier first so segments end up in source order.
+ let prefix = match path.qualifier() {
+ Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
+ None => prefix,
+ };
+
+ let segment = path.segment()?;
+ let mut mod_path = match segment.kind()? {
+ ast::PathSegmentKind::Name(name_ref) => {
+ match hygiene.name_ref_to_name(db, name_ref) {
+ Either::Left(name) => {
+ // no type args in use
+ let mut res = prefix.unwrap_or_else(|| {
+ // A leading `::` (only possible on the first segment)
+ // makes the path absolute.
+ ModPath::from_kind(
+ segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+ )
+ });
+ res.segments.push(name);
+ res
+ }
+ Either::Right(crate_id) => {
+ // Hygiene resolved this name to `$crate`.
+ return Some(ModPath::from_segments(
+ PathKind::DollarCrate(crate_id),
+ iter::empty(),
+ ))
+ }
+ }
+ }
+ ast::PathSegmentKind::SelfTypeKw => {
+ // `Self` must be the first segment.
+ if prefix.is_some() {
+ return None;
+ }
+ ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE))
+ }
+ ast::PathSegmentKind::CrateKw => {
+ if prefix.is_some() {
+ return None;
+ }
+ ModPath::from_segments(PathKind::Crate, iter::empty())
+ }
+ ast::PathSegmentKind::SelfKw => {
+ if prefix.is_some() {
+ return None;
+ }
+ // `self::` is encoded as `Super(0)`.
+ ModPath::from_segments(PathKind::Super(0), iter::empty())
+ }
+ ast::PathSegmentKind::SuperKw => {
+ // `super` may only follow other `super`s (or start the path).
+ let nested_super_count = match prefix.map(|p| p.kind) {
+ Some(PathKind::Super(n)) => n,
+ Some(_) => return None,
+ None => 0,
+ };
+
+ ModPath::from_segments(PathKind::Super(nested_super_count + 1), iter::empty())
+ }
+ ast::PathSegmentKind::Type { .. } => {
+ // not allowed in imports
+ return None;
+ }
+ };
+
+ // handle local_inner_macros :
+ // Basically, even in rustc it is quite hacky:
+ // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
+ // We follow what it did anyway :)
+ if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
+ if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
+ if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
+ mod_path.kind = PathKind::DollarCrate(crate_id);
+ }
+ }
+ }
+
+ Some(mod_path)
+}
+
+pub use crate::name as __name;
+
+#[macro_export]
+macro_rules! __known_path {
+ (core::iter::IntoIterator) => {};
+ (core::iter::Iterator) => {};
+ (core::result::Result) => {};
+ (core::option::Option) => {};
+ (core::ops::Range) => {};
+ (core::ops::RangeFrom) => {};
+ (core::ops::RangeFull) => {};
+ (core::ops::RangeTo) => {};
+ (core::ops::RangeToInclusive) => {};
+ (core::ops::RangeInclusive) => {};
+ (core::future::Future) => {};
+ (core::ops::Try) => {};
+ ($path:path) => {
+ compile_error!("Please register your known path in the path module")
+ };
+}
+
+#[macro_export]
+macro_rules! __path {
+ ($start:ident $(:: $seg:ident)*) => ({
+ $crate::__known_path!($start $(:: $seg)*);
+ $crate::mod_path::ModPath::from_segments($crate::mod_path::PathKind::Abs, vec![
+ $crate::mod_path::__name![$start], $($crate::mod_path::__name![$seg],)*
+ ])
+ });
+}
+
+pub use crate::__path as path;
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
new file mode 100644
index 000000000..85b0a7735
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -0,0 +1,433 @@
+//! See [`Name`].
+
+use std::fmt;
+
+use syntax::{ast, SmolStr, SyntaxKind};
+
+/// `Name` is a wrapper around string, which is used in hir for both references
+/// and declarations. In theory, names should also carry hygiene info, but we are
+/// not there yet!
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Name(Repr);
+
+/// `EscapedName` will add a prefix "r#" to the wrapped `Name` when it is a raw identifier
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct EscapedName<'a>(&'a Name);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+enum Repr {
+ Text(SmolStr),
+ TupleField(usize),
+}
+
+impl fmt::Display for Name {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.0 {
+ Repr::Text(text) => fmt::Display::fmt(&text, f),
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
+}
+
+/// Returns `true` if `name` needs an `r#` prefix to be usable as an
+/// identifier: it is a keyword, excluding the path keywords (`self`, `crate`,
+/// `super`, `Self`) which cannot be raw identifiers at all.
+fn is_raw_identifier(name: &str) -> bool {
+ let is_keyword = SyntaxKind::from_keyword(name).is_some();
+ is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
+}
+
+impl<'a> fmt::Display for EscapedName<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.0 .0 {
+ Repr::Text(text) => {
+ if is_raw_identifier(text) {
+ write!(f, "r#{}", &text)
+ } else {
+ fmt::Display::fmt(&text, f)
+ }
+ }
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
+}
+
+impl<'a> EscapedName<'a> {
+ /// `true` if rendering this name adds an `r#` prefix (i.e. the underlying
+ /// text is a keyword). Tuple-field names are never escaped.
+ pub fn is_escaped(&self) -> bool {
+ match &self.0 .0 {
+ Repr::Text(it) => is_raw_identifier(&it),
+ Repr::TupleField(_) => false,
+ }
+ }
+
+ /// Returns the textual representation of this name as a [`SmolStr`].
+ /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in
+ /// the general case.
+ pub fn to_smol_str(&self) -> SmolStr {
+ match &self.0 .0 {
+ Repr::Text(it) => {
+ if is_raw_identifier(&it) {
+ SmolStr::from_iter(["r#", &it])
+ } else {
+ // Not a keyword: no escaping needed, share the string.
+ it.clone()
+ }
+ }
+ Repr::TupleField(it) => SmolStr::new(&it.to_string()),
+ }
+ }
+}
+
+impl Name {
+ /// Note: this is private to make creating name from random string hard.
+ /// Hopefully, this should allow us to integrate hygiene cleaner in the
+ /// future, and to switch to interned representation of names.
+ const fn new_text(text: SmolStr) -> Name {
+ Name(Repr::Text(text))
+ }
+
+ /// Name of a positional tuple field, e.g. the `0` in `tuple.0`.
+ pub fn new_tuple_field(idx: usize) -> Name {
+ Name(Repr::TupleField(idx))
+ }
+
+ /// Name of a lifetime, including the leading `'` (e.g. `'a`).
+ pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
+ Self::new_text(lt.text().into())
+ }
+
+ /// Shortcut to create inline plain text name
+ const fn new_inline(text: &str) -> Name {
+ Name::new_text(SmolStr::new_inline(text))
+ }
+
+ /// Resolve a name from the text of token.
+ fn resolve(raw_text: &str) -> Name {
+ // Raw identifiers (`r#foo`) are stored without the `r#` prefix; it is
+ // re-added on display by `EscapedName` when needed.
+ match raw_text.strip_prefix("r#") {
+ Some(text) => Name::new_text(SmolStr::new(text)),
+ None => Name::new_text(raw_text.into()),
+ }
+ }
+
+ /// A fake name for things missing in the source code.
+ ///
+ /// For example, `impl Foo for {}` should be treated as a trait impl for a
+ /// type with a missing name. Similarly, `struct S { : u32 }` should have a
+ /// single field with a missing name.
+ ///
+ /// Ideally, we want a `gensym` semantics for missing names -- each missing
+ /// name is equal only to itself. It's not clear how to implement this in
+ /// salsa though, so we punt on that bit for a moment.
+ pub const fn missing() -> Name {
+ Name::new_inline("[missing name]")
+ }
+
+ /// Returns the tuple index this name represents if it is a tuple field.
+ pub fn as_tuple_index(&self) -> Option<usize> {
+ match self.0 {
+ Repr::TupleField(idx) => Some(idx),
+ _ => None,
+ }
+ }
+
+ /// Returns the text this name represents if it isn't a tuple field.
+ pub fn as_text(&self) -> Option<SmolStr> {
+ match &self.0 {
+ Repr::Text(it) => Some(it.clone()),
+ _ => None,
+ }
+ }
+
+ /// Returns the textual representation of this name as a [`SmolStr`].
+ /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in
+ /// the general case.
+ pub fn to_smol_str(&self) -> SmolStr {
+ match &self.0 {
+ Repr::Text(it) => it.clone(),
+ Repr::TupleField(it) => SmolStr::new(&it.to_string()),
+ }
+ }
+
+ /// Display adaptor that renders raw identifiers with an `r#` prefix.
+ pub fn escaped(&self) -> EscapedName<'_> {
+ EscapedName(self)
+ }
+}
+
+pub trait AsName {
+ fn as_name(&self) -> Name;
+}
+
+impl AsName for ast::NameRef {
+ fn as_name(&self) -> Name {
+ match self.as_tuple_field() {
+ Some(idx) => Name::new_tuple_field(idx),
+ None => Name::resolve(&self.text()),
+ }
+ }
+}
+
+impl AsName for ast::Name {
+ fn as_name(&self) -> Name {
+ Name::resolve(&self.text())
+ }
+}
+
+impl AsName for ast::NameOrNameRef {
+ fn as_name(&self) -> Name {
+ match self {
+ ast::NameOrNameRef::Name(it) => it.as_name(),
+ ast::NameOrNameRef::NameRef(it) => it.as_name(),
+ }
+ }
+}
+
+impl AsName for tt::Ident {
+ fn as_name(&self) -> Name {
+ Name::resolve(&self.text)
+ }
+}
+
+impl AsName for ast::FieldKind {
+ fn as_name(&self) -> Name {
+ match self {
+ ast::FieldKind::Name(nr) => nr.as_name(),
+ ast::FieldKind::Index(idx) => {
+ let idx = idx.text().parse::<usize>().unwrap_or(0);
+ Name::new_tuple_field(idx)
+ }
+ }
+ }
+}
+
+impl AsName for base_db::Dependency {
+ fn as_name(&self) -> Name {
+ Name::new_text(SmolStr::new(&*self.name))
+ }
+}
+
+pub mod known {
+ macro_rules! known_names {
+ ($($ident:ident),* $(,)?) => {
+ $(
+ #[allow(bad_style)]
+ pub const $ident: super::Name =
+ super::Name::new_inline(stringify!($ident));
+ )*
+ };
+ }
+
+ known_names!(
+ // Primitives
+ isize,
+ i8,
+ i16,
+ i32,
+ i64,
+ i128,
+ usize,
+ u8,
+ u16,
+ u32,
+ u64,
+ u128,
+ f32,
+ f64,
+ bool,
+ char,
+ str,
+ // Special names
+ macro_rules,
+ doc,
+ cfg,
+ cfg_attr,
+ register_attr,
+ register_tool,
+ // Components of known path (value or mod name)
+ std,
+ core,
+ alloc,
+ iter,
+ ops,
+ future,
+ result,
+ boxed,
+ option,
+ prelude,
+ rust_2015,
+ rust_2018,
+ rust_2021,
+ v1,
+ // Components of known path (type name)
+ Iterator,
+ IntoIterator,
+ Item,
+ Try,
+ Ok,
+ Future,
+ Result,
+ Option,
+ Output,
+ Target,
+ Box,
+ RangeFrom,
+ RangeFull,
+ RangeInclusive,
+ RangeToInclusive,
+ RangeTo,
+ Range,
+ Neg,
+ Not,
+ None,
+ Index,
+ // Components of known path (function name)
+ filter_map,
+ next,
+ iter_mut,
+ len,
+ is_empty,
+ new,
+ // Builtin macros
+ asm,
+ assert,
+ column,
+ compile_error,
+ concat_idents,
+ concat_bytes,
+ concat,
+ const_format_args,
+ core_panic,
+ env,
+ file,
+ format_args_nl,
+ format_args,
+ global_asm,
+ include_bytes,
+ include_str,
+ include,
+ line,
+ llvm_asm,
+ log_syntax,
+ module_path,
+ option_env,
+ std_panic,
+ stringify,
+ trace_macros,
+ unreachable,
+ // Builtin derives
+ Copy,
+ Clone,
+ Default,
+ Debug,
+ Hash,
+ Ord,
+ PartialOrd,
+ Eq,
+ PartialEq,
+ // Builtin attributes
+ bench,
+ cfg_accessible,
+ cfg_eval,
+ crate_type,
+ derive,
+ global_allocator,
+ test,
+ test_case,
+ recursion_limit,
+ // Safe intrinsics
+ abort,
+ add_with_overflow,
+ black_box,
+ bitreverse,
+ bswap,
+ caller_location,
+ ctlz,
+ ctpop,
+ cttz,
+ discriminant_value,
+ forget,
+ likely,
+ maxnumf32,
+ maxnumf64,
+ min_align_of_val,
+ min_align_of,
+ minnumf32,
+ minnumf64,
+ mul_with_overflow,
+ needs_drop,
+ ptr_guaranteed_eq,
+ ptr_guaranteed_ne,
+ rotate_left,
+ rotate_right,
+ rustc_peek,
+ saturating_add,
+ saturating_sub,
+ size_of_val,
+ size_of,
+ sub_with_overflow,
+ type_id,
+ type_name,
+ unlikely,
+ variant_count,
+ wrapping_add,
+ wrapping_mul,
+ wrapping_sub,
+ // known methods of lang items
+ eq,
+ ne,
+ ge,
+ gt,
+ le,
+ lt,
+ // lang items
+ add_assign,
+ add,
+ bitand_assign,
+ bitand,
+ bitor_assign,
+ bitor,
+ bitxor_assign,
+ bitxor,
+ deref_mut,
+ deref,
+ div_assign,
+ div,
+ fn_mut,
+ fn_once,
+ future_trait,
+ index,
+ index_mut,
+ mul_assign,
+ mul,
+ neg,
+ not,
+ owned_box,
+ partial_ord,
+ r#fn,
+ rem_assign,
+ rem,
+ shl_assign,
+ shl,
+ shr_assign,
+ shr,
+ sub_assign,
+ sub,
+ );
+
+ // self/Self cannot be used as an identifier
+ pub const SELF_PARAM: super::Name = super::Name::new_inline("self");
+ pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
+
+ pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
+
+ #[macro_export]
+ macro_rules! name {
+ (self) => {
+ $crate::name::known::SELF_PARAM
+ };
+ (Self) => {
+ $crate::name::known::SELF_TYPE
+ };
+ ('static) => {
+ $crate::name::known::STATIC_LIFETIME
+ };
+ ($ident:ident) => {
+ $crate::name::known::$ident
+ };
+ }
+}
+
+pub use crate::name;
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
new file mode 100644
index 000000000..5afdcc0e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -0,0 +1,81 @@
+//! Proc Macro Expander stub
+
+use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
+use stdx::never;
+
+use crate::{db::AstDatabase, ExpandError, ExpandResult};
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
+pub struct ProcMacroExpander {
+ krate: CrateId,
+ proc_macro_id: Option<ProcMacroId>,
+}
+
+impl ProcMacroExpander {
+ /// Expander backed by a real proc-macro from `krate`.
+ pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> Self {
+ Self { krate, proc_macro_id: Some(proc_macro_id) }
+ }
+
+ /// Placeholder expander for a proc-macro that could not be loaded; calling
+ /// `expand` on it always yields `ExpandError::UnresolvedProcMacro`.
+ pub fn dummy(krate: CrateId) -> Self {
+ // FIXME: Should store the name for better errors
+ Self { krate, proc_macro_id: None }
+ }
+
+ pub fn is_dummy(&self) -> bool {
+ self.proc_macro_id.is_none()
+ }
+
+ /// Runs the proc-macro on `tt` (with `attr_arg` as the attribute input for
+ /// attribute macros), using the environment of `calling_crate`.
+ pub fn expand(
+ self,
+ db: &dyn AstDatabase,
+ calling_crate: CrateId,
+ tt: &tt::Subtree,
+ attr_arg: Option<&tt::Subtree>,
+ ) -> ExpandResult<tt::Subtree> {
+ match self.proc_macro_id {
+ Some(id) => {
+ let krate_graph = db.crate_graph();
+ let proc_macros = match &krate_graph[self.krate].proc_macro {
+ Ok(proc_macros) => proc_macros,
+ Err(_) => {
+ // Inconsistent state: an id exists but the crate has no
+ // proc macros. Report rather than panic.
+ never!("Non-dummy expander even though there are no proc macros");
+ return ExpandResult::only_err(ExpandError::Other("Internal error".into()));
+ }
+ };
+ // `id` is an index into the crate's proc-macro list.
+ let proc_macro = match proc_macros.get(id.0 as usize) {
+ Some(proc_macro) => proc_macro,
+ None => {
+ never!(
+ "Proc macro index out of bounds: the length is {} but the index is {}",
+ proc_macros.len(),
+ id.0
+ );
+ return ExpandResult::only_err(ExpandError::Other("Internal error".into()));
+ }
+ };
+
+ // Proc macros have access to the environment variables of the invoking crate.
+ let env = &krate_graph[calling_crate].env;
+ match proc_macro.expander.expand(tt, attr_arg, env) {
+ Ok(t) => ExpandResult::ok(t),
+ Err(err) => match err {
+ // Don't discard the item in case something unexpected happened while expanding attributes
+ ProcMacroExpansionError::System(text)
+ if proc_macro.kind == ProcMacroKind::Attr =>
+ {
+ // Keep the original input as the expansion result so the
+ // item doesn't vanish; surface the error alongside it.
+ ExpandResult {
+ value: tt.clone(),
+ err: Some(ExpandError::Other(text.into())),
+ }
+ }
+ ProcMacroExpansionError::System(text)
+ | ProcMacroExpansionError::Panic(text) => {
+ ExpandResult::only_err(ExpandError::Other(text.into()))
+ }
+ },
+ }
+ }
+ None => ExpandResult::only_err(ExpandError::UnresolvedProcMacro(self.krate)),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
new file mode 100644
index 000000000..82f410ecd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
@@ -0,0 +1,284 @@
+//! A simplified version of quote-crate like quasi quote macro
+
+// A helper macro quote macro
+// FIXME:
+// 1. Not all puncts are handled
+// 2. #()* pattern repetition not supported now
+// * But we can do it manually, see `test_quote_derive_copy_hack`
+#[doc(hidden)]
+#[macro_export]
+macro_rules! __quote {
+ () => {
+ Vec::<tt::TokenTree>::new()
+ };
+
+ ( @SUBTREE $delim:ident $($tt:tt)* ) => {
+ {
+ let children = $crate::__quote!($($tt)*);
+ tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::$delim,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: $crate::quote::IntoTt::to_tokens(children),
+ }
+ }
+ };
+
+ ( @PUNCT $first:literal ) => {
+ {
+ vec![
+ tt::Leaf::Punct(tt::Punct {
+ char: $first,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }).into()
+ ]
+ }
+ };
+
+ ( @PUNCT $first:literal, $sec:literal ) => {
+ {
+ vec![
+ tt::Leaf::Punct(tt::Punct {
+ char: $first,
+ spacing: tt::Spacing::Joint,
+ id: tt::TokenId::unspecified(),
+ }).into(),
+ tt::Leaf::Punct(tt::Punct {
+ char: $sec,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }).into()
+ ]
+ }
+ };
+
+ // hash variable
+ ( # $first:ident $($tail:tt)* ) => {
+ {
+ let token = $crate::quote::ToTokenTree::to_token($first);
+ let mut tokens = vec![token.into()];
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+
+ ( ## $first:ident $($tail:tt)* ) => {
+ {
+ let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<tt::TokenTree>>();
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+
+ // Brace
+ ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
+ // Bracket
+ ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
+ // Parenthesis
+ ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
+
+ // Literal
+ ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
+ // Ident
+ ( $tt:ident ) => {
+ vec![ {
+ tt::Leaf::Ident(tt::Ident {
+ text: stringify!($tt).into(),
+ id: tt::TokenId::unspecified(),
+ }).into()
+ }]
+ };
+
+ // Puncts
+ // FIXME: Not all puncts are handled
+ ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
+ ( & ) => {$crate::__quote!(@PUNCT '&')};
+ ( , ) => {$crate::__quote!(@PUNCT ',')};
+ ( : ) => {$crate::__quote!(@PUNCT ':')};
+ ( ; ) => {$crate::__quote!(@PUNCT ';')};
+ ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
+ ( . ) => {$crate::__quote!(@PUNCT '.')};
+ ( < ) => {$crate::__quote!(@PUNCT '<')};
+ ( > ) => {$crate::__quote!(@PUNCT '>')};
+ ( ! ) => {$crate::__quote!(@PUNCT '!')};
+
+ ( $first:tt $($tail:tt)+ ) => {
+ {
+ let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+}
+
+/// FIXME:
+/// It probably should implement in proc-macro
+#[macro_export]
+macro_rules! quote {
+ ( $($tt:tt)* ) => {
+ $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
+ }
+}
+
+pub(crate) trait IntoTt {
+ fn to_subtree(self) -> tt::Subtree;
+ fn to_tokens(self) -> Vec<tt::TokenTree>;
+}
+
+impl IntoTt for Vec<tt::TokenTree> {
+ fn to_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self }
+ }
+
+ fn to_tokens(self) -> Vec<tt::TokenTree> {
+ self
+ }
+}
+
+impl IntoTt for tt::Subtree {
+ fn to_subtree(self) -> tt::Subtree {
+ self
+ }
+
+ fn to_tokens(self) -> Vec<tt::TokenTree> {
+ vec![tt::TokenTree::Subtree(self)]
+ }
+}
+
+pub(crate) trait ToTokenTree {
+ fn to_token(self) -> tt::TokenTree;
+}
+
+impl ToTokenTree for tt::TokenTree {
+ fn to_token(self) -> tt::TokenTree {
+ self
+ }
+}
+
+impl ToTokenTree for tt::Subtree {
+ fn to_token(self) -> tt::TokenTree {
+ self.into()
+ }
+}
+
+macro_rules! impl_to_to_tokentrees {
+ ($($ty:ty => $this:ident $im:block);*) => {
+ $(
+ impl ToTokenTree for $ty {
+ fn to_token($this) -> tt::TokenTree {
+ let leaf: tt::Leaf = $im.into();
+ leaf.into()
+ }
+ }
+
+ impl ToTokenTree for &$ty {
+ fn to_token($this) -> tt::TokenTree {
+ let leaf: tt::Leaf = $im.clone().into();
+ leaf.into()
+ }
+ }
+ )*
+ }
+}
+
+impl_to_to_tokentrees! {
+ u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ bool => self { tt::Ident{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ tt::Leaf => self { self };
+ tt::Literal => self { self };
+ tt::Ident => self { self };
+ tt::Punct => self { self };
+ &str => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}};
+ String => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}}
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn test_quote_delimiters() {
+ assert_eq!(quote!({}).to_string(), "{}");
+ assert_eq!(quote!(()).to_string(), "()");
+ assert_eq!(quote!([]).to_string(), "[]");
+ }
+
+ #[test]
+ fn test_quote_idents() {
+ assert_eq!(quote!(32).to_string(), "32");
+ assert_eq!(quote!(struct).to_string(), "struct");
+ }
+
+ #[test]
+ fn test_quote_hash_simple_literal() {
+ let a = 20;
+ assert_eq!(quote!(#a).to_string(), "20");
+ let s: String = "hello".into();
+ assert_eq!(quote!(#s).to_string(), "\"hello\"");
+ }
+
+ fn mk_ident(name: &str) -> tt::Ident {
+ tt::Ident { text: name.into(), id: tt::TokenId::unspecified() }
+ }
+
+ #[test]
+ fn test_quote_hash_token_tree() {
+ let a = mk_ident("hello");
+
+ let quoted = quote!(#a);
+ assert_eq!(quoted.to_string(), "hello");
+ let t = format!("{:?}", quoted);
+ assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295");
+ }
+
+ #[test]
+ fn test_quote_simple_derive_copy() {
+ let name = mk_ident("Foo");
+
+ let quoted = quote! {
+ impl Clone for #name {
+ fn clone(&self) -> Self {
+ Self {}
+ }
+ }
+ };
+
+ assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {}}}");
+ }
+
+ #[test]
+ fn test_quote_derive_copy_hack() {
+ // Assume the given struct is:
+ // struct Foo {
+ // name: String,
+ // id: u32,
+ // }
+ let struct_name = mk_ident("Foo");
+ let fields = [mk_ident("name"), mk_ident("id")];
+ let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
+
+ let list = tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::Brace,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: fields.collect(),
+ };
+
+ let quoted = quote! {
+ impl Clone for #struct_name {
+ fn clone(&self) -> Self {
+ Self #list
+ }
+ }
+ };
+
+ assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
new file mode 100644
index 000000000..5cd444c1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -0,0 +1,44 @@
+[package]
+name = "hir-ty"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.3"
+arrayvec = "0.7.2"
+smallvec = "1.9.0"
+ena = "0.14.0"
+tracing = "0.1.35"
+rustc-hash = "1.1.0"
+scoped-tls = "1.0.0"
+chalk-solve = { version = "0.83.0", default-features = false }
+chalk-ir = "0.83.0"
+chalk-recursive = { version = "0.83.0", default-features = false }
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+once_cell = "1.12.0"
+typed-arena = "2.0.1"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
+tracing = "0.1.35"
+tracing-subscriber = { version = "0.3.14", default-features = false, features = [
+ "env-filter",
+ "registry",
+] }
+tracing-tree = "0.2.1"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
new file mode 100644
index 000000000..b6f226dbf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
@@ -0,0 +1,145 @@
+//! In certain situations, rust automatically inserts derefs as necessary: for
+//! example, field accesses `foo.bar` still work when `foo` is actually a
+//! reference to a type with the field `bar`. This is an approximation of the
+//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
+
+use std::sync::Arc;
+
+use chalk_ir::cast::Cast;
+use hir_expand::name::name;
+use limit::Limit;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt,
+ TraitEnvironment, Ty, TyBuilder, TyKind,
+};
+
+static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10);
+
+pub(crate) enum AutoderefKind {
+ Builtin,
+ Overloaded,
+}
+
+pub(crate) struct Autoderef<'a, 'db> {
+ pub(crate) table: &'a mut InferenceTable<'db>,
+ ty: Ty,
+ at_start: bool,
+ steps: Vec<(AutoderefKind, Ty)>,
+}
+
+impl<'a, 'db> Autoderef<'a, 'db> {
+ pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self {
+ let ty = table.resolve_ty_shallow(&ty);
+ Autoderef { table, ty, at_start: true, steps: Vec::new() }
+ }
+
+ pub(crate) fn step_count(&self) -> usize {
+ self.steps.len()
+ }
+
+ pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] {
+ &self.steps
+ }
+
+ pub(crate) fn final_ty(&self) -> Ty {
+ self.ty.clone()
+ }
+}
+
+impl Iterator for Autoderef<'_, '_> {
+ type Item = (Ty, usize);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.at_start {
+ self.at_start = false;
+ return Some((self.ty.clone(), 0));
+ }
+
+ if AUTODEREF_RECURSION_LIMIT.check(self.steps.len() + 1).is_err() {
+ return None;
+ }
+
+ let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?;
+
+ self.steps.push((kind, self.ty.clone()));
+ self.ty = new_ty;
+
+ Some((self.ty.clone(), self.step_count()))
+ }
+}
+
+pub(crate) fn autoderef_step(
+ table: &mut InferenceTable<'_>,
+ ty: Ty,
+) -> Option<(AutoderefKind, Ty)> {
+ if let Some(derefed) = builtin_deref(&ty) {
+ Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
+ } else {
+ Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
+ }
+}
+
+// FIXME: replace uses of this with Autoderef above
+pub fn autoderef<'a>(
+ db: &'a dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ ty: Canonical<Ty>,
+) -> impl Iterator<Item = Canonical<Ty>> + 'a {
+ let mut table = InferenceTable::new(db, env);
+ let ty = table.instantiate_canonical(ty);
+ let mut autoderef = Autoderef::new(&mut table, ty);
+ let mut v = Vec::new();
+ while let Some((ty, _steps)) = autoderef.next() {
+ v.push(autoderef.table.canonicalize(ty).value);
+ }
+ v.into_iter()
+}
+
+pub(crate) fn deref(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
+ let _p = profile::span("deref");
+ autoderef_step(table, ty).map(|(_, ty)| ty)
+}
+
+fn builtin_deref(ty: &Ty) -> Option<&Ty> {
+ match ty.kind(Interner) {
+ TyKind::Ref(.., ty) => Some(ty),
+ TyKind::Raw(.., ty) => Some(ty),
+ _ => None,
+ }
+}
+
+fn deref_by_trait(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
+ let _p = profile::span("deref_by_trait");
+ if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
+ // don't try to deref unknown variables
+ return None;
+ }
+
+ let db = table.db;
+ let deref_trait = db
+ .lang_item(table.trait_env.krate, SmolStr::new_inline("deref"))
+ .and_then(|l| l.as_trait())?;
+ let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
+
+ let projection = {
+ let b = TyBuilder::assoc_type_projection(db, target);
+ if b.remaining() != 1 {
+ // the Target type + Deref trait should only have one generic parameter,
+ // namely Deref's Self type
+ return None;
+ }
+ b.push(ty).build()
+ };
+
+ // Check that the type implements Deref at all
+ let trait_ref = projection.trait_ref(db);
+ let implements_goal: Goal = trait_ref.cast(Interner);
+ table.try_obligation(implements_goal.clone())?;
+
+ table.register_obligation(implements_goal);
+
+ let result = table.normalize_projection_ty(projection);
+ Some(table.resolve_ty_shallow(&result))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
new file mode 100644
index 000000000..94d7806cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
@@ -0,0 +1,311 @@
+//! `TyBuilder`, a helper for building instances of `Ty` and related types.
+
+use std::iter;
+
+use chalk_ir::{
+ cast::{Cast, CastTo, Caster},
+ fold::TypeFoldable,
+ interner::HasInterner,
+ AdtId, BoundVar, DebruijnIndex, Scalar,
+};
+use hir_def::{
+ builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, GenericDefId, TraitId,
+ TypeAliasId,
+};
+use smallvec::SmallVec;
+
+use crate::{
+ consteval::unknown_const_as_generic, db::HirDatabase, infer::unify::InferenceTable, primitive,
+ to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, CallableSig, ConstData,
+ ConstValue, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, TraitRef, Ty,
+ TyDefId, TyExt, TyKind, ValueTyDefId,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ParamKind {
+ Type,
+ Const(Ty),
+}
+
+/// This is a builder for `Ty` or anything that needs a `Substitution`.
+pub struct TyBuilder<D> {
+ /// The `data` field is used to keep track of what we're building (e.g. an
+ /// ADT, a `TraitRef`, ...).
+ data: D,
+ vec: SmallVec<[GenericArg; 2]>,
+ param_kinds: SmallVec<[ParamKind; 2]>,
+}
+
+impl<A> TyBuilder<A> {
+ fn with_data<B>(self, data: B) -> TyBuilder<B> {
+ TyBuilder { data, param_kinds: self.param_kinds, vec: self.vec }
+ }
+}
+
+impl<D> TyBuilder<D> {
+ fn new(data: D, param_kinds: SmallVec<[ParamKind; 2]>) -> TyBuilder<D> {
+ TyBuilder { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds }
+ }
+
+ fn build_internal(self) -> (D, Substitution) {
+ assert_eq!(self.vec.len(), self.param_kinds.len());
+ for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) {
+ self.assert_match_kind(a, e);
+ }
+ let subst = Substitution::from_iter(Interner, self.vec);
+ (self.data, subst)
+ }
+
+ pub fn push(mut self, arg: impl CastTo<GenericArg>) -> Self {
+ let arg = arg.cast(Interner);
+ let expected_kind = &self.param_kinds[self.vec.len()];
+ let arg_kind = match arg.data(Interner) {
+ chalk_ir::GenericArgData::Ty(_) => ParamKind::Type,
+ chalk_ir::GenericArgData::Lifetime(_) => panic!("Got lifetime in TyBuilder::push"),
+ chalk_ir::GenericArgData::Const(c) => {
+ let c = c.data(Interner);
+ ParamKind::Const(c.ty.clone())
+ }
+ };
+ assert_eq!(*expected_kind, arg_kind);
+ self.vec.push(arg);
+ self
+ }
+
+ pub fn remaining(&self) -> usize {
+ self.param_kinds.len() - self.vec.len()
+ }
+
+ pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self {
+ // self.fill is inlined to make borrow checker happy
+ let mut this = self;
+ let other = this.param_kinds.iter().skip(this.vec.len());
+ let filler = (starting_from..).zip(other).map(|(idx, kind)| match kind {
+ ParamKind::Type => {
+ GenericArgData::Ty(TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner))
+ .intern(Interner)
+ }
+ ParamKind::Const(ty) => GenericArgData::Const(
+ ConstData {
+ value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
+ ty: ty.clone(),
+ }
+ .intern(Interner),
+ )
+ .intern(Interner),
+ });
+ this.vec.extend(filler.take(this.remaining()).casted(Interner));
+ assert_eq!(this.remaining(), 0);
+ this
+ }
+
+ pub fn fill_with_unknown(self) -> Self {
+ // self.fill is inlined to make borrow checker happy
+ let mut this = self;
+ let filler = this.param_kinds.iter().skip(this.vec.len()).map(|x| match x {
+ ParamKind::Type => GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner),
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ });
+ this.vec.extend(filler.casted(Interner));
+ assert_eq!(this.remaining(), 0);
+ this
+ }
+
+ pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self {
+ self.fill(|x| match x {
+ ParamKind::Type => GenericArgData::Ty(table.new_type_var()).intern(Interner),
+ ParamKind::Const(ty) => {
+ GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
+ }
+ })
+ }
+
+ pub fn fill(mut self, filler: impl FnMut(&ParamKind) -> GenericArg) -> Self {
+ self.vec.extend(self.param_kinds.iter().skip(self.vec.len()).map(filler));
+ assert_eq!(self.remaining(), 0);
+ self
+ }
+
+ pub fn use_parent_substs(mut self, parent_substs: &Substitution) -> Self {
+ assert!(self.vec.is_empty());
+ assert!(parent_substs.len(Interner) <= self.param_kinds.len());
+ self.extend(parent_substs.iter(Interner).cloned());
+ self
+ }
+
+ fn extend(&mut self, it: impl Iterator<Item = GenericArg> + Clone) {
+ for x in it.clone().zip(self.param_kinds.iter().skip(self.vec.len())) {
+ self.assert_match_kind(&x.0, &x.1);
+ }
+ self.vec.extend(it);
+ }
+
+ fn assert_match_kind(&self, a: &chalk_ir::GenericArg<Interner>, e: &ParamKind) {
+ match (a.data(Interner), e) {
+ (chalk_ir::GenericArgData::Ty(_), ParamKind::Type)
+ | (chalk_ir::GenericArgData::Const(_), ParamKind::Const(_)) => (),
+ _ => panic!("Mismatched kinds: {:?}, {:?}, {:?}", a, self.vec, self.param_kinds),
+ }
+ }
+}
+
+impl TyBuilder<()> {
+ pub fn unit() -> Ty {
+ TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner)
+ }
+
+ pub fn usize() -> Ty {
+ TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner)
+ }
+
+ pub fn fn_ptr(sig: CallableSig) -> Ty {
+ TyKind::Function(sig.to_fn_ptr()).intern(Interner)
+ }
+
+ pub fn builtin(builtin: BuiltinType) -> Ty {
+ match builtin {
+ BuiltinType::Char => TyKind::Scalar(Scalar::Char).intern(Interner),
+ BuiltinType::Bool => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ BuiltinType::Str => TyKind::Str.intern(Interner),
+ BuiltinType::Int(t) => {
+ TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(t))).intern(Interner)
+ }
+ BuiltinType::Uint(t) => {
+ TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(t))).intern(Interner)
+ }
+ BuiltinType::Float(t) => {
+ TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(t))).intern(Interner)
+ }
+ }
+ }
+
+ pub fn slice(argument: Ty) -> Ty {
+ TyKind::Slice(argument).intern(Interner)
+ }
+
+ pub fn placeholder_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
+ let params = generics(db.upcast(), def.into());
+ params.placeholder_subst(db)
+ }
+
+ pub fn subst_for_def(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> TyBuilder<()> {
+ let def = def.into();
+ let params = generics(db.upcast(), def);
+ TyBuilder::new(
+ (),
+ params
+ .iter()
+ .map(|(id, data)| match data {
+ TypeOrConstParamData::TypeParamData(_) => ParamKind::Type,
+ TypeOrConstParamData::ConstParamData(_) => {
+ ParamKind::Const(db.const_param_ty(ConstParamId::from_unchecked(id)))
+ }
+ })
+ .collect(),
+ )
+ }
+
+ pub fn build(self) -> Substitution {
+ let ((), subst) = self.build_internal();
+ subst
+ }
+}
+
+impl TyBuilder<hir_def::AdtId> {
+ pub fn adt(db: &dyn HirDatabase, def: hir_def::AdtId) -> TyBuilder<hir_def::AdtId> {
+ TyBuilder::subst_for_def(db, def).with_data(def)
+ }
+
+ pub fn fill_with_defaults(
+ mut self,
+ db: &dyn HirDatabase,
+ mut fallback: impl FnMut() -> Ty,
+ ) -> Self {
+ let defaults = db.generic_defaults(self.data.into());
+ for default_ty in defaults.iter().skip(self.vec.len()) {
+ if let GenericArgData::Ty(x) = default_ty.skip_binders().data(Interner) {
+ if x.is_unknown() {
+ self.vec.push(fallback().cast(Interner));
+ continue;
+ }
+ };
+ // each default can depend on the previous parameters
+ let subst_so_far = Substitution::from_iter(Interner, self.vec.clone());
+ self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner));
+ }
+ self
+ }
+
+ pub fn build(self) -> Ty {
+ let (adt, subst) = self.build_internal();
+ TyKind::Adt(AdtId(adt), subst).intern(Interner)
+ }
+}
+
+pub struct Tuple(usize);
+impl TyBuilder<Tuple> {
+ pub fn tuple(size: usize) -> TyBuilder<Tuple> {
+ TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect())
+ }
+
+ pub fn build(self) -> Ty {
+ let (Tuple(size), subst) = self.build_internal();
+ TyKind::Tuple(size, subst).intern(Interner)
+ }
+}
+
+impl TyBuilder<TraitId> {
+ pub fn trait_ref(db: &dyn HirDatabase, def: TraitId) -> TyBuilder<TraitId> {
+ TyBuilder::subst_for_def(db, def).with_data(def)
+ }
+
+ pub fn build(self) -> TraitRef {
+ let (trait_id, substitution) = self.build_internal();
+ TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution }
+ }
+}
+
+impl TyBuilder<TypeAliasId> {
+ pub fn assoc_type_projection(db: &dyn HirDatabase, def: TypeAliasId) -> TyBuilder<TypeAliasId> {
+ TyBuilder::subst_for_def(db, def).with_data(def)
+ }
+
+ pub fn build(self) -> ProjectionTy {
+ let (type_alias, substitution) = self.build_internal();
+ ProjectionTy { associated_ty_id: to_assoc_type_id(type_alias), substitution }
+ }
+}
+
+impl<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>> TyBuilder<Binders<T>> {
+ fn subst_binders(b: Binders<T>) -> Self {
+ let param_kinds = b
+ .binders
+ .iter(Interner)
+ .map(|x| match x {
+ chalk_ir::VariableKind::Ty(_) => ParamKind::Type,
+ chalk_ir::VariableKind::Lifetime => panic!("Got lifetime parameter"),
+ chalk_ir::VariableKind::Const(ty) => ParamKind::Const(ty.clone()),
+ })
+ .collect();
+ TyBuilder::new(b, param_kinds)
+ }
+
+ pub fn build(self) -> T {
+ let (b, subst) = self.build_internal();
+ b.substitute(Interner, &subst)
+ }
+}
+
+impl TyBuilder<Binders<Ty>> {
+ pub fn def_ty(db: &dyn HirDatabase, def: TyDefId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.ty(def))
+ }
+
+ pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.impl_self_ty(def))
+ }
+
+ pub fn value_ty(db: &dyn HirDatabase, def: ValueTyDefId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.value_ty(def))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
new file mode 100644
index 000000000..faec99c7d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -0,0 +1,799 @@
+//! The implementation of `RustIrDatabase` for Chalk, which provides information
+//! about the code that Chalk needs.
+use std::sync::Arc;
+
+use cov_mark::hit;
+use syntax::SmolStr;
+use tracing::debug;
+
+use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds};
+use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
+
+use base_db::CrateId;
+use hir_def::{
+ lang_item::{lang_attr, LangItemTarget},
+ AssocItemId, GenericDefId, HasModule, ItemContainerId, Lookup, ModuleId, TypeAliasId,
+};
+use hir_expand::name::name;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, make_binders,
+ make_single_type_binders,
+ mapping::{from_chalk, ToChalk, TypeAliasAsValue},
+ method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
+ to_assoc_type_id, to_chalk_trait_id,
+ traits::ChalkContext,
+ utils::generics,
+ AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, Interner, ProjectionTy,
+ ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder,
+ TyExt, TyKind, WhereClause,
+};
+
+pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
+pub(crate) type TraitDatum = chalk_solve::rust_ir::TraitDatum<Interner>;
+pub(crate) type StructDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
+pub(crate) type ImplDatum = chalk_solve::rust_ir::ImplDatum<Interner>;
+pub(crate) type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum<Interner>;
+
+pub(crate) type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub(crate) type TraitId = chalk_ir::TraitId<Interner>;
+pub(crate) type AdtId = chalk_ir::AdtId<Interner>;
+pub(crate) type ImplId = chalk_ir::ImplId<Interner>;
+pub(crate) type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId<Interner>;
+pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Interner>;
+pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
+pub(crate) type Variances = chalk_ir::Variances<Interner>;
+
+impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
+ fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
+ self.db.associated_ty_data(id)
+ }
+ fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
+ self.db.trait_datum(self.krate, trait_id)
+ }
+ fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
+ self.db.struct_datum(self.krate, struct_id)
+ }
+ fn adt_repr(&self, _struct_id: AdtId) -> Arc<rust_ir::AdtRepr<Interner>> {
+ // FIXME: keep track of these
+ Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
+ }
+ fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
+ // FIXME: keep track of this
+ chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner)
+ }
+ fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
+ self.db.impl_datum(self.krate, impl_id)
+ }
+
+ fn fn_def_datum(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ ) -> Arc<rust_ir::FnDefDatum<Interner>> {
+ self.db.fn_def_datum(self.krate, fn_def_id)
+ }
+
+ fn impls_for_trait(
+ &self,
+ trait_id: TraitId,
+ parameters: &[chalk_ir::GenericArg<Interner>],
+ binders: &CanonicalVarKinds<Interner>,
+ ) -> Vec<ImplId> {
+ debug!("impls_for_trait {:?}", trait_id);
+ let trait_: hir_def::TraitId = from_chalk_trait_id(trait_id);
+
+ let ty: Ty = parameters[0].assert_ty_ref(Interner).clone();
+
+ fn binder_kind(
+ ty: &Ty,
+ binders: &CanonicalVarKinds<Interner>,
+ ) -> Option<chalk_ir::TyVariableKind> {
+ if let TyKind::BoundVar(bv) = ty.kind(Interner) {
+ let binders = binders.as_slice(Interner);
+ if bv.debruijn == DebruijnIndex::INNERMOST {
+ if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind {
+ return Some(tk);
+ }
+ }
+ }
+ None
+ }
+
+ let self_ty_fp = TyFingerprint::for_trait_impl(&ty);
+ let fps: &[TyFingerprint] = match binder_kind(&ty, binders) {
+ Some(chalk_ir::TyVariableKind::Integer) => &ALL_INT_FPS,
+ Some(chalk_ir::TyVariableKind::Float) => &ALL_FLOAT_FPS,
+ _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
+ };
+
+ fn local_impls(db: &dyn HirDatabase, module: ModuleId) -> Option<Arc<TraitImpls>> {
+ let block = module.containing_block()?;
+ hit!(block_local_impls);
+ db.trait_impls_in_block(block)
+ }
+
+ // Note: Since we're using impls_for_trait, only impls where the trait
+ // can be resolved should ever reach Chalk. impl_datum relies on that
+ // and will panic if the trait can't be resolved.
+ let in_deps = self.db.trait_impls_in_deps(self.krate);
+ let in_self = self.db.trait_impls_in_crate(self.krate);
+ let trait_module = trait_.module(self.db.upcast());
+ let type_module = match self_ty_fp {
+ Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())),
+ Some(TyFingerprint::ForeignType(type_id)) => {
+ Some(from_foreign_def_id(type_id).module(self.db.upcast()))
+ }
+ Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())),
+ _ => None,
+ };
+ let impl_maps = [
+ Some(in_deps),
+ Some(in_self),
+ local_impls(self.db, trait_module),
+ type_module.and_then(|m| local_impls(self.db, m)),
+ ];
+
+ let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
+
+ let result: Vec<_> = if fps.is_empty() {
+ debug!("Unrestricted search for {:?} impls...", trait_);
+ impl_maps
+ .iter()
+ .filter_map(|o| o.as_ref())
+ .flat_map(|impls| impls.for_trait(trait_).map(id_to_chalk))
+ .collect()
+ } else {
+ impl_maps
+ .iter()
+ .filter_map(|o| o.as_ref())
+ .flat_map(|impls| {
+ fps.iter().flat_map(move |fp| {
+ impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
+ })
+ })
+ .collect()
+ };
+
+ debug!("impls_for_trait returned {} impls", result.len());
+ result
+ }
+ fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind<Interner>) -> bool {
+ debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind);
+ false // FIXME
+ }
+ fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
+ self.db.associated_ty_value(self.krate, id)
+ }
+
+ fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<Interner>> {
+ vec![]
+ }
+ fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec<ImplId> {
+ // We don't do coherence checking (yet)
+ unimplemented!()
+ }
+ fn interner(&self) -> Interner {
+ Interner
+ }
+ fn well_known_trait_id(
+ &self,
+ well_known_trait: rust_ir::WellKnownTrait,
+ ) -> Option<chalk_ir::TraitId<Interner>> {
+ let lang_attr = lang_attr_from_well_known_trait(well_known_trait);
+ let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) {
+ Some(LangItemTarget::TraitId(trait_)) => trait_,
+ _ => return None,
+ };
+ Some(to_chalk_trait_id(trait_))
+ }
+
+ fn program_clauses_for_env(
+ &self,
+ environment: &chalk_ir::Environment<Interner>,
+ ) -> chalk_ir::ProgramClauses<Interner> {
+ self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
+ }
+
+ fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId<Interner>) -> Arc<OpaqueTyDatum> {
+ let full_id = self.db.lookup_intern_impl_trait_id(id.into());
+ let bound = match full_id {
+ crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas = self
+ .db
+ .return_type_impl_traits(func)
+ .expect("impl trait id without impl traits");
+ let (datas, binders) = (*datas).as_ref().into_value_and_skipped_binders();
+ let data = &datas.impl_traits[idx as usize];
+ let bound = OpaqueTyDatumBound {
+ bounds: make_single_type_binders(data.bounds.skip_binders().to_vec()),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ chalk_ir::Binders::new(binders, bound)
+ }
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ if let Some((future_trait, future_output)) = self
+ .db
+ .lang_item(self.krate, SmolStr::new_inline("future_trait"))
+ .and_then(|item| item.as_trait())
+ .and_then(|trait_| {
+ let alias =
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])?;
+ Some((trait_, alias))
+ })
+ {
+                    // Making up the opaque type `AsyncBlock<T>`:
+ //
+ // |--------------------OpaqueTyDatum-------------------|
+ // |-------------OpaqueTyDatumBound--------------|
+ // for<T> <Self> [Future<Self>, Future::Output<Self> = T]
+ // ^1 ^0 ^0 ^0 ^1
+ let impl_bound = WhereClause::Implemented(TraitRef {
+ trait_id: to_chalk_trait_id(future_trait),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar {
+ debruijn: DebruijnIndex::INNERMOST,
+ index: 0,
+ })
+ .intern(Interner),
+ ),
+ });
+ let mut binder = vec![];
+ binder.push(crate::wrap_empty_binders(impl_bound));
+ let sized_trait = self
+ .db
+ .lang_item(self.krate, SmolStr::new_inline("sized"))
+ .and_then(|item| item.as_trait());
+ if let Some(sized_trait_) = sized_trait {
+ let sized_bound = WhereClause::Implemented(TraitRef {
+ trait_id: to_chalk_trait_id(sized_trait_),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar {
+ debruijn: DebruijnIndex::INNERMOST,
+ index: 0,
+ })
+ .intern(Interner),
+ ),
+ });
+ binder.push(crate::wrap_empty_binders(sized_bound));
+ }
+ let proj_bound = WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(future_output),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+ .intern(Interner),
+ ),
+ }),
+ // The parameter of the opaque type.
+ ty: TyKind::BoundVar(BoundVar { debruijn: DebruijnIndex::ONE, index: 0 })
+ .intern(Interner),
+ });
+ binder.push(crate::wrap_empty_binders(proj_bound));
+ let bound = OpaqueTyDatumBound {
+ bounds: make_single_type_binders(binder),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ // The opaque type has 1 parameter.
+ make_single_type_binders(bound)
+ } else {
+                    // If we failed to find `Future::Output`, return empty bounds as a fallback.
+ let bound = OpaqueTyDatumBound {
+ bounds: chalk_ir::Binders::empty(Interner, vec![]),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ // The opaque type has 1 parameter.
+ make_single_type_binders(bound)
+ }
+ }
+ };
+
+ Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound })
+ }
+
+ fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId<Interner>) -> chalk_ir::Ty<Interner> {
+ // FIXME: actually provide the hidden type; it is relevant for auto traits
+ TyKind::Error.intern(Interner)
+ }
+
+ fn is_object_safe(&self, _trait_id: chalk_ir::TraitId<Interner>) -> bool {
+ // FIXME: implement actual object safety
+ true
+ }
+
+ fn closure_kind(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> rust_ir::ClosureKind {
+ // Fn is the closure kind that implements all three traits
+ rust_ir::ClosureKind::Fn
+ }
+ fn closure_inputs_and_output(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
+ let sig_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ let sig = &sig_ty.callable_sig(self.db).expect("first closure param should be fn ptr");
+ let io = rust_ir::FnDefInputsAndOutputDatum {
+ argument_types: sig.params().to_vec(),
+ return_type: sig.ret().clone(),
+ };
+ chalk_ir::Binders::empty(Interner, io.shifted_in(Interner))
+ }
+ fn closure_upvars(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Binders<chalk_ir::Ty<Interner>> {
+ let ty = TyBuilder::unit();
+ chalk_ir::Binders::empty(Interner, ty)
+ }
+ fn closure_fn_substitution(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Substitution<Interner> {
+ Substitution::empty(Interner)
+ }
+
+ fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String {
+ let id = from_chalk_trait_id(trait_id);
+ self.db.trait_data(id).name.to_string()
+ }
+ fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
+ match adt_id {
+ hir_def::AdtId::StructId(id) => self.db.struct_data(id).name.to_string(),
+ hir_def::AdtId::EnumId(id) => self.db.enum_data(id).name.to_string(),
+ hir_def::AdtId::UnionId(id) => self.db.union_data(id).name.to_string(),
+ }
+ }
+ fn adt_size_align(&self, _id: chalk_ir::AdtId<Interner>) -> Arc<rust_ir::AdtSizeAlign> {
+ // FIXME
+ Arc::new(rust_ir::AdtSizeAlign::from_one_zst(false))
+ }
+ fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
+ let id = self.db.associated_ty_data(assoc_ty_id).name;
+ self.db.type_alias_data(id).name.to_string()
+ }
+ fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
+ format!("Opaque_{}", opaque_ty_id.0)
+ }
+ fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
+ format!("fn_{}", fn_def_id.0)
+ }
+ fn generator_datum(
+ &self,
+ _: chalk_ir::GeneratorId<Interner>,
+ ) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorDatum<Interner>> {
+ // FIXME
+ unimplemented!()
+ }
+ fn generator_witness_datum(
+ &self,
+ _: chalk_ir::GeneratorId<Interner>,
+ ) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorWitnessDatum<Interner>> {
+ // FIXME
+ unimplemented!()
+ }
+
+ fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase<Interner> {
+ &self.db
+ }
+}
+
+impl<'a> chalk_ir::UnificationDatabase<Interner> for &'a dyn HirDatabase {
+ fn fn_def_variance(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ ) -> chalk_ir::Variances<Interner> {
+ HirDatabase::fn_def_variance(*self, fn_def_id)
+ }
+
+ fn adt_variance(&self, adt_id: chalk_ir::AdtId<Interner>) -> chalk_ir::Variances<Interner> {
+ HirDatabase::adt_variance(*self, adt_id)
+ }
+}
+
+pub(crate) fn program_clauses_for_chalk_env_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ environment: chalk_ir::Environment<Interner>,
+) -> chalk_ir::ProgramClauses<Interner> {
+ chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
+}
+
+pub(crate) fn associated_ty_data_query(
+ db: &dyn HirDatabase,
+ id: AssocTypeId,
+) -> Arc<AssociatedTyDatum> {
+ debug!("associated_ty_data {:?}", id);
+ let type_alias: TypeAliasId = from_assoc_type_id(id);
+ let trait_ = match type_alias.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+
+ // Lower bounds -- we could/should maybe move this to a separate query in `lower`
+ let type_alias_data = db.type_alias_data(type_alias);
+ let generic_params = generics(db.upcast(), type_alias.into());
+ // let bound_vars = generic_params.bound_vars_subst(DebruijnIndex::INNERMOST);
+ let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
+ let ctx = crate::TyLoweringContext::new(db, &resolver)
+ .with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
+ let pro_ty = TyBuilder::assoc_type_projection(db, type_alias)
+ .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0)
+ .build();
+ let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner);
+ let mut bounds: Vec<_> = type_alias_data
+ .bounds
+ .iter()
+ .flat_map(|bound| ctx.lower_type_bound(bound, self_ty.clone(), false))
+ .filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty))
+ .collect();
+
+ if !ctx.unsized_types.borrow().contains(&self_ty) {
+ let sized_trait = db
+ .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+ let sized_bound = sized_trait.into_iter().map(|sized_trait| {
+ let trait_bound =
+ rust_ir::TraitBound { trait_id: sized_trait, args_no_self: Default::default() };
+ let inline_bound = rust_ir::InlineBound::TraitBound(trait_bound);
+ chalk_ir::Binders::empty(Interner, inline_bound)
+ });
+ bounds.extend(sized_bound);
+ bounds.shrink_to_fit();
+ }
+
+ // FIXME: Re-enable where clauses on associated types when an upstream chalk bug is fixed.
+ // (rust-analyzer#9052)
+ // let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars);
+ let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses: vec![] };
+ let datum = AssociatedTyDatum {
+ trait_id: to_chalk_trait_id(trait_),
+ id,
+ name: type_alias,
+ binders: make_binders(db, &generic_params, bound_data),
+ };
+ Arc::new(datum)
+}
+
+/// Chalk query implementation: builds the [`TraitDatum`] (flags, where
+/// clauses, associated type ids, well-known-trait mapping) that the Chalk
+/// solver uses to reason about `trait_id`.
+pub(crate) fn trait_datum_query(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    trait_id: TraitId,
+) -> Arc<TraitDatum> {
+    debug!("trait_datum {:?}", trait_id);
+    let trait_ = from_chalk_trait_id(trait_id);
+    let trait_data = db.trait_data(trait_);
+    debug!("trait {:?} = {:?}", trait_id, trait_data.name);
+    let generic_params = generics(db.upcast(), trait_.into());
+    let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    let flags = rust_ir::TraitFlags {
+        auto: trait_data.is_auto,
+        // "Upstream" means the trait is defined in a crate other than the one
+        // we are currently solving for.
+        upstream: trait_.lookup(db.upcast()).container.krate() != krate,
+        non_enumerable: true,
+        coinductive: false, // only relevant for Chalk testing
+        // FIXME: set these flags correctly
+        marker: false,
+        fundamental: false,
+    };
+    let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
+    let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
+    let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
+    // Traits with a `#[lang = "..."]` attribute may be "well-known" to Chalk
+    // (e.g. `Sized`, `Fn`), enabling its built-in solving rules for them.
+    let well_known =
+        lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name));
+    let trait_datum = TraitDatum {
+        id: trait_id,
+        binders: make_binders(db, &generic_params, trait_datum_bound),
+        flags,
+        associated_ty_ids,
+        well_known,
+    };
+    Arc::new(trait_datum)
+}
+
+/// Maps a `#[lang = "..."]` attribute name to the corresponding Chalk
+/// [`WellKnownTrait`], or `None` if Chalk has no special knowledge of it.
+fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> {
+    Some(match name {
+        "clone" => WellKnownTrait::Clone,
+        "coerce_unsized" => WellKnownTrait::CoerceUnsized,
+        "copy" => WellKnownTrait::Copy,
+        "discriminant_kind" => WellKnownTrait::DiscriminantKind,
+        "dispatch_from_dyn" => WellKnownTrait::DispatchFromDyn,
+        "drop" => WellKnownTrait::Drop,
+        "fn" => WellKnownTrait::Fn,
+        "fn_mut" => WellKnownTrait::FnMut,
+        "fn_once" => WellKnownTrait::FnOnce,
+        "generator" => WellKnownTrait::Generator,
+        "sized" => WellKnownTrait::Sized,
+        "unpin" => WellKnownTrait::Unpin,
+        "unsize" => WellKnownTrait::Unsize,
+        _ => return None,
+    })
+}
+
+/// Inverse of [`well_known_trait_from_lang_attr`]: the lang-item name for a
+/// Chalk [`WellKnownTrait`].
+fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str {
+    match attr {
+        WellKnownTrait::Clone => "clone",
+        WellKnownTrait::CoerceUnsized => "coerce_unsized",
+        WellKnownTrait::Copy => "copy",
+        WellKnownTrait::DiscriminantKind => "discriminant_kind",
+        WellKnownTrait::DispatchFromDyn => "dispatch_from_dyn",
+        WellKnownTrait::Drop => "drop",
+        WellKnownTrait::Fn => "fn",
+        WellKnownTrait::FnMut => "fn_mut",
+        WellKnownTrait::FnOnce => "fn_once",
+        WellKnownTrait::Generator => "generator",
+        WellKnownTrait::Sized => "sized",
+        WellKnownTrait::Unpin => "unpin",
+        WellKnownTrait::Unsize => "unsize",
+    }
+}
+
+/// Chalk query implementation: builds the [`StructDatum`] (Chalk's per-ADT
+/// metadata: where clauses, variants, flags) for `struct_id`.
+pub(crate) fn struct_datum_query(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    struct_id: AdtId,
+) -> Arc<StructDatum> {
+    debug!("struct_datum {:?}", struct_id);
+    let chalk_ir::AdtId(adt_id) = struct_id;
+    let generic_params = generics(db.upcast(), adt_id.into());
+    let upstream = adt_id.module(db.upcast()).krate() != krate;
+    let where_clauses = {
+        // NOTE(review): `generics` is computed a second time here; this inner
+        // binding shadows the outer `generic_params` above.
+        let generic_params = generics(db.upcast(), adt_id.into());
+        let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+        convert_where_clauses(db, adt_id.into(), &bound_vars)
+    };
+    let flags = rust_ir::AdtFlags {
+        upstream,
+        // FIXME set fundamental and phantom_data flags correctly
+        fundamental: false,
+        phantom_data: false,
+    };
+    // FIXME provide enum variants properly (for auto traits)
+    let variant = rust_ir::AdtVariantDatum {
+        fields: Vec::new(), // FIXME add fields (only relevant for auto traits),
+    };
+    let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses };
+    let struct_datum = StructDatum {
+        // FIXME set ADT kind
+        kind: rust_ir::AdtKind::Struct,
+        id: struct_id,
+        binders: make_binders(db, &generic_params, struct_datum_bound),
+        flags,
+    };
+    Arc::new(struct_datum)
+}
+
+/// Chalk query implementation: builds the [`ImplDatum`] for `impl_id` by
+/// mapping the Chalk id back to the HIR impl and delegating to
+/// [`impl_def_datum`].
+pub(crate) fn impl_datum_query(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    impl_id: ImplId,
+) -> Arc<ImplDatum> {
+    let _p = profile::span("impl_datum");
+    debug!("impl_datum {:?}", impl_id);
+    let impl_: hir_def::ImplId = from_chalk(db, impl_id);
+    impl_def_datum(db, krate, impl_id, impl_)
+}
+
+/// Builds the [`ImplDatum`] for a HIR impl: the implemented trait ref, its
+/// where clauses, polarity, and the associated type values the impl provides.
+fn impl_def_datum(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    chalk_id: ImplId,
+    impl_id: hir_def::ImplId,
+) -> Arc<ImplDatum> {
+    let trait_ref = db
+        .impl_trait(impl_id)
+        // ImplIds for impls where the trait ref can't be resolved should never reach Chalk
+        .expect("invalid impl passed to Chalk")
+        .into_value_and_skipped_binders()
+        .0;
+    let impl_data = db.impl_data(impl_id);
+
+    let generic_params = generics(db.upcast(), impl_id.into());
+    let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    let trait_ = trait_ref.hir_trait_id();
+    // Impls defined in the crate we are solving for are `Local`; all others
+    // are `External`.
+    let impl_type = if impl_id.lookup(db.upcast()).container.krate() == krate {
+        rust_ir::ImplType::Local
+    } else {
+        rust_ir::ImplType::External
+    };
+    let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
+    let negative = impl_data.is_negative;
+    debug!(
+        "impl {:?}: {}{} where {:?}",
+        chalk_id,
+        if negative { "!" } else { "" },
+        trait_ref.display(db),
+        where_clauses
+    );
+
+    let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
+
+    let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
+    let trait_data = db.trait_data(trait_);
+    let associated_ty_value_ids = impl_data
+        .items
+        .iter()
+        .filter_map(|item| match item {
+            AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
+            _ => None,
+        })
+        .filter(|&type_alias| {
+            // don't include associated types that don't exist in the trait
+            let name = &db.type_alias_data(type_alias).name;
+            trait_data.associated_type_by_name(name).is_some()
+        })
+        .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db))
+        .collect();
+    debug!("impl_datum: {:?}", impl_datum_bound);
+    let impl_datum = ImplDatum {
+        binders: make_binders(db, &generic_params, impl_datum_bound),
+        impl_type,
+        polarity,
+        associated_ty_value_ids,
+    };
+    Arc::new(impl_datum)
+}
+
+/// Chalk query implementation: resolves an [`AssociatedTyValueId`] back to its
+/// HIR type alias and builds the corresponding [`AssociatedTyValue`].
+pub(crate) fn associated_ty_value_query(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    id: AssociatedTyValueId,
+) -> Arc<AssociatedTyValue> {
+    let type_alias: TypeAliasAsValue = from_chalk(db, id);
+    type_alias_associated_ty_value(db, krate, type_alias.0)
+}
+
+/// Builds the [`AssociatedTyValue`] for an associated type definition inside
+/// an impl: which trait associated type it implements and the concrete type
+/// it is bound to.
+///
+/// Panics if `type_alias` is not contained in an impl; the other `expect`s
+/// rely on invariants established when the impl data was built.
+fn type_alias_associated_ty_value(
+    db: &dyn HirDatabase,
+    _krate: CrateId,
+    type_alias: TypeAliasId,
+) -> Arc<AssociatedTyValue> {
+    let type_alias_data = db.type_alias_data(type_alias);
+    let impl_id = match type_alias.lookup(db.upcast()).container {
+        ItemContainerId::ImplId(it) => it,
+        _ => panic!("assoc ty value should be in impl"),
+    };
+
+    let trait_ref = db
+        .impl_trait(impl_id)
+        .expect("assoc ty value should not exist")
+        .into_value_and_skipped_binders()
+        .0; // we don't return any assoc ty values if the impl'd trait can't be resolved
+
+    // Find the declaration in the trait that this impl item implements.
+    let assoc_ty = db
+        .trait_data(trait_ref.hir_trait_id())
+        .associated_type_by_name(&type_alias_data.name)
+        .expect("assoc ty value should not exist"); // validated when building the impl data as well
+    let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders();
+    let value_bound = rust_ir::AssociatedTyValueBound { ty };
+    let value = rust_ir::AssociatedTyValue {
+        impl_id: impl_id.to_chalk(db),
+        associated_ty_id: to_assoc_type_id(assoc_ty),
+        value: chalk_ir::Binders::new(binders, value_bound),
+    };
+    Arc::new(value)
+}
+
+/// Chalk query implementation: builds the [`FnDefDatum`] (signature, where
+/// clauses) for a callable definition.
+pub(crate) fn fn_def_datum_query(
+    db: &dyn HirDatabase,
+    _krate: CrateId,
+    fn_def_id: FnDefId,
+) -> Arc<FnDefDatum> {
+    let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+    let generic_params = generics(db.upcast(), callable_def.into());
+    let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders();
+    let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars);
+    let bound = rust_ir::FnDefDatumBound {
+        // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway
+        inputs_and_output: chalk_ir::Binders::empty(
+            Interner,
+            rust_ir::FnDefInputsAndOutputDatum {
+                argument_types: sig.params().to_vec(),
+                return_type: sig.ret().clone(),
+            }
+            .shifted_in(Interner),
+        ),
+        where_clauses,
+    };
+    let datum = FnDefDatum {
+        id: fn_def_id,
+        sig: chalk_ir::FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: sig.is_varargs },
+        binders: chalk_ir::Binders::new(binders, bound),
+    };
+    Arc::new(datum)
+}
+
+/// Chalk query implementation: variances of a callable's generic parameters.
+/// Every parameter is currently reported as invariant.
+pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances {
+    let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+    let generic_params = generics(db.upcast(), callable_def.into());
+    Variances::from_iter(
+        Interner,
+        std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
+    )
+}
+
+/// Chalk query implementation: variances of an ADT's generic parameters.
+/// Every parameter is currently reported as invariant.
+pub(crate) fn adt_variance_query(
+    db: &dyn HirDatabase,
+    chalk_ir::AdtId(adt_id): AdtId,
+) -> Variances {
+    let generic_params = generics(db.upcast(), adt_id.into());
+    Variances::from_iter(
+        Interner,
+        std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
+    )
+}
+
+/// Instantiates the generic predicates of `def` with `substs`, producing the
+/// quantified where clauses in the form Chalk expects.
+pub(super) fn convert_where_clauses(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+    substs: &Substitution,
+) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
+    let generic_predicates = db.generic_predicates(def);
+    let mut result = Vec::with_capacity(generic_predicates.len());
+    for pred in generic_predicates.iter() {
+        result.push(pred.clone().substitute(Interner, substs));
+    }
+    result
+}
+
+/// Converts a generic predicate into a Chalk [`rust_ir::InlineBound`], i.e. a
+/// bound with the self type stripped out.
+///
+/// Returns `None` when the predicate's self type is not `self_ty` (only such
+/// predicates can be written as inline bounds) or when the predicate kind has
+/// no inline-bound equivalent.
+pub(super) fn generic_predicate_to_inline_bound(
+    db: &dyn HirDatabase,
+    pred: &QuantifiedWhereClause,
+    self_ty: &Ty,
+) -> Option<chalk_ir::Binders<rust_ir::InlineBound<Interner>>> {
+    // An InlineBound is like a GenericPredicate, except the self type is left out.
+    // We don't have a special type for this, but Chalk does.
+    let self_ty_shifted_in = self_ty.clone().shifted_in_from(Interner, DebruijnIndex::ONE);
+    let (pred, binders) = pred.as_ref().into_value_and_skipped_binders();
+    match pred {
+        WhereClause::Implemented(trait_ref) => {
+            if trait_ref.self_type_parameter(Interner) != self_ty_shifted_in {
+                // we can only convert predicates back to type bounds if they
+                // have the expected self type
+                return None;
+            }
+            // Drop the self type (the first substitution element).
+            let args_no_self = trait_ref.substitution.as_slice(Interner)[1..]
+                .iter()
+                .map(|ty| ty.clone().cast(Interner))
+                .collect();
+            let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self };
+            Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
+        }
+        WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
+            if projection_ty.self_type_parameter(Interner) != self_ty_shifted_in {
+                return None;
+            }
+            let trait_ = projection_ty.trait_(db);
+            let args_no_self = projection_ty.substitution.as_slice(Interner)[1..]
+                .iter()
+                .map(|ty| ty.clone().cast(Interner))
+                .collect();
+            let alias_eq_bound = rust_ir::AliasEqBound {
+                value: ty.clone(),
+                trait_bound: rust_ir::TraitBound {
+                    trait_id: to_chalk_trait_id(trait_),
+                    args_no_self,
+                },
+                associated_ty_id: projection_ty.associated_ty_id,
+                parameters: Vec::new(), // FIXME we don't support generic associated types yet
+            };
+            Some(chalk_ir::Binders::new(
+                binders,
+                rust_ir::InlineBound::AliasEqBound(alias_eq_bound),
+            ))
+        }
+        _ => None,
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
new file mode 100644
index 000000000..a9c124b42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -0,0 +1,358 @@
+//! Various extension traits for Chalk types.
+
+use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, UintTy};
+use hir_def::{
+ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
+ generics::TypeOrConstParamData,
+ type_ref::Rawness,
+ FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
+};
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
+ from_placeholder_idx, to_chalk_trait_id, AdtId, AliasEq, AliasTy, Binders, CallableDefId,
+ CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy, QuantifiedWhereClause,
+ Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
+};
+
+/// Extension methods for inspecting and destructuring [`Ty`] values.
+pub trait TyExt {
+    fn is_unit(&self) -> bool;
+    fn is_never(&self) -> bool;
+    fn is_unknown(&self) -> bool;
+    fn is_ty_var(&self) -> bool;
+
+    fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
+    fn as_builtin(&self) -> Option<BuiltinType>;
+    fn as_tuple(&self) -> Option<&Substitution>;
+    fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId>;
+    fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>;
+    fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)>;
+    fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId>;
+
+    fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId>;
+    fn callable_sig(&self, db: &dyn HirDatabase) -> Option<CallableSig>;
+
+    /// Peels all layers of references, returning the innermost type.
+    fn strip_references(&self) -> &Ty;
+    /// Peels at most one layer of references.
+    fn strip_reference(&self) -> &Ty;
+
+    /// If this is a `dyn Trait`, returns that trait.
+    fn dyn_trait(&self) -> Option<TraitId>;
+
+    fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>>;
+    fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId>;
+
+    /// FIXME: Get rid of this, it's not a good abstraction
+    fn equals_ctor(&self, other: &Ty) -> bool;
+}
+
+impl TyExt for Ty {
+    fn is_unit(&self) -> bool {
+        matches!(self.kind(Interner), TyKind::Tuple(0, _))
+    }
+
+    fn is_never(&self) -> bool {
+        matches!(self.kind(Interner), TyKind::Never)
+    }
+
+    fn is_unknown(&self) -> bool {
+        matches!(self.kind(Interner), TyKind::Error)
+    }
+
+    fn is_ty_var(&self) -> bool {
+        matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
+    }
+
+    fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)> {
+        match self.kind(Interner) {
+            TyKind::Adt(AdtId(adt), parameters) => Some((*adt, parameters)),
+            _ => None,
+        }
+    }
+
+    fn as_builtin(&self) -> Option<BuiltinType> {
+        // Maps the Chalk scalar/str kinds back to HIR builtin types.
+        match self.kind(Interner) {
+            TyKind::Str => Some(BuiltinType::Str),
+            TyKind::Scalar(Scalar::Bool) => Some(BuiltinType::Bool),
+            TyKind::Scalar(Scalar::Char) => Some(BuiltinType::Char),
+            TyKind::Scalar(Scalar::Float(fty)) => Some(BuiltinType::Float(match fty {
+                FloatTy::F64 => BuiltinFloat::F64,
+                FloatTy::F32 => BuiltinFloat::F32,
+            })),
+            TyKind::Scalar(Scalar::Int(ity)) => Some(BuiltinType::Int(match ity {
+                IntTy::Isize => BuiltinInt::Isize,
+                IntTy::I8 => BuiltinInt::I8,
+                IntTy::I16 => BuiltinInt::I16,
+                IntTy::I32 => BuiltinInt::I32,
+                IntTy::I64 => BuiltinInt::I64,
+                IntTy::I128 => BuiltinInt::I128,
+            })),
+            TyKind::Scalar(Scalar::Uint(ity)) => Some(BuiltinType::Uint(match ity {
+                UintTy::Usize => BuiltinUint::Usize,
+                UintTy::U8 => BuiltinUint::U8,
+                UintTy::U16 => BuiltinUint::U16,
+                UintTy::U32 => BuiltinUint::U32,
+                UintTy::U64 => BuiltinUint::U64,
+                UintTy::U128 => BuiltinUint::U128,
+            })),
+            _ => None,
+        }
+    }
+
+    fn as_tuple(&self) -> Option<&Substitution> {
+        match self.kind(Interner) {
+            TyKind::Tuple(_, substs) => Some(substs),
+            _ => None,
+        }
+    }
+
+    fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId> {
+        match self.callable_def(db) {
+            Some(CallableDefId::FunctionId(func)) => Some(func),
+            Some(CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_)) | None => None,
+        }
+    }
+    fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)> {
+        match self.kind(Interner) {
+            TyKind::Ref(mutability, lifetime, ty) => Some((ty, lifetime.clone(), *mutability)),
+            _ => None,
+        }
+    }
+
+    fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> {
+        match self.kind(Interner) {
+            TyKind::Ref(mutability, _, ty) => Some((ty, Rawness::Ref, *mutability)),
+            TyKind::Raw(mutability, ty) => Some((ty, Rawness::RawPtr, *mutability)),
+            _ => None,
+        }
+    }
+
+    fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId> {
+        match *self.kind(Interner) {
+            TyKind::Adt(AdtId(adt), ..) => Some(adt.into()),
+            TyKind::FnDef(callable, ..) => {
+                Some(db.lookup_intern_callable_def(callable.into()).into())
+            }
+            TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()),
+            TyKind::Foreign(type_alias, ..) => Some(from_foreign_def_id(type_alias).into()),
+            _ => None,
+        }
+    }
+
+    fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId> {
+        match self.kind(Interner) {
+            &TyKind::FnDef(def, ..) => Some(db.lookup_intern_callable_def(def.into())),
+            _ => None,
+        }
+    }
+
+    fn callable_sig(&self, db: &dyn HirDatabase) -> Option<CallableSig> {
+        match self.kind(Interner) {
+            TyKind::Function(fn_ptr) => Some(CallableSig::from_fn_ptr(fn_ptr)),
+            TyKind::FnDef(def, parameters) => {
+                let callable_def = db.lookup_intern_callable_def((*def).into());
+                let sig = db.callable_item_signature(callable_def);
+                Some(sig.substitute(Interner, &parameters))
+            }
+            TyKind::Closure(.., substs) => {
+                // The closure's signature type is stored as the first element
+                // of its substitution; recurse on it.
+                let sig_param = substs.at(Interner, 0).assert_ty_ref(Interner);
+                sig_param.callable_sig(db)
+            }
+            _ => None,
+        }
+    }
+
+    fn dyn_trait(&self) -> Option<TraitId> {
+        // The principal trait is taken from the first bound of the `dyn`
+        // type, provided that bound is a trait bound.
+        let trait_ref = match self.kind(Interner) {
+            TyKind::Dyn(dyn_ty) => dyn_ty.bounds.skip_binders().interned().get(0).and_then(|b| {
+                match b.skip_binders() {
+                    WhereClause::Implemented(trait_ref) => Some(trait_ref),
+                    _ => None,
+                }
+            }),
+            _ => None,
+        }?;
+        Some(from_chalk_trait_id(trait_ref.trait_id))
+    }
+
+    fn strip_references(&self) -> &Ty {
+        let mut t: &Ty = self;
+        while let TyKind::Ref(_mutability, _lifetime, ty) = t.kind(Interner) {
+            t = ty;
+        }
+        t
+    }
+
+    fn strip_reference(&self) -> &Ty {
+        self.as_reference().map_or(self, |(ty, _, _)| ty)
+    }
+
+    fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>> {
+        match self.kind(Interner) {
+            TyKind::OpaqueType(opaque_ty_id, subst) => {
+                match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) {
+                    ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => {
+                        let krate = def.module(db.upcast()).krate();
+                        if let Some(future_trait) = db
+                            .lang_item(krate, SmolStr::new_inline("future_trait"))
+                            .and_then(|item| item.as_trait())
+                        {
+                            // This is only used by type walking.
+                            // Parameters will be walked outside, and projection predicate is not used.
+                            // So just provide the Future trait.
+                            let impl_bound = Binders::empty(
+                                Interner,
+                                WhereClause::Implemented(TraitRef {
+                                    trait_id: to_chalk_trait_id(future_trait),
+                                    substitution: Substitution::empty(Interner),
+                                }),
+                            );
+                            Some(vec![impl_bound])
+                        } else {
+                            None
+                        }
+                    }
+                    ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+                        db.return_type_impl_traits(func).map(|it| {
+                            let data = (*it)
+                                .as_ref()
+                                .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+                            data.substitute(Interner, &subst).into_value_and_skipped_binders().0
+                        })
+                    }
+                }
+            }
+            TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+                let predicates = match db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into())
+                {
+                    ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+                        db.return_type_impl_traits(func).map(|it| {
+                            let data = (*it)
+                                .as_ref()
+                                .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+                            data.substitute(Interner, &opaque_ty.substitution)
+                        })
+                    }
+                    // It always has a parameter for the `Future::Output` type.
+                    ImplTraitId::AsyncBlockTypeImplTrait(..) => unreachable!(),
+                };
+
+                predicates.map(|it| it.into_value_and_skipped_binders().0)
+            }
+            TyKind::Placeholder(idx) => {
+                // Argument-position `impl Trait`: gather the predicates whose
+                // self type is this placeholder from the parent's generic
+                // predicates.
+                let id = from_placeholder_idx(db, *idx);
+                let generic_params = db.generic_params(id.parent);
+                let param_data = &generic_params.type_or_consts[id.local_id];
+                match param_data {
+                    TypeOrConstParamData::TypeParamData(p) => match p.provenance {
+                        hir_def::generics::TypeParamProvenance::ArgumentImplTrait => {
+                            let substs = TyBuilder::placeholder_subst(db, id.parent);
+                            let predicates = db
+                                .generic_predicates(id.parent)
+                                .iter()
+                                .map(|pred| pred.clone().substitute(Interner, &substs))
+                                .filter(|wc| match &wc.skip_binders() {
+                                    WhereClause::Implemented(tr) => {
+                                        &tr.self_type_parameter(Interner) == self
+                                    }
+                                    WhereClause::AliasEq(AliasEq {
+                                        alias: AliasTy::Projection(proj),
+                                        ty: _,
+                                    }) => &proj.self_type_parameter(Interner) == self,
+                                    _ => false,
+                                })
+                                .collect::<Vec<_>>();
+
+                            Some(predicates)
+                        }
+                        _ => None,
+                    },
+                    _ => None,
+                }
+            }
+            _ => None,
+        }
+    }
+
+    fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> {
+        match self.kind(Interner) {
+            TyKind::AssociatedType(id, ..) => {
+                match from_assoc_type_id(*id).lookup(db.upcast()).container {
+                    ItemContainerId::TraitId(trait_id) => Some(trait_id),
+                    _ => None,
+                }
+            }
+            TyKind::Alias(AliasTy::Projection(projection_ty)) => {
+                match from_assoc_type_id(projection_ty.associated_ty_id)
+                    .lookup(db.upcast())
+                    .container
+                {
+                    ItemContainerId::TraitId(trait_id) => Some(trait_id),
+                    _ => None,
+                }
+            }
+            _ => None,
+        }
+    }
+
+    fn equals_ctor(&self, other: &Ty) -> bool {
+        // Compares only the outermost type constructor, ignoring arguments.
+        match (self.kind(Interner), other.kind(Interner)) {
+            (TyKind::Adt(adt, ..), TyKind::Adt(adt2, ..)) => adt == adt2,
+            (TyKind::Slice(_), TyKind::Slice(_)) | (TyKind::Array(_, _), TyKind::Array(_, _)) => {
+                true
+            }
+            (TyKind::FnDef(def_id, ..), TyKind::FnDef(def_id2, ..)) => def_id == def_id2,
+            (TyKind::OpaqueType(ty_id, ..), TyKind::OpaqueType(ty_id2, ..)) => ty_id == ty_id2,
+            (TyKind::AssociatedType(ty_id, ..), TyKind::AssociatedType(ty_id2, ..)) => {
+                ty_id == ty_id2
+            }
+            (TyKind::Foreign(ty_id, ..), TyKind::Foreign(ty_id2, ..)) => ty_id == ty_id2,
+            (TyKind::Closure(id1, _), TyKind::Closure(id2, _)) => id1 == id2,
+            (TyKind::Ref(mutability, ..), TyKind::Ref(mutability2, ..))
+            | (TyKind::Raw(mutability, ..), TyKind::Raw(mutability2, ..)) => {
+                mutability == mutability2
+            }
+            (
+                TyKind::Function(FnPointer { num_binders, sig, .. }),
+                TyKind::Function(FnPointer { num_binders: num_binders2, sig: sig2, .. }),
+            ) => num_binders == num_binders2 && sig == sig2,
+            (TyKind::Tuple(cardinality, _), TyKind::Tuple(cardinality2, _)) => {
+                cardinality == cardinality2
+            }
+            (TyKind::Str, TyKind::Str) | (TyKind::Never, TyKind::Never) => true,
+            (TyKind::Scalar(scalar), TyKind::Scalar(scalar2)) => scalar == scalar2,
+            _ => false,
+        }
+    }
+}
+
+/// Extension methods for [`ProjectionTy`].
+pub trait ProjectionTyExt {
+    fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef;
+    fn trait_(&self, db: &dyn HirDatabase) -> TraitId;
+}
+
+impl ProjectionTyExt for ProjectionTy {
+    /// The trait reference this projection belongs to, reusing the
+    /// projection's own substitution.
+    fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
+        TraitRef {
+            trait_id: to_chalk_trait_id(self.trait_(db)),
+            substitution: self.substitution.clone(),
+        }
+    }
+
+    /// The trait that declares the projected associated type.
+    ///
+    /// Panics if the associated type is not contained in a trait.
+    fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
+        match from_assoc_type_id(self.associated_ty_id).lookup(db.upcast()).container {
+            ItemContainerId::TraitId(it) => it,
+            _ => panic!("projection ty without parent trait"),
+        }
+    }
+}
+
+/// Extension methods for [`TraitRef`].
+pub trait TraitRefExt {
+    /// The HIR id of the referenced trait.
+    fn hir_trait_id(&self) -> TraitId;
+}
+
+impl TraitRefExt for TraitRef {
+    fn hir_trait_id(&self) -> TraitId {
+        from_chalk_trait_id(self.trait_id)
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
new file mode 100644
index 000000000..0495a4e64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -0,0 +1,469 @@
+//! Constant evaluation details
+
+use std::{
+ collections::HashMap,
+ convert::TryInto,
+ fmt::{Display, Write},
+};
+
+use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData, IntTy, Scalar};
+use hir_def::{
+ expr::{ArithOp, BinaryOp, Expr, ExprId, Literal, Pat, PatId},
+ path::ModPath,
+ resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
+ type_ref::ConstScalar,
+ ConstId, DefWithBodyId,
+};
+use la_arena::{Arena, Idx};
+use stdx::never;
+
+use crate::{
+ db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
+ utils::Generics, Const, ConstData, ConstValue, GenericArg, InferenceResult, Interner, Ty,
+ TyBuilder, TyKind,
+};
+
+/// Extension trait for [`Const`]
+pub trait ConstExt {
+    /// Is a [`Const`] unknown?
+    fn is_unknown(&self) -> bool;
+}
+
+impl ConstExt for Const {
+    fn is_unknown(&self) -> bool {
+        match self.data(Interner).value {
+            // interned Unknown
+            chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst {
+                interned: ConstScalar::Unknown,
+            }) => true,
+
+            // interned concrete anything else
+            chalk_ir::ConstValue::Concrete(..) => false,
+
+            _ => {
+                // Non-concrete values (bound vars, inference vars, placeholders)
+                // are unexpected here; log and conservatively report unknown.
+                tracing::error!(
+                    "is_unknown was called on a non-concrete constant value! {:?}",
+                    self
+                );
+                true
+            }
+        }
+    }
+}
+
+/// State threaded through constant evaluation of one body.
+pub struct ConstEvalCtx<'a> {
+    pub db: &'a dyn HirDatabase,
+    // The item whose body is being evaluated.
+    pub owner: DefWithBodyId,
+    // Expression arena of the body.
+    pub exprs: &'a Arena<Expr>,
+    // Pattern arena of the body.
+    pub pats: &'a Arena<Pat>,
+    // Values of local bindings computed so far, keyed by binding pattern.
+    pub local_data: HashMap<PatId, ComputedExpr>,
+    // Inference result, used to look up expression types.
+    infer: &'a InferenceResult,
+}
+
+impl ConstEvalCtx<'_> {
+    /// The inferred type of `expr`.
+    fn expr_ty(&mut self, expr: ExprId) -> Ty {
+        self.infer[expr].clone()
+    }
+}
+
+/// Ways constant evaluation can fail.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ConstEvalError {
+    /// The construct is not (yet) supported by the evaluator.
+    NotSupported(&'static str),
+    /// The expression is not a valid constant expression.
+    SemanticError(&'static str),
+    Loop,
+    /// The body contains missing (unresolved/unparsed) parts.
+    IncompleteExpr,
+    /// Evaluation would panic at runtime (e.g. arithmetic overflow).
+    Panic(String),
+}
+
+/// The result of evaluating a constant expression.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ComputedExpr {
+    Literal(Literal),
+    Tuple(Box<[ComputedExpr]>),
+}
+
+impl Display for ComputedExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            ComputedExpr::Literal(l) => match l {
+                // Integers >= 10 are additionally shown in hexadecimal.
+                Literal::Int(x, _) => {
+                    if *x >= 10 {
+                        write!(f, "{} ({:#X})", x, x)
+                    } else {
+                        x.fmt(f)
+                    }
+                }
+                Literal::Uint(x, _) => {
+                    if *x >= 10 {
+                        write!(f, "{} ({:#X})", x, x)
+                    } else {
+                        x.fmt(f)
+                    }
+                }
+                Literal::Float(x, _) => x.fmt(f),
+                Literal::Bool(x) => x.fmt(f),
+                Literal::Char(x) => std::fmt::Debug::fmt(x, f),
+                Literal::String(x) => std::fmt::Debug::fmt(x, f),
+                Literal::ByteString(x) => std::fmt::Debug::fmt(x, f),
+            },
+            ComputedExpr::Tuple(t) => {
+                // Note: every element, including the last, is followed by ", ".
+                f.write_char('(')?;
+                for x in &**t {
+                    x.fmt(f)?;
+                    f.write_str(", ")?;
+                }
+                f.write_char(')')
+            }
+        }
+    }
+}
+
+/// The maximum value representable by `scalar`, widened to `i128`.
+///
+/// `Bool` maxes at 1, `Char` at the `u32` code-point maximum, and `Float`
+/// has no meaningful integer maximum (0 is returned).
+fn scalar_max(scalar: &Scalar) -> i128 {
+    match scalar {
+        Scalar::Bool => 1,
+        Scalar::Char => u32::MAX as i128,
+        Scalar::Int(x) => match x {
+            IntTy::Isize => isize::MAX as i128,
+            IntTy::I8 => i8::MAX as i128,
+            IntTy::I16 => i16::MAX as i128,
+            IntTy::I32 => i32::MAX as i128,
+            IntTy::I64 => i64::MAX as i128,
+            IntTy::I128 => i128::MAX as i128,
+        },
+        Scalar::Uint(x) => match x {
+            chalk_ir::UintTy::Usize => usize::MAX as i128,
+            chalk_ir::UintTy::U8 => u8::MAX as i128,
+            chalk_ir::UintTy::U16 => u16::MAX as i128,
+            chalk_ir::UintTy::U32 => u32::MAX as i128,
+            chalk_ir::UintTy::U64 => u64::MAX as i128,
+            chalk_ir::UintTy::U128 => i128::MAX as i128, // ignore too big u128 for now
+        },
+        Scalar::Float(_) => 0,
+    }
+}
+
+/// Whether `value` lies within the representable range of `scalar`
+/// (negative values are rejected for unsigned types).
+fn is_valid(scalar: &Scalar, value: i128) -> bool {
+    if value < 0 {
+        !matches!(scalar, Scalar::Uint(_)) && -scalar_max(scalar) - 1 <= value
+    } else {
+        value <= scalar_max(scalar)
+    }
+}
+
+/// Evaluates the expression `expr_id` of the current body to a [`ComputedExpr`].
+///
+/// Handles literals, integer unary/binary arithmetic, blocks with simple
+/// `let` bindings, and paths resolving to constants or previously bound
+/// locals; anything else yields a [`ConstEvalError`].
+pub fn eval_const(
+    expr_id: ExprId,
+    ctx: &mut ConstEvalCtx<'_>,
+) -> Result<ComputedExpr, ConstEvalError> {
+    let expr = &ctx.exprs[expr_id];
+    match expr {
+        Expr::Missing => Err(ConstEvalError::IncompleteExpr),
+        Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
+        &Expr::UnaryOp { expr, op } => {
+            let ty = &ctx.expr_ty(expr);
+            let ev = eval_const(expr, ctx)?;
+            match op {
+                hir_def::expr::UnaryOp::Deref => Err(ConstEvalError::NotSupported("deref")),
+                hir_def::expr::UnaryOp::Not => {
+                    let v = match ev {
+                        // `!` on a bool is logical negation; handled directly.
+                        ComputedExpr::Literal(Literal::Bool(b)) => {
+                            return Ok(ComputedExpr::Literal(Literal::Bool(!b)))
+                        }
+                        ComputedExpr::Literal(Literal::Int(v, _)) => v,
+                        ComputedExpr::Literal(Literal::Uint(v, _)) => v
+                            .try_into()
+                            .map_err(|_| ConstEvalError::NotSupported("too big u128"))?,
+                        _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+                    };
+                    // Bitwise negation is performed at the inferred integer
+                    // width and then widened back to i128.
+                    let r = match ty.kind(Interner) {
+                        TyKind::Scalar(Scalar::Uint(x)) => match x {
+                            chalk_ir::UintTy::U8 => !(v as u8) as i128,
+                            chalk_ir::UintTy::U16 => !(v as u16) as i128,
+                            chalk_ir::UintTy::U32 => !(v as u32) as i128,
+                            chalk_ir::UintTy::U64 => !(v as u64) as i128,
+                            chalk_ir::UintTy::U128 => {
+                                return Err(ConstEvalError::NotSupported("negation of u128"))
+                            }
+                            chalk_ir::UintTy::Usize => !(v as usize) as i128,
+                        },
+                        TyKind::Scalar(Scalar::Int(x)) => match x {
+                            chalk_ir::IntTy::I8 => !(v as i8) as i128,
+                            chalk_ir::IntTy::I16 => !(v as i16) as i128,
+                            chalk_ir::IntTy::I32 => !(v as i32) as i128,
+                            chalk_ir::IntTy::I64 => !(v as i64) as i128,
+                            chalk_ir::IntTy::I128 => !v,
+                            chalk_ir::IntTy::Isize => !(v as isize) as i128,
+                        },
+                        _ => return Err(ConstEvalError::NotSupported("unreachable?")),
+                    };
+                    Ok(ComputedExpr::Literal(Literal::Int(r, None)))
+                }
+                hir_def::expr::UnaryOp::Neg => {
+                    let v = match ev {
+                        ComputedExpr::Literal(Literal::Int(v, _)) => v,
+                        ComputedExpr::Literal(Literal::Uint(v, _)) => v
+                            .try_into()
+                            .map_err(|_| ConstEvalError::NotSupported("too big u128"))?,
+                        _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+                    };
+                    Ok(ComputedExpr::Literal(Literal::Int(
+                        v.checked_neg().ok_or_else(|| {
+                            ConstEvalError::Panic("overflow in negation".to_string())
+                        })?,
+                        None,
+                    )))
+                }
+            }
+        }
+        &Expr::BinaryOp { lhs, rhs, op } => {
+            let ty = &ctx.expr_ty(lhs);
+            let lhs = eval_const(lhs, ctx)?;
+            let rhs = eval_const(rhs, ctx)?;
+            let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
+            let v1 = match lhs {
+                ComputedExpr::Literal(Literal::Int(v, _)) => v,
+                ComputedExpr::Literal(Literal::Uint(v, _)) => {
+                    v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))?
+                }
+                _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+            };
+            let v2 = match rhs {
+                ComputedExpr::Literal(Literal::Int(v, _)) => v,
+                ComputedExpr::Literal(Literal::Uint(v, _)) => {
+                    v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))?
+                }
+                _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+            };
+            match op {
+                BinaryOp::ArithOp(b) => {
+                    let panic_arith = ConstEvalError::Panic(
+                        "attempt to run invalid arithmetic operation".to_string(),
+                    );
+                    // Checked operations map overflow/div-by-zero to a Panic
+                    // error, mirroring runtime behavior.
+                    let r = match b {
+                        ArithOp::Add => v1.checked_add(v2).ok_or_else(|| panic_arith.clone())?,
+                        ArithOp::Mul => v1.checked_mul(v2).ok_or_else(|| panic_arith.clone())?,
+                        ArithOp::Sub => v1.checked_sub(v2).ok_or_else(|| panic_arith.clone())?,
+                        ArithOp::Div => v1.checked_div(v2).ok_or_else(|| panic_arith.clone())?,
+                        ArithOp::Rem => v1.checked_rem(v2).ok_or_else(|| panic_arith.clone())?,
+                        ArithOp::Shl => v1
+                            .checked_shl(v2.try_into().map_err(|_| panic_arith.clone())?)
+                            .ok_or_else(|| panic_arith.clone())?,
+                        ArithOp::Shr => v1
+                            .checked_shr(v2.try_into().map_err(|_| panic_arith.clone())?)
+                            .ok_or_else(|| panic_arith.clone())?,
+                        ArithOp::BitXor => v1 ^ v2,
+                        ArithOp::BitOr => v1 | v2,
+                        ArithOp::BitAnd => v1 & v2,
+                    };
+                    // Reject results that overflow the inferred scalar type.
+                    if let TyKind::Scalar(s) = ty.kind(Interner) {
+                        if !is_valid(s, r) {
+                            return Err(panic_arith);
+                        }
+                    }
+                    Ok(ComputedExpr::Literal(Literal::Int(r, None)))
+                }
+                BinaryOp::LogicOp(_) => Err(ConstEvalError::SemanticError("logic op on numbers")),
+                _ => Err(ConstEvalError::NotSupported("bin op on this operators")),
+            }
+        }
+        Expr::Block { statements, tail, .. } => {
+            // Remember shadowed bindings so they can be restored afterwards.
+            let mut prev_values = HashMap::<PatId, Option<ComputedExpr>>::default();
+            for statement in &**statements {
+                match *statement {
+                    hir_def::expr::Statement::Let { pat: pat_id, initializer, .. } => {
+                        let pat = &ctx.pats[pat_id];
+                        match pat {
+                            Pat::Bind { subpat, .. } if subpat.is_none() => (),
+                            _ => {
+                                return Err(ConstEvalError::NotSupported("complex patterns in let"))
+                            }
+                        };
+                        let value = match initializer {
+                            Some(x) => eval_const(x, ctx)?,
+                            None => continue,
+                        };
+                        if !prev_values.contains_key(&pat_id) {
+                            let prev = ctx.local_data.insert(pat_id, value);
+                            prev_values.insert(pat_id, prev);
+                        } else {
+                            ctx.local_data.insert(pat_id, value);
+                        }
+                    }
+                    hir_def::expr::Statement::Expr { .. } => {
+                        return Err(ConstEvalError::NotSupported("this kind of statement"))
+                    }
+                }
+            }
+            // A tail-less block evaluates to the unit value, represented here
+            // as an empty tuple.
+            let r = match tail {
+                &Some(x) => eval_const(x, ctx),
+                None => Ok(ComputedExpr::Tuple(Box::new([]))),
+            };
+            // Restore the previous bindings, so the caller sees exactly the
+            // map it passed in.
+            for (name, val) in prev_values {
+                match val {
+                    Some(x) => ctx.local_data.insert(name, x),
+                    None => ctx.local_data.remove(&name),
+                };
+            }
+            r
+        }
+        Expr::Path(p) => {
+            let resolver = resolver_for_expr(ctx.db.upcast(), ctx.owner, expr_id);
+            let pr = resolver
+                .resolve_path_in_value_ns(ctx.db.upcast(), p.mod_path())
+                .ok_or(ConstEvalError::SemanticError("unresolved path"))?;
+            let pr = match pr {
+                ResolveValueResult::ValueNs(v) => v,
+                // Partially resolved path: the remainder may name an
+                // associated item (e.g. `Ty::CONST`) recorded during inference.
+                ResolveValueResult::Partial(..) => {
+                    return match ctx
+                        .infer
+                        .assoc_resolutions_for_expr(expr_id)
+                        .ok_or(ConstEvalError::SemanticError("unresolved assoc item"))?
+                    {
+                        hir_def::AssocItemId::FunctionId(_) => {
+                            Err(ConstEvalError::NotSupported("assoc function"))
+                        }
+                        hir_def::AssocItemId::ConstId(c) => ctx.db.const_eval(c),
+                        hir_def::AssocItemId::TypeAliasId(_) => {
+                            Err(ConstEvalError::NotSupported("assoc type alias"))
+                        }
+                    }
+                }
+            };
+            match pr {
+                ValueNs::LocalBinding(pat_id) => {
+                    let r = ctx
+                        .local_data
+                        .get(&pat_id)
+                        .ok_or(ConstEvalError::NotSupported("Unexpected missing local"))?;
+                    Ok(r.clone())
+                }
+                ValueNs::ConstId(id) => ctx.db.const_eval(id),
+                ValueNs::GenericParam(_) => {
+                    Err(ConstEvalError::NotSupported("const generic without substitution"))
+                }
+                _ => Err(ConstEvalError::NotSupported("path that are not const or local")),
+            }
+        }
+        _ => Err(ConstEvalError::NotSupported("This kind of expression")),
+    }
+}
+
+/// Resolves `path` in the value namespace and, when it names a const generic
+/// parameter, lowers that parameter to a `Const` according to `mode`.
+/// Returns `None` for any path that does not resolve to a generic parameter.
+pub(crate) fn path_to_const(
+    db: &dyn HirDatabase,
+    resolver: &Resolver,
+    path: &ModPath,
+    mode: ParamLoweringMode,
+    args_lazy: impl FnOnce() -> Generics,
+    debruijn: DebruijnIndex,
+) -> Option<Const> {
+    match resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+        Some(ValueNs::GenericParam(p)) => {
+            let ty = db.const_param_ty(p);
+            // The generics list is only computed when actually needed,
+            // hence the `FnOnce` argument.
+            let args = args_lazy();
+            let value = match mode {
+                ParamLoweringMode::Placeholder => {
+                    ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
+                }
+                // In variable mode the parameter becomes a bound variable at
+                // the given DeBruijn depth, indexed by its position in the
+                // generic parameter list.
+                ParamLoweringMode::Variable => match args.param_idx(p.into()) {
+                    Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
+                    None => {
+                        // The parameter should always be present in the list;
+                        // report the inconsistency and bail instead of panicking.
+                        never!(
+                            "Generic list doesn't contain this param: {:?}, {}, {:?}",
+                            args,
+                            path,
+                            p
+                        );
+                        return None;
+                    }
+                },
+            };
+            Some(ConstData { ty, value }.intern(Interner))
+        }
+        _ => None,
+    }
+}
+
+/// Builds a `Const` of type `ty` whose value is unknown
+/// (`ConstScalar::Unknown` wrapped as a chalk `ConcreteConst`).
+pub fn unknown_const(ty: Ty) -> Const {
+    ConstData {
+        ty,
+        value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: ConstScalar::Unknown }),
+    }
+    .intern(Interner)
+}
+
+/// Like [`unknown_const`], but wrapped as a `GenericArg` for use in
+/// substitution lists.
+pub fn unknown_const_as_generic(ty: Ty) -> GenericArg {
+    GenericArgData::Const(unknown_const(ty)).intern(Interner)
+}
+
+/// Interns a constant scalar with the given type
+/// (the scalar is stored as a chalk `ConcreteConst`).
+pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
+    ConstData { ty, value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: value }) }
+        .intern(Interner)
+}
+
+/// Interns a possibly-unknown target usize:
+/// `None` maps to `ConstScalar::Unknown`, `Some(v)` to an unsigned scalar.
+pub fn usize_const(value: Option<u128>) -> Const {
+    intern_const_scalar(value.map_or(ConstScalar::Unknown, ConstScalar::UInt), TyBuilder::usize())
+}
+
+/// Cycle-recovery function for the `const_eval` salsa query: a cyclic const
+/// definition (e.g. `const A: i32 = B; const B: i32 = A;`) evaluates to
+/// `ConstEvalError::Loop` instead of diverging.
+pub(crate) fn const_eval_recover(
+    _: &dyn HirDatabase,
+    _: &[String],
+    _: &ConstId,
+) -> Result<ComputedExpr, ConstEvalError> {
+    Err(ConstEvalError::Loop)
+}
+
+/// Salsa query implementation: evaluates the body expression of `const_id`
+/// with an empty set of local bindings, using the stored inference result
+/// for type information.
+pub(crate) fn const_eval_query(
+    db: &dyn HirDatabase,
+    const_id: ConstId,
+) -> Result<ComputedExpr, ConstEvalError> {
+    let def = const_id.into();
+    let body = db.body(def);
+    let infer = &db.infer(def);
+    let result = eval_const(
+        body.body_expr,
+        &mut ConstEvalCtx {
+            db,
+            owner: const_id.into(),
+            exprs: &body.exprs,
+            pats: &body.pats,
+            // Consts have no parameters, so evaluation starts with no locals.
+            local_data: HashMap::default(),
+            infer,
+        },
+    );
+    result
+}
+
+/// Evaluates `expr` to a `Const` during inference.
+///
+/// Fast path: if the expression is a plain path naming a const generic
+/// parameter, it is lowered directly via [`path_to_const`]. Otherwise the
+/// full const evaluator runs, and anything that does not produce a literal
+/// becomes `ConstScalar::Unknown`.
+///
+/// NOTE(review): the resulting scalar is always interned with `usize` type,
+/// regardless of the expression's inferred type.
+pub(crate) fn eval_to_const<'a>(
+    expr: Idx<Expr>,
+    mode: ParamLoweringMode,
+    ctx: &mut InferenceContext<'a>,
+    args: impl FnOnce() -> Generics,
+    debruijn: DebruijnIndex,
+) -> Const {
+    if let Expr::Path(p) = &ctx.body.exprs[expr] {
+        let db = ctx.db;
+        let resolver = &ctx.resolver;
+        if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) {
+            return c;
+        }
+    }
+    // Clone the body so the evaluator can borrow its arenas independently of
+    // the inference context.
+    let body = ctx.body.clone();
+    let mut ctx = ConstEvalCtx {
+        db: ctx.db,
+        owner: ctx.owner,
+        exprs: &body.exprs,
+        pats: &body.pats,
+        local_data: HashMap::default(),
+        infer: &ctx.result,
+    };
+    let computed_expr = eval_const(expr, &mut ctx);
+    let const_scalar = match computed_expr {
+        Ok(ComputedExpr::Literal(literal)) => literal.into(),
+        _ => ConstScalar::Unknown,
+    };
+    intern_const_scalar(const_scalar, TyBuilder::usize())
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
new file mode 100644
index 000000000..4a052851a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -0,0 +1,148 @@
+use base_db::fixture::WithFixture;
+use hir_def::{db::DefDatabase, expr::Literal};
+
+use crate::{consteval::ComputedExpr, db::HirDatabase, test_db::TestDB};
+
+use super::ConstEvalError;
+
+/// Asserts that evaluating the fixture's `GOAL` const fails with exactly `error`.
+fn check_fail(ra_fixture: &str, error: ConstEvalError) {
+    assert_eq!(eval_goal(ra_fixture), Err(error));
+}
+
+/// Asserts that the fixture's `GOAL` const evaluates to the integer `answer`.
+/// Both signed and unsigned literal results are accepted (unsigned results
+/// are compared against `answer as u128`).
+fn check_number(ra_fixture: &str, answer: i128) {
+    let r = eval_goal(ra_fixture).unwrap();
+    match r {
+        ComputedExpr::Literal(Literal::Int(r, _)) => assert_eq!(r, answer),
+        ComputedExpr::Literal(Literal::Uint(r, _)) => assert_eq!(r, answer as u128),
+        x => panic!("Expected number but found {:?}", x),
+    }
+}
+
+/// Builds a test database from the fixture, locates the const named `GOAL`
+/// in the file's module scope, and runs the `const_eval` query on it.
+/// Panics if no const named `GOAL` exists in the fixture.
+fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
+    let (db, file_id) = TestDB::with_single_file(ra_fixture);
+    let module_id = db.module_for_file(file_id);
+    let def_map = module_id.def_map(&db);
+    let scope = &def_map[module_id.local_id].scope;
+    let const_id = scope
+        .declarations()
+        .into_iter()
+        .find_map(|x| match x {
+            hir_def::ModuleDefId::ConstId(x) => {
+                // Unnamed consts (`const _: () = ...`) yield `None` here via `?`.
+                if db.const_data(x).name.as_ref()?.to_string() == "GOAL" {
+                    Some(x)
+                } else {
+                    None
+                }
+            }
+            _ => None,
+        })
+        .unwrap();
+    db.const_eval(const_id)
+}
+
+#[test]
+fn add() {
+    // Smoke test: plain integer addition.
+    check_number(r#"const GOAL: usize = 2 + 2;"#, 4);
+}
+
+#[test]
+fn bit_op() {
+    // Shift-right behaves differently for unsigned (logical) and signed
+    // (arithmetic) operands.
+    check_number(r#"const GOAL: u8 = !0 & !(!0 >> 1)"#, 128);
+    check_number(r#"const GOAL: i8 = !0 & !(!0 >> 1)"#, 0);
+    // FIXME: rustc evaluate this to -128
+    check_fail(
+        r#"const GOAL: i8 = 1 << 7"#,
+        ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
+    );
+    // Shifting past the type's width is always an overflow panic.
+    check_fail(
+        r#"const GOAL: i8 = 1 << 8"#,
+        ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
+    );
+}
+
+#[test]
+fn locals() {
+    // Simple `let` bindings inside a const block are tracked and reusable.
+    check_number(
+        r#"
+    const GOAL: usize = {
+        let a = 3 + 2;
+        let b = a * a;
+        b
+    };
+    "#,
+        25,
+    );
+}
+
+#[test]
+fn consts() {
+    // Consts may reference other consts, in any declaration order.
+    check_number(
+        r#"
+    const F1: i32 = 1;
+    const F3: i32 = 3 * F2;
+    const F2: i32 = 2 * F1;
+    const GOAL: i32 = F3;
+    "#,
+        6,
+    );
+}
+
+#[test]
+fn const_loop() {
+    // A cyclic const dependency is caught by salsa cycle recovery and
+    // reported as `ConstEvalError::Loop` (see `const_eval_recover`).
+    check_fail(
+        r#"
+    const F1: i32 = 1 * F3;
+    const F3: i32 = 3 * F2;
+    const F2: i32 = 2 * F1;
+    const GOAL: i32 = F3;
+    "#,
+        ConstEvalError::Loop,
+    );
+}
+
+#[test]
+fn const_impl_assoc() {
+    // Associated consts from inherent impls are resolved and evaluated.
+    check_number(
+        r#"
+    struct U5;
+    impl U5 {
+        const VAL: usize = 5;
+    }
+    const GOAL: usize = U5::VAL;
+    "#,
+        5,
+    );
+}
+
+#[test]
+fn const_generic_subst() {
+    // FIXME: this should evaluate to 5
+    // Substituting concrete values for const generics is not implemented yet,
+    // so the evaluator reports `NotSupported`.
+    check_fail(
+        r#"
+    struct Adder<const N: usize, const M: usize>;
+    impl<const N: usize, const M: usize> Adder<N, M> {
+        const VAL: usize = N + M;
+    }
+    const GOAL: usize = Adder::<2, 3>::VAL;
+    "#,
+        ConstEvalError::NotSupported("const generic without substitution"),
+    );
+}
+
+#[test]
+fn const_trait_assoc() {
+    // FIXME: this should evaluate to 0
+    // Trait-associated consts are not resolved through impls yet.
+    check_fail(
+        r#"
+    struct U0;
+    trait ToConst {
+        const VAL: usize;
+    }
+    impl ToConst for U0 {
+        const VAL: usize = 0;
+    }
+    const GOAL: usize = U0::VAL;
+    "#,
+        ConstEvalError::IncompleteExpr,
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
new file mode 100644
index 000000000..b385b1caf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -0,0 +1,225 @@
+//! The home of `HirDatabase`, which is the Salsa database containing all the
+//! type inference-related queries.
+
+use std::sync::Arc;
+
+use arrayvec::ArrayVec;
+use base_db::{impl_intern_key, salsa, CrateId, Upcast};
+use hir_def::{
+ db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, FunctionId,
+ GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId,
+};
+use la_arena::ArenaMap;
+
+use crate::{
+ chalk_db,
+ consteval::{ComputedExpr, ConstEvalError},
+ method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
+ Binders, CallableDefId, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner, PolyFnSig,
+ QuantifiedWhereClause, ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId,
+};
+use hir_expand::name::Name;
+
+#[salsa::query_group(HirDatabaseStorage)]
+pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
+    /// Profiling wrapper around `infer_query` (see `infer_wait`);
+    /// `transparent` keeps the wrapper itself out of salsa's storage.
+    #[salsa::invoke(infer_wait)]
+    #[salsa::transparent]
+    fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+
+    #[salsa::invoke(crate::infer::infer_query)]
+    fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+
+    // Type-lowering queries; the `cycle` attributes install recovery
+    // functions so cyclic definitions produce errors instead of diverging.
+    #[salsa::invoke(crate::lower::ty_query)]
+    #[salsa::cycle(crate::lower::ty_recover)]
+    fn ty(&self, def: TyDefId) -> Binders<Ty>;
+
+    #[salsa::invoke(crate::lower::value_ty_query)]
+    fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>;
+
+    #[salsa::invoke(crate::lower::impl_self_ty_query)]
+    #[salsa::cycle(crate::lower::impl_self_ty_recover)]
+    fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
+
+    #[salsa::invoke(crate::lower::const_param_ty_query)]
+    fn const_param_ty(&self, def: ConstParamId) -> Ty;
+
+    /// Const evaluation; cyclic consts recover to `ConstEvalError::Loop`.
+    #[salsa::invoke(crate::consteval::const_eval_query)]
+    #[salsa::cycle(crate::consteval::const_eval_recover)]
+    fn const_eval(&self, def: ConstId) -> Result<ComputedExpr, ConstEvalError>;
+
+    #[salsa::invoke(crate::lower::impl_trait_query)]
+    fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
+
+    #[salsa::invoke(crate::lower::field_types_query)]
+    fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
+
+    #[salsa::invoke(crate::lower::callable_item_sig)]
+    fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
+
+    #[salsa::invoke(crate::lower::return_type_impl_traits)]
+    fn return_type_impl_traits(
+        &self,
+        def: FunctionId,
+    ) -> Option<Arc<Binders<ReturnTypeImplTraits>>>;
+
+    #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
+    #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
+    fn generic_predicates_for_param(
+        &self,
+        def: GenericDefId,
+        param_id: TypeOrConstParamId,
+        assoc_name: Option<Name>,
+    ) -> Arc<[Binders<QuantifiedWhereClause>]>;
+
+    #[salsa::invoke(crate::lower::generic_predicates_query)]
+    fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<QuantifiedWhereClause>]>;
+
+    #[salsa::invoke(crate::lower::trait_environment_query)]
+    fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>;
+
+    #[salsa::invoke(crate::lower::generic_defaults_query)]
+    #[salsa::cycle(crate::lower::generic_defaults_recover)]
+    fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<GenericArg>]>;
+
+    // Method-resolution impl indices, per crate and per block scope.
+    #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
+    fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
+
+    #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
+    fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
+
+    /// Collects all crates in the dependency graph that have impls for the
+    /// given fingerprint. This is only used for primitive types; for
+    /// user-defined types we just look at the crate where the type is defined.
+    #[salsa::invoke(crate::method_resolution::inherent_impl_crates_query)]
+    fn inherent_impl_crates(&self, krate: CrateId, fp: TyFingerprint) -> ArrayVec<CrateId, 2>;
+
+    #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
+    fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
+
+    #[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
+    fn trait_impls_in_block(&self, krate: BlockId) -> Option<Arc<TraitImpls>>;
+
+    #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
+    fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
+
+    // Interned IDs for Chalk integration
+    #[salsa::interned]
+    fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
+    #[salsa::interned]
+    fn intern_type_or_const_param_id(
+        &self,
+        param_id: TypeOrConstParamId,
+    ) -> InternedTypeOrConstParamId;
+    #[salsa::interned]
+    fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
+    #[salsa::interned]
+    fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
+    #[salsa::interned]
+    fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> InternedClosureId;
+
+    // Queries producing Chalk's input data (datums, variances, clauses).
+    #[salsa::invoke(chalk_db::associated_ty_data_query)]
+    fn associated_ty_data(&self, id: chalk_db::AssocTypeId) -> Arc<chalk_db::AssociatedTyDatum>;
+
+    #[salsa::invoke(chalk_db::trait_datum_query)]
+    fn trait_datum(&self, krate: CrateId, trait_id: chalk_db::TraitId)
+        -> Arc<chalk_db::TraitDatum>;
+
+    #[salsa::invoke(chalk_db::struct_datum_query)]
+    fn struct_datum(
+        &self,
+        krate: CrateId,
+        struct_id: chalk_db::AdtId,
+    ) -> Arc<chalk_db::StructDatum>;
+
+    #[salsa::invoke(chalk_db::impl_datum_query)]
+    fn impl_datum(&self, krate: CrateId, impl_id: chalk_db::ImplId) -> Arc<chalk_db::ImplDatum>;
+
+    #[salsa::invoke(chalk_db::fn_def_datum_query)]
+    fn fn_def_datum(&self, krate: CrateId, fn_def_id: FnDefId) -> Arc<chalk_db::FnDefDatum>;
+
+    #[salsa::invoke(chalk_db::fn_def_variance_query)]
+    fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
+
+    #[salsa::invoke(chalk_db::adt_variance_query)]
+    fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances;
+
+    #[salsa::invoke(chalk_db::associated_ty_value_query)]
+    fn associated_ty_value(
+        &self,
+        krate: CrateId,
+        id: chalk_db::AssociatedTyValueId,
+    ) -> Arc<chalk_db::AssociatedTyValue>;
+
+    /// Profiling wrapper around `trait_solve_query` (see `trait_solve_wait`).
+    #[salsa::invoke(trait_solve_wait)]
+    #[salsa::transparent]
+    fn trait_solve(
+        &self,
+        krate: CrateId,
+        goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+    ) -> Option<crate::Solution>;
+
+    #[salsa::invoke(crate::traits::trait_solve_query)]
+    fn trait_solve_query(
+        &self,
+        krate: CrateId,
+        goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+    ) -> Option<crate::Solution>;
+
+    #[salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
+    fn program_clauses_for_chalk_env(
+        &self,
+        krate: CrateId,
+        env: chalk_ir::Environment<Interner>,
+    ) -> chalk_ir::ProgramClauses<Interner>;
+}
+
+/// Transparent wrapper for the `infer` query: records a profiling span whose
+/// detail is the name of the item being inferred, then delegates to
+/// `infer_query`.
+fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
+    let _p = profile::span("infer:wait").detail(|| match def {
+        DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
+        DefWithBodyId::StaticId(it) => db.static_data(it).name.clone().to_string(),
+        // Consts may be unnamed (`const _: () = ...`).
+        DefWithBodyId::ConstId(it) => {
+            db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
+        }
+    });
+    db.infer_query(def)
+}
+
+/// Transparent wrapper for the `trait_solve` query: records a profiling span
+/// and delegates to `trait_solve_query`.
+fn trait_solve_wait(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+) -> Option<crate::Solution> {
+    let _p = profile::span("trait_solve::wait");
+    db.trait_solve_query(krate, goal)
+}
+
+#[test]
+fn hir_database_is_object_safe() {
+    // Compile-time check: `HirDatabase` must stay usable as a trait object.
+    fn _assert_object_safe(_: &dyn HirDatabase) {}
+}
+
+// Newtype wrappers around `salsa::InternId` used as keys for the
+// `#[salsa::interned]` queries on `HirDatabase`; `impl_intern_key!` wires
+// each one up to salsa's interning machinery.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedTypeOrConstParamId(salsa::InternId);
+impl_intern_key!(InternedTypeOrConstParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedLifetimeParamId(salsa::InternId);
+impl_intern_key!(InternedLifetimeParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedConstParamId(salsa::InternId);
+impl_intern_key!(InternedConstParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedOpaqueTyId(salsa::InternId);
+impl_intern_key!(InternedOpaqueTyId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedClosureId(salsa::InternId);
+impl_intern_key!(InternedClosureId);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
new file mode 100644
index 000000000..37eb06be1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -0,0 +1,13 @@
+//! Type inference-based diagnostics.
+mod expr;
+mod match_check;
+mod unsafe_check;
+mod decl_check;
+
+// Re-export the public diagnostic entry points from the submodules.
+pub use crate::diagnostics::{
+    decl_check::{incorrect_case, IncorrectCase},
+    expr::{
+        record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic,
+    },
+    unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
+};
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
new file mode 100644
index 000000000..f7031a854
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -0,0 +1,701 @@
+//! Provides validators for names of declarations.
+//!
+//! This includes the following items:
+//!
+//! - variable bindings (e.g. `let x = foo();`)
+//! - struct fields (e.g. `struct Foo { field: u8 }`)
+//! - enum variants (e.g. `enum Foo { Variant { field: u8 } }`)
+//! - function/method arguments (e.g. `fn foo(arg: u8)`)
+//! - constants (e.g. `const FOO: u8 = 10;`)
+//! - static items (e.g. `static FOO: u8 = 10;`)
+//! - match arm bindings (e.g. `foo @ Some(_)`)
+
+mod case_conv;
+
+use std::fmt;
+
+use base_db::CrateId;
+use hir_def::{
+ adt::VariantData,
+ expr::{Pat, PatId},
+ src::HasSource,
+ AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, StaticId,
+ StructId,
+};
+use hir_expand::{
+ name::{AsName, Name},
+ HirFileId,
+};
+use stdx::{always, never};
+use syntax::{
+ ast::{self, HasName},
+ AstNode, AstPtr,
+};
+
+use crate::db::HirDatabase;
+
+use self::case_conv::{to_camel_case, to_lower_snake_case, to_upper_snake_case};
+
+// Lint names recognized inside `#[allow(...)]` attributes; matched by
+// substring in `DeclValidator::allowed`. The first two are umbrella lints
+// that silence all style checks.
+mod allow {
+    pub(super) const BAD_STYLE: &str = "bad_style";
+    pub(super) const NONSTANDARD_STYLE: &str = "nonstandard_style";
+    pub(super) const NON_SNAKE_CASE: &str = "non_snake_case";
+    pub(super) const NON_UPPER_CASE_GLOBAL: &str = "non_upper_case_globals";
+    pub(super) const NON_CAMEL_CASE_TYPES: &str = "non_camel_case_types";
+}
+
+/// Entry point: validates the naming convention of `owner` (and, for
+/// functions, everything declared in its body) and returns all diagnostics
+/// collected by the validator.
+pub fn incorrect_case(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    owner: ModuleDefId,
+) -> Vec<IncorrectCase> {
+    let _p = profile::span("validate_module_item");
+    let mut validator = DeclValidator::new(db, krate);
+    validator.validate_item(owner);
+    validator.sink
+}
+
+/// The naming convention expected for an identifier.
+#[derive(Debug)]
+pub enum CaseType {
+    // `some_var`
+    LowerSnakeCase,
+    // `SOME_CONST`
+    UpperSnakeCase,
+    // `SomeStruct`
+    UpperCamelCase,
+}
+
+// User-facing name of each convention, as shown in diagnostics.
+impl fmt::Display for CaseType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let repr = match self {
+            CaseType::LowerSnakeCase => "snake_case",
+            CaseType::UpperSnakeCase => "UPPER_SNAKE_CASE",
+            CaseType::UpperCamelCase => "CamelCase",
+        };
+
+        repr.fmt(f)
+    }
+}
+
+/// The kind of declaration an identifier belongs to; used to phrase the
+/// diagnostic message.
+#[derive(Debug)]
+pub enum IdentType {
+    Constant,
+    Enum,
+    Field,
+    Function,
+    Parameter,
+    StaticVariable,
+    Structure,
+    Variable,
+    Variant,
+}
+
+// User-facing name of each declaration kind, as shown in diagnostics.
+impl fmt::Display for IdentType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let repr = match self {
+            IdentType::Constant => "Constant",
+            IdentType::Enum => "Enum",
+            IdentType::Field => "Field",
+            IdentType::Function => "Function",
+            IdentType::Parameter => "Parameter",
+            IdentType::StaticVariable => "Static variable",
+            IdentType::Structure => "Structure",
+            IdentType::Variable => "Variable",
+            IdentType::Variant => "Variant",
+        };
+
+        repr.fmt(f)
+    }
+}
+
+/// A single naming-convention diagnostic: where the offending identifier is,
+/// what it currently looks like, and what it should be renamed to.
+#[derive(Debug)]
+pub struct IncorrectCase {
+    // File containing the identifier and a pointer to its `ast::Name` node.
+    pub file: HirFileId,
+    pub ident: AstPtr<ast::Name>,
+    pub expected_case: CaseType,
+    pub ident_type: IdentType,
+    pub ident_text: String,
+    pub suggested_text: String,
+}
+
+/// Walks declarations and accumulates naming-convention diagnostics in `sink`.
+pub(super) struct DeclValidator<'a> {
+    db: &'a dyn HirDatabase,
+    krate: CrateId,
+    pub(super) sink: Vec<IncorrectCase>,
+}
+
+/// A suggested rename for one identifier, before it has been mapped back to
+/// a source location.
+#[derive(Debug)]
+struct Replacement {
+    current_name: Name,
+    suggested_text: String,
+    expected_case: CaseType,
+}
+
+impl<'a> DeclValidator<'a> {
+    /// Creates a validator with an empty diagnostics sink.
+    pub(super) fn new(db: &'a dyn HirDatabase, krate: CrateId) -> DeclValidator<'a> {
+        DeclValidator { db, krate, sink: Vec::new() }
+    }
+
+    /// Dispatches to the per-kind validator; item kinds without naming rules
+    /// implemented here are silently ignored.
+    pub(super) fn validate_item(&mut self, item: ModuleDefId) {
+        match item {
+            ModuleDefId::FunctionId(func) => self.validate_func(func),
+            ModuleDefId::AdtId(adt) => self.validate_adt(adt),
+            ModuleDefId::ConstId(const_id) => self.validate_const(const_id),
+            ModuleDefId::StaticId(static_id) => self.validate_static(static_id),
+            _ => (),
+        }
+    }
+
+    /// Validates struct and enum names (and their fields/variants).
+    fn validate_adt(&mut self, adt: AdtId) {
+        match adt {
+            AdtId::StructId(struct_id) => self.validate_struct(struct_id),
+            AdtId::EnumId(enum_id) => self.validate_enum(enum_id),
+            AdtId::UnionId(_) => {
+                // FIXME: Unions aren't yet supported by this validator.
+            }
+        }
+    }
+
+    /// Checks whether not following the convention is allowed for this item.
+    ///
+    /// Looks for a matching `#[allow(...)]` (or umbrella style lint) on the
+    /// item itself, then walks up its containers recursively.
+    fn allowed(&self, id: AttrDefId, allow_name: &str, recursing: bool) -> bool {
+        let is_allowed = |def_id| {
+            let attrs = self.db.attrs(def_id);
+            // don't bug the user about directly no_mangle annotated stuff, they can't do anything about it
+            (!recursing && attrs.by_key("no_mangle").exists())
+                || attrs.by_key("allow").tt_values().any(|tt| {
+                    // NOTE: substring match on the attribute's token tree, not
+                    // an exact lint-name comparison.
+                    let allows = tt.to_string();
+                    allows.contains(allow_name)
+                        || allows.contains(allow::BAD_STYLE)
+                        || allows.contains(allow::NONSTANDARD_STYLE)
+                })
+        };
+
+        is_allowed(id)
+            // go upwards one step or give up
+            || match id {
+                AttrDefId::ModuleId(m) => m.containing_module(self.db.upcast()).map(|v| v.into()),
+                AttrDefId::FunctionId(f) => Some(f.lookup(self.db.upcast()).container.into()),
+                AttrDefId::StaticId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
+                AttrDefId::ConstId(cid) => Some(cid.lookup(self.db.upcast()).container.into()),
+                AttrDefId::TraitId(tid) => Some(tid.lookup(self.db.upcast()).container.into()),
+                AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
+                AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
+                // These warnings should not explore macro definitions at all
+                AttrDefId::MacroId(_) => None,
+                AttrDefId::AdtId(aid) => match aid {
+                    AdtId::StructId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
+                    AdtId::EnumId(eid) => Some(eid.lookup(self.db.upcast()).container.into()),
+                    // Unions aren't yet supported
+                    AdtId::UnionId(_) => None,
+                },
+                AttrDefId::FieldId(_) => None,
+                AttrDefId::EnumVariantId(_) => None,
+                AttrDefId::TypeAliasId(_) => None,
+                AttrDefId::GenericParamId(_) => None,
+            }
+            .map(|mid| self.allowed(mid, allow_name, true))
+            .unwrap_or(false)
+    }
+
+    /// Validates a function's name and every binding pattern in its body
+    /// (including parameters) against `snake_case`.
+    fn validate_func(&mut self, func: FunctionId) {
+        let data = self.db.function_data(func);
+        // Functions declared inside `extern` blocks are skipped entirely.
+        if matches!(func.lookup(self.db.upcast()).container, ItemContainerId::ExternBlockId(_)) {
+            cov_mark::hit!(extern_func_incorrect_case_ignored);
+            return;
+        }
+
+        let body = self.db.body(func.into());
+
+        // Recursively validate inner scope items, such as static variables and constants.
+        for (_, block_def_map) in body.blocks(self.db.upcast()) {
+            for (_, module) in block_def_map.modules() {
+                for def_id in module.scope.declarations() {
+                    let mut validator = DeclValidator::new(self.db, self.krate);
+                    validator.validate_item(def_id);
+                }
+            }
+        }
+
+        // Check whether non-snake case identifiers are allowed for this function.
+        if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
+            return;
+        }
+
+        // Check the function name.
+        let function_name = data.name.to_string();
+        // `to_lower_snake_case` returns `None` when the name already conforms.
+        let fn_name_replacement = to_lower_snake_case(&function_name).map(|new_name| Replacement {
+            current_name: data.name.clone(),
+            suggested_text: new_name,
+            expected_case: CaseType::LowerSnakeCase,
+        });
+
+        // Check the patterns inside the function body.
+        // This includes function parameters.
+        let pats_replacements = body
+            .pats
+            .iter()
+            .filter_map(|(id, pat)| match pat {
+                Pat::Bind { name, .. } => Some((id, name)),
+                _ => None,
+            })
+            .filter_map(|(id, bind_name)| {
+                Some((
+                    id,
+                    Replacement {
+                        current_name: bind_name.clone(),
+                        suggested_text: to_lower_snake_case(&bind_name.to_string())?,
+                        expected_case: CaseType::LowerSnakeCase,
+                    },
+                ))
+            })
+            .collect();
+
+        // If there is at least one element to spawn a warning on, go to the source map and generate a warning.
+        if let Some(fn_name_replacement) = fn_name_replacement {
+            self.create_incorrect_case_diagnostic_for_func(func, fn_name_replacement);
+        }
+
+        self.create_incorrect_case_diagnostic_for_variables(func, pats_replacements);
+    }
+
+    /// Given the information about incorrect names in the function declaration, looks up into the source code
+    /// for exact locations and adds diagnostics into the sink.
+    fn create_incorrect_case_diagnostic_for_func(
+        &mut self,
+        func: FunctionId,
+        fn_name_replacement: Replacement,
+    ) {
+        let fn_loc = func.lookup(self.db.upcast());
+        let fn_src = fn_loc.source(self.db.upcast());
+
+        // Diagnostic for function name.
+        let ast_ptr = match fn_src.value.name() {
+            Some(name) => name,
+            None => {
+                // A replacement exists, so the HIR had a name; a nameless AST
+                // node here is an internal inconsistency — log and give up.
+                never!(
+                    "Replacement ({:?}) was generated for a function without a name: {:?}",
+                    fn_name_replacement,
+                    fn_src
+                );
+                return;
+            }
+        };
+
+        let diagnostic = IncorrectCase {
+            file: fn_src.file_id,
+            ident_type: IdentType::Function,
+            ident: AstPtr::new(&ast_ptr),
+            expected_case: fn_name_replacement.expected_case,
+            ident_text: fn_name_replacement.current_name.to_string(),
+            suggested_text: fn_name_replacement.suggested_text,
+        };
+
+        self.sink.push(diagnostic);
+    }
+
+    /// Given the information about incorrect variable names, looks up into the source code
+    /// for exact locations and adds diagnostics into the sink.
+    fn create_incorrect_case_diagnostic_for_variables(
+        &mut self,
+        func: FunctionId,
+        pats_replacements: Vec<(PatId, Replacement)>,
+    ) {
+        // XXX: only look at source_map if we do have missing fields
+        if pats_replacements.is_empty() {
+            return;
+        }
+
+        let (_, source_map) = self.db.body_with_source_map(func.into());
+
+        for (id, replacement) in pats_replacements {
+            if let Ok(source_ptr) = source_map.pat_syntax(id) {
+                // `left()` keeps only pattern pointers (the Either's other
+                // side is not a plain pattern).
+                if let Some(expr) = source_ptr.value.as_ref().left() {
+                    let root = source_ptr.file_syntax(self.db.upcast());
+                    if let ast::Pat::IdentPat(ident_pat) = expr.to_node(&root) {
+                        let parent = match ident_pat.syntax().parent() {
+                            Some(parent) => parent,
+                            None => continue,
+                        };
+                        let name_ast = match ident_pat.name() {
+                            Some(name_ast) => name_ast,
+                            None => continue,
+                        };
+
+                        let is_param = ast::Param::can_cast(parent.kind());
+
+                        // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
+                        // because e.g. match arms are patterns as well.
+                        // In other words, we check that it's a named variable binding.
+                        let is_binding = ast::LetStmt::can_cast(parent.kind())
+                            || (ast::MatchArm::can_cast(parent.kind())
+                                && ident_pat.at_token().is_some());
+                        if !(is_param || is_binding) {
+                            // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
+                            continue;
+                        }
+
+                        let ident_type =
+                            if is_param { IdentType::Parameter } else { IdentType::Variable };
+
+                        let diagnostic = IncorrectCase {
+                            file: source_ptr.file_id,
+                            ident_type,
+                            ident: AstPtr::new(&name_ast),
+                            expected_case: replacement.expected_case,
+                            ident_text: replacement.current_name.to_string(),
+                            suggested_text: replacement.suggested_text,
+                        };
+
+                        self.sink.push(diagnostic);
+                    }
+                }
+            }
+        }
+    }
+
+    /// Validates a struct's name against `CamelCase` and its record-field
+    /// names against `snake_case`, honoring the respective `#[allow]`s.
+    fn validate_struct(&mut self, struct_id: StructId) {
+        let data = self.db.struct_data(struct_id);
+
+        let non_camel_case_allowed =
+            self.allowed(struct_id.into(), allow::NON_CAMEL_CASE_TYPES, false);
+        let non_snake_case_allowed = self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false);
+
+        // Check the structure name.
+        let struct_name = data.name.to_string();
+        let struct_name_replacement = if !non_camel_case_allowed {
+            to_camel_case(&struct_name).map(|new_name| Replacement {
+                current_name: data.name.clone(),
+                suggested_text: new_name,
+                expected_case: CaseType::UpperCamelCase,
+            })
+        } else {
+            None
+        };
+
+        // Check the field names.
+        let mut struct_fields_replacements = Vec::new();
+
+        if !non_snake_case_allowed {
+            // Only record structs have named fields; tuple/unit structs have
+            // nothing to validate here.
+            if let VariantData::Record(fields) = data.variant_data.as_ref() {
+                for (_, field) in fields.iter() {
+                    let field_name = field.name.to_string();
+                    if let Some(new_name) = to_lower_snake_case(&field_name) {
+                        let replacement = Replacement {
+                            current_name: field.name.clone(),
+                            suggested_text: new_name,
+                            expected_case: CaseType::LowerSnakeCase,
+                        };
+                        struct_fields_replacements.push(replacement);
+                    }
+                }
+            }
+        }
+
+        // If there is at least one element to spawn a warning on, go to the source map and generate a warning.
+        self.create_incorrect_case_diagnostic_for_struct(
+            struct_id,
+            struct_name_replacement,
+            struct_fields_replacements,
+        );
+    }
+
+    /// Given the information about incorrect names in the struct declaration, looks up into the source code
+    /// for exact locations and adds diagnostics into the sink.
+    fn create_incorrect_case_diagnostic_for_struct(
+        &mut self,
+        struct_id: StructId,
+        struct_name_replacement: Option<Replacement>,
+        struct_fields_replacements: Vec<Replacement>,
+    ) {
+        // XXX: Only look at sources if we do have incorrect names.
+        if struct_name_replacement.is_none() && struct_fields_replacements.is_empty() {
+            return;
+        }
+
+        let struct_loc = struct_id.lookup(self.db.upcast());
+        let struct_src = struct_loc.source(self.db.upcast());
+
+        if let Some(replacement) = struct_name_replacement {
+            let ast_ptr = match struct_src.value.name() {
+                Some(name) => name,
+                None => {
+                    // HIR had a name but the AST node does not — internal
+                    // inconsistency, log and give up.
+                    never!(
+                        "Replacement ({:?}) was generated for a structure without a name: {:?}",
+                        replacement,
+                        struct_src
+                    );
+                    return;
+                }
+            };
+
+            let diagnostic = IncorrectCase {
+                file: struct_src.file_id,
+                ident_type: IdentType::Structure,
+                ident: AstPtr::new(&ast_ptr),
+                expected_case: replacement.expected_case,
+                ident_text: replacement.current_name.to_string(),
+                suggested_text: replacement.suggested_text,
+            };
+
+            self.sink.push(diagnostic);
+        }
+
+        let struct_fields_list = match struct_src.value.field_list() {
+            Some(ast::FieldList::RecordFieldList(fields)) => fields,
+            _ => {
+                always!(
+                    struct_fields_replacements.is_empty(),
+                    "Replacements ({:?}) were generated for a structure fields which had no fields list: {:?}",
+                    struct_fields_replacements,
+                    struct_src
+                );
+                return;
+            }
+        };
+        let mut struct_fields_iter = struct_fields_list.fields();
+        for field_to_rename in struct_fields_replacements {
+            // We assume that parameters in replacement are in the same order as in the
+            // actual params list, but just some of them (ones that named correctly) are skipped.
+            let ast_ptr = loop {
+                match struct_fields_iter.next().and_then(|field| field.name()) {
+                    Some(field_name) => {
+                        if field_name.as_name() == field_to_rename.current_name {
+                            break field_name;
+                        }
+                    }
+                    None => {
+                        never!(
+                            "Replacement ({:?}) was generated for a structure field which was not found: {:?}",
+                            field_to_rename, struct_src
+                        );
+                        return;
+                    }
+                }
+            };
+
+            let diagnostic = IncorrectCase {
+                file: struct_src.file_id,
+                ident_type: IdentType::Field,
+                ident: AstPtr::new(&ast_ptr),
+                expected_case: field_to_rename.expected_case,
+                ident_text: field_to_rename.current_name.to_string(),
+                suggested_text: field_to_rename.suggested_text,
+            };
+
+            self.sink.push(diagnostic);
+        }
+    }
+
+ /// Checks that an enum's name and all of its variant names are UpperCamelCase,
+ /// emitting `IncorrectCase` diagnostics (with suggested renames) for any that are not.
+ fn validate_enum(&mut self, enum_id: EnumId) {
+ let data = self.db.enum_data(enum_id);
+
+ // Check whether non-camel case names are allowed for this enum.
+ if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
+ return;
+ }
+
+ // Check the enum name.
+ let enum_name = data.name.to_string();
+ // `to_camel_case` returns `None` when the name is already correct, so
+ // `enum_name_replacement` is `Some` only if a rename is actually needed.
+ let enum_name_replacement = to_camel_case(&enum_name).map(|new_name| Replacement {
+ current_name: data.name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::UpperCamelCase,
+ });
+
+ // Check the variant names; correctly-cased variants are filtered out here.
+ let enum_fields_replacements = data
+ .variants
+ .iter()
+ .filter_map(|(_, variant)| {
+ Some(Replacement {
+ current_name: variant.name.clone(),
+ suggested_text: to_camel_case(&variant.name.to_string())?,
+ expected_case: CaseType::UpperCamelCase,
+ })
+ })
+ .collect();
+
+ // If there is at least one element to spawn a warning on, go to the source map and generate a warning.
+ self.create_incorrect_case_diagnostic_for_enum(
+ enum_id,
+ enum_name_replacement,
+ enum_fields_replacements,
+ )
+ }
+
+ /// Given the information about incorrect names in the enum declaration, looks up into the source code
+ /// for exact locations and adds diagnostics into the sink.
+ fn create_incorrect_case_diagnostic_for_enum(
+ &mut self,
+ enum_id: EnumId,
+ enum_name_replacement: Option<Replacement>,
+ enum_variants_replacements: Vec<Replacement>,
+ ) {
+ // XXX: only look at sources if we do have incorrect names
+ if enum_name_replacement.is_none() && enum_variants_replacements.is_empty() {
+ return;
+ }
+
+ let enum_loc = enum_id.lookup(self.db.upcast());
+ let enum_src = enum_loc.source(self.db.upcast());
+
+ // Report the enum name itself, if it needs renaming.
+ if let Some(replacement) = enum_name_replacement {
+ let ast_ptr = match enum_src.value.name() {
+ Some(name) => name,
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a enum without a name: {:?}",
+ replacement,
+ enum_src
+ );
+ return;
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: enum_src.file_id,
+ ident_type: IdentType::Enum,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.to_string(),
+ suggested_text: replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+
+ // Locate the variant list in the AST; if it is absent there must be no
+ // variant replacements either, otherwise the HIR and AST disagree.
+ let enum_variants_list = match enum_src.value.variant_list() {
+ Some(variants) => variants,
+ _ => {
+ always!(
+ enum_variants_replacements.is_empty(),
+ "Replacements ({:?}) were generated for a enum variants which had no fields list: {:?}",
+ enum_variants_replacements,
+ enum_src
+ );
+ return;
+ }
+ };
+ let mut enum_variants_iter = enum_variants_list.variants();
+ for variant_to_rename in enum_variants_replacements {
+ // We assume that parameters in replacement are in the same order as in the
+ // actual params list, but just some of them (ones that named correctly) are skipped.
+ let ast_ptr = loop {
+ match enum_variants_iter.next().and_then(|v| v.name()) {
+ Some(variant_name) => {
+ if variant_name.as_name() == variant_to_rename.current_name {
+ break variant_name;
+ }
+ }
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a enum variant which was not found: {:?}",
+ variant_to_rename, enum_src
+ );
+ return;
+ }
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: enum_src.file_id,
+ ident_type: IdentType::Variant,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: variant_to_rename.expected_case,
+ ident_text: variant_to_rename.current_name.to_string(),
+ suggested_text: variant_to_rename.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+ }
+
+ /// Checks that a `const` item's name is UPPER_SNAKE_CASE, emitting an
+ /// `IncorrectCase` diagnostic with a suggested rename otherwise.
+ fn validate_const(&mut self, const_id: ConstId) {
+ let data = self.db.const_data(const_id);
+
+ // Respect `#[allow(non_upper_case_globals)]` on the item or its ancestors.
+ if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
+ return;
+ }
+
+ // Unnamed constants (`const _: ... = ...;`) have nothing to lint.
+ let name = match &data.name {
+ Some(name) => name,
+ None => return,
+ };
+
+ let const_name = name.to_string();
+ // `to_upper_snake_case` returns `None` when the name is already correct.
+ let replacement = if let Some(new_name) = to_upper_snake_case(&const_name) {
+ Replacement {
+ current_name: name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::UpperSnakeCase,
+ }
+ } else {
+ // Nothing to do here.
+ return;
+ };
+
+ // Only touch the source map once we know a diagnostic is needed.
+ let const_loc = const_id.lookup(self.db.upcast());
+ let const_src = const_loc.source(self.db.upcast());
+
+ let ast_ptr = match const_src.value.name() {
+ Some(name) => name,
+ None => return,
+ };
+
+ let diagnostic = IncorrectCase {
+ file: const_src.file_id,
+ ident_type: IdentType::Constant,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.to_string(),
+ suggested_text: replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+
+ /// Checks that a `static` item's name is UPPER_SNAKE_CASE, emitting an
+ /// `IncorrectCase` diagnostic with a suggested rename otherwise.
+ /// `extern` statics are deliberately skipped: their names are dictated by the
+ /// foreign interface and cannot be renamed.
+ fn validate_static(&mut self, static_id: StaticId) {
+ let data = self.db.static_data(static_id);
+ if data.is_extern {
+ cov_mark::hit!(extern_static_incorrect_case_ignored);
+ return;
+ }
+
+ // Respect `#[allow(non_upper_case_globals)]` on the item or its ancestors.
+ if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
+ return;
+ }
+
+ let name = &data.name;
+
+ let static_name = name.to_string();
+ // `to_upper_snake_case` returns `None` when the name is already correct.
+ let replacement = if let Some(new_name) = to_upper_snake_case(&static_name) {
+ Replacement {
+ current_name: name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::UpperSnakeCase,
+ }
+ } else {
+ // Nothing to do here.
+ return;
+ };
+
+ // Only touch the source map once we know a diagnostic is needed.
+ let static_loc = static_id.lookup(self.db.upcast());
+ let static_src = static_loc.source(self.db.upcast());
+
+ let ast_ptr = match static_src.value.name() {
+ Some(name) => name,
+ None => return,
+ };
+
+ let diagnostic = IncorrectCase {
+ file: static_src.file_id,
+ ident_type: IdentType::StaticVariable,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.to_string(),
+ suggested_text: replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
new file mode 100644
index 000000000..88d607194
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
@@ -0,0 +1,199 @@
+//! Functions for string case manipulation, such as detecting the identifier case,
+//! and converting it into appropriate form.
+
+// Code that was taken from rustc was taken at commit 89fdb30,
+// from file /compiler/rustc_lint/src/nonstandard_style.rs
+
+/// Converts an identifier to an UpperCamelCase form.
+/// Returns `None` if the string is already in UpperCamelCase.
+pub(crate) fn to_camel_case(ident: &str) -> Option<String> {
+ if is_camel_case(ident) {
+ return None;
+ }
+
+ // Taken from rustc.
+ // Split on `_`, camel-case each component, then re-join, inserting a `_`
+ // only where the component boundary would otherwise be ambiguous.
+ let ret = ident
+ .trim_matches('_')
+ .split('_')
+ .filter(|component| !component.is_empty())
+ .map(|component| {
+ let mut camel_cased_component = String::with_capacity(component.len());
+
+ let mut new_word = true;
+ let mut prev_is_lower_case = true;
+
+ for c in component.chars() {
+ // Preserve the case if an uppercase letter follows a lowercase letter, so that
+ // `camelCase` is converted to `CamelCase`.
+ if prev_is_lower_case && c.is_uppercase() {
+ new_word = true;
+ }
+
+ if new_word {
+ camel_cased_component.extend(c.to_uppercase());
+ } else {
+ camel_cased_component.extend(c.to_lowercase());
+ }
+
+ prev_is_lower_case = c.is_lowercase();
+ new_word = false;
+ }
+
+ camel_cased_component
+ })
+ .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
+ // separate two components with an underscore if their boundary cannot
+ // be distinguished using an uppercase/lowercase case distinction
+ // (e.g. `x86__64` must become `X86_64`, not `X8664`)
+ let join = prev
+ .and_then(|prev| {
+ let f = next.chars().next()?;
+ let l = prev.chars().last()?;
+ Some(!char_has_case(l) && !char_has_case(f))
+ })
+ .unwrap_or(false);
+ (acc + if join { "_" } else { "" } + &next, Some(next))
+ })
+ .0;
+ Some(ret)
+}
+
+/// Converts an identifier to a lower_snake_case form.
+/// Returns `None` if the string is already in lower_snake_case.
+pub(crate) fn to_lower_snake_case(ident: &str) -> Option<String> {
+ if is_lower_snake_case(ident) {
+ return None;
+ } else if is_upper_snake_case(ident) {
+ // Already snake-shaped, only the case is wrong: lowercasing suffices.
+ return Some(ident.to_lowercase());
+ }
+
+ // General case (camelCase etc.) is delegated to the shared stdx helper.
+ Some(stdx::to_lower_snake_case(ident))
+}
+
+/// Converts an identifier to an UPPER_SNAKE_CASE form.
+/// Returns `None` if the string is already in UPPER_SNAKE_CASE.
+pub(crate) fn to_upper_snake_case(ident: &str) -> Option<String> {
+ if is_upper_snake_case(ident) {
+ return None;
+ } else if is_lower_snake_case(ident) {
+ // Already snake-shaped, only the case is wrong: uppercasing suffices.
+ return Some(ident.to_uppercase());
+ }
+
+ // General case (camelCase etc.) is delegated to the shared stdx helper.
+ Some(stdx::to_upper_snake_case(ident))
+}
+
+// Taken from rustc.
+// Modified by replacing the use of unstable feature `array_windows`.
+/// Returns `true` if `name` (ignoring leading/trailing underscores) is
+/// acceptable UpperCamelCase: no `__`, no underscore adjacent to a cased
+/// character, and not starting with a lowercase letter.
+fn is_camel_case(name: &str) -> bool {
+ let name = name.trim_matches('_');
+ if name.is_empty() {
+ return true;
+ }
+
+ // `fst` tracks the previous char so each closure call sees a (prev, cur) pair,
+ // emulating `array_windows::<2>` on stable.
+ let mut fst = None;
+ // start with a non-lowercase letter rather than non-uppercase
+ // ones (some scripts don't have a concept of upper/lowercase)
+ name.chars().next().map_or(true, |c| !c.is_lowercase())
+ && !name.contains("__")
+ && !name.chars().any(|snd| {
+ let ret = match fst {
+ None => false,
+ Some(fst) => char_has_case(fst) && snd == '_' || char_has_case(snd) && fst == '_',
+ };
+ fst = Some(snd);
+
+ ret
+ })
+}
+
+/// `true` if `ident` is valid lower_snake_case (uppercase letters are the "wrong" case).
+fn is_lower_snake_case(ident: &str) -> bool {
+ is_snake_case(ident, char::is_uppercase)
+}
+
+/// `true` if `ident` is valid UPPER_SNAKE_CASE (lowercase letters are the "wrong" case).
+fn is_upper_snake_case(ident: &str) -> bool {
+ is_snake_case(ident, char::is_lowercase)
+}
+
+// Taken from rustc.
+// Modified to allow checking for both upper and lower snake case.
+/// Shared snake-case check: `wrong_case` is the predicate for characters of the
+/// disallowed case. Rejects consecutive interior underscores; leading/trailing
+/// underscores are trimmed and thus tolerated.
+fn is_snake_case<F: Fn(char) -> bool>(ident: &str, wrong_case: F) -> bool {
+ if ident.is_empty() {
+ return true;
+ }
+ let ident = ident.trim_matches('_');
+
+ // `allow_underscore` is false immediately after an underscore, so `__` fails.
+ let mut allow_underscore = true;
+ ident.chars().all(|c| {
+ allow_underscore = match c {
+ '_' if !allow_underscore => return false,
+ '_' => false,
+ // It would be more obvious to check for the correct case,
+ // but some characters do not have a case.
+ c if !wrong_case(c) => true,
+ _ => return false,
+ };
+ true
+ })
+}
+
+// Taken from rustc.
+/// `true` if `c` participates in upper/lower case at all (caseless scripts return `false`).
+fn char_has_case(c: char) -> bool {
+ c.is_lowercase() || c.is_uppercase()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use expect_test::{expect, Expect};
+
+ /// Runs `fun` on `input` and compares against `expect`; an empty expectation
+ /// means the identifier is already in the target case (the converter returned `None`).
+ fn check<F: Fn(&str) -> Option<String>>(fun: F, input: &str, expect: Expect) {
+ // `None` is translated to empty string, meaning that there is nothing to fix.
+ let output = fun(input).unwrap_or_default();
+
+ expect.assert_eq(&output);
+ }
+
+ #[test]
+ fn test_to_lower_snake_case() {
+ check(to_lower_snake_case, "lower_snake_case", expect![[""]]);
+ check(to_lower_snake_case, "UPPER_SNAKE_CASE", expect![["upper_snake_case"]]);
+ check(to_lower_snake_case, "Weird_Case", expect![["weird_case"]]);
+ check(to_lower_snake_case, "CamelCase", expect![["camel_case"]]);
+ check(to_lower_snake_case, "lowerCamelCase", expect![["lower_camel_case"]]);
+ check(to_lower_snake_case, "a", expect![[""]]);
+ check(to_lower_snake_case, "abc", expect![[""]]);
+ check(to_lower_snake_case, "foo__bar", expect![["foo_bar"]]);
+ }
+
+ #[test]
+ fn test_to_camel_case() {
+ check(to_camel_case, "CamelCase", expect![[""]]);
+ check(to_camel_case, "CamelCase_", expect![[""]]);
+ check(to_camel_case, "_CamelCase", expect![[""]]);
+ check(to_camel_case, "lowerCamelCase", expect![["LowerCamelCase"]]);
+ check(to_camel_case, "lower_snake_case", expect![["LowerSnakeCase"]]);
+ check(to_camel_case, "UPPER_SNAKE_CASE", expect![["UpperSnakeCase"]]);
+ check(to_camel_case, "Weird_Case", expect![["WeirdCase"]]);
+ check(to_camel_case, "name", expect![["Name"]]);
+ check(to_camel_case, "A", expect![[""]]);
+ check(to_camel_case, "AABB", expect![[""]]);
+ // Taken from rustc: /compiler/rustc_lint/src/nonstandard_style/tests.rs
+ check(to_camel_case, "X86_64", expect![[""]]);
+ check(to_camel_case, "x86__64", expect![["X86_64"]]);
+ check(to_camel_case, "Abc_123", expect![["Abc123"]]);
+ check(to_camel_case, "A1_b2_c3", expect![["A1B2C3"]]);
+ }
+
+ #[test]
+ fn test_to_upper_snake_case() {
+ check(to_upper_snake_case, "UPPER_SNAKE_CASE", expect![[""]]);
+ check(to_upper_snake_case, "lower_snake_case", expect![["LOWER_SNAKE_CASE"]]);
+ check(to_upper_snake_case, "Weird_Case", expect![["WEIRD_CASE"]]);
+ check(to_upper_snake_case, "CamelCase", expect![["CAMEL_CASE"]]);
+ check(to_upper_snake_case, "lowerCamelCase", expect![["LOWER_CAMEL_CASE"]]);
+ check(to_upper_snake_case, "A", expect![[""]]);
+ check(to_upper_snake_case, "ABC", expect![[""]]);
+ check(to_upper_snake_case, "X86_64", expect![[""]]);
+ check(to_upper_snake_case, "FOO_BAr", expect![["FOO_BAR"]]);
+ check(to_upper_snake_case, "FOO__BAR", expect![["FOO_BAR"]]);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
new file mode 100644
index 000000000..642e03edd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -0,0 +1,416 @@
+//! Various diagnostics for expressions that are collected together in one pass
+//! through the body using inference results: mismatched arg counts, missing
+//! fields, etc.
+
+use std::fmt;
+use std::sync::Arc;
+
+use hir_def::{path::path, resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
+use hir_expand::name;
+use itertools::Either;
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+use typed_arena::Arena;
+
+use crate::{
+ db::HirDatabase,
+ diagnostics::match_check::{
+ self,
+ deconstruct_pat::DeconstructedPat,
+ usefulness::{compute_match_usefulness, MatchCheckCtx},
+ },
+ display::HirDisplay,
+ InferenceResult, Ty, TyExt,
+};
+
+pub(crate) use hir_def::{
+ body::Body,
+ expr::{Expr, ExprId, MatchArm, Pat, PatId},
+ LocalFieldId, VariantId,
+};
+
+/// Diagnostics produced by a single validation pass over a body's expressions and patterns.
+pub enum BodyValidationDiagnostic {
+ /// A record literal (`Left`) or record pattern (`Right`) that does not
+ /// mention all fields of `variant` and is not closed by `..`/spread.
+ RecordMissingFields {
+ record: Either<ExprId, PatId>,
+ variant: VariantId,
+ missed_fields: Vec<LocalFieldId>,
+ },
+ /// A `.filter_map(..).next()` chain that should be `.find_map(..)`.
+ ReplaceFilterMapNextWithFindMap {
+ method_call_expr: ExprId,
+ },
+ /// A non-exhaustive `match`; `uncovered_patterns` is display-ready text.
+ MissingMatchArms {
+ match_expr: ExprId,
+ uncovered_patterns: String,
+ },
+}
+
+impl BodyValidationDiagnostic {
+ /// Runs all body validations for `owner` and returns the collected diagnostics.
+ pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec<BodyValidationDiagnostic> {
+ let _p = profile::span("BodyValidationDiagnostic::collect");
+ let infer = db.infer(owner);
+ let mut validator = ExprValidator::new(owner, infer);
+ validator.validate_body(db);
+ validator.diagnostics
+ }
+}
+
+/// Walks one body with its inference results, accumulating `diagnostics`.
+struct ExprValidator {
+ owner: DefWithBodyId,
+ infer: Arc<InferenceResult>,
+ pub(super) diagnostics: Vec<BodyValidationDiagnostic>,
+}
+
+impl ExprValidator {
+ fn new(owner: DefWithBodyId, infer: Arc<InferenceResult>) -> ExprValidator {
+ ExprValidator { owner, infer, diagnostics: Vec::new() }
+ }
+
+ /// Single pass over all expressions (then all patterns) of the body,
+ /// dispatching to the per-construct checks below.
+ fn validate_body(&mut self, db: &dyn HirDatabase) {
+ let body = db.body(self.owner);
+ // Built lazily on the first method call; see `validate_call`.
+ let mut filter_map_next_checker = None;
+
+ for (id, expr) in body.exprs.iter() {
+ if let Some((variant, missed_fields, true)) =
+ record_literal_missing_fields(db, &self.infer, id, expr)
+ {
+ self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
+ record: Either::Left(id),
+ variant,
+ missed_fields,
+ });
+ }
+
+ match expr {
+ Expr::Match { expr, arms } => {
+ self.validate_match(id, *expr, arms, db, self.infer.clone());
+ }
+ Expr::Call { .. } | Expr::MethodCall { .. } => {
+ self.validate_call(db, id, expr, &mut filter_map_next_checker);
+ }
+ _ => {}
+ }
+ }
+ for (id, pat) in body.pats.iter() {
+ if let Some((variant, missed_fields, true)) =
+ record_pattern_missing_fields(db, &self.infer, id, pat)
+ {
+ self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
+ record: Either::Right(id),
+ variant,
+ missed_fields,
+ });
+ }
+ }
+ }
+
+ /// Checks a call expression; currently only detects `.filter_map(..).next()`.
+ fn validate_call(
+ &mut self,
+ db: &dyn HirDatabase,
+ call_id: ExprId,
+ expr: &Expr,
+ filter_map_next_checker: &mut Option<FilterMapNextChecker>,
+ ) {
+ // Check that the number of arguments matches the number of parameters.
+
+ // FIXME: Due to shortcomings in the current type system implementation, only emit this
+ // diagnostic if there are no type mismatches in the containing function.
+ if self.infer.expr_type_mismatches().next().is_some() {
+ return;
+ }
+
+ match expr {
+ Expr::MethodCall { receiver, .. } => {
+ let (callee, _) = match self.infer.method_resolution(call_id) {
+ Some(it) => it,
+ None => return,
+ };
+
+ if filter_map_next_checker
+ .get_or_insert_with(|| {
+ FilterMapNextChecker::new(&self.owner.resolver(db.upcast()), db)
+ })
+ .check(call_id, receiver, &callee)
+ .is_some()
+ {
+ self.diagnostics.push(
+ BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap {
+ method_call_expr: call_id,
+ },
+ );
+ }
+ }
+ _ => return,
+ };
+ }
+
+ /// Runs the exhaustiveness check on one `match` expression and reports
+ /// `MissingMatchArms` when witnesses of uncovered patterns are found.
+ fn validate_match(
+ &mut self,
+ id: ExprId,
+ match_expr: ExprId,
+ arms: &[MatchArm],
+ db: &dyn HirDatabase,
+ infer: Arc<InferenceResult>,
+ ) {
+ let body = db.body(self.owner);
+
+ let match_expr_ty = &infer[match_expr];
+ if match_expr_ty.is_unknown() {
+ return;
+ }
+
+ let pattern_arena = Arena::new();
+ let cx = MatchCheckCtx {
+ module: self.owner.module(db.upcast()),
+ body: self.owner,
+ db,
+ pattern_arena: &pattern_arena,
+ };
+
+ let mut m_arms = Vec::with_capacity(arms.len());
+ let mut has_lowering_errors = false;
+ for arm in arms {
+ if let Some(pat_ty) = infer.type_of_pat.get(arm.pat) {
+ // We only include patterns whose type matches the type
+ // of the match expression. If we had an InvalidMatchArmPattern
+ // diagnostic or similar we could raise that in an else
+ // block here.
+ //
+ // When comparing the types, we also have to consider that rustc
+ // will automatically de-reference the match expression type if
+ // necessary.
+ //
+ // FIXME we should use the type checker for this.
+ if (pat_ty == match_expr_ty
+ || match_expr_ty
+ .as_reference()
+ .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
+ .unwrap_or(false))
+ && types_of_subpatterns_do_match(arm.pat, &body, &infer)
+ {
+ // If we had a NotUsefulMatchArm diagnostic, we could
+ // check the usefulness of each pattern as we added it
+ // to the matrix here.
+ let m_arm = match_check::MatchArm {
+ pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors),
+ has_guard: arm.guard.is_some(),
+ };
+ m_arms.push(m_arm);
+ if !has_lowering_errors {
+ // Arm accepted; only a lowering failure falls through to the bail-out below.
+ continue;
+ }
+ }
+ }
+
+ // If we can't resolve the type of a pattern, or the pattern type doesn't
+ // fit the match expression, we skip this diagnostic. Skipping the entire
+ // diagnostic rather than just not including this match arm is preferred
+ // to avoid the chance of false positives.
+ cov_mark::hit!(validate_match_bailed_out);
+ return;
+ }
+
+ let report = compute_match_usefulness(&cx, &m_arms, match_expr_ty);
+
+ // FIXME Report unreachable arms
+ // https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
+
+ let witnesses = report.non_exhaustiveness_witnesses;
+ if !witnesses.is_empty() {
+ self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
+ match_expr: id,
+ uncovered_patterns: missing_match_arms(&cx, match_expr_ty, witnesses, arms),
+ });
+ }
+ }
+
+ /// Lowers one HIR pattern into the match-check representation, allocating it
+ /// in the check context's arena; sets `have_errors` if lowering was lossy.
+ fn lower_pattern<'p>(
+ &self,
+ cx: &MatchCheckCtx<'_, 'p>,
+ pat: PatId,
+ db: &dyn HirDatabase,
+ body: &Body,
+ have_errors: &mut bool,
+ ) -> &'p DeconstructedPat<'p> {
+ let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
+ let pattern = patcx.lower_pattern(pat);
+ let pattern = cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, &pattern));
+ if !patcx.errors.is_empty() {
+ *have_errors = true;
+ }
+ pattern
+ }
+}
+
+/// Stateful detector for `.filter_map(..).next()` chains; resolved function ids
+/// are `None` when `Iterator::filter_map`/`Iterator::next` could not be found.
+struct FilterMapNextChecker {
+ filter_map_function_id: Option<hir_def::FunctionId>,
+ next_function_id: Option<hir_def::FunctionId>,
+ // Expression id of the most recently seen `filter_map` call, if any.
+ prev_filter_map_expr_id: Option<ExprId>,
+}
+
+impl FilterMapNextChecker {
+ fn new(resolver: &hir_def::resolver::Resolver, db: &dyn HirDatabase) -> Self {
+ // Find and store the FunctionIds for Iterator::filter_map and Iterator::next
+ let iterator_path = path![core::iter::Iterator];
+ let mut filter_map_function_id = None;
+ let mut next_function_id = None;
+
+ if let Some(iterator_trait_id) = resolver.resolve_known_trait(db.upcast(), &iterator_path) {
+ let iterator_trait_items = &db.trait_data(iterator_trait_id).items;
+ for item in iterator_trait_items.iter() {
+ if let (name, AssocItemId::FunctionId(id)) = item {
+ if *name == name![filter_map] {
+ filter_map_function_id = Some(*id);
+ }
+ if *name == name![next] {
+ next_function_id = Some(*id);
+ }
+ }
+ if filter_map_function_id.is_some() && next_function_id.is_some() {
+ break;
+ }
+ }
+ }
+ Self { filter_map_function_id, next_function_id, prev_filter_map_expr_id: None }
+ }
+
+ // check for instances of .filter_map(..).next()
+ /// Returns `Some(())` when `current_expr_id` is a `next()` call whose receiver
+ /// is the `filter_map` call seen immediately before; the `?`s make this a
+ /// no-op if either trait method failed to resolve in `new`.
+ fn check(
+ &mut self,
+ current_expr_id: ExprId,
+ receiver_expr_id: &ExprId,
+ function_id: &hir_def::FunctionId,
+ ) -> Option<()> {
+ if *function_id == self.filter_map_function_id? {
+ // Remember this `filter_map` call so a following `next()` can match it.
+ self.prev_filter_map_expr_id = Some(current_expr_id);
+ return None;
+ }
+
+ if *function_id == self.next_function_id? {
+ if let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id {
+ if *receiver_expr_id == prev_filter_map_expr_id {
+ return Some(());
+ }
+ }
+ }
+
+ // Any other method call breaks the chain.
+ self.prev_filter_map_expr_id = None;
+ None
+ }
+}
+
+/// For a record literal expression, returns the resolved variant, the fields it
+/// fails to mention, and whether the literal is exhaustive (no `..` spread /
+/// not an open assignee expression). Returns `None` for non-record expressions,
+/// unions, unresolved variants, or fully specified literals.
+pub fn record_literal_missing_fields(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ id: ExprId,
+ expr: &Expr,
+) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
+ let (fields, exhaustive) = match expr {
+ Expr::RecordLit { fields, spread, ellipsis, is_assignee_expr, .. } => {
+ // Assignee position uses `..` (ellipsis); expression position uses `..base` (spread).
+ let exhaustive = if *is_assignee_expr { !*ellipsis } else { spread.is_none() };
+ (fields, exhaustive)
+ }
+ _ => return None,
+ };
+
+ let variant_def = infer.variant_resolution_for_expr(id)?;
+ // Union literals always set exactly one field; missing-field checks don't apply.
+ if let VariantId::UnionId(_) = variant_def {
+ return None;
+ }
+
+ let variant_data = variant_def.variant_data(db.upcast());
+
+ let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ let missed_fields: Vec<LocalFieldId> = variant_data
+ .fields()
+ .iter()
+ .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+ .collect();
+ if missed_fields.is_empty() {
+ return None;
+ }
+ Some((variant_def, missed_fields, exhaustive))
+}
+
+/// Pattern-side counterpart of [`record_literal_missing_fields`]: for a record
+/// pattern, returns the resolved variant, the fields it fails to mention, and
+/// whether the pattern is exhaustive (no `..` rest pattern).
+pub fn record_pattern_missing_fields(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ id: PatId,
+ pat: &Pat,
+) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
+ let (fields, exhaustive) = match pat {
+ Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
+ _ => return None,
+ };
+
+ let variant_def = infer.variant_resolution_for_pat(id)?;
+ // Union patterns match exactly one field; missing-field checks don't apply.
+ if let VariantId::UnionId(_) = variant_def {
+ return None;
+ }
+
+ let variant_data = variant_def.variant_data(db.upcast());
+
+ let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ let missed_fields: Vec<LocalFieldId> = variant_data
+ .fields()
+ .iter()
+ .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+ .collect();
+ if missed_fields.is_empty() {
+ return None;
+ }
+ Some((variant_def, missed_fields, exhaustive))
+}
+
+/// Returns `true` if neither `pat` nor any of its subpatterns has a recorded
+/// type mismatch; used to avoid running exhaustiveness checks on ill-typed arms.
+fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
+ fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
+ match infer.type_mismatch_for_pat(pat) {
+ Some(_) => *has_type_mismatches = true,
+ None => {
+ // Only recurse while no mismatch has been found at this level.
+ body[pat].walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
+ }
+ }
+ }
+
+ let mut has_type_mismatches = false;
+ walk(pat, body, infer, &mut has_type_mismatches);
+ !has_type_mismatches
+}
+
+/// Renders the non-exhaustiveness witnesses into the human-readable text used by
+/// the `MissingMatchArms` diagnostic, listing at most `LIMIT` patterns verbatim.
+fn missing_match_arms<'p>(
+ cx: &MatchCheckCtx<'_, 'p>,
+ scrut_ty: &Ty,
+ witnesses: Vec<DeconstructedPat<'p>>,
+ arms: &[MatchArm],
+) -> String {
+ // Adapter so a witness pattern can be formatted with `Display` via `HirDisplay`.
+ struct DisplayWitness<'a, 'p>(&'a DeconstructedPat<'p>, &'a MatchCheckCtx<'a, 'p>);
+ impl<'a, 'p> fmt::Display for DisplayWitness<'a, 'p> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let DisplayWitness(witness, cx) = *self;
+ let pat = witness.to_pat(cx);
+ write!(f, "{}", pat.display(cx.db))
+ }
+ }
+
+ let non_empty_enum = match scrut_ty.as_adt() {
+ Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(),
+ _ => false,
+ };
+ // An empty match over a (possibly) empty type gets the special "non-empty" wording.
+ if arms.is_empty() && !non_empty_enum {
+ format!("type `{}` is non-empty", scrut_ty.display(cx.db))
+ } else {
+ let pat_display = |witness| DisplayWitness(witness, cx);
+ const LIMIT: usize = 3;
+ match &*witnesses {
+ [witness] => format!("`{}` not covered", pat_display(witness)),
+ [head @ .., tail] if head.len() < LIMIT => {
+ let head = head.iter().map(pat_display);
+ format!("`{}` and `{}` not covered", head.format("`, `"), pat_display(tail))
+ }
+ _ => {
+ // Too many witnesses: show the first LIMIT and summarize the rest.
+ let (head, tail) = witnesses.split_at(LIMIT);
+ let head = head.iter().map(pat_display);
+ format!("`{}` and {} more not covered", head.format("`, `"), tail.len())
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
new file mode 100644
index 000000000..d51ad72bd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
@@ -0,0 +1,508 @@
+//! Validation of matches.
+//!
+//! This module provides lowering from [hir_def::expr::Pat] to [self::Pat] and match
+//! checking algorithm.
+//!
+//! It is modeled on the rustc module `rustc_mir_build::thir::pattern`.
+
+mod pat_util;
+
+pub(crate) mod deconstruct_pat;
+pub(crate) mod usefulness;
+
+use chalk_ir::Mutability;
+use hir_def::{
+ adt::VariantData, body::Body, expr::PatId, AdtId, EnumVariantId, HasModule, LocalFieldId,
+ VariantId,
+};
+use hir_expand::name::{name, Name};
+use stdx::{always, never};
+
+use crate::{
+ db::HirDatabase,
+ display::{HirDisplay, HirDisplayError, HirFormatter},
+ infer::BindingMode,
+ InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
+};
+
+use self::pat_util::EnumerateAndAdjustIterator;
+
+pub(crate) use self::usefulness::MatchArm;
+
+/// Reasons why lowering a HIR pattern to the match-check representation was lossy;
+/// any of these makes the resulting `Pat` unreliable for exhaustiveness checking.
+#[derive(Clone, Debug)]
+pub(crate) enum PatternError {
+ Unimplemented,
+ UnexpectedType,
+ UnresolvedVariant,
+ MissingField,
+ ExtraFields,
+}
+
+/// A pattern for a single named/positional field of an ADT variant.
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) struct FieldPat {
+ pub(crate) field: LocalFieldId,
+ pub(crate) pattern: Pat,
+}
+
+/// A typed, lowered pattern as used by the match-usefulness algorithm
+/// (analogous to rustc's THIR `Pat`).
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) struct Pat {
+ pub(crate) ty: Ty,
+ pub(crate) kind: Box<PatKind>,
+}
+
+/// Close relative to `rustc_mir_build::thir::pattern::PatKind`
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) enum PatKind {
+ /// `_`, or anything that could not be lowered precisely.
+ Wild,
+
+ /// `x`, `ref x`, `x @ P`, etc.
+ Binding {
+ name: Name,
+ subpattern: Option<Pat>,
+ },
+
+ /// `Foo(...)` or `Foo{...}` or `Foo`, where `Foo` is a variant name from an ADT with
+ /// multiple variants.
+ Variant {
+ substs: Substitution,
+ enum_variant: EnumVariantId,
+ subpatterns: Vec<FieldPat>,
+ },
+
+ /// `(...)`, `Foo(...)`, `Foo{...}`, or `Foo`, where `Foo` is a variant name from an ADT with
+ /// a single variant.
+ Leaf {
+ subpatterns: Vec<FieldPat>,
+ },
+
+ /// `box P`, `&P`, `&mut P`, etc.
+ Deref {
+ subpattern: Pat,
+ },
+
+ // FIXME: for now, only bool literals are implemented
+ /// `true` / `false` literal pattern.
+ LiteralBool {
+ value: bool,
+ },
+
+ /// An or-pattern, e.g. `p | q`.
+ /// Invariant: `pats.len() >= 2`.
+ Or {
+ pats: Vec<Pat>,
+ },
+}
+
+/// Context for lowering the patterns of one body; records lossy lowerings in `errors`.
+pub(crate) struct PatCtxt<'a> {
+ db: &'a dyn HirDatabase,
+ infer: &'a InferenceResult,
+ body: &'a Body,
+ pub(crate) errors: Vec<PatternError>,
+}
+
+impl<'a> PatCtxt<'a> {
+ pub(crate) fn new(db: &'a dyn HirDatabase, infer: &'a InferenceResult, body: &'a Body) -> Self {
+ Self { db, infer, body, errors: Vec::new() }
+ }
+
+ /// Lowers a HIR pattern, then re-wraps it in one `Deref` layer per recorded
+ /// match-ergonomics adjustment (outermost adjustment first).
+ pub(crate) fn lower_pattern(&mut self, pat: PatId) -> Pat {
+ // XXX(iDawer): Collecting pattern adjustments feels imprecise to me.
+ // When lowering of & and box patterns are implemented this should be tested
+ // in a manner of `match_ergonomics_issue_9095` test.
+ // Pattern adjustment is part of RFC 2005-match-ergonomics.
+ // More info https://github.com/rust-lang/rust/issues/42640#issuecomment-313535089
+ let unadjusted_pat = self.lower_pattern_unadjusted(pat);
+ self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold(
+ unadjusted_pat,
+ |subpattern, ref_ty| Pat {
+ ty: ref_ty.clone(),
+ kind: Box::new(PatKind::Deref { subpattern }),
+ },
+ )
+ }
+
+ /// Lowers a single HIR pattern node, ignoring match-ergonomics adjustments.
+ /// Unsupported or inconsistent inputs degrade to `PatKind::Wild` and push an error.
+ fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat {
+ let mut ty = &self.infer[pat];
+ let variant = self.infer.variant_resolution_for_pat(pat);
+
+ let kind = match self.body[pat] {
+ hir_def::expr::Pat::Wild => PatKind::Wild,
+
+ hir_def::expr::Pat::Lit(expr) => self.lower_lit(expr),
+
+ hir_def::expr::Pat::Path(ref path) => {
+ return self.lower_path(pat, path);
+ }
+
+ hir_def::expr::Pat::Tuple { ref args, ellipsis } => {
+ let arity = match *ty.kind(Interner) {
+ TyKind::Tuple(arity, _) => arity,
+ _ => {
+ never!("unexpected type for tuple pattern: {:?}", ty);
+ self.errors.push(PatternError::UnexpectedType);
+ return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
+ }
+ };
+ let subpatterns = self.lower_tuple_subpats(args, arity, ellipsis);
+ PatKind::Leaf { subpatterns }
+ }
+
+ hir_def::expr::Pat::Bind { ref name, subpat, .. } => {
+ let bm = self.infer.pat_binding_modes[&pat];
+ match (bm, ty.kind(Interner)) {
+ // A by-ref binding's pattern type is `&T`; use the pointee type.
+ (BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
+ (BindingMode::Ref(_), _) => {
+ never!("`ref {}` has wrong type {:?}", name, ty);
+ self.errors.push(PatternError::UnexpectedType);
+ return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
+ }
+ _ => (),
+ }
+ PatKind::Binding { name: name.clone(), subpattern: self.lower_opt_pattern(subpat) }
+ }
+
+ hir_def::expr::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
+ let expected_len = variant.unwrap().variant_data(self.db.upcast()).fields().len();
+ let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
+ self.lower_variant_or_leaf(pat, ty, subpatterns)
+ }
+
+ hir_def::expr::Pat::Record { ref args, .. } if variant.is_some() => {
+ let variant_data = variant.unwrap().variant_data(self.db.upcast());
+ // `collect::<Option<Vec<_>>>`: any unknown field name aborts the whole lowering.
+ let subpatterns = args
+ .iter()
+ .map(|field| {
+ // XXX(iDawer): field lookup is inefficient
+ variant_data.field(&field.name).map(|lfield_id| FieldPat {
+ field: lfield_id,
+ pattern: self.lower_pattern(field.pat),
+ })
+ })
+ .collect();
+ match subpatterns {
+ Some(subpatterns) => self.lower_variant_or_leaf(pat, ty, subpatterns),
+ None => {
+ self.errors.push(PatternError::MissingField);
+ PatKind::Wild
+ }
+ }
+ }
+ hir_def::expr::Pat::TupleStruct { .. } | hir_def::expr::Pat::Record { .. } => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ PatKind::Wild
+ }
+
+ hir_def::expr::Pat::Or(ref pats) => PatKind::Or { pats: self.lower_patterns(pats) },
+
+ _ => {
+ self.errors.push(PatternError::Unimplemented);
+ PatKind::Wild
+ }
+ };
+
+ Pat { ty: ty.clone(), kind: Box::new(kind) }
+ }
+
+ /// Lowers positional subpatterns, mapping each to its field index and
+ /// accounting for a `..` (ellipsis) gap in the middle.
+ fn lower_tuple_subpats(
+ &mut self,
+ pats: &[PatId],
+ expected_len: usize,
+ ellipsis: Option<usize>,
+ ) -> Vec<FieldPat> {
+ if pats.len() > expected_len {
+ self.errors.push(PatternError::ExtraFields);
+ return Vec::new();
+ }
+
+ pats.iter()
+ .enumerate_and_adjust(expected_len, ellipsis)
+ .map(|(i, &subpattern)| FieldPat {
+ field: LocalFieldId::from_raw((i as u32).into()),
+ pattern: self.lower_pattern(subpattern),
+ })
+ .collect()
+ }
+
+ fn lower_patterns(&mut self, pats: &[PatId]) -> Vec<Pat> {
+ pats.iter().map(|&p| self.lower_pattern(p)).collect()
+ }
+
+ fn lower_opt_pattern(&mut self, pat: Option<PatId>) -> Option<Pat> {
+ pat.map(|p| self.lower_pattern(p))
+ }
+
+ /// Builds `Variant` for enum variants (capturing the enum's substitutions)
+ /// or `Leaf` for structs/unions; degrades to `Wild` with an error otherwise.
+ fn lower_variant_or_leaf(
+ &mut self,
+ pat: PatId,
+ ty: &Ty,
+ subpatterns: Vec<FieldPat>,
+ ) -> PatKind {
+ let kind = match self.infer.variant_resolution_for_pat(pat) {
+ Some(variant_id) => {
+ if let VariantId::EnumVariantId(enum_variant) = variant_id {
+ let substs = match ty.kind(Interner) {
+ TyKind::Adt(_, substs) => substs.clone(),
+ kind => {
+ always!(
+ matches!(kind, TyKind::FnDef(..) | TyKind::Error),
+ "inappropriate type for def: {:?}",
+ ty
+ );
+ self.errors.push(PatternError::UnexpectedType);
+ return PatKind::Wild;
+ }
+ };
+ PatKind::Variant { substs, enum_variant, subpatterns }
+ } else {
+ PatKind::Leaf { subpatterns }
+ }
+ }
+ None => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ PatKind::Wild
+ }
+ };
+ kind
+ }
+
+ /// Lowers a path pattern (unit struct/variant or const); only variant paths
+ /// are currently supported.
+ fn lower_path(&mut self, pat: PatId, _path: &hir_def::path::Path) -> Pat {
+ let ty = &self.infer[pat];
+
+ let pat_from_kind = |kind| Pat { ty: ty.clone(), kind: Box::new(kind) };
+
+ match self.infer.variant_resolution_for_pat(pat) {
+ Some(_) => pat_from_kind(self.lower_variant_or_leaf(pat, ty, Vec::new())),
+ None => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ pat_from_kind(PatKind::Wild)
+ }
+ }
+ }
+
+ /// Lowers a literal pattern; only `bool` literals are handled so far.
+ fn lower_lit(&mut self, expr: hir_def::expr::ExprId) -> PatKind {
+ use hir_def::expr::{Expr, Literal::Bool};
+
+ match self.body[expr] {
+ Expr::Literal(Bool(value)) => PatKind::LiteralBool { value },
+ _ => {
+ self.errors.push(PatternError::Unimplemented);
+ PatKind::Wild
+ }
+ }
+ }
+}
+
+impl HirDisplay for Pat {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match &*self.kind {
+ PatKind::Wild => write!(f, "_"),
+ PatKind::Binding { name, subpattern } => {
+ write!(f, "{name}")?;
+ if let Some(subpattern) = subpattern {
+ write!(f, " @ ")?;
+ subpattern.hir_fmt(f)?;
+ }
+ Ok(())
+ }
+ PatKind::Variant { subpatterns, .. } | PatKind::Leaf { subpatterns } => {
+ let variant = match *self.kind {
+ PatKind::Variant { enum_variant, .. } => Some(VariantId::from(enum_variant)),
+ _ => self.ty.as_adt().and_then(|(adt, _)| match adt {
+ AdtId::StructId(s) => Some(s.into()),
+ AdtId::UnionId(u) => Some(u.into()),
+ AdtId::EnumId(_) => None,
+ }),
+ };
+
+ if let Some(variant) = variant {
+ match variant {
+ VariantId::EnumVariantId(v) => {
+ let data = f.db.enum_data(v.parent);
+ write!(f, "{}", data.variants[v.local_id].name)?;
+ }
+ VariantId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
+ VariantId::UnionId(u) => write!(f, "{}", f.db.union_data(u).name)?,
+ };
+
+ let variant_data = variant.variant_data(f.db.upcast());
+ if let VariantData::Record(rec_fields) = &*variant_data {
+ write!(f, " {{ ")?;
+
+ let mut printed = 0;
+ let subpats = subpatterns
+ .iter()
+ .filter(|p| !matches!(*p.pattern.kind, PatKind::Wild))
+ .map(|p| {
+ printed += 1;
+ WriteWith(move |f| {
+ write!(f, "{}: ", rec_fields[p.field].name)?;
+ p.pattern.hir_fmt(f)
+ })
+ });
+ f.write_joined(subpats, ", ")?;
+
+ if printed < rec_fields.len() {
+ write!(f, "{}..", if printed > 0 { ", " } else { "" })?;
+ }
+
+ return write!(f, " }}");
+ }
+ }
+
+ let num_fields = variant
+ .map_or(subpatterns.len(), |v| v.variant_data(f.db.upcast()).fields().len());
+ if num_fields != 0 || variant.is_none() {
+ write!(f, "(")?;
+ let subpats = (0..num_fields).map(|i| {
+ WriteWith(move |f| {
+ let fid = LocalFieldId::from_raw((i as u32).into());
+ if let Some(p) = subpatterns.get(i) {
+ if p.field == fid {
+ return p.pattern.hir_fmt(f);
+ }
+ }
+ if let Some(p) = subpatterns.iter().find(|p| p.field == fid) {
+ p.pattern.hir_fmt(f)
+ } else {
+ write!(f, "_")
+ }
+ })
+ });
+ f.write_joined(subpats, ", ")?;
+ if let (TyKind::Tuple(..), 1) = (self.ty.kind(Interner), num_fields) {
+ write!(f, ",")?;
+ }
+ write!(f, ")")?;
+ }
+
+ Ok(())
+ }
+ PatKind::Deref { subpattern } => {
+ match self.ty.kind(Interner) {
+ TyKind::Adt(adt, _) if is_box(adt.0, f.db) => write!(f, "box ")?,
+ &TyKind::Ref(mutbl, ..) => {
+ write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })?
+ }
+ _ => never!("{:?} is a bad Deref pattern type", self.ty),
+ }
+ subpattern.hir_fmt(f)
+ }
+ PatKind::LiteralBool { value } => write!(f, "{}", value),
+ PatKind::Or { pats } => f.write_joined(pats.iter(), " | "),
+ }
+ }
+}
+
+struct WriteWith<F>(F)
+where
+ F: Fn(&mut HirFormatter<'_>) -> Result<(), HirDisplayError>;
+
+impl<F> HirDisplay for WriteWith<F>
+where
+ F: Fn(&mut HirFormatter<'_>) -> Result<(), HirDisplayError>,
+{
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ (self.0)(f)
+ }
+}
+
+fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool {
+ let owned_box = name![owned_box].to_smol_str();
+ let krate = adt.module(db.upcast()).krate();
+ let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from);
+ Some(adt) == box_adt
+}
+
+pub(crate) trait PatternFoldable: Sized {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.super_fold_with(folder)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self;
+}
+
+pub(crate) trait PatternFolder: Sized {
+ fn fold_pattern(&mut self, pattern: &Pat) -> Pat {
+ pattern.super_fold_with(self)
+ }
+
+ fn fold_pattern_kind(&mut self, kind: &PatKind) -> PatKind {
+ kind.super_fold_with(self)
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Box<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ let content: T = (**self).fold_with(folder);
+ Box::new(content)
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Vec<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.iter().map(|t| t.fold_with(folder)).collect()
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Option<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.as_ref().map(|t| t.fold_with(folder))
+ }
+}
+
+macro_rules! clone_impls {
+ ($($ty:ty),+) => {
+ $(
+ impl PatternFoldable for $ty {
+ fn super_fold_with<F: PatternFolder>(&self, _: &mut F) -> Self {
+ Clone::clone(self)
+ }
+ }
+ )+
+ }
+}
+
+clone_impls! { LocalFieldId, Ty, Substitution, EnumVariantId }
+
+impl PatternFoldable for FieldPat {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) }
+ }
+}
+
+impl PatternFoldable for Pat {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ folder.fold_pattern(self)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ Pat { ty: self.ty.fold_with(folder), kind: self.kind.fold_with(folder) }
+ }
+}
+
+impl PatternFoldable for PatKind {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ folder.fold_pattern_kind(self)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ match self {
+ PatKind::Wild => PatKind::Wild,
+ PatKind::Binding { name, subpattern } => {
+ PatKind::Binding { name: name.clone(), subpattern: subpattern.fold_with(folder) }
+ }
+ PatKind::Variant { substs, enum_variant, subpatterns } => PatKind::Variant {
+ substs: substs.fold_with(folder),
+ enum_variant: enum_variant.fold_with(folder),
+ subpatterns: subpatterns.fold_with(folder),
+ },
+ PatKind::Leaf { subpatterns } => {
+ PatKind::Leaf { subpatterns: subpatterns.fold_with(folder) }
+ }
+ PatKind::Deref { subpattern } => {
+ PatKind::Deref { subpattern: subpattern.fold_with(folder) }
+ }
+ &PatKind::LiteralBool { value } => PatKind::LiteralBool { value },
+ PatKind::Or { pats } => PatKind::Or { pats: pats.fold_with(folder) },
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs
new file mode 100644
index 000000000..bbbe539c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs
@@ -0,0 +1,1094 @@
+//! [`super::usefulness`] explains most of what is happening in this file. As explained there,
+//! values and patterns are made from constructors applied to fields. This file defines a
+//! `Constructor` enum, a `Fields` struct, and various operations to manipulate them and convert
+//! them from/to patterns.
+//!
+//! There's one idea that is not detailed in [`super::usefulness`] because the details are not
+//! needed there: _constructor splitting_.
+//!
+//! # Constructor splitting
+//!
+//! The idea is as follows: given a constructor `c` and a matrix, we want to specialize in turn
+//! with all the value constructors that are covered by `c`, and compute usefulness for each.
+//! Instead of listing all those constructors (which is intractable), we group those value
+//! constructors together as much as possible. Example:
+//!
+//! ```
+//! match (0, false) {
+//! (0 ..=100, true) => {} // `p_1`
+//! (50..=150, false) => {} // `p_2`
+//! (0 ..=200, _) => {} // `q`
+//! }
+//! ```
+//!
+//! The naive approach would try all numbers in the range `0..=200`. But we can be a lot more
+//! clever: `0` and `1` for example will match the exact same rows, and return equivalent
+//! witnesses. In fact all of `0..50` would. We can thus restrict our exploration to 4
+//! constructors: `0..50`, `50..=100`, `101..=150` and `151..=200`. That is enough and infinitely
+//! more tractable.
+//!
+//! We capture this idea in a function `split(p_1 ... p_n, c)` which returns a list of constructors
+//! `c'` covered by `c`. Given such a `c'`, we require that all value ctors `c''` covered by `c'`
+//! return an equivalent set of witnesses after specializing and computing usefulness.
+//! In the example above, witnesses for specializing by `c''` covered by `0..50` will only differ
+//! in their first element.
+//!
+//! We usually also ask that the `c'` together cover all of the original `c`. However we allow
+//! skipping some constructors as long as it doesn't change whether the resulting list of witnesses
+//! is empty or not. We use this in the wildcard `_` case.
+//!
+//! Splitting is implemented in the [`Constructor::split`] function. We don't do splitting for
+//! or-patterns; instead we just try the alternatives one-by-one. For details on splitting
+//! wildcards, see [`SplitWildcard`]; for integer ranges, see [`SplitIntRange`].
+
+use std::{
+ cell::Cell,
+ cmp::{max, min},
+ iter::once,
+ ops::RangeInclusive,
+};
+
+use hir_def::{EnumVariantId, HasModule, LocalFieldId, VariantId};
+use smallvec::{smallvec, SmallVec};
+use stdx::never;
+
+use crate::{infer::normalize, AdtId, Interner, Scalar, Ty, TyExt, TyKind};
+
+use super::{
+ is_box,
+ usefulness::{helper::Captures, MatchCheckCtx, PatCtxt},
+ FieldPat, Pat, PatKind,
+};
+
+use self::Constructor::*;
+
+/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
+fn expand_or_pat(pat: &Pat) -> Vec<&Pat> {
+ fn expand<'p>(pat: &'p Pat, vec: &mut Vec<&'p Pat>) {
+ if let PatKind::Or { pats } = pat.kind.as_ref() {
+ for pat in pats {
+ expand(pat, vec);
+ }
+ } else {
+ vec.push(pat)
+ }
+ }
+
+ let mut pats = Vec::new();
+ expand(pat, &mut pats);
+ pats
+}
+
+/// [Constructor] uses this in unimplemented variants.
+/// It allows porting match expressions from upstream algorithm without losing semantics.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(super) enum Void {}
+
+/// An inclusive interval, used for precise integer exhaustiveness checking.
+/// `IntRange`s always store a contiguous range. This means that values are
+/// encoded such that `0` encodes the minimum value for the integer,
+/// regardless of the signedness.
+/// For example, the pattern `-128..=127i8` is encoded as `0..=255`.
+/// This makes comparisons and arithmetic on interval endpoints much more
+/// straightforward. See `signed_bias` for details.
+///
+/// `IntRange` is never used to encode an empty range or a "range" that wraps
+/// around the (offset) space: i.e., `range.lo <= range.hi`.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(super) struct IntRange {
+ range: RangeInclusive<u128>,
+}
+
+impl IntRange {
+ #[inline]
+ fn is_integral(ty: &Ty) -> bool {
+ matches!(
+ ty.kind(Interner),
+ TyKind::Scalar(Scalar::Char | Scalar::Int(_) | Scalar::Uint(_) | Scalar::Bool)
+ )
+ }
+
+ fn is_singleton(&self) -> bool {
+ self.range.start() == self.range.end()
+ }
+
+ fn boundaries(&self) -> (u128, u128) {
+ (*self.range.start(), *self.range.end())
+ }
+
+ #[inline]
+ fn from_bool(value: bool) -> IntRange {
+ let val = value as u128;
+ IntRange { range: val..=val }
+ }
+
+ #[inline]
+ fn from_range(lo: u128, hi: u128, scalar_ty: Scalar) -> IntRange {
+ match scalar_ty {
+ Scalar::Bool => IntRange { range: lo..=hi },
+ _ => unimplemented!(),
+ }
+ }
+
+ fn is_subrange(&self, other: &Self) -> bool {
+ other.range.start() <= self.range.start() && self.range.end() <= other.range.end()
+ }
+
+ fn intersection(&self, other: &Self) -> Option<Self> {
+ let (lo, hi) = self.boundaries();
+ let (other_lo, other_hi) = other.boundaries();
+ if lo <= other_hi && other_lo <= hi {
+ Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi) })
+ } else {
+ None
+ }
+ }
+
+ fn to_pat(&self, _cx: &MatchCheckCtx<'_, '_>, ty: Ty) -> Pat {
+ match ty.kind(Interner) {
+ TyKind::Scalar(Scalar::Bool) => {
+ let kind = match self.boundaries() {
+ (0, 0) => PatKind::LiteralBool { value: false },
+ (1, 1) => PatKind::LiteralBool { value: true },
+ (0, 1) => PatKind::Wild,
+ (lo, hi) => {
+ never!("bad range for bool pattern: {}..={}", lo, hi);
+ PatKind::Wild
+ }
+ };
+ Pat { ty, kind: kind.into() }
+ }
+ _ => unimplemented!(),
+ }
+ }
+
+ /// See `Constructor::is_covered_by`
+ fn is_covered_by(&self, other: &Self) -> bool {
+ if self.intersection(other).is_some() {
+ // Constructor splitting should ensure that all intersections we encounter are actually
+ // inclusions.
+ assert!(self.is_subrange(other));
+ true
+ } else {
+ false
+ }
+ }
+}
+
+/// Represents a border between 2 integers. Because the intervals spanning borders must be able to
+/// cover every integer, we need to be able to represent 2^128 + 1 such borders.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+enum IntBorder {
+ JustBefore(u128),
+ AfterMax,
+}
+
+/// A range of integers that is partitioned into disjoint subranges. This does constructor
+/// splitting for integer ranges as explained at the top of the file.
+///
+/// This is fed multiple ranges, and returns an output that covers the input, but is split so that
+/// the only intersections between an output range and a seen range are inclusions. No output range
+/// straddles the boundary of one of the inputs.
+///
+/// The following input:
+/// ```
+/// |-------------------------| // `self`
+/// |------| |----------| |----|
+/// |-------| |-------|
+/// ```
+/// would be iterated over as follows:
+/// ```
+/// ||---|--||-|---|---|---|--|
+/// ```
+#[derive(Debug, Clone)]
+struct SplitIntRange {
+ /// The range we are splitting
+ range: IntRange,
+ /// The borders of ranges we have seen. They are all contained within `range`. This is kept
+ /// sorted.
+ borders: Vec<IntBorder>,
+}
+
+impl SplitIntRange {
+ fn new(range: IntRange) -> Self {
+ SplitIntRange { range, borders: Vec::new() }
+ }
+
+ /// Internal use
+ fn to_borders(r: IntRange) -> [IntBorder; 2] {
+ use IntBorder::*;
+ let (lo, hi) = r.boundaries();
+ let lo = JustBefore(lo);
+ let hi = match hi.checked_add(1) {
+ Some(m) => JustBefore(m),
+ None => AfterMax,
+ };
+ [lo, hi]
+ }
+
+ /// Add ranges relative to which we split.
+ fn split(&mut self, ranges: impl Iterator<Item = IntRange>) {
+ let this_range = &self.range;
+ let included_ranges = ranges.filter_map(|r| this_range.intersection(&r));
+ let included_borders = included_ranges.flat_map(|r| {
+ let borders = Self::to_borders(r);
+ once(borders[0]).chain(once(borders[1]))
+ });
+ self.borders.extend(included_borders);
+ self.borders.sort_unstable();
+ }
+
+ /// Iterate over the contained ranges.
+ fn iter(&self) -> impl Iterator<Item = IntRange> + '_ {
+ use IntBorder::*;
+
+ let self_range = Self::to_borders(self.range.clone());
+ // Start with the start of the range.
+ let mut prev_border = self_range[0];
+ self.borders
+ .iter()
+ .copied()
+ // End with the end of the range.
+ .chain(once(self_range[1]))
+ // List pairs of adjacent borders.
+ .map(move |border| {
+ let ret = (prev_border, border);
+ prev_border = border;
+ ret
+ })
+ // Skip duplicates.
+ .filter(|(prev_border, border)| prev_border != border)
+ // Finally, convert to ranges.
+ .map(|(prev_border, border)| {
+ let range = match (prev_border, border) {
+ (JustBefore(n), JustBefore(m)) if n < m => n..=(m - 1),
+ (JustBefore(n), AfterMax) => n..=u128::MAX,
+ _ => unreachable!(), // Ruled out by the sorting and filtering we did
+ };
+ IntRange { range }
+ })
+ }
+}
+
+/// A constructor for array and slice patterns.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(super) struct Slice {
+ _unimplemented: Void,
+}
+
+impl Slice {
+ fn arity(self) -> usize {
+ match self._unimplemented {}
+ }
+
+ /// See `Constructor::is_covered_by`
+ fn is_covered_by(self, _other: Self) -> bool {
+ match self._unimplemented {}
+ }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// the constructor. See also `Fields`.
+///
+/// `pat_constructor` retrieves the constructor corresponding to a pattern.
+/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a
+/// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and
+/// `Fields`.
+#[allow(dead_code)]
+#[derive(Clone, Debug, PartialEq)]
+pub(super) enum Constructor {
+ /// The constructor for patterns that have a single constructor, like tuples, struct patterns
+ /// and fixed-length arrays.
+ Single,
+ /// Enum variants.
+ Variant(EnumVariantId),
+ /// Ranges of integer literal values (`2`, `2..=5` or `2..5`).
+ IntRange(IntRange),
+ /// Ranges of floating-point literal values (`2.0..=5.2`).
+ FloatRange(Void),
+ /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately.
+ Str(Void),
+ /// Array and slice patterns.
+ Slice(Slice),
+ /// Constants that must not be matched structurally. They are treated as black
+ /// boxes for the purposes of exhaustiveness: we must not inspect them, and they
+ /// don't count towards making a match exhaustive.
+ Opaque,
+ /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used
+ /// for those types for which we cannot list constructors explicitly, like `f64` and `str`.
+ NonExhaustive,
+ /// Stands for constructors that are not seen in the matrix, as explained in the documentation
+ /// for [`SplitWildcard`]. The carried `bool` is used for the `non_exhaustive_omitted_patterns`
+ /// lint.
+ Missing { nonexhaustive_enum_missing_real_variants: bool },
+ /// Wildcard pattern.
+ Wildcard,
+ /// Or-pattern.
+ Or,
+}
+
+impl Constructor {
+ pub(super) fn is_wildcard(&self) -> bool {
+ matches!(self, Wildcard)
+ }
+
+ pub(super) fn is_non_exhaustive(&self) -> bool {
+ matches!(self, NonExhaustive)
+ }
+
+ fn as_int_range(&self) -> Option<&IntRange> {
+ match self {
+ IntRange(range) => Some(range),
+ _ => None,
+ }
+ }
+
+ fn as_slice(&self) -> Option<Slice> {
+ match self {
+ Slice(slice) => Some(*slice),
+ _ => None,
+ }
+ }
+
+ pub(super) fn is_unstable_variant(&self, _pcx: PatCtxt<'_, '_>) -> bool {
+ false //FIXME: implement this
+ }
+
+ pub(super) fn is_doc_hidden_variant(&self, _pcx: PatCtxt<'_, '_>) -> bool {
+ false //FIXME: implement this
+ }
+
+ fn variant_id_for_adt(&self, adt: hir_def::AdtId) -> VariantId {
+ match *self {
+ Variant(id) => id.into(),
+ Single => {
+ assert!(!matches!(adt, hir_def::AdtId::EnumId(_)));
+ match adt {
+ hir_def::AdtId::EnumId(_) => unreachable!(),
+ hir_def::AdtId::StructId(id) => id.into(),
+ hir_def::AdtId::UnionId(id) => id.into(),
+ }
+ }
+ _ => panic!("bad constructor {:?} for adt {:?}", self, adt),
+ }
+ }
+
+ /// The number of fields for this constructor. This must be kept in sync with
+ /// `Fields::wildcards`.
+ pub(super) fn arity(&self, pcx: PatCtxt<'_, '_>) -> usize {
+ match self {
+ Single | Variant(_) => match *pcx.ty.kind(Interner) {
+ TyKind::Tuple(arity, ..) => arity,
+ TyKind::Ref(..) => 1,
+ TyKind::Adt(adt, ..) => {
+ if is_box(adt.0, pcx.cx.db) {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ 1
+ } else {
+ let variant = self.variant_id_for_adt(adt.0);
+ Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant).count()
+ }
+ }
+ _ => {
+ never!("Unexpected type for `Single` constructor: {:?}", pcx.ty);
+ 0
+ }
+ },
+ Slice(slice) => slice.arity(),
+ Str(..)
+ | FloatRange(..)
+ | IntRange(..)
+ | NonExhaustive
+ | Opaque
+ | Missing { .. }
+ | Wildcard => 0,
+ Or => {
+ never!("The `Or` constructor doesn't have a fixed arity");
+ 0
+ }
+ }
+ }
+
+ /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) actually stand for a set of actual
+ /// constructors (like variants, integers or fixed-sized slices). When specializing for these
+ /// constructors, we want to be specializing for the actual underlying constructors.
+ /// Naively, we would simply return the list of constructors they correspond to. We instead are
+ /// more clever: if there are constructors that we know will behave the same wrt the current
+ /// matrix, we keep them grouped. For example, all slices of a sufficiently large length
+ /// will either be all useful or all non-useful with a given matrix.
+ ///
+ /// See the branches for details on how the splitting is done.
+ ///
+ /// This function may discard some irrelevant constructors if this preserves behavior and
+ /// diagnostics. Eg. for the `_` case, we ignore the constructors already present in the
+ /// matrix, unless all of them are.
+ pub(super) fn split<'a>(
+ &self,
+ pcx: PatCtxt<'_, '_>,
+ ctors: impl Iterator<Item = &'a Constructor> + Clone,
+ ) -> SmallVec<[Self; 1]> {
+ match self {
+ Wildcard => {
+ let mut split_wildcard = SplitWildcard::new(pcx);
+ split_wildcard.split(pcx, ctors);
+ split_wildcard.into_ctors(pcx)
+ }
+ // Fast-track if the range is trivial. In particular, we don't do the overlapping
+ // ranges check.
+ IntRange(ctor_range) if !ctor_range.is_singleton() => {
+ let mut split_range = SplitIntRange::new(ctor_range.clone());
+ let int_ranges = ctors.filter_map(|ctor| ctor.as_int_range());
+ split_range.split(int_ranges.cloned());
+ split_range.iter().map(IntRange).collect()
+ }
+ Slice(slice) => match slice._unimplemented {},
+ // Any other constructor can be used unchanged.
+ _ => smallvec![self.clone()],
+ }
+ }
+
+ /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.
+ /// For the simple cases, this is simply checking for equality. For the "grouped" constructors,
+ /// this checks for inclusion.
+ // We inline because this has a single call site in `Matrix::specialize_constructor`.
+ #[inline]
+ pub(super) fn is_covered_by(&self, _pcx: PatCtxt<'_, '_>, other: &Self) -> bool {
+ // This must be kept in sync with `is_covered_by_any`.
+ match (self, other) {
+ // Wildcards cover anything
+ (_, Wildcard) => true,
+ // The missing ctors are not covered by anything in the matrix except wildcards.
+ (Missing { .. } | Wildcard, _) => false,
+
+ (Single, Single) => true,
+ (Variant(self_id), Variant(other_id)) => self_id == other_id,
+
+ (IntRange(self_range), IntRange(other_range)) => self_range.is_covered_by(other_range),
+ (FloatRange(void), FloatRange(..)) => match *void {},
+ (Str(void), Str(..)) => match *void {},
+ (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice),
+
+ // We are trying to inspect an opaque constant. Thus we skip the row.
+ (Opaque, _) | (_, Opaque) => false,
+ // Only a wildcard pattern can match the special extra constructor.
+ (NonExhaustive, _) => false,
+
+ _ => {
+ never!("trying to compare incompatible constructors {:?} and {:?}", self, other);
+ // Continue with 'whatever is covered' supposed to result in false no-error diagnostic.
+ true
+ }
+ }
+ }
+
+ /// Faster version of `is_covered_by` when applied to many constructors. `used_ctors` is
+ /// assumed to be built from `matrix.head_ctors()` with wildcards filtered out, and `self` is
+ /// assumed to have been split from a wildcard.
+ fn is_covered_by_any(&self, _pcx: PatCtxt<'_, '_>, used_ctors: &[Constructor]) -> bool {
+ if used_ctors.is_empty() {
+ return false;
+ }
+
+ // This must be kept in sync with `is_covered_by`.
+ match self {
+ // If `self` is `Single`, `used_ctors` cannot contain anything else than `Single`s.
+ Single => !used_ctors.is_empty(),
+ Variant(_) => used_ctors.iter().any(|c| c == self),
+ IntRange(range) => used_ctors
+ .iter()
+ .filter_map(|c| c.as_int_range())
+ .any(|other| range.is_covered_by(other)),
+ Slice(slice) => used_ctors
+ .iter()
+ .filter_map(|c| c.as_slice())
+ .any(|other| slice.is_covered_by(other)),
+ // This constructor is never covered by anything else
+ NonExhaustive => false,
+ Str(..) | FloatRange(..) | Opaque | Missing { .. } | Wildcard | Or => {
+ never!("found unexpected ctor in all_ctors: {:?}", self);
+ true
+ }
+ }
+ }
+}
+
+/// A wildcard constructor that we split relative to the constructors in the matrix, as explained
+/// at the top of the file.
+///
+/// A constructor that is not present in the matrix rows will only be covered by the rows that have
+/// wildcards. Thus we can group all of those constructors together; we call them "missing
+/// constructors". Splitting a wildcard would therefore list all present constructors individually
+/// (or grouped if they are integers or slices), and then all missing constructors together as a
+/// group.
+///
+/// However we can go further: since any constructor will match the wildcard rows, and having more
+/// rows can only reduce the amount of usefulness witnesses, we can skip the present constructors
+/// and only try the missing ones.
+/// This will not preserve the whole list of witnesses, but will preserve whether the list is empty
+/// or not. In fact this is quite natural from the point of view of diagnostics too. This is done
+/// in `to_ctors`: in some cases we only return `Missing`.
+#[derive(Debug)]
+pub(super) struct SplitWildcard {
+ /// Constructors seen in the matrix.
+ matrix_ctors: Vec<Constructor>,
+ /// All the constructors for this type
+ all_ctors: SmallVec<[Constructor; 1]>,
+}
+
+impl SplitWildcard {
+ pub(super) fn new(pcx: PatCtxt<'_, '_>) -> Self {
+ let cx = pcx.cx;
+ let make_range = |start, end, scalar| IntRange(IntRange::from_range(start, end, scalar));
+
+ // Unhandled types are treated as non-exhaustive. Being explicit here instead of falling
+ // to catchall arm to ease further implementation.
+ let unhandled = || smallvec![NonExhaustive];
+
+ // This determines the set of all possible constructors for the type `pcx.ty`. For numbers,
+ // arrays and slices we use ranges and variable-length slices when appropriate.
+ //
+ // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that
+ // are statically impossible. E.g., for `Option<!>`, we do not include `Some(_)` in the
+ // returned list of constructors.
+ // Invariant: this is empty if and only if the type is uninhabited (as determined by
+ // `cx.is_uninhabited()`).
+ let all_ctors = match pcx.ty.kind(Interner) {
+ TyKind::Scalar(Scalar::Bool) => smallvec![make_range(0, 1, Scalar::Bool)],
+ // TyKind::Array(..) if ... => unhandled(),
+ TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
+ &TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ..) => {
+ let enum_data = cx.db.enum_data(enum_id);
+
+ // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an
+ // additional "unknown" constructor.
+ // There is no point in enumerating all possible variants, because the user can't
+ // actually match against them all themselves. So we always return only the fictitious
+ // constructor.
+ // E.g., in an example like:
+ //
+ // ```
+ // let err: io::ErrorKind = ...;
+ // match err {
+ // io::ErrorKind::NotFound => {},
+ // }
+ // ```
+ //
+ // we don't want to show every possible IO error, but instead have only `_` as the
+ // witness.
+ let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty);
+
+ let is_exhaustive_pat_feature = cx.feature_exhaustive_patterns();
+
+ // If `exhaustive_patterns` is disabled and our scrutinee is an empty enum, we treat it
+ // as though it had an "unknown" constructor to avoid exposing its emptiness. The
+ // exception is if the pattern is at the top level, because we want empty matches to be
+ // considered exhaustive.
+ let is_secretly_empty = enum_data.variants.is_empty()
+ && !is_exhaustive_pat_feature
+ && !pcx.is_top_level;
+
+ let mut ctors: SmallVec<[_; 1]> = enum_data
+ .variants
+ .iter()
+ .filter(|&(_, _v)| {
+ // If `exhaustive_patterns` is enabled, we exclude variants known to be
+ // uninhabited.
+ let is_uninhabited = is_exhaustive_pat_feature
+ && unimplemented!("after MatchCheckCtx.feature_exhaustive_patterns()");
+ !is_uninhabited
+ })
+ .map(|(local_id, _)| Variant(EnumVariantId { parent: enum_id, local_id }))
+ .collect();
+
+ if is_secretly_empty || is_declared_nonexhaustive {
+ ctors.push(NonExhaustive);
+ }
+ ctors
+ }
+ TyKind::Scalar(Scalar::Char) => unhandled(),
+ TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
+ TyKind::Never if !cx.feature_exhaustive_patterns() && !pcx.is_top_level => {
+ smallvec![NonExhaustive]
+ }
+ TyKind::Never => SmallVec::new(),
+ _ if cx.is_uninhabited(pcx.ty) => SmallVec::new(),
+ TyKind::Adt(..) | TyKind::Tuple(..) | TyKind::Ref(..) => smallvec![Single],
+ // This type is one for which we cannot list constructors, like `str` or `f64`.
+ _ => smallvec![NonExhaustive],
+ };
+
+ SplitWildcard { matrix_ctors: Vec::new(), all_ctors }
+ }
+
+ /// Pass a set of constructors relative to which to split this one. Don't call twice, it won't
+ /// do what you want.
+ pub(super) fn split<'a>(
+ &mut self,
+ pcx: PatCtxt<'_, '_>,
+ ctors: impl Iterator<Item = &'a Constructor> + Clone,
+ ) {
+ // Since `all_ctors` never contains wildcards, this won't recurse further.
+ self.all_ctors =
+ self.all_ctors.iter().flat_map(|ctor| ctor.split(pcx, ctors.clone())).collect();
+ self.matrix_ctors = ctors.filter(|c| !c.is_wildcard()).cloned().collect();
+ }
+
+ /// Whether there are any value constructors for this type that are not present in the matrix.
+ fn any_missing(&self, pcx: PatCtxt<'_, '_>) -> bool {
+ self.iter_missing(pcx).next().is_some()
+ }
+
+ /// Iterate over the constructors for this type that are not present in the matrix.
+ pub(super) fn iter_missing<'a, 'p>(
+ &'a self,
+ pcx: PatCtxt<'a, 'p>,
+ ) -> impl Iterator<Item = &'a Constructor> + Captures<'p> {
+ self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.matrix_ctors))
+ }
+
+    /// Return the set of constructors resulting from splitting the wildcard. As explained at the
+    /// top of the file, if any constructors are missing we can ignore the present ones.
+    fn into_ctors(self, pcx: PatCtxt<'_, '_>) -> SmallVec<[Constructor; 1]> {
+        if self.any_missing(pcx) {
+            // Some constructors are missing, thus we can specialize with the special `Missing`
+            // constructor, which stands for those constructors that are not seen in the matrix,
+            // and matches the same rows as any of them (namely the wildcard rows). See the top of
+            // the file for details.
+            // However, when all constructors are missing we can also specialize with the full
+            // `Wildcard` constructor. The difference will depend on what we want in diagnostics.
+
+            // If some constructors are missing, we typically want to report those constructors,
+            // e.g.:
+            // ```
+            // enum Direction { N, S, E, W }
+            // let Direction::N = ...;
+            // ```
+            // we can report 3 witnesses: `S`, `E`, and `W`.
+            //
+            // However, if the user didn't actually specify a constructor
+            // in this arm, e.g., in
+            // ```
+            // let x: (Direction, Direction, bool) = ...;
+            // let (_, _, false) = x;
+            // ```
+            // we don't want to show all 16 possible witnesses `(<direction-1>, <direction-2>,
+            // true)` - we are satisfied with `(_, _, true)`. So if all constructors are missing we
+            // prefer to report just a wildcard `_`.
+            //
+            // The exception is: if we are at the top-level, for example in an empty match, we
+            // sometimes prefer reporting the list of constructors instead of just `_`.
+            let report_when_all_missing = pcx.is_top_level && !IntRange::is_integral(pcx.ty);
+            let ctor = if !self.matrix_ctors.is_empty() || report_when_all_missing {
+                if pcx.is_non_exhaustive {
+                    // For a `#[non_exhaustive]` enum, additionally record whether any *real*
+                    // missing variant remains (i.e. not the synthetic `NonExhaustive` ctor and
+                    // not an unstable variant); diagnostics use this flag.
+                    Missing {
+                        nonexhaustive_enum_missing_real_variants: self
+                            .iter_missing(pcx)
+                            .any(|c| !(c.is_non_exhaustive() || c.is_unstable_variant(pcx))),
+                    }
+                } else {
+                    Missing { nonexhaustive_enum_missing_real_variants: false }
+                }
+            } else {
+                Wildcard
+            };
+            return smallvec![ctor];
+        }
+
+        // All the constructors are present in the matrix, so we just go through them all.
+        self.all_ctors
+    }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// those fields, generalized to allow patterns in each field. See also `Constructor`.
+///
+/// This is constructed for a constructor using [`Fields::wildcards()`]. The idea is that
+/// [`Fields::wildcards()`] constructs a list of fields where all entries are wildcards, and then
+/// given a pattern we fill some of the fields with its subpatterns.
+/// In the following example `Fields::wildcards` returns `[_, _, _, _]`. Then in
+/// `extract_pattern_arguments` we fill some of the entries, and the result is
+/// `[Some(0), _, _, _]`.
+/// ```rust
+/// let x: [Option<u8>; 4] = foo();
+/// match x {
+///     [Some(0), ..] => {}
+/// }
+/// ```
+///
+/// Note that the number of fields of a constructor may not match the fields declared in the
+/// original struct/variant. This happens if a private or `non_exhaustive` field is uninhabited,
+/// because the code mustn't observe that it is uninhabited. In that case that field is not
+/// included in `fields`. For that reason, when you have a `mir::Field` you must use
+/// `index_with_declared_idx`.
+#[derive(Clone, Copy)]
+pub(super) struct Fields<'p> {
+    // A slice into the shared pattern arena; `Copy` because this is just a reference.
+    fields: &'p [DeconstructedPat<'p>],
+}
+
+impl<'p> Fields<'p> {
+    /// An empty field list, used for nullary constructors.
+    fn empty() -> Self {
+        Fields { fields: &[] }
+    }
+
+    /// A field list holding exactly one pattern, allocated in the pattern arena.
+    fn singleton(cx: &MatchCheckCtx<'_, 'p>, field: DeconstructedPat<'p>) -> Self {
+        let field = cx.pattern_arena.alloc(field);
+        Fields { fields: std::slice::from_ref(field) }
+    }
+
+    /// Allocates the given patterns in the arena and wraps them as a field list.
+    pub(super) fn from_iter(
+        cx: &MatchCheckCtx<'_, 'p>,
+        fields: impl IntoIterator<Item = DeconstructedPat<'p>>,
+    ) -> Self {
+        let fields: &[_] = cx.pattern_arena.alloc_extend(fields);
+        Fields { fields }
+    }
+
+    /// One wildcard pattern (`_`) per given type.
+    fn wildcards_from_tys(cx: &MatchCheckCtx<'_, 'p>, tys: impl IntoIterator<Item = Ty>) -> Self {
+        Fields::from_iter(cx, tys.into_iter().map(DeconstructedPat::wildcard))
+    }
+
+    // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
+    // uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
+    // This lists the fields we keep along with their types.
+    fn list_variant_nonhidden_fields<'a>(
+        cx: &'a MatchCheckCtx<'a, 'p>,
+        ty: &'a Ty,
+        variant: VariantId,
+    ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'p> {
+        // NOTE: panics if `ty` is not an ADT; callers only pass ADT types here.
+        let (adt, substs) = ty.as_adt().unwrap();
+
+        let adt_is_local = variant.module(cx.db.upcast()).krate() == cx.module.krate();
+        // Whether we must not match the fields of this variant exhaustively.
+        let is_non_exhaustive = is_field_list_non_exhaustive(variant, cx) && !adt_is_local;
+
+        let visibility = cx.db.field_visibilities(variant);
+        let field_ty = cx.db.field_types(variant);
+        let fields_len = variant.variant_data(cx.db.upcast()).fields().len() as u32;
+
+        (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| {
+            let ty = field_ty[fid].clone().substitute(Interner, substs);
+            let ty = normalize(cx.db, cx.body, ty);
+            // Enum variant fields count as visible for matching purposes; struct/union fields
+            // must actually be visible from the current module.
+            let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
+                || visibility[fid].is_visible_from(cx.db.upcast(), cx.module);
+            let is_uninhabited = cx.is_uninhabited(&ty);
+
+            // Hide uninhabited fields the user couldn't name/observe anyway.
+            if is_uninhabited && (!is_visible || is_non_exhaustive) {
+                None
+            } else {
+                Some((fid, ty))
+            }
+        })
+    }
+
+    /// Creates a new list of wildcard fields for a given constructor. The result must have a
+    /// length of `constructor.arity()`.
+    pub(crate) fn wildcards(
+        cx: &MatchCheckCtx<'_, 'p>,
+        ty: &Ty,
+        constructor: &Constructor,
+    ) -> Self {
+        let ret = match constructor {
+            Single | Variant(_) => match ty.kind(Interner) {
+                TyKind::Tuple(_, substs) => {
+                    let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner));
+                    Fields::wildcards_from_tys(cx, tys.cloned())
+                }
+                TyKind::Ref(.., rty) => Fields::wildcards_from_tys(cx, once(rty.clone())),
+                &TyKind::Adt(AdtId(adt), ref substs) => {
+                    if is_box(adt, cx.db) {
+                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
+                        // patterns. If we're here we can assume this is a box pattern.
+                        let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+                        Fields::wildcards_from_tys(cx, once(subst_ty))
+                    } else {
+                        let variant = constructor.variant_id_for_adt(adt);
+                        let tys = Fields::list_variant_nonhidden_fields(cx, ty, variant)
+                            .map(|(_, ty)| ty);
+                        Fields::wildcards_from_tys(cx, tys)
+                    }
+                }
+                ty_kind => {
+                    never!("Unexpected type for `Single` constructor: {:?}", ty_kind);
+                    Fields::wildcards_from_tys(cx, once(ty.clone()))
+                }
+            },
+            // Slice patterns are not implemented in this port; `_unimplemented` is an
+            // uninhabited type, so this arm is statically unreachable.
+            Slice(slice) => match slice._unimplemented {},
+            Str(..)
+            | FloatRange(..)
+            | IntRange(..)
+            | NonExhaustive
+            | Opaque
+            | Missing { .. }
+            | Wildcard => Fields::empty(),
+            Or => {
+                never!("called `Fields::wildcards` on an `Or` ctor");
+                Fields::empty()
+            }
+        };
+        ret
+    }
+
+    /// Returns the list of patterns.
+    pub(super) fn iter_patterns<'a>(
+        &'a self,
+    ) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + Captures<'a> {
+        self.fields.iter()
+    }
+}
+
+/// Values and patterns can be represented as a constructor applied to some fields. This represents
+/// a pattern in this form.
+/// This also keeps track of whether the pattern has been found reachable during analysis. For this
+/// reason we should be careful not to clone patterns for which we care about that. Use
+/// `clone_and_forget_reachability` if you're sure.
+pub(crate) struct DeconstructedPat<'p> {
+    ctor: Constructor,
+    fields: Fields<'p>,
+    ty: Ty,
+    /// Interior mutability: flipped to `true` by `set_reachable` during the analysis.
+    reachable: Cell<bool>,
+}
+
+impl<'p> DeconstructedPat<'p> {
+    /// The wildcard pattern `_` of type `ty`: matches everything and has no fields.
+    pub(super) fn wildcard(ty: Ty) -> Self {
+        Self::new(Wildcard, Fields::empty(), ty)
+    }
+
+    /// A pattern of type `ty` made of `ctor` applied to `fields`; starts out not-reachable.
+    pub(super) fn new(ctor: Constructor, fields: Fields<'p>, ty: Ty) -> Self {
+        DeconstructedPat { ctor, fields, ty, reachable: Cell::new(false) }
+    }
+
+    /// Construct a pattern that matches everything that starts with this constructor.
+    /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
+    /// `Some(_)`.
+    pub(super) fn wild_from_ctor(pcx: PatCtxt<'_, 'p>, ctor: Constructor) -> Self {
+        let fields = Fields::wildcards(pcx.cx, pcx.ty, &ctor);
+        DeconstructedPat::new(ctor, fields, pcx.ty.clone())
+    }
+
+    /// Clone this value. This method emphasizes that cloning loses reachability information and
+    /// should be done carefully.
+    pub(super) fn clone_and_forget_reachability(&self) -> Self {
+        // `self.fields` is `Copy` (a slice reference), so the subpatterns are shared, not cloned.
+        DeconstructedPat::new(self.ctor.clone(), self.fields, self.ty.clone())
+    }
+
+    /// Lowers a `Pat` into the ctor-plus-fields representation used by the algorithm.
+    pub(crate) fn from_pat(cx: &MatchCheckCtx<'_, 'p>, pat: &Pat) -> Self {
+        let mkpat = |pat| DeconstructedPat::from_pat(cx, pat);
+        let ctor;
+        let fields;
+        match pat.kind.as_ref() {
+            // A binding matches like its subpattern, or like `_` when it has none.
+            PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat),
+            PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
+                ctor = Wildcard;
+                fields = Fields::empty();
+            }
+            PatKind::Deref { subpattern } => {
+                ctor = Single;
+                fields = Fields::singleton(cx, mkpat(subpattern));
+            }
+            PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
+                match pat.ty.kind(Interner) {
+                    TyKind::Tuple(_, substs) => {
+                        ctor = Single;
+                        // Start from all-wildcards, then overwrite the positions that carry an
+                        // explicit subpattern.
+                        let mut wilds: SmallVec<[_; 2]> = substs
+                            .iter(Interner)
+                            .map(|arg| arg.assert_ty_ref(Interner).clone())
+                            .map(DeconstructedPat::wildcard)
+                            .collect();
+                        for pat in subpatterns {
+                            let idx: u32 = pat.field.into_raw().into();
+                            wilds[idx as usize] = mkpat(&pat.pattern);
+                        }
+                        fields = Fields::from_iter(cx, wilds)
+                    }
+                    TyKind::Adt(adt, substs) if is_box(adt.0, cx.db) => {
+                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
+                        // patterns. If we're here we can assume this is a box pattern.
+                        // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
+                        // _)` or a box pattern. As a hack to avoid an ICE with the former, we
+                        // ignore other fields than the first one. This will trigger an error later
+                        // anyway.
+                        // See https://github.com/rust-lang/rust/issues/82772 ,
+                        // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
+                        // The problem is that we can't know from the type whether we'll match
+                        // normally or through box-patterns. We'll have to figure out a proper
+                        // solution when we introduce generalized deref patterns. Also need to
+                        // prevent mixing of those two options.
+                        let pat =
+                            subpatterns.iter().find(|pat| pat.field.into_raw() == 0u32.into());
+                        let field = if let Some(pat) = pat {
+                            mkpat(&pat.pattern)
+                        } else {
+                            let ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+                            DeconstructedPat::wildcard(ty)
+                        };
+                        ctor = Single;
+                        fields = Fields::singleton(cx, field)
+                    }
+                    &TyKind::Adt(adt, _) => {
+                        ctor = match pat.kind.as_ref() {
+                            PatKind::Leaf { .. } => Single,
+                            PatKind::Variant { enum_variant, .. } => Variant(*enum_variant),
+                            _ => {
+                                never!();
+                                Wildcard
+                            }
+                        };
+                        let variant = ctor.variant_id_for_adt(adt.0);
+                        let fields_len = variant.variant_data(cx.db.upcast()).fields().len();
+                        // For each field in the variant, we store the relevant index into `self.fields` if any.
+                        // Hidden fields (see `list_variant_nonhidden_fields`) get no entry, so
+                        // subpatterns on them are silently dropped below.
+                        let mut field_id_to_id: Vec<Option<usize>> = vec![None; fields_len];
+                        let tys = Fields::list_variant_nonhidden_fields(cx, &pat.ty, variant)
+                            .enumerate()
+                            .map(|(i, (fid, ty))| {
+                                let field_idx: u32 = fid.into_raw().into();
+                                field_id_to_id[field_idx as usize] = Some(i);
+                                ty
+                            });
+                        let mut wilds: SmallVec<[_; 2]> =
+                            tys.map(DeconstructedPat::wildcard).collect();
+                        for pat in subpatterns {
+                            let field_idx: u32 = pat.field.into_raw().into();
+                            if let Some(i) = field_id_to_id[field_idx as usize] {
+                                wilds[i] = mkpat(&pat.pattern);
+                            }
+                        }
+                        fields = Fields::from_iter(cx, wilds);
+                    }
+                    _ => {
+                        never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
+                        ctor = Wildcard;
+                        fields = Fields::empty();
+                    }
+                }
+            }
+            &PatKind::LiteralBool { value } => {
+                // Booleans are treated as the integer range `value..=value`.
+                ctor = IntRange(IntRange::from_bool(value));
+                fields = Fields::empty();
+            }
+            PatKind::Or { .. } => {
+                ctor = Or;
+                // The `Or` ctor's fields are the flattened alternatives.
+                let pats: SmallVec<[_; 2]> = expand_or_pat(pat).into_iter().map(mkpat).collect();
+                fields = Fields::from_iter(cx, pats)
+            }
+        }
+        DeconstructedPat::new(ctor, fields, pat.ty.clone())
+    }
+
+    /// Converts back into a `Pat`, used to report witnesses of non-exhaustiveness to the user.
+    pub(crate) fn to_pat(&self, cx: &MatchCheckCtx<'_, 'p>) -> Pat {
+        let mut subpatterns = self.iter_fields().map(|p| p.to_pat(cx));
+        let pat = match &self.ctor {
+            Single | Variant(_) => match self.ty.kind(Interner) {
+                TyKind::Tuple(..) => PatKind::Leaf {
+                    subpatterns: subpatterns
+                        .zip(0u32..)
+                        .map(|(p, i)| FieldPat {
+                            field: LocalFieldId::from_raw(i.into()),
+                            pattern: p,
+                        })
+                        .collect(),
+                },
+                TyKind::Adt(adt, _) if is_box(adt.0, cx.db) => {
+                    // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
+                    // of `std`). So this branch is only reachable when the feature is enabled and
+                    // the pattern is a box pattern.
+                    PatKind::Deref { subpattern: subpatterns.next().unwrap() }
+                }
+                TyKind::Adt(adt, substs) => {
+                    let variant = self.ctor.variant_id_for_adt(adt.0);
+                    let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty(), variant)
+                        .zip(subpatterns)
+                        .map(|((field, _ty), pattern)| FieldPat { field, pattern })
+                        .collect();
+
+                    if let VariantId::EnumVariantId(enum_variant) = variant {
+                        PatKind::Variant { substs: substs.clone(), enum_variant, subpatterns }
+                    } else {
+                        PatKind::Leaf { subpatterns }
+                    }
+                }
+                // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+                // be careful to reconstruct the correct constant pattern here. However a string
+                // literal pattern will never be reported as a non-exhaustiveness witness, so we
+                // ignore this issue.
+                TyKind::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
+                _ => {
+                    never!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty);
+                    PatKind::Wild
+                }
+            },
+            // The next three ctors carry uninhabited payloads, so these arms are unreachable.
+            &Slice(slice) => match slice._unimplemented {},
+            &Str(void) => match void {},
+            &FloatRange(void) => match void {},
+            IntRange(range) => return range.to_pat(cx, self.ty.clone()),
+            Wildcard | NonExhaustive => PatKind::Wild,
+            Missing { .. } => {
+                never!(
+                    "trying to convert a `Missing` constructor into a `Pat`; this is a bug, \
+                    `Missing` should have been processed in `apply_constructors`"
+                );
+                PatKind::Wild
+            }
+            Opaque | Or => {
+                never!("can't convert to pattern: {:?}", self.ctor);
+                PatKind::Wild
+            }
+        };
+        Pat { ty: self.ty.clone(), kind: Box::new(pat) }
+    }
+
+    /// Whether the head constructor is an or-pattern.
+    pub(super) fn is_or_pat(&self) -> bool {
+        matches!(self.ctor, Or)
+    }
+
+    /// The head constructor of this pattern.
+    pub(super) fn ctor(&self) -> &Constructor {
+        &self.ctor
+    }
+
+    /// The type of this pattern.
+    pub(super) fn ty(&self) -> &Ty {
+        &self.ty
+    }
+
+    /// Iterates over this pattern's fields (immediate subpatterns).
+    pub(super) fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + 'a {
+        self.fields.iter_patterns()
+    }
+
+    /// Specialize this pattern with a constructor.
+    /// `other_ctor` can be different from `self.ctor`, but must be covered by it.
+    pub(super) fn specialize<'a>(
+        &'a self,
+        cx: &MatchCheckCtx<'_, 'p>,
+        other_ctor: &Constructor,
+    ) -> SmallVec<[&'p DeconstructedPat<'p>; 2]> {
+        match (&self.ctor, other_ctor) {
+            (Wildcard, _) => {
+                // We return a wildcard for each field of `other_ctor`.
+                Fields::wildcards(cx, &self.ty, other_ctor).iter_patterns().collect()
+            }
+            (Slice(self_slice), Slice(other_slice))
+                if self_slice.arity() != other_slice.arity() =>
+            {
+                // Unreachable: `_unimplemented` is uninhabited (slices not supported yet).
+                match self_slice._unimplemented {}
+            }
+            _ => self.fields.iter_patterns().collect(),
+        }
+    }
+
+    /// We keep track for each pattern if it was ever reachable during the analysis. This is used
+    /// with `unreachable_spans` to report unreachable subpatterns arising from or patterns.
+    pub(super) fn set_reachable(&self) {
+        self.reachable.set(true)
+    }
+    /// Whether `set_reachable` was ever called on this pattern.
+    pub(super) fn is_reachable(&self) -> bool {
+        self.reachable.get()
+    }
+}
+
+/// Whether `variant_id`'s field list carries the `#[non_exhaustive]` attribute.
+fn is_field_list_non_exhaustive(variant_id: VariantId, cx: &MatchCheckCtx<'_, '_>) -> bool {
+    // Map each variant kind to its attribute owner; the `.into()` target is inferred from
+    // the parameter type of `attrs`.
+    let attr_def_id = match variant_id {
+        VariantId::EnumVariantId(id) => id.into(),
+        VariantId::StructId(id) => id.into(),
+        VariantId::UnionId(id) => id.into(),
+    };
+    cx.db.attrs(attr_def_id).by_key("non_exhaustive").exists()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs
new file mode 100644
index 000000000..b89b4f2bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs
@@ -0,0 +1,56 @@
+//! Pattern utilities.
+//!
+//! Originates from `rustc_hir::pat_util`
+
+use std::iter::{Enumerate, ExactSizeIterator};
+
+/// An `enumerate`-like iterator whose indices skip over a "gap"
+/// (presumably the span of a `..` rest pattern — originates from `rustc_hir::pat_util`).
+pub(crate) struct EnumerateAndAdjust<I> {
+    enumerate: Enumerate<I>,
+    /// First index affected by the gap; yielded indices `>= gap_pos` are shifted by `gap_len`.
+    gap_pos: usize,
+    /// Number of indices the gap spans.
+    gap_len: usize,
+}
+
+impl<I> Iterator for EnumerateAndAdjust<I>
+where
+    I: Iterator,
+{
+    type Item = (usize, <I as Iterator>::Item);
+
+    fn next(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
+        // Indices before the gap pass through unchanged; indices at or after it are
+        // shifted right by the gap's length.
+        self.enumerate
+            .next()
+            .map(|(i, elem)| (if i < self.gap_pos { i } else { i + self.gap_len }, elem))
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // Adjusting indices does not change the number of items yielded.
+        self.enumerate.size_hint()
+    }
+}
+
+/// Extension trait adding `enumerate_and_adjust` to any `ExactSizeIterator`:
+/// enumerate, but shift indices past `gap_pos` so they line up with a sequence of
+/// `expected_len` elements of which only `self.len()` are actually present.
+pub(crate) trait EnumerateAndAdjustIterator {
+    fn enumerate_and_adjust(
+        self,
+        expected_len: usize,
+        gap_pos: Option<usize>,
+    ) -> EnumerateAndAdjust<Self>
+    where
+        Self: Sized;
+}
+
+impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
+    fn enumerate_and_adjust(
+        self,
+        expected_len: usize,
+        gap_pos: Option<usize>,
+    ) -> EnumerateAndAdjust<Self>
+    where
+        Self: Sized,
+    {
+        let actual_len = self.len();
+        // NOTE(review): assumes `actual_len <= expected_len`; the subtraction below
+        // underflows (panicking in debug builds) otherwise — callers must uphold this.
+        EnumerateAndAdjust {
+            enumerate: self.enumerate(),
+            // With no explicit gap position, place the gap past the end so that no
+            // index ever gets shifted.
+            gap_pos: gap_pos.unwrap_or(expected_len),
+            gap_len: expected_len - actual_len,
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs
new file mode 100644
index 000000000..1221327b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs
@@ -0,0 +1,811 @@
+//! Based on rust-lang/rust (last sync f31622a50 2021-11-12)
+//! <https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs>
+//!
+//! -----
+//!
+//! This file includes the logic for exhaustiveness and reachability checking for pattern-matching.
+//! Specifically, given a list of patterns for a type, we can tell whether:
+//! (a) each pattern is reachable (reachability)
+//! (b) the patterns cover every possible value for the type (exhaustiveness)
+//!
+//! The algorithm implemented here is a modified version of the one described in [this
+//! paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). We have however generalized
+//! it to accommodate the variety of patterns that Rust supports. We thus explain our version here,
+//! without being as rigorous.
+//!
+//!
+//! # Summary
+//!
+//! The core of the algorithm is the notion of "usefulness". A pattern `q` is said to be *useful*
+//! relative to another pattern `p` of the same type if there is a value that is matched by `q` and
+//! not matched by `p`. This generalizes to many `p`s: `q` is useful w.r.t. a list of patterns
+//! `p_1 .. p_n` if there is a value that is matched by `q` and by none of the `p_i`. We write
+//! `usefulness(p_1 .. p_n, q)` for a function that returns a list of such values. The aim of this
+//! file is to compute it efficiently.
+//!
+//! This is enough to compute reachability: a pattern in a `match` expression is reachable iff it
+//! is useful w.r.t. the patterns above it:
+//! ```rust
+//! match x {
+//! Some(_) => ...,
+//! None => ..., // reachable: `None` is matched by this but not the branch above
+//! Some(0) => ..., // unreachable: all the values this matches are already matched by
+//! // `Some(_)` above
+//! }
+//! ```
+//!
+//! This is also enough to compute exhaustiveness: a match is exhaustive iff the wildcard `_`
+//! pattern is _not_ useful w.r.t. the patterns in the match. The values returned by `usefulness`
+//! are used to tell the user which values are missing.
+//! ```rust
+//! match x {
+//! Some(0) => ...,
+//! None => ...,
+//! // not exhaustive: `_` is useful because it matches `Some(1)`
+//! }
+//! ```
+//!
+//! The entrypoint of this file is the [`compute_match_usefulness`] function, which computes
+//! reachability for each match branch and exhaustiveness for the whole match.
+//!
+//!
+//! # Constructors and fields
+//!
+//! Note: we will often abbreviate "constructor" as "ctor".
+//!
+//! The idea that powers everything that is done in this file is the following: a (matcheable)
+//! value is made from a constructor applied to a number of subvalues. Examples of constructors are
+//! `Some`, `None`, `(,)` (the 2-tuple constructor), `Foo {..}` (the constructor for a struct
+//! `Foo`), and `2` (the constructor for the number `2`). This is natural when we think of
+//! pattern-matching, and this is the basis for what follows.
+//!
+//! Some of the ctors listed above might feel weird: `None` and `2` don't take any arguments.
+//! That's ok: those are ctors that take a list of 0 arguments; they are the simplest case of
+//! ctors. We treat `2` as a ctor because `u64` and other number types behave exactly like a huge
+//! `enum`, with one variant for each number. This allows us to see any matcheable value as made up
+//! from a tree of ctors, each having a set number of children. For example: `Foo { bar: None,
+//! baz: Ok(0) }` is made from 4 different ctors, namely `Foo{..}`, `None`, `Ok` and `0`.
+//!
+//! This idea can be extended to patterns: they are also made from constructors applied to fields.
+//! A pattern for a given type is allowed to use all the ctors for values of that type (which we
+//! call "value constructors"), but there are also pattern-only ctors. The most important one is
+//! the wildcard (`_`), and the others are integer ranges (`0..=10`), variable-length slices (`[x,
+//! ..]`), and or-patterns (`Ok(0) | Err(_)`). Examples of valid patterns are `42`, `Some(_)`, `Foo
+//! { bar: Some(0) | None, baz: _ }`. Note that a binder in a pattern (e.g. `Some(x)`) matches the
+//! same values as a wildcard (e.g. `Some(_)`), so we treat both as wildcards.
+//!
+//! From this deconstruction we can compute whether a given value matches a given pattern; we
+//! simply look at ctors one at a time. Given a pattern `p` and a value `v`, we want to compute
+//! `matches!(v, p)`. It's mostly straightforward: we compare the head ctors and when they match
+//! we compare their fields recursively. A few representative examples:
+//!
+//! - `matches!(v, _) := true`
+//! - `matches!((v0, v1), (p0, p1)) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Foo { bar: v0, baz: v1 }, Foo { bar: p0, baz: p1 }) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Ok(v0), Ok(p0)) := matches!(v0, p0)`
+//! - `matches!(Ok(v0), Err(p0)) := false` (incompatible variants)
+//! - `matches!(v, 1..=100) := matches!(v, 1) || ... || matches!(v, 100)`
+//! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths)
+//! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)`
+//! - `matches!(v, p0 | p1) := matches!(v, p0) || matches!(v, p1)`
+//!
+//! Constructors, fields and relevant operations are defined in the [`super::deconstruct_pat`] module.
+//!
+//! Note: this constructors/fields distinction may not straightforwardly apply to every Rust type.
+//! For example a value of type `Rc<u64>` can't be deconstructed that way, and `&str` has an
+//! infinitude of constructors. There are also subtleties with visibility of fields and
+//! uninhabitedness and various other things. The constructors idea can be extended to handle most
+//! of these subtleties though; caveats are documented where relevant throughout the code.
+//!
+//! Whether constructors cover each other is computed by [`Constructor::is_covered_by`].
+//!
+//!
+//! # Specialization
+//!
+//! Recall that we wish to compute `usefulness(p_1 .. p_n, q)`: given a list of patterns `p_1 ..
+//! p_n` and a pattern `q`, all of the same type, we want to find a list of values (called
+//! "witnesses") that are matched by `q` and by none of the `p_i`. We obviously don't just
+//! enumerate all possible values. From the discussion above we see that we can proceed
+//! ctor-by-ctor: for each value ctor of the given type, we ask "is there a value that starts with
+//! this constructor and matches `q` and none of the `p_i`?". As we saw above, there's a lot we can
+//! say from knowing only the first constructor of our candidate value.
+//!
+//! Let's take the following example:
+//! ```
+//! match x {
+//! Enum::Variant1(_) => {} // `p1`
+//! Enum::Variant2(None, 0) => {} // `p2`
+//! Enum::Variant2(Some(_), 0) => {} // `q`
+//! }
+//! ```
+//!
+//! We can easily see that if our candidate value `v` starts with `Variant1` it will not match `q`.
+//! If `v = Variant2(v0, v1)` however, whether or not it matches `p2` and `q` will depend on `v0`
+//! and `v1`. In fact, such a `v` will be a witness of usefulness of `q` exactly when the tuple
+//! `(v0, v1)` is a witness of usefulness of `q'` in the following reduced match:
+//!
+//! ```
+//! match x {
+//! (None, 0) => {} // `p2'`
+//! (Some(_), 0) => {} // `q'`
+//! }
+//! ```
+//!
+//! This motivates a new step in computing usefulness, that we call _specialization_.
+//! Specialization consist of filtering a list of patterns for those that match a constructor, and
+//! then looking into the constructor's fields. This enables usefulness to be computed recursively.
+//!
+//! Instead of acting on a single pattern in each row, we will consider a list of patterns for each
+//! row, and we call such a list a _pattern-stack_. The idea is that we will specialize the
+//! leftmost pattern, which amounts to popping the constructor and pushing its fields, which feels
+//! like a stack. We note a pattern-stack simply with `[p_1 ... p_n]`.
+//! Here's a sequence of specializations of a list of pattern-stacks, to illustrate what's
+//! happening:
+//! ```
+//! [Enum::Variant1(_)]
+//! [Enum::Variant2(None, 0)]
+//! [Enum::Variant2(Some(_), 0)]
+//! //==>> specialize with `Variant2`
+//! [None, 0]
+//! [Some(_), 0]
+//! //==>> specialize with `Some`
+//! [_, 0]
+//! //==>> specialize with `true` (say the type was `bool`)
+//! [0]
+//! //==>> specialize with `0`
+//! []
+//! ```
+//!
+//! The function `specialize(c, p)` takes a value constructor `c` and a pattern `p`, and returns 0
+//! or more pattern-stacks. If `c` does not match the head constructor of `p`, it returns nothing;
+//! otherwise it returns the fields of the constructor. This only returns more than one
+//! pattern-stack if `p` has a pattern-only constructor.
+//!
+//! - Specializing for the wrong constructor returns nothing
+//!
+//! `specialize(None, Some(p0)) := []`
+//!
+//! - Specializing for the correct constructor returns a single row with the fields
+//!
+//! `specialize(Variant1, Variant1(p0, p1, p2)) := [[p0, p1, p2]]`
+//!
+//! `specialize(Foo{..}, Foo { bar: p0, baz: p1 }) := [[p0, p1]]`
+//!
+//! - For or-patterns, we specialize each branch and concatenate the results
+//!
+//! `specialize(c, p0 | p1) := specialize(c, p0) ++ specialize(c, p1)`
+//!
+//! - We treat the other pattern constructors as if they were a large or-pattern of all the
+//! possibilities:
+//!
+//! `specialize(c, _) := specialize(c, Variant1(_) | Variant2(_, _) | ...)`
+//!
+//! `specialize(c, 1..=100) := specialize(c, 1 | ... | 100)`
+//!
+//! `specialize(c, [p0, .., p1]) := specialize(c, [p0, p1] | [p0, _, p1] | [p0, _, _, p1] | ...)`
+//!
+//! - If `c` is a pattern-only constructor, `specialize` is defined on a case-by-case basis. See
+//! the discussion about constructor splitting in [`super::deconstruct_pat`].
+//!
+//!
+//! We then extend this function to work with pattern-stacks as input, by acting on the first
+//! column and keeping the other columns untouched.
+//!
+//! Specialization for the whole matrix is done in [`Matrix::specialize_constructor`]. Note that
+//! or-patterns in the first column are expanded before being stored in the matrix. Specialization
+//! for a single patstack is done from a combination of [`Constructor::is_covered_by`] and
+//! [`PatStack::pop_head_constructor`]. The internals of how it's done mostly live in the
+//! [`Fields`] struct.
+//!
+//!
+//! # Computing usefulness
+//!
+//! We now have all we need to compute usefulness. The inputs to usefulness are a list of
+//! pattern-stacks `p_1 ... p_n` (one per row), and a new pattern_stack `q`. The paper and this
+//! file calls the list of patstacks a _matrix_. They must all have the same number of columns and
+//! the patterns in a given column must all have the same type. `usefulness` returns a (possibly
+//! empty) list of witnesses of usefulness. These witnesses will also be pattern-stacks.
+//!
+//! - base case: `n_columns == 0`.
+//! Since a pattern-stack functions like a tuple of patterns, an empty one functions like the
+//! unit type. Thus `q` is useful iff there are no rows above it, i.e. if `n == 0`.
+//!
+//! - inductive case: `n_columns > 0`.
+//! We need a way to list the constructors we want to try. We will be more clever in the next
+//! section but for now assume we list all value constructors for the type of the first column.
+//!
+//! - for each such ctor `c`:
+//!
+//! - for each `q'` returned by `specialize(c, q)`:
+//!
+//! - we compute `usefulness(specialize(c, p_1) ... specialize(c, p_n), q')`
+//!
+//! - for each witness found, we revert specialization by pushing the constructor `c` on top.
+//!
+//! - We return the concatenation of all the witnesses found, if any.
+//!
+//! Example:
+//! ```
+//! [Some(true)] // p_1
+//! [None] // p_2
+//! [Some(_)] // q
+//! //==>> try `None`: `specialize(None, q)` returns nothing
+//! //==>> try `Some`: `specialize(Some, q)` returns a single row
+//! [true] // p_1'
+//! [_] // q'
+//! //==>> try `true`: `specialize(true, q')` returns a single row
+//! [] // p_1''
+//! [] // q''
+//! //==>> base case; `n != 0` so `q''` is not useful.
+//! //==>> go back up a step
+//! [true] // p_1'
+//! [_] // q'
+//! //==>> try `false`: `specialize(false, q')` returns a single row
+//! [] // q''
+//! //==>> base case; `n == 0` so `q''` is useful. We return the single witness `[]`
+//! witnesses:
+//! []
+//! //==>> undo the specialization with `false`
+//! witnesses:
+//! [false]
+//! //==>> undo the specialization with `Some`
+//! witnesses:
+//! [Some(false)]
+//! //==>> we have tried all the constructors. The output is the single witness `[Some(false)]`.
+//! ```
+//!
+//! This computation is done in [`is_useful`]. In practice we don't care about the list of
+//! witnesses when computing reachability; we only need to know whether any exist. We do keep the
+//! witnesses when computing exhaustiveness to report them to the user.
+//!
+//!
+//! # Making usefulness tractable: constructor splitting
+//!
+//! We're missing one last detail: which constructors do we list? Naively listing all value
+//! constructors cannot work for types like `u64` or `&str`, so we need to be more clever. The
+//! first obvious insight is that we only want to list constructors that are covered by the head
+//! constructor of `q`. If it's a value constructor, we only try that one. If it's a pattern-only
+//! constructor, we use the final clever idea for this algorithm: _constructor splitting_, where we
+//! group together constructors that behave the same.
+//!
+//! The details are not necessary to understand this file, so we explain them in
+//! [`super::deconstruct_pat`]. Splitting is done by the [`Constructor::split`] function.
+
+use std::iter::once;
+
+use hir_def::{AdtId, DefWithBodyId, HasModule, ModuleId};
+use smallvec::{smallvec, SmallVec};
+use typed_arena::Arena;
+
+use crate::{db::HirDatabase, Ty, TyExt};
+
+use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard};
+
+use self::{helper::Captures, ArmType::*, Usefulness::*};
+
+/// Shared context for one match-check run.
+pub(crate) struct MatchCheckCtx<'a, 'p> {
+    /// Module of the body being checked; used for visibility and crate-locality checks.
+    pub(crate) module: ModuleId,
+    /// The body containing the match; used e.g. for type normalization.
+    pub(crate) body: DefWithBodyId,
+    pub(crate) db: &'a dyn HirDatabase,
+    /// Lowered patterns from arms plus generated by the check.
+    pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
+}
+
+impl<'a, 'p> MatchCheckCtx<'a, 'p> {
+    /// Whether `ty` is uninhabited. Currently always `false` — see the FIXME below.
+    pub(super) fn is_uninhabited(&self, _ty: &Ty) -> bool {
+        // FIXME(iDawer) implement exhaustive_patterns feature. More info in:
+        // Tracking issue for RFC 1872: exhaustive_patterns feature https://github.com/rust-lang/rust/issues/51085
+        false
+    }
+
+    /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
+    pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
+        match ty.as_adt() {
+            Some((adt @ AdtId::EnumId(_), _)) => {
+                let has_non_exhaustive_attr =
+                    self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
+                // "Foreign" means defined in a different crate than the one being checked.
+                let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
+                has_non_exhaustive_attr && !is_local
+            }
+            _ => false,
+        }
+    }
+
+    // Rust feature described as "Allows exhaustive pattern matching on types that contain uninhabited types."
+    // Currently always disabled — see `MatchCheckCtx::is_uninhabited`.
+    pub(super) fn feature_exhaustive_patterns(&self) -> bool {
+        // FIXME see MatchCheckCtx::is_uninhabited
+        false
+    }
+}
+
+/// Per-column context threaded through the usefulness computation.
+#[derive(Copy, Clone)]
+pub(super) struct PatCtxt<'a, 'p> {
+    /// The shared match-check context.
+    pub(super) cx: &'a MatchCheckCtx<'a, 'p>,
+    /// Type of the current column under investigation.
+    pub(super) ty: &'a Ty,
+    /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a
+    /// subpattern.
+    pub(super) is_top_level: bool,
+    /// Whether the current pattern is from a `non_exhaustive` enum.
+    pub(super) is_non_exhaustive: bool,
+}
+
+/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec[_; 2]`
+/// works well.
+#[derive(Clone)]
+pub(super) struct PatStack<'p> {
+    pats: SmallVec<[&'p DeconstructedPat<'p>; 2]>,
+}
+
+impl<'p> PatStack<'p> {
+    /// A row with a single pattern.
+    fn from_pattern(pat: &'p DeconstructedPat<'p>) -> Self {
+        Self::from_vec(smallvec![pat])
+    }
+
+    fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p>; 2]>) -> Self {
+        PatStack { pats: vec }
+    }
+
+    fn is_empty(&self) -> bool {
+        self.pats.is_empty()
+    }
+
+    fn len(&self) -> usize {
+        self.pats.len()
+    }
+
+    /// First pattern of the row. Panics if the row is empty.
+    fn head(&self) -> &'p DeconstructedPat<'p> {
+        self.pats[0]
+    }
+
+    // Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an
+    // or-pattern. Panics if `self` is empty.
+    fn expand_or_pat(&self) -> impl Iterator<Item = PatStack<'p>> + Captures<'_> {
+        // One new row per alternative of the or-pattern, each followed by the
+        // remaining patterns of this row.
+        self.head().iter_fields().map(move |pat| {
+            let mut new_patstack = PatStack::from_pattern(pat);
+            new_patstack.pats.extend_from_slice(&self.pats[1..]);
+            new_patstack
+        })
+    }
+
+    /// This computes `S(self.head().ctor(), self)`. See top of the file for explanations.
+    ///
+    /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
+    /// fields filled with wild patterns.
+    ///
+    /// This is roughly the inverse of `Constructor::apply`.
+    fn pop_head_constructor(&self, cx: &MatchCheckCtx<'_, 'p>, ctor: &Constructor) -> PatStack<'p> {
+        // We pop the head pattern and push the new fields extracted from the arguments of
+        // `self.head()`.
+        let mut new_fields: SmallVec<[_; 2]> = self.head().specialize(cx, ctor);
+        new_fields.extend_from_slice(&self.pats[1..]);
+        PatStack::from_vec(new_fields)
+    }
+}
+
+/// A 2D matrix.
+///
+/// Invariant (upheld by [`Matrix::push`]): no row starts with an or-pattern;
+/// such rows are expanded into one row per alternative on insertion.
+#[derive(Clone)]
+pub(super) struct Matrix<'p> {
+    patterns: Vec<PatStack<'p>>,
+}
+
+impl<'p> Matrix<'p> {
+    fn empty() -> Self {
+        Matrix { patterns: vec![] }
+    }
+
+    /// Number of columns of this matrix. `None` if the matrix is empty.
+    pub(super) fn _column_count(&self) -> Option<usize> {
+        self.patterns.get(0).map(|r| r.len())
+    }
+
+    /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
+    /// expands it.
+    fn push(&mut self, row: PatStack<'p>) {
+        if !row.is_empty() && row.head().is_or_pat() {
+            self.patterns.extend(row.expand_or_pat());
+        } else {
+            self.patterns.push(row);
+        }
+    }
+
+    /// Iterate over the first component of each row
+    fn heads(&self) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + Clone + Captures<'_> {
+        self.patterns.iter().map(|r| r.head())
+    }
+
+    /// This computes `S(constructor, self)`. See top of the file for explanations.
+    fn specialize_constructor(&self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Matrix<'p> {
+        // Keep only the rows whose head constructor can match `ctor`, with the
+        // head replaced by its fields.
+        let mut matrix = Matrix::empty();
+        for row in &self.patterns {
+            if ctor.is_covered_by(pcx, row.head().ctor()) {
+                let new_row = row.pop_head_constructor(pcx.cx, ctor);
+                matrix.push(new_row);
+            }
+        }
+        matrix
+    }
+}
+
+/// This carries the results of computing usefulness, as described at the top of the file. When
+/// checking usefulness of a match branch, we use the `NoWitnesses` variant, which also keeps track
+/// of potential unreachable sub-patterns (in the presence of or-patterns). When checking
+/// exhaustiveness of a whole match, we use the `WithWitnesses` variant, which carries a list of
+/// witnesses of non-exhaustiveness when there are any.
+/// Which variant to use is dictated by `ArmType`.
+enum Usefulness<'p> {
+    /// If we don't care about witnesses, simply remember if the pattern was useful.
+    NoWitnesses { useful: bool },
+    /// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole
+    /// pattern is unreachable.
+    WithWitnesses(Vec<Witness<'p>>),
+}
+
+impl<'p> Usefulness<'p> {
+    /// "Useful" result for the given mode: `FakeExtraWildcard` maps to
+    /// `WithWitnesses`, `RealArm` to `NoWitnesses`.
+    fn new_useful(preference: ArmType) -> Self {
+        match preference {
+            // A single (empty) witness of reachability.
+            FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]),
+            RealArm => NoWitnesses { useful: true },
+        }
+    }
+    /// "Not useful" result for the given mode.
+    fn new_not_useful(preference: ArmType) -> Self {
+        match preference {
+            FakeExtraWildcard => WithWitnesses(vec![]),
+            RealArm => NoWitnesses { useful: false },
+        }
+    }
+
+    fn is_useful(&self) -> bool {
+        match self {
+            Usefulness::NoWitnesses { useful } => *useful,
+            Usefulness::WithWitnesses(witnesses) => !witnesses.is_empty(),
+        }
+    }
+
+    /// Combine usefulnesses from two branches. This is an associative operation.
+    fn extend(&mut self, other: Self) {
+        match (&mut *self, other) {
+            (WithWitnesses(_), WithWitnesses(o)) if o.is_empty() => {}
+            (WithWitnesses(s), WithWitnesses(o)) if s.is_empty() => *self = WithWitnesses(o),
+            (WithWitnesses(s), WithWitnesses(o)) => s.extend(o),
+            (NoWitnesses { useful: s_useful }, NoWitnesses { useful: o_useful }) => {
+                *s_useful = *s_useful || o_useful
+            }
+            // Both operands always originate from the same `ArmType`, so mixed
+            // variants cannot occur.
+            _ => unreachable!(),
+        }
+    }
+
+    /// After calculating usefulness after a specialization, call this to reconstruct a usefulness
+    /// that makes sense for the matrix pre-specialization. This new usefulness can then be merged
+    /// with the results of specializing with the other constructors.
+    fn apply_constructor(
+        self,
+        pcx: PatCtxt<'_, 'p>,
+        matrix: &Matrix<'p>,
+        ctor: &Constructor,
+    ) -> Self {
+        match self {
+            NoWitnesses { .. } => self,
+            // Nothing to reconstruct if there are no witnesses.
+            WithWitnesses(ref witnesses) if witnesses.is_empty() => self,
+            WithWitnesses(witnesses) => {
+                let new_witnesses = if let Constructor::Missing { .. } = ctor {
+                    // We got the special `Missing` constructor, so each of the missing constructors
+                    // gives a new pattern that is not caught by the match. We list those patterns.
+                    let new_patterns = if pcx.is_non_exhaustive {
+                        // Here we don't want the user to try to list all variants, we want them to add
+                        // a wildcard, so we only suggest that.
+                        vec![DeconstructedPat::wildcard(pcx.ty.clone())]
+                    } else {
+                        let mut split_wildcard = SplitWildcard::new(pcx);
+                        split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+
+                        // This lets us know if we skipped any variants because they are marked
+                        // `doc(hidden)` or they are unstable feature gate (only stdlib types).
+                        let mut hide_variant_show_wild = false;
+                        // Construct for each missing constructor a "wild" version of this
+                        // constructor, that matches everything that can be built with
+                        // it. For example, if `ctor` is a `Constructor::Variant` for
+                        // `Option::Some`, we get the pattern `Some(_)`.
+                        let mut new: Vec<DeconstructedPat<'_>> = split_wildcard
+                            .iter_missing(pcx)
+                            .filter_map(|missing_ctor| {
+                                // Check if this variant is marked `doc(hidden)`
+                                if missing_ctor.is_doc_hidden_variant(pcx)
+                                    || missing_ctor.is_unstable_variant(pcx)
+                                {
+                                    hide_variant_show_wild = true;
+                                    return None;
+                                }
+                                Some(DeconstructedPat::wild_from_ctor(pcx, missing_ctor.clone()))
+                            })
+                            .collect();
+
+                        // Represent all skipped variants with a single wildcard suggestion.
+                        if hide_variant_show_wild {
+                            new.push(DeconstructedPat::wildcard(pcx.ty.clone()))
+                        }
+
+                        new
+                    };
+
+                    // Cross every existing witness with every missing pattern.
+                    witnesses
+                        .into_iter()
+                        .flat_map(|witness| {
+                            new_patterns.iter().map(move |pat| {
+                                Witness(
+                                    witness
+                                        .0
+                                        .iter()
+                                        .chain(once(pat))
+                                        .map(DeconstructedPat::clone_and_forget_reachability)
+                                        .collect(),
+                                )
+                            })
+                        })
+                        .collect()
+                } else {
+                    // Ordinary constructor: fold its fields back into each witness.
+                    witnesses
+                        .into_iter()
+                        .map(|witness| witness.apply_constructor(pcx, ctor))
+                        .collect()
+                };
+                WithWitnesses(new_witnesses)
+            }
+        }
+    }
+}
+
+/// Selects which [`Usefulness`] variant a check produces (see `Usefulness::new_useful`).
+#[derive(Copy, Clone, Debug)]
+enum ArmType {
+    /// Synthetic wildcard arm used for whole-match exhaustiveness checking;
+    /// selects the witness-collecting `WithWitnesses` mode.
+    FakeExtraWildcard,
+    /// A real arm from the source; selects the boolean `NoWitnesses` mode.
+    RealArm,
+}
+
+/// A witness of non-exhaustiveness for error reporting, represented
+/// as a list of patterns (in reverse order of construction) with
+/// wildcards inside to represent elements that can take any inhabitant
+/// of the type as a value.
+///
+/// A witness against a list of patterns should have the same types
+/// and length as the pattern matched against. Because Rust `match`
+/// is always against a single pattern, at the end the witness will
+/// have length 1, but in the middle of the algorithm, it can contain
+/// multiple patterns.
+///
+/// For example, if we are constructing a witness for the match against
+///
+/// ```
+/// struct Pair(Option<(u32, u32)>, bool);
+///
+/// match (p: Pair) {
+///     Pair(None, _) => {}
+///     Pair(_, false) => {}
+/// }
+/// ```
+///
+/// We'll perform the following steps:
+/// 1. Start with an empty witness
+///     `Witness(vec![])`
+/// 2. Push a witness `true` against the `false`
+///     `Witness(vec![true])`
+/// 3. Push a witness `Some(_)` against the `None`
+///     `Witness(vec![true, Some(_)])`
+/// 4. Apply the `Pair` constructor to the witnesses
+///     `Witness(vec![Pair(Some(_), true)])`
+///
+/// The final `Pair(Some(_), true)` is then the resulting witness.
+pub(crate) struct Witness<'p>(Vec<DeconstructedPat<'p>>);
+
+impl<'p> Witness<'p> {
+    /// Asserts that the witness contains a single pattern, and returns it.
+    fn single_pattern(self) -> DeconstructedPat<'p> {
+        assert_eq!(self.0.len(), 1);
+        self.0.into_iter().next().unwrap()
+    }
+
+    /// Constructs a partial witness for a pattern given a list of
+    /// patterns expanded by the specialization step.
+    ///
+    /// When a pattern P is discovered to be useful, this function is used bottom-up
+    /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset
+    /// of values, V, where each value in that set is not covered by any previously
+    /// used patterns and is covered by the pattern P'. Examples:
+    ///
+    /// left_ty: tuple of 3 elements
+    /// pats: [10, 20, _]           => (10, 20, _)
+    ///
+    /// left_ty: struct X { a: (bool, &'static str), b: usize}
+    /// pats: [(false, "foo"), 42]  => X { a: (false, "foo"), b: 42 }
+    fn apply_constructor(mut self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Self {
+        let pat = {
+            let len = self.0.len();
+            let arity = ctor.arity(pcx);
+            // The last `arity` entries are the fields of `ctor`; they were
+            // pushed in reverse, hence the `.rev()` to restore field order.
+            let pats = self.0.drain((len - arity)..).rev();
+            let fields = Fields::from_iter(pcx.cx, pats);
+            DeconstructedPat::new(ctor.clone(), fields, pcx.ty.clone())
+        };
+
+        self.0.push(pat);
+
+        self
+    }
+}
+
+/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
+/// The algorithm from the paper has been modified to correctly handle empty
+/// types. The changes are:
+///   (0) We don't exit early if the pattern matrix has zero rows. We just
+///       continue to recurse over columns.
+///   (1) all_constructors will only return constructors that are statically
+///       possible. E.g., it will only return `Ok` for `Result<T, !>`.
+///
+/// This finds whether a (row) vector `v` of patterns is 'useful' in relation
+/// to a set of such vectors `m` - this is defined as there being a set of
+/// inputs that will match `v` but not any of the sets in `m`.
+///
+/// All the patterns at each column of the `matrix ++ v` matrix must have the same type.
+///
+/// This is used both for reachability checking (if a pattern isn't useful in
+/// relation to preceding patterns, it is not reachable) and exhaustiveness
+/// checking (if a wildcard pattern is useful in relation to a matrix, the
+/// matrix isn't exhaustive).
+///
+/// `is_under_guard` is used to inform if the pattern has a guard. If it
+/// has one it must not be inserted into the matrix. This shouldn't be
+/// relied on for soundness.
+fn is_useful<'p>(
+    cx: &MatchCheckCtx<'_, 'p>,
+    matrix: &Matrix<'p>,
+    v: &PatStack<'p>,
+    witness_preference: ArmType,
+    is_under_guard: bool,
+    is_top_level: bool,
+) -> Usefulness<'p> {
+    let Matrix { patterns: rows, .. } = matrix;
+
+    // The base case. We are pattern-matching on () and the return value is
+    // based on whether our matrix has a row or not.
+    // NOTE: This could potentially be optimized by checking rows.is_empty()
+    // first and then, if v is non-empty, the return value is based on whether
+    // the type of the tuple we're checking is inhabited or not.
+    if v.is_empty() {
+        let ret = if rows.is_empty() {
+            Usefulness::new_useful(witness_preference)
+        } else {
+            Usefulness::new_not_useful(witness_preference)
+        };
+        return ret;
+    }
+
+    debug_assert!(rows.iter().all(|r| r.len() == v.len()));
+
+    let ty = v.head().ty();
+    let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty);
+    let pcx = PatCtxt { cx, ty, is_top_level, is_non_exhaustive };
+
+    // If the first pattern is an or-pattern, expand it.
+    // `ret` accumulates the usefulness over all branches/constructors tried below.
+    let mut ret = Usefulness::new_not_useful(witness_preference);
+    if v.head().is_or_pat() {
+        // We try each or-pattern branch in turn.
+        // The matrix is cloned so already-tried alternatives can be added locally.
+        let mut matrix = matrix.clone();
+        for v in v.expand_or_pat() {
+            let usefulness = is_useful(cx, &matrix, &v, witness_preference, is_under_guard, false);
+            ret.extend(usefulness);
+            // If pattern has a guard don't add it to the matrix.
+            if !is_under_guard {
+                // We push the already-seen patterns into the matrix in order to detect redundant
+                // branches like `Some(_) | Some(0)`.
+                matrix.push(v);
+            }
+        }
+    } else {
+        let v_ctor = v.head().ctor();
+
+        // FIXME: implement `overlapping_range_endpoints` lint
+
+        // We split the head constructor of `v`.
+        let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+        // For each constructor, we compute whether there's a value that starts with it that would
+        // witness the usefulness of `v`.
+        let start_matrix = matrix;
+        for ctor in split_ctors {
+            // We cache the result of `Fields::wildcards` because it is used a lot.
+            let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor);
+            let v = v.pop_head_constructor(cx, &ctor);
+            let usefulness =
+                is_useful(cx, &spec_matrix, &v, witness_preference, is_under_guard, false);
+            // Lift the result back to the pre-specialization matrix.
+            let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor);
+
+            // FIXME: implement `non_exhaustive_omitted_patterns` lint
+
+            ret.extend(usefulness);
+        }
+    };
+
+    if ret.is_useful() {
+        v.head().set_reachable();
+    }
+
+    ret
+}
+
+/// The arm of a match expression.
+#[derive(Clone, Copy)]
+pub(crate) struct MatchArm<'p> {
+    /// The lowered arm pattern, owned by the check's pattern arena.
+    pub(crate) pat: &'p DeconstructedPat<'p>,
+    /// Whether the arm has an `if` guard; guarded arms are never added to the matrix.
+    pub(crate) has_guard: bool,
+}
+
+/// Indicates whether or not a given arm is reachable.
+#[derive(Clone, Debug)]
+pub(crate) enum Reachability {
+    /// The arm is reachable. This additionally carries a set of or-pattern branches that have been
+    /// found to be unreachable despite the overall arm being reachable. Used only in the presence
+    /// of or-patterns, otherwise it stays empty.
+    // FIXME: store unreachable subpattern IDs
+    Reachable,
+    /// The arm is unreachable.
+    Unreachable,
+}
+
+/// The output of checking a match for exhaustiveness and arm reachability.
+pub(crate) struct UsefulnessReport<'p> {
+    /// For each arm of the input, whether that arm is reachable after the arms above it.
+    // Currently unused by callers, hence the leading underscore.
+    pub(crate) _arm_usefulness: Vec<(MatchArm<'p>, Reachability)>,
+    /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
+    /// exhaustiveness.
+    pub(crate) non_exhaustiveness_witnesses: Vec<DeconstructedPat<'p>>,
+}
+
+/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which
+/// of its arms are reachable.
+///
+/// Note: the input patterns must have been lowered through
+/// `check_match::MatchVisitor::lower_pattern`.
+pub(crate) fn compute_match_usefulness<'p>(
+    cx: &MatchCheckCtx<'_, 'p>,
+    arms: &[MatchArm<'p>],
+    scrut_ty: &Ty,
+) -> UsefulnessReport<'p> {
+    // Arms are processed in source order; each unguarded arm is added to the
+    // matrix so later arms are checked against everything before them.
+    let mut matrix = Matrix::empty();
+    let arm_usefulness = arms
+        .iter()
+        .copied()
+        .map(|arm| {
+            let v = PatStack::from_pattern(arm.pat);
+            // Run for the side effect of marking reachable (sub)patterns.
+            is_useful(cx, &matrix, &v, RealArm, arm.has_guard, true);
+            if !arm.has_guard {
+                matrix.push(v);
+            }
+            let reachability = if arm.pat.is_reachable() {
+                Reachability::Reachable
+            } else {
+                Reachability::Unreachable
+            };
+            (arm, reachability)
+        })
+        .collect();
+
+    // A synthetic wildcard arm is useful iff the real arms are non-exhaustive;
+    // its witnesses are exactly the uncovered cases.
+    let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty.clone()));
+    let v = PatStack::from_pattern(wild_pattern);
+    let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, false, true);
+    let non_exhaustiveness_witnesses = match usefulness {
+        WithWitnesses(pats) => pats.into_iter().map(Witness::single_pattern).collect(),
+        // `FakeExtraWildcard` always yields `WithWitnesses` (see `Usefulness::new_useful`).
+        NoWitnesses { .. } => panic!("bug"),
+    };
+    UsefulnessReport { _arm_usefulness: arm_usefulness, non_exhaustiveness_witnesses }
+}
+
+/// Helper for capturing extra lifetimes in `impl Trait` return types.
+pub(crate) mod helper {
+    // Copy-pasted from rust/compiler/rustc_data_structures/src/captures.rs
+    /// "Signaling" trait used in impl trait to tag lifetimes that you may
+    /// need to capture but don't really need for other reasons.
+    /// Basically a workaround; see [this comment] for details.
+    ///
+    /// [this comment]: https://github.com/rust-lang/rust/issues/34511#issuecomment-373423999
+    // FIXME(eddyb) false positive, the lifetime parameter is "phantom" but needed.
+    #[allow(unused_lifetimes)]
+    pub(crate) trait Captures<'a> {}
+
+    // Blanket impl: every type trivially "captures" any lifetime.
+    impl<'a, T: ?Sized> Captures<'a> for T {}
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
new file mode 100644
index 000000000..161b19a73
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -0,0 +1,104 @@
+//! Provides validations for unsafe code. Currently checks if unsafe functions are missing
+//! unsafe blocks.
+
+use hir_def::{
+ body::Body,
+ expr::{Expr, ExprId, UnaryOp},
+ resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
+ DefWithBodyId,
+};
+
+use crate::{
+ db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TyExt, TyKind,
+};
+
+/// Returns the expressions in `def`'s body that require an `unsafe` block but
+/// are not inside one. Returns an empty list when `def` is itself an
+/// `unsafe fn`, since its whole body is already an unsafe context.
+pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
+    let infer = db.infer(def);
+    let mut res = Vec::new();
+
+    let is_unsafe = match def {
+        DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(),
+        // Statics and consts cannot be declared `unsafe`.
+        DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false,
+    };
+    if is_unsafe {
+        return res;
+    }
+
+    let body = db.body(def);
+    unsafe_expressions(db, &infer, def, &body, body.body_expr, &mut |expr| {
+        // Only report uses that are *not* covered by an `unsafe { .. }` block.
+        if !expr.inside_unsafe_block {
+            res.push(expr.expr);
+        }
+    });
+
+    res
+}
+
+/// An expression that requires an unsafe context, together with whether it
+/// already sits inside an `unsafe { .. }` block.
+pub struct UnsafeExpr {
+    pub expr: ExprId,
+    pub inside_unsafe_block: bool,
+}
+
+// FIXME: Move this out, its not a diagnostic only thing anymore, and handle unsafe pattern accesses as well
+/// Walks the expression tree rooted at `current` and invokes `unsafe_expr_cb`
+/// for every expression that requires an unsafe context (see `walk_unsafe`).
+pub fn unsafe_expressions(
+    db: &dyn HirDatabase,
+    infer: &InferenceResult,
+    def: DefWithBodyId,
+    body: &Body,
+    current: ExprId,
+    unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
+) {
+    // Start outside of any `unsafe` block.
+    walk_unsafe(db, infer, def, body, current, false, unsafe_expr_cb)
+}
+
+/// Recursive worker for [`unsafe_expressions`]: reports each expression that
+/// needs an unsafe context, tracking whether we are inside an `unsafe` block.
+fn walk_unsafe(
+    db: &dyn HirDatabase,
+    infer: &InferenceResult,
+    def: DefWithBodyId,
+    body: &Body,
+    current: ExprId,
+    inside_unsafe_block: bool,
+    unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
+) {
+    let expr = &body.exprs[current];
+    match expr {
+        // Direct call to an `unsafe fn`.
+        &Expr::Call { callee, .. } => {
+            if let Some(func) = infer[callee].as_fn_def(db) {
+                if is_fn_unsafe_to_call(db, func) {
+                    unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+                }
+            }
+        }
+        // Read of a `static mut`.
+        Expr::Path(path) => {
+            let resolver = resolver_for_expr(db.upcast(), def, current);
+            let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path());
+            if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
+                if db.static_data(id).mutable {
+                    unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+                }
+            }
+        }
+        // Method call that resolves to an `unsafe fn`.
+        Expr::MethodCall { .. } => {
+            if infer
+                .method_resolution(current)
+                .map(|(func, _)| is_fn_unsafe_to_call(db, func))
+                .unwrap_or(false)
+            {
+                unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+            }
+        }
+        // Dereference of a raw pointer.
+        Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
+            if let TyKind::Raw(..) = &infer[*expr].kind(Interner) {
+                unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+            }
+        }
+        // Descend into the block with the flag set; `return` so the generic
+        // child walk below doesn't visit the block's contents a second time.
+        Expr::Unsafe { body: child } => {
+            return walk_unsafe(db, infer, def, body, *child, true, unsafe_expr_cb);
+        }
+        _ => {}
+    }
+
+    expr.walk_child_exprs(|child| {
+        walk_unsafe(db, infer, def, body, child, inside_unsafe_block, unsafe_expr_cb);
+    });
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
new file mode 100644
index 000000000..d2f9c2b8b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -0,0 +1,1315 @@
+//! The `HirDisplay` trait, which serves two purposes: Turning various bits from
+//! HIR back into source code, and just displaying them for debugging/testing
+//! purposes.
+
+use std::fmt::{self, Debug};
+
+use base_db::CrateId;
+use chalk_ir::BoundVar;
+use hir_def::{
+ body,
+ db::DefDatabase,
+ find_path,
+ generics::{TypeOrConstParamData, TypeParamProvenance},
+ intern::{Internable, Interned},
+ item_scope::ItemInNs,
+ path::{Path, PathKind},
+ type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef},
+ visibility::Visibility,
+ HasModule, ItemContainerId, Lookup, ModuleId, TraitId,
+};
+use hir_expand::{hygiene::Hygiene, name::Name};
+use itertools::Itertools;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase,
+ from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx,
+ mapping::from_chalk,
+ primitive, subst_prefix, to_assoc_type_id,
+ utils::{self, generics},
+ AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstValue, DomainGoal,
+ GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, Mutability,
+ OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar, Substitution, TraitRef,
+ TraitRefExt, Ty, TyExt, TyKind, WhereClause,
+};
+
+/// State threaded through `HirDisplay::hir_fmt` calls: the output sink plus
+/// the settings (size limit, verbosity, target) the rendering obeys.
+pub struct HirFormatter<'a> {
+    pub db: &'a dyn HirDatabase,
+    /// Underlying sink all output ends up in.
+    fmt: &'a mut dyn fmt::Write,
+    /// Scratch buffer used by `write_fmt` to measure output before forwarding it.
+    buf: String,
+    /// Bytes written so far via `write_fmt`; compared against `max_size`.
+    curr_size: usize,
+    pub(crate) max_size: Option<usize>,
+    omit_verbose_types: bool,
+    display_target: DisplayTarget,
+}
+
+pub trait HirDisplay {
+    /// Writes `self` to `f`. All other methods of this trait are convenience
+    /// wrappers that configure a `HirFormatter` and delegate here.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError>;
+
+    /// Returns a `Display`able type that is human-readable.
+    fn into_displayable<'a>(
+        &'a self,
+        db: &'a dyn HirDatabase,
+        max_size: Option<usize>,
+        omit_verbose_types: bool,
+        display_target: DisplayTarget,
+    ) -> HirDisplayWrapper<'a, Self>
+    where
+        Self: Sized,
+    {
+        // `SourceCode` can fail with `DisplaySourceCodeError`, which `Display`
+        // (used by the wrapper) cannot surface — hence the assert.
+        assert!(
+            !matches!(display_target, DisplayTarget::SourceCode { .. }),
+            "HirDisplayWrapper cannot fail with DisplaySourceCodeError, use HirDisplay::hir_fmt directly instead"
+        );
+        HirDisplayWrapper { db, t: self, max_size, omit_verbose_types, display_target }
+    }
+
+    /// Returns a `Display`able type that is human-readable.
+    /// Use this for showing types to the user (e.g. diagnostics)
+    fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
+    where
+        Self: Sized,
+    {
+        HirDisplayWrapper {
+            db,
+            t: self,
+            max_size: None,
+            omit_verbose_types: false,
+            display_target: DisplayTarget::Diagnostics,
+        }
+    }
+
+    /// Returns a `Display`able type that is human-readable and tries to be succinct.
+    /// Use this for showing types to the user where space is constrained (e.g. doc popups)
+    fn display_truncated<'a>(
+        &'a self,
+        db: &'a dyn HirDatabase,
+        max_size: Option<usize>,
+    ) -> HirDisplayWrapper<'a, Self>
+    where
+        Self: Sized,
+    {
+        HirDisplayWrapper {
+            db,
+            t: self,
+            max_size,
+            omit_verbose_types: true,
+            display_target: DisplayTarget::Diagnostics,
+        }
+    }
+
+    /// Returns a String representation of `self` that can be inserted into the given module.
+    /// Use this when generating code (e.g. assists)
+    fn display_source_code<'a>(
+        &'a self,
+        db: &'a dyn HirDatabase,
+        module_id: ModuleId,
+    ) -> Result<String, DisplaySourceCodeError> {
+        let mut result = String::new();
+        match self.hir_fmt(&mut HirFormatter {
+            db,
+            fmt: &mut result,
+            buf: String::with_capacity(20),
+            curr_size: 0,
+            max_size: None,
+            omit_verbose_types: false,
+            display_target: DisplayTarget::SourceCode { module_id },
+        }) {
+            Ok(()) => {}
+            Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
+            Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e),
+        };
+        Ok(result)
+    }
+
+    /// Returns a String representation of `self` for test purposes
+    fn display_test<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
+    where
+        Self: Sized,
+    {
+        HirDisplayWrapper {
+            db,
+            t: self,
+            max_size: None,
+            omit_verbose_types: false,
+            display_target: DisplayTarget::Test,
+        }
+    }
+}
+
+impl<'a> HirFormatter<'a> {
+    /// Writes the elements of `iter` separated by `sep`, replacing the rest
+    /// with a single ellipsis once the size limit is reached.
+    pub fn write_joined<T: HirDisplay>(
+        &mut self,
+        iter: impl IntoIterator<Item = T>,
+        sep: &str,
+    ) -> Result<(), HirDisplayError> {
+        let mut first = true;
+        for e in iter {
+            if !first {
+                write!(self, "{}", sep)?;
+            }
+            first = false;
+
+            // Abbreviate multiple omitted types with a single ellipsis.
+            if self.should_truncate() {
+                return write!(self, "{}", TYPE_HINT_TRUNCATION);
+            }
+
+            e.hir_fmt(self)?;
+        }
+        Ok(())
+    }
+
+    /// This allows using the `write!` macro directly with a `HirFormatter`.
+    pub fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> Result<(), HirDisplayError> {
+        // We write to a buffer first to track output size
+        self.buf.clear();
+        fmt::write(&mut self.buf, args)?;
+        self.curr_size += self.buf.len();
+
+        // Then we write to the internal formatter from the buffer
+        self.fmt.write_str(&self.buf).map_err(HirDisplayError::from)
+    }
+
+    pub fn write_str(&mut self, s: &str) -> Result<(), HirDisplayError> {
+        // NOTE(review): unlike `write_fmt`, this does not add to `curr_size`,
+        // so text written here is invisible to `should_truncate` — presumably
+        // intentional for short fixed tokens; confirm.
+        self.fmt.write_str(s)?;
+        Ok(())
+    }
+
+    pub fn write_char(&mut self, c: char) -> Result<(), HirDisplayError> {
+        // NOTE(review): also bypasses `curr_size` accounting, like `write_str`.
+        self.fmt.write_char(c)?;
+        Ok(())
+    }
+
+    /// Whether the accumulated output has hit `max_size` (if a limit is set).
+    pub fn should_truncate(&self) -> bool {
+        match self.max_size {
+            Some(max_size) => self.curr_size >= max_size,
+            None => false,
+        }
+    }
+
+    pub fn omit_verbose_types(&self) -> bool {
+        self.omit_verbose_types
+    }
+}
+
+/// Where the rendered text is destined to go; controls how strict the
+/// rendering has to be (see the variant docs).
+#[derive(Clone, Copy)]
+pub enum DisplayTarget {
+    /// Display types for inlays, doc popups, autocompletion, etc...
+    /// Showing `{unknown}` or not qualifying paths is fine here.
+    /// There's no reason for this to fail.
+    Diagnostics,
+    /// Display types for inserting them in source files.
+    /// The generated code should compile, so paths need to be qualified.
+    SourceCode { module_id: ModuleId },
+    /// Only for test purpose to keep real types
+    Test,
+}
+
+impl DisplayTarget {
+    /// Whether this is the fallible, fully-qualified `SourceCode` target.
+    fn is_source_code(&self) -> bool {
+        matches!(self, Self::SourceCode { .. })
+    }
+    fn is_test(&self) -> bool {
+        matches!(self, Self::Test)
+    }
+}
+
+/// Failure modes specific to rendering with [`DisplayTarget::SourceCode`].
+#[derive(Debug)]
+pub enum DisplaySourceCodeError {
+    PathNotFound,
+    UnknownType,
+    Closure,
+}
+
+/// Error type returned by [`HirDisplay::hir_fmt`].
+pub enum HirDisplayError {
+    /// Errors that can occur when generating source code
+    DisplaySourceCodeError(DisplaySourceCodeError),
+    /// `FmtError` is required to be compatible with std::fmt::Display
+    FmtError,
+}
+// Lets `?` convert `fmt::Error` from the underlying writer.
+impl From<fmt::Error> for HirDisplayError {
+    fn from(_: fmt::Error) -> Self {
+        Self::FmtError
+    }
+}
+
+/// Adapter that implements `fmt::Display` by running `HirDisplay::hir_fmt`
+/// with the stored settings. Constructed by the `HirDisplay` convenience
+/// methods (`display`, `display_truncated`, ...).
+pub struct HirDisplayWrapper<'a, T> {
+    db: &'a dyn HirDatabase,
+    t: &'a T,
+    max_size: Option<usize>,
+    omit_verbose_types: bool,
+    display_target: DisplayTarget,
+}
+
+impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
+where
+    T: HirDisplay,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.t.hir_fmt(&mut HirFormatter {
+            db: self.db,
+            fmt: f,
+            buf: String::with_capacity(20),
+            curr_size: 0,
+            max_size: self.max_size,
+            omit_verbose_types: self.omit_verbose_types,
+            display_target: self.display_target,
+        }) {
+            Ok(()) => Ok(()),
+            Err(HirDisplayError::FmtError) => Err(fmt::Error),
+            Err(HirDisplayError::DisplaySourceCodeError(_)) => {
+                // This should never happen: `into_displayable` asserts the
+                // target is not `SourceCode`.
+                panic!("HirDisplay::hir_fmt failed with DisplaySourceCodeError when calling Display::fmt!")
+            }
+        }
+    }
+}
+
+/// Placeholder emitted in place of output omitted due to truncation.
+const TYPE_HINT_TRUNCATION: &str = "…";
+
+// Forward `HirDisplay` through references.
+impl<T: HirDisplay> HirDisplay for &'_ T {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        HirDisplay::hir_fmt(*self, f)
+    }
+}
+
+// Forward `HirDisplay` through interned values.
+impl<T: HirDisplay + Internable> HirDisplay for Interned<T> {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        HirDisplay::hir_fmt(self.as_ref(), f)
+    }
+}
+
+impl HirDisplay for ProjectionTy {
+    // Renders `<SelfTy as Trait<Args>>::AssocName`.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        if f.should_truncate() {
+            return write!(f, "{}", TYPE_HINT_TRUNCATION);
+        }
+
+        let trait_ = f.db.trait_data(self.trait_(f.db));
+        write!(f, "<")?;
+        self.self_type_parameter(Interner).hir_fmt(f)?;
+        write!(f, " as {}", trait_.name)?;
+        // Substitution slot 0 is the self type (printed above); the remaining
+        // slots are the trait's type arguments.
+        if self.substitution.len(Interner) > 1 {
+            write!(f, "<")?;
+            f.write_joined(&self.substitution.as_slice(Interner)[1..], ", ")?;
+            write!(f, ">")?;
+        }
+        write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
+        Ok(())
+    }
+}
+
+impl HirDisplay for OpaqueTy {
+    // Prints only the first element of the substitution — presumably the self
+    // type of the opaque; TODO confirm against lowering.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        if f.should_truncate() {
+            return write!(f, "{}", TYPE_HINT_TRUNCATION);
+        }
+
+        self.substitution.at(Interner, 0).hir_fmt(f)
+    }
+}
+
+impl HirDisplay for GenericArg {
+    // Dispatch on the kind of argument: type, lifetime, or const.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self.interned() {
+            crate::GenericArgData::Ty(ty) => ty.hir_fmt(f),
+            crate::GenericArgData::Lifetime(lt) => lt.hir_fmt(f),
+            crate::GenericArgData::Const(c) => c.hir_fmt(f),
+        }
+    }
+}
+
+impl HirDisplay for Const {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        let data = self.interned();
+        match data.value {
+            ConstValue::BoundVar(idx) => idx.hir_fmt(f),
+            // Unresolved inference variable — rendered as an opaque marker.
+            ConstValue::InferenceVar(..) => write!(f, "#c#"),
+            // Placeholder: print the name of the const generic parameter.
+            ConstValue::Placeholder(idx) => {
+                let id = from_placeholder_idx(f.db, idx);
+                let generics = generics(f.db.upcast(), id.parent);
+                let param_data = &generics.params.type_or_consts[id.local_id];
+                write!(f, "{}", param_data.name().unwrap())
+            }
+            // Concrete value: print its interned scalar representation.
+            ConstValue::Concrete(c) => write!(f, "{}", c.interned),
+        }
+    }
+}
+
+impl HirDisplay for BoundVar {
+    // Debug-style rendering as `?depth.index`.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        write!(f, "?{}.{}", self.debruijn.depth(), self.index)
+    }
+}
+
+impl HirDisplay for Ty {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ match self.kind(Interner) {
+ TyKind::Never => write!(f, "!")?,
+ TyKind::Str => write!(f, "str")?,
+ TyKind::Scalar(Scalar::Bool) => write!(f, "bool")?,
+ TyKind::Scalar(Scalar::Char) => write!(f, "char")?,
+ &TyKind::Scalar(Scalar::Float(t)) => write!(f, "{}", primitive::float_ty_to_string(t))?,
+ &TyKind::Scalar(Scalar::Int(t)) => write!(f, "{}", primitive::int_ty_to_string(t))?,
+ &TyKind::Scalar(Scalar::Uint(t)) => write!(f, "{}", primitive::uint_ty_to_string(t))?,
+ TyKind::Slice(t) => {
+ write!(f, "[")?;
+ t.hir_fmt(f)?;
+ write!(f, "]")?;
+ }
+ TyKind::Array(t, c) => {
+ write!(f, "[")?;
+ t.hir_fmt(f)?;
+ write!(f, "; ")?;
+ c.hir_fmt(f)?;
+ write!(f, "]")?;
+ }
+ TyKind::Raw(m, t) | TyKind::Ref(m, _, t) => {
+ if matches!(self.kind(Interner), TyKind::Raw(..)) {
+ write!(
+ f,
+ "*{}",
+ match m {
+ Mutability::Not => "const ",
+ Mutability::Mut => "mut ",
+ }
+ )?;
+ } else {
+ write!(
+ f,
+ "&{}",
+ match m {
+ Mutability::Not => "",
+ Mutability::Mut => "mut ",
+ }
+ )?;
+ }
+
+ // FIXME: all this just to decide whether to use parentheses...
+ let contains_impl_fn = |bounds: &[QuantifiedWhereClause]| {
+ bounds.iter().any(|bound| {
+ if let WhereClause::Implemented(trait_ref) = bound.skip_binders() {
+ let trait_ = trait_ref.hir_trait_id();
+ fn_traits(f.db.upcast(), trait_).any(|it| it == trait_)
+ } else {
+ false
+ }
+ })
+ };
+ let (preds_to_print, has_impl_fn_pred) = match t.kind(Interner) {
+ TyKind::Dyn(dyn_ty) if dyn_ty.bounds.skip_binders().interned().len() > 1 => {
+ let bounds = dyn_ty.bounds.skip_binders().interned();
+ (bounds.len(), contains_impl_fn(bounds))
+ }
+ TyKind::Alias(AliasTy::Opaque(OpaqueTy {
+ opaque_ty_id,
+ substitution: parameters,
+ }))
+ | TyKind::OpaqueType(opaque_ty_id, parameters) => {
+ let impl_trait_id =
+ f.db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+ if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id {
+ let datas =
+ f.db.return_type_impl_traits(func)
+ .expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, parameters);
+ let mut len = bounds.skip_binders().len();
+
+ // Don't count Sized but count when it absent
+ // (i.e. when explicit ?Sized bound is set).
+ let default_sized = SizedByDefault::Sized {
+ anchor: func.lookup(f.db.upcast()).module(f.db.upcast()).krate(),
+ };
+ let sized_bounds = bounds
+ .skip_binders()
+ .iter()
+ .filter(|b| {
+ matches!(
+ b.skip_binders(),
+ WhereClause::Implemented(trait_ref)
+ if default_sized.is_sized_trait(
+ trait_ref.hir_trait_id(),
+ f.db.upcast(),
+ ),
+ )
+ })
+ .count();
+ match sized_bounds {
+ 0 => len += 1,
+ _ => {
+ len = len.saturating_sub(sized_bounds);
+ }
+ }
+
+ (len, contains_impl_fn(bounds.skip_binders()))
+ } else {
+ (0, false)
+ }
+ }
+ _ => (0, false),
+ };
+
+ if has_impl_fn_pred && preds_to_print <= 2 {
+ return t.hir_fmt(f);
+ }
+
+ if preds_to_print > 1 {
+ write!(f, "(")?;
+ t.hir_fmt(f)?;
+ write!(f, ")")?;
+ } else {
+ t.hir_fmt(f)?;
+ }
+ }
+ TyKind::Tuple(_, substs) => {
+ if substs.len(Interner) == 1 {
+ write!(f, "(")?;
+ substs.at(Interner, 0).hir_fmt(f)?;
+ write!(f, ",)")?;
+ } else {
+ write!(f, "(")?;
+ f.write_joined(&*substs.as_slice(Interner), ", ")?;
+ write!(f, ")")?;
+ }
+ }
+ TyKind::Function(fn_ptr) => {
+ let sig = CallableSig::from_fn_ptr(fn_ptr);
+ sig.hir_fmt(f)?;
+ }
+ TyKind::FnDef(def, parameters) => {
+ let def = from_chalk(f.db, *def);
+ let sig = f.db.callable_item_signature(def).substitute(Interner, parameters);
+ match def {
+ CallableDefId::FunctionId(ff) => {
+ write!(f, "fn {}", f.db.function_data(ff).name)?
+ }
+ CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
+ CallableDefId::EnumVariantId(e) => {
+ write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)?
+ }
+ };
+ if parameters.len(Interner) > 0 {
+ let generics = generics(f.db.upcast(), def.into());
+ let (parent_params, self_param, type_params, const_params, _impl_trait_params) =
+ generics.provenance_split();
+ let total_len = parent_params + self_param + type_params + const_params;
+ // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
+ if total_len > 0 {
+ write!(f, "<")?;
+ f.write_joined(&parameters.as_slice(Interner)[..total_len], ", ")?;
+ write!(f, ">")?;
+ }
+ }
+ write!(f, "(")?;
+ f.write_joined(sig.params(), ", ")?;
+ write!(f, ")")?;
+ let ret = sig.ret();
+ if !ret.is_unit() {
+ write!(f, " -> ")?;
+ ret.hir_fmt(f)?;
+ }
+ }
+ TyKind::Adt(AdtId(def_id), parameters) => {
+ match f.display_target {
+ DisplayTarget::Diagnostics | DisplayTarget::Test => {
+ let name = match *def_id {
+ hir_def::AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
+ hir_def::AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
+ hir_def::AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
+ };
+ write!(f, "{}", name)?;
+ }
+ DisplayTarget::SourceCode { module_id } => {
+ if let Some(path) = find_path::find_path(
+ f.db.upcast(),
+ ItemInNs::Types((*def_id).into()),
+ module_id,
+ ) {
+ write!(f, "{}", path)?;
+ } else {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::PathNotFound,
+ ));
+ }
+ }
+ }
+
+ if parameters.len(Interner) > 0 {
+ let parameters_to_write = if f.display_target.is_source_code()
+ || f.omit_verbose_types()
+ {
+ match self
+ .as_generic_def(f.db)
+ .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
+ .filter(|defaults| !defaults.is_empty())
+ {
+ None => parameters.as_slice(Interner),
+ Some(default_parameters) => {
+ fn should_show(
+ parameter: &GenericArg,
+ default_parameters: &[Binders<GenericArg>],
+ i: usize,
+ parameters: &Substitution,
+ ) -> bool {
+ if parameter.ty(Interner).map(|x| x.kind(Interner))
+ == Some(&TyKind::Error)
+ {
+ return true;
+ }
+ if let Some(ConstValue::Concrete(c)) =
+ parameter.constant(Interner).map(|x| x.data(Interner).value)
+ {
+ if c.interned == ConstScalar::Unknown {
+ return true;
+ }
+ }
+ let default_parameter = match default_parameters.get(i) {
+ Some(x) => x,
+ None => return true,
+ };
+ let actual_default = default_parameter
+ .clone()
+ .substitute(Interner, &subst_prefix(parameters, i));
+ parameter != &actual_default
+ }
+ let mut default_from = 0;
+ for (i, parameter) in parameters.iter(Interner).enumerate() {
+ if should_show(parameter, &default_parameters, i, parameters) {
+ default_from = i + 1;
+ }
+ }
+ &parameters.as_slice(Interner)[0..default_from]
+ }
+ }
+ } else {
+ parameters.as_slice(Interner)
+ };
+ if !parameters_to_write.is_empty() {
+ write!(f, "<")?;
+
+ if f.display_target.is_source_code() {
+ let mut first = true;
+ for generic_arg in parameters_to_write {
+ if !first {
+ write!(f, ", ")?;
+ }
+ first = false;
+
+ if generic_arg.ty(Interner).map(|ty| ty.kind(Interner))
+ == Some(&TyKind::Error)
+ {
+ write!(f, "_")?;
+ } else {
+ generic_arg.hir_fmt(f)?;
+ }
+ }
+ } else {
+ f.write_joined(parameters_to_write, ", ")?;
+ }
+
+ write!(f, ">")?;
+ }
+ }
+ }
+ TyKind::AssociatedType(assoc_type_id, parameters) => {
+ let type_alias = from_assoc_type_id(*assoc_type_id);
+ let trait_ = match type_alias.lookup(f.db.upcast()).container {
+ ItemContainerId::TraitId(it) => it,
+ _ => panic!("not an associated type"),
+ };
+ let trait_ = f.db.trait_data(trait_);
+ let type_alias_data = f.db.type_alias_data(type_alias);
+
+ // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
+ if f.display_target.is_test() {
+ write!(f, "{}::{}", trait_.name, type_alias_data.name)?;
+ if parameters.len(Interner) > 0 {
+ write!(f, "<")?;
+ f.write_joined(&*parameters.as_slice(Interner), ", ")?;
+ write!(f, ">")?;
+ }
+ } else {
+ let projection_ty = ProjectionTy {
+ associated_ty_id: to_assoc_type_id(type_alias),
+ substitution: parameters.clone(),
+ };
+
+ projection_ty.hir_fmt(f)?;
+ }
+ }
+ TyKind::Foreign(type_alias) => {
+ let type_alias = f.db.type_alias_data(from_foreign_def_id(*type_alias));
+ write!(f, "{}", type_alias.name)?;
+ }
+ TyKind::OpaqueType(opaque_ty_id, parameters) => {
+ let impl_trait_id = f.db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+ match impl_trait_id {
+ ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas =
+ f.db.return_type_impl_traits(func).expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, &parameters);
+ let krate = func.lookup(f.db.upcast()).module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ bounds.skip_binders(),
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
+ }
+ ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ write!(f, "impl Future<Output = ")?;
+ parameters.at(Interner, 0).hir_fmt(f)?;
+ write!(f, ">")?;
+ }
+ }
+ }
+ TyKind::Closure(.., substs) => {
+ if f.display_target.is_source_code() {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::Closure,
+ ));
+ }
+ let sig = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(f.db);
+ if let Some(sig) = sig {
+ if sig.params().is_empty() {
+ write!(f, "||")?;
+ } else if f.should_truncate() {
+ write!(f, "|{}|", TYPE_HINT_TRUNCATION)?;
+ } else {
+ write!(f, "|")?;
+ f.write_joined(sig.params(), ", ")?;
+ write!(f, "|")?;
+ };
+
+ write!(f, " -> ")?;
+ sig.ret().hir_fmt(f)?;
+ } else {
+ write!(f, "{{closure}}")?;
+ }
+ }
+ TyKind::Placeholder(idx) => {
+ let id = from_placeholder_idx(f.db, *idx);
+ let generics = generics(f.db.upcast(), id.parent);
+ let param_data = &generics.params.type_or_consts[id.local_id];
+ match param_data {
+ TypeOrConstParamData::TypeParamData(p) => match p.provenance {
+ TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
+ write!(f, "{}", p.name.clone().unwrap_or_else(Name::missing))?
+ }
+ TypeParamProvenance::ArgumentImplTrait => {
+ let substs = generics.placeholder_subst(f.db);
+ let bounds =
+ f.db.generic_predicates(id.parent)
+ .iter()
+ .map(|pred| pred.clone().substitute(Interner, &substs))
+ .filter(|wc| match &wc.skip_binders() {
+ WhereClause::Implemented(tr) => {
+ &tr.self_type_parameter(Interner) == self
+ }
+ WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(proj),
+ ty: _,
+ }) => &proj.self_type_parameter(Interner) == self,
+ _ => false,
+ })
+ .collect::<Vec<_>>();
+ let krate = id.parent.module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ &bounds,
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ }
+ },
+ TypeOrConstParamData::ConstParamData(p) => {
+ write!(f, "{}", p.name)?;
+ }
+ }
+ }
+ TyKind::BoundVar(idx) => idx.hir_fmt(f)?,
+ TyKind::Dyn(dyn_ty) => {
+ write_bounds_like_dyn_trait_with_prefix(
+ "dyn",
+ dyn_ty.bounds.skip_binders().interned(),
+ SizedByDefault::NotSized,
+ f,
+ )?;
+ }
+ TyKind::Alias(AliasTy::Projection(p_ty)) => p_ty.hir_fmt(f)?,
+ TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+ let impl_trait_id = f.db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into());
+ match impl_trait_id {
+ ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas =
+ f.db.return_type_impl_traits(func).expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, &opaque_ty.substitution);
+ let krate = func.lookup(f.db.upcast()).module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ bounds.skip_binders(),
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ }
+ ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ write!(f, "{{async block}}")?;
+ }
+ };
+ }
+ TyKind::Error => {
+ if f.display_target.is_source_code() {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::UnknownType,
+ ));
+ }
+ write!(f, "{{unknown}}")?;
+ }
+ TyKind::InferenceVar(..) => write!(f, "_")?,
+ TyKind::Generator(..) => write!(f, "{{generator}}")?,
+ TyKind::GeneratorWitness(..) => write!(f, "{{generator witness}}")?,
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for CallableSig {
+    /// Renders a function-pointer signature as `fn(params) -> Ret`,
+    /// eliding a unit return type and appending `...` for C-variadic signatures.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        write!(f, "fn(")?;
+        f.write_joined(self.params(), ", ")?;
+        if self.is_varargs {
+            // A `, ` separator is only needed when fixed parameters precede the ellipsis.
+            let ellipsis = if self.params().is_empty() { "..." } else { ", ..." };
+            write!(f, "{}", ellipsis)?;
+        }
+        write!(f, ")")?;
+        let ret_ty = self.ret();
+        if !ret_ty.is_unit() {
+            write!(f, " -> ")?;
+            ret_ty.hir_fmt(f)?;
+        }
+        Ok(())
+    }
+}
+
+/// Returns the `Fn`/`FnMut`/`FnOnce` traits as resolved in the crate that defines `trait_`.
+fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator<Item = TraitId> {
+    utils::fn_traits(db, trait_.lookup(db).container.krate())
+}
+
+/// Whether the bounds of a `dyn Trait`/`impl Trait` carry an implicit `Sized` bound.
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum SizedByDefault {
+    // No implicit `Sized` (e.g. `dyn Trait` objects are `?Sized` by default).
+    NotSized,
+    // Implicitly `Sized`; `anchor` is the crate used to resolve the `sized` lang item.
+    Sized { anchor: CrateId },
+}
+
+impl SizedByDefault {
+    /// Returns `true` iff `trait_` is the `Sized` lang item of the anchor crate.
+    /// Always `false` for `NotSized`, so an explicit `Sized` bound is then printed as-is.
+    fn is_sized_trait(self, trait_: TraitId, db: &dyn DefDatabase) -> bool {
+        if let Self::Sized { anchor } = self {
+            let sized_trait = db
+                .lang_item(anchor, SmolStr::new_inline("sized"))
+                .and_then(|lang_item| lang_item.as_trait());
+            sized_trait == Some(trait_)
+        } else {
+            false
+        }
+    }
+}
+
+/// Writes `prefix` (e.g. `dyn` or `impl`) followed, when there is anything to
+/// show, by a space and the rendered bound list.
+///
+/// Even with no predicates, a `Sized`-by-default context still prints the
+/// implicit `Sized`/`?Sized` marker, so the bound list is emitted then too.
+pub fn write_bounds_like_dyn_trait_with_prefix(
+    prefix: &str,
+    predicates: &[QuantifiedWhereClause],
+    default_sized: SizedByDefault,
+    f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+    write!(f, "{}", prefix)?;
+    // Simplified from `!p.is_empty() || p.is_empty() && matches!(..)`:
+    // `!A || (A && B)` is equivalent to `!A || B`.
+    if !predicates.is_empty() || matches!(default_sized, SizedByDefault::Sized { .. }) {
+        write!(f, " ")?;
+        write_bounds_like_dyn_trait(predicates, default_sized, f)
+    } else {
+        Ok(())
+    }
+}
+
+/// Writes a `+`-separated bound list (the part after `dyn `/`impl `):
+/// `Fn*` traits are rendered with paren sugar (`Fn(Args) -> Ret`), associated
+/// type equalities are folded into angle brackets after their trait, and the
+/// implicit `Sized`/`?Sized` marker is handled according to `default_sized`.
+fn write_bounds_like_dyn_trait(
+    predicates: &[QuantifiedWhereClause],
+    default_sized: SizedByDefault,
+    f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+    // Note: This code is written to produce nice results (i.e.
+    // corresponding to surface Rust) for types that can occur in
+    // actual Rust. It will have weird results if the predicates
+    // aren't as expected (i.e. self types = $0, projection
+    // predicates for a certain trait come after the Implemented
+    // predicate for that trait).
+    let mut first = true; // no ` + ` separator before the first printed bound
+    let mut angle_open = false; // a `<` for assoc-type bindings is still unclosed
+    let mut is_fn_trait = false; // the last trait printed was one of the Fn* traits
+    let mut is_sized = false; // an explicit Sized bound was seen among the predicates
+    for p in predicates.iter() {
+        match p.skip_binders() {
+            WhereClause::Implemented(trait_ref) => {
+                let trait_ = trait_ref.hir_trait_id();
+                if default_sized.is_sized_trait(trait_, f.db.upcast()) {
+                    is_sized = true;
+                    if matches!(default_sized, SizedByDefault::Sized { .. }) {
+                        // Don't print +Sized, but rather +?Sized if absent.
+                        continue;
+                    }
+                }
+                if !is_fn_trait {
+                    is_fn_trait = fn_traits(f.db.upcast(), trait_).any(|it| it == trait_);
+                }
+                if !is_fn_trait && angle_open {
+                    // Close the assoc-type binding list left open by the previous trait.
+                    write!(f, ">")?;
+                    angle_open = false;
+                }
+                if !first {
+                    write!(f, " + ")?;
+                }
+                // We assume that the self type is ^0.0 (i.e. the
+                // existential) here, which is the only thing that's
+                // possible in actual Rust, and hence don't print it
+                write!(f, "{}", f.db.trait_data(trait_).name)?;
+                if let [_, params @ ..] = &*trait_ref.substitution.as_slice(Interner) {
+                    if is_fn_trait {
+                        // Fn-sugar: the first substitution param is the argument tuple.
+                        if let Some(args) =
+                            params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
+                        {
+                            write!(f, "(")?;
+                            f.write_joined(args.as_slice(Interner), ", ")?;
+                            write!(f, ")")?;
+                        }
+                    } else if !params.is_empty() {
+                        write!(f, "<")?;
+                        f.write_joined(params, ", ")?;
+                        // there might be assoc type bindings, so we leave the angle brackets open
+                        angle_open = true;
+                    }
+                }
+            }
+            // For Fn-sugar, the `Output` binding is rendered as `-> Ret` (omitted for unit).
+            WhereClause::AliasEq(alias_eq) if is_fn_trait => {
+                is_fn_trait = false;
+                if !alias_eq.ty.is_unit() {
+                    write!(f, " -> ")?;
+                    alias_eq.ty.hir_fmt(f)?;
+                }
+            }
+            WhereClause::AliasEq(AliasEq { ty, alias }) => {
+                // in types in actual Rust, these will always come
+                // after the corresponding Implemented predicate
+                if angle_open {
+                    write!(f, ", ")?;
+                } else {
+                    write!(f, "<")?;
+                    angle_open = true;
+                }
+                if let AliasTy::Projection(proj) = alias {
+                    let type_alias =
+                        f.db.type_alias_data(from_assoc_type_id(proj.associated_ty_id));
+                    write!(f, "{} = ", type_alias.name)?;
+                }
+                ty.hir_fmt(f)?;
+            }
+
+            // FIXME implement these
+            WhereClause::LifetimeOutlives(_) => {}
+            WhereClause::TypeOutlives(_) => {}
+        }
+        first = false;
+    }
+    if angle_open {
+        write!(f, ">")?;
+    }
+    if matches!(default_sized, SizedByDefault::Sized { .. }) {
+        if !is_sized {
+            // Sized is the default, so its absence must be spelled out as `?Sized`.
+            write!(f, "{}?Sized", if first { "" } else { " + " })?;
+        } else if first {
+            // Only a Sized bound was present (and skipped above); print it so
+            // the bound list is not empty.
+            write!(f, "Sized")?;
+        }
+    }
+    Ok(())
+}
+
+/// Writes a trait reference as `Self: Trait<Args>`, or as `Self as Trait<Args>`
+/// when `use_as` is set (the form used inside qualified paths).
+fn fmt_trait_ref(
+    tr: &TraitRef,
+    f: &mut HirFormatter<'_>,
+    use_as: bool,
+) -> Result<(), HirDisplayError> {
+    if f.should_truncate() {
+        return write!(f, "{}", TYPE_HINT_TRUNCATION);
+    }
+
+    tr.self_type_parameter(Interner).hir_fmt(f)?;
+    write!(f, "{}", if use_as { " as " } else { ": " })?;
+    write!(f, "{}", f.db.trait_data(tr.hir_trait_id()).name)?;
+    // Substitution slot 0 is the self type; anything beyond it is a generic argument.
+    if tr.substitution.len(Interner) > 1 {
+        write!(f, "<")?;
+        f.write_joined(&tr.substitution.as_slice(Interner)[1..], ", ")?;
+        write!(f, ">")?;
+    }
+    Ok(())
+}
+
+impl HirDisplay for TraitRef {
+    // Formats as `Self: Trait<Args>` (the colon, non-`as` form).
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        fmt_trait_ref(self, f, false)
+    }
+}
+
+impl HirDisplay for WhereClause {
+    /// Formats a where-clause predicate: a trait bound, a fully-qualified
+    /// projection equality, or (for now) nothing for outlives constraints.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        if f.should_truncate() {
+            return write!(f, "{}", TYPE_HINT_TRUNCATION);
+        }
+
+        match self {
+            WhereClause::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
+            WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
+                // Rendered as `<Self as Trait<..>>::Assoc = Ty`.
+                write!(f, "<")?;
+                fmt_trait_ref(&projection_ty.trait_ref(f.db), f, true)?;
+                let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
+                let type_alias_data = f.db.type_alias_data(type_alias);
+                write!(f, ">::{} = ", type_alias_data.name)?;
+                ty.hir_fmt(f)?;
+            }
+            // Non-projection alias equalities have no surface-Rust counterpart.
+            WhereClause::AliasEq(_) => write!(f, "{{error}}")?,
+
+            // FIXME implement these
+            WhereClause::TypeOutlives(..) => {}
+            WhereClause::LifetimeOutlives(..) => {}
+        }
+        Ok(())
+    }
+}
+
+impl HirDisplay for LifetimeOutlives {
+    // Formats an outlives relation as `'a: 'b`.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        self.a.hir_fmt(f)?;
+        write!(f, ": ")?;
+        self.b.hir_fmt(f)
+    }
+}
+
+impl HirDisplay for Lifetime {
+    // Delegates to the interned `LifetimeData`.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        self.interned().hir_fmt(f)
+    }
+}
+
+impl HirDisplay for LifetimeData {
+    /// Formats a lifetime: named placeholders by their declared name, bound and
+    /// inference variables via their own representations, `'static` literally.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self {
+            LifetimeData::Placeholder(idx) => {
+                // Map the placeholder back to its generic-parameter declaration
+                // so the user-written lifetime name can be printed.
+                let id = lt_from_placeholder_idx(f.db, *idx);
+                let generics = generics(f.db.upcast(), id.parent);
+                write!(f, "{}", generics.params.lifetimes[id.local_id].name)
+            }
+            LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
+            LifetimeData::InferenceVar(_) => write!(f, "_"),
+            LifetimeData::Static => write!(f, "'static"),
+            // These variants carry no printable surface syntax.
+            LifetimeData::Empty(_) | LifetimeData::Erased | LifetimeData::Phantom(_, _) => Ok(()),
+        }
+    }
+}
+
+impl HirDisplay for DomainGoal {
+    /// Formats `Holds(..)` goals with their inner where-clause; any other goal
+    /// kind is printed as an opaque `?`.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        if let DomainGoal::Holds(wc) = self {
+            write!(f, "Holds(")?;
+            wc.hir_fmt(f)?;
+            write!(f, ")")?;
+        } else {
+            write!(f, "?")?;
+        }
+        Ok(())
+    }
+}
+
+/// Writes the visibility qualifier (`pub `, `pub(crate) `, `pub(super) `, …)
+/// as it would be spelled from the perspective of `module_id`; writes nothing
+/// for private (`pub(self)`/omitted) visibility.
+pub fn write_visibility(
+    module_id: ModuleId,
+    vis: Visibility,
+    f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+    let vis_id = match vis {
+        Visibility::Public => return write!(f, "pub "),
+        Visibility::Module(vis_id) => vis_id,
+    };
+    let def_map = module_id.def_map(f.db.upcast());
+    if vis_id == module_id {
+        // pub(self) or omitted
+        Ok(())
+    } else if def_map.module_id(def_map.root()) == vis_id {
+        write!(f, "pub(crate) ")
+    } else if module_id.containing_module(f.db.upcast()) == Some(vis_id) {
+        write!(f, "pub(super) ")
+    } else {
+        // Visible in some further ancestor; the exact `in` path is not reconstructed.
+        write!(f, "pub(in ...) ")
+    }
+}
+
+impl HirDisplay for TypeRef {
+    /// Renders a syntactic (unresolved) type reference back to Rust-like source text.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self {
+            TypeRef::Never => write!(f, "!")?,
+            TypeRef::Placeholder => write!(f, "_")?,
+            TypeRef::Tuple(elems) => {
+                write!(f, "(")?;
+                f.write_joined(elems, ", ")?;
+                if elems.len() == 1 {
+                    // A one-element tuple needs its trailing comma: `(T,)`.
+                    write!(f, ",")?;
+                }
+                write!(f, ")")?;
+            }
+            TypeRef::Path(path) => path.hir_fmt(f)?,
+            TypeRef::RawPtr(inner, mutability) => {
+                let mutability = match mutability {
+                    hir_def::type_ref::Mutability::Shared => "*const ",
+                    hir_def::type_ref::Mutability::Mut => "*mut ",
+                };
+                write!(f, "{}", mutability)?;
+                inner.hir_fmt(f)?;
+            }
+            TypeRef::Reference(inner, lifetime, mutability) => {
+                let mutability = match mutability {
+                    hir_def::type_ref::Mutability::Shared => "",
+                    hir_def::type_ref::Mutability::Mut => "mut ",
+                };
+                write!(f, "&")?;
+                if let Some(lifetime) = lifetime {
+                    write!(f, "{} ", lifetime.name)?;
+                }
+                write!(f, "{}", mutability)?;
+                inner.hir_fmt(f)?;
+            }
+            TypeRef::Array(inner, len) => {
+                write!(f, "[")?;
+                inner.hir_fmt(f)?;
+                write!(f, "; {}]", len)?;
+            }
+            TypeRef::Slice(inner) => {
+                write!(f, "[")?;
+                inner.hir_fmt(f)?;
+                write!(f, "]")?;
+            }
+            TypeRef::Fn(parameters, is_varargs) => {
+                // FIXME: Function pointer qualifiers.
+                write!(f, "fn(")?;
+                // `parameters` stores the return type as its last element; split it off.
+                if let Some(((_, return_type), function_parameters)) = parameters.split_last() {
+                    for index in 0..function_parameters.len() {
+                        let (param_name, param_type) = &function_parameters[index];
+                        if let Some(name) = param_name {
+                            write!(f, "{}: ", name)?;
+                        }
+
+                        param_type.hir_fmt(f)?;
+
+                        if index != function_parameters.len() - 1 {
+                            write!(f, ", ")?;
+                        }
+                    }
+                    if *is_varargs {
+                        // `...` follows the fixed params; no separator if it stands alone.
+                        write!(f, "{}...", if parameters.len() == 1 { "" } else { ", " })?;
+                    }
+                    write!(f, ")")?;
+                    match &return_type {
+                        // A unit return type stays implicit.
+                        TypeRef::Tuple(tup) if tup.is_empty() => {}
+                        _ => {
+                            write!(f, " -> ")?;
+                            return_type.hir_fmt(f)?;
+                        }
+                    }
+                }
+            }
+            TypeRef::ImplTrait(bounds) => {
+                write!(f, "impl ")?;
+                f.write_joined(bounds, " + ")?;
+            }
+            TypeRef::DynTrait(bounds) => {
+                write!(f, "dyn ")?;
+                f.write_joined(bounds, " + ")?;
+            }
+            TypeRef::Macro(macro_call) => {
+                // Print the macro path if it can be lowered; arguments are elided as `(..)`.
+                let macro_call = macro_call.to_node(f.db.upcast());
+                let ctx = body::LowerCtx::with_hygiene(f.db.upcast(), &Hygiene::new_unhygienic());
+                match macro_call.path() {
+                    Some(path) => match Path::from_src(path, &ctx) {
+                        Some(path) => path.hir_fmt(f)?,
+                        None => write!(f, "{{macro}}")?,
+                    },
+                    None => write!(f, "{{macro}}")?,
+                }
+                write!(f, "!(..)")?;
+            }
+            TypeRef::Error => write!(f, "{{error}}")?,
+        }
+        Ok(())
+    }
+}
+
+impl HirDisplay for TypeBound {
+    /// Formats a syntactic type bound: a (possibly `?`-relaxed) trait path,
+    /// a lifetime, or a higher-ranked `for<'a> Trait` bound.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self {
+            TypeBound::Path(path, modifier) => {
+                // `?Trait` relaxes a default bound; plain bounds take no prefix.
+                if matches!(modifier, TraitBoundModifier::Maybe) {
+                    write!(f, "?")?;
+                }
+                path.hir_fmt(f)
+            }
+            TypeBound::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
+            TypeBound::ForLifetime(lifetimes, path) => {
+                write!(f, "for<{}> ", lifetimes.iter().format(", "))?;
+                path.hir_fmt(f)
+            }
+            TypeBound::Error => write!(f, "{{error}}"),
+        }
+    }
+}
+
+impl HirDisplay for Path {
+    /// Renders a syntactic path: its kind prefix (`crate`, `super`, `$crate`, …)
+    /// or type anchor (`<Ty>::…`), then each segment with any generic arguments
+    /// and associated-type bindings.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match (self.type_anchor(), self.kind()) {
+            (Some(anchor), _) => {
+                write!(f, "<")?;
+                anchor.hir_fmt(f)?;
+                write!(f, ">")?;
+            }
+            (_, PathKind::Plain) => {}
+            (_, PathKind::Abs) => {}
+            (_, PathKind::Crate) => write!(f, "crate")?,
+            (_, PathKind::Super(0)) => write!(f, "self")?,
+            (_, PathKind::Super(n)) => {
+                // One `super` per level, `::`-separated.
+                for i in 0..*n {
+                    if i > 0 {
+                        write!(f, "::")?;
+                    }
+                    write!(f, "super")?;
+                }
+            }
+            (_, PathKind::DollarCrate(id)) => {
+                // Resolve `$crate` to the crate's display name.
+                // FIXME: should use the dependency name instead if available, but that depends on
+                // the crate invoking `HirDisplay`
+                let crate_graph = f.db.crate_graph();
+                let name = crate_graph[*id]
+                    .display_name
+                    .as_ref()
+                    .map(|name| name.canonical_name())
+                    .unwrap_or("$crate");
+                write!(f, "{name}")?
+            }
+        }
+
+        for (seg_idx, segment) in self.segments().iter().enumerate() {
+            // A plain path's first segment takes no leading `::`.
+            if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
+                write!(f, "::")?;
+            }
+            write!(f, "{}", segment.name)?;
+            if let Some(generic_args) = segment.args_and_bindings {
+                // We should be in type context, so format as `Foo<Bar>` instead of `Foo::<Bar>`.
+                // Do we actually format expressions?
+                if generic_args.desugared_from_fn {
+                    // First argument will be a tuple, which already includes the parentheses.
+                    // If the tuple only contains 1 item, write it manually to avoid the trailing `,`.
+                    if let hir_def::path::GenericArg::Type(TypeRef::Tuple(v)) =
+                        &generic_args.args[0]
+                    {
+                        if v.len() == 1 {
+                            write!(f, "(")?;
+                            v[0].hir_fmt(f)?;
+                            write!(f, ")")?;
+                        } else {
+                            generic_args.args[0].hir_fmt(f)?;
+                        }
+                    }
+                    // The `Output` binding is rendered as `-> Ret` (omitted for unit).
+                    if let Some(ret) = &generic_args.bindings[0].type_ref {
+                        if !matches!(ret, TypeRef::Tuple(v) if v.is_empty()) {
+                            write!(f, " -> ")?;
+                            ret.hir_fmt(f)?;
+                        }
+                    }
+                    return Ok(());
+                }
+
+                write!(f, "<")?;
+                let mut first = true;
+                for arg in &generic_args.args {
+                    if first {
+                        first = false;
+                        if generic_args.has_self_type {
+                            // FIXME: Convert to `<Ty as Trait>` form.
+                            write!(f, "Self = ")?;
+                        }
+                    } else {
+                        write!(f, ", ")?;
+                    }
+                    arg.hir_fmt(f)?;
+                }
+                // Assoc-type bindings (`Item = T` / `Item: Bound`) follow the args;
+                // `first` is shared so the separator logic carries over.
+                for binding in &generic_args.bindings {
+                    if first {
+                        first = false;
+                    } else {
+                        write!(f, ", ")?;
+                    }
+                    write!(f, "{}", binding.name)?;
+                    match &binding.type_ref {
+                        Some(ty) => {
+                            write!(f, " = ")?;
+                            ty.hir_fmt(f)?
+                        }
+                        None => {
+                            write!(f, ": ")?;
+                            f.write_joined(&binding.bounds, " + ")?;
+                        }
+                    }
+                }
+                write!(f, ">")?;
+            }
+        }
+        Ok(())
+    }
+}
+
+impl HirDisplay for hir_def::path::GenericArg {
+    // Formats a syntactic generic argument: a type, a const expression, or a lifetime.
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self {
+            hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f),
+            hir_def::path::GenericArg::Const(c) => write!(f, "{}", c),
+            hir_def::path::GenericArg::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
new file mode 100644
index 000000000..46eeea0e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -0,0 +1,1088 @@
+//! Type inference, i.e. the process of walking through the code and determining
+//! the type of each expression and pattern.
+//!
+//! For type inference, compare the implementations in rustc (the various
+//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
+//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
+//! inference here is the `infer` function, which infers the types of all
+//! expressions in a given function.
+//!
+//! During inference, types (i.e. the `Ty` struct) can contain type 'variables'
+//! which represent currently unknown types; as we walk through the expressions,
+//! we might determine that certain variables need to be equal to each other, or
+//! to certain types. To record this, we use the union-find implementation from
+//! the `ena` crate, which is extracted from rustc.
+
+use std::ops::Index;
+use std::sync::Arc;
+
+use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
+use hir_def::{
+ body::Body,
+ data::{ConstData, StaticData},
+ expr::{BindingAnnotation, ExprId, PatId},
+ lang_item::LangItemTarget,
+ path::{path, Path},
+ resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
+ type_ref::TypeRef,
+ AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, Lookup,
+ TraitId, TypeAliasId, VariantId,
+};
+use hir_expand::name::{name, Name};
+use itertools::Either;
+use la_arena::ArenaMap;
+use rustc_hash::FxHashMap;
+use stdx::{always, impl_from};
+
+use crate::{
+ db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany,
+ lower::ImplTraitLoweringMode, to_assoc_type_id, AliasEq, AliasTy, Const, DomainGoal,
+ GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, Substitution,
+ TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+};
+
+// This lint has a false positive here. See the link below for details.
+//
+// https://github.com/rust-lang/rust/issues/57411
+#[allow(unreachable_pub)]
+pub use coerce::could_coerce;
+#[allow(unreachable_pub)]
+pub use unify::could_unify;
+
+pub(crate) mod unify;
+mod path;
+mod expr;
+mod pat;
+mod coerce;
+mod closure;
+
+/// The entry point of type inference.
+pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
+ let _p = profile::span("infer_query");
+ let resolver = def.resolver(db.upcast());
+ let body = db.body(def);
+ let mut ctx = InferenceContext::new(db, def, &body, resolver);
+
+ match def {
+ DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
+ DefWithBodyId::FunctionId(f) => ctx.collect_fn(f),
+ DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
+ }
+
+ ctx.infer_body();
+
+ Arc::new(ctx.resolve_all())
+}
+
+/// Fully normalize all the types found within `ty` in context of `owner` body definition.
+///
+/// This is appropriate to use only after type-check: it assumes
+/// that normalization will succeed, for example.
+pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> Ty {
+ if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) {
+ return ty;
+ }
+ let krate = owner.module(db.upcast()).krate();
+ let trait_env = owner
+ .as_generic_def_id()
+ .map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
+ let mut table = unify::InferenceTable::new(db, trait_env);
+
+ let ty_with_vars = table.normalize_associated_types_in(ty);
+ table.resolve_obligations_as_possible();
+ table.propagate_diverging_flag();
+ table.resolve_completely(ty_with_vars)
+}
+
/// Key type for maps that can be indexed by either an expression or a
/// pattern (e.g. variant and associated-item resolutions).
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
enum ExprOrPatId {
    ExprId(ExprId),
    PatId(PatId),
}
impl_from!(ExprId, PatId for ExprOrPatId);
+
/// Binding modes inferred for patterns.
/// <https://doc.rust-lang.org/reference/patterns.html#binding-modes>
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum BindingMode {
    /// Bind by value (move or copy the matched value).
    Move,
    /// Bind by shared or mutable reference.
    Ref(Mutability),
}
+
+impl BindingMode {
+ fn convert(annotation: BindingAnnotation) -> BindingMode {
+ match annotation {
+ BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
+ BindingAnnotation::Ref => BindingMode::Ref(Mutability::Not),
+ BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut),
+ }
+ }
+}
+
impl Default for BindingMode {
    fn default() -> Self {
        // The default binding mode is by-value.
        BindingMode::Move
    }
}
+
/// Used to generalize patterns and assignee expressions.
trait PatLike: Into<ExprOrPatId> + Copy {
    // Assignee expressions have no binding mode, so the associated type lets
    // `ExprId` use `()` while real patterns thread a `BindingMode` through.
    type BindingMode: Copy;

    /// Infers the type of `id` against `expected_ty`.
    fn infer(
        this: &mut InferenceContext<'_>,
        id: Self,
        expected_ty: &Ty,
        default_bm: Self::BindingMode,
    ) -> Ty;
}

impl PatLike for ExprId {
    type BindingMode = ();

    fn infer(
        this: &mut InferenceContext<'_>,
        id: Self,
        expected_ty: &Ty,
        _: Self::BindingMode,
    ) -> Ty {
        this.infer_assignee_expr(id, expected_ty)
    }
}

impl PatLike for PatId {
    type BindingMode = BindingMode;

    fn infer(
        this: &mut InferenceContext<'_>,
        id: Self,
        expected_ty: &Ty,
        default_bm: Self::BindingMode,
    ) -> Ty {
        this.infer_pat(id, expected_ty, default_bm)
    }
}
+
/// A successful inference step: the produced `value` plus any new `goals`
/// (obligations) that still have to be proven in their environments.
#[derive(Debug)]
pub(crate) struct InferOk<T> {
    value: T,
    goals: Vec<InEnvironment<Goal>>,
}

impl<T> InferOk<T> {
    /// Maps the contained value while carrying the pending goals along unchanged.
    fn map<U>(self, f: impl FnOnce(T) -> U) -> InferOk<U> {
        InferOk { value: f(self.value), goals: self.goals }
    }
}

/// Opaque marker for a failed unification/coercion; no details are recorded.
#[derive(Debug)]
pub(crate) struct TypeError;
pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
+
/// Diagnostics produced during inference; surfaced to the user by higher layers.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum InferenceDiagnostic {
    NoSuchField { expr: ExprId },
    BreakOutsideOfLoop { expr: ExprId },
    MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize },
}

/// A mismatch between an expected and an inferred type.
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeMismatch {
    pub expected: Ty,
    pub actual: Ty,
}

/// Frequently-needed types, interned once so references to them can be
/// handed out (e.g. by the `Index` impls) without re-interning.
#[derive(Clone, PartialEq, Eq, Debug)]
struct InternedStandardTypes {
    unknown: Ty,
    bool_: Ty,
    unit: Ty,
}
+
impl Default for InternedStandardTypes {
    fn default() -> Self {
        // Intern each standard type exactly once up front.
        InternedStandardTypes {
            unknown: TyKind::Error.intern(Interner),
            bool_: TyKind::Scalar(Scalar::Bool).intern(Interner),
            unit: TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner),
        }
    }
}
+/// Represents coercing a value to a different type of value.
+///
+/// We transform values by following a number of `Adjust` steps in order.
+/// See the documentation on variants of `Adjust` for more details.
+///
+/// Here are some common scenarios:
+///
+/// 1. The simplest cases are where a pointer is not adjusted fat vs thin.
+/// Here the pointer will be dereferenced N times (where a dereference can
+/// happen to raw or borrowed pointers or any smart pointer which implements
+/// Deref, including Box<_>). The types of dereferences is given by
+/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
+/// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
+/// `false`.
+///
+/// 2. A thin-to-fat coercion involves unsizing the underlying data. We start
+/// with a thin pointer, deref a number of times, unsize the underlying data,
+/// then autoref. The 'unsize' phase may change a fixed length array to a
+/// dynamically sized one, a concrete object to a trait object, or statically
+/// sized struct to a dynamically sized one. E.g., &[i32; 4] -> &[i32] is
+/// represented by:
+///
+/// ```
+/// Deref(None) -> [i32; 4],
+/// Borrow(AutoBorrow::Ref) -> &[i32; 4],
+/// Unsize -> &[i32],
+/// ```
+///
+/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
+/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
+/// The autoderef and -ref are the same as in the above example, but the type
+/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
+/// the underlying conversions from `[i32; 4]` to `[i32]`.
+///
+/// 3. Coercing a `Box<T>` to `Box<dyn Trait>` is an interesting special case. In
+/// that case, we have the pointer we need coming in, so there are no
+/// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
+/// At some point, of course, `Box` should move out of the compiler, in which
+/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
+/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Adjustment {
    /// Which adjustment step to perform.
    pub kind: Adjust,
    /// The type of the value after this adjustment step is applied.
    pub target: Ty,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Adjust {
    /// Go from ! to any type.
    NeverToAny,
    /// Dereference once, producing a place.
    Deref(Option<OverloadedDeref>),
    /// Take the address and produce either a `&` or `*` pointer.
    Borrow(AutoBorrow),
    /// A pointer-to-pointer cast; see [`PointerCast`] for the kinds.
    Pointer(PointerCast),
}
+
/// An overloaded autoderef step, representing a `Deref(Mut)::deref(_mut)`
/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
/// The target type is `U` in both cases, with the region and mutability
/// being those shared by both the receiver and the returned reference.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct OverloadedDeref(pub Mutability);

/// How a value is auto-referenced as part of an adjustment.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AutoBorrow {
    /// Converts from T to &T.
    Ref(Mutability),
    /// Converts from T to *T.
    RawPtr(Mutability),
}

/// The kinds of pointer-to-pointer casts tracked by `Adjust::Pointer`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum PointerCast {
    /// Go from a fn-item type to a fn-pointer type.
    ReifyFnPointer,

    /// Go from a safe fn pointer to an unsafe fn pointer.
    UnsafeFnPointer,

    /// Go from a non-capturing closure to an fn pointer or an unsafe fn pointer.
    /// It cannot convert a closure that requires unsafe.
    ClosureFnPointer(Safety),

    /// Go from a mut raw pointer to a const raw pointer.
    MutToConstPointer,

    #[allow(dead_code)]
    /// Go from `*const [T; N]` to `*const T`
    ArrayToPointer,

    /// Unsize a pointer/reference value, e.g., `&[T; n]` to
    /// `&[T]`. Note that the source could be a thin or fat pointer.
    /// This will do things like convert thin pointers to fat
    /// pointers, or convert structs containing thin pointers to
    /// structs containing fat pointers, or convert between fat
    /// pointers. We don't store the details of how the transform is
    /// done (in fact, we don't know that, because it might depend on
    /// the precise type parameters). We just store the target
    /// type. Codegen backends and miri figure out what has to be done
    /// based on the precise source/target type at hand.
    Unsize,
}
+
/// The result of type inference: A mapping from expressions and patterns to types.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct InferenceResult {
    /// For each method call expr, records the function it resolves to.
    method_resolutions: FxHashMap<ExprId, (FunctionId, Substitution)>,
    /// For each field access expr, records the field it resolves to.
    field_resolutions: FxHashMap<ExprId, FieldId>,
    /// For each struct literal or pattern, records the variant it resolves to.
    variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
    /// For each associated item record what it resolves to
    assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
    /// Diagnostics collected while inferring this body.
    pub diagnostics: Vec<InferenceDiagnostic>,
    /// The inferred type of each expression in the body.
    pub type_of_expr: ArenaMap<ExprId, Ty>,
    /// For each pattern record the type it resolves to.
    ///
    /// **Note**: When a pattern type is resolved it may still contain
    /// unresolved or missing subpatterns or subpatterns of mismatched types.
    pub type_of_pat: ArenaMap<PatId, Ty>,
    /// Expected-vs-actual mismatches, keyed by the offending expression or pattern.
    type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
    /// Interned Unknown to return references to.
    standard_types: InternedStandardTypes,
    /// Stores the types which were implicitly dereferenced in pattern binding modes.
    pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
    /// The binding mode each pattern was inferred with.
    pub pat_binding_modes: FxHashMap<PatId, BindingMode>,
    /// Adjustments (autoderef, autoref, coercion steps) applied to expressions.
    pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
}
+
impl InferenceResult {
    /// The function (and substitution) a method-call expression resolved to.
    pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, Substitution)> {
        self.method_resolutions.get(&expr).cloned()
    }
    /// The field a field-access expression resolved to.
    pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> {
        self.field_resolutions.get(&expr).copied()
    }
    /// The variant a struct-literal expression resolved to.
    pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
        self.variant_resolutions.get(&id.into()).copied()
    }
    /// The variant a struct/record pattern resolved to.
    pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
        self.variant_resolutions.get(&id.into()).copied()
    }
    /// The associated item an expression path resolved to.
    pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
        self.assoc_resolutions.get(&id.into()).copied()
    }
    /// The associated item a pattern path resolved to.
    pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
        self.assoc_resolutions.get(&id.into()).copied()
    }
    /// The type mismatch recorded for this expression, if any.
    pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
        self.type_mismatches.get(&expr.into())
    }
    /// The type mismatch recorded for this pattern, if any.
    pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
        self.type_mismatches.get(&pat.into())
    }
    /// Iterates over only the expression-keyed type mismatches.
    pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
        self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
            ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
            _ => None,
        })
    }
    /// Iterates over only the pattern-keyed type mismatches.
    pub fn pat_type_mismatches(&self) -> impl Iterator<Item = (PatId, &TypeMismatch)> {
        self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
            ExprOrPatId::PatId(pat) => Some((pat, mismatch)),
            _ => None,
        })
    }
}
+
// Indexing by expression/pattern id never panics: missing entries fall back
// to the interned `unknown` (error) type.
impl Index<ExprId> for InferenceResult {
    type Output = Ty;

    fn index(&self, expr: ExprId) -> &Ty {
        self.type_of_expr.get(expr).unwrap_or(&self.standard_types.unknown)
    }
}

impl Index<PatId> for InferenceResult {
    type Output = Ty;

    fn index(&self, pat: PatId) -> &Ty {
        self.type_of_pat.get(pat).unwrap_or(&self.standard_types.unknown)
    }
}
+
/// The inference context contains all information needed during type inference.
#[derive(Clone, Debug)]
pub(crate) struct InferenceContext<'a> {
    pub(crate) db: &'a dyn HirDatabase,
    /// The body definition (fn/const/static) whose body is being inferred.
    pub(crate) owner: DefWithBodyId,
    pub(crate) body: &'a Body,
    pub(crate) resolver: Resolver,
    /// Unification table holding inference variables and pending obligations.
    table: unify::InferenceTable<'a>,
    trait_env: Arc<TraitEnvironment>,
    /// The result being accumulated; finalized by `resolve_all`.
    pub(crate) result: InferenceResult,
    /// The return type of the function being inferred, the closure or async block if we're
    /// currently within one.
    ///
    /// We might consider using a nested inference context for checking
    /// closures, but currently this is the only field that will change there,
    /// so it doesn't make sense.
    return_ty: Ty,
    /// Whether the code currently being inferred diverges (e.g. after `return`/`!`).
    diverges: Diverges,
    /// Stack of enclosing breakable scopes (loops, labeled blocks).
    breakables: Vec<BreakableContext>,
}

/// One enclosing `loop`/`while`/`for`/labeled block that `break` can target.
#[derive(Clone, Debug)]
struct BreakableContext {
    /// Set to true once a `break` targeting this scope is seen.
    may_break: bool,
    /// Accumulates the types of `break`-with-value expressions for coercion.
    coerce: CoerceMany,
    /// The scope's label, if it has one.
    label: Option<name::Name>,
}
+
+fn find_breakable<'c>(
+ ctxs: &'c mut [BreakableContext],
+ label: Option<&name::Name>,
+) -> Option<&'c mut BreakableContext> {
+ match label {
+ Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label),
+ None => ctxs.last_mut(),
+ }
+}
+
+impl<'a> InferenceContext<'a> {
    /// Creates a fresh context for inferring the body of `owner`.
    fn new(
        db: &'a dyn HirDatabase,
        owner: DefWithBodyId,
        body: &'a Body,
        resolver: Resolver,
    ) -> Self {
        let krate = owner.module(db.upcast()).krate();
        // Non-generic bodies get an empty trait environment.
        let trait_env = owner
            .as_generic_def_id()
            .map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
        InferenceContext {
            result: InferenceResult::default(),
            table: unify::InferenceTable::new(db, trait_env.clone()),
            trait_env,
            return_ty: TyKind::Error.intern(Interner), // set in collect_fn_signature
            db,
            owner,
            body,
            resolver,
            diverges: Diverges::Maybe,
            breakables: Vec::new(),
        }
    }
+
    /// Consumes the context, resolving every recorded type, substitution and
    /// adjustment to its final form, and returns the finished result.
    fn resolve_all(self) -> InferenceResult {
        let InferenceContext { mut table, mut result, .. } = self;

        // FIXME resolve obligations as well (use Guidance if necessary)
        table.resolve_obligations_as_possible();

        // make sure diverging type variables are marked as such
        table.propagate_diverging_flag();
        // Every stored type must go through `resolve_completely` so no
        // inference variables leak into the public result.
        for ty in result.type_of_expr.values_mut() {
            *ty = table.resolve_completely(ty.clone());
        }
        for ty in result.type_of_pat.values_mut() {
            *ty = table.resolve_completely(ty.clone());
        }
        for mismatch in result.type_mismatches.values_mut() {
            mismatch.expected = table.resolve_completely(mismatch.expected.clone());
            mismatch.actual = table.resolve_completely(mismatch.actual.clone());
        }
        for (_, subst) in result.method_resolutions.values_mut() {
            *subst = table.resolve_completely(subst.clone());
        }
        for adjustment in result.expr_adjustments.values_mut().flatten() {
            adjustment.target = table.resolve_completely(adjustment.target.clone());
        }
        for adjustment in result.pat_adjustments.values_mut().flatten() {
            *adjustment = table.resolve_completely(adjustment.clone());
        }
        result
    }
+
    /// Seeds inference for a `const` body: its declared type is the "return type".
    fn collect_const(&mut self, data: &ConstData) {
        self.return_ty = self.make_ty(&data.type_ref);
    }

    /// Seeds inference for a `static` body: its declared type is the "return type".
    fn collect_static(&mut self, data: &StaticData) {
        self.return_ty = self.make_ty(&data.type_ref);
    }
+
    /// Seeds inference for a function body: lowers the parameter types, infers
    /// the parameter patterns against them, and sets up the return type
    /// (including `impl Trait` in return position).
    fn collect_fn(&mut self, func: FunctionId) {
        let data = self.db.function_data(func);
        let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
            .with_impl_trait_mode(ImplTraitLoweringMode::Param);
        let param_tys =
            data.params.iter().map(|(_, type_ref)| ctx.lower_ty(type_ref)).collect::<Vec<_>>();
        for (ty, pat) in param_tys.into_iter().zip(self.body.params.iter()) {
            let ty = self.insert_type_vars(ty);
            let ty = self.normalize_associated_types_in(ty);

            self.infer_pat(*pat, &ty, BindingMode::default());
        }
        let error_ty = &TypeRef::Error;
        // For `async fn`, the body's expected type is the desugared inner
        // return type, not the `impl Future` the signature shows.
        let return_ty = if data.has_async_kw() {
            data.async_ret_type.as_deref().unwrap_or(error_ty)
        } else {
            &*data.ret_type
        };
        let return_ty = self.make_ty_with_mode(return_ty, ImplTraitLoweringMode::Opaque);
        self.return_ty = return_ty;

        if let Some(rpits) = self.db.return_type_impl_traits(func) {
            // RPIT opaque types use substitution of their parent function.
            let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
            // Replace each return-position `impl Trait` opaque type with a
            // fresh inference variable constrained by the RPIT's bounds.
            self.return_ty = fold_tys(
                self.return_ty.clone(),
                |ty, _| {
                    let opaque_ty_id = match ty.kind(Interner) {
                        TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id,
                        _ => return ty,
                    };
                    let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
                        ImplTraitId::ReturnTypeImplTrait(_, idx) => idx,
                        _ => unreachable!(),
                    };
                    let bounds = (*rpits).map_ref(|rpits| {
                        rpits.impl_traits[idx as usize].bounds.map_ref(|it| it.into_iter())
                    });
                    let var = self.table.new_type_var();
                    let var_subst = Substitution::from1(Interner, var.clone());
                    for bound in bounds {
                        let predicate =
                            bound.map(|it| it.cloned()).substitute(Interner, &fn_placeholders);
                        let (var_predicate, binders) = predicate
                            .substitute(Interner, &var_subst)
                            .into_value_and_skipped_binders();
                        always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
                        self.push_obligation(var_predicate.cast(Interner));
                    }
                    var
                },
                DebruijnIndex::INNERMOST,
            );
        }
    }
+
    /// Infers the whole body expression, coercing it to the declared return type.
    fn infer_body(&mut self) {
        self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
    }

    /// Records the inferred type of an expression.
    fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
        self.result.type_of_expr.insert(expr, ty);
    }

    /// Records the adjustments applied to an expression.
    fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) {
        self.result.expr_adjustments.insert(expr, adjustments);
    }

    /// Records which function a method call resolved to, with its substitution.
    fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: Substitution) {
        self.result.method_resolutions.insert(expr, (func, subst));
    }

    /// Records which variant a struct literal/pattern resolved to.
    fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
        self.result.variant_resolutions.insert(id, variant);
    }

    /// Records which associated item a path resolved to.
    fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
        self.result.assoc_resolutions.insert(id, item);
    }

    /// Records the inferred type of a pattern.
    fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
        self.result.type_of_pat.insert(pat, ty);
    }

    /// Appends a diagnostic to the result.
    fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
        self.result.diagnostics.push(diagnostic);
    }
+
    /// Lowers a `TypeRef` to a `Ty`, replacing unknowns with inference
    /// variables and normalizing associated types, using the given
    /// `impl Trait` lowering mode.
    fn make_ty_with_mode(
        &mut self,
        type_ref: &TypeRef,
        impl_trait_mode: ImplTraitLoweringMode,
    ) -> Ty {
        // FIXME use right resolver for block
        let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
            .with_impl_trait_mode(impl_trait_mode);
        let ty = ctx.lower_ty(type_ref);
        let ty = self.insert_type_vars(ty);
        self.normalize_associated_types_in(ty)
    }

    /// Like `make_ty_with_mode`, for positions where `impl Trait` is disallowed.
    fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
        self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed)
    }

    /// The interned error/unknown type.
    fn err_ty(&self) -> Ty {
        self.result.standard_types.unknown.clone()
    }
+
+ /// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
+ fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
+ let data = c.data(Interner);
+ match data.value {
+ ConstValue::Concrete(cc) => match cc.interned {
+ hir_def::type_ref::ConstScalar::Unknown => {
+ self.table.new_const_var(data.ty.clone())
+ }
+ _ => c,
+ },
+ _ => c,
+ }
+ }
+
+ /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
+ fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
+ match ty.kind(Interner) {
+ TyKind::Error => self.table.new_type_var(),
+ TyKind::InferenceVar(..) => {
+ let ty_resolved = self.resolve_ty_shallow(&ty);
+ if ty_resolved.is_unknown() {
+ self.table.new_type_var()
+ } else {
+ ty
+ }
+ }
+ _ => ty,
+ }
+ }
+
    /// Recursively replaces unknown types and consts within `ty` with fresh
    /// inference variables (shallow replacement applied at every position).
    fn insert_type_vars(&mut self, ty: Ty) -> Ty {
        fold_tys_and_consts(
            ty,
            |x, _| match x {
                Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
                Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
            },
            DebruijnIndex::INNERMOST,
        )
    }

    /// Tries to discharge pending trait obligations with what is known so far.
    fn resolve_obligations_as_possible(&mut self) {
        self.table.resolve_obligations_as_possible();
    }

    /// Registers a new obligation to be proven later.
    fn push_obligation(&mut self, o: DomainGoal) {
        self.table.register_obligation(o.cast(Interner));
    }

    /// Attempts to unify two types; returns whether unification succeeded.
    fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
        self.table.unify(ty1, ty2)
    }
+
    /// Recurses through the given type, normalizing associated types mentioned
    /// in it by replacing them by type variables and registering obligations to
    /// resolve later. This should be done once for every type we get from some
    /// type annotation (e.g. from a let type annotation, field type or function
    /// call). `make_ty` handles this already, but e.g. for field types we need
    /// to do it as well.
    fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
        self.table.normalize_associated_types_in(ty)
    }

    /// Resolves `ty` one level deep, after first discharging what obligations we can.
    fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
        self.resolve_obligations_as_possible();
        self.table.resolve_ty_shallow(ty)
    }

    /// Shorthand for `resolve_associated_type_with_params` with no extra parameters.
    fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
        self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
    }
+
    /// Returns a fresh type variable constrained to equal the projection
    /// `<inner_ty as Trait<params...>>::AssocTy`, registering the trait and
    /// alias-eq obligations that pin it down.
    fn resolve_associated_type_with_params(
        &mut self,
        inner_ty: Ty,
        assoc_ty: Option<TypeAliasId>,
        params: &[GenericArg],
    ) -> Ty {
        match assoc_ty {
            Some(res_assoc_ty) => {
                let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
                    hir_def::ItemContainerId::TraitId(trait_) => trait_,
                    _ => panic!("resolve_associated_type called with non-associated type"),
                };
                let ty = self.table.new_type_var();
                let mut param_iter = params.iter().cloned();
                // Build `inner_ty: Trait<params...>`; `fill` consumes the
                // caller-supplied params in order for the remaining arguments.
                let trait_ref = TyBuilder::trait_ref(self.db, trait_)
                    .push(inner_ty)
                    .fill(|_| param_iter.next().unwrap())
                    .build();
                // Constrain the fresh variable to the projected associated type.
                let alias_eq = AliasEq {
                    alias: AliasTy::Projection(ProjectionTy {
                        associated_ty_id: to_assoc_type_id(res_assoc_ty),
                        substitution: trait_ref.substitution.clone(),
                    }),
                    ty: ty.clone(),
                };
                self.push_obligation(trait_ref.cast(Interner));
                self.push_obligation(alias_eq.cast(Interner));
                ty
            }
            None => self.err_ty(),
        }
    }
+
    /// Resolves a path (from a struct literal/pattern) to its type and, when
    /// it names a struct, union or enum variant, that variant. `value_ns`
    /// controls whether the value namespace is tried first (e.g. for
    /// tuple-struct literals).
    fn resolve_variant(&mut self, path: Option<&Path>, value_ns: bool) -> (Ty, Option<VariantId>) {
        let path = match path {
            Some(path) => path,
            None => return (self.err_ty(), None),
        };
        let resolver = &self.resolver;
        let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
        // FIXME: this should resolve assoc items as well, see this example:
        // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
        let (resolution, unresolved) = if value_ns {
            match resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
                Some(ResolveValueResult::ValueNs(value)) => match value {
                    ValueNs::EnumVariantId(var) => {
                        let substs = ctx.substs_from_path(path, var.into(), true);
                        let ty = self.db.ty(var.parent.into());
                        let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                        return (ty, Some(var.into()));
                    }
                    ValueNs::StructId(strukt) => {
                        let substs = ctx.substs_from_path(path, strukt.into(), true);
                        let ty = self.db.ty(strukt.into());
                        let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                        return (ty, Some(strukt.into()));
                    }
                    _ => return (self.err_ty(), None),
                },
                // Partially resolved in the value namespace: fall through to
                // the type-namespace handling with the unresolved remainder.
                Some(ResolveValueResult::Partial(typens, unresolved)) => (typens, Some(unresolved)),
                None => return (self.err_ty(), None),
            }
        } else {
            match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
                Some(it) => it,
                None => return (self.err_ty(), None),
            }
        };
        return match resolution {
            TypeNs::AdtId(AdtId::StructId(strukt)) => {
                let substs = ctx.substs_from_path(path, strukt.into(), true);
                let ty = self.db.ty(strukt.into());
                let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
            }
            TypeNs::AdtId(AdtId::UnionId(u)) => {
                let substs = ctx.substs_from_path(path, u.into(), true);
                let ty = self.db.ty(u.into());
                let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                forbid_unresolved_segments((ty, Some(u.into())), unresolved)
            }
            TypeNs::EnumVariantId(var) => {
                let substs = ctx.substs_from_path(path, var.into(), true);
                let ty = self.db.ty(var.parent.into());
                let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                forbid_unresolved_segments((ty, Some(var.into())), unresolved)
            }
            TypeNs::SelfType(impl_id) => {
                let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
                let substs = generics.placeholder_subst(self.db);
                let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
                self.resolve_variant_on_alias(ty, unresolved, path)
            }
            TypeNs::TypeAliasId(it) => {
                let ty = TyBuilder::def_ty(self.db, it.into())
                    .fill_with_inference_vars(&mut self.table)
                    .build();
                self.resolve_variant_on_alias(ty, unresolved, path)
            }
            TypeNs::AdtSelfType(_) => {
                // FIXME this could happen in array size expressions, once we're checking them
                (self.err_ty(), None)
            }
            TypeNs::GenericParam(_) => {
                // FIXME potentially resolve assoc type
                (self.err_ty(), None)
            }
            TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
                // FIXME diagnostic
                (self.err_ty(), None)
            }
        };

        // Helper: a variant resolution with leftover path segments is invalid.
        fn forbid_unresolved_segments(
            result: (Ty, Option<VariantId>),
            unresolved: Option<usize>,
        ) -> (Ty, Option<VariantId>) {
            if unresolved.is_none() {
                result
            } else {
                // FIXME diagnostic
                (TyKind::Error.intern(Interner), None)
            }
        }
    }
+
    /// Continues variant resolution after the path resolved to `Self`/a type
    /// alias: `ty` is the aliased type and `unresolved` the index of the first
    /// path segment still to be resolved (e.g. an enum variant name).
    fn resolve_variant_on_alias(
        &mut self,
        ty: Ty,
        unresolved: Option<usize>,
        path: &Path,
    ) -> (Ty, Option<VariantId>) {
        // Number of path segments left after the resolved prefix.
        let remaining = unresolved.map(|x| path.segments().skip(x).len()).filter(|x| x > &0);
        match remaining {
            None => {
                // Fully resolved: the aliased type itself must be the variant.
                let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
                    AdtId::StructId(s) => Some(VariantId::StructId(s)),
                    AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
                    AdtId::EnumId(_) => {
                        // FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
                        None
                    }
                });
                (ty, variant)
            }
            Some(1) => {
                let segment = path.mod_path().segments().last().unwrap();
                // this could be an enum variant or associated type
                if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
                    let enum_data = self.db.enum_data(enum_id);
                    if let Some(local_id) = enum_data.variant(segment) {
                        let variant = EnumVariantId { parent: enum_id, local_id };
                        return (ty, Some(variant.into()));
                    }
                }
                // FIXME potentially resolve assoc type
                (self.err_ty(), None)
            }
            Some(_) => {
                // FIXME diagnostic
                (self.err_ty(), None)
            }
        }
    }
+
    /// Looks up a lang item by name in the current crate's lang-item table.
    fn resolve_lang_item(&self, name: Name) -> Option<LangItemTarget> {
        let krate = self.resolver.krate();
        self.db.lang_item(krate, name.to_smol_str())
    }

    /// `<_ as IntoIterator>::Item`, used for `for` loop desugaring.
    fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
        let path = path![core::iter::IntoIterator];
        let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
        self.db.trait_data(trait_).associated_type_by_name(&name![Item])
    }

    /// The success type of `core::ops::Try`, used for `?` desugaring.
    fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
        // FIXME resolve via lang_item once try v2 is stable
        let path = path![core::ops::Try];
        let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
        let trait_data = self.db.trait_data(trait_);
        trait_data
            // FIXME remove once try v2 is stable
            .associated_type_by_name(&name![Ok])
            .or_else(|| trait_data.associated_type_by_name(&name![Output]))
    }

    /// `<_ as Neg>::Output`, for unary `-`.
    fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
        let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?;
        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
    }

    /// `<_ as Not>::Output`, for unary `!`.
    fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
        let trait_ = self.resolve_lang_item(name![not])?.as_trait()?;
        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
    }

    /// `<_ as Future>::Output`, for `.await`.
    fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
        let trait_ = self.resolve_lang_item(name![future_trait])?.as_trait()?;
        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
    }

    /// The `Box` ADT, via the `owned_box` lang item.
    fn resolve_boxed_box(&self) -> Option<AdtId> {
        let struct_ = self.resolve_lang_item(name![owned_box])?.as_struct()?;
        Some(struct_.into())
    }

    // The following `resolve_range*` helpers resolve the `core::ops` range
    // structs used when inferring range expressions (`a..b`, `..`, etc.).

    fn resolve_range_full(&self) -> Option<AdtId> {
        let path = path![core::ops::RangeFull];
        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
        Some(struct_.into())
    }

    fn resolve_range(&self) -> Option<AdtId> {
        let path = path![core::ops::Range];
        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
        Some(struct_.into())
    }

    fn resolve_range_inclusive(&self) -> Option<AdtId> {
        let path = path![core::ops::RangeInclusive];
        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
        Some(struct_.into())
    }

    fn resolve_range_from(&self) -> Option<AdtId> {
        let path = path![core::ops::RangeFrom];
        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
        Some(struct_.into())
    }

    fn resolve_range_to(&self) -> Option<AdtId> {
        let path = path![core::ops::RangeTo];
        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
        Some(struct_.into())
    }

    fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
        let path = path![core::ops::RangeToInclusive];
        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
        Some(struct_.into())
    }

    /// The `Index` trait, via the `index` lang item.
    fn resolve_ops_index(&self) -> Option<TraitId> {
        self.resolve_lang_item(name![index])?.as_trait()
    }

    /// `<_ as Index<_>>::Output`, for indexing expressions.
    fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
        let trait_ = self.resolve_ops_index()?;
        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
    }
+}
+
/// When inferring an expression, we propagate downward whatever type hint we
/// are able in the form of an `Expectation`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub(crate) enum Expectation {
    /// No expectation at all.
    None,
    /// The expression must have exactly this type.
    HasType(Ty),
    // Castable(Ty), // rustc has this, we currently just don't propagate an expectation for casts
    /// A weaker hint for unsized rvalues (slices, `str`, trait objects): the
    /// type guides inference but the expression need not equal it exactly.
    RValueLikeUnsized(Ty),
}
+
+impl Expectation {
+ /// The expectation that the type of the expression needs to equal the given
+ /// type.
+ fn has_type(ty: Ty) -> Self {
+ if ty.is_unknown() {
+ // FIXME: get rid of this?
+ Expectation::None
+ } else {
+ Expectation::HasType(ty)
+ }
+ }
+
+ /// `HasType` when a type is provided (and known), `None` otherwise.
+ fn from_option(ty: Option<Ty>) -> Self {
+ ty.map_or(Expectation::None, Expectation::HasType)
+ }
+
+ /// The following explanation is copied straight from rustc:
+ /// Provides an expectation for an rvalue expression given an *optional*
+ /// hint, which is not required for type safety (the resulting type might
+ /// be checked higher up, as is the case with `&expr` and `box expr`), but
+ /// is useful in determining the concrete type.
+ ///
+ /// The primary use case is where the expected type is a fat pointer,
+ /// like `&[isize]`. For example, consider the following statement:
+ ///
+ /// let x: &[isize] = &[1, 2, 3];
+ ///
+ /// In this case, the expected type for the `&[1, 2, 3]` expression is
+ /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
+ /// expectation `ExpectHasType([isize])`, that would be too strong --
+ /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
+ /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
+ /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
+ /// which still is useful, because it informs integer literals and the like.
+ /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
+ /// for examples of where this comes up.
+ fn rvalue_hint(table: &mut unify::InferenceTable<'_>, ty: Ty) -> Self {
+ // FIXME: do struct_tail_without_normalization
+ match table.resolve_ty_shallow(&ty).kind(Interner) {
+ TyKind::Slice(_) | TyKind::Str | TyKind::Dyn(_) => Expectation::RValueLikeUnsized(ty),
+ _ => Expectation::has_type(ty),
+ }
+ }
+
+ /// This expresses no expectation on the type.
+ fn none() -> Self {
+ Expectation::None
+ }
+
+ /// Shallowly resolve the carried type through the inference table.
+ fn resolve(&self, table: &mut unify::InferenceTable<'_>) -> Expectation {
+ match self {
+ Expectation::None => Expectation::None,
+ Expectation::HasType(t) => Expectation::HasType(table.resolve_ty_shallow(t)),
+ Expectation::RValueLikeUnsized(t) => {
+ Expectation::RValueLikeUnsized(table.resolve_ty_shallow(t))
+ }
+ }
+ }
+
+ /// The expected type, if any (both `HasType` and the weaker
+ /// `RValueLikeUnsized` hint count), shallowly resolved.
+ fn to_option(&self, table: &mut unify::InferenceTable<'_>) -> Option<Ty> {
+ match self.resolve(table) {
+ Expectation::None => None,
+ Expectation::HasType(t) |
+ // Expectation::Castable(t) |
+ Expectation::RValueLikeUnsized(t) => Some(t),
+ }
+ }
+
+ /// The expected type only if it is a hard `HasType` constraint; the weaker
+ /// hints are deliberately ignored here.
+ fn only_has_type(&self, table: &mut unify::InferenceTable<'_>) -> Option<Ty> {
+ match self {
+ Expectation::HasType(t) => Some(table.resolve_ty_shallow(t)),
+ // Expectation::Castable(_) |
+ Expectation::RValueLikeUnsized(_) | Expectation::None => None,
+ }
+ }
+
+ /// Comment copied from rustc:
+ /// Disregard "castable to" expectations because they
+ /// can lead us astray. Consider for example `if cond
+ /// {22} else {c} as u8` -- if we propagate the
+ /// "castable to u8" constraint to 22, it will pick the
+ /// type 22u8, which is overly constrained (c might not
+ /// be a u8). In effect, the problem is that the
+ /// "castable to" expectation is not the tightest thing
+ /// we can say, so we want to drop it in this case.
+ /// The tightest thing we can say is "must unify with
+ /// else branch". Note that in the case of a "has type"
+ /// constraint, this limitation does not hold.
+ ///
+ /// If the expected type is just a type variable, then don't use
+ /// an expected type. Otherwise, we might write parts of the type
+ /// when checking the 'then' block which are incompatible with the
+ /// 'else' branch.
+ fn adjust_for_branches(&self, table: &mut unify::InferenceTable<'_>) -> Expectation {
+ match self {
+ Expectation::HasType(ety) => {
+ let ety = table.resolve_ty_shallow(ety);
+ if !ety.is_ty_var() {
+ Expectation::HasType(ety)
+ } else {
+ Expectation::None
+ }
+ }
+ Expectation::RValueLikeUnsized(ety) => Expectation::RValueLikeUnsized(ety.clone()),
+ _ => Expectation::None,
+ }
+ }
+}
+
+/// Whether the code being inferred definitely diverges (e.g. contains an
+/// unconditional `return`/`break`/`!`-typed expression).
+/// The variant order matters: the `BitAnd`/`BitOr` impls below use the derived
+/// `Ord` (min/max) to combine values, so `Maybe` must sort before `Always`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+enum Diverges {
+ Maybe,
+ Always,
+}
+
+impl Diverges {
+ /// Whether control flow is known to always diverge.
+ fn is_always(self) -> bool {
+ matches!(self, Diverges::Always)
+ }
+ }
+
+impl std::ops::BitAnd for Diverges {
+ type Output = Self;
+ /// Both paths must diverge for the combination to diverge: keep the lesser.
+ fn bitand(self, rhs: Self) -> Self {
+ if self <= rhs { self } else { rhs }
+ }
+}
+
+impl std::ops::BitOr for Diverges {
+ type Output = Self;
+ /// Either path diverging makes the combination diverge: keep the greater.
+ fn bitor(self, rhs: Self) -> Self {
+ if self >= rhs { self } else { rhs }
+ }
+}
+
+impl std::ops::BitAndAssign for Diverges {
+ /// In-place version of `BitAnd`.
+ fn bitand_assign(&mut self, rhs: Self) {
+ let merged = *self & rhs;
+ *self = merged;
+ }
+}
+
+impl std::ops::BitOrAssign for Diverges {
+ /// In-place version of `BitOr`.
+ fn bitor_assign(&mut self, rhs: Self) {
+ let merged = *self | rhs;
+ *self = merged;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
new file mode 100644
index 000000000..3ead92909
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -0,0 +1,82 @@
+//! Inference of closure parameter types based on the closure's expected type.
+
+use chalk_ir::{cast::Cast, AliasEq, AliasTy, FnSubst, WhereClause};
+use hir_def::{expr::ExprId, HasModule};
+use smallvec::SmallVec;
+
+use crate::{
+ to_chalk_trait_id, utils, ChalkTraitId, DynTy, FnPointer, FnSig, Interner, Substitution, Ty,
+ TyExt, TyKind,
+};
+
+use super::{Expectation, InferenceContext};
+
+impl InferenceContext<'_> {
+ /// Use the expected type of a closure expression to pin down its
+ /// signature: first via ordinary coercion to the expectation, then — if
+ /// the expectation is a `dyn Fn*` type — by unifying the closure's
+ /// signature with the one extracted from the trait object's bounds.
+ pub(super) fn deduce_closure_type_from_expectations(
+ &mut self,
+ closure_expr: ExprId,
+ closure_ty: &Ty,
+ sig_ty: &Ty,
+ expectation: &Expectation,
+ ) {
+ let expected_ty = match expectation.to_option(&mut self.table) {
+ Some(ty) => ty,
+ None => return,
+ };
+
+ // Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here.
+ let _ = self.coerce(Some(closure_expr), closure_ty, &expected_ty);
+
+ // Deduction based on the expected `dyn Fn` is done separately.
+ if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) {
+ if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) {
+ let expected_sig_ty = TyKind::Function(sig).intern(Interner);
+
+ self.unify(sig_ty, &expected_sig_ty);
+ }
+ }
+ }
+
+ /// Extract a function signature from a `dyn Fn*` trait object's bounds,
+ /// if one of them is an `FnX::Output` projection.
+ fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option<FnPointer> {
+ // Search for a predicate like `<$self as FnX<Args>>::Output == Ret`
+
+ let fn_traits: SmallVec<[ChalkTraitId; 3]> =
+ utils::fn_traits(self.db.upcast(), self.owner.module(self.db.upcast()).krate())
+ .map(to_chalk_trait_id)
+ .collect();
+
+ // The bound's self type is irrelevant for signature extraction, so
+ // substitute an error type for it.
+ let self_ty = TyKind::Error.intern(Interner);
+ let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]);
+ for bound in bounds.iter(Interner) {
+ // NOTE(skip_binders): the extracted types are rebound by the returned `FnPointer`
+ if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
+ bound.skip_binders()
+ {
+ let assoc_data = self.db.associated_ty_data(projection.associated_ty_id);
+ if !fn_traits.contains(&assoc_data.trait_id) {
+ // NOTE(review): this aborts the whole search on the first
+ // non-`FnX` projection bound rather than skipping it —
+ // confirm that this early bail-out is intended.
+ return None;
+ }
+
+ // Skip `Self`, get the type argument.
+ let arg = projection.substitution.as_slice(Interner).get(1)?;
+ if let Some(subst) = arg.ty(Interner)?.as_tuple() {
+ // `FnX` packs the parameter types as a tuple; unpack them
+ // into the signature, then append the return type.
+ let generic_args = subst.as_slice(Interner);
+ let mut sig_tys = Vec::new();
+ for arg in generic_args {
+ sig_tys.push(arg.ty(Interner)?.clone());
+ }
+ sig_tys.push(ty.clone());
+
+ cov_mark::hit!(dyn_fn_param_informs_call_site_closure_signature);
+ return Some(FnPointer {
+ num_binders: bound.len(Interner),
+ sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
+ substitution: FnSubst(Substitution::from_iter(Interner, sig_tys)),
+ });
+ }
+ }
+ }
+
+ None
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
new file mode 100644
index 000000000..f54440bf5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -0,0 +1,673 @@
+//! Coercion logic. Coercions are certain type conversions that can implicitly
+//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
+//! like going from `&Vec<T>` to `&[T]`.
+//!
+//! See <https://doc.rust-lang.org/nomicon/coercions.html> and
+//! `librustc_typeck/check/coercion.rs`.
+
+use std::{iter, sync::Arc};
+
+use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyVariableKind};
+use hir_def::{expr::ExprId, lang_item::LangItemTarget};
+use stdx::always;
+use syntax::SmolStr;
+
+use crate::{
+ autoderef::{Autoderef, AutoderefKind},
+ db::HirDatabase,
+ infer::{
+ Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
+ TypeError, TypeMismatch,
+ },
+ static_lifetime, Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner,
+ Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::unify::InferenceTable;
+
+pub(crate) type CoerceResult = Result<InferOk<(Vec<Adjustment>, Ty)>, TypeError>;
+
+/// Coercion succeeded without any adjustment steps, i.e. coerce `x -> x`.
+fn identity(_: Ty) -> Vec<Adjustment> {
+ Vec::new()
+}
+
+/// Build a one-step adjustment of the given kind, targeting whatever type the
+/// coercion settles on.
+fn simple(kind: Adjust) -> impl FnOnce(Ty) -> Vec<Adjustment> {
+ move |target| {
+ let step = Adjustment { kind, target };
+ vec![step]
+ }
+}
+
+/// Bundle the adjustments, target type and pending goals into a successful
+/// `CoerceResult`. This always returns `Ok(...)`.
+fn success(
+ adj: Vec<Adjustment>,
+ target: Ty,
+ goals: Vec<InEnvironment<Goal<Interner>>>,
+) -> CoerceResult {
+ let value = (adj, target);
+ Ok(InferOk { value, goals })
+}
+
+/// Accumulates the merged type of several branch arms (e.g. `if`/`else`,
+/// `match` arms), coercing each new branch type into the running expectation.
+#[derive(Clone, Debug)]
+pub(super) struct CoerceMany {
+ // The merged expected type of the branches seen so far.
+ expected_ty: Ty,
+}
+
+impl CoerceMany {
+ /// Start merging branches with `expected` as the initial expected type.
+ pub(super) fn new(expected: Ty) -> Self {
+ CoerceMany { expected_ty: expected }
+ }
+
+ /// Merge two types from different branches, with possible coercion.
+ ///
+ /// Mostly this means trying to coerce one to the other, but
+ /// - if we have two function types for different functions or closures, we need to
+ /// coerce both to function pointers;
+ /// - if we were concerned with lifetime subtyping, we'd need to look for a
+ /// least upper bound.
+ pub(super) fn coerce(
+ &mut self,
+ ctx: &mut InferenceContext<'_>,
+ expr: Option<ExprId>,
+ expr_ty: &Ty,
+ ) {
+ let expr_ty = ctx.resolve_ty_shallow(expr_ty);
+ self.expected_ty = ctx.resolve_ty_shallow(&self.expected_ty);
+
+ // Special case: two function types. Try to coerce both to
+ // pointers to have a chance at getting a match. See
+ // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
+ let sig = match (self.expected_ty.kind(Interner), expr_ty.kind(Interner)) {
+ (TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
+ // FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
+ // we should be coercing the closure to a fn pointer of the safety of the FnDef
+ cov_mark::hit!(coerce_fn_reification);
+ let sig =
+ self.expected_ty.callable_sig(ctx.db).expect("FnDef without callable sig");
+ Some(sig)
+ }
+ _ => None,
+ };
+ if let Some(sig) = sig {
+ let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
+ let result1 = ctx.table.coerce_inner(self.expected_ty.clone(), &target_ty);
+ let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
+ if let (Ok(result1), Ok(result2)) = (result1, result2) {
+ ctx.table.register_infer_ok(result1);
+ ctx.table.register_infer_ok(result2);
+ // The assignment expression evaluates to `()`: record the
+ // fn-pointer type as the merged type, then return.
+ return self.expected_ty = target_ty;
+ }
+ }
+
+ // It might not seem like it, but order is important here: If the expected
+ // type is a type variable and the new one is `!`, trying it the other
+ // way around first would mean we make the type variable `!`, instead of
+ // just marking it as possibly diverging.
+ if ctx.coerce(expr, &expr_ty, &self.expected_ty).is_ok() {
+ /* self.expected_ty is already correct */
+ } else if ctx.coerce(expr, &self.expected_ty, &expr_ty).is_ok() {
+ self.expected_ty = expr_ty;
+ } else {
+ if let Some(id) = expr {
+ ctx.result.type_mismatches.insert(
+ id.into(),
+ TypeMismatch { expected: self.expected_ty.clone(), actual: expr_ty },
+ );
+ }
+ cov_mark::hit!(coerce_merge_fail_fallback);
+ /* self.expected_ty is already correct */
+ }
+ }
+
+ /// The final merged type of all coerced branches.
+ pub(super) fn complete(self) -> Ty {
+ self.expected_ty
+ }
+}
+
+/// Returns whether the first canonical type could be coerced to the second
+/// in the given trait environment.
+pub fn could_coerce(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+ matches!(coerce(db, env, tys), Ok(_))
+}
+
+/// Coerce `tys.value.0` to `tys.value.1` in a fresh inference table,
+/// returning the adjustments and the resulting type with any still-unresolved
+/// inference variables mapped back to the original canonical bound variables.
+pub(crate) fn coerce(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+ let mut table = InferenceTable::new(db, env);
+ let vars = table.fresh_subst(tys.binders.as_slice(Interner));
+ let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+ let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
+ let (adjustments, ty) = table.coerce(&ty1_with_vars, &ty2_with_vars)?;
+ // default any type vars that weren't unified back to their original bound vars
+ // (kind of hacky)
+ let find_var = |iv| {
+ vars.iter(Interner).position(|v| match v.interned() {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ } == Some(iv))
+ };
+ let fallback = |iv, kind, default, binder| match kind {
+ chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Lifetime => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Const(ty) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
+ };
+ // FIXME also map the types in the adjustments
+ Ok((adjustments, table.resolve_with_fallback(ty, &fallback)))
+}
+
+impl<'a> InferenceContext<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed. Records the adjustment
+ /// steps on `expr` when one is supplied.
+ pub(super) fn coerce(
+ &mut self,
+ expr: Option<ExprId>,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ ) -> Result<Ty, TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ let (adjustments, ty) = self.table.coerce(&from_ty, &to_ty)?;
+ match expr {
+ Some(expr) => self.write_expr_adj(expr, adjustments),
+ None => {}
+ }
+ Ok(ty)
+ }
+}
+
+impl<'a> InferenceTable<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed. On success, registers the
+ /// produced goals and returns the adjustments plus the resulting type.
+ pub(crate) fn coerce(
+ &mut self,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ ) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ // FIXME: report coercion errors in more detail instead of bubbling up.
+ let InferOk { value: (adjustments, ty), goals } = self.coerce_inner(from_ty, &to_ty)?;
+ self.register_infer_ok(InferOk { value: (), goals });
+ Ok((adjustments, ty))
+ }
+
+ /// Core coercion logic: handles `!`-to-anything, unsized coercions,
+ /// auto-borrowing towards raw pointers/references, and the fn-item /
+ /// fn-pointer / closure special cases, falling back to plain unification.
+ fn coerce_inner(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult {
+ if from_ty.is_never() {
+ // Subtle: If we are coercing from `!` to `?T`, where `?T` is an unbound
+ // type variable, we want `?T` to fallback to `!` if not
+ // otherwise constrained. An example where this arises:
+ //
+ // let _: Option<?T> = Some({ return; });
+ //
+ // here, we would coerce from `!` to `?T`.
+ if let TyKind::InferenceVar(tv, TyVariableKind::General) = to_ty.kind(Interner) {
+ self.set_diverging(*tv, true);
+ }
+ return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]);
+ }
+
+ // Consider coercing the subtype to a DST
+ if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
+ return Ok(ret);
+ }
+
+ // Examine the supertype and consider auto-borrowing.
+ match to_ty.kind(Interner) {
+ TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt),
+ TyKind::Ref(mt, _, _) => return self.coerce_ref(from_ty, to_ty, *mt),
+ _ => {}
+ }
+
+ match from_ty.kind(Interner) {
+ TyKind::FnDef(..) => {
+ // Function items are coercible to any closure
+ // type; function pointers are not (that would
+ // require double indirection).
+ // Additionally, we permit coercion of function
+ // items to drop the unsafe qualifier.
+ self.coerce_from_fn_item(from_ty, to_ty)
+ }
+ TyKind::Function(from_fn_ptr) => {
+ // We permit coercion of fn pointers to drop the
+ // unsafe qualifier.
+ self.coerce_from_fn_pointer(from_ty.clone(), from_fn_ptr, to_ty)
+ }
+ TyKind::Closure(_, from_substs) => {
+ // Non-capturing closures are coercible to
+ // function pointers or unsafe function pointers.
+ // It cannot convert closures that require unsafe.
+ self.coerce_closure_to_fn(from_ty.clone(), from_substs, to_ty)
+ }
+ _ => {
+ // Otherwise, just use unification rules.
+ self.unify_and(&from_ty, to_ty, identity)
+ }
+ }
+ }
+
+ /// Unify two types (using sub or lub) and produce a specific coercion,
+ /// built by `f` from the (source) type.
+ fn unify_and<F>(&mut self, t1: &Ty, t2: &Ty, f: F) -> CoerceResult
+ where
+ F: FnOnce(Ty) -> Vec<Adjustment>,
+ {
+ let InferOk { goals, .. } = self.try_unify(t1, t2)?;
+ success(f(t1.clone()), t1.clone(), goals)
+ }
+
+ /// Coerce towards a raw pointer target: `&T`/`&mut T` -> `*const T`/`*mut T`
+ /// and `*mut T` -> `*const T` (weakening mutability only).
+ fn coerce_ptr(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
+ let (is_ref, from_mt, from_inner) = match from_ty.kind(Interner) {
+ TyKind::Ref(mt, _, ty) => (true, mt, ty),
+ TyKind::Raw(mt, ty) => (false, mt, ty),
+ _ => return self.unify_and(&from_ty, to_ty, identity),
+ };
+
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ // Check that the types which they point at are compatible.
+ let from_raw = TyKind::Raw(to_mt, from_inner.clone()).intern(Interner);
+
+ // Although references and unsafe ptrs have the same
+ // representation, we still register an Adjust::DerefRef so that
+ // regionck knows that the region for `a` must be valid here.
+ if is_ref {
+ self.unify_and(&from_raw, to_ty, |target| {
+ vec![
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)), target },
+ ]
+ })
+ } else if *from_mt != to_mt {
+ self.unify_and(
+ &from_raw,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::MutToConstPointer)),
+ )
+ } else {
+ self.unify_and(&from_raw, to_ty, identity)
+ }
+ }
+
+ /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
+ /// To match `A` with `B`, autoderef will be performed,
+ /// calling `deref`/`deref_mut` where necessary.
+ fn coerce_ref(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
+ let from_mt = match from_ty.kind(Interner) {
+ &TyKind::Ref(mt, _, _) => {
+ coerce_mutabilities(mt, to_mt)?;
+ mt
+ }
+ _ => return self.unify_and(&from_ty, to_ty, identity),
+ };
+
+ // NOTE: this code is mostly copied and adapted from rustc, and
+ // currently more complicated than necessary, carrying errors around
+ // etc.. This complication will become necessary when we actually track
+ // details of coercion errors though, so I think it's useful to leave
+ // the structure like it is.
+
+ let snapshot = self.snapshot();
+
+ let mut autoderef = Autoderef::new(self, from_ty.clone());
+ let mut first_error = None;
+ let mut found = None;
+
+ // Walk the deref chain of `from_ty`, looking for a step that unifies
+ // with the target after re-borrowing with the target's mutability.
+ while let Some((referent_ty, autoderefs)) = autoderef.next() {
+ if autoderefs == 0 {
+ // Don't let this pass, otherwise it would cause
+ // &T to autoref to &&T.
+ continue;
+ }
+
+ // At this point, we have deref'd `a` to `referent_ty`. So
+ // imagine we are coercing from `&'a mut Vec<T>` to `&'b mut [T]`.
+ // In the autoderef loop for `&'a mut Vec<T>`, we would get
+ // three callbacks:
+ //
+ // - `&'a mut Vec<T>` -- 0 derefs, just ignore it
+ // - `Vec<T>` -- 1 deref
+ // - `[T]` -- 2 deref
+ //
+ // At each point after the first callback, we want to
+ // check to see whether this would match out target type
+ // (`&'b mut [T]`) if we autoref'd it. We can't just
+ // compare the referent types, though, because we still
+ // have to consider the mutability. E.g., in the case
+ // we've been considering, we have an `&mut` reference, so
+ // the `T` in `[T]` needs to be unified with equality.
+ //
+ // Therefore, we construct reference types reflecting what
+ // the types will be after we do the final auto-ref and
+ // compare those. Note that this means we use the target
+ // mutability [1], since it may be that we are coercing
+ // from `&mut T` to `&U`.
+ let lt = static_lifetime(); // FIXME: handle lifetimes correctly, see rustc
+ let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner);
+ match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
+ Ok(result) => {
+ found = Some(result.map(|()| derefd_from_ty));
+ break;
+ }
+ Err(err) => {
+ if first_error.is_none() {
+ first_error = Some(err);
+ }
+ }
+ }
+ }
+
+ // Extract type or return an error. We return the first error
+ // we got, which should be from relating the "base" type
+ // (e.g., in example above, the failure from relating `Vec<T>`
+ // to the target type), since that should be the least
+ // confusing.
+ let InferOk { value: ty, goals } = match found {
+ Some(d) => d,
+ None => {
+ self.rollback_to(snapshot);
+ let err = first_error.expect("coerce_borrowed_pointer had no error");
+ return Err(err);
+ }
+ };
+ if ty == from_ty && from_mt == Mutability::Not && autoderef.step_count() == 1 {
+ // As a special case, if we would produce `&'a *x`, that's
+ // a total no-op. We end up with the type `&'a T` just as
+ // we started with. In that case, just skip it
+ // altogether. This is just an optimization.
+ //
+ // Note that for `&mut`, we DO want to reborrow --
+ // otherwise, this would be a move, which might be an
+ // error. For example `foo(self.x)` where `self` and
+ // `self.x` both have `&mut `type would be a move of
+ // `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
+ // which is a borrow.
+ always!(to_mt == Mutability::Not); // can only coerce &T -> &U
+ return success(vec![], ty, goals);
+ }
+
+ let mut adjustments = auto_deref_adjust_steps(&autoderef);
+ adjustments
+ .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() });
+
+ success(adjustments, ty, goals)
+ }
+
+ /// Attempts to coerce from the type of a Rust function item into a function pointer.
+ fn coerce_from_fn_item(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult {
+ match to_ty.kind(Interner) {
+ TyKind::Function(_) => {
+ let from_sig = from_ty.callable_sig(self.db).expect("FnDef had no sig");
+
+ // FIXME check ABI: Intrinsics are not coercible to function pointers
+ // FIXME Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396)
+
+ // FIXME rustc normalizes assoc types in the sig here, not sure if necessary
+
+ let from_sig = from_sig.to_fn_ptr();
+ let from_fn_pointer = TyKind::Function(from_sig.clone()).intern(Interner);
+ // Reify to a fn pointer; if the target is additionally `unsafe`,
+ // a second `UnsafeFnPointer` step is recorded by the helper.
+ let ok = self.coerce_from_safe_fn(
+ from_fn_pointer.clone(),
+ &from_sig,
+ to_ty,
+ |unsafe_ty| {
+ vec![
+ Adjustment {
+ kind: Adjust::Pointer(PointerCast::ReifyFnPointer),
+ target: from_fn_pointer,
+ },
+ Adjustment {
+ kind: Adjust::Pointer(PointerCast::UnsafeFnPointer),
+ target: unsafe_ty,
+ },
+ ]
+ },
+ simple(Adjust::Pointer(PointerCast::ReifyFnPointer)),
+ )?;
+
+ Ok(ok)
+ }
+ // Not a fn-pointer target: fall back to plain unification.
+ _ => self.unify_and(&from_ty, to_ty, identity),
+ }
+ }
+
+ /// Coerce a fn pointer towards another fn pointer, possibly dropping the
+ /// `safe` qualifier along the way.
+ fn coerce_from_fn_pointer(
+ &mut self,
+ from_ty: Ty,
+ from_f: &FnPointer,
+ to_ty: &Ty,
+ ) -> CoerceResult {
+ let to_unsafe = simple(Adjust::Pointer(PointerCast::UnsafeFnPointer));
+ self.coerce_from_safe_fn(from_ty, from_f, to_ty, to_unsafe, identity)
+ }
+
+ /// Shared helper for fn-item / fn-pointer coercion: if the target is an
+ /// `unsafe fn` pointer while the source is safe, unify against an
+ /// unsafe-ified copy of the source and build adjustments with `to_unsafe`;
+ /// otherwise unify directly and build them with `normal`.
+ fn coerce_from_safe_fn<F, G>(
+ &mut self,
+ from_ty: Ty,
+ from_fn_ptr: &FnPointer,
+ to_ty: &Ty,
+ to_unsafe: F,
+ normal: G,
+ ) -> CoerceResult
+ where
+ F: FnOnce(Ty) -> Vec<Adjustment>,
+ G: FnOnce(Ty) -> Vec<Adjustment>,
+ {
+ if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) {
+ if let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) =
+ (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety)
+ {
+ let from_unsafe =
+ TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner);
+ return self.unify_and(&from_unsafe, to_ty, to_unsafe);
+ }
+ }
+ self.unify_and(&from_ty, to_ty, normal)
+ }
+
+ /// Attempts to coerce from the type of a non-capturing closure into a
+ /// function pointer.
+ fn coerce_closure_to_fn(
+ &mut self,
+ from_ty: Ty,
+ from_substs: &Substitution,
+ to_ty: &Ty,
+ ) -> CoerceResult {
+ match to_ty.kind(Interner) {
+ // if from_substs is non-capturing (FIXME)
+ TyKind::Function(fn_ty) => {
+ // We coerce the closure, which has fn type
+ // `extern "rust-call" fn((arg0,arg1,...)) -> _`
+ // to
+ // `fn(arg0,arg1,...) -> _`
+ // or
+ // `unsafe fn(arg0,arg1,...) -> _`
+ // using the safety demanded by the target pointer type.
+ let safety = fn_ty.sig.safety;
+ let pointer_ty = coerce_closure_fn_ty(from_substs, safety);
+ self.unify_and(
+ &pointer_ty,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::ClosureFnPointer(safety))),
+ )
+ }
+ _ => self.unify_and(&from_ty, to_ty, identity),
+ }
+ }
+
+ /// Coerce a type using `from_ty: CoerceUnsized<ty_ty>`
+ ///
+ /// See: <https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html>
+ fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> CoerceResult {
+ // These 'if' statements require some explanation.
+ // The `CoerceUnsized` trait is special - it is only
+ // possible to write `impl CoerceUnsized<B> for A` where
+ // A and B have 'matching' fields. This rules out the following
+ // two types of blanket impls:
+ //
+ // `impl<T> CoerceUnsized<T> for SomeType`
+ // `impl<T> CoerceUnsized<SomeType> for T`
+ //
+ // Both of these trigger a special `CoerceUnsized`-related error (E0376)
+ //
+ // We can take advantage of this fact to avoid performing unnecessary work.
+ // If either `source` or `target` is a type variable, then any applicable impl
+ // would need to be generic over the self-type (`impl<T> CoerceUnsized<SomeType> for T`)
+ // or generic over the `CoerceUnsized` type parameter (`impl<T> CoerceUnsized<T> for
+ // SomeType`).
+ //
+ // However, these are exactly the kinds of impls which are forbidden by
+ // the compiler! Therefore, we can be sure that coercion will always fail
+ // when either the source or target type is a type variable. This allows us
+ // to skip performing any trait selection, and immediately bail out.
+ if from_ty.is_ty_var() {
+ return Err(TypeError);
+ }
+ if to_ty.is_ty_var() {
+ return Err(TypeError);
+ }
+
+ // Handle reborrows before trying to solve `Source: CoerceUnsized<Target>`.
+ // A `&T -> &U` / `&T -> *U` reborrow is recorded as a deref + autoref
+ // pair preceding the final unsize step.
+ let reborrow = match (from_ty.kind(Interner), to_ty.kind(Interner)) {
+ (TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ let lt = static_lifetime();
+ Some((
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)),
+ target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner),
+ },
+ ))
+ }
+ (TyKind::Ref(from_mt, _, from_inner), &TyKind::Raw(to_mt, _)) => {
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ Some((
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)),
+ target: TyKind::Raw(to_mt, from_inner.clone()).intern(Interner),
+ },
+ ))
+ }
+ _ => None,
+ };
+ let coerce_from =
+ reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone());
+
+ let krate = self.trait_env.krate;
+ let coerce_unsized_trait =
+ match self.db.lang_item(krate, SmolStr::new_inline("coerce_unsized")) {
+ Some(LangItemTarget::TraitId(trait_)) => trait_,
+ _ => return Err(TypeError),
+ };
+
+ let coerce_unsized_tref = {
+ let b = TyBuilder::trait_ref(self.db, coerce_unsized_trait);
+ if b.remaining() != 2 {
+ // The CoerceUnsized trait should have two generic params: Self and T.
+ return Err(TypeError);
+ }
+ b.push(coerce_from).push(to_ty.clone()).build()
+ };
+
+ let goal: InEnvironment<DomainGoal> =
+ InEnvironment::new(&self.trait_env.env, coerce_unsized_tref.cast(Interner));
+
+ let canonicalized = self.canonicalize(goal);
+
+ // FIXME: rustc's coerce_unsized is more specialized -- it only tries to
+ // solve `CoerceUnsized` and `Unsize` goals at this point and leaves the
+ // rest for later. Also, there's some logic about sized type variables.
+ // Need to find out in what cases this is necessary
+ let solution = self
+ .db
+ .trait_solve(krate, canonicalized.value.clone().cast(Interner))
+ .ok_or(TypeError)?;
+
+ match solution {
+ Solution::Unique(v) => {
+ canonicalized.apply_solution(
+ self,
+ Canonical {
+ binders: v.binders,
+ // FIXME handle constraints
+ value: v.value.subst,
+ },
+ );
+ }
+ Solution::Ambig(Guidance::Definite(subst)) => {
+ // FIXME need to record an obligation here
+ canonicalized.apply_solution(self, subst)
+ }
+ // FIXME actually we maybe should also accept unknown guidance here
+ _ => return Err(TypeError),
+ };
+ let unsize =
+ Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: to_ty.clone() };
+ let adjustments = match reborrow {
+ None => vec![unsize],
+ Some((deref, autoref)) => vec![deref, autoref, unsize],
+ };
+ success(adjustments, to_ty.clone(), vec![])
+ }
+}
+
+/// Build the fn-pointer type a non-capturing closure coerces to, carrying the
+/// requested safety. The closure's signature is stored at index 0 of its substs.
+fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty {
+ let closure_sig = closure_substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ if let TyKind::Function(fn_ty) = closure_sig.kind(Interner) {
+ let ptr = FnPointer {
+ num_binders: fn_ty.num_binders,
+ sig: FnSig { safety, ..fn_ty.sig },
+ substitution: fn_ty.substitution.clone(),
+ };
+ TyKind::Function(ptr).intern(Interner)
+ } else {
+ TyKind::Error.intern(Interner)
+ }
+}
+
+/// Return a copy of `fn_ty` whose safety is changed to `unsafe`.
+fn safe_to_unsafe_fn_ty(fn_ty: FnPointer) -> FnPointer {
+ let FnPointer { num_binders, sig, substitution } = fn_ty;
+ let sig = FnSig { safety: chalk_ir::Safety::Unsafe, ..sig };
+ FnPointer { num_binders, sig, substitution }
+}
+
+/// Mutability may be weakened (`mut` -> shared) during coercion, but a shared
+/// pointer/reference can never become mutable.
+fn coerce_mutabilities(from: Mutability, to: Mutability) -> Result<(), TypeError> {
+ match (from, to) {
+ (Mutability::Not, Mutability::Mut) => Err(TypeError),
+ _ => Ok(()),
+ }
+}
+
+/// Translate the deref steps recorded by `autoderef` into `Adjust::Deref`
+/// adjustments, pairing each step with the type it dereferences *to*.
+pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec<Adjustment> {
+ let steps = autoderef.steps();
+ // Each adjustment's target is the type *after* that deref, so shift the
+ // recorded step types by one and terminate with the final type.
+ let targets =
+ steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty()));
+ steps
+ .iter()
+ .map(|(kind, _source)| match kind {
+ // We do not know what kind of deref we require at this point yet
+ AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ AutoderefKind::Builtin => None,
+ })
+ .zip(targets)
+ .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })
+ .collect()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
new file mode 100644
index 000000000..d164e64a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -0,0 +1,1527 @@
+//! Type inference for expressions.
+
+use std::{
+ collections::hash_map::Entry,
+ iter::{repeat, repeat_with},
+ mem,
+};
+
+use chalk_ir::{
+ cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
+};
+use hir_def::{
+ expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp},
+ generics::TypeOrConstParamData,
+ path::{GenericArg, GenericArgs},
+ resolver::resolver_for_expr,
+ ConstParamId, FieldId, FunctionId, ItemContainerId, Lookup,
+};
+use hir_expand::name::{name, Name};
+use stdx::always;
+use syntax::ast::RangeOp;
+
+use crate::{
+ autoderef::{self, Autoderef},
+ consteval,
+ infer::coerce::CoerceMany,
+ lower::{
+ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
+ },
+ mapping::{from_chalk, ToChalk},
+ method_resolution::{self, VisibleFromModule},
+ primitive::{self, UintTy},
+ static_lifetime, to_chalk_trait_id,
+ utils::{generics, Generics},
+ AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar,
+ Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::{
+ coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges,
+ Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+};
+
+impl<'a> InferenceContext<'a> {
+ pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(tgt_expr, expected);
+ if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
+ let could_unify = self.unify(&ty, &expected_ty);
+ if !could_unify {
+ self.result.type_mismatches.insert(
+ tgt_expr.into(),
+ TypeMismatch { expected: expected_ty, actual: ty.clone() },
+ );
+ }
+ }
+ ty
+ }
+
+ /// Infer type of expression with possibly implicit coerce to the expected type.
+ /// Return the type after possible coercion.
+ pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(expr, expected);
+ if let Some(target) = expected.only_has_type(&mut self.table) {
+ match self.coerce(Some(expr), &ty, &target) {
+ Ok(res) => res,
+ Err(_) => {
+ self.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch { expected: target.clone(), actual: ty.clone() },
+ );
+ target
+ }
+ }
+ } else {
+ ty
+ }
+ }
+
    /// Core expression-inference worker: computes the type of `tgt_expr`,
    /// records it (plus adjustments, field/variant/method resolutions and
    /// diagnostics) in `self.result`, and tracks divergence in
    /// `self.diverges`. `infer_expr`/`infer_expr_coerce` layer unification
    /// or coercion against `expected` on top of this.
    fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
        self.db.unwind_if_cancelled();

        let ty = match &self.body[tgt_expr] {
            Expr::Missing => self.err_ty(),
            &Expr::If { condition, then_branch, else_branch } => {
                self.infer_expr(
                    condition,
                    &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
                );

                let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
                let mut both_arms_diverge = Diverges::Always;

                // Both arms are merged into a fresh variable via `CoerceMany`.
                let result_ty = self.table.new_type_var();
                let then_ty = self.infer_expr_inner(then_branch, expected);
                both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
                let mut coerce = CoerceMany::new(result_ty);
                coerce.coerce(self, Some(then_branch), &then_ty);
                let else_ty = match else_branch {
                    Some(else_branch) => self.infer_expr_inner(else_branch, expected),
                    None => TyBuilder::unit(),
                };
                both_arms_diverge &= self.diverges;
                // FIXME: create a synthetic `else {}` so we have something to refer to here instead of None?
                coerce.coerce(self, else_branch, &else_ty);

                self.diverges = condition_diverges | both_arms_diverge;

                coerce.complete()
            }
            &Expr::Let { pat, expr } => {
                let input_ty = self.infer_expr(expr, &Expectation::none());
                self.infer_pat(pat, &input_ty, BindingMode::default());
                TyKind::Scalar(Scalar::Bool).intern(Interner)
            }
            Expr::Block { statements, tail, label, id: _ } => {
                let old_resolver = mem::replace(
                    &mut self.resolver,
                    resolver_for_expr(self.db.upcast(), self.owner, tgt_expr),
                );
                let ty = match label {
                    // A labelled block is breakable: its type merges the block
                    // result with every `break 'label value` via `CoerceMany`.
                    Some(_) => {
                        let break_ty = self.table.new_type_var();
                        self.breakables.push(BreakableContext {
                            may_break: false,
                            coerce: CoerceMany::new(break_ty.clone()),
                            label: label.map(|label| self.body[label].name.clone()),
                        });
                        let ty = self.infer_block(
                            tgt_expr,
                            statements,
                            *tail,
                            &Expectation::has_type(break_ty),
                        );
                        let ctxt = self.breakables.pop().expect("breakable stack broken");
                        if ctxt.may_break {
                            ctxt.coerce.complete()
                        } else {
                            ty
                        }
                    }
                    None => self.infer_block(tgt_expr, statements, *tail, expected),
                };
                self.resolver = old_resolver;
                ty
            }
            Expr::Unsafe { body } | Expr::Const { body } => self.infer_expr(*body, expected),
            Expr::TryBlock { body } => {
                let _inner = self.infer_expr(*body, expected);
                // FIXME should be std::result::Result<{inner}, _>
                self.err_ty()
            }
            Expr::Async { body } => {
                let ret_ty = self.table.new_type_var();
                let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
                let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());

                let inner_ty = self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));

                self.diverges = prev_diverges;
                self.return_ty = prev_ret_ty;

                // Use the first type parameter as the output type of future.
                // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
                let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
                let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
                TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
                    .intern(Interner)
            }
            Expr::Loop { body, label } => {
                self.breakables.push(BreakableContext {
                    may_break: false,
                    coerce: CoerceMany::new(self.table.new_type_var()),
                    label: label.map(|label| self.body[label].name.clone()),
                });
                self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));

                let ctxt = self.breakables.pop().expect("breakable stack broken");

                // A `loop` only has a (non-`!`) type if some `break` supplies one.
                if ctxt.may_break {
                    self.diverges = Diverges::Maybe;
                    ctxt.coerce.complete()
                } else {
                    TyKind::Never.intern(Interner)
                }
            }
            Expr::While { condition, body, label } => {
                self.breakables.push(BreakableContext {
                    may_break: false,
                    coerce: CoerceMany::new(self.err_ty()),
                    label: label.map(|label| self.body[label].name.clone()),
                });
                self.infer_expr(
                    *condition,
                    &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
                );
                self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
                let _ctxt = self.breakables.pop().expect("breakable stack broken");
                // the body may not run, so it diverging doesn't mean we diverge
                self.diverges = Diverges::Maybe;
                TyBuilder::unit()
            }
            Expr::For { iterable, body, pat, label } => {
                let iterable_ty = self.infer_expr(*iterable, &Expectation::none());

                self.breakables.push(BreakableContext {
                    may_break: false,
                    coerce: CoerceMany::new(self.err_ty()),
                    label: label.map(|label| self.body[label].name.clone()),
                });
                // The loop pattern binds `<iterable as IntoIterator>::Item`.
                let pat_ty =
                    self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());

                self.infer_pat(*pat, &pat_ty, BindingMode::default());

                self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
                let _ctxt = self.breakables.pop().expect("breakable stack broken");
                // the body may not run, so it diverging doesn't mean we diverge
                self.diverges = Diverges::Maybe;
                TyBuilder::unit()
            }
            Expr::Closure { body, args, ret_type, arg_types } => {
                assert_eq!(args.len(), arg_types.len());

                let mut sig_tys = Vec::new();

                // collect explicitly written argument types
                for arg_type in arg_types.iter() {
                    let arg_ty = match arg_type {
                        Some(type_ref) => self.make_ty(type_ref),
                        None => self.table.new_type_var(),
                    };
                    sig_tys.push(arg_ty);
                }

                // add return type
                let ret_ty = match ret_type {
                    Some(type_ref) => self.make_ty(type_ref),
                    None => self.table.new_type_var(),
                };
                sig_tys.push(ret_ty.clone());
                let sig_ty = TyKind::Function(FnPointer {
                    num_binders: 0,
                    sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
                    substitution: FnSubst(
                        Substitution::from_iter(Interner, sig_tys.clone()).shifted_in(Interner),
                    ),
                })
                .intern(Interner);
                let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
                let closure_ty =
                    TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
                        .intern(Interner);

                // Eagerly try to relate the closure type with the expected
                // type, otherwise we often won't have enough information to
                // infer the body.
                self.deduce_closure_type_from_expectations(
                    tgt_expr,
                    &closure_ty,
                    &sig_ty,
                    expected,
                );

                // Now go through the argument patterns
                for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
                    self.infer_pat(*arg_pat, &arg_ty, BindingMode::default());
                }

                let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
                let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());

                self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));

                self.diverges = prev_diverges;
                self.return_ty = prev_ret_ty;

                closure_ty
            }
            Expr::Call { callee, args, .. } => {
                let callee_ty = self.infer_expr(*callee, &Expectation::none());
                let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
                let mut res = None;
                let mut derefed_callee = callee_ty.clone();
                // manual loop to be able to access `derefs.table`
                while let Some((callee_deref_ty, _)) = derefs.next() {
                    res = derefs.table.callable_sig(&callee_deref_ty, args.len());
                    if res.is_some() {
                        derefed_callee = callee_deref_ty;
                        break;
                    }
                }
                // if the function is unresolved, we use is_varargs=true to
                // suppress the arg count diagnostic here
                let is_varargs =
                    derefed_callee.callable_sig(self.db).map_or(false, |sig| sig.is_varargs)
                        || res.is_none();
                let (param_tys, ret_ty) = match res {
                    Some(res) => {
                        let adjustments = auto_deref_adjust_steps(&derefs);
                        self.write_expr_adj(*callee, adjustments);
                        res
                    }
                    None => (Vec::new(), self.err_ty()), // FIXME diagnostic
                };
                let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
                self.register_obligations_for_call(&callee_ty);

                let expected_inputs = self.expected_inputs_for_expected_output(
                    expected,
                    ret_ty.clone(),
                    param_tys.clone(),
                );

                self.check_call_arguments(
                    tgt_expr,
                    args,
                    &expected_inputs,
                    &param_tys,
                    &indices_to_skip,
                    is_varargs,
                );
                self.normalize_associated_types_in(ret_ty)
            }
            Expr::MethodCall { receiver, args, method_name, generic_args } => self
                .infer_method_call(
                    tgt_expr,
                    *receiver,
                    args,
                    method_name,
                    generic_args.as_deref(),
                    expected,
                ),
            Expr::Match { expr, arms } => {
                let input_ty = self.infer_expr(*expr, &Expectation::none());

                let expected = expected.adjust_for_branches(&mut self.table);

                let result_ty = if arms.is_empty() {
                    TyKind::Never.intern(Interner)
                } else {
                    match &expected {
                        Expectation::HasType(ty) => ty.clone(),
                        _ => self.table.new_type_var(),
                    }
                };
                let mut coerce = CoerceMany::new(result_ty);

                let matchee_diverges = self.diverges;
                let mut all_arms_diverge = Diverges::Always;

                for arm in arms.iter() {
                    self.diverges = Diverges::Maybe;
                    let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
                    if let Some(guard_expr) = arm.guard {
                        self.infer_expr(
                            guard_expr,
                            &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
                        );
                    }

                    let arm_ty = self.infer_expr_inner(arm.expr, &expected);
                    all_arms_diverge &= self.diverges;
                    coerce.coerce(self, Some(arm.expr), &arm_ty);
                }

                self.diverges = matchee_diverges | all_arms_diverge;

                coerce.complete()
            }
            Expr::Path(p) => {
                // FIXME this could be more efficient...
                let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
                self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty())
            }
            Expr::Continue { .. } => TyKind::Never.intern(Interner),
            Expr::Break { expr, label } => {
                let mut coerce = match find_breakable(&mut self.breakables, label.as_ref()) {
                    Some(ctxt) => {
                        // avoiding the borrowck
                        mem::replace(
                            &mut ctxt.coerce,
                            CoerceMany::new(self.result.standard_types.unknown.clone()),
                        )
                    }
                    None => CoerceMany::new(self.result.standard_types.unknown.clone()),
                };

                let val_ty = if let Some(expr) = *expr {
                    self.infer_expr(expr, &Expectation::none())
                } else {
                    TyBuilder::unit()
                };

                // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
                coerce.coerce(self, *expr, &val_ty);

                if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
                    ctxt.coerce = coerce;
                    ctxt.may_break = true;
                } else {
                    self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
                        expr: tgt_expr,
                    });
                };

                TyKind::Never.intern(Interner)
            }
            Expr::Return { expr } => {
                if let Some(expr) = expr {
                    self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
                } else {
                    // A bare `return` yields `()`, which must coerce to the return type.
                    let unit = TyBuilder::unit();
                    let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
                }
                TyKind::Never.intern(Interner)
            }
            Expr::Yield { expr } => {
                // FIXME: track yield type for coercion
                if let Some(expr) = expr {
                    self.infer_expr(*expr, &Expectation::none());
                }
                TyKind::Never.intern(Interner)
            }
            Expr::RecordLit { path, fields, spread, .. } => {
                let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
                if let Some(variant) = def_id {
                    self.write_variant_resolution(tgt_expr.into(), variant);
                }

                if let Some(t) = expected.only_has_type(&mut self.table) {
                    self.unify(&ty, &t);
                }

                let substs = ty
                    .as_adt()
                    .map(|(_, s)| s.clone())
                    .unwrap_or_else(|| Substitution::empty(Interner));
                let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
                let variant_data = def_id.map(|it| it.variant_data(self.db.upcast()));
                for field in fields.iter() {
                    let field_def =
                        variant_data.as_ref().and_then(|it| match it.field(&field.name) {
                            Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
                            None => {
                                self.push_diagnostic(InferenceDiagnostic::NoSuchField {
                                    expr: field.expr,
                                });
                                None
                            }
                        });
                    let field_ty = field_def.map_or(self.err_ty(), |it| {
                        field_types[it.local_id].clone().substitute(Interner, &substs)
                    });
                    self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
                }
                if let Some(expr) = spread {
                    self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
                }
                ty
            }
            Expr::Field { expr, name } => {
                let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());

                // Autoderef the receiver until a type with a matching field is found.
                let mut autoderef = Autoderef::new(&mut self.table, receiver_ty);
                let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| {
                    let (field_id, parameters) = match derefed_ty.kind(Interner) {
                        TyKind::Tuple(_, substs) => {
                            return name.as_tuple_index().and_then(|idx| {
                                substs
                                    .as_slice(Interner)
                                    .get(idx)
                                    .map(|a| a.assert_ty_ref(Interner))
                                    .cloned()
                            });
                        }
                        TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
                            let local_id = self.db.struct_data(*s).variant_data.field(name)?;
                            let field = FieldId { parent: (*s).into(), local_id };
                            (field, parameters.clone())
                        }
                        TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
                            let local_id = self.db.union_data(*u).variant_data.field(name)?;
                            let field = FieldId { parent: (*u).into(), local_id };
                            (field, parameters.clone())
                        }
                        _ => return None,
                    };
                    let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
                        .is_visible_from(self.db.upcast(), self.resolver.module());
                    if !is_visible {
                        // Write down the first field resolution even if it is not visible
                        // This aids IDE features for private fields like goto def and in
                        // case of autoderef finding an applicable field, this will be
                        // overwritten in a following cycle
                        if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr)
                        {
                            entry.insert(field_id);
                        }
                        return None;
                    }
                    // can't have `write_field_resolution` here because `self.table` is borrowed :(
                    self.result.field_resolutions.insert(tgt_expr, field_id);
                    let ty = self.db.field_types(field_id.parent)[field_id.local_id]
                        .clone()
                        .substitute(Interner, &parameters);
                    Some(ty)
                });
                let ty = match ty {
                    Some(ty) => {
                        let adjustments = auto_deref_adjust_steps(&autoderef);
                        self.write_expr_adj(*expr, adjustments);
                        let ty = self.insert_type_vars(ty);
                        let ty = self.normalize_associated_types_in(ty);
                        ty
                    }
                    _ => self.err_ty(),
                };
                ty
            }
            Expr::Await { expr } => {
                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
            }
            Expr::Try { expr } => {
                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
            }
            Expr::Cast { expr, type_ref } => {
                // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary)
                let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                let cast_ty = self.make_ty(type_ref);
                // FIXME check the cast...
                cast_ty
            }
            Expr::Ref { expr, rawness, mutability } => {
                let mutability = lower_to_chalk_mutability(*mutability);
                let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) = expected
                    .only_has_type(&mut self.table)
                    .as_ref()
                    .and_then(|t| t.as_reference_or_ptr())
                {
                    if exp_mutability == Mutability::Mut && mutability == Mutability::Not {
                        // FIXME: record type error - expected mut reference but found shared ref,
                        // which cannot be coerced
                    }
                    if exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
                        // FIXME: record type error - expected reference but found ptr,
                        // which cannot be coerced
                    }
                    Expectation::rvalue_hint(&mut self.table, Ty::clone(exp_inner))
                } else {
                    Expectation::none()
                };
                let inner_ty = self.infer_expr_inner(*expr, &expectation);
                match rawness {
                    Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
                    Rawness::Ref => TyKind::Ref(mutability, static_lifetime(), inner_ty),
                }
                .intern(Interner)
            }
            &Expr::Box { expr } => self.infer_expr_box(expr, expected),
            Expr::UnaryOp { expr, op } => {
                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                let inner_ty = self.resolve_ty_shallow(&inner_ty);
                match op {
                    UnaryOp::Deref => {
                        autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty())
                    }
                    UnaryOp::Neg => {
                        match inner_ty.kind(Interner) {
                            // Fast path for builtins
                            TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_))
                            | TyKind::InferenceVar(
                                _,
                                TyVariableKind::Integer | TyVariableKind::Float,
                            ) => inner_ty,
                            // Otherwise we resolve via the std::ops::Neg trait
                            _ => self
                                .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
                        }
                    }
                    UnaryOp::Not => {
                        match inner_ty.kind(Interner) {
                            // Fast path for builtins
                            TyKind::Scalar(Scalar::Bool | Scalar::Int(_) | Scalar::Uint(_))
                            | TyKind::InferenceVar(_, TyVariableKind::Integer) => inner_ty,
                            // Otherwise we resolve via the std::ops::Not trait
                            _ => self
                                .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
                        }
                    }
                }
            }
            Expr::BinaryOp { lhs, rhs, op } => match op {
                Some(BinaryOp::Assignment { op: None }) => {
                    let lhs = *lhs;
                    let is_ordinary = match &self.body[lhs] {
                        Expr::Array(_)
                        | Expr::RecordLit { .. }
                        | Expr::Tuple { .. }
                        | Expr::Underscore => false,
                        Expr::Call { callee, .. } => !matches!(&self.body[*callee], Expr::Path(_)),
                        _ => true,
                    };

                    // In ordinary (non-destructuring) assignments, the type of
                    // `lhs` must be inferred first so that the ADT fields
                    // instantiations in RHS can be coerced to it. Note that this
                    // cannot happen in destructuring assignments because of how
                    // they are desugared.
                    if is_ordinary {
                        let lhs_ty = self.infer_expr(lhs, &Expectation::none());
                        self.infer_expr_coerce(*rhs, &Expectation::has_type(lhs_ty));
                    } else {
                        let rhs_ty = self.infer_expr(*rhs, &Expectation::none());
                        self.infer_assignee_expr(lhs, &rhs_ty);
                    }
                    self.result.standard_types.unit.clone()
                }
                Some(BinaryOp::LogicOp(_)) => {
                    let bool_ty = self.result.standard_types.bool_.clone();
                    self.infer_expr_coerce(*lhs, &Expectation::HasType(bool_ty.clone()));
                    let lhs_diverges = self.diverges;
                    self.infer_expr_coerce(*rhs, &Expectation::HasType(bool_ty.clone()));
                    // Depending on the LHS' value, the RHS can never execute.
                    self.diverges = lhs_diverges;
                    bool_ty
                }
                Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr),
                _ => self.err_ty(),
            },
            Expr::Range { lhs, rhs, range_type } => {
                let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
                let rhs_expect = lhs_ty
                    .as_ref()
                    .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
                let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
                // Pick the `core::ops` range ADT matching the operand shape.
                match (range_type, lhs_ty, rhs_ty) {
                    (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
                        Some(adt) => TyBuilder::adt(self.db, adt).build(),
                        None => self.err_ty(),
                    },
                    (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
                        Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                        None => self.err_ty(),
                    },
                    (RangeOp::Inclusive, None, Some(ty)) => {
                        match self.resolve_range_to_inclusive() {
                            Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                            None => self.err_ty(),
                        }
                    }
                    (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
                        Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                        None => self.err_ty(),
                    },
                    (RangeOp::Inclusive, Some(_), Some(ty)) => {
                        match self.resolve_range_inclusive() {
                            Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                            None => self.err_ty(),
                        }
                    }
                    (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
                        Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                        None => self.err_ty(),
                    },
                    (RangeOp::Inclusive, _, None) => self.err_ty(),
                }
            }
            Expr::Index { base, index } => {
                let base_ty = self.infer_expr_inner(*base, &Expectation::none());
                let index_ty = self.infer_expr(*index, &Expectation::none());

                if let Some(index_trait) = self.resolve_ops_index() {
                    let canonicalized = self.canonicalize(base_ty.clone());
                    let receiver_adjustments = method_resolution::resolve_indexing_op(
                        self.db,
                        self.trait_env.clone(),
                        canonicalized.value,
                        index_trait,
                    );
                    let (self_ty, adj) = receiver_adjustments
                        .map_or((self.err_ty(), Vec::new()), |adj| {
                            adj.apply(&mut self.table, base_ty)
                        });
                    self.write_expr_adj(*base, adj);
                    self.resolve_associated_type_with_params(
                        self_ty,
                        self.resolve_ops_index_output(),
                        &[GenericArgData::Ty(index_ty).intern(Interner)],
                    )
                } else {
                    self.err_ty()
                }
            }
            Expr::Tuple { exprs, .. } => {
                // If a tuple type is expected, use its element types as
                // expectations; pad with fresh variables for missing slots.
                let mut tys = match expected
                    .only_has_type(&mut self.table)
                    .as_ref()
                    .map(|t| t.kind(Interner))
                {
                    Some(TyKind::Tuple(_, substs)) => substs
                        .iter(Interner)
                        .map(|a| a.assert_ty_ref(Interner).clone())
                        .chain(repeat_with(|| self.table.new_type_var()))
                        .take(exprs.len())
                        .collect::<Vec<_>>(),
                    _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
                };

                for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
                    self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
                }

                TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
            }
            Expr::Array(array) => {
                let elem_ty =
                    match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
                        Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
                        _ => self.table.new_type_var(),
                    };
                let mut coerce = CoerceMany::new(elem_ty.clone());

                let expected = Expectation::has_type(elem_ty.clone());
                let len = match array {
                    Array::ElementList { elements, .. } => {
                        for &expr in elements.iter() {
                            let cur_elem_ty = self.infer_expr_inner(expr, &expected);
                            coerce.coerce(self, Some(expr), &cur_elem_ty);
                        }
                        consteval::usize_const(Some(elements.len() as u128))
                    }
                    &Array::Repeat { initializer, repeat } => {
                        self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
                        self.infer_expr(
                            repeat,
                            &Expectation::has_type(
                                TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
                            ),
                        );

                        if let Some(g_def) = self.owner.as_generic_def_id() {
                            let generics = generics(self.db.upcast(), g_def);
                            consteval::eval_to_const(
                                repeat,
                                ParamLoweringMode::Placeholder,
                                self,
                                || generics,
                                DebruijnIndex::INNERMOST,
                            )
                        } else {
                            consteval::usize_const(None)
                        }
                    }
                };

                TyKind::Array(coerce.complete(), len).intern(Interner)
            }
            Expr::Literal(lit) => match lit {
                Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
                Literal::String(..) => {
                    TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(Interner))
                        .intern(Interner)
                }
                Literal::ByteString(bs) => {
                    // `b"..."` has type `&[u8; N]`.
                    let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);

                    let len = consteval::usize_const(Some(bs.len() as u128));

                    let array_type = TyKind::Array(byte_type, len).intern(Interner);
                    TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
                }
                Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(Interner),
                Literal::Int(_v, ty) => match ty {
                    Some(int_ty) => {
                        TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(*int_ty)))
                            .intern(Interner)
                    }
                    None => self.table.new_integer_var(),
                },
                Literal::Uint(_v, ty) => match ty {
                    Some(int_ty) => {
                        TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(*int_ty)))
                            .intern(Interner)
                    }
                    None => self.table.new_integer_var(),
                },
                Literal::Float(_v, ty) => match ty {
                    Some(float_ty) => {
                        TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(*float_ty)))
                            .intern(Interner)
                    }
                    None => self.table.new_float_var(),
                },
            },
            Expr::MacroStmts { tail, statements } => {
                self.infer_block(tgt_expr, statements, *tail, expected)
            }
            Expr::Underscore => {
                // Underscore expressions may only appear in assignee expressions,
                // which are handled by `infer_assignee_expr()`, so any underscore
                // expression reaching this branch is an error.
                self.err_ty()
            }
        };
        // use a new type variable if we got unknown here
        let ty = self.insert_type_vars_shallow(ty);
        self.write_expr_ty(tgt_expr, ty.clone());
        if self.resolve_ty_shallow(&ty).is_never() {
            // Any expression that produces a value of type `!` must have diverged
            self.diverges = Diverges::Always;
        }
        ty
    }
+
+ fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
+ if let Some(box_id) = self.resolve_boxed_box() {
+ let table = &mut self.table;
+ let inner_exp = expected
+ .to_option(table)
+ .as_ref()
+ .map(|e| e.as_adt())
+ .flatten()
+ .filter(|(e_adt, _)| e_adt == &box_id)
+ .map(|(_, subts)| {
+ let g = subts.at(Interner, 0);
+ Expectation::rvalue_hint(table, Ty::clone(g.assert_ty_ref(Interner)))
+ })
+ .unwrap_or_else(Expectation::none);
+
+ let inner_ty = self.infer_expr_inner(inner_expr, &inner_exp);
+ TyBuilder::adt(self.db, box_id)
+ .push(inner_ty)
+ .fill_with_defaults(self.db, || self.table.new_type_var())
+ .build()
+ } else {
+ self.err_ty()
+ }
+ }
+
    /// Infers the type of an assignee expression (the LHS of a destructuring
    /// assignment such as `(a, b) = rhs;`), checking its sub-expressions
    /// pattern-style against `rhs_ty` and reporting a `TypeMismatch` when the
    /// assignee's type fails to unify with `rhs_ty`.
    pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty {
        // A `..` rest marker in assignee position is lowered as a full,
        // endpoint-less exclusive range expression.
        let is_rest_expr = |expr| {
            matches!(
                &self.body[expr],
                Expr::Range { lhs: None, rhs: None, range_type: RangeOp::Exclusive },
            )
        };

        let rhs_ty = self.resolve_ty_shallow(rhs_ty);

        let ty = match &self.body[lhs] {
            Expr::Tuple { exprs, .. } => {
                // We don't consider multiple ellipses. This is analogous to
                // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
                let ellipsis = exprs.iter().position(|e| is_rest_expr(*e));
                let exprs: Vec<_> = exprs.iter().filter(|e| !is_rest_expr(**e)).copied().collect();

                self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs)
            }
            Expr::Call { callee, args, .. } => {
                // Tuple structs
                let path = match &self.body[*callee] {
                    Expr::Path(path) => Some(path),
                    _ => None,
                };

                // We don't consider multiple ellipses. This is analogous to
                // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
                let ellipsis = args.iter().position(|e| is_rest_expr(*e));
                let args: Vec<_> = args.iter().filter(|e| !is_rest_expr(**e)).copied().collect();

                self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args)
            }
            Expr::Array(Array::ElementList { elements, .. }) => {
                let elem_ty = match rhs_ty.kind(Interner) {
                    TyKind::Array(st, _) => st.clone(),
                    _ => self.err_ty(),
                };

                // There's no need to handle `..` as it cannot be bound.
                let sub_exprs = elements.iter().filter(|e| !is_rest_expr(**e));

                for e in sub_exprs {
                    self.infer_assignee_expr(*e, &elem_ty);
                }

                match rhs_ty.kind(Interner) {
                    TyKind::Array(_, _) => rhs_ty.clone(),
                    // Even when `rhs_ty` is not an array type, this assignee
                    // expression is inferred to be an array (of unknown element
                    // type and length). This should not be just an error type,
                    // because we are to compute the unifiability of this type and
                    // `rhs_ty` in the end of this function to issue type mismatches.
                    _ => TyKind::Array(self.err_ty(), crate::consteval::usize_const(None))
                        .intern(Interner),
                }
            }
            Expr::RecordLit { path, fields, .. } => {
                let subs = fields.iter().map(|f| (f.name.clone(), f.expr));

                self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
            }
            Expr::Underscore => rhs_ty.clone(),
            _ => {
                // `lhs` is a place expression, a unit struct, or an enum variant.
                let lhs_ty = self.infer_expr(lhs, &Expectation::none());

                // This is the only branch where this function may coerce any type.
                // We are returning early to avoid the unifiability check below.
                let lhs_ty = self.insert_type_vars_shallow(lhs_ty);
                let ty = match self.coerce(None, &rhs_ty, &lhs_ty) {
                    Ok(ty) => ty,
                    Err(_) => {
                        self.result.type_mismatches.insert(
                            lhs.into(),
                            TypeMismatch { expected: rhs_ty.clone(), actual: lhs_ty.clone() },
                        );
                        // `rhs_ty` is returned so no further type mismatches are
                        // reported because of this mismatch.
                        rhs_ty
                    }
                };
                self.write_expr_ty(lhs, ty.clone());
                return ty;
            }
        };

        let ty = self.insert_type_vars_shallow(ty);
        if !self.unify(&ty, &rhs_ty) {
            self.result
                .type_mismatches
                .insert(lhs.into(), TypeMismatch { expected: rhs_ty.clone(), actual: ty.clone() });
        }
        self.write_expr_ty(lhs, ty.clone());
        ty
    }
+
    /// Infers `lhs <op> rhs` for user-overloadable binary operators.
    ///
    /// Resolves the operator's trait method and builds its substitution from
    /// the operand types; if no method resolves, falls back entirely to the
    /// built-in operator typing. Even when a method resolves, built-in
    /// knowledge is unified in afterwards to sharpen inference.
    fn infer_overloadable_binop(
        &mut self,
        lhs: ExprId,
        op: BinaryOp,
        rhs: ExprId,
        tgt_expr: ExprId,
    ) -> Ty {
        let lhs_expectation = Expectation::none();
        let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
        // Fresh variable for the RHS so the trait method can constrain it.
        let rhs_ty = self.table.new_type_var();

        let func = self.resolve_binop_method(op);
        let func = match func {
            Some(func) => func,
            None => {
                // No trait method found: use built-in operator semantics only.
                let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone());
                let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty));
                return self
                    .builtin_binary_op_return_ty(op, lhs_ty, rhs_ty)
                    .unwrap_or_else(|| self.err_ty());
            }
        };

        let subst = TyBuilder::subst_for_def(self.db, func)
            .push(lhs_ty.clone())
            .push(rhs_ty.clone())
            .build();
        self.write_method_resolution(tgt_expr, func, subst.clone());

        let method_ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
        self.register_obligations_for_call(&method_ty);

        self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone()));

        let ret_ty = match method_ty.callable_sig(self.db) {
            Some(sig) => sig.ret().clone(),
            None => self.err_ty(),
        };

        let ret_ty = self.normalize_associated_types_in(ret_ty);

        // FIXME: record autoref adjustments

        // use knowledge of built-in binary ops, which can sometimes help inference
        if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) {
            self.unify(&builtin_rhs, &rhs_ty);
        }
        if let Some(builtin_ret) = self.builtin_binary_op_return_ty(op, lhs_ty, rhs_ty) {
            self.unify(&builtin_ret, &ret_ty);
        }

        ret_ty
    }
+
+    /// Infers the type of a block expression: type-checks every statement in
+    /// order, then the optional tail expression against `expected`.
+    ///
+    /// `expr` is the block expression itself (used to report a type mismatch
+    /// when a tail-less block's `()` does not coerce to the expectation).
+    fn infer_block(
+        &mut self,
+        expr: ExprId,
+        statements: &[Statement],
+        tail: Option<ExprId>,
+        expected: &Expectation,
+    ) -> Ty {
+        for stmt in statements {
+            match stmt {
+                Statement::Let { pat, type_ref, initializer, else_branch } => {
+                    let decl_ty = type_ref
+                        .as_ref()
+                        .map(|tr| self.make_ty(tr))
+                        .unwrap_or_else(|| self.err_ty());
+
+                    // Always use the declared type when specified
+                    let mut ty = decl_ty.clone();
+
+                    if let Some(expr) = initializer {
+                        let actual_ty =
+                            self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
+                        // No annotation: the pattern gets the initializer's
+                        // inferred type.
+                        if decl_ty.is_unknown() {
+                            ty = actual_ty;
+                        }
+                    }
+
+                    if let Some(expr) = else_branch {
+                        // The `else` block of `let .. else` must diverge,
+                        // hence the `!` expectation.
+                        self.infer_expr_coerce(
+                            *expr,
+                            &Expectation::has_type(Ty::new(Interner, TyKind::Never)),
+                        );
+                    }
+
+                    self.infer_pat(*pat, &ty, BindingMode::default());
+                }
+                Statement::Expr { expr, .. } => {
+                    self.infer_expr(*expr, &Expectation::none());
+                }
+            }
+        }
+
+        if let Some(expr) = tail {
+            self.infer_expr_coerce(expr, expected)
+        } else {
+            // Citing rustc: if there is no explicit tail expression,
+            // that is typically equivalent to a tail expression
+            // of `()` -- except if the block diverges. In that
+            // case, there is no value supplied from the tail
+            // expression (assuming there are no other breaks,
+            // this implies that the type of the block will be
+            // `!`).
+            if self.diverges.is_always() {
+                // we don't even make an attempt at coercion
+                self.table.new_maybe_never_var()
+            } else {
+                if let Some(t) = expected.only_has_type(&mut self.table) {
+                    // Coerce the implicit `()` to the expected type; report
+                    // a mismatch on the block expression if that fails.
+                    if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
+                        self.result.type_mismatches.insert(
+                            expr.into(),
+                            TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
+                        );
+                    }
+                    t
+                } else {
+                    TyBuilder::unit()
+                }
+            }
+        }
+    }
+
+    /// Infers the type of a method call `receiver.method::<...>(args)`.
+    ///
+    /// Resolves the method by name on the receiver type (with autoderef /
+    /// autoref adjustments); on resolution failure, inference continues with
+    /// error types so the rest of the body can still be checked.
+    fn infer_method_call(
+        &mut self,
+        tgt_expr: ExprId,
+        receiver: ExprId,
+        args: &[ExprId],
+        method_name: &Name,
+        generic_args: Option<&GenericArgs>,
+        expected: &Expectation,
+    ) -> Ty {
+        let receiver_ty = self.infer_expr(receiver, &Expectation::none());
+        let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+
+        let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+        let resolved = method_resolution::lookup_method(
+            &canonicalized_receiver.value,
+            self.db,
+            self.trait_env.clone(),
+            &traits_in_scope,
+            VisibleFromModule::Filter(self.resolver.module()),
+            method_name,
+        );
+        let (receiver_ty, method_ty, substs) = match resolved {
+            Some((adjust, func)) => {
+                // Apply the receiver adjustments found during lookup and
+                // record them for the receiver expression.
+                let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+                let generics = generics(self.db.upcast(), func.into());
+                let substs = self.substs_for_method_call(generics, generic_args);
+                self.write_expr_adj(receiver, adjustments);
+                self.write_method_resolution(tgt_expr, func, substs.clone());
+                (ty, self.db.value_ty(func.into()), substs)
+            }
+            // Unresolved method: fall back to an error-typed callable.
+            None => (
+                receiver_ty,
+                Binders::empty(Interner, self.err_ty()),
+                Substitution::empty(Interner),
+            ),
+        };
+        let method_ty = method_ty.substitute(Interner, &substs);
+        self.register_obligations_for_call(&method_ty);
+        // Split the signature into the `self` parameter, the remaining
+        // parameters, the return type and the varargs flag.
+        let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
+            match method_ty.callable_sig(self.db) {
+                Some(sig) => {
+                    if !sig.params().is_empty() {
+                        (
+                            sig.params()[0].clone(),
+                            sig.params()[1..].to_vec(),
+                            sig.ret().clone(),
+                            sig.is_varargs,
+                        )
+                    } else {
+                        (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
+                    }
+                }
+                None => (self.err_ty(), Vec::new(), self.err_ty(), true),
+            };
+        self.unify(&formal_receiver_ty, &receiver_ty);
+
+        // Propagate the expected return type back into the parameter types
+        // where possible (bidirectional inference).
+        let expected_inputs =
+            self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone());
+
+        self.check_call_arguments(tgt_expr, args, &expected_inputs, &param_tys, &[], is_varargs);
+        self.normalize_associated_types_in(ret_ty)
+    }
+
+    /// Refines the expected argument types by tentatively unifying the
+    /// expected output with the callable's return type and reading back what
+    /// that implies for the inputs. All of this happens inside a "fudge"
+    /// region, so no inference state leaks if the unification fails.
+    fn expected_inputs_for_expected_output(
+        &mut self,
+        expected_output: &Expectation,
+        output: Ty,
+        inputs: Vec<Ty>,
+    ) -> Vec<Ty> {
+        // Without a concrete expectation there is nothing to propagate.
+        let expected_ty = match expected_output.to_option(&mut self.table) {
+            Some(ty) => ty,
+            None => return Vec::new(),
+        };
+        self.table.fudge_inference(|table| {
+            if table.try_unify(&expected_ty, &output).is_err() {
+                return Vec::new();
+            }
+            // Resolve the inputs as far as the speculative unification
+            // allows; unresolved variables are kept as-is via the fallback.
+            table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
+                chalk_ir::VariableKind::Ty(tk) => var.to_ty(Interner, tk).cast(Interner),
+                chalk_ir::VariableKind::Const(ty) => var.to_const(Interner, ty).cast(Interner),
+                chalk_ir::VariableKind::Lifetime => var.to_lifetime(Interner).cast(Interner),
+            })
+        })
+    }
+
+    /// Type-checks call arguments against the formal parameter types.
+    ///
+    /// `expected_inputs` are parameter types refined from the expected
+    /// return type; `skip_indices` (sorted) marks arguments already consumed
+    /// as legacy const generics; `is_varargs` suppresses the argument-count
+    /// diagnostic.
+    fn check_call_arguments(
+        &mut self,
+        expr: ExprId,
+        args: &[ExprId],
+        expected_inputs: &[Ty],
+        param_tys: &[Ty],
+        skip_indices: &[u32],
+        is_varargs: bool,
+    ) {
+        if args.len() != param_tys.len() + skip_indices.len() && !is_varargs {
+            self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
+                call_expr: expr,
+                expected: param_tys.len() + skip_indices.len(),
+                found: args.len(),
+            });
+        }
+
+        // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
+        // We do this in a pretty awful way: first we type-check any arguments
+        // that are not closures, then we type-check the closures. This is so
+        // that we have more information about the types of arguments when we
+        // type-check the functions. This isn't really the right way to do this.
+        for &check_closures in &[false, true] {
+            let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
+            // Pad with error types so surplus / varargs arguments are still
+            // inferred.
+            let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
+            let expected_iter = expected_inputs
+                .iter()
+                .cloned()
+                .chain(param_iter.clone().skip(expected_inputs.len()));
+            for (idx, ((&arg, param_ty), expected_ty)) in
+                args.iter().zip(param_iter).zip(expected_iter).enumerate()
+            {
+                let is_closure = matches!(&self.body[arg], Expr::Closure { .. });
+                if is_closure != check_closures {
+                    continue;
+                }
+
+                // Advance the skip cursor and skip arguments handled as
+                // legacy const generics.
+                while skip_indices.peek().map_or(false, |i| *i < idx as u32) {
+                    skip_indices.next();
+                }
+                if skip_indices.peek().copied() == Some(idx as u32) {
+                    continue;
+                }
+
+                // the difference between param_ty and expected here is that
+                // expected is the parameter when the expected *return* type is
+                // taken into account. So in `let _: &[i32] = identity(&[1, 2])`
+                // the expected type is already `&[i32]`, whereas param_ty is
+                // still an unbound type variable. We don't always want to force
+                // the parameter to coerce to the expected type (for example in
+                // `coerce_unsize_expected_type_4`).
+                let param_ty = self.normalize_associated_types_in(param_ty);
+                let expected = Expectation::rvalue_hint(&mut self.table, expected_ty);
+                // infer with the expected type we have...
+                let ty = self.infer_expr_inner(arg, &expected);
+
+                // then coerce to either the expected type or just the formal parameter type
+                let coercion_target = if let Some(ty) = expected.only_has_type(&mut self.table) {
+                    // if we are coercing to the expectation, unify with the
+                    // formal parameter type to connect everything
+                    self.unify(&ty, &param_ty);
+                    ty
+                } else {
+                    param_ty
+                };
+                if !coercion_target.is_unknown() {
+                    if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
+                        self.result.type_mismatches.insert(
+                            arg.into(),
+                            TypeMismatch { expected: coercion_target, actual: ty.clone() },
+                        );
+                    }
+                }
+            }
+        }
+    }
+
+    /// Builds the substitution for a method call from explicitly provided
+    /// generic arguments, filling everything else with fresh inference vars.
+    ///
+    /// Layout follows `def_generics`: parent (impl/trait) parameters first,
+    /// then the method's own parameters.
+    fn substs_for_method_call(
+        &mut self,
+        def_generics: Generics,
+        generic_args: Option<&GenericArgs>,
+    ) -> Substitution {
+        let (parent_params, self_params, type_params, const_params, impl_trait_params) =
+            def_generics.provenance_split();
+        assert_eq!(self_params, 0); // method shouldn't have another Self param
+        let total_len = parent_params + type_params + const_params + impl_trait_params;
+        let mut substs = Vec::with_capacity(total_len);
+        // Parent arguments are unknown
+        for (id, param) in def_generics.iter_parent() {
+            match param {
+                TypeOrConstParamData::TypeParamData(_) => {
+                    substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner));
+                }
+                TypeOrConstParamData::ConstParamData(_) => {
+                    let ty = self.db.const_param_ty(ConstParamId::from_unchecked(id));
+                    substs
+                        .push(GenericArgData::Const(self.table.new_const_var(ty)).intern(Interner));
+                }
+            }
+        }
+        // handle provided arguments
+        if let Some(generic_args) = generic_args {
+            // if args are provided, it should be all of them, but we can't rely on that
+            for (arg, kind_id) in generic_args
+                .args
+                .iter()
+                // Lifetime arguments are not part of this substitution.
+                .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+                .take(type_params + const_params)
+                .zip(def_generics.iter_id().skip(parent_params))
+            {
+                if let Some(g) = generic_arg_to_chalk(
+                    self.db,
+                    kind_id,
+                    arg,
+                    self,
+                    |this, type_ref| this.make_ty(type_ref),
+                    |this, c, ty| {
+                        const_or_path_to_chalk(
+                            this.db,
+                            &this.resolver,
+                            ty,
+                            c,
+                            ParamLoweringMode::Placeholder,
+                            || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()),
+                            DebruijnIndex::INNERMOST,
+                        )
+                    },
+                ) {
+                    substs.push(g);
+                }
+            }
+        };
+        // Fill the remainder (parameters without a provided argument) with
+        // fresh inference variables.
+        for (id, data) in def_generics.iter().skip(substs.len()) {
+            match data {
+                TypeOrConstParamData::TypeParamData(_) => {
+                    substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner))
+                }
+                TypeOrConstParamData::ConstParamData(_) => {
+                    substs.push(
+                        GenericArgData::Const(self.table.new_const_var(
+                            self.db.const_param_ty(ConstParamId::from_unchecked(id)),
+                        ))
+                        .intern(Interner),
+                    )
+                }
+            }
+        }
+        assert_eq!(substs.len(), total_len);
+        Substitution::from_iter(Interner, substs)
+    }
+
+    /// Registers the trait obligations arising from calling `callable_ty`:
+    /// its where-clauses (instantiated with the call's substitution) and,
+    /// for trait methods, the implied trait implementation itself.
+    fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
+        let callable_ty = self.resolve_ty_shallow(callable_ty);
+        if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
+            let def: CallableDefId = from_chalk(self.db, *fn_def);
+            let generic_predicates = self.db.generic_predicates(def.into());
+            for predicate in generic_predicates.iter() {
+                let (predicate, binders) = predicate
+                    .clone()
+                    .substitute(Interner, parameters)
+                    .into_value_and_skipped_binders();
+                always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
+                self.push_obligation(predicate.cast(Interner));
+            }
+            // add obligation for trait implementation, if this is a trait method
+            match def {
+                CallableDefId::FunctionId(f) => {
+                    if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
+                        // construct a TraitRef
+                        let substs = crate::subst_prefix(
+                            &*parameters,
+                            generics(self.db.upcast(), trait_.into()).len(),
+                        );
+                        self.push_obligation(
+                            TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs }
+                                .cast(Interner),
+                        );
+                    }
+                }
+                CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
+            }
+        }
+    }
+
+    /// Returns the argument indices to skip.
+    ///
+    /// Handles `#[rustc_legacy_const_generics]` functions, whose const
+    /// generics are spelled as value arguments at the call site. The
+    /// returned (sorted) indices identify those arguments so that ordinary
+    /// argument checking can skip them.
+    fn check_legacy_const_generics(&mut self, callee: Ty, args: &[ExprId]) -> Box<[u32]> {
+        let (func, subst) = match callee.kind(Interner) {
+            TyKind::FnDef(fn_id, subst) => {
+                let callable = CallableDefId::from_chalk(self.db, *fn_id);
+                let func = match callable {
+                    CallableDefId::FunctionId(f) => f,
+                    _ => return Default::default(),
+                };
+                (func, subst)
+            }
+            // Not a direct fn-def call: nothing to skip.
+            _ => return Default::default(),
+        };
+
+        let data = self.db.function_data(func);
+        if data.legacy_const_generics_indices.is_empty() {
+            return Default::default();
+        }
+
+        // only use legacy const generics if the param count matches with them
+        if data.params.len() + data.legacy_const_generics_indices.len() != args.len() {
+            if args.len() <= data.params.len() {
+                return Default::default();
+            } else {
+                // there are more parameters than there should be without legacy
+                // const params; use them
+                let mut indices = data.legacy_const_generics_indices.clone();
+                indices.sort();
+                return indices;
+            }
+        }
+
+        // check legacy const parameters
+        for (subst_idx, arg_idx) in data.legacy_const_generics_indices.iter().copied().enumerate() {
+            let arg = match subst.at(Interner, subst_idx).constant(Interner) {
+                Some(c) => c,
+                None => continue, // not a const parameter?
+            };
+            if arg_idx >= args.len() as u32 {
+                continue;
+            }
+            let _ty = arg.data(Interner).ty.clone();
+            let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
+            self.infer_expr(args[arg_idx as usize], &expected);
+            // FIXME: evaluate and unify with the const
+        }
+        let mut indices = data.legacy_const_generics_indices.clone();
+        indices.sort();
+        indices
+    }
+
+    /// Computes the result type of `op` applied to `lhs_ty` and `rhs_ty`
+    /// under the *built-in* (primitive) operator rules; `None` when the
+    /// operand combination is not covered by them.
+    fn builtin_binary_op_return_ty(&mut self, op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Option<Ty> {
+        let lhs_ty = self.resolve_ty_shallow(&lhs_ty);
+        let rhs_ty = self.resolve_ty_shallow(&rhs_ty);
+        // Is this an integer scalar or a still-unresolved integer variable?
+        let is_integerish = |ty: &Ty| {
+            matches!(
+                ty.kind(Interner),
+                TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
+                    | TyKind::InferenceVar(_, TyVariableKind::Integer)
+            )
+        };
+        match op {
+            // Comparisons and lazy boolean operators always produce `bool`.
+            BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => {
+                Some(TyKind::Scalar(Scalar::Bool).intern(Interner))
+            }
+            // Assignments (plain and compound) evaluate to `()`.
+            BinaryOp::Assignment { .. } => Some(TyBuilder::unit()),
+            // Shifts accept any integer/integer combination and keep the
+            // LHS type.
+            BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
+                (is_integerish(&lhs_ty) && is_integerish(&rhs_ty)).then(|| lhs_ty)
+            }
+            // Remaining arithmetic: both operands must belong to the same
+            // numeric class; prefer returning the more concrete side.
+            BinaryOp::ArithOp(_) => match (lhs_ty.kind(Interner), rhs_ty.kind(Interner)) {
+                // Same concrete scalar kind, two unresolved variables of the
+                // same class, or unresolved LHS with concrete RHS: the RHS
+                // type wins.
+                (TyKind::Scalar(Scalar::Int(_)), TyKind::Scalar(Scalar::Int(_)))
+                | (TyKind::Scalar(Scalar::Uint(_)), TyKind::Scalar(Scalar::Uint(_)))
+                | (TyKind::Scalar(Scalar::Float(_)), TyKind::Scalar(Scalar::Float(_)))
+                | (
+                    TyKind::InferenceVar(_, TyVariableKind::Integer),
+                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
+                )
+                | (
+                    TyKind::InferenceVar(_, TyVariableKind::Float),
+                    TyKind::Scalar(Scalar::Float(_)),
+                )
+                | (
+                    TyKind::InferenceVar(_, TyVariableKind::Integer),
+                    TyKind::InferenceVar(_, TyVariableKind::Integer),
+                )
+                | (
+                    TyKind::InferenceVar(_, TyVariableKind::Float),
+                    TyKind::InferenceVar(_, TyVariableKind::Float),
+                ) => Some(rhs_ty),
+                // Concrete LHS with unresolved RHS: the LHS type wins.
+                (
+                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
+                    TyKind::InferenceVar(_, TyVariableKind::Integer),
+                )
+                | (
+                    TyKind::Scalar(Scalar::Float(_)),
+                    TyKind::InferenceVar(_, TyVariableKind::Float),
+                ) => Some(lhs_ty),
+                _ => None,
+            },
+        }
+    }
+
+    /// Determines the RHS type a built-in binary operator expects given the
+    /// LHS type, or `None` when the built-in rules impose no expectation.
+    fn builtin_binary_op_rhs_expectation(&mut self, op: BinaryOp, lhs_ty: Ty) -> Option<Ty> {
+        match op {
+            // `&&` / `||` demand `bool` on both sides.
+            BinaryOp::LogicOp(..) => Some(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+            // Plain assignment: the RHS must have the LHS type.
+            BinaryOp::Assignment { op: None } => Some(lhs_ty),
+            // `==` / `!=` apply to scalars and `str` (and numeric vars).
+            BinaryOp::CmpOp(CmpOp::Eq { .. }) => {
+                match self.resolve_ty_shallow(&lhs_ty).kind(Interner) {
+                    TyKind::Scalar(_)
+                    | TyKind::Str
+                    | TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => {
+                        Some(lhs_ty)
+                    }
+                    _ => None,
+                }
+            }
+            // The shift RHS is independent of the LHS type.
+            BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => None,
+            // Ordering comparisons, compound assignment and the remaining
+            // arithmetic all expect a numeric RHS matching the LHS.
+            BinaryOp::CmpOp(CmpOp::Ord { .. })
+            | BinaryOp::Assignment { op: Some(_) }
+            | BinaryOp::ArithOp(_) => {
+                match self.resolve_ty_shallow(&lhs_ty).kind(Interner) {
+                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_))
+                    | TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => {
+                        Some(lhs_ty)
+                    }
+                    _ => None,
+                }
+            }
+        }
+    }
+
+    /// Resolves the `std::ops` / `std::cmp` trait method implementing `op`.
+    ///
+    /// Returns `None` for operators with no overloading trait (`&&`, `||`,
+    /// plain `=`), or when the lang item / method cannot be found.
+    fn resolve_binop_method(&self, op: BinaryOp) -> Option<FunctionId> {
+        // Pairs of (method name, lang item of the defining trait). Note that
+        // all comparison operators live on `partial_ord` except equality,
+        // which lives on `eq` (`PartialEq`).
+        let (name, lang_item) = match op {
+            BinaryOp::LogicOp(_) => return None,
+            BinaryOp::ArithOp(aop) => match aop {
+                ArithOp::Add => (name!(add), name!(add)),
+                ArithOp::Mul => (name!(mul), name!(mul)),
+                ArithOp::Sub => (name!(sub), name!(sub)),
+                ArithOp::Div => (name!(div), name!(div)),
+                ArithOp::Rem => (name!(rem), name!(rem)),
+                ArithOp::Shl => (name!(shl), name!(shl)),
+                ArithOp::Shr => (name!(shr), name!(shr)),
+                ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
+                ArithOp::BitOr => (name!(bitor), name!(bitor)),
+                ArithOp::BitAnd => (name!(bitand), name!(bitand)),
+            },
+            BinaryOp::Assignment { op: Some(aop) } => match aop {
+                ArithOp::Add => (name!(add_assign), name!(add_assign)),
+                ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
+                ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
+                ArithOp::Div => (name!(div_assign), name!(div_assign)),
+                ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
+                ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
+                ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
+                ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
+                ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
+                ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
+            },
+            BinaryOp::CmpOp(cop) => match cop {
+                CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
+                CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
+                CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
+                    (name!(le), name!(partial_ord))
+                }
+                CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
+                    (name!(lt), name!(partial_ord))
+                }
+                CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
+                    (name!(ge), name!(partial_ord))
+                }
+                CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
+                    (name!(gt), name!(partial_ord))
+                }
+            },
+            BinaryOp::Assignment { op: None } => return None,
+        };
+
+        let trait_ = self.resolve_lang_item(lang_item)?.as_trait()?;
+
+        self.db.trait_data(trait_).method_by_name(&name)
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
new file mode 100644
index 000000000..5e7320a5d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -0,0 +1,354 @@
+//! Type inference for patterns.
+
+use std::iter::repeat_with;
+
+use chalk_ir::Mutability;
+use hir_def::{
+ expr::{BindingAnnotation, Expr, Literal, Pat, PatId},
+ path::Path,
+ type_ref::ConstScalar,
+};
+use hir_expand::name::Name;
+
+use crate::{
+ consteval::intern_const_scalar,
+ infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
+ lower::lower_to_chalk_mutability,
+ static_lifetime, ConcreteConst, ConstValue, Interner, Substitution, Ty, TyBuilder, TyExt,
+ TyKind,
+};
+
+use super::PatLike;
+
+impl<'a> InferenceContext<'a> {
+    /// Infers type for tuple struct pattern or its corresponding assignee expression.
+    ///
+    /// Ellipses found in the original pattern or expression must be filtered out.
+    pub(super) fn infer_tuple_struct_pat_like<T: PatLike>(
+        &mut self,
+        path: Option<&Path>,
+        expected: &Ty,
+        default_bm: T::BindingMode,
+        id: T,
+        ellipsis: Option<usize>,
+        subs: &[T],
+    ) -> Ty {
+        let (ty, def) = self.resolve_variant(path, true);
+        let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+        if let Some(variant) = def {
+            self.write_variant_resolution(id.into(), variant);
+        }
+        self.unify(&ty, expected);
+
+        let substs =
+            ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
+
+        let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+        // Split the sub-patterns around the (already removed) `..`; fields
+        // covered by the ellipsis are simply not checked.
+        let (pre, post) = match ellipsis {
+            Some(idx) => subs.split_at(idx),
+            None => (subs, &[][..]),
+        };
+        // Field index at which the post-ellipsis sub-patterns resume.
+        let post_idx_offset = field_tys.iter().count().saturating_sub(post.len());
+
+        let pre_iter = pre.iter().enumerate();
+        let post_iter = (post_idx_offset..).zip(post.iter());
+        for (i, &subpat) in pre_iter.chain(post_iter) {
+            // Tuple-struct fields are named `0`, `1`, ...; an out-of-range
+            // index falls back to the error type.
+            let expected_ty = var_data
+                .as_ref()
+                .and_then(|d| d.field(&Name::new_tuple_field(i)))
+                .map_or(self.err_ty(), |field| {
+                    field_tys[field].clone().substitute(Interner, &substs)
+                });
+            let expected_ty = self.normalize_associated_types_in(expected_ty);
+            T::infer(self, subpat, &expected_ty, default_bm);
+        }
+
+        ty
+    }
+
+    /// Infers the type of a record pattern (or the record assignee
+    /// expression sharing its shape), checking each named sub-pattern
+    /// against the corresponding field type of the resolved variant.
+    pub(super) fn infer_record_pat_like<T: PatLike>(
+        &mut self,
+        path: Option<&Path>,
+        expected: &Ty,
+        default_bm: T::BindingMode,
+        id: T,
+        subs: impl Iterator<Item = (Name, T)>,
+    ) -> Ty {
+        let (ty, def) = self.resolve_variant(path, false);
+        if let Some(variant) = def {
+            self.write_variant_resolution(id.into(), variant);
+        }
+
+        self.unify(&ty, expected);
+
+        // Substitution mapping the variant's generic params to the concrete
+        // arguments of `ty` (empty when `ty` is not an ADT).
+        let substs = match ty.as_adt() {
+            Some((_, s)) => s.clone(),
+            None => Substitution::empty(Interner),
+        };
+
+        let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+        let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+
+        for (field_name, sub) in subs {
+            // Unknown field names fall back to the error type instead of
+            // aborting inference for the whole pattern.
+            let raw_ty = match var_data.as_ref().and_then(|it| it.field(&field_name)) {
+                Some(local_id) => field_tys[local_id].clone().substitute(Interner, &substs),
+                None => self.err_ty(),
+            };
+            let field_ty = self.normalize_associated_types_in(raw_ty);
+            T::infer(self, sub, &field_ty, default_bm);
+        }
+
+        ty
+    }
+
+    /// Infers type for tuple pattern or its corresponding assignee expression.
+    ///
+    /// Ellipses found in the original pattern or expression must be filtered out.
+    pub(super) fn infer_tuple_pat_like<T: PatLike>(
+        &mut self,
+        expected: &Ty,
+        default_bm: T::BindingMode,
+        ellipsis: Option<usize>,
+        subs: &[T],
+    ) -> Ty {
+        let expectations = match expected.as_tuple() {
+            Some(parameters) => &*parameters.as_slice(Interner),
+            _ => &[],
+        };
+
+        // `n_uncovered_patterns` counts the tuple elements swallowed by the
+        // `..`; they still need types in the resulting tuple type.
+        let ((pre, post), n_uncovered_patterns) = match ellipsis {
+            Some(idx) => (subs.split_at(idx), expectations.len().saturating_sub(subs.len())),
+            None => ((&subs[..], &[][..]), 0),
+        };
+        // Expected element types, padded with fresh inference vars when the
+        // expectation is shorter than the pattern.
+        let mut expectations_iter = expectations
+            .iter()
+            .cloned()
+            .map(|a| a.assert_ty_ref(Interner).clone())
+            .chain(repeat_with(|| self.table.new_type_var()));
+
+        let mut inner_tys = Vec::with_capacity(n_uncovered_patterns + subs.len());
+
+        inner_tys.extend(expectations_iter.by_ref().take(n_uncovered_patterns + subs.len()));
+
+        // Process pre
+        for (ty, pat) in inner_tys.iter_mut().zip(pre) {
+            *ty = T::infer(self, *pat, ty, default_bm);
+        }
+
+        // Process post
+        for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) {
+            *ty = T::infer(self, *pat, ty, default_bm);
+        }
+
+        TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
+            .intern(Interner)
+    }
+
+    /// Infers the type of pattern `pat` against `expected`, records it in
+    /// the inference result, and returns it.
+    ///
+    /// `default_bm` is the current default binding mode, adjusted here per
+    /// RFC 2005 ("match ergonomics") as references are peeled off the
+    /// scrutinee type.
+    pub(super) fn infer_pat(
+        &mut self,
+        pat: PatId,
+        expected: &Ty,
+        mut default_bm: BindingMode,
+    ) -> Ty {
+        let mut expected = self.resolve_ty_shallow(expected);
+
+        if is_non_ref_pat(&self.body, pat) {
+            // Match ergonomics: peel references off the expected type and
+            // record every peeled layer as a pattern adjustment.
+            let mut pat_adjustments = Vec::new();
+            while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
+                pat_adjustments.push(expected.clone());
+                expected = self.resolve_ty_shallow(inner);
+                default_bm = match default_bm {
+                    BindingMode::Move => BindingMode::Ref(mutability),
+                    BindingMode::Ref(Mutability::Not) => BindingMode::Ref(Mutability::Not),
+                    BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
+                }
+            }
+
+            if !pat_adjustments.is_empty() {
+                pat_adjustments.shrink_to_fit();
+                self.result.pat_adjustments.insert(pat, pat_adjustments);
+            }
+        } else if let Pat::Ref { .. } = &self.body[pat] {
+            cov_mark::hit!(match_ergonomics_ref);
+            // When you encounter a `&pat` pattern, reset to Move.
+            // This is so that `w` is by value: `let (_, &w) = &(1, &2);`
+            default_bm = BindingMode::Move;
+        }
+
+        // Lose mutability.
+        let default_bm = default_bm;
+        let expected = expected;
+
+        let ty = match &self.body[pat] {
+            Pat::Tuple { args, ellipsis } => {
+                self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
+            }
+            Pat::Or(pats) => {
+                // All alternatives are checked; the first one's type is used
+                // as the type of the whole or-pattern.
+                if let Some((first_pat, rest)) = pats.split_first() {
+                    let ty = self.infer_pat(*first_pat, &expected, default_bm);
+                    for pat in rest {
+                        self.infer_pat(*pat, &expected, default_bm);
+                    }
+                    ty
+                } else {
+                    self.err_ty()
+                }
+            }
+            Pat::Ref { pat, mutability } => {
+                let mutability = lower_to_chalk_mutability(*mutability);
+                let expectation = match expected.as_reference() {
+                    Some((inner_ty, _lifetime, exp_mut)) => {
+                        if mutability != exp_mut {
+                            // FIXME: emit type error?
+                        }
+                        inner_ty.clone()
+                    }
+                    _ => self.result.standard_types.unknown.clone(),
+                };
+                let subty = self.infer_pat(*pat, &expectation, default_bm);
+                TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
+            }
+            Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
+                .infer_tuple_struct_pat_like(
+                    p.as_deref(),
+                    &expected,
+                    default_bm,
+                    pat,
+                    *ellipsis,
+                    subpats,
+                ),
+            Pat::Record { path: p, args: fields, ellipsis: _ } => {
+                let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
+                self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs)
+            }
+            Pat::Path(path) => {
+                // FIXME use correct resolver for the surrounding expression
+                let resolver = self.resolver.clone();
+                self.infer_path(&resolver, path, pat.into()).unwrap_or_else(|| self.err_ty())
+            }
+            Pat::Bind { mode, name: _, subpat } => {
+                // An explicit annotation (`ref`, `mut`, `ref mut`) overrides
+                // the default binding mode.
+                let mode = if mode == &BindingAnnotation::Unannotated {
+                    default_bm
+                } else {
+                    BindingMode::convert(*mode)
+                };
+                self.result.pat_binding_modes.insert(pat, mode);
+
+                let inner_ty = match subpat {
+                    Some(subpat) => self.infer_pat(*subpat, &expected, default_bm),
+                    None => expected,
+                };
+                let inner_ty = self.insert_type_vars_shallow(inner_ty);
+
+                // `ref`-style bindings bind a reference to the inner type.
+                let bound_ty = match mode {
+                    BindingMode::Ref(mutability) => {
+                        TyKind::Ref(mutability, static_lifetime(), inner_ty.clone())
+                            .intern(Interner)
+                    }
+                    BindingMode::Move => inner_ty.clone(),
+                };
+                self.write_pat_ty(pat, bound_ty);
+                return inner_ty;
+            }
+            Pat::Slice { prefix, slice, suffix } => {
+                let elem_ty = match expected.kind(Interner) {
+                    TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
+                    _ => self.err_ty(),
+                };
+
+                for &pat_id in prefix.iter().chain(suffix.iter()) {
+                    self.infer_pat(pat_id, &elem_ty, default_bm);
+                }
+
+                if let &Some(slice_pat_id) = slice {
+                    // The rest pattern (`..` bound to a name) matches an
+                    // array of the remaining (known) length, or a slice.
+                    let rest_pat_ty = match expected.kind(Interner) {
+                        TyKind::Array(_, length) => {
+                            let len = match length.data(Interner).value {
+                                ConstValue::Concrete(ConcreteConst {
+                                    interned: ConstScalar::UInt(len),
+                                }) => len.checked_sub((prefix.len() + suffix.len()) as u128),
+                                _ => None,
+                            };
+                            TyKind::Array(
+                                elem_ty.clone(),
+                                intern_const_scalar(
+                                    len.map_or(ConstScalar::Unknown, |len| ConstScalar::UInt(len)),
+                                    TyBuilder::usize(),
+                                ),
+                            )
+                        }
+                        _ => TyKind::Slice(elem_ty.clone()),
+                    }
+                    .intern(Interner);
+                    self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
+                }
+
+                match expected.kind(Interner) {
+                    TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
+                    _ => TyKind::Slice(elem_ty),
+                }
+                .intern(Interner)
+            }
+            Pat::Wild => expected.clone(),
+            Pat::Range { start, end } => {
+                let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
+                self.infer_expr(*end, &Expectation::has_type(start_ty))
+            }
+            Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())),
+            Pat::Box { inner } => match self.resolve_boxed_box() {
+                Some(box_adt) => {
+                    // Peel `Box<T, A>` from the expectation; the allocator
+                    // parameter is carried over when present.
+                    let (inner_ty, alloc_ty) = match expected.as_adt() {
+                        Some((adt, subst)) if adt == box_adt => (
+                            subst.at(Interner, 0).assert_ty_ref(Interner).clone(),
+                            subst.as_slice(Interner).get(1).and_then(|a| a.ty(Interner).cloned()),
+                        ),
+                        _ => (self.result.standard_types.unknown.clone(), None),
+                    };
+
+                    let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm);
+                    let mut b = TyBuilder::adt(self.db, box_adt).push(inner_ty);
+
+                    if let Some(alloc_ty) = alloc_ty {
+                        b = b.push(alloc_ty);
+                    }
+                    b.fill_with_defaults(self.db, || self.table.new_type_var()).build()
+                }
+                None => self.err_ty(),
+            },
+            Pat::ConstBlock(expr) => {
+                self.infer_expr(*expr, &Expectation::has_type(expected.clone()))
+            }
+            Pat::Missing => self.err_ty(),
+        };
+        // use a new type variable if we got error type here
+        let ty = self.insert_type_vars_shallow(ty);
+        if !self.unify(&ty, &expected) {
+            self.result
+                .type_mismatches
+                .insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
+        }
+        self.write_pat_ty(pat, ty.clone());
+        ty
+    }
+}
+
+/// Reports whether `pat` is a "non-reference pattern", i.e. one that match
+/// ergonomics may auto-dereference the scrutinee for (RFC 2005).
+fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
+    match &body[pat] {
+        // An or-pattern is non-ref only if every alternative is.
+        Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
+        // A string literal pattern matches against `&str`, so it counts as
+        // a reference pattern; all other literals are non-ref.
+        Pat::Lit(expr) => !matches!(body[*expr], Expr::Literal(Literal::String(..))),
+        // `x @ subpat` (without `ref`) defers to the sub-pattern.
+        Pat::Bind {
+            mode: BindingAnnotation::Mutable | BindingAnnotation::Unannotated,
+            subpat: Some(subpat),
+            ..
+        } => is_non_ref_pat(body, *subpat),
+        // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
+        Pat::Tuple { .. }
+        | Pat::TupleStruct { .. }
+        | Pat::Record { .. }
+        | Pat::Range { .. }
+        | Pat::Slice { .. }
+        | Pat::Path(..)
+        | Pat::ConstBlock(..) => true,
+        Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
new file mode 100644
index 000000000..f580e09e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -0,0 +1,295 @@
+//! Path expression resolution.
+
+use chalk_ir::cast::Cast;
+use hir_def::{
+ path::{Path, PathSegment},
+ resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
+ AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
+};
+use hir_expand::name::Name;
+
+use crate::{
+ builder::ParamKind,
+ consteval,
+ method_resolution::{self, VisibleFromModule},
+ GenericArgData, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+ ValueTyDefId,
+};
+
+use super::{ExprOrPatId, InferenceContext, TraitRef};
+
+impl<'a> InferenceContext<'a> {
+    /// Infers the type of a value path (used by both path expressions and
+    /// path patterns), inserting fresh inference variables and normalizing
+    /// any associated types in the result.
+    pub(super) fn infer_path(
+        &mut self,
+        resolver: &Resolver,
+        path: &Path,
+        id: ExprOrPatId,
+    ) -> Option<Ty> {
+        let ty = self.resolve_value_path(resolver, path, id)?;
+        let ty = self.insert_type_vars(ty);
+        let ty = self.normalize_associated_types_in(ty);
+        Some(ty)
+    }
+
+    /// Resolves `path` in the value namespace and returns its type.
+    /// Handles both type-anchored paths (`<T>::foo`) and plain paths;
+    /// returns `None` when resolution fails (diagnostics are mostly FIXMEs).
+    fn resolve_value_path(
+        &mut self,
+        resolver: &Resolver,
+        path: &Path,
+        id: ExprOrPatId,
+    ) -> Option<Ty> {
+        let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+            if path.segments().is_empty() {
+                // This can't actually happen syntax-wise
+                return None;
+            }
+            // Lower the anchor type, walk any intermediate segments as a
+            // type-relative path, then resolve the final segment as an
+            // associated item of the resulting type.
+            let ty = self.make_ty(type_ref);
+            let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+            let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
+            let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
+            self.resolve_ty_assoc_item(
+                ty,
+                path.segments().last().expect("path had at least one segment").name,
+                id,
+            )?
+        } else {
+            let value_or_partial =
+                resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
+
+            match value_or_partial {
+                ResolveValueResult::ValueNs(it) => (it, None),
+                // Only a prefix resolved; the rest must be associated items.
+                ResolveValueResult::Partial(def, remaining_index) => {
+                    self.resolve_assoc_item(def, path, remaining_index, id)?
+                }
+            }
+        };
+
+        let typable: ValueTyDefId = match value {
+            ValueNs::LocalBinding(pat) => {
+                let ty = self.result.type_of_pat.get(pat)?.clone();
+                return Some(ty);
+            }
+            ValueNs::FunctionId(it) => it.into(),
+            ValueNs::ConstId(it) => it.into(),
+            ValueNs::StaticId(it) => it.into(),
+            ValueNs::StructId(it) => {
+                self.write_variant_resolution(id, it.into());
+
+                it.into()
+            }
+            ValueNs::EnumVariantId(it) => {
+                self.write_variant_resolution(id, it.into());
+
+                it.into()
+            }
+            ValueNs::ImplSelf(impl_id) => {
+                let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+                let substs = generics.placeholder_subst(self.db);
+                let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
+                // `Self` used as a value only makes sense for struct (tuple/unit)
+                // constructors.
+                if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
+                    let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
+                    return Some(ty);
+                } else {
+                    // FIXME: diagnostic, invalid Self reference
+                    return None;
+                }
+            }
+            ValueNs::GenericParam(it) => return Some(self.db.const_param_ty(it)),
+        };
+
+        // Combine the self substitution (from trait/impl resolution) with the
+        // generic args written on the path, filling any missing args with
+        // errors/unknown consts.
+        let parent_substs = self_subst.unwrap_or_else(|| Substitution::empty(Interner));
+        let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+        let substs = ctx.substs_from_path(path, typable, true);
+        let mut it = substs.as_slice(Interner)[parent_substs.len(Interner)..].iter().cloned();
+        let ty = TyBuilder::value_ty(self.db, typable)
+            .use_parent_substs(&parent_substs)
+            .fill(|x| {
+                it.next().unwrap_or_else(|| match x {
+                    ParamKind::Type => {
+                        GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+                    }
+                    ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
+                })
+            })
+            .build();
+        Some(ty)
+    }
+
+    /// Resolves the unresolved tail of a partially-resolved path
+    /// (`def` is what the prefix up to `remaining_index` resolved to).
+    fn resolve_assoc_item(
+        &mut self,
+        def: TypeNs,
+        path: &Path,
+        remaining_index: usize,
+        id: ExprOrPatId,
+    ) -> Option<(ValueNs, Option<Substitution>)> {
+        assert!(remaining_index < path.segments().len());
+        // there may be more intermediate segments between the resolved one and
+        // the end. Only the last segment needs to be resolved to a value; from
+        // the segments before that, we need to get either a type or a trait ref.
+
+        let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
+        let remaining_segments = path.segments().skip(remaining_index);
+        let is_before_last = remaining_segments.len() == 1;
+
+        match (def, is_before_last) {
+            (TypeNs::TraitId(trait_), true) => {
+                let segment =
+                    remaining_segments.last().expect("there should be at least one segment here");
+                let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+                let trait_ref =
+                    ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
+                self.resolve_trait_assoc_item(trait_ref, segment, id)
+            }
+            (def, _) => {
+                // Either we already have a type (e.g. `Vec::new`), or we have a
+                // trait but it's not the last segment, so the next segment
+                // should resolve to an associated type of that trait (e.g. `<T
+                // as Iterator>::Item::default`)
+                let remaining_segments_for_ty =
+                    remaining_segments.take(remaining_segments.len() - 1);
+                let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+                let (ty, _) = ctx.lower_partly_resolved_path(
+                    def,
+                    resolved_segment,
+                    remaining_segments_for_ty,
+                    true,
+                );
+                if let TyKind::Error = ty.kind(Interner) {
+                    return None;
+                }
+
+                let ty = self.insert_type_vars(ty);
+                let ty = self.normalize_associated_types_in(ty);
+
+                let segment =
+                    remaining_segments.last().expect("there should be at least one segment here");
+
+                self.resolve_ty_assoc_item(ty, segment.name, id)
+            }
+        }
+    }
+
+    /// Looks up `segment` as a function or const of the trait in `trait_ref`
+    /// (e.g. `Default::default`). Associated types cannot be values.
+    fn resolve_trait_assoc_item(
+        &mut self,
+        trait_ref: TraitRef,
+        segment: PathSegment<'_>,
+        id: ExprOrPatId,
+    ) -> Option<(ValueNs, Option<Substitution>)> {
+        let trait_ = trait_ref.hir_trait_id();
+        let item =
+            self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
+                match item {
+                    AssocItemId::FunctionId(func) => {
+                        if segment.name == &self.db.function_data(func).name {
+                            Some(AssocItemId::FunctionId(func))
+                        } else {
+                            None
+                        }
+                    }
+
+                    AssocItemId::ConstId(konst) => {
+                        if self
+                            .db
+                            .const_data(konst)
+                            .name
+                            .as_ref()
+                            .map_or(false, |n| n == segment.name)
+                        {
+                            Some(AssocItemId::ConstId(konst))
+                        } else {
+                            None
+                        }
+                    }
+                    AssocItemId::TypeAliasId(_) => None,
+                }
+            })?;
+        let def = match item {
+            AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
+            AssocItemId::ConstId(c) => ValueNs::ConstId(c),
+            AssocItemId::TypeAliasId(_) => unreachable!(),
+        };
+
+        self.write_assoc_resolution(id, item);
+        Some((def, Some(trait_ref.substitution)))
+    }
+
+    /// Resolves `name` as an associated item (or enum variant) of `ty`, using
+    /// method-resolution machinery in path-lookup mode.
+    fn resolve_ty_assoc_item(
+        &mut self,
+        ty: Ty,
+        name: &Name,
+        id: ExprOrPatId,
+    ) -> Option<(ValueNs, Option<Substitution>)> {
+        if let TyKind::Error = ty.kind(Interner) {
+            return None;
+        }
+
+        // Enum variants shadow assoc items of the same name.
+        if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
+            return Some(result);
+        }
+
+        let canonical_ty = self.canonicalize(ty.clone());
+        let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+        method_resolution::iterate_method_candidates(
+            &canonical_ty.value,
+            self.db,
+            self.table.trait_env.clone(),
+            &traits_in_scope,
+            VisibleFromModule::Filter(self.resolver.module()),
+            Some(name),
+            method_resolution::LookupMode::Path,
+            move |_ty, item| {
+                let (def, container) = match item {
+                    AssocItemId::FunctionId(f) => {
+                        (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
+                    }
+                    AssocItemId::ConstId(c) => {
+                        (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
+                    }
+                    AssocItemId::TypeAliasId(_) => unreachable!(),
+                };
+                // Build the self substitution depending on whether the item
+                // lives in an impl or directly in a trait.
+                let substs = match container {
+                    ItemContainerId::ImplId(impl_id) => {
+                        let impl_substs = TyBuilder::subst_for_def(self.db, impl_id)
+                            .fill_with_inference_vars(&mut self.table)
+                            .build();
+                        let impl_self_ty =
+                            self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
+                        self.unify(&impl_self_ty, &ty);
+                        Some(impl_substs)
+                    }
+                    ItemContainerId::TraitId(trait_) => {
+                        // we're picking this method
+                        let trait_ref = TyBuilder::trait_ref(self.db, trait_)
+                            .push(ty.clone())
+                            .fill_with_inference_vars(&mut self.table)
+                            .build();
+                        self.push_obligation(trait_ref.clone().cast(Interner));
+                        Some(trait_ref.substitution)
+                    }
+                    ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+                };
+
+                self.write_assoc_resolution(id, item);
+                Some((def, substs))
+            },
+        )
+    }
+
+    /// If `ty` is an enum, resolves `name` as one of its variants.
+    fn resolve_enum_variant_on_ty(
+        &mut self,
+        ty: &Ty,
+        name: &Name,
+        id: ExprOrPatId,
+    ) -> Option<(ValueNs, Option<Substitution>)> {
+        let ty = self.resolve_ty_shallow(ty);
+        let (enum_id, subst) = match ty.as_adt() {
+            Some((AdtId::EnumId(e), subst)) => (e, subst),
+            _ => return None,
+        };
+        let enum_data = self.db.enum_data(enum_id);
+        let local_id = enum_data.variant(name)?;
+        let variant = EnumVariantId { parent: enum_id, local_id };
+        self.write_variant_resolution(id, variant.into());
+        Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
new file mode 100644
index 000000000..e77b55670
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -0,0 +1,738 @@
+//! Unification and canonicalization logic.
+
+use std::{fmt, mem, sync::Arc};
+
+use chalk_ir::{
+ cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy,
+ IntTy, NoSolution, TyVariableKind, UniverseIndex,
+};
+use chalk_solve::infer::ParameterEnaVariableExt;
+use ena::unify::UnifyKey;
+use hir_expand::name;
+use stdx::never;
+
+use super::{InferOk, InferResult, InferenceContext, TypeError};
+use crate::{
+ db::HirDatabase, fold_tys, static_lifetime, traits::FnTrait, AliasEq, AliasTy, BoundVar,
+ Canonical, Const, DebruijnIndex, GenericArg, GenericArgData, Goal, Guidance, InEnvironment,
+ InferenceVar, Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution,
+ Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
+};
+
+impl<'a> InferenceContext<'a> {
+    /// Convenience wrapper delegating to [`InferenceTable::canonicalize`].
+    pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+        &mut self,
+        t: T,
+    ) -> Canonicalized<T>
+    where
+        T: HasInterner<Interner = Interner>,
+    {
+        self.table.canonicalize(t)
+    }
+}
+
+/// A value canonicalized against the inference table, together with the
+/// inference variables that were abstracted away, so a trait-solver solution
+/// can later be mapped back onto them (see `apply_solution`).
+#[derive(Debug, Clone)]
+pub(crate) struct Canonicalized<T>
+where
+    T: HasInterner<Interner = Interner>,
+{
+    pub(crate) value: Canonical<T>,
+    // The original inference variables, indexed in binder order.
+    free_vars: Vec<GenericArg>,
+}
+
+impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
+    /// Applies a trait-solver solution back onto the inference table by
+    /// unifying each solved value with the corresponding original variable.
+    pub(super) fn apply_solution(
+        &self,
+        ctx: &mut InferenceTable<'_>,
+        solution: Canonical<Substitution>,
+    ) {
+        // the solution may contain new variables, which we need to convert to new inference vars
+        let new_vars = Substitution::from_iter(
+            Interner,
+            solution.binders.iter(Interner).map(|k| match &k.kind {
+                VariableKind::Ty(TyVariableKind::General) => ctx.new_type_var().cast(Interner),
+                VariableKind::Ty(TyVariableKind::Integer) => ctx.new_integer_var().cast(Interner),
+                VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner),
+                // Chalk can sometimes return new lifetime variables. We just use the static lifetime everywhere
+                VariableKind::Lifetime => static_lifetime().cast(Interner),
+                VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner),
+            }),
+        );
+        for (i, v) in solution.value.iter(Interner).enumerate() {
+            let var = self.free_vars[i].clone();
+            if let Some(ty) = v.ty(Interner) {
+                // eagerly replace projections in the type; we may be getting types
+                // e.g. from where clauses where this hasn't happened yet
+                let ty = ctx.normalize_associated_types_in(new_vars.apply(ty.clone(), Interner));
+                ctx.unify(var.assert_ty_ref(Interner), &ty);
+            } else {
+                let _ = ctx.try_unify(&var, &new_vars.apply(v.clone(), Interner));
+            }
+        }
+    }
+}
+
+/// Checks whether the two given canonical types can be unified in the given
+/// trait environment. The unification happens in a throwaway table, so no
+/// inference state is affected.
+pub fn could_unify(
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+    unify(db, env, tys).is_some()
+}
+
+/// Unifies the two canonical types in a fresh inference table; on success
+/// returns a substitution for the canonical binders, mapping any inference
+/// variables that remained unresolved back to their original bound vars.
+pub(crate) fn unify(
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    tys: &Canonical<(Ty, Ty)>,
+) -> Option<Substitution> {
+    let mut table = InferenceTable::new(db, env);
+    // Instantiate the canonical binders with fresh inference variables.
+    let vars = Substitution::from_iter(
+        Interner,
+        tys.binders.iter(Interner).map(|x| match &x.kind {
+            chalk_ir::VariableKind::Ty(_) => {
+                GenericArgData::Ty(table.new_type_var()).intern(Interner)
+            }
+            chalk_ir::VariableKind::Lifetime => {
+                GenericArgData::Ty(table.new_type_var()).intern(Interner)
+            } // FIXME: maybe wrong?
+            chalk_ir::VariableKind::Const(ty) => {
+                GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
+            }
+        }),
+    );
+    let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+    let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
+    if !table.unify(&ty1_with_vars, &ty2_with_vars) {
+        return None;
+    }
+    // default any type vars that weren't unified back to their original bound vars
+    // (kind of hacky)
+    let find_var = |iv| {
+        vars.iter(Interner).position(|v| match v.interned() {
+            chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+            chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+            chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+        } == Some(iv))
+    };
+    let fallback = |iv, kind, default, binder| match kind {
+        chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
+            .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
+        chalk_ir::VariableKind::Lifetime => find_var(iv)
+            .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
+        chalk_ir::VariableKind::Const(ty) => find_var(iv)
+            .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
+    };
+    Some(Substitution::from_iter(
+        Interner,
+        vars.iter(Interner).map(|v| table.resolve_with_fallback(v.clone(), &fallback)),
+    ))
+}
+
+/// Per-type-variable data that Chalk's inference table doesn't track for us.
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct TypeVariableData {
+    // `true` for variables created for diverging expressions; such variables
+    // fall back to `!` instead of an error/default type (see `fallback_value`).
+    diverging: bool,
+}
+
+type ChalkInferenceTable = chalk_solve::infer::InferenceTable<Interner>;
+
+/// The core unification state for type inference: wraps Chalk's inference
+/// table and additionally tracks per-variable data and not-yet-solved
+/// obligations.
+#[derive(Clone)]
+pub(crate) struct InferenceTable<'a> {
+    pub(crate) db: &'a dyn HirDatabase,
+    pub(crate) trait_env: Arc<TraitEnvironment>,
+    var_unification_table: ChalkInferenceTable,
+    // Indexed by inference-variable index; see `TypeVariableData`.
+    type_variable_table: Vec<TypeVariableData>,
+    pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
+}
+
+/// Saved state of an [`InferenceTable`], restorable via `rollback_to`.
+pub(crate) struct InferenceTableSnapshot {
+    var_table_snapshot: chalk_solve::infer::InferenceSnapshot<Interner>,
+    pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
+    type_variable_table_snapshot: Vec<TypeVariableData>,
+}
+
+impl<'a> InferenceTable<'a> {
+    /// Creates an empty inference table for the given trait environment.
+    pub(crate) fn new(db: &'a dyn HirDatabase, trait_env: Arc<TraitEnvironment>) -> Self {
+        InferenceTable {
+            db,
+            trait_env,
+            var_unification_table: ChalkInferenceTable::new(),
+            type_variable_table: Vec::new(),
+            pending_obligations: Vec::new(),
+        }
+    }
+
+    /// Chalk doesn't know about the `diverging` flag, so when it unifies two
+    /// type variables of which one is diverging, the chosen root might not be
+    /// diverging and we have no way of marking it as such at that time. This
+    /// function goes through all type variables and make sure their root is
+    /// marked as diverging if necessary, so that resolving them gives the right
+    /// result.
+    pub(super) fn propagate_diverging_flag(&mut self) {
+        for i in 0..self.type_variable_table.len() {
+            if !self.type_variable_table[i].diverging {
+                continue;
+            }
+            let v = InferenceVar::from(i as u32);
+            let root = self.var_unification_table.inference_var_root(v);
+            if let Some(data) = self.type_variable_table.get_mut(root.index() as usize) {
+                data.diverging = true;
+            }
+        }
+    }
+
+    /// Marks `iv` as (non-)diverging; see `propagate_diverging_flag`.
+    pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) {
+        self.type_variable_table[iv.index() as usize].diverging = diverging;
+    }
+
+    /// The type an unresolved variable falls back to: `!` for diverging
+    /// variables, otherwise an error type / default numeric type per kind.
+    fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty {
+        match kind {
+            _ if self
+                .type_variable_table
+                .get(iv.index() as usize)
+                .map_or(false, |data| data.diverging) =>
+            {
+                TyKind::Never
+            }
+            TyVariableKind::General => TyKind::Error,
+            TyVariableKind::Integer => TyKind::Scalar(Scalar::Int(IntTy::I32)),
+            TyVariableKind::Float => TyKind::Scalar(Scalar::Float(FloatTy::F64)),
+        }
+        .intern(Interner)
+    }
+
+    /// Canonicalizes `t` against the current table, recording the replaced
+    /// inference variables in the returned [`Canonicalized`].
+    pub(crate) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+        &mut self,
+        t: T,
+    ) -> Canonicalized<T>
+    where
+        T: HasInterner<Interner = Interner>,
+    {
+        // try to resolve obligations before canonicalizing, since this might
+        // result in new knowledge about variables
+        self.resolve_obligations_as_possible();
+        let result = self.var_unification_table.canonicalize(Interner, t);
+        let free_vars = result
+            .free_vars
+            .into_iter()
+            .map(|free_var| free_var.to_generic_arg(Interner))
+            .collect();
+        Canonicalized { value: result.quantified, free_vars }
+    }
+
+    /// Recurses through the given type, normalizing associated types mentioned
+    /// in it by replacing them by type variables and registering obligations to
+    /// resolve later. This should be done once for every type we get from some
+    /// type annotation (e.g. from a let type annotation, field type or function
+    /// call). `make_ty` handles this already, but e.g. for field types we need
+    /// to do it as well.
+    pub(crate) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
+        fold_tys(
+            ty,
+            |ty, _| match ty.kind(Interner) {
+                TyKind::Alias(AliasTy::Projection(proj_ty)) => {
+                    self.normalize_projection_ty(proj_ty.clone())
+                }
+                _ => ty,
+            },
+            DebruijnIndex::INNERMOST,
+        )
+    }
+
+    /// Replaces a projection type with a fresh variable constrained by an
+    /// `AliasEq` obligation.
+    pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
+        let var = self.new_type_var();
+        let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() };
+        let obligation = alias_eq.cast(Interner);
+        self.register_obligation(obligation);
+        var
+    }
+
+    // Ensure the side table has an entry for variable index `to_index`.
+    fn extend_type_variable_table(&mut self, to_index: usize) {
+        self.type_variable_table.extend(
+            (0..1 + to_index - self.type_variable_table.len())
+                .map(|_| TypeVariableData { diverging: false }),
+        );
+    }
+
+    /// Allocates a fresh type inference variable of the given kind.
+    fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty {
+        let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+        // Chalk might have created some type variables for its own purposes that we don't know about...
+        self.extend_type_variable_table(var.index() as usize);
+        assert_eq!(var.index() as usize, self.type_variable_table.len() - 1);
+        self.type_variable_table[var.index() as usize].diverging = diverging;
+        var.to_ty_with_kind(Interner, kind)
+    }
+
+    pub(crate) fn new_type_var(&mut self) -> Ty {
+        self.new_var(TyVariableKind::General, false)
+    }
+
+    pub(crate) fn new_integer_var(&mut self) -> Ty {
+        self.new_var(TyVariableKind::Integer, false)
+    }
+
+    pub(crate) fn new_float_var(&mut self) -> Ty {
+        self.new_var(TyVariableKind::Float, false)
+    }
+
+    /// A diverging variable: falls back to `!` if never otherwise resolved.
+    pub(crate) fn new_maybe_never_var(&mut self) -> Ty {
+        self.new_var(TyVariableKind::General, true)
+    }
+
+    pub(crate) fn new_const_var(&mut self, ty: Ty) -> Const {
+        let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+        var.to_const(Interner, ty)
+    }
+
+    pub(crate) fn new_lifetime_var(&mut self) -> Lifetime {
+        let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+        var.to_lifetime(Interner)
+    }
+
+    /// Resolves all inference variables in `t`, calling `fallback` for any
+    /// variable that is still unknown (or part of a cycle).
+    pub(crate) fn resolve_with_fallback<T>(
+        &mut self,
+        t: T,
+        fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
+    ) -> T
+    where
+        T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+    {
+        self.resolve_with_fallback_inner(&mut Vec::new(), t, &fallback)
+    }
+
+    /// Builds a substitution of fresh inference variables for the given
+    /// canonical binders.
+    pub(crate) fn fresh_subst(&mut self, binders: &[CanonicalVarKind<Interner>]) -> Substitution {
+        Substitution::from_iter(
+            Interner,
+            binders.iter().map(|kind| {
+                let param_infer_var =
+                    kind.map_ref(|&ui| self.var_unification_table.new_variable(ui));
+                param_infer_var.to_generic_arg(Interner)
+            }),
+        )
+    }
+
+    /// Instantiates a canonical value by substituting fresh inference
+    /// variables for its binders.
+    pub(crate) fn instantiate_canonical<T>(&mut self, canonical: Canonical<T>) -> T
+    where
+        T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + std::fmt::Debug,
+    {
+        let subst = self.fresh_subst(canonical.binders.as_slice(Interner));
+        subst.apply(canonical.value, Interner)
+    }
+
+    fn resolve_with_fallback_inner<T>(
+        &mut self,
+        var_stack: &mut Vec<InferenceVar>,
+        t: T,
+        fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
+    ) -> T
+    where
+        T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+    {
+        t.fold_with(
+            &mut resolve::Resolver { table: self, var_stack, fallback },
+            DebruijnIndex::INNERMOST,
+        )
+        .expect("fold failed unexpectedly")
+    }
+
+    /// Resolves all inference variables, replacing unresolved ones with their
+    /// default fallback values.
+    pub(crate) fn resolve_completely<T>(&mut self, t: T) -> T
+    where
+        T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+    {
+        self.resolve_with_fallback(t, &|_, _, d, _| d)
+    }
+
+    /// Unify two types and register new trait goals that arise from that.
+    pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+        let result = match self.try_unify(ty1, ty2) {
+            Ok(r) => r,
+            Err(_) => return false,
+        };
+        self.register_infer_ok(result);
+        true
+    }
+
+    /// Unify two types and return new trait goals arising from it, so the
+    /// caller needs to deal with them.
+    pub(crate) fn try_unify<T: Zip<Interner>>(&mut self, t1: &T, t2: &T) -> InferResult<()> {
+        match self.var_unification_table.relate(
+            Interner,
+            &self.db,
+            &self.trait_env.env,
+            chalk_ir::Variance::Invariant,
+            t1,
+            t2,
+        ) {
+            Ok(result) => Ok(InferOk { goals: result.goals, value: () }),
+            Err(chalk_ir::NoSolution) => Err(TypeError),
+        }
+    }
+
+    /// If `ty` is a type variable with known type, returns that type;
+    /// otherwise, return ty.
+    pub(crate) fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
+        self.resolve_obligations_as_possible();
+        self.var_unification_table.normalize_ty_shallow(Interner, ty).unwrap_or_else(|| ty.clone())
+    }
+
+    /// Captures the current unification state for later `rollback_to`.
+    pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot {
+        let var_table_snapshot = self.var_unification_table.snapshot();
+        let type_variable_table_snapshot = self.type_variable_table.clone();
+        let pending_obligations = self.pending_obligations.clone();
+        InferenceTableSnapshot {
+            var_table_snapshot,
+            pending_obligations,
+            type_variable_table_snapshot,
+        }
+    }
+
+    pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) {
+        self.var_unification_table.rollback_to(snapshot.var_table_snapshot);
+        self.type_variable_table = snapshot.type_variable_table_snapshot;
+        self.pending_obligations = snapshot.pending_obligations;
+    }
+
+    /// Runs `f` speculatively: the table is always rolled back afterwards,
+    /// but `f`'s return value is kept.
+    pub(crate) fn run_in_snapshot<T>(&mut self, f: impl FnOnce(&mut InferenceTable<'_>) -> T) -> T {
+        let snapshot = self.snapshot();
+        let result = f(self);
+        self.rollback_to(snapshot);
+        result
+    }
+
+    /// Checks an obligation without registering it. Useful mostly to check
+    /// whether a trait *might* be implemented before deciding to 'lock in' the
+    /// choice (during e.g. method resolution or deref).
+    pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option<Solution> {
+        let in_env = InEnvironment::new(&self.trait_env.env, goal);
+        let canonicalized = self.canonicalize(in_env);
+        let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value);
+        solution
+    }
+
+    /// Registers a goal in the current trait environment.
+    pub(crate) fn register_obligation(&mut self, goal: Goal) {
+        let in_env = InEnvironment::new(&self.trait_env.env, goal);
+        self.register_obligation_in_env(in_env)
+    }
+
+    fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) {
+        let canonicalized = self.canonicalize(goal);
+        // Only queue the obligation if it can't be discharged right away.
+        if !self.try_resolve_obligation(&canonicalized) {
+            self.pending_obligations.push(canonicalized);
+        }
+    }
+
+    /// Registers all goals produced by a successful unification.
+    pub(crate) fn register_infer_ok<T>(&mut self, infer_ok: InferOk<T>) {
+        infer_ok.goals.into_iter().for_each(|goal| self.register_obligation_in_env(goal));
+    }
+
+    /// Re-checks pending obligations until a fixpoint: each round, any
+    /// obligation whose variables have changed is re-registered (and thereby
+    /// possibly solved); the rest stay pending.
+    pub(crate) fn resolve_obligations_as_possible(&mut self) {
+        let _span = profile::span("resolve_obligations_as_possible");
+        let mut changed = true;
+        let mut obligations = Vec::new();
+        while changed {
+            changed = false;
+            mem::swap(&mut self.pending_obligations, &mut obligations);
+            for canonicalized in obligations.drain(..) {
+                if !self.check_changed(&canonicalized) {
+                    self.pending_obligations.push(canonicalized);
+                    continue;
+                }
+                changed = true;
+                let uncanonical = chalk_ir::Substitute::apply(
+                    &canonicalized.free_vars,
+                    canonicalized.value.value,
+                    Interner,
+                );
+                self.register_obligation_in_env(uncanonical);
+            }
+        }
+    }
+
+    /// Runs `f` in a snapshot that is rolled back afterwards, then replaces
+    /// any inference variables that `f` created with fresh ones in the result,
+    /// so no rolled-back variables leak out.
+    pub(crate) fn fudge_inference<T: TypeFoldable<Interner>>(
+        &mut self,
+        f: impl FnOnce(&mut Self) -> T,
+    ) -> T {
+        use chalk_ir::fold::TypeFolder;
+        struct VarFudger<'a, 'b> {
+            table: &'a mut InferenceTable<'b>,
+            highest_known_var: InferenceVar,
+        }
+        impl<'a, 'b> TypeFolder<Interner> for VarFudger<'a, 'b> {
+            type Error = NoSolution;
+
+            fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+                self
+            }
+
+            fn interner(&self) -> Interner {
+                Interner
+            }
+
+            fn fold_inference_ty(
+                &mut self,
+                var: chalk_ir::InferenceVar,
+                kind: TyVariableKind,
+                _outer_binder: chalk_ir::DebruijnIndex,
+            ) -> chalk_ir::Fallible<chalk_ir::Ty<Interner>> {
+                Ok(if var < self.highest_known_var {
+                    var.to_ty(Interner, kind)
+                } else {
+                    self.table.new_type_var()
+                })
+            }
+
+            fn fold_inference_lifetime(
+                &mut self,
+                var: chalk_ir::InferenceVar,
+                _outer_binder: chalk_ir::DebruijnIndex,
+            ) -> chalk_ir::Fallible<chalk_ir::Lifetime<Interner>> {
+                Ok(if var < self.highest_known_var {
+                    var.to_lifetime(Interner)
+                } else {
+                    self.table.new_lifetime_var()
+                })
+            }
+
+            fn fold_inference_const(
+                &mut self,
+                ty: chalk_ir::Ty<Interner>,
+                var: chalk_ir::InferenceVar,
+                _outer_binder: chalk_ir::DebruijnIndex,
+            ) -> chalk_ir::Fallible<chalk_ir::Const<Interner>> {
+                Ok(if var < self.highest_known_var {
+                    var.to_const(Interner, ty)
+                } else {
+                    self.table.new_const_var(ty)
+                })
+            }
+        }
+
+        // Any variable created after this marker is one that `f` introduced
+        // inside the snapshot and must be fudged after rollback.
+        let snapshot = self.snapshot();
+        let highest_known_var = self.new_type_var().inference_var(Interner).expect("inference_var");
+        let result = f(self);
+        self.rollback_to(snapshot);
+        result
+            .fold_with(&mut VarFudger { table: self, highest_known_var }, DebruijnIndex::INNERMOST)
+            .expect("fold_with with VarFudger")
+    }
+
+    /// This checks whether any of the free variables in the `canonicalized`
+    /// have changed (either been unified with another variable, or with a
+    /// value). If this is not the case, we don't need to try to solve the goal
+    /// again -- it'll give the same result as last time.
+    fn check_changed(&mut self, canonicalized: &Canonicalized<InEnvironment<Goal>>) -> bool {
+        canonicalized.free_vars.iter().any(|var| {
+            let iv = match var.data(Interner) {
+                chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+                chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+                chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+            }
+            .expect("free var is not inference var");
+            if self.var_unification_table.probe_var(iv).is_some() {
+                return true;
+            }
+            let root = self.var_unification_table.inference_var_root(iv);
+            iv != root
+        })
+    }
+
+    /// Solves one canonicalized obligation via the trait solver; returns
+    /// `true` if it can be dropped from the pending list (uniquely solved, or
+    /// determined unsolvable) and `false` if it should stay pending.
+    fn try_resolve_obligation(
+        &mut self,
+        canonicalized: &Canonicalized<InEnvironment<Goal>>,
+    ) -> bool {
+        let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value.clone());
+
+        match solution {
+            Some(Solution::Unique(canonical_subst)) => {
+                canonicalized.apply_solution(
+                    self,
+                    Canonical {
+                        binders: canonical_subst.binders,
+                        // FIXME: handle constraints
+                        value: canonical_subst.value.subst,
+                    },
+                );
+                true
+            }
+            Some(Solution::Ambig(Guidance::Definite(substs))) => {
+                canonicalized.apply_solution(self, substs);
+                false
+            }
+            Some(_) => {
+                // FIXME use this when trying to resolve everything at the end
+                false
+            }
+            None => {
+                // FIXME obligation cannot be fulfilled => diagnostic
+                true
+            }
+        }
+    }
+
+    /// Returns parameter and return types if `ty` is callable, either
+    /// directly or through an `FnOnce` implementation.
+    pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+        match ty.callable_sig(self.db) {
+            Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
+            None => self.callable_sig_from_fn_trait(ty, num_args),
+        }
+    }
+
+    /// Checks whether `ty` implements `FnOnce` taking `num_args` arguments;
+    /// if so, returns fresh variables for the argument types together with
+    /// the (normalized) `Output` type.
+    fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+        let krate = self.trait_env.krate;
+        let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
+        let output_assoc_type =
+            self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
+
+        // `FnOnce` takes its arguments as a single tuple.
+        let mut arg_tys = vec![];
+        let arg_ty = TyBuilder::tuple(num_args)
+            .fill(|x| {
+                let arg = match x {
+                    ParamKind::Type => self.new_type_var(),
+                    ParamKind::Const(ty) => {
+                        never!("Tuple with const parameter");
+                        return GenericArgData::Const(self.new_const_var(ty.clone()))
+                            .intern(Interner);
+                    }
+                };
+                arg_tys.push(arg.clone());
+                GenericArgData::Ty(arg).intern(Interner)
+            })
+            .build();
+
+        let projection = {
+            let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type);
+            if b.remaining() != 2 {
+                return None;
+            }
+            b.push(ty.clone()).push(arg_ty).build()
+        };
+
+        let trait_env = self.trait_env.env.clone();
+        let obligation = InEnvironment {
+            goal: projection.trait_ref(self.db).cast(Interner),
+            environment: trait_env,
+        };
+        let canonical = self.canonicalize(obligation.clone());
+        if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() {
+            self.register_obligation(obligation.goal);
+            let return_ty = self.normalize_projection_ty(projection);
+            Some((arg_tys, return_ty))
+        } else {
+            None
+        }
+    }
+}
+
+// Compact `Debug` impl that reports only the number of type variables.
+impl<'a> fmt::Debug for InferenceTable<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish()
+    }
+}
+
+/// Implementation detail of `resolve_with_fallback`: a Chalk `TypeFolder`
+/// that substitutes known values for inference variables and calls a
+/// fallback for unresolved (or cyclic) ones.
+mod resolve {
+    use super::InferenceTable;
+    use crate::{
+        ConcreteConst, Const, ConstData, ConstValue, DebruijnIndex, GenericArg, InferenceVar,
+        Interner, Lifetime, Ty, TyVariableKind, VariableKind,
+    };
+    use chalk_ir::{
+        cast::Cast,
+        fold::{TypeFoldable, TypeFolder},
+        Fallible, NoSolution,
+    };
+    use hir_def::type_ref::ConstScalar;
+
+    pub(super) struct Resolver<'a, 'b, F> {
+        pub(super) table: &'a mut InferenceTable<'b>,
+        // Variables currently being resolved; used to detect cycles.
+        pub(super) var_stack: &'a mut Vec<InferenceVar>,
+        // Produces a replacement for variables with no known value.
+        pub(super) fallback: F,
+    }
+    impl<'a, 'b, 'i, F> TypeFolder<Interner> for Resolver<'a, 'b, F>
+    where
+        F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg + 'i,
+    {
+        type Error = NoSolution;
+
+        fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+            self
+        }
+
+        fn interner(&self) -> Interner {
+            Interner
+        }
+
+        fn fold_inference_ty(
+            &mut self,
+            var: InferenceVar,
+            kind: TyVariableKind,
+            outer_binder: DebruijnIndex,
+        ) -> Fallible<Ty> {
+            let var = self.table.var_unification_table.inference_var_root(var);
+            if self.var_stack.contains(&var) {
+                // recursive type
+                let default = self.table.fallback_value(var, kind).cast(Interner);
+                return Ok((self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
+                    .assert_ty_ref(Interner)
+                    .clone());
+            }
+            let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+                // known_ty may contain other variables that are known by now
+                self.var_stack.push(var);
+                let result =
+                    known_ty.fold_with(self, outer_binder).expect("fold failed unexpectedly");
+                self.var_stack.pop();
+                result.assert_ty_ref(Interner).clone()
+            } else {
+                let default = self.table.fallback_value(var, kind).cast(Interner);
+                (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
+                    .assert_ty_ref(Interner)
+                    .clone()
+            };
+            Ok(result)
+        }
+
+        fn fold_inference_const(
+            &mut self,
+            ty: Ty,
+            var: InferenceVar,
+            outer_binder: DebruijnIndex,
+        ) -> Fallible<Const> {
+            let var = self.table.var_unification_table.inference_var_root(var);
+            // Consts have no kind-specific fallback; default to an unknown
+            // concrete const of the right type.
+            let default = ConstData {
+                ty: ty.clone(),
+                value: ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Unknown }),
+            }
+            .intern(Interner)
+            .cast(Interner);
+            if self.var_stack.contains(&var) {
+                // recursive
+                return Ok((self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
+                    .assert_const_ref(Interner)
+                    .clone());
+            }
+            let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+                // known_ty may contain other variables that are known by now
+                self.var_stack.push(var);
+                let result =
+                    known_ty.fold_with(self, outer_binder).expect("fold failed unexpectedly");
+                self.var_stack.pop();
+                result.assert_const_ref(Interner).clone()
+            } else {
+                (self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
+                    .assert_const_ref(Interner)
+                    .clone()
+            };
+            Ok(result)
+        }
+
+        fn fold_inference_lifetime(
+            &mut self,
+            _var: InferenceVar,
+            _outer_binder: DebruijnIndex,
+        ) -> Fallible<Lifetime> {
+            // fall back all lifetimes to 'static -- currently we don't deal
+            // with any lifetimes, but we can sometimes get some lifetime
+            // variables through Chalk's unification, and this at least makes
+            // sure we don't leak them outside of inference
+            Ok(crate::static_lifetime())
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
new file mode 100644
index 000000000..ca76e08fd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
@@ -0,0 +1,432 @@
+//! Implementation of the Chalk `Interner` trait, which allows customizing the
+//! representation of the various objects Chalk deals with (types, goals etc.).
+
+use crate::{chalk_db, tls, GenericArg};
+use base_db::salsa::InternId;
+use chalk_ir::{Goal, GoalData};
+use hir_def::{
+ intern::{impl_internable, InternStorage, Internable, Interned},
+ type_ref::ConstScalar,
+ TypeAliasId,
+};
+use smallvec::SmallVec;
+use std::{fmt, sync::Arc};
+
+#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
+pub struct Interner;
+
+#[derive(PartialEq, Eq, Hash)]
+pub struct InternedWrapper<T>(T);
+
+impl<T: fmt::Debug> fmt::Debug for InternedWrapper<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.0, f)
+ }
+}
+
+impl<T> std::ops::Deref for InternedWrapper<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl_internable!(
+ InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>,
+ InternedWrapper<SmallVec<[GenericArg; 2]>>,
+ InternedWrapper<chalk_ir::TyData<Interner>>,
+ InternedWrapper<chalk_ir::LifetimeData<Interner>>,
+ InternedWrapper<chalk_ir::ConstData<Interner>>,
+ InternedWrapper<ConstScalar>,
+ InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::ProgramClause<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::Variance>>,
+);
+
+impl chalk_ir::interner::Interner for Interner {
+ type InternedType = Interned<InternedWrapper<chalk_ir::TyData<Interner>>>;
+ type InternedLifetime = Interned<InternedWrapper<chalk_ir::LifetimeData<Self>>>;
+ type InternedConst = Interned<InternedWrapper<chalk_ir::ConstData<Self>>>;
+ type InternedConcreteConst = ConstScalar;
+ type InternedGenericArg = chalk_ir::GenericArgData<Self>;
+ type InternedGoal = Arc<GoalData<Self>>;
+ type InternedGoals = Vec<Goal<Self>>;
+ type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>;
+ type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
+ type InternedProgramClauses = Interned<InternedWrapper<Vec<chalk_ir::ProgramClause<Self>>>>;
+ type InternedQuantifiedWhereClauses =
+ Interned<InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Self>>>>;
+ type InternedVariableKinds = Interned<InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>>;
+ type InternedCanonicalVarKinds =
+ Interned<InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Self>>>>;
+ type InternedConstraints = Vec<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>>;
+ type InternedVariances = Interned<InternedWrapper<Vec<chalk_ir::Variance>>>;
+ type DefId = InternId;
+ type InternedAdtId = hir_def::AdtId;
+ type Identifier = TypeAliasId;
+ type FnAbi = ();
+
+ fn debug_adt_id(
+ type_kind_id: chalk_db::AdtId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt)))
+ }
+
+ fn debug_trait_id(
+ type_kind_id: chalk_db::TraitId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt)))
+ }
+
+ fn debug_assoc_type_id(
+ id: chalk_db::AssocTypeId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt)))
+ }
+
+ fn debug_alias(
+ alias: &chalk_ir::AliasTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ use std::fmt::Debug;
+ match alias {
+ chalk_ir::AliasTy::Projection(projection_ty) => {
+ Interner::debug_projection_ty(projection_ty, fmt)
+ }
+ chalk_ir::AliasTy::Opaque(opaque_ty) => Some(opaque_ty.fmt(fmt)),
+ }
+ }
+
+ fn debug_projection_ty(
+ proj: &chalk_ir::ProjectionTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
+ }
+
+ fn debug_opaque_ty(
+ opaque_ty: &chalk_ir::OpaqueTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", opaque_ty.opaque_ty_id))
+ }
+
+ fn debug_opaque_ty_id(
+ opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
+ }
+
+ fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", ty.data(Interner)))
+ }
+
+ fn debug_lifetime(
+ lifetime: &chalk_ir::Lifetime<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", lifetime.data(Interner)))
+ }
+
+ fn debug_generic_arg(
+ parameter: &GenericArg,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", parameter.data(Interner).inner_debug()))
+ }
+
+ fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ let goal_data = goal.data(Interner);
+ Some(write!(fmt, "{:?}", goal_data))
+ }
+
+ fn debug_goals(
+ goals: &chalk_ir::Goals<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", goals.debug(Interner)))
+ }
+
+ fn debug_program_clause_implication(
+ pci: &chalk_ir::ProgramClauseImplication<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", pci.debug(Interner)))
+ }
+
+ fn debug_substitution(
+ substitution: &chalk_ir::Substitution<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", substitution.debug(Interner)))
+ }
+
+ fn debug_separator_trait_ref(
+ separator_trait_ref: &chalk_ir::SeparatorTraitRef<'_, Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", separator_trait_ref.debug(Interner)))
+ }
+
+ fn debug_fn_def_id(
+ fn_def_id: chalk_ir::FnDefId<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
+ }
+ fn debug_const(
+ constant: &chalk_ir::Const<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", constant.data(Interner)))
+ }
+ fn debug_variable_kinds(
+ variable_kinds: &chalk_ir::VariableKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", variable_kinds.as_slice(Interner)))
+ }
+ fn debug_variable_kinds_with_angles(
+ variable_kinds: &chalk_ir::VariableKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", variable_kinds.inner_debug(Interner)))
+ }
+ fn debug_canonical_var_kinds(
+ canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", canonical_var_kinds.as_slice(Interner)))
+ }
+ fn debug_program_clause(
+ clause: &chalk_ir::ProgramClause<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clause.data(Interner)))
+ }
+ fn debug_program_clauses(
+ clauses: &chalk_ir::ProgramClauses<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
+ }
+ fn debug_quantified_where_clauses(
+ clauses: &chalk_ir::QuantifiedWhereClauses<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
+ }
+
+ fn intern_ty(self, kind: chalk_ir::TyKind<Self>) -> Self::InternedType {
+ let flags = kind.compute_flags(self);
+ Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags }))
+ }
+
+ fn ty_data<'a>(self, ty: &'a Self::InternedType) -> &'a chalk_ir::TyData<Self> {
+ &ty.0
+ }
+
+ fn intern_lifetime(self, lifetime: chalk_ir::LifetimeData<Self>) -> Self::InternedLifetime {
+ Interned::new(InternedWrapper(lifetime))
+ }
+
+ fn lifetime_data<'a>(
+ self,
+ lifetime: &'a Self::InternedLifetime,
+ ) -> &'a chalk_ir::LifetimeData<Self> {
+ &lifetime.0
+ }
+
+ fn intern_const(self, constant: chalk_ir::ConstData<Self>) -> Self::InternedConst {
+ Interned::new(InternedWrapper(constant))
+ }
+
+ fn const_data<'a>(self, constant: &'a Self::InternedConst) -> &'a chalk_ir::ConstData<Self> {
+ &constant.0
+ }
+
+ fn const_eq(
+ self,
+ _ty: &Self::InternedType,
+ c1: &Self::InternedConcreteConst,
+ c2: &Self::InternedConcreteConst,
+ ) -> bool {
+ (c1 == &ConstScalar::Unknown) || (c2 == &ConstScalar::Unknown) || (c1 == c2)
+ }
+
+ fn intern_generic_arg(
+ self,
+ parameter: chalk_ir::GenericArgData<Self>,
+ ) -> Self::InternedGenericArg {
+ parameter
+ }
+
+ fn generic_arg_data<'a>(
+ self,
+ parameter: &'a Self::InternedGenericArg,
+ ) -> &'a chalk_ir::GenericArgData<Self> {
+ parameter
+ }
+
+ fn intern_goal(self, goal: GoalData<Self>) -> Self::InternedGoal {
+ Arc::new(goal)
+ }
+
+ fn intern_goals<E>(
+ self,
+ data: impl IntoIterator<Item = Result<Goal<Self>, E>>,
+ ) -> Result<Self::InternedGoals, E> {
+ data.into_iter().collect()
+ }
+
+ fn goal_data<'a>(self, goal: &'a Self::InternedGoal) -> &'a GoalData<Self> {
+ goal
+ }
+
+ fn goals_data<'a>(self, goals: &'a Self::InternedGoals) -> &'a [Goal<Interner>] {
+ goals
+ }
+
+ fn intern_substitution<E>(
+ self,
+ data: impl IntoIterator<Item = Result<GenericArg, E>>,
+ ) -> Result<Self::InternedSubstitution, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn substitution_data<'a>(
+ self,
+ substitution: &'a Self::InternedSubstitution,
+ ) -> &'a [GenericArg] {
+ &substitution.as_ref().0
+ }
+
+ fn intern_program_clause(
+ self,
+ data: chalk_ir::ProgramClauseData<Self>,
+ ) -> Self::InternedProgramClause {
+ data
+ }
+
+ fn program_clause_data<'a>(
+ self,
+ clause: &'a Self::InternedProgramClause,
+ ) -> &'a chalk_ir::ProgramClauseData<Self> {
+ clause
+ }
+
+ fn intern_program_clauses<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::ProgramClause<Self>, E>>,
+ ) -> Result<Self::InternedProgramClauses, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn program_clauses_data<'a>(
+ self,
+ clauses: &'a Self::InternedProgramClauses,
+ ) -> &'a [chalk_ir::ProgramClause<Self>] {
+ clauses
+ }
+
+ fn intern_quantified_where_clauses<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::QuantifiedWhereClause<Self>, E>>,
+ ) -> Result<Self::InternedQuantifiedWhereClauses, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn quantified_where_clauses_data<'a>(
+ self,
+ clauses: &'a Self::InternedQuantifiedWhereClauses,
+ ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
+ clauses
+ }
+
+ fn intern_generic_arg_kinds<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::VariableKind<Self>, E>>,
+ ) -> Result<Self::InternedVariableKinds, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn variable_kinds_data<'a>(
+ self,
+ parameter_kinds: &'a Self::InternedVariableKinds,
+ ) -> &'a [chalk_ir::VariableKind<Self>] {
+ &parameter_kinds.as_ref().0
+ }
+
+ fn intern_canonical_var_kinds<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::CanonicalVarKind<Self>, E>>,
+ ) -> Result<Self::InternedCanonicalVarKinds, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn canonical_var_kinds_data<'a>(
+ self,
+ canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
+ ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
+ canonical_var_kinds
+ }
+
+ fn intern_constraints<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>, E>>,
+ ) -> Result<Self::InternedConstraints, E> {
+ data.into_iter().collect()
+ }
+
+ fn constraints_data<'a>(
+ self,
+ constraints: &'a Self::InternedConstraints,
+ ) -> &'a [chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
+ constraints
+ }
+ fn debug_closure_id(
+ _fn_def_id: chalk_ir::ClosureId<Self>,
+ _fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ None
+ }
+ fn debug_constraints(
+ _clauses: &chalk_ir::Constraints<Self>,
+ _fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ None
+ }
+
+ fn intern_variances<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::Variance, E>>,
+ ) -> Result<Self::InternedVariances, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn variances_data<'a>(
+ self,
+ variances: &'a Self::InternedVariances,
+ ) -> &'a [chalk_ir::Variance] {
+ variances
+ }
+}
+
+impl chalk_ir::interner::HasInterner for Interner {
+ type Interner = Self;
+}
+
+#[macro_export]
+macro_rules! has_interner {
+ ($t:ty) => {
+ impl HasInterner for $t {
+ type Interner = crate::Interner;
+ }
+ };
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
new file mode 100644
index 000000000..5a5d610e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -0,0 +1,525 @@
+//! The type system. We currently use this to infer types for completion, hover
+//! information and various assists.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod autoderef;
+mod builder;
+mod chalk_db;
+mod chalk_ext;
+pub mod consteval;
+mod infer;
+mod interner;
+mod lower;
+mod mapping;
+mod tls;
+mod utils;
+mod walk;
+pub mod db;
+pub mod diagnostics;
+pub mod display;
+pub mod method_resolution;
+pub mod primitive;
+pub mod traits;
+
+#[cfg(test)]
+mod tests;
+#[cfg(test)]
+mod test_db;
+
+use std::sync::Arc;
+
+use chalk_ir::{
+ fold::{Shift, TypeFoldable},
+ interner::HasInterner,
+ NoSolution,
+};
+use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
+use itertools::Either;
+use utils::Generics;
+
+use crate::{consteval::unknown_const, db::HirDatabase, utils::generics};
+
+pub use autoderef::autoderef;
+pub use builder::{ParamKind, TyBuilder};
+pub use chalk_ext::*;
+pub use infer::{
+ could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic,
+ InferenceResult,
+};
+pub use interner::Interner;
+pub use lower::{
+ associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId,
+ TyLoweringContext, ValueTyDefId,
+};
+pub use mapping::{
+ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
+ lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id,
+ to_placeholder_idx,
+};
+pub use traits::TraitEnvironment;
+pub use utils::{all_super_traits, is_fn_unsafe_to_call};
+pub use walk::TypeWalk;
+
+pub use chalk_ir::{
+ cast::Cast, AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
+};
+
+pub type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
+pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub type FnDefId = chalk_ir::FnDefId<Interner>;
+pub type ClosureId = chalk_ir::ClosureId<Interner>;
+pub type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
+pub type PlaceholderIndex = chalk_ir::PlaceholderIndex;
+
+pub type VariableKind = chalk_ir::VariableKind<Interner>;
+pub type VariableKinds = chalk_ir::VariableKinds<Interner>;
+pub type CanonicalVarKinds = chalk_ir::CanonicalVarKinds<Interner>;
+pub type Binders<T> = chalk_ir::Binders<T>;
+pub type Substitution = chalk_ir::Substitution<Interner>;
+pub type GenericArg = chalk_ir::GenericArg<Interner>;
+pub type GenericArgData = chalk_ir::GenericArgData<Interner>;
+
+pub type Ty = chalk_ir::Ty<Interner>;
+pub type TyKind = chalk_ir::TyKind<Interner>;
+pub type DynTy = chalk_ir::DynTy<Interner>;
+pub type FnPointer = chalk_ir::FnPointer<Interner>;
+// pub type FnSubst = chalk_ir::FnSubst<Interner>;
+pub use chalk_ir::FnSubst;
+pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
+pub type AliasTy = chalk_ir::AliasTy<Interner>;
+pub type OpaqueTy = chalk_ir::OpaqueTy<Interner>;
+pub type InferenceVar = chalk_ir::InferenceVar;
+
+pub type Lifetime = chalk_ir::Lifetime<Interner>;
+pub type LifetimeData = chalk_ir::LifetimeData<Interner>;
+pub type LifetimeOutlives = chalk_ir::LifetimeOutlives<Interner>;
+
+pub type Const = chalk_ir::Const<Interner>;
+pub type ConstData = chalk_ir::ConstData<Interner>;
+pub type ConstValue = chalk_ir::ConstValue<Interner>;
+pub type ConcreteConst = chalk_ir::ConcreteConst<Interner>;
+
+pub type ChalkTraitId = chalk_ir::TraitId<Interner>;
+pub type TraitRef = chalk_ir::TraitRef<Interner>;
+pub type QuantifiedWhereClause = Binders<WhereClause>;
+pub type QuantifiedWhereClauses = chalk_ir::QuantifiedWhereClauses<Interner>;
+pub type Canonical<T> = chalk_ir::Canonical<T>;
+
+pub type FnSig = chalk_ir::FnSig<Interner>;
+
+pub type InEnvironment<T> = chalk_ir::InEnvironment<T>;
+pub type Environment = chalk_ir::Environment<Interner>;
+pub type DomainGoal = chalk_ir::DomainGoal<Interner>;
+pub type Goal = chalk_ir::Goal<Interner>;
+pub type AliasEq = chalk_ir::AliasEq<Interner>;
+pub type Solution = chalk_solve::Solution<Interner>;
+pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
+pub type Guidance = chalk_solve::Guidance<Interner>;
+pub type WhereClause = chalk_ir::WhereClause<Interner>;
+
+// FIXME: get rid of this
+pub fn subst_prefix(s: &Substitution, n: usize) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ s.as_slice(Interner)[..std::cmp::min(s.len(Interner), n)].iter().cloned(),
+ )
+}
+
+/// Return an index of a parameter in the generic type parameter list by it's id.
+pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
+ generics(db.upcast(), id.parent).param_idx(id)
+}
+
+pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
+where
+ T: TypeFoldable<Interner> + HasInterner<Interner = Interner>,
+{
+ Binders::empty(Interner, value.shifted_in_from(Interner, DebruijnIndex::ONE))
+}
+
+pub(crate) fn make_type_and_const_binders<T: HasInterner<Interner = Interner>>(
+ which_is_const: impl Iterator<Item = Option<Ty>>,
+ value: T,
+) -> Binders<T> {
+ Binders::new(
+ VariableKinds::from_iter(
+ Interner,
+ which_is_const.map(|x| {
+ if let Some(ty) = x {
+ chalk_ir::VariableKind::Const(ty)
+ } else {
+ chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+ }
+ }),
+ ),
+ value,
+ )
+}
+
+pub(crate) fn make_single_type_binders<T: HasInterner<Interner = Interner>>(
+ value: T,
+) -> Binders<T> {
+ Binders::new(
+ VariableKinds::from_iter(
+ Interner,
+ std::iter::once(chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)),
+ ),
+ value,
+ )
+}
+
+pub(crate) fn make_binders_with_count<T: HasInterner<Interner = Interner>>(
+ db: &dyn HirDatabase,
+ count: usize,
+ generics: &Generics,
+ value: T,
+) -> Binders<T> {
+ let it = generics.iter_id().take(count).map(|id| match id {
+ Either::Left(_) => None,
+ Either::Right(id) => Some(db.const_param_ty(id)),
+ });
+ crate::make_type_and_const_binders(it, value)
+}
+
+pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
+ db: &dyn HirDatabase,
+ generics: &Generics,
+ value: T,
+) -> Binders<T> {
+ make_binders_with_count(db, usize::MAX, generics, value)
+}
+
+// FIXME: get rid of this
+pub fn make_canonical<T: HasInterner<Interner = Interner>>(
+ value: T,
+ kinds: impl IntoIterator<Item = TyVariableKind>,
+) -> Canonical<T> {
+ let kinds = kinds.into_iter().map(|tk| {
+ chalk_ir::CanonicalVarKind::new(
+ chalk_ir::VariableKind::Ty(tk),
+ chalk_ir::UniverseIndex::ROOT,
+ )
+ });
+ Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
+}
+
+// FIXME: get rid of this, just replace it by FnPointer
+/// A function signature as seen by type inference: Several parameter types and
+/// one return type.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct CallableSig {
+ params_and_return: Arc<[Ty]>,
+ is_varargs: bool,
+}
+
+has_interner!(CallableSig);
+
+/// A polymorphic function signature.
+pub type PolyFnSig = Binders<CallableSig>;
+
+impl CallableSig {
+ pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty, is_varargs: bool) -> CallableSig {
+ params.push(ret);
+ CallableSig { params_and_return: params.into(), is_varargs }
+ }
+
+ pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
+ CallableSig {
+ // FIXME: what to do about lifetime params? -> return PolyFnSig
+ params_and_return: fn_ptr
+ .substitution
+ .clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("unexpected lifetime vars in fn ptr")
+ .0
+ .as_slice(Interner)
+ .iter()
+ .map(|arg| arg.assert_ty_ref(Interner).clone())
+ .collect(),
+ is_varargs: fn_ptr.sig.variadic,
+ }
+ }
+
+ pub fn to_fn_ptr(&self) -> FnPointer {
+ FnPointer {
+ num_binders: 0,
+ sig: FnSig { abi: (), safety: Safety::Safe, variadic: self.is_varargs },
+ substitution: FnSubst(Substitution::from_iter(
+ Interner,
+ self.params_and_return.iter().cloned(),
+ )),
+ }
+ }
+
+ pub fn params(&self) -> &[Ty] {
+ &self.params_and_return[0..self.params_and_return.len() - 1]
+ }
+
+ pub fn ret(&self) -> &Ty {
+ &self.params_and_return[self.params_and_return.len() - 1]
+ }
+}
+
+impl TypeFoldable<Interner> for CallableSig {
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn chalk_ir::fold::TypeFolder<Interner, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self, E> {
+ let vec = self.params_and_return.to_vec();
+ let folded = vec.fold_with(folder, outer_binder)?;
+ Ok(CallableSig { params_and_return: folded.into(), is_varargs: self.is_varargs })
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum ImplTraitId {
+ ReturnTypeImplTrait(hir_def::FunctionId, u16),
+ AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ReturnTypeImplTraits {
+ pub(crate) impl_traits: Vec<ReturnTypeImplTrait>,
+}
+
+has_interner!(ReturnTypeImplTraits);
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct ReturnTypeImplTrait {
+ pub(crate) bounds: Binders<Vec<QuantifiedWhereClause>>,
+}
+
+pub fn static_lifetime() -> Lifetime {
+ LifetimeData::Static.intern(Interner)
+}
+
+pub(crate) fn fold_free_vars<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ for_ty: impl FnMut(BoundVar, DebruijnIndex) -> Ty,
+ for_const: impl FnMut(Ty, BoundVar, DebruijnIndex) -> Const,
+) -> T {
+ use chalk_ir::{fold::TypeFolder, Fallible};
+ struct FreeVarFolder<F1, F2>(F1, F2);
+ impl<
+ 'i,
+ F1: FnMut(BoundVar, DebruijnIndex) -> Ty + 'i,
+ F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const + 'i,
+ > TypeFolder<Interner> for FreeVarFolder<F1, F2>
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_free_var_ty(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ Ok(self.0(bound_var, outer_binder))
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ Ok(self.1(ty, bound_var, outer_binder))
+ }
+ }
+ t.fold_with(&mut FreeVarFolder(for_ty, for_const), DebruijnIndex::INNERMOST)
+ .expect("fold failed unexpectedly")
+}
+
+pub(crate) fn fold_tys<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ mut for_ty: impl FnMut(Ty, DebruijnIndex) -> Ty,
+ binders: DebruijnIndex,
+) -> T {
+ fold_tys_and_consts(
+ t,
+ |x, d| match x {
+ Either::Left(x) => Either::Left(for_ty(x, d)),
+ Either::Right(x) => Either::Right(x),
+ },
+ binders,
+ )
+}
+
+pub(crate) fn fold_tys_and_consts<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ f: impl FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>,
+ binders: DebruijnIndex,
+) -> T {
+ use chalk_ir::{
+ fold::{TypeFolder, TypeSuperFoldable},
+ Fallible,
+ };
+ struct TyFolder<F>(F);
+ impl<'i, F: FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const> + 'i>
+ TypeFolder<Interner> for TyFolder<F>
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible<Ty> {
+ let ty = ty.super_fold_with(self.as_dyn(), outer_binder)?;
+ Ok(self.0(Either::Left(ty), outer_binder).left().unwrap())
+ }
+
+ fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Fallible<Const> {
+ Ok(self.0(Either::Right(c), outer_binder).right().unwrap())
+ }
+ }
+ t.fold_with(&mut TyFolder(f), binders).expect("fold failed unexpectedly")
+}
+
+/// 'Canonicalizes' the `t` by replacing any errors with new variables. Also
+/// ensures there are no unbound variables or inference variables anywhere in
+/// the `t`.
+pub fn replace_errors_with_variables<T>(t: &T) -> Canonical<T>
+where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + Clone,
+ T: HasInterner<Interner = Interner>,
+{
+ use chalk_ir::{
+ fold::{TypeFolder, TypeSuperFoldable},
+ Fallible,
+ };
+ struct ErrorReplacer {
+ vars: usize,
+ }
+ impl TypeFolder<Interner> for ErrorReplacer {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible<Ty> {
+ if let TyKind::Error = ty.kind(Interner) {
+ let index = self.vars;
+ self.vars += 1;
+ Ok(TyKind::BoundVar(BoundVar::new(outer_binder, index)).intern(Interner))
+ } else {
+ let ty = ty.super_fold_with(self.as_dyn(), outer_binder)?;
+ Ok(ty)
+ }
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ _var: InferenceVar,
+ _kind: TyVariableKind,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ if cfg!(debug_assertions) {
+ // we don't want to just panic here, because then the error message
+ // won't contain the whole thing, which would not be very helpful
+ Err(NoSolution)
+ } else {
+ Ok(TyKind::Error.intern(Interner))
+ }
+ }
+
+ fn fold_free_var_ty(
+ &mut self,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ if cfg!(debug_assertions) {
+ // we don't want to just panic here, because then the error message
+ // won't contain the whole thing, which would not be very helpful
+ Err(NoSolution)
+ } else {
+ Ok(TyKind::Error.intern(Interner))
+ }
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: Ty,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(unknown_const(ty))
+ }
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(unknown_const(ty))
+ }
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(static_lifetime())
+ }
+ }
+
+ fn fold_free_var_lifetime(
+ &mut self,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(static_lifetime())
+ }
+ }
+ }
+ let mut error_replacer = ErrorReplacer { vars: 0 };
+ let value = match t.clone().fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) {
+ Ok(t) => t,
+ Err(_) => panic!("Encountered unbound or inference vars in {:?}", t),
+ };
+ let kinds = (0..error_replacer.vars).map(|_| {
+ chalk_ir::CanonicalVarKind::new(
+ chalk_ir::VariableKind::Ty(TyVariableKind::General),
+ chalk_ir::UniverseIndex::ROOT,
+ )
+ });
+ Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
new file mode 100644
index 000000000..3ed9c941f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -0,0 +1,1778 @@
+//! Methods for lowering the HIR to types. There are two main cases here:
+//!
+//! - Lowering a type reference like `&usize` or `Option<foo::bar::Baz>` to a
+//! type: The entry point for this is `Ty::from_hir`.
+//! - Building the type for an item: This happens through the `type_for_def` query.
+//!
+//! This usually involves resolving names, collecting generic arguments etc.
+use std::{
+ cell::{Cell, RefCell},
+ iter,
+ sync::Arc,
+};
+
+use base_db::CrateId;
+use chalk_ir::{
+ cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
+};
+
+use hir_def::{
+ adt::StructKind,
+ body::{Expander, LowerCtx},
+ builtin_type::BuiltinType,
+ generics::{
+ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
+ },
+ intern::Interned,
+ lang_item::lang_attr,
+ path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
+ resolver::{HasResolver, Resolver, TypeNs},
+ type_ref::{
+ ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
+ },
+ AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
+ HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, StaticId, StructId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
+};
+use hir_expand::{name::Name, ExpandResult};
+use itertools::Either;
+use la_arena::ArenaMap;
+use rustc_hash::FxHashSet;
+use smallvec::SmallVec;
+use stdx::{impl_from, never};
+use syntax::{ast, SmolStr};
+
+use crate::{
+ all_super_traits,
+ consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic},
+ db::HirDatabase,
+ make_binders,
+ mapping::ToChalk,
+ static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
+ utils::Generics,
+ utils::{all_super_trait_refs, associated_type_by_name_including_super_traits, generics},
+ AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnPointer,
+ FnSig, FnSubst, GenericArgData, ImplTraitId, Interner, ParamKind, PolyFnSig, ProjectionTy,
+ QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, ReturnTypeImplTraits,
+ Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
+};
+
+#[derive(Debug)]
+pub struct TyLoweringContext<'a> {
+ pub db: &'a dyn HirDatabase,
+ pub resolver: &'a Resolver,
+ in_binders: DebruijnIndex,
+ /// Note: Conceptually, it's thinkable that we could be in a location where
+ /// some type params should be represented as placeholders, and others
+ /// should be converted to variables. I think in practice, this isn't
+ /// possible currently, so this should be fine for now.
+ pub type_param_mode: ParamLoweringMode,
+ pub impl_trait_mode: ImplTraitLoweringMode,
+ impl_trait_counter: Cell<u16>,
+ /// When turning `impl Trait` into opaque types, we have to collect the
+ /// bounds at the same time to get the IDs correct (without becoming too
+ /// complicated). I don't like using interior mutability (as for the
+ /// counter), but I've tried and failed to make the lifetimes work for
+ /// passing around a `&mut TyLoweringContext`. The core problem is that
+ /// we're grouping the mutable data (the counter and this field) together
+ /// with the immutable context (the references to the DB and resolver).
+ /// Splitting this up would be a possible fix.
+ opaque_type_data: RefCell<Vec<ReturnTypeImplTrait>>,
+ expander: RefCell<Option<Expander>>,
+ /// Tracks types with explicit `?Sized` bounds.
+ pub(crate) unsized_types: RefCell<FxHashSet<Ty>>,
+}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
+ let impl_trait_counter = Cell::new(0);
+ let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
+ let type_param_mode = ParamLoweringMode::Placeholder;
+ let in_binders = DebruijnIndex::INNERMOST;
+ let opaque_type_data = RefCell::new(Vec::new());
+ Self {
+ db,
+ resolver,
+ in_binders,
+ impl_trait_mode,
+ impl_trait_counter,
+ type_param_mode,
+ opaque_type_data,
+ expander: RefCell::new(None),
+ unsized_types: RefCell::default(),
+ }
+ }
+
+ pub fn with_debruijn<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext<'_>) -> T,
+ ) -> T {
+ let opaque_ty_data_vec = self.opaque_type_data.take();
+ let expander = self.expander.take();
+ let unsized_types = self.unsized_types.take();
+ let new_ctx = Self {
+ in_binders: debruijn,
+ impl_trait_counter: Cell::new(self.impl_trait_counter.get()),
+ opaque_type_data: RefCell::new(opaque_ty_data_vec),
+ expander: RefCell::new(expander),
+ unsized_types: RefCell::new(unsized_types),
+ ..*self
+ };
+ let result = f(&new_ctx);
+ self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
+ self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner());
+ self.expander.replace(new_ctx.expander.into_inner());
+ self.unsized_types.replace(new_ctx.unsized_types.into_inner());
+ result
+ }
+
+ pub fn with_shifted_in<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext<'_>) -> T,
+ ) -> T {
+ self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
+ }
+
+ pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
+ Self { impl_trait_mode, ..self }
+ }
+
+ pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self {
+ Self { type_param_mode, ..self }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ImplTraitLoweringMode {
+ /// `impl Trait` gets lowered into an opaque type that doesn't unify with
+ /// anything except itself. This is used in places where values flow 'out',
+ /// i.e. for arguments of the function we're currently checking, and return
+ /// types of functions we're calling.
+ Opaque,
+ /// `impl Trait` gets lowered into a type variable. Used for argument
+ /// position impl Trait when inside the respective function, since it allows
+ /// us to support that without Chalk.
+ Param,
+ /// `impl Trait` gets lowered into a variable that can unify with some
+ /// type. This is used in places where values flow 'in', i.e. for arguments
+ /// of functions we're calling, and the return type of the function we're
+ /// currently checking.
+ Variable,
+ /// `impl Trait` is disallowed and will be an error.
+ Disallowed,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ParamLoweringMode {
+ Placeholder,
+ Variable,
+}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn lower_ty(&self, type_ref: &TypeRef) -> Ty {
+ self.lower_ty_ext(type_ref).0
+ }
+
+ fn generics(&self) -> Generics {
+ generics(
+ self.db.upcast(),
+ self.resolver
+ .generic_def()
+ .expect("there should be generics if there's a generic param"),
+ )
+ }
+
+ pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
+ let mut res = None;
+ let ty = match type_ref {
+ TypeRef::Never => TyKind::Never.intern(Interner),
+ TypeRef::Tuple(inner) => {
+ let inner_tys = inner.iter().map(|tr| self.lower_ty(tr));
+ TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
+ .intern(Interner)
+ }
+ TypeRef::Path(path) => {
+ let (ty, res_) = self.lower_path(path);
+ res = res_;
+ ty
+ }
+ TypeRef::RawPtr(inner, mutability) => {
+ let inner_ty = self.lower_ty(inner);
+ TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(Interner)
+ }
+ TypeRef::Array(inner, len) => {
+ let inner_ty = self.lower_ty(inner);
+ let const_len = const_or_path_to_chalk(
+ self.db,
+ self.resolver,
+ TyBuilder::usize(),
+ len,
+ self.type_param_mode,
+ || self.generics(),
+ self.in_binders,
+ );
+
+ TyKind::Array(inner_ty, const_len).intern(Interner)
+ }
+ TypeRef::Slice(inner) => {
+ let inner_ty = self.lower_ty(inner);
+ TyKind::Slice(inner_ty).intern(Interner)
+ }
+ TypeRef::Reference(inner, _, mutability) => {
+ let inner_ty = self.lower_ty(inner);
+ let lifetime = static_lifetime();
+ TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty)
+ .intern(Interner)
+ }
+ TypeRef::Placeholder => TyKind::Error.intern(Interner),
+ TypeRef::Fn(params, is_varargs) => {
+ let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr)))
+ });
+ TyKind::Function(FnPointer {
+ num_binders: 0, // FIXME lower `for<'a> fn()` correctly
+ sig: FnSig { abi: (), safety: Safety::Safe, variadic: *is_varargs },
+ substitution: FnSubst(substs),
+ })
+ .intern(Interner)
+ }
+ TypeRef::DynTrait(bounds) => {
+ let self_ty =
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
+ let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ QuantifiedWhereClauses::from_iter(
+ Interner,
+ bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)),
+ )
+ });
+ let bounds = crate::make_single_type_binders(bounds);
+ TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
+ }
+ TypeRef::ImplTrait(bounds) => {
+ match self.impl_trait_mode {
+ ImplTraitLoweringMode::Opaque => {
+ let idx = self.impl_trait_counter.get();
+ self.impl_trait_counter.set(idx + 1);
+ let func = match self.resolver.generic_def() {
+ Some(GenericDefId::FunctionId(f)) => f,
+ _ => panic!("opaque impl trait lowering in non-function"),
+ };
+
+ assert!(idx as usize == self.opaque_type_data.borrow().len());
+ // this dance is to make sure the data is in the right
+ // place even if we encounter more opaque types while
+ // lowering the bounds
+ self.opaque_type_data.borrow_mut().push(ReturnTypeImplTrait {
+ bounds: crate::make_single_type_binders(Vec::new()),
+ });
+ // We don't want to lower the bounds inside the binders
+ // we're currently in, because they don't end up inside
+ // those binders. E.g. when we have `impl Trait<impl
+ // OtherTrait<T>>`, the `impl OtherTrait<T>` can't refer
+ // to the self parameter from `impl Trait`, and the
+ // bounds aren't actually stored nested within each
+ // other, but separately. So if the `T` refers to a type
+ // parameter of the outer function, it's just one binder
+ // away instead of two.
+ let actual_opaque_type_data = self
+ .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
+ ctx.lower_impl_trait(bounds, func)
+ });
+ self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
+
+ let impl_trait_id = ImplTraitId::ReturnTypeImplTrait(func, idx);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ let generics = generics(self.db.upcast(), func.into());
+ let parameters = generics.bound_vars_subst(self.db, self.in_binders);
+ TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
+ }
+ ImplTraitLoweringMode::Param => {
+ let idx = self.impl_trait_counter.get();
+ // FIXME we're probably doing something wrong here
+ self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+ if let Some(def) = self.resolver.generic_def() {
+ let generics = generics(self.db.upcast(), def);
+ let param = generics
+ .iter()
+ .filter(|(_, data)| {
+ matches!(
+ data,
+ TypeOrConstParamData::TypeParamData(data)
+ if data.provenance == TypeParamProvenance::ArgumentImplTrait
+ )
+ })
+ .nth(idx as usize)
+ .map_or(TyKind::Error, |(id, _)| {
+ TyKind::Placeholder(to_placeholder_idx(self.db, id))
+ });
+ param.intern(Interner)
+ } else {
+ TyKind::Error.intern(Interner)
+ }
+ }
+ ImplTraitLoweringMode::Variable => {
+ let idx = self.impl_trait_counter.get();
+ // FIXME we're probably doing something wrong here
+ self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+ let (
+ parent_params,
+ self_params,
+ list_params,
+ const_params,
+ _impl_trait_params,
+ ) = if let Some(def) = self.resolver.generic_def() {
+ let generics = generics(self.db.upcast(), def);
+ generics.provenance_split()
+ } else {
+ (0, 0, 0, 0, 0)
+ };
+ TyKind::BoundVar(BoundVar::new(
+ self.in_binders,
+ idx as usize + parent_params + self_params + list_params + const_params,
+ ))
+ .intern(Interner)
+ }
+ ImplTraitLoweringMode::Disallowed => {
+ // FIXME: report error
+ TyKind::Error.intern(Interner)
+ }
+ }
+ }
+ TypeRef::Macro(macro_call) => {
+ let (expander, recursion_start) = {
+ let mut expander = self.expander.borrow_mut();
+ if expander.is_some() {
+ (Some(expander), false)
+ } else {
+ *expander = Some(Expander::new(
+ self.db.upcast(),
+ macro_call.file_id,
+ self.resolver.module(),
+ ));
+ (Some(expander), true)
+ }
+ };
+ let ty = if let Some(mut expander) = expander {
+ let expander_mut = expander.as_mut().unwrap();
+ let macro_call = macro_call.to_node(self.db.upcast());
+ match expander_mut.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
+ Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
+ let ctx =
+ LowerCtx::new(self.db.upcast(), expander_mut.current_file_id());
+ let type_ref = TypeRef::from_ast(&ctx, expanded);
+
+ drop(expander);
+ let ty = self.lower_ty(&type_ref);
+
+ self.expander
+ .borrow_mut()
+ .as_mut()
+ .unwrap()
+ .exit(self.db.upcast(), mark);
+ Some(ty)
+ }
+ _ => None,
+ }
+ } else {
+ None
+ };
+ if recursion_start {
+ *self.expander.borrow_mut() = None;
+ }
+ ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
+ }
+ TypeRef::Error => TyKind::Error.intern(Interner),
+ };
+ (ty, res)
+ }
+
+ /// This is only for `generic_predicates_for_param`, where we can't just
+ /// lower the self types of the predicates since that could lead to cycles.
+ /// So we just check here if the `type_ref` resolves to a generic param, and which.
+ fn lower_ty_only_param(&self, type_ref: &TypeRef) -> Option<TypeOrConstParamId> {
+ let path = match type_ref {
+ TypeRef::Path(path) => path,
+ _ => return None,
+ };
+ if path.type_anchor().is_some() {
+ return None;
+ }
+ if path.segments().len() > 1 {
+ return None;
+ }
+ let resolution =
+ match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+ Some((it, None)) => it,
+ _ => return None,
+ };
+ match resolution {
+ TypeNs::GenericParam(param_id) => Some(param_id.into()),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn lower_ty_relative_path(
+ &self,
+ ty: Ty,
+ // We need the original resolution to lower `Self::AssocTy` correctly
+ res: Option<TypeNs>,
+ remaining_segments: PathSegments<'_>,
+ ) -> (Ty, Option<TypeNs>) {
+ match remaining_segments.len() {
+ 0 => (ty, res),
+ 1 => {
+ // resolve unselected assoc types
+ let segment = remaining_segments.first().unwrap();
+ (self.select_associated_type(res, segment), None)
+ }
+ _ => {
+ // FIXME report error (ambiguous associated type)
+ (TyKind::Error.intern(Interner), None)
+ }
+ }
+ }
+
+ pub(crate) fn lower_partly_resolved_path(
+ &self,
+ resolution: TypeNs,
+ resolved_segment: PathSegment<'_>,
+ remaining_segments: PathSegments<'_>,
+ infer_args: bool,
+ ) -> (Ty, Option<TypeNs>) {
+ let ty = match resolution {
+ TypeNs::TraitId(trait_) => {
+ let ty = match remaining_segments.len() {
+ 1 => {
+ let trait_ref =
+ self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
+ let segment = remaining_segments.first().unwrap();
+ let found = self
+ .db
+ .trait_data(trait_ref.hir_trait_id())
+ .associated_type_by_name(segment.name);
+ match found {
+ Some(associated_ty) => {
+ // FIXME handle type parameters on the segment
+ TyKind::Alias(AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution: trait_ref.substitution,
+ }))
+ .intern(Interner)
+ }
+ None => {
+ // FIXME: report error (associated type not found)
+ TyKind::Error.intern(Interner)
+ }
+ }
+ }
+ 0 => {
+ let self_ty = Some(
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+ .intern(Interner),
+ );
+ let trait_ref = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ ctx.lower_trait_ref_from_resolved_path(
+ trait_,
+ resolved_segment,
+ self_ty,
+ )
+ });
+ let dyn_ty = DynTy {
+ bounds: crate::make_single_type_binders(
+ QuantifiedWhereClauses::from_iter(
+ Interner,
+ Some(crate::wrap_empty_binders(WhereClause::Implemented(
+ trait_ref,
+ ))),
+ ),
+ ),
+ lifetime: static_lifetime(),
+ };
+ TyKind::Dyn(dyn_ty).intern(Interner)
+ }
+ _ => {
+ // FIXME report error (ambiguous associated type)
+ TyKind::Error.intern(Interner)
+ }
+ };
+ return (ty, None);
+ }
+ TypeNs::GenericParam(param_id) => {
+ let generics = generics(
+ self.db.upcast(),
+ self.resolver.generic_def().expect("generics in scope"),
+ );
+ match self.type_param_mode {
+ ParamLoweringMode::Placeholder => {
+ TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
+ }
+ ParamLoweringMode::Variable => {
+ let idx = generics.param_idx(param_id.into()).expect("matching generics");
+ TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
+ }
+ }
+ .intern(Interner)
+ }
+ TypeNs::SelfType(impl_id) => {
+ let generics = generics(self.db.upcast(), impl_id.into());
+ let substs = match self.type_param_mode {
+ ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
+ ParamLoweringMode::Variable => {
+ generics.bound_vars_subst(self.db, self.in_binders)
+ }
+ };
+ self.db.impl_self_ty(impl_id).substitute(Interner, &substs)
+ }
+ TypeNs::AdtSelfType(adt) => {
+ let generics = generics(self.db.upcast(), adt.into());
+ let substs = match self.type_param_mode {
+ ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
+ ParamLoweringMode::Variable => {
+ generics.bound_vars_subst(self.db, self.in_binders)
+ }
+ };
+ self.db.ty(adt.into()).substitute(Interner, &substs)
+ }
+
+ TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args),
+ TypeNs::BuiltinType(it) => {
+ self.lower_path_inner(resolved_segment, it.into(), infer_args)
+ }
+ TypeNs::TypeAliasId(it) => {
+ self.lower_path_inner(resolved_segment, it.into(), infer_args)
+ }
+ // FIXME: report error
+ TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
+ };
+ self.lower_ty_relative_path(ty, Some(resolution), remaining_segments)
+ }
+
+ pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
+ // Resolve the path (in type namespace)
+ if let Some(type_ref) = path.type_anchor() {
+ let (ty, res) = self.lower_ty_ext(type_ref);
+ return self.lower_ty_relative_path(ty, res, path.segments());
+ }
+ let (resolution, remaining_index) =
+ match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+ Some(it) => it,
+ None => return (TyKind::Error.intern(Interner), None),
+ };
+ let (resolved_segment, remaining_segments) = match remaining_index {
+ None => (
+ path.segments().last().expect("resolved path has at least one element"),
+ PathSegments::EMPTY,
+ ),
+ Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
+ };
+ self.lower_partly_resolved_path(resolution, resolved_segment, remaining_segments, false)
+ }
+
+ fn select_associated_type(&self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
+ let (def, res) = match (self.resolver.generic_def(), res) {
+ (Some(def), Some(res)) => (def, res),
+ _ => return TyKind::Error.intern(Interner),
+ };
+ let ty = named_associated_type_shorthand_candidates(
+ self.db,
+ def,
+ res,
+ Some(segment.name.clone()),
+ move |name, t, associated_ty| {
+ if name == segment.name {
+ let substs = match self.type_param_mode {
+ ParamLoweringMode::Placeholder => {
+ // if we're lowering to placeholders, we have to put
+ // them in now
+ let generics = generics(
+ self.db.upcast(),
+ self.resolver
+ .generic_def()
+ .expect("there should be generics if there's a generic param"),
+ );
+ let s = generics.placeholder_subst(self.db);
+ s.apply(t.substitution.clone(), Interner)
+ }
+ ParamLoweringMode::Variable => t.substitution.clone(),
+ };
+ // We need to shift in the bound vars, since
+ // associated_type_shorthand_candidates does not do that
+ let substs = substs.shifted_in_from(Interner, self.in_binders);
+ // FIXME handle type parameters on the segment
+ Some(
+ TyKind::Alias(AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution: substs,
+ }))
+ .intern(Interner),
+ )
+ } else {
+ None
+ }
+ },
+ );
+
+ ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
+ }
+
+ fn lower_path_inner(
+ &self,
+ segment: PathSegment<'_>,
+ typeable: TyDefId,
+ infer_args: bool,
+ ) -> Ty {
+ let generic_def = match typeable {
+ TyDefId::BuiltinType(_) => None,
+ TyDefId::AdtId(it) => Some(it.into()),
+ TyDefId::TypeAliasId(it) => Some(it.into()),
+ };
+ let substs = self.substs_from_path_segment(segment, generic_def, infer_args, None);
+ self.db.ty(typeable).substitute(Interner, &substs)
+ }
+
+ /// Collect generic arguments from a path into a `Substs`. See also
+ /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
+ pub(super) fn substs_from_path(
+ &self,
+ path: &Path,
+ // Note that we don't call `db.value_type(resolved)` here,
+ // `ValueTyDefId` is just a convenient way to pass generics and
+ // special-case enum variants
+ resolved: ValueTyDefId,
+ infer_args: bool,
+ ) -> Substitution {
+ let last = path.segments().last().expect("path should have at least one segment");
+ let (segment, generic_def) = match resolved {
+ ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
+ ValueTyDefId::StructId(it) => (last, Some(it.into())),
+ ValueTyDefId::UnionId(it) => (last, Some(it.into())),
+ ValueTyDefId::ConstId(it) => (last, Some(it.into())),
+ ValueTyDefId::StaticId(_) => (last, None),
+ ValueTyDefId::EnumVariantId(var) => {
+ // the generic args for an enum variant may be either specified
+ // on the segment referring to the enum, or on the segment
+ // referring to the variant. So `Option::<T>::None` and
+ // `Option::None::<T>` are both allowed (though the former is
+ // preferred). See also `def_ids_for_path_segments` in rustc.
+ let len = path.segments().len();
+ let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx));
+ let segment = match penultimate {
+ Some(segment) if segment.args_and_bindings.is_some() => segment,
+ _ => last,
+ };
+ (segment, Some(var.parent.into()))
+ }
+ };
+ self.substs_from_path_segment(segment, generic_def, infer_args, None)
+ }
+
+ fn substs_from_path_segment(
+ &self,
+ segment: PathSegment<'_>,
+ def_generic: Option<GenericDefId>,
+ infer_args: bool,
+ explicit_self_ty: Option<Ty>,
+ ) -> Substitution {
+ let mut substs = Vec::new();
+ let def_generics = if let Some(def) = def_generic {
+ generics(self.db.upcast(), def)
+ } else {
+ return Substitution::empty(Interner);
+ };
+ let (parent_params, self_params, type_params, const_params, impl_trait_params) =
+ def_generics.provenance_split();
+ let total_len =
+ parent_params + self_params + type_params + const_params + impl_trait_params;
+
+ let ty_error = GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner);
+
+ let mut def_generic_iter = def_generics.iter_id();
+
+ for _ in 0..parent_params {
+ if let Some(eid) = def_generic_iter.next() {
+ match eid {
+ Either::Left(_) => substs.push(ty_error.clone()),
+ Either::Right(x) => {
+ substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
+ }
+ }
+ }
+ }
+
+ let fill_self_params = || {
+ for x in explicit_self_ty
+ .into_iter()
+ .map(|x| GenericArgData::Ty(x).intern(Interner))
+ .chain(iter::repeat(ty_error.clone()))
+ .take(self_params)
+ {
+ if let Some(id) = def_generic_iter.next() {
+ assert!(id.is_left());
+ substs.push(x);
+ }
+ }
+ };
+ let mut had_explicit_args = false;
+
+ if let Some(generic_args) = &segment.args_and_bindings {
+ if !generic_args.has_self_type {
+ fill_self_params();
+ }
+ let expected_num = if generic_args.has_self_type {
+ self_params + type_params + const_params
+ } else {
+ type_params + const_params
+ };
+ let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
+ // if args are provided, it should be all of them, but we can't rely on that
+ for arg in generic_args
+ .args
+ .iter()
+ .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+ .skip(skip)
+ .take(expected_num)
+ {
+ if let Some(id) = def_generic_iter.next() {
+ if let Some(x) = generic_arg_to_chalk(
+ self.db,
+ id,
+ arg,
+ &mut (),
+ |_, type_ref| self.lower_ty(type_ref),
+ |_, c, ty| {
+ const_or_path_to_chalk(
+ self.db,
+ &self.resolver,
+ ty,
+ c,
+ self.type_param_mode,
+ || self.generics(),
+ self.in_binders,
+ )
+ },
+ ) {
+ had_explicit_args = true;
+ substs.push(x);
+ } else {
+ // we just filtered them out
+ never!("Unexpected lifetime argument");
+ }
+ }
+ }
+ } else {
+ fill_self_params();
+ }
+
+ // handle defaults. In expression or pattern path segments without
+ // explicitly specified type arguments, missing type arguments are inferred
+ // (i.e. defaults aren't used).
+ if !infer_args || had_explicit_args {
+ if let Some(def_generic) = def_generic {
+ let defaults = self.db.generic_defaults(def_generic);
+ assert_eq!(total_len, defaults.len());
+
+ for default_ty in defaults.iter().skip(substs.len()) {
+ // each default can depend on the previous parameters
+ let substs_so_far = Substitution::from_iter(Interner, substs.clone());
+ if let Some(_id) = def_generic_iter.next() {
+ substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
+ }
+ }
+ }
+ }
+
+ // add placeholders for args that were not provided
+ // FIXME: emit diagnostics in contexts where this is not allowed
+ for eid in def_generic_iter {
+ match eid {
+ Either::Left(_) => substs.push(ty_error.clone()),
+ Either::Right(x) => {
+ substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
+ }
+ }
+ }
+        // If this assert fails, it means you pushed into `substs` without calling `.next()` on `def_generic_iter`
+ assert_eq!(substs.len(), total_len);
+
+ Substitution::from_iter(Interner, substs)
+ }
+
+ fn lower_trait_ref_from_path(
+ &self,
+ path: &Path,
+ explicit_self_ty: Option<Ty>,
+ ) -> Option<TraitRef> {
+ let resolved =
+ match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
+ TypeNs::TraitId(tr) => tr,
+ _ => return None,
+ };
+ let segment = path.segments().last().expect("path should have at least one segment");
+ Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
+ }
+
+ pub(crate) fn lower_trait_ref_from_resolved_path(
+ &self,
+ resolved: TraitId,
+ segment: PathSegment<'_>,
+ explicit_self_ty: Option<Ty>,
+ ) -> TraitRef {
+ let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty);
+ TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
+ }
+
+ fn lower_trait_ref(
+ &self,
+ trait_ref: &HirTraitRef,
+ explicit_self_ty: Option<Ty>,
+ ) -> Option<TraitRef> {
+ self.lower_trait_ref_from_path(&trait_ref.path, explicit_self_ty)
+ }
+
+ fn trait_ref_substs_from_path(
+ &self,
+ segment: PathSegment<'_>,
+ resolved: TraitId,
+ explicit_self_ty: Option<Ty>,
+ ) -> Substitution {
+ self.substs_from_path_segment(segment, Some(resolved.into()), false, explicit_self_ty)
+ }
+
+ pub(crate) fn lower_where_predicate(
+ &'a self,
+ where_predicate: &'a WherePredicate,
+ ignore_bindings: bool,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ match where_predicate {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound } => {
+ let self_ty = match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(type_ref),
+ WherePredicateTypeTarget::TypeOrConstParam(param_id) => {
+ let generic_def = self.resolver.generic_def().expect("generics in scope");
+ let generics = generics(self.db.upcast(), generic_def);
+ let param_id = hir_def::TypeOrConstParamId {
+ parent: generic_def,
+ local_id: *param_id,
+ };
+ let placeholder = to_placeholder_idx(self.db, param_id);
+ match self.type_param_mode {
+ ParamLoweringMode::Placeholder => TyKind::Placeholder(placeholder),
+ ParamLoweringMode::Variable => {
+ let idx = generics.param_idx(param_id).expect("matching generics");
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
+ }
+ }
+ .intern(Interner)
+ }
+ };
+ self.lower_type_bound(bound, self_ty, ignore_bindings)
+ .collect::<Vec<_>>()
+ .into_iter()
+ }
+ WherePredicate::Lifetime { .. } => vec![].into_iter(),
+ }
+ }
+
+ pub(crate) fn lower_type_bound(
+ &'a self,
+ bound: &'a TypeBound,
+ self_ty: Ty,
+ ignore_bindings: bool,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ let mut bindings = None;
+ let trait_ref = match bound {
+ TypeBound::Path(path, TraitBoundModifier::None) => {
+ bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
+ bindings
+ .clone()
+ .filter(|tr| {
+ // ignore `T: Drop` or `T: Destruct` bounds.
+ // - `T: ~const Drop` has a special meaning in Rust 1.61 that we don't implement.
+ // (So ideally, we'd only ignore `~const Drop` here)
+ // - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
+ // the builtin impls are supported by Chalk, we ignore them here.
+ if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
+ if lang == "drop" || lang == "destruct" {
+ return false;
+ }
+ }
+ true
+ })
+ .map(WhereClause::Implemented)
+ .map(crate::wrap_empty_binders)
+ }
+ TypeBound::Path(path, TraitBoundModifier::Maybe) => {
+ let sized_trait = self
+ .db
+ .lang_item(self.resolver.krate(), SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait());
+ // Don't lower associated type bindings as the only possible relaxed trait bound
+                // `?Sized` has none of them.
+                // If we got another trait here, ignore the bound completely.
+ let trait_id = self
+ .lower_trait_ref_from_path(path, Some(self_ty.clone()))
+ .map(|trait_ref| trait_ref.hir_trait_id());
+ if trait_id == sized_trait {
+ self.unsized_types.borrow_mut().insert(self_ty);
+ }
+ None
+ }
+ TypeBound::ForLifetime(_, path) => {
+ // FIXME Don't silently drop the hrtb lifetimes here
+ bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
+ bindings.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
+ }
+ TypeBound::Lifetime(_) => None,
+ TypeBound::Error => None,
+ };
+ trait_ref.into_iter().chain(
+ bindings
+ .into_iter()
+ .filter(move |_| !ignore_bindings)
+ .flat_map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)),
+ )
+ }
+
+ fn assoc_type_bindings_from_type_bound(
+ &'a self,
+ bound: &'a TypeBound,
+ trait_ref: TraitRef,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ let last_segment = match bound {
+ TypeBound::Path(path, TraitBoundModifier::None) | TypeBound::ForLifetime(_, path) => {
+ path.segments().last()
+ }
+ TypeBound::Path(_, TraitBoundModifier::Maybe)
+ | TypeBound::Error
+ | TypeBound::Lifetime(_) => None,
+ };
+ last_segment
+ .into_iter()
+ .filter_map(|segment| segment.args_and_bindings)
+ .flat_map(|args_and_bindings| &args_and_bindings.bindings)
+ .flat_map(move |binding| {
+ let found = associated_type_by_name_including_super_traits(
+ self.db,
+ trait_ref.clone(),
+ &binding.name,
+ );
+ let (super_trait_ref, associated_ty) = match found {
+ None => return SmallVec::new(),
+ Some(t) => t,
+ };
+ let projection_ty = ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution: super_trait_ref.substitution,
+ };
+ let mut preds: SmallVec<[_; 1]> = SmallVec::with_capacity(
+ binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
+ );
+ if let Some(type_ref) = &binding.type_ref {
+ let ty = self.lower_ty(type_ref);
+ let alias_eq =
+ AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
+ preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ }
+ for bound in &binding.bounds {
+ preds.extend(self.lower_type_bound(
+ bound,
+ TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
+ false,
+ ));
+ }
+ preds
+ })
+ }
+
+    /// Lowers the bounds of a `-> impl Trait` return type into a
+    /// `ReturnTypeImplTrait`.
+    fn lower_impl_trait(
+        &self,
+        bounds: &[Interned<TypeBound>],
+        func: FunctionId,
+    ) -> ReturnTypeImplTrait {
+        cov_mark::hit!(lower_rpit);
+        // The opaque type's self type is the innermost bound variable; the
+        // bounds themselves are lowered one binder level further in.
+        let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
+        let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+            let mut predicates: Vec<_> = bounds
+                .iter()
+                .flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
+                .collect();
+
+            // Unless `?Sized` was written explicitly, add the implicit
+            // `Self: Sized` bound (if the `sized` lang item is known).
+            if !ctx.unsized_types.borrow().contains(&self_ty) {
+                let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate();
+                let sized_trait = ctx
+                    .db
+                    .lang_item(krate, SmolStr::new_inline("sized"))
+                    .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+                let sized_clause = sized_trait.map(|trait_id| {
+                    let clause = WhereClause::Implemented(TraitRef {
+                        trait_id,
+                        substitution: Substitution::from1(Interner, self_ty.clone()),
+                    });
+                    crate::wrap_empty_binders(clause)
+                });
+                predicates.extend(sized_clause.into_iter());
+                predicates.shrink_to_fit();
+            }
+            predicates
+        });
+        ReturnTypeImplTrait { bounds: crate::make_single_type_binders(predicates) }
+    }
+}
+
+/// Counts the `impl Trait` nodes occurring anywhere inside `type_ref`,
+/// walking the whole type recursively.
+fn count_impl_traits(type_ref: &TypeRef) -> usize {
+    let mut n = 0;
+    type_ref.walk(&mut |t| {
+        if let TypeRef::ImplTrait(_) = t {
+            n += 1;
+        }
+    });
+    n
+}
+
+/// Build the signature of a callable item (function, struct or enum variant).
+pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
+    match def {
+        CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
+        CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
+        CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
+    }
+}
+
+/// Enumerates the associated-type candidates that a shorthand path like
+/// `T::Assoc` could refer to, calling `cb` for each candidate until it
+/// returns `Some`.
+pub fn associated_type_shorthand_candidates<R>(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+    res: TypeNs,
+    cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+    named_associated_type_shorthand_candidates(db, def, res, None, cb)
+}
+
+/// Like [`associated_type_shorthand_candidates`], but restricts the trait
+/// bounds that are considered to those that could define an associated type
+/// with the given `assoc_name` (when provided), which helps avoid query
+/// cycles.
+fn named_associated_type_shorthand_candidates<R>(
+    db: &dyn HirDatabase,
+    // If the type parameter is defined in an impl and we're in a method, there
+    // might be additional where clauses to consider
+    def: GenericDefId,
+    res: TypeNs,
+    assoc_name: Option<Name>,
+    mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+    // Walk the trait (and all of its supertraits) and offer every associated
+    // type alias to the callback.
+    let mut search = |t| {
+        for t in all_super_trait_refs(db, t) {
+            let data = db.trait_data(t.hir_trait_id());
+
+            for (name, assoc_id) in &data.items {
+                if let AssocItemId::TypeAliasId(alias) = assoc_id {
+                    if let Some(result) = cb(name, &t, *alias) {
+                        return Some(result);
+                    }
+                }
+            }
+        }
+        None
+    };
+
+    match res {
+        TypeNs::SelfType(impl_id) => search(
+            // we're _in_ the impl -- the binders get added back later. Correct,
+            // but it would be nice to make this more explicit
+            db.impl_trait(impl_id)?.into_value_and_skipped_binders().0,
+        ),
+        TypeNs::GenericParam(param_id) => {
+            let predicates = db.generic_predicates_for_param(def, param_id.into(), assoc_name);
+            let res = predicates.iter().find_map(|pred| match pred.skip_binders().skip_binders() {
+                // FIXME: how to correctly handle higher-ranked bounds here?
+                WhereClause::Implemented(tr) => search(
+                    tr.clone()
+                        .shifted_out_to(Interner, DebruijnIndex::ONE)
+                        .expect("FIXME unexpected higher-ranked trait bound"),
+                ),
+                _ => None,
+            });
+            if res.is_some() {
+                return res;
+            }
+            // Handle `Self::Type` referring to own associated type in trait definitions
+            if let GenericDefId::TraitId(trait_id) = param_id.parent() {
+                let generics = generics(db.upcast(), trait_id.into());
+                if generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
+                    let trait_ref = TyBuilder::trait_ref(db, trait_id)
+                        .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
+                        .build();
+                    return search(trait_ref);
+                }
+            }
+            None
+        }
+        _ => None,
+    }
+}
+
+/// Build the type of all specific fields of a struct or enum variant.
+pub(crate) fn field_types_query(
+    db: &dyn HirDatabase,
+    variant_id: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>> {
+    let var_data = variant_id.variant_data(db.upcast());
+    // Enum variants use the resolver and generics of their parent enum.
+    let (resolver, def): (_, GenericDefId) = match variant_id {
+        VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
+        VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
+        VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()),
+    };
+    let generics = generics(db.upcast(), def);
+    let mut res = ArenaMap::default();
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    for (field_id, field_data) in var_data.fields().iter() {
+        res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)))
+    }
+    Arc::new(res)
+}
+
+/// This query exists only to be used when resolving short-hand associated types
+/// like `T::Item`.
+///
+/// See the analogous query in rustc and its comment:
+/// <https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46>
+/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
+/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
+/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
+pub(crate) fn generic_predicates_for_param_query(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+    param_id: TypeOrConstParamId,
+    assoc_name: Option<Name>,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let generics = generics(db.upcast(), def);
+    let mut predicates: Vec<_> = resolver
+        .where_predicates_in_scope()
+        // we have to filter out all other predicates *first*, before attempting to lower them
+        .filter(|pred| match pred {
+            WherePredicate::ForLifetime { target, bound, .. }
+            | WherePredicate::TypeBound { target, bound, .. } => {
+                // Keep only predicates whose target is exactly `param_id`.
+                match target {
+                    WherePredicateTypeTarget::TypeRef(type_ref) => {
+                        if ctx.lower_ty_only_param(type_ref) != Some(param_id) {
+                            return false;
+                        }
+                    }
+                    &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+                        let target_id = TypeOrConstParamId { parent: def, local_id };
+                        if target_id != param_id {
+                            return false;
+                        }
+                    }
+                };
+
+                match &**bound {
+                    TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
+                        // Only lower the bound if the trait could possibly define the associated
+                        // type we're looking for.
+
+                        let assoc_name = match &assoc_name {
+                            Some(it) => it,
+                            None => return true,
+                        };
+                        let tr = match resolver
+                            .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+                        {
+                            Some(TypeNs::TraitId(tr)) => tr,
+                            _ => return false,
+                        };
+
+                        all_super_traits(db.upcast(), tr).iter().any(|tr| {
+                            db.trait_data(*tr).items.iter().any(|(name, item)| {
+                                matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
+                            })
+                        })
+                    }
+                    TypeBound::Lifetime(_) | TypeBound::Error => false,
+                }
+            }
+            WherePredicate::Lifetime { .. } => false,
+        })
+        .flat_map(|pred| {
+            ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p))
+        })
+        .collect();
+
+    // Parameters with no explicit `?Sized` bound also get the implicit `Sized`
+    // predicate.
+    let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+    let implicitly_sized_predicates =
+        implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver)
+            .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
+    predicates.extend(implicitly_sized_predicates);
+    predicates.into()
+}
+
+/// Cycle recovery for [`generic_predicates_for_param_query`]: fall back to an
+/// empty predicate list.
+pub(crate) fn generic_predicates_for_param_recover(
+    _db: &dyn HirDatabase,
+    _cycle: &[String],
+    _def: &GenericDefId,
+    _param_id: &TypeOrConstParamId,
+    _assoc_name: &Option<Name>,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+    Arc::new([])
+}
+
+/// Build the trait environment for `def`: every where clause in scope
+/// (plus implicit `Sized` bounds, and `Self: Trait` inside trait items),
+/// lowered to chalk program clauses.
+pub(crate) fn trait_environment_query(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+) -> Arc<TraitEnvironment> {
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Placeholder);
+    let mut traits_in_scope = Vec::new();
+    let mut clauses = Vec::new();
+    for pred in resolver.where_predicates_in_scope() {
+        for pred in ctx.lower_where_predicate(pred, false) {
+            // Record `Self: Trait` clauses separately so method resolution can
+            // consult them later.
+            if let WhereClause::Implemented(tr) = &pred.skip_binders() {
+                traits_in_scope.push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id()));
+            }
+            let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+            clauses.push(program_clause.into_from_env_clause(Interner));
+        }
+    }
+
+    let container: Option<ItemContainerId> = match def {
+        // FIXME: is there a function for this?
+        GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
+        GenericDefId::AdtId(_) => None,
+        GenericDefId::TraitId(_) => None,
+        GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
+        GenericDefId::ImplId(_) => None,
+        GenericDefId::EnumVariantId(_) => None,
+        GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
+    };
+    if let Some(ItemContainerId::TraitId(trait_id)) = container {
+        // add `Self: Trait<T1, T2, ...>` to the environment in trait
+        // function default implementations (and speculative code
+        // inside consts or type aliases)
+        cov_mark::hit!(trait_self_implements_self);
+        let substs = TyBuilder::placeholder_subst(db, trait_id);
+        let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs };
+        let pred = WhereClause::Implemented(trait_ref);
+        let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+        clauses.push(program_clause.into_from_env_clause(Interner));
+    }
+
+    let subst = generics(db.upcast(), def).placeholder_subst(db);
+    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+    let implicitly_sized_clauses =
+        implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver).map(|pred| {
+            let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+            program_clause.into_from_env_clause(Interner)
+        });
+    clauses.extend(implicitly_sized_clauses);
+
+    let krate = def.module(db.upcast()).krate();
+
+    let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
+
+    Arc::new(TraitEnvironment { krate, traits_from_clauses: traits_in_scope, env })
+}
+
+/// Resolve the where clause(s) of an item with generics.
+pub(crate) fn generic_predicates_query(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let generics = generics(db.upcast(), def);
+
+    let mut predicates = resolver
+        .where_predicates_in_scope()
+        .flat_map(|pred| {
+            ctx.lower_where_predicate(pred, false).map(|p| make_binders(db, &generics, p))
+        })
+        .collect::<Vec<_>>();
+
+    // Parameters with no explicit `?Sized` bound also get the implicit `Sized`
+    // predicate.
+    let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+    let implicitly_sized_predicates =
+        implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
+            .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
+    predicates.extend(implicitly_sized_predicates);
+    predicates.into()
+}
+
+/// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound.
+/// Exception is Self of a trait def.
+fn implicitly_sized_clauses<'a>(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+    explicitly_unsized_tys: &'a FxHashSet<Ty>,
+    substitution: &'a Substitution,
+    resolver: &Resolver,
+) -> impl Iterator<Item = WhereClause> + 'a {
+    // For a trait def, skip the first substitution entry (the implicit `Self`).
+    let is_trait_def = matches!(def, GenericDefId::TraitId(..));
+    let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
+    let sized_trait = db
+        .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
+        .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+
+    // If the `sized` lang item is unknown, the iterator is simply empty.
+    sized_trait.into_iter().flat_map(move |sized_trait| {
+        let implicitly_sized_tys = generic_args
+            .iter()
+            .filter_map(|generic_arg| generic_arg.ty(Interner))
+            .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty));
+        implicitly_sized_tys.map(move |self_ty| {
+            WhereClause::Implemented(TraitRef {
+                trait_id: sized_trait,
+                substitution: Substitution::from1(Interner, self_ty.clone()),
+            })
+        })
+    })
+}
+
+/// Resolve the default type params from generics
+pub(crate) fn generic_defaults_query(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+) -> Arc<[Binders<chalk_ir::GenericArg<Interner>>]> {
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let generic_params = generics(db.upcast(), def);
+
+    let defaults = generic_params
+        .iter()
+        .enumerate()
+        .map(|(idx, (id, p))| {
+            let p = match p {
+                TypeOrConstParamData::TypeParamData(p) => p,
+                TypeOrConstParamData::ConstParamData(_) => {
+                    // FIXME: implement const generic defaults
+                    let val = unknown_const_as_generic(
+                        db.const_param_ty(ConstParamId::from_unchecked(id)),
+                    );
+                    return crate::make_binders_with_count(db, idx, &generic_params, val);
+                }
+            };
+            // A parameter with no written default gets an error type.
+            let mut ty =
+                p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
+
+            // Each default can only refer to previous parameters.
+            // type variable default referring to parameter coming
+            // after it. This is forbidden (FIXME: report
+            // diagnostic)
+            ty = fallback_bound_vars(ty, idx);
+            let val = GenericArgData::Ty(ty).intern(Interner);
+            crate::make_binders_with_count(db, idx, &generic_params, val)
+        })
+        .collect();
+
+    defaults
+}
+
+/// Cycle recovery for [`generic_defaults_query`]: every parameter gets an
+/// unknown default.
+pub(crate) fn generic_defaults_recover(
+    db: &dyn HirDatabase,
+    _cycle: &[String],
+    def: &GenericDefId,
+) -> Arc<[Binders<crate::GenericArg>]> {
+    let generic_params = generics(db.upcast(), *def);
+    // FIXME: this code is not covered in tests.
+    // we still need one default per parameter
+    let defaults = generic_params
+        .iter_id()
+        .enumerate()
+        .map(|(count, id)| {
+            let val = match id {
+                itertools::Either::Left(_) => {
+                    GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+                }
+                itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+            };
+            crate::make_binders_with_count(db, count, &generic_params, val)
+        })
+        .collect();
+
+    defaults
+}
+
+/// Build the signature of a function from its declared parameter and return
+/// types (the body is not consulted).
+fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
+    let data = db.function_data(def);
+    let resolver = def.resolver(db.upcast());
+    // `impl Trait` in argument position is lowered as a type variable...
+    let ctx_params = TyLoweringContext::new(db, &resolver)
+        .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
+        .with_type_param_mode(ParamLoweringMode::Variable);
+    let params = data.params.iter().map(|(_, tr)| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
+    // ...while in return position it becomes an opaque type.
+    let ctx_ret = TyLoweringContext::new(db, &resolver)
+        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+        .with_type_param_mode(ParamLoweringMode::Variable);
+    let ret = ctx_ret.lower_ty(&data.ret_type);
+    let generics = generics(db.upcast(), def.into());
+    let sig = CallableSig::from_params_and_return(params, ret, data.is_varargs());
+    make_binders(db, &generics, sig)
+}
+
+/// Build the declared type of a function. This should not need to look at the
+/// function body.
+fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
+    let generics = generics(db.upcast(), def.into());
+    let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    make_binders(
+        db,
+        &generics,
+        TyKind::FnDef(CallableDefId::FunctionId(def).to_chalk(db), substs).intern(Interner),
+    )
+}
+
+/// Build the declared type of a const.
+fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
+    let data = db.const_data(def);
+    let generics = generics(db.upcast(), def.into());
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+
+    make_binders(db, &generics, ctx.lower_ty(&data.type_ref))
+}
+
+/// Build the declared type of a static.
+fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
+    let data = db.static_data(def);
+    let resolver = def.resolver(db.upcast());
+    let ctx = TyLoweringContext::new(db, &resolver);
+
+    // Statics take no generic parameters, hence the empty binders.
+    Binders::empty(Interner, ctx.lower_ty(&data.type_ref))
+}
+
+/// Build the constructor signature of a tuple struct: one parameter per field,
+/// returning the struct type.
+fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
+    let struct_data = db.struct_data(def);
+    let fields = struct_data.variant_data.fields();
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+    let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
+    Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+}
+
+/// Build the type of a tuple struct constructor.
+fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<Ty> {
+    let struct_data = db.struct_data(def);
+    // Unit structs have no constructor function; their value is the ADT type.
+    if let StructKind::Unit = struct_data.variant_data.kind() {
+        return type_for_adt(db, def.into());
+    }
+    let generics = generics(db.upcast(), def.into());
+    let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    make_binders(
+        db,
+        &generics,
+        TyKind::FnDef(CallableDefId::StructId(def).to_chalk(db), substs).intern(Interner),
+    )
+}
+
+/// Build the constructor signature of a tuple enum variant: one parameter per
+/// field, returning the parent enum type.
+fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
+    let enum_data = db.enum_data(def.parent);
+    let var_data = &enum_data.variants[def.local_id];
+    let fields = var_data.variant_data.fields();
+    // Variants share the resolver and generics of their parent enum.
+    let resolver = def.parent.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+    let (ret, binders) = type_for_adt(db, def.parent.into()).into_value_and_skipped_binders();
+    Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+}
+
+/// Build the type of a tuple enum variant constructor.
+fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders<Ty> {
+    let enum_data = db.enum_data(def.parent);
+    let var_data = &enum_data.variants[def.local_id].variant_data;
+    // Unit variants have no constructor function; their value is the enum type.
+    if let StructKind::Unit = var_data.kind() {
+        return type_for_adt(db, def.parent.into());
+    }
+    let generics = generics(db.upcast(), def.parent.into());
+    let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    make_binders(
+        db,
+        &generics,
+        TyKind::FnDef(CallableDefId::EnumVariantId(def).to_chalk(db), substs).intern(Interner),
+    )
+}
+
+/// Build the declared type of an ADT, applied to its own generic parameters
+/// as bound variables.
+fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
+    let generics = generics(db.upcast(), adt.into());
+    let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    make_binders(db, &generics, TyKind::Adt(crate::AdtId(adt), substs).intern(Interner))
+}
+
+/// Build the declared type of a type alias.
+///
+/// Extern type aliases (`extern { type A; }`) lower to an opaque foreign
+/// type; ordinary aliases lower their right-hand side (or an error type if
+/// it is missing).
+fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
+    // Fetch the alias data once instead of querying twice, and only build the
+    // lowering machinery on the branch that needs it.
+    let data = db.type_alias_data(t);
+    if data.is_extern {
+        // Extern types are not generic.
+        Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
+    } else {
+        let generics = generics(db.upcast(), t.into());
+        let resolver = t.resolver(db.upcast());
+        let ctx =
+            TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+        let inner = ctx.lower_ty(data.type_ref.as_deref().unwrap_or(&TypeRef::Error));
+        make_binders(db, &generics, inner)
+    }
+}
+
+/// Identifies anything that can be called like a function: a function item,
+/// a tuple struct constructor, or a tuple enum variant constructor.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum CallableDefId {
+    FunctionId(FunctionId),
+    StructId(StructId),
+    EnumVariantId(EnumVariantId),
+}
+impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
+
+impl CallableDefId {
+    /// Returns the crate the callable is defined in.
+    pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
+        let db = db.upcast();
+        match self {
+            CallableDefId::FunctionId(f) => f.lookup(db).module(db),
+            CallableDefId::StructId(s) => s.lookup(db).container,
+            CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container,
+        }
+        .krate()
+    }
+}
+
+impl From<CallableDefId> for GenericDefId {
+    fn from(def: CallableDefId) -> GenericDefId {
+        match def {
+            CallableDefId::FunctionId(f) => f.into(),
+            CallableDefId::StructId(s) => s.into(),
+            CallableDefId::EnumVariantId(e) => e.into(),
+        }
+    }
+}
+
+/// A definition that names a type: a builtin, an ADT, or a type alias.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum TyDefId {
+    BuiltinType(BuiltinType),
+    AdtId(AdtId),
+    TypeAliasId(TypeAliasId),
+}
+impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
+
+/// A definition that names a value (see [`ty_query`] vs [`value_ty_query`]).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ValueTyDefId {
+    FunctionId(FunctionId),
+    StructId(StructId),
+    UnionId(UnionId),
+    EnumVariantId(EnumVariantId),
+    ConstId(ConstId),
+    StaticId(StaticId),
+}
+impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
+
+/// Build the declared type of an item. This depends on the namespace; e.g. for
+/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
+/// the constructor function `(usize) -> Foo` which lives in the values
+/// namespace.
+pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
+    match def {
+        TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)),
+        TyDefId::AdtId(it) => type_for_adt(db, it),
+        TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
+    }
+}
+
+/// Cycle recovery for [`ty_query`]: degrade to an error type.
+pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
+    let generics = match *def {
+        TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
+        TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
+        TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
+    };
+    make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
+/// Build the declared type of an item in the value namespace (see [`ty_query`]).
+pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
+    match def {
+        ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
+        ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
+        ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()),
+        ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
+        ValueTyDefId::ConstId(it) => type_for_const(db, it),
+        ValueTyDefId::StaticId(it) => type_for_static(db, it),
+    }
+}
+
+/// Lower the self type (the type after `impl`) of an impl block.
+pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
+    let impl_loc = impl_id.lookup(db.upcast());
+    let impl_data = db.impl_data(impl_id);
+    let resolver = impl_id.resolver(db.upcast());
+    // Attach context so a panic during lowering identifies the offending impl.
+    let _cx = stdx::panic_context::enter(format!(
+        "impl_self_ty_query({:?} -> {:?} -> {:?})",
+        impl_id, impl_loc, impl_data
+    ));
+    let generics = generics(db.upcast(), impl_id.into());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    make_binders(db, &generics, ctx.lower_ty(&impl_data.self_ty))
+}
+
+/// Returns the declared type of a const parameter.
+///
+/// Passing the id of a *type* parameter is a caller bug: that branch logs via
+/// `never!` and returns an error type (the old comment claiming `None` was
+/// stale — this function does not return an `Option`).
+pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
+    let parent_data = db.generic_params(def.parent());
+    let data = &parent_data.type_or_consts[def.local_id()];
+    let resolver = def.parent().resolver(db.upcast());
+    let ctx = TyLoweringContext::new(db, &resolver);
+    match data {
+        TypeOrConstParamData::TypeParamData(_) => {
+            never!();
+            Ty::new(Interner, TyKind::Error)
+        }
+        TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(&d.ty),
+    }
+}
+
+/// Cycle recovery for [`impl_self_ty_query`]: degrade to an error type.
+pub(crate) fn impl_self_ty_recover(
+    db: &dyn HirDatabase,
+    _cycle: &[String],
+    impl_id: &ImplId,
+) -> Binders<Ty> {
+    let generics = generics(db.upcast(), (*impl_id).into());
+    make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
+/// Lower the trait reference of a trait impl (`impl Trait for SelfTy`).
+/// Returns `None` for inherent impls or when the trait ref fails to lower.
+pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
+    let impl_loc = impl_id.lookup(db.upcast());
+    let impl_data = db.impl_data(impl_id);
+    let resolver = impl_id.resolver(db.upcast());
+    // Attach context so a panic during lowering identifies the offending impl.
+    let _cx = stdx::panic_context::enter(format!(
+        "impl_trait_query({:?} -> {:?} -> {:?})",
+        impl_id, impl_loc, impl_data
+    ));
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
+    let target_trait = impl_data.target_trait.as_ref()?;
+    Some(Binders::new(binders, ctx.lower_trait_ref(target_trait, Some(self_ty))?))
+}
+
+/// Collects the `impl Trait` opaque types occurring in the return type of a
+/// function; returns `None` when there are none.
+pub(crate) fn return_type_impl_traits(
+    db: &dyn HirDatabase,
+    def: hir_def::FunctionId,
+) -> Option<Arc<Binders<ReturnTypeImplTraits>>> {
+    // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
+    let data = db.function_data(def);
+    let resolver = def.resolver(db.upcast());
+    let ctx_ret = TyLoweringContext::new(db, &resolver)
+        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+        .with_type_param_mode(ParamLoweringMode::Variable);
+    // Lowering is done purely for its side effect of populating
+    // `ctx_ret.opaque_type_data`; the resulting type itself is discarded.
+    // (The previous `(&ctx_ret).lower_ty(...)` borrow was redundant.)
+    let _ret = ctx_ret.lower_ty(&data.ret_type);
+    let generics = generics(db.upcast(), def.into());
+    let return_type_impl_traits =
+        ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };
+    if return_type_impl_traits.impl_traits.is_empty() {
+        None
+    } else {
+        Some(Arc::new(make_binders(db, &generics, return_type_impl_traits)))
+    }
+}
+
+/// Translate HIR-level mutability into chalk's `Mutability`.
+pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mutability {
+    use hir_def::type_ref::Mutability as HirMutability;
+    match m {
+        HirMutability::Shared => Mutability::Not,
+        HirMutability::Mut => Mutability::Mut,
+    }
+}
+
+/// Checks if the provided generic arg matches its expected kind, then lower them via
+/// provided closures. Use unknown if there was kind mismatch.
+///
+/// Returns `Some` of the lowered generic arg. `None` if the provided arg is a lifetime.
+pub(crate) fn generic_arg_to_chalk<'a, T>(
+    db: &dyn HirDatabase,
+    kind_id: Either<TypeParamId, ConstParamId>,
+    arg: &'a GenericArg,
+    this: &mut T,
+    for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
+    for_const: impl FnOnce(&mut T, &ConstScalarOrPath, Ty) -> Const + 'a,
+) -> Option<crate::GenericArg> {
+    // The expected kind comes from the parameter declaration.
+    let kind = match kind_id {
+        Either::Left(_) => ParamKind::Type,
+        Either::Right(id) => {
+            let ty = db.const_param_ty(id);
+            ParamKind::Const(ty)
+        }
+    };
+    Some(match (arg, kind) {
+        (GenericArg::Type(type_ref), ParamKind::Type) => {
+            let ty = for_type(this, type_ref);
+            GenericArgData::Ty(ty).intern(Interner)
+        }
+        (GenericArg::Const(c), ParamKind::Const(c_ty)) => {
+            GenericArgData::Const(for_const(this, c, c_ty)).intern(Interner)
+        }
+        // Kind mismatch: a const where a type is expected becomes an error type.
+        (GenericArg::Const(_), ParamKind::Type) => {
+            GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+        }
+        (GenericArg::Type(t), ParamKind::Const(c_ty)) => {
+            // We want to recover simple idents, which parser detects them
+            // as types. Maybe here is not the best place to do it, but
+            // it works.
+            if let TypeRef::Path(p) = t {
+                let p = p.mod_path();
+                if p.kind == PathKind::Plain {
+                    if let [n] = p.segments() {
+                        let c = ConstScalarOrPath::Path(n.clone());
+                        return Some(
+                            GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
+                        );
+                    }
+                }
+            }
+            unknown_const_as_generic(c_ty)
+        }
+        (GenericArg::Lifetime(_), _) => return None,
+    })
+}
+
+/// Lower a const argument — either an already-parsed scalar or a path
+/// (e.g. a const generic parameter name) — to a chalk `Const`.
+pub(crate) fn const_or_path_to_chalk(
+    db: &dyn HirDatabase,
+    resolver: &Resolver,
+    expected_ty: Ty,
+    value: &ConstScalarOrPath,
+    mode: ParamLoweringMode,
+    args: impl FnOnce() -> Generics,
+    debruijn: DebruijnIndex,
+) -> Const {
+    match value {
+        ConstScalarOrPath::Scalar(s) => intern_const_scalar(s.clone(), expected_ty),
+        ConstScalarOrPath::Path(n) => {
+            let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
+            // Paths that fail to resolve degrade to an unknown const.
+            path_to_const(db, resolver, &path, mode, args, debruijn)
+                .unwrap_or_else(|| unknown_const(expected_ty))
+        }
+    }
+}
+
+/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
+/// num_vars_to_keep) by `TyKind::Unknown`.
+fn fallback_bound_vars<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+    s: T,
+    num_vars_to_keep: usize,
+) -> T {
+    crate::fold_free_vars(
+        s,
+        |bound, binders| {
+            // Only variables at the innermost debruijn level with too-large
+            // indices are replaced; everything else is shifted back in place.
+            if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
+                TyKind::Error.intern(Interner)
+            } else {
+                bound.shifted_in_from(binders).to_ty(Interner)
+            }
+        },
+        |ty, bound, binders| {
+            // Same policy for const bound variables.
+            if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
+                unknown_const(ty.clone())
+            } else {
+                bound.shifted_in_from(binders).to_const(Interner, ty)
+            }
+        },
+    )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
new file mode 100644
index 000000000..d765fee0e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
@@ -0,0 +1,148 @@
+//! This module contains the implementations of the `ToChalk` trait, which
+//! handles conversion between our data types and their corresponding types in
+//! Chalk (in both directions); plus some helper functions for more specialized
+//! conversions.
+
+use chalk_solve::rust_ir;
+
+use base_db::salsa::{self, InternKey};
+use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId};
+
+use crate::{
+ chalk_db, db::HirDatabase, AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId,
+ Interner, OpaqueTyId, PlaceholderIndex,
+};
+
+/// Conversion between rust-analyzer IDs and the corresponding chalk IDs, in
+/// both directions.
+pub(crate) trait ToChalk {
+    type Chalk;
+    fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk;
+    fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self;
+}
+
+/// Free-function form of [`ToChalk::from_chalk`], convenient when the target
+/// type can be inferred.
+pub(crate) fn from_chalk<T, ChalkT>(db: &dyn HirDatabase, chalk: ChalkT) -> T
+where
+    T: ToChalk<Chalk = ChalkT>,
+{
+    T::from_chalk(db, chalk)
+}
+
+impl ToChalk for hir_def::ImplId {
+    type Chalk = chalk_db::ImplId;
+
+    // Impl ids map 1:1 onto their salsa intern ids.
+    fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::ImplId {
+        chalk_ir::ImplId(self.as_intern_id())
+    }
+
+    fn from_chalk(_db: &dyn HirDatabase, impl_id: chalk_db::ImplId) -> hir_def::ImplId {
+        InternKey::from_intern_id(impl_id.0)
+    }
+}
+
+impl ToChalk for CallableDefId {
+    type Chalk = FnDefId;
+
+    // Callable defs go through a dedicated interning table on the database.
+    fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId {
+        db.intern_callable_def(self).into()
+    }
+
+    fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId {
+        db.lookup_intern_callable_def(fn_def_id.into())
+    }
+}
+
+/// Newtype distinguishing a type alias used as an associated-type *value*
+/// (in an impl) from the type alias item itself.
+pub(crate) struct TypeAliasAsValue(pub(crate) TypeAliasId);
+
+impl ToChalk for TypeAliasAsValue {
+    type Chalk = chalk_db::AssociatedTyValueId;
+
+    fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId {
+        rust_ir::AssociatedTyValueId(self.0.as_intern_id())
+    }
+
+    fn from_chalk(
+        _db: &dyn HirDatabase,
+        assoc_ty_value_id: chalk_db::AssociatedTyValueId,
+    ) -> TypeAliasAsValue {
+        TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
+    }
+}
+
+// The following `From` impls convert between the database's interned ids and
+// their chalk wrappers; each is a plain intern-id round-trip.
+impl From<FnDefId> for crate::db::InternedCallableDefId {
+    fn from(fn_def_id: FnDefId) -> Self {
+        InternKey::from_intern_id(fn_def_id.0)
+    }
+}
+
+impl From<crate::db::InternedCallableDefId> for FnDefId {
+    fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
+        chalk_ir::FnDefId(callable_def_id.as_intern_id())
+    }
+}
+
+impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
+    fn from(id: OpaqueTyId) -> Self {
+        InternKey::from_intern_id(id.0)
+    }
+}
+
+impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
+    fn from(id: crate::db::InternedOpaqueTyId) -> Self {
+        chalk_ir::OpaqueTyId(id.as_intern_id())
+    }
+}
+
+impl From<chalk_ir::ClosureId<Interner>> for crate::db::InternedClosureId {
+    fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
+        Self::from_intern_id(id.0)
+    }
+}
+
+impl From<crate::db::InternedClosureId> for chalk_ir::ClosureId<Interner> {
+    fn from(id: crate::db::InternedClosureId) -> Self {
+        chalk_ir::ClosureId(id.as_intern_id())
+    }
+}
+
+/// Wraps a type alias id as a chalk foreign (extern) type id.
+pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId {
+    chalk_ir::ForeignDefId(salsa::InternKey::as_intern_id(&id))
+}
+
+/// Inverse of [`to_foreign_def_id`].
+pub fn from_foreign_def_id(id: ForeignDefId) -> TypeAliasId {
+    salsa::InternKey::from_intern_id(id.0)
+}
+
+/// Wraps a type alias id as a chalk associated-type id.
+pub fn to_assoc_type_id(id: TypeAliasId) -> AssocTypeId {
+    chalk_ir::AssocTypeId(salsa::InternKey::as_intern_id(&id))
+}
+
+/// Inverse of [`to_assoc_type_id`].
+pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
+    salsa::InternKey::from_intern_id(id.0)
+}
+
+/// Recovers the type/const parameter behind a chalk placeholder index.
+/// Only the root universe is used here, hence the assertion.
+pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
+    assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
+    let interned_id = salsa::InternKey::from_intern_id(salsa::InternId::from(idx.idx));
+    db.lookup_intern_type_or_const_param_id(interned_id)
+}
+
+/// Inverse of [`from_placeholder_idx`]: interns the parameter id and places it
+/// in the root universe.
+pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex {
+    let interned_id = db.intern_type_or_const_param_id(id);
+    PlaceholderIndex {
+        ui: chalk_ir::UniverseIndex::ROOT,
+        idx: salsa::InternKey::as_intern_id(&interned_id).as_usize(),
+    }
+}
+
+/// Recovers the lifetime parameter behind a chalk placeholder index.
+pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
+    assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
+    let interned_id = salsa::InternKey::from_intern_id(salsa::InternId::from(idx.idx));
+    db.lookup_intern_lifetime_param_id(interned_id)
+}
+
+/// Wraps a trait id as a chalk trait id.
+pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId {
+    chalk_ir::TraitId(salsa::InternKey::as_intern_id(&id))
+}
+
+/// Inverse of [`to_chalk_trait_id`].
+pub fn from_chalk_trait_id(id: ChalkTraitId) -> TraitId {
+    salsa::InternKey::from_intern_id(id.0)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
new file mode 100644
index 000000000..15df7b3dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -0,0 +1,1186 @@
+//! This module is concerned with finding methods that a given type provides.
+//! For details about how this works in rustc, see the method lookup page in the
+//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
+//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateId, Edition};
+use chalk_ir::{cast::Cast, Mutability, UniverseIndex};
+use hir_def::{
+ data::ImplData, item_scope::ItemScope, nameres::DefMap, AssocItemId, BlockId, ConstId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, ModuleId,
+ TraitId,
+};
+use hir_expand::name::Name;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::never;
+
+use crate::{
+ autoderef::{self, AutoderefKind},
+ db::HirDatabase,
+ from_foreign_def_id,
+ infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
+ primitive::{FloatTy, IntTy, UintTy},
+ static_lifetime,
+ utils::all_super_traits,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
+ Scalar, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+};
+
/// This is used as a key for indexing impls.
///
/// A fingerprint is a coarse approximation of a self type: two types with
/// different fingerprints can never be unified, so impl lookup only needs to
/// consider impls whose fingerprint matches (or blanket impls).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TyFingerprint {
    // These are lang item impls:
    Str,
    Slice,
    Array,
    Never,
    RawPtr(Mutability),
    Scalar(Scalar),
    // These can have user-defined impls:
    Adt(hir_def::AdtId),
    Dyn(TraitId),
    ForeignType(ForeignDefId),
    // These only exist for trait impls
    Unit,
    Unnameable,
    // Function pointers, bucketed by arity (substitution length).
    Function(u32),
}
+
impl TyFingerprint {
    /// Creates a TyFingerprint for looking up an inherent impl. Only certain
    /// types can have inherent impls: if we have some `struct S`, we can have
    /// an `impl S`, but not `impl &S`. Hence, this will return `None` for
    /// reference types and such.
    pub fn for_inherent_impl(ty: &Ty) -> Option<TyFingerprint> {
        let fp = match ty.kind(Interner) {
            TyKind::Str => TyFingerprint::Str,
            TyKind::Never => TyFingerprint::Never,
            TyKind::Slice(..) => TyFingerprint::Slice,
            TyKind::Array(..) => TyFingerprint::Array,
            TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
            TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
            TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
            TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
            // A `dyn Trait` without a principal trait has no fingerprint.
            TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
            _ => return None,
        };
        Some(fp)
    }

    /// Creates a TyFingerprint for looking up a trait impl.
    ///
    /// More types are fingerprintable here than for inherent impls, since
    /// trait impls may exist for references, tuples, closures etc.
    pub fn for_trait_impl(ty: &Ty) -> Option<TyFingerprint> {
        let fp = match ty.kind(Interner) {
            TyKind::Str => TyFingerprint::Str,
            TyKind::Never => TyFingerprint::Never,
            TyKind::Slice(..) => TyFingerprint::Slice,
            TyKind::Array(..) => TyFingerprint::Array,
            TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
            TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
            TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
            TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
            TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
            // References and tuples delegate to their pointee / first element.
            TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty),
            TyKind::Tuple(_, subst) => {
                let first_ty = subst.interned().get(0).map(|arg| arg.assert_ty_ref(Interner));
                match first_ty {
                    Some(ty) => return TyFingerprint::for_trait_impl(ty),
                    None => TyFingerprint::Unit,
                }
            }
            TyKind::AssociatedType(_, _)
            | TyKind::OpaqueType(_, _)
            | TyKind::FnDef(_, _)
            | TyKind::Closure(_, _)
            | TyKind::Generator(..)
            | TyKind::GeneratorWitness(..) => TyFingerprint::Unnameable,
            TyKind::Function(fn_ptr) => {
                TyFingerprint::Function(fn_ptr.substitution.0.len(Interner) as u32)
            }
            // Unresolved/variable types have no usable fingerprint.
            TyKind::Alias(_)
            | TyKind::Placeholder(_)
            | TyKind::BoundVar(_)
            | TyKind::InferenceVar(_, _)
            | TyKind::Error => return None,
        };
        Some(fp)
    }
}
+
/// Fingerprints for every concrete integer type; used when a method receiver
/// is an unresolved integer variable and all integer impls must be searched.
pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
    TyFingerprint::Scalar(Scalar::Int(IntTy::I8)),
    TyFingerprint::Scalar(Scalar::Int(IntTy::I16)),
    TyFingerprint::Scalar(Scalar::Int(IntTy::I32)),
    TyFingerprint::Scalar(Scalar::Int(IntTy::I64)),
    TyFingerprint::Scalar(Scalar::Int(IntTy::I128)),
    TyFingerprint::Scalar(Scalar::Int(IntTy::Isize)),
    TyFingerprint::Scalar(Scalar::Uint(UintTy::U8)),
    TyFingerprint::Scalar(Scalar::Uint(UintTy::U16)),
    TyFingerprint::Scalar(Scalar::Uint(UintTy::U32)),
    TyFingerprint::Scalar(Scalar::Uint(UintTy::U64)),
    TyFingerprint::Scalar(Scalar::Uint(UintTy::U128)),
    TyFingerprint::Scalar(Scalar::Uint(UintTy::Usize)),
];
+
/// Fingerprints for every concrete float type; the float analogue of
/// [`ALL_INT_FPS`].
pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
    TyFingerprint::Scalar(Scalar::Float(FloatTy::F32)),
    TyFingerprint::Scalar(Scalar::Float(FloatTy::F64)),
];
+
/// Trait impls defined or available in some crate.
///
/// Indexed first by the implemented trait, then by the self type's
/// [`TyFingerprint`], so lookup for a concrete (trait, self type) pair only
/// touches the relevant impls.
#[derive(Debug, Eq, PartialEq)]
pub struct TraitImpls {
    // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
    map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
}
+
impl TraitImpls {
    /// Salsa query: collects all trait impls defined in `krate`.
    pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
        let _p = profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}"));
        let mut impls = Self { map: FxHashMap::default() };

        let crate_def_map = db.crate_def_map(krate);
        impls.collect_def_map(db, &crate_def_map);
        impls.shrink_to_fit();

        Arc::new(impls)
    }

    /// Salsa query: collects trait impls declared inside a block expression,
    /// returning `None` if the block introduces no items.
    pub(crate) fn trait_impls_in_block_query(
        db: &dyn HirDatabase,
        block: BlockId,
    ) -> Option<Arc<Self>> {
        let _p = profile::span("trait_impls_in_block_query");
        let mut impls = Self { map: FxHashMap::default() };

        let block_def_map = db.block_def_map(block)?;
        impls.collect_def_map(db, &block_def_map);
        impls.shrink_to_fit();

        Some(Arc::new(impls))
    }

    /// Salsa query: merges the trait impls of `krate` and all its transitive
    /// dependencies into one index.
    pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
        let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
        let crate_graph = db.crate_graph();
        let mut res = Self { map: FxHashMap::default() };

        for krate in crate_graph.transitive_deps(krate) {
            res.merge(&db.trait_impls_in_crate(krate));
        }
        res.shrink_to_fit();

        Arc::new(res)
    }

    /// Releases excess capacity at every level of the nested map once
    /// collection is finished, since the result is cached long-term.
    fn shrink_to_fit(&mut self) {
        self.map.shrink_to_fit();
        self.map.values_mut().for_each(|map| {
            map.shrink_to_fit();
            map.values_mut().for_each(Vec::shrink_to_fit);
        });
    }

    /// Indexes every trait impl in `def_map` by (trait, self-type fingerprint),
    /// recursing into block def maps found in unnamed consts.
    fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
        for (_module_id, module_data) in def_map.modules() {
            for impl_id in module_data.scope.impls() {
                let target_trait = match db.impl_trait(impl_id) {
                    Some(tr) => tr.skip_binders().hir_trait_id(),
                    // Inherent impl; indexed by `InherentImpls` instead.
                    None => continue,
                };
                let self_ty = db.impl_self_ty(impl_id);
                let self_ty_fp = TyFingerprint::for_trait_impl(self_ty.skip_binders());
                self.map
                    .entry(target_trait)
                    .or_default()
                    .entry(self_ty_fp)
                    .or_default()
                    .push(impl_id);
            }

            // To better support custom derives, collect impls in all unnamed const items.
            // const _: () = { ... };
            for konst in collect_unnamed_consts(db, &module_data.scope) {
                let body = db.body(konst.into());
                for (_, block_def_map) in body.blocks(db.upcast()) {
                    self.collect_def_map(db, &block_def_map);
                }
            }
        }
    }

    /// Merges another index into this one (used to fold dependency crates
    /// into `trait_impls_in_deps_query`).
    fn merge(&mut self, other: &Self) {
        for (trait_, other_map) in &other.map {
            let map = self.map.entry(*trait_).or_default();
            for (fp, impls) in other_map {
                map.entry(*fp).or_default().extend(impls);
            }
        }
    }

    /// Queries all trait impls for the given type.
    pub fn for_self_ty_without_blanket_impls(
        &self,
        fp: TyFingerprint,
    ) -> impl Iterator<Item = ImplId> + '_ {
        self.map
            .values()
            .flat_map(move |impls| impls.get(&Some(fp)).into_iter())
            .flat_map(|it| it.iter().copied())
    }

    /// Queries all impls of the given trait.
    pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
        self.map
            .get(&trait_)
            .into_iter()
            .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
    }

    /// Queries all impls of `trait_` that may apply to `self_ty`.
    pub fn for_trait_and_self_ty(
        &self,
        trait_: TraitId,
        self_ty: TyFingerprint,
    ) -> impl Iterator<Item = ImplId> + '_ {
        self.map
            .get(&trait_)
            .into_iter()
            // Blanket impls (fingerprint `None`) may always apply.
            .flat_map(move |map| map.get(&Some(self_ty)).into_iter().chain(map.get(&None)))
            .flat_map(|v| v.iter().copied())
    }

    /// Iterates over every trait impl in this index.
    pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
        self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
    }
}
+
/// Inherent impls defined in some crate.
///
/// Inherent impls can only be defined in the crate that also defines the self type of the impl
/// (note that some primitives are considered to be defined by both libcore and liballoc).
///
/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
/// single crate.
#[derive(Debug, Eq, PartialEq)]
pub struct InherentImpls {
    // Impls indexed by the fingerprint of their self type; types whose
    // fingerprint is `None` cannot have inherent impls at all.
    map: FxHashMap<TyFingerprint, Vec<ImplId>>,
}
+
+impl InherentImpls {
+ pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let crate_def_map = db.crate_def_map(krate);
+ impls.collect_def_map(db, &crate_def_map);
+ impls.shrink_to_fit();
+
+ Arc::new(impls)
+ }
+
+ pub(crate) fn inherent_impls_in_block_query(
+ db: &dyn HirDatabase,
+ block: BlockId,
+ ) -> Option<Arc<Self>> {
+ let mut impls = Self { map: FxHashMap::default() };
+ if let Some(block_def_map) = db.block_def_map(block) {
+ impls.collect_def_map(db, &block_def_map);
+ impls.shrink_to_fit();
+ return Some(Arc::new(impls));
+ }
+ None
+ }
+
+ fn shrink_to_fit(&mut self) {
+ self.map.values_mut().for_each(Vec::shrink_to_fit);
+ self.map.shrink_to_fit();
+ }
+
+ fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ let data = db.impl_data(impl_id);
+ if data.target_trait.is_some() {
+ continue;
+ }
+
+ let self_ty = db.impl_self_ty(impl_id);
+ let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders());
+ if let Some(fp) = fp {
+ self.map.entry(fp).or_default().push(impl_id);
+ }
+ // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
+ }
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in collect_unnamed_consts(db, &module_data.scope) {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db.upcast()) {
+ self.collect_def_map(db, &block_def_map);
+ }
+ }
+ }
+ }
+
+ pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
+ match TyFingerprint::for_inherent_impl(self_ty) {
+ Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
+ None => &[],
+ }
+ }
+
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|v| v.iter().copied())
+ }
+}
+
/// Salsa query: finds the crates (at most two — in practice libcore and
/// liballoc for primitives) that contain inherent impls matching `fp`,
/// searching `krate` and its transitive dependencies.
pub fn inherent_impl_crates_query(
    db: &dyn HirDatabase,
    krate: CrateId,
    fp: TyFingerprint,
) -> ArrayVec<CrateId, 2> {
    let _p = profile::span("inherent_impl_crates_query");
    let mut res = ArrayVec::new();
    let crate_graph = db.crate_graph();

    for krate in crate_graph.transitive_deps(krate) {
        if res.is_full() {
            // we don't currently look for or store more than two crates here,
            // so don't needlessly look at more crates than necessary.
            break;
        }
        let impls = db.inherent_impls_in_crate(krate);
        if impls.map.get(&fp).map_or(false, |v| !v.is_empty()) {
            res.push(krate);
        }
    }

    res
}
+
/// Yields all unnamed consts (`const _: () = …`) in `scope`, whose bodies may
/// contain impls emitted by custom derives.
fn collect_unnamed_consts<'a>(
    db: &'a dyn HirDatabase,
    scope: &'a ItemScope,
) -> impl Iterator<Item = ConstId> + 'a {
    let unnamed_consts = scope.unnamed_consts();

    // FIXME: Also treat consts named `_DERIVE_*` as unnamed, since synstructure generates those.
    // Should be removed once synstructure stops doing that.
    let synstructure_hack_consts = scope.values().filter_map(|(item, _)| match item {
        ModuleDefId::ConstId(id) => {
            let loc = id.lookup(db.upcast());
            let item_tree = loc.id.item_tree(db.upcast());
            if item_tree[loc.id.value]
                .name
                .as_ref()
                .map_or(false, |n| n.to_smol_str().starts_with("_DERIVE_"))
            {
                Some(id)
            } else {
                None
            }
        }
        _ => None,
    });

    unnamed_consts.chain(synstructure_hack_consts)
}
+
+pub fn def_crates(
+ db: &dyn HirDatabase,
+ ty: &Ty,
+ cur_crate: CrateId,
+) -> Option<ArrayVec<CrateId, 2>> {
+ let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect());
+
+ let fp = TyFingerprint::for_inherent_impl(ty);
+
+ match ty.kind(Interner) {
+ TyKind::Adt(AdtId(def_id), _) => mod_to_crate_ids(def_id.module(db.upcast())),
+ TyKind::Foreign(id) => {
+ mod_to_crate_ids(from_foreign_def_id(*id).lookup(db.upcast()).module(db.upcast()))
+ }
+ TyKind::Dyn(_) => ty
+ .dyn_trait()
+ .and_then(|trait_| mod_to_crate_ids(GenericDefId::TraitId(trait_).module(db.upcast()))),
+ // for primitives, there may be impls in various places (core and alloc
+ // mostly). We just check the whole crate graph for crates with impls
+ // (cached behind a query).
+ TyKind::Scalar(_)
+ | TyKind::Str
+ | TyKind::Slice(_)
+ | TyKind::Array(..)
+ | TyKind::Raw(..) => {
+ Some(db.inherent_impl_crates(cur_crate, fp.expect("fingerprint for primitive")))
+ }
+ _ => return None,
+ }
+}
+
+/// Look up the method with the given name.
+pub(crate) fn lookup_method(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: &Name,
+) -> Option<(ReceiverAdjustments, FunctionId)> {
+ iterate_method_candidates(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ Some(name),
+ LookupMode::MethodCall,
+ |adjustments, f| match f {
+ AssocItemId::FunctionId(f) => Some((adjustments, f)),
+ _ => None,
+ },
+ )
+}
+
/// Whether we're looking up a dotted method call (like `v.len()`) or a path
/// (like `Vec::new`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LookupMode {
    /// Looking up a method call like `v.len()`: We only consider candidates
    /// that have a `self` parameter, and do autoderef.
    MethodCall,
    /// Looking up a path like `Vec::new` or `Vec::default`: We consider all
    /// candidates including associated constants, but don't do autoderef.
    Path,
}
+
/// Controls how visibility filtering is applied during method resolution.
#[derive(Clone, Copy)]
pub enum VisibleFromModule {
    /// Filter for results that are visible from the given module
    Filter(ModuleId),
    /// Include impls from the given block.
    IncludeBlock(BlockId),
    /// Do nothing special in regards visibility
    None,
}
+
+impl From<Option<ModuleId>> for VisibleFromModule {
+ fn from(module: Option<ModuleId>) -> Self {
+ match module {
+ Some(module) => Self::Filter(module),
+ None => Self::None,
+ }
+ }
+}
+
+impl From<Option<BlockId>> for VisibleFromModule {
+ fn from(block: Option<BlockId>) -> Self {
+ match block {
+ Some(block) => Self::IncludeBlock(block),
+ None => Self::None,
+ }
+ }
+}
+
/// The adjustments (derefs, autoref, array unsizing) that must be applied to
/// a method-call receiver to reach the candidate's expected self type.
#[derive(Debug, Clone, Default)]
pub struct ReceiverAdjustments {
    // `Some(m)` if a final `&`/`&mut` borrow is applied after the derefs.
    autoref: Option<Mutability>,
    // Number of deref steps taken before the optional autoref.
    autoderefs: usize,
    // Whether a final `[T; N]` -> `[T]` unsizing step is applied.
    unsize_array: bool,
}
+
impl ReceiverAdjustments {
    /// Replays the recorded adjustment steps on `ty`, returning the adjusted
    /// type together with the `Adjustment` records (in application order:
    /// derefs, then array unsizing, then autoref) for the inference result.
    pub(crate) fn apply(&self, table: &mut InferenceTable<'_>, ty: Ty) -> (Ty, Vec<Adjustment>) {
        let mut ty = ty;
        let mut adjust = Vec::new();
        for _ in 0..self.autoderefs {
            match autoderef::autoderef_step(table, ty.clone()) {
                None => {
                    // Should not happen: the same derefs succeeded during lookup.
                    never!("autoderef not possible for {:?}", ty);
                    ty = TyKind::Error.intern(Interner);
                    break;
                }
                Some((kind, new_ty)) => {
                    ty = new_ty.clone();
                    adjust.push(Adjustment {
                        kind: Adjust::Deref(match kind {
                            // FIXME should we know the mutability here?
                            AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
                            AutoderefKind::Builtin => None,
                        }),
                        target: new_ty,
                    });
                }
            }
        }
        if self.unsize_array {
            ty = match ty.kind(Interner) {
                TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner),
                _ => {
                    never!("unsize_array with non-array {:?}", ty);
                    ty
                }
            };
            // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference
            adjust.push(Adjustment {
                kind: Adjust::Pointer(PointerCast::Unsize),
                target: ty.clone(),
            });
        }
        if let Some(m) = self.autoref {
            ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
            adjust
                .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() });
        }
        (ty, adjust)
    }

    /// Copy of these adjustments with a trailing `&`/`&mut` autoref added.
    fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments {
        Self { autoref: Some(m), ..*self }
    }
}
+
// This would be nicer if it just returned an iterator, but that runs into
// lifetime problems, because we need to borrow temp `CrateImplDefs`.
// FIXME add a context type here?
/// Generic wrapper around [`iterate_method_candidates_dyn`]: iterates
/// candidates until `callback` returns `Some`, and returns that value.
pub(crate) fn iterate_method_candidates<T>(
    ty: &Canonical<Ty>,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    mode: LookupMode,
    mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option<T>,
) -> Option<T> {
    let mut slot = None;
    iterate_method_candidates_dyn(
        ty,
        db,
        env,
        traits_in_scope,
        visible_from_module,
        name,
        mode,
        &mut |adj, item| {
            // Iteration must have stopped right after `slot` was filled.
            assert!(slot.is_none());
            if let Some(it) = callback(adj, item) {
                slot = Some(it);
                return ControlFlow::Break(());
            }
            ControlFlow::Continue(())
        },
    );
    slot
}
+
+pub fn lookup_impl_method(
+ self_ty: &Ty,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+ name: &Name,
+) -> Option<FunctionId> {
+ let self_ty_fp = TyFingerprint::for_trait_impl(self_ty)?;
+ let trait_impls = db.trait_impls_in_deps(env.krate);
+ let impls = trait_impls.for_trait_and_self_ty(trait_, self_ty_fp);
+ let mut table = InferenceTable::new(db, env.clone());
+ find_matching_impl(impls, &mut table, &self_ty).and_then(|data| {
+ data.items.iter().find_map(|it| match it {
+ AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
+ _ => None,
+ })
+ })
+}
+
/// Returns the first impl in `impls` whose self type unifies with `self_ty`
/// and whose where-clauses hold in the current environment.
///
/// Each candidate is tried inside a table snapshot so failed unifications
/// leave no trace in the inference table.
fn find_matching_impl(
    mut impls: impl Iterator<Item = ImplId>,
    table: &mut InferenceTable<'_>,
    self_ty: &Ty,
) -> Option<Arc<ImplData>> {
    let db = table.db;
    loop {
        let impl_ = impls.next()?;
        let r = table.run_in_snapshot(|table| {
            let impl_data = db.impl_data(impl_);
            // Fresh inference vars for the impl's generic parameters.
            let substs =
                TyBuilder::subst_for_def(db, impl_).fill_with_inference_vars(table).build();
            let impl_ty = db.impl_self_ty(impl_).substitute(Interner, &substs);

            table
                .unify(self_ty, &impl_ty)
                .then(|| {
                    // The impl only matches if its where-clauses are satisfiable.
                    let wh_goals =
                        crate::chalk_db::convert_where_clauses(db, impl_.into(), &substs)
                            .into_iter()
                            .map(|b| b.cast(Interner));

                    let goal = crate::Goal::all(Interner, wh_goals);

                    table.try_obligation(goal).map(|_| impl_data)
                })
                .flatten()
        });
        if r.is_some() {
            break r;
        }
    }
}
+
/// Iterates associated-item candidates for a path like `Foo::bar`
/// (no autoderef, no receiver adjustments).
pub fn iterate_path_candidates(
    ty: &Canonical<Ty>,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    iterate_method_candidates_dyn(
        ty,
        db,
        env,
        traits_in_scope,
        visible_from_module,
        name,
        LookupMode::Path,
        // the adjustments are not relevant for path lookup
        &mut |_, id| callback(id),
    )
}
+
+pub fn iterate_method_candidates_dyn(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mode: LookupMode,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ match mode {
+ LookupMode::MethodCall => {
+ // For method calls, rust first does any number of autoderef, and
+ // then one autoref (i.e. when the method takes &self or &mut self).
+ // Note that when we've got a receiver like &S, even if the method
+ // we find in the end takes &self, we still do the autoderef step
+ // (just as rustc does an autoderef and then autoref again).
+
+ // We have to be careful about the order we're looking at candidates
+ // in here. Consider the case where we're resolving `x.clone()`
+ // where `x: &Vec<_>`. This resolves to the clone method with self
+ // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
+ // the receiver type exactly matches before cases where we have to
+ // do autoref. But in the autoderef steps, the `&_` self type comes
+ // up *before* the `Vec<_>` self type.
+ //
+ // On the other hand, we don't want to just pick any by-value method
+ // before any by-autoref method; it's just that we need to consider
+ // the methods by autoderef order of *receiver types*, not *self
+ // types*.
+
+ let mut table = InferenceTable::new(db, env.clone());
+ let ty = table.instantiate_canonical(ty.clone());
+ let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+
+ let result = deref_chain.into_iter().zip(adj).try_for_each(|(receiver_ty, adj)| {
+ iterate_method_candidates_with_autoref(
+ &receiver_ty,
+ adj,
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ callback,
+ )
+ });
+ result
+ }
+ LookupMode::Path => {
+ // No autoderef for path lookups
+ iterate_method_candidates_for_self_ty(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ callback,
+ )
+ }
+ }
+}
+
/// Iterates candidates for one receiver type in the deref chain, trying the
/// receiver as-is first, then behind `&`, then behind `&mut` — matching
/// rustc's autoref order.
fn iterate_method_candidates_with_autoref(
    receiver_ty: &Canonical<Ty>,
    first_adjustment: ReceiverAdjustments,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    if receiver_ty.value.is_general_var(Interner, &receiver_ty.binders) {
        // don't try to resolve methods on unknown types
        return ControlFlow::Continue(());
    }

    // 1. Receiver exactly as given.
    iterate_method_candidates_by_receiver(
        receiver_ty,
        first_adjustment.clone(),
        db,
        env.clone(),
        traits_in_scope,
        visible_from_module,
        name,
        &mut callback,
    )?;

    // 2. Receiver behind a shared reference.
    let refed = Canonical {
        value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
            .intern(Interner),
        binders: receiver_ty.binders.clone(),
    };

    iterate_method_candidates_by_receiver(
        &refed,
        first_adjustment.with_autoref(Mutability::Not),
        db,
        env.clone(),
        traits_in_scope,
        visible_from_module,
        name,
        &mut callback,
    )?;

    // 3. Receiver behind a mutable reference.
    let ref_muted = Canonical {
        value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
            .intern(Interner),
        binders: receiver_ty.binders.clone(),
    };

    iterate_method_candidates_by_receiver(
        &ref_muted,
        first_adjustment.with_autoref(Mutability::Mut),
        db,
        env,
        traits_in_scope,
        visible_from_module,
        name,
        &mut callback,
    )
}
+
/// Iterates candidates whose *receiver* type is exactly `receiver_ty`,
/// searching all self types in its deref chain — inherent methods first
/// (over the whole chain), then trait methods, as inherent methods take
/// precedence.
fn iterate_method_candidates_by_receiver(
    receiver_ty: &Canonical<Ty>,
    receiver_adjustments: ReceiverAdjustments,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    let mut table = InferenceTable::new(db, env);
    let receiver_ty = table.instantiate_canonical(receiver_ty.clone());
    let snapshot = table.snapshot();
    // We're looking for methods with *receiver* type receiver_ty. These could
    // be found in any of the derefs of receiver_ty, so we have to go through
    // that.
    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
    while let Some((self_ty, _)) = autoderef.next() {
        iterate_inherent_methods(
            &self_ty,
            &mut autoderef.table,
            name,
            Some(&receiver_ty),
            Some(receiver_adjustments.clone()),
            visible_from_module,
            &mut callback,
        )?
    }

    // Undo any unification the first autoderef pass may have caused before
    // walking the chain again for trait candidates.
    table.rollback_to(snapshot);

    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
    while let Some((self_ty, _)) = autoderef.next() {
        iterate_trait_method_candidates(
            &self_ty,
            &mut autoderef.table,
            traits_in_scope,
            name,
            Some(&receiver_ty),
            Some(receiver_adjustments.clone()),
            &mut callback,
        )?
    }

    ControlFlow::Continue(())
}
+
/// Iterates candidates for a fixed self type (path lookup): inherent items
/// first, then trait items — no autoderef, no receiver filtering.
fn iterate_method_candidates_for_self_ty(
    self_ty: &Canonical<Ty>,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    let mut table = InferenceTable::new(db, env);
    let self_ty = table.instantiate_canonical(self_ty.clone());
    iterate_inherent_methods(
        &self_ty,
        &mut table,
        name,
        None,
        None,
        visible_from_module,
        &mut callback,
    )?;
    iterate_trait_method_candidates(
        &self_ty,
        &mut table,
        traits_in_scope,
        name,
        None,
        None,
        callback,
    )
}
+
/// Iterates trait associated-item candidates for `self_ty`, considering
/// traits in scope plus traits implied by `dyn Trait` bounds and the
/// parameter environment. Trait implementedness is checked lazily, once per
/// trait, only when a name/receiver-matching item is found.
fn iterate_trait_method_candidates(
    self_ty: &Ty,
    table: &mut InferenceTable<'_>,
    traits_in_scope: &FxHashSet<TraitId>,
    name: Option<&Name>,
    receiver_ty: Option<&Ty>,
    receiver_adjustments: Option<ReceiverAdjustments>,
    callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    let db = table.db;
    let env = table.trait_env.clone();
    let self_is_array = matches!(self_ty.kind(Interner), chalk_ir::TyKind::Array(..));
    // if ty is `dyn Trait`, the trait doesn't need to be in scope
    let inherent_trait =
        self_ty.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t));
    let env_traits = matches!(self_ty.kind(Interner), TyKind::Placeholder(_))
        // if we have `T: Trait` in the param env, the trait doesn't need to be in scope
        .then(|| {
            env.traits_in_scope_from_clauses(self_ty.clone())
                .flat_map(|t| all_super_traits(db.upcast(), t))
        })
        .into_iter()
        .flatten();
    let traits = inherent_trait.chain(env_traits).chain(traits_in_scope.iter().copied());

    let canonical_self_ty = table.canonicalize(self_ty.clone()).value;

    'traits: for t in traits {
        let data = db.trait_data(t);

        // Traits annotated with `#[rustc_skip_array_during_method_dispatch]` are skipped during
        // method resolution, if the receiver is an array, and we're compiling for editions before
        // 2021.
        // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for
        // arrays.
        if data.skip_array_during_method_dispatch && self_is_array {
            // FIXME: this should really be using the edition of the method name's span, in case it
            // comes from a macro
            if db.crate_graph()[env.krate].edition < Edition::Edition2021 {
                continue;
            }
        }

        // we'll be lazy about checking whether the type implements the
        // trait, but if we find out it doesn't, we'll skip the rest of the
        // iteration
        let mut known_implemented = false;
        for &(_, item) in data.items.iter() {
            // Don't pass a `visible_from_module` down to `is_valid_candidate`,
            // since only inherent methods should be included into visibility checking.
            if !is_valid_candidate(table, name, receiver_ty, item, self_ty, None) {
                continue;
            }
            if !known_implemented {
                let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty);
                if db.trait_solve(env.krate, goal.cast(Interner)).is_none() {
                    continue 'traits;
                }
            }
            known_implemented = true;
            callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
        }
    }
    ControlFlow::Continue(())
}
+
/// Iterates inherent associated-item candidates for `self_ty`, looking at the
/// crates that may define inherent impls for it (plus block-local impls when
/// requested via `VisibleFromModule::IncludeBlock`/`Filter`).
fn iterate_inherent_methods(
    self_ty: &Ty,
    table: &mut InferenceTable<'_>,
    name: Option<&Name>,
    receiver_ty: Option<&Ty>,
    receiver_adjustments: Option<ReceiverAdjustments>,
    visible_from_module: VisibleFromModule,
    callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    let db = table.db;
    let env = table.trait_env.clone();
    let def_crates = match def_crates(db, self_ty, env.krate) {
        Some(k) => k,
        // Types without inherent impls (references, type params, …).
        None => return ControlFlow::Continue(()),
    };

    let (module, block) = match visible_from_module {
        VisibleFromModule::Filter(module) => (Some(module), module.containing_block()),
        VisibleFromModule::IncludeBlock(block) => (None, Some(block)),
        VisibleFromModule::None => (None, None),
    };

    // Block-local impls first, then crate-level impls.
    if let Some(block_id) = block {
        if let Some(impls) = db.inherent_impls_in_block(block_id) {
            impls_for_self_ty(
                &impls,
                self_ty,
                table,
                name,
                receiver_ty,
                receiver_adjustments.clone(),
                module,
                callback,
            )?;
        }
    }

    for krate in def_crates {
        let impls = db.inherent_impls_in_crate(krate);
        impls_for_self_ty(
            &impls,
            self_ty,
            table,
            name,
            receiver_ty,
            receiver_adjustments.clone(),
            module,
            callback,
        )?;
    }
    return ControlFlow::Continue(());

    /// Runs `callback` for every valid candidate in one `InherentImpls` index.
    fn impls_for_self_ty(
        impls: &InherentImpls,
        self_ty: &Ty,
        table: &mut InferenceTable<'_>,
        name: Option<&Name>,
        receiver_ty: Option<&Ty>,
        receiver_adjustments: Option<ReceiverAdjustments>,
        visible_from_module: Option<ModuleId>,
        callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
    ) -> ControlFlow<()> {
        let db = table.db;
        let impls_for_self_ty = impls.for_self_ty(self_ty);
        for &impl_def in impls_for_self_ty {
            for &item in &db.impl_data(impl_def).items {
                if !is_valid_candidate(table, name, receiver_ty, item, self_ty, visible_from_module)
                {
                    continue;
                }
                callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
            }
        }
        ControlFlow::Continue(())
    }
}
+
/// Returns the receiver type for the index trait call.
///
/// Walks the autoderef chain of `ty` and returns the adjustments for the
/// first step whose type implements `index_trait`, or `None` if no step does.
pub fn resolve_indexing_op(
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    ty: Canonical<Ty>,
    index_trait: TraitId,
) -> Option<ReceiverAdjustments> {
    let mut table = InferenceTable::new(db, env.clone());
    let ty = table.instantiate_canonical(ty);
    let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
    for (ty, adj) in deref_chain.into_iter().zip(adj) {
        let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
        if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
            return Some(adj);
        }
    }
    None
}
+
/// Checks whether an associated item is a valid candidate: name matches,
/// item is visible (when filtering), its self type unifies with `self_ty`,
/// and — for method calls — it has a `self` parameter whose type unifies
/// with `receiver_ty`. Unification runs in a snapshot so rejected candidates
/// leave the table untouched.
fn is_valid_candidate(
    table: &mut InferenceTable<'_>,
    name: Option<&Name>,
    receiver_ty: Option<&Ty>,
    item: AssocItemId,
    self_ty: &Ty,
    visible_from_module: Option<ModuleId>,
) -> bool {
    // Early-return `false` as soon as any check fails.
    macro_rules! check_that {
        ($cond:expr) => {
            if !$cond {
                return false;
            }
        };
    }

    let db = table.db;
    match item {
        AssocItemId::FunctionId(m) => {
            let data = db.function_data(m);

            check_that!(name.map_or(true, |n| n == &data.name));
            check_that!(visible_from_module.map_or(true, |from_module| {
                let v = db.function_visibility(m).is_visible_from(db.upcast(), from_module);
                if !v {
                    cov_mark::hit!(autoderef_candidate_not_visible);
                }
                v
            }));

            table.run_in_snapshot(|table| {
                let subst = TyBuilder::subst_for_def(db, m).fill_with_inference_vars(table).build();
                let expect_self_ty = match m.lookup(db.upcast()).container {
                    // For trait items, `Self` is the first substitution parameter.
                    ItemContainerId::TraitId(_) => {
                        subst.at(Interner, 0).assert_ty_ref(Interner).clone()
                    }
                    ItemContainerId::ImplId(impl_id) => {
                        subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner)
                    }
                    // We should only get called for associated items (impl/trait)
                    ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
                        unreachable!()
                    }
                };
                check_that!(table.unify(&expect_self_ty, self_ty));
                if let Some(receiver_ty) = receiver_ty {
                    check_that!(data.has_self_param());

                    let sig = db.callable_item_signature(m.into());
                    // The declared type of `self`, with the impl/trait substs applied.
                    let expected_receiver =
                        sig.map(|s| s.params()[0].clone()).substitute(Interner, &subst);

                    check_that!(table.unify(&receiver_ty, &expected_receiver));
                }
                true
            })
        }
        AssocItemId::ConstId(c) => {
            let data = db.const_data(c);
            // Consts are never valid method-call candidates.
            check_that!(receiver_ty.is_none());

            check_that!(name.map_or(true, |n| data.name.as_ref() == Some(n)));
            check_that!(visible_from_module.map_or(true, |from_module| {
                let v = db.const_visibility(c).is_visible_from(db.upcast(), from_module);
                if !v {
                    cov_mark::hit!(const_candidate_not_visible);
                }
                v
            }));
            if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container {
                let self_ty_matches = table.run_in_snapshot(|table| {
                    let subst =
                        TyBuilder::subst_for_def(db, c).fill_with_inference_vars(table).build();
                    let expected_self_ty =
                        subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner);
                    table.unify(&expected_self_ty, &self_ty)
                });
                if !self_ty_matches {
                    cov_mark::hit!(const_candidate_self_type_mismatch);
                    return false;
                }
            }
            true
        }
        // Type aliases etc. are never candidates here.
        _ => false,
    }
}
+
+pub fn implements_trait(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+ let solution = db.trait_solve(env.krate, goal.cast(Interner));
+
+ solution.is_some()
+}
+
+pub fn implements_trait_unique(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+ let solution = db.trait_solve(env.krate, goal.cast(Interner));
+
+ matches!(solution, Some(crate::Solution::Unique(_)))
+}
+
/// This creates Substs for a trait with the given Self type and type variables
/// for all other parameters, to query Chalk with it.
///
/// The resulting canonical goal carries the binders of `self_ty` followed by
/// one fresh variable per remaining trait parameter.
fn generic_implements_goal(
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    trait_: TraitId,
    self_ty: &Canonical<Ty>,
) -> Canonical<InEnvironment<super::DomainGoal>> {
    // Start from the self type's binders; new bound vars are appended below.
    let mut kinds = self_ty.binders.interned().to_vec();
    let trait_ref = TyBuilder::trait_ref(db, trait_)
        .push(self_ty.value.clone())
        .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
        .build();
    // Record the kind (type/lifetime/const) of each freshly introduced
    // variable, skipping the first substitution entry (the self type).
    kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
        let vk = match x.data(Interner) {
            chalk_ir::GenericArgData::Ty(_) => {
                chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
            }
            chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
            chalk_ir::GenericArgData::Const(c) => {
                chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
            }
        };
        chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
    }));
    let obligation = trait_ref.cast(Interner);
    Canonical {
        binders: CanonicalVarKinds::from_iter(Interner, kinds),
        value: InEnvironment::new(&env.env, obligation),
    }
}
+
/// Computes the full deref chain of a method receiver together with the
/// `ReceiverAdjustments` needed to reach each step. The two returned vectors
/// are parallel (same length, index-aligned).
fn autoderef_method_receiver(
    table: &mut InferenceTable<'_>,
    ty: Ty,
) -> (Vec<Canonical<Ty>>, Vec<ReceiverAdjustments>) {
    let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new());
    let mut autoderef = autoderef::Autoderef::new(table, ty);
    while let Some((ty, derefs)) = autoderef.next() {
        deref_chain.push(autoderef.table.canonicalize(ty).value);
        adjustments.push(ReceiverAdjustments {
            autoref: None,
            autoderefs: derefs,
            unsize_array: false,
        });
    }
    // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
    if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = (
        deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())),
        adjustments.last().cloned(),
    ) {
        let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner);
        deref_chain.push(Canonical { value: unsized_ty, binders });
        adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj });
    }
    (deref_chain, adjustments)
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs
new file mode 100644
index 000000000..d7f48c69a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs
@@ -0,0 +1,62 @@
+//! A few helper functions for dealing with primitives.
+
+pub use chalk_ir::{FloatTy, IntTy, UintTy};
+pub use hir_def::builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint};
+
+pub fn int_ty_to_string(ty: IntTy) -> &'static str {
+ match ty {
+ IntTy::Isize => "isize",
+ IntTy::I8 => "i8",
+ IntTy::I16 => "i16",
+ IntTy::I32 => "i32",
+ IntTy::I64 => "i64",
+ IntTy::I128 => "i128",
+ }
+}
+
+pub fn uint_ty_to_string(ty: UintTy) -> &'static str {
+ match ty {
+ UintTy::Usize => "usize",
+ UintTy::U8 => "u8",
+ UintTy::U16 => "u16",
+ UintTy::U32 => "u32",
+ UintTy::U64 => "u64",
+ UintTy::U128 => "u128",
+ }
+}
+
+pub fn float_ty_to_string(ty: FloatTy) -> &'static str {
+ match ty {
+ FloatTy::F32 => "f32",
+ FloatTy::F64 => "f64",
+ }
+}
+
+pub(super) fn int_ty_from_builtin(t: BuiltinInt) -> IntTy {
+ match t {
+ BuiltinInt::Isize => IntTy::Isize,
+ BuiltinInt::I8 => IntTy::I8,
+ BuiltinInt::I16 => IntTy::I16,
+ BuiltinInt::I32 => IntTy::I32,
+ BuiltinInt::I64 => IntTy::I64,
+ BuiltinInt::I128 => IntTy::I128,
+ }
+}
+
+pub(super) fn uint_ty_from_builtin(t: BuiltinUint) -> UintTy {
+ match t {
+ BuiltinUint::Usize => UintTy::Usize,
+ BuiltinUint::U8 => UintTy::U8,
+ BuiltinUint::U16 => UintTy::U16,
+ BuiltinUint::U32 => UintTy::U32,
+ BuiltinUint::U64 => UintTy::U64,
+ BuiltinUint::U128 => UintTy::U128,
+ }
+}
+
+pub(super) fn float_ty_from_builtin(t: BuiltinFloat) -> FloatTy {
+ match t {
+ BuiltinFloat::F32 => FloatTy::F32,
+ BuiltinFloat::F64 => FloatTy::F64,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
new file mode 100644
index 000000000..dc7252f70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -0,0 +1,150 @@
+//! Database used for testing `hir`.
+
+use std::{
+ fmt, panic,
+ sync::{Arc, Mutex},
+};
+
+use base_db::{
+ salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+};
+use hir_def::{db::DefDatabase, ModuleId};
+use hir_expand::db::AstDatabase;
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::TextRange;
+use test_utils::extract_annotations;
+
// All salsa query-group storages a `hir-ty` test database needs to provide.
#[salsa::database(
    base_db::SourceDatabaseExtStorage,
    base_db::SourceDatabaseStorage,
    hir_expand::db::AstDatabaseStorage,
    hir_def::db::InternDatabaseStorage,
    hir_def::db::DefDatabaseStorage,
    crate::db::HirDatabaseStorage
)]
pub(crate) struct TestDB {
    storage: salsa::Storage<TestDB>,
    // While `Some`, salsa events are captured here; toggled by `log` below.
    events: Mutex<Option<Vec<salsa::Event>>>,
}
+
+impl Default for TestDB {
+ fn default() -> Self {
+ let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.set_enable_proc_attr_macros(true);
+ this
+ }
+}
+
+impl fmt::Debug for TestDB {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("TestDB").finish()
+ }
+}
+
+impl Upcast<dyn AstDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+ &*self
+ }
+}
+
+impl Upcast<dyn DefDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn DefDatabase + 'static) {
+ &*self
+ }
+}
+
+impl salsa::Database for TestDB {
+ fn salsa_event(&self, event: salsa::Event) {
+ let mut events = self.events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+}
+
+impl salsa::ParallelDatabase for TestDB {
+ fn snapshot(&self) -> salsa::Snapshot<TestDB> {
+ salsa::Snapshot::new(TestDB {
+ storage: self.storage.snapshot(),
+ events: Default::default(),
+ })
+ }
+}
+
// Assert that a shared `&TestDB` may be used across a caught panic; the
// auto-trait is not derived automatically here because of the interior
// mutability in the fields above.
impl panic::RefUnwindSafe for TestDB {}
+
impl FileLoader for TestDB {
    /// Delegates file-content lookup to the stock `base_db` implementation.
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
    /// Delegates anchored-path resolution to the stock `base_db` implementation.
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(path)
    }
    /// Delegates the file-to-crates mapping to the stock `base_db` implementation.
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}
+
+impl TestDB {
+ pub(crate) fn module_for_file_opt(&self, file_id: FileId) -> Option<ModuleId> {
+ for &krate in self.relevant_crates(file_id).iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (local_id, data) in crate_def_map.modules() {
+ if data.origin.file_id() == Some(file_id) {
+ return Some(crate_def_map.module_id(local_id));
+ }
+ }
+ }
+ None
+ }
+
+ pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
+ self.module_for_file_opt(file_id).unwrap()
+ }
+
+ pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
+ let mut files = Vec::new();
+ let crate_graph = self.crate_graph();
+ for krate in crate_graph.iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (module_id, _) in crate_def_map.modules() {
+ let file_id = crate_def_map[module_id].origin.file_id();
+ files.extend(file_id)
+ }
+ }
+ files
+ .into_iter()
+ .filter_map(|file_id| {
+ let text = self.file_text(file_id);
+ let annotations = extract_annotations(&text);
+ if annotations.is_empty() {
+ return None;
+ }
+ Some((file_id, annotations))
+ })
+ .collect()
+ }
+}
+
+impl TestDB {
+ pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
+ *self.events.lock().unwrap() = Some(Vec::new());
+ f();
+ self.events.lock().unwrap().take().unwrap()
+ }
+
+ pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ let events = self.log(f);
+ events
+ .into_iter()
+ .filter_map(|e| match e.kind {
+ // This is pretty horrible, but `Debug` is the only way to inspect
+ // QueryDescriptor at the moment.
+ salsa::EventKind::WillExecute { database_key } => {
+ Some(format!("{:?}", database_key.debug(self)))
+ }
+ _ => None,
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
new file mode 100644
index 000000000..d2f13e435
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -0,0 +1,578 @@
+mod never_type;
+mod coercion;
+mod regression;
+mod simple;
+mod patterns;
+mod traits;
+mod method_resolution;
+mod macros;
+mod display_source_code;
+mod incremental;
+mod diagnostics;
+
+use std::{collections::HashMap, env, sync::Arc};
+
+use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt};
+use expect_test::Expect;
+use hir_def::{
+ body::{Body, BodySourceMap, SyntheticSyntax},
+ db::DefDatabase,
+ expr::{ExprId, PatId},
+ item_scope::ItemScope,
+ nameres::DefMap,
+ src::HasSource,
+ AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
+};
+use hir_expand::{db::AstDatabase, InFile};
+use once_cell::race::OnceBool;
+use stdx::format_to;
+use syntax::{
+ ast::{self, AstNode, HasName},
+ SyntaxNode,
+};
+use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
+use tracing_tree::HierarchicalLayer;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ infer::{Adjustment, TypeMismatch},
+ test_db::TestDB,
+ InferenceResult, Ty,
+};
+
+// These tests compare the inference results for all expressions in a file
+// against snapshots of the expected results using expect. Use
+// `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots.
+
+fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
+ static ENABLE: OnceBool = OnceBool::new();
+ if !ENABLE.get_or_init(|| env::var("CHALK_DEBUG").is_ok()) {
+ return None;
+ }
+
+ let filter = EnvFilter::from_env("CHALK_DEBUG");
+ let layer = HierarchicalLayer::default()
+ .with_indent_lines(true)
+ .with_ansi(false)
+ .with_indent_amount(2)
+ .with_writer(std::io::stderr);
+ let subscriber = Registry::default().with(filter).with(layer);
+ Some(tracing::subscriber::set_default(subscriber))
+}
+
/// Checks only the `//^ <type>` annotations of the fixture (test-style rendering).
fn check_types(ra_fixture: &str) {
    check_impl(ra_fixture, false, true, false)
}
+
/// Like `check_types`, but renders types via `display_source_code` instead of
/// the test display.
fn check_types_source_code(ra_fixture: &str) {
    check_impl(ra_fixture, false, true, true)
}
+
/// Asserts the fixture produces no unexpected type mismatches; `//^` annotations
/// are allowed to be absent entirely.
fn check_no_mismatches(ra_fixture: &str) {
    check_impl(ra_fixture, true, false, false)
}
+
/// General-purpose check: `type:`, `expected …`, and `adjustments:` annotations
/// are all honored; at least one annotation is required.
fn check(ra_fixture: &str) {
    check_impl(ra_fixture, false, false, false)
}
+
/// Shared driver behind `check`/`check_types`/`check_no_mismatches`:
/// parses `//^` annotations out of the fixture, infers every body in source
/// order, matches inference results against the annotations, and finally
/// asserts that no annotation went unchecked and no mismatch was unannotated.
///
/// * `allow_none`: don't panic when the fixture has no annotations at all.
/// * `only_types`: treat every annotation as a bare type expectation.
/// * `display_source`: render types via `display_source_code` rather than the
///   test display.
fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_source: bool) {
    let _tracing = setup_tracing();
    let (db, files) = TestDB::with_many_files(ra_fixture);

    // Phase 1: split the annotations into per-kind expectation maps keyed by
    // file range.
    let mut had_annotations = false;
    let mut mismatches = HashMap::new();
    let mut types = HashMap::new();
    let mut adjustments = HashMap::<_, Vec<_>>::new();
    for (file_id, annotations) in db.extract_annotations() {
        for (range, expected) in annotations {
            let file_range = FileRange { file_id, range };
            if only_types {
                types.insert(file_range, expected);
            } else if expected.starts_with("type: ") {
                types.insert(file_range, expected.trim_start_matches("type: ").to_string());
            } else if expected.starts_with("expected") {
                mismatches.insert(file_range, expected);
            } else if expected.starts_with("adjustments: ") {
                adjustments.insert(
                    file_range,
                    expected
                        .trim_start_matches("adjustments: ")
                        .split(',')
                        .map(|it| it.trim().to_string())
                        .filter(|it| !it.is_empty())
                        .collect(),
                );
            } else {
                panic!("unexpected annotation: {}", expected);
            }
            had_annotations = true;
        }
    }
    assert!(had_annotations || allow_none, "no `//^` annotations found");

    // Phase 2: collect every body (functions, consts, statics) and sort by
    // source position so failures are reported deterministically.
    let mut defs: Vec<DefWithBodyId> = Vec::new();
    for file_id in files {
        let module = db.module_for_file_opt(file_id);
        let module = match module {
            Some(m) => m,
            None => continue,
        };
        let def_map = module.def_map(&db);
        visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
    }
    defs.sort_by_key(|def| match def {
        DefWithBodyId::FunctionId(it) => {
            let loc = it.lookup(&db);
            loc.source(&db).value.syntax().text_range().start()
        }
        DefWithBodyId::ConstId(it) => {
            let loc = it.lookup(&db);
            loc.source(&db).value.syntax().text_range().start()
        }
        DefWithBodyId::StaticId(it) => {
            let loc = it.lookup(&db);
            loc.source(&db).value.syntax().text_range().start()
        }
    });
    // Phase 3: run inference per body and consume matching expectations;
    // mismatches nobody annotated accumulate in `unexpected_type_mismatches`.
    let mut unexpected_type_mismatches = String::new();
    for def in defs {
        let (_body, body_source_map) = db.body_with_source_map(def);
        let inference_result = db.infer(def);

        for (pat, ty) in inference_result.type_of_pat.iter() {
            let node = match pat_node(&body_source_map, pat, &db) {
                Some(value) => value,
                None => continue,
            };
            let range = node.as_ref().original_file_range(&db);
            if let Some(expected) = types.remove(&range) {
                let actual = if display_source {
                    ty.display_source_code(&db, def.module(&db)).unwrap()
                } else {
                    ty.display_test(&db).to_string()
                };
                assert_eq!(actual, expected);
            }
        }

        for (expr, ty) in inference_result.type_of_expr.iter() {
            let node = match expr_node(&body_source_map, expr, &db) {
                Some(value) => value,
                None => continue,
            };
            let range = node.as_ref().original_file_range(&db);
            if let Some(expected) = types.remove(&range) {
                let actual = if display_source {
                    ty.display_source_code(&db, def.module(&db)).unwrap()
                } else {
                    ty.display_test(&db).to_string()
                };
                assert_eq!(actual, expected);
            }
            if let Some(expected) = adjustments.remove(&range) {
                if let Some(adjustments) = inference_result.expr_adjustments.get(&expr) {
                    assert_eq!(
                        expected,
                        adjustments
                            .iter()
                            .map(|Adjustment { kind, .. }| format!("{:?}", kind))
                            .collect::<Vec<_>>()
                    );
                } else {
                    panic!("expected {:?} adjustments, found none", expected);
                }
            }
        }

        for (pat, mismatch) in inference_result.pat_type_mismatches() {
            let node = match pat_node(&body_source_map, pat, &db) {
                Some(value) => value,
                None => continue,
            };
            let range = node.as_ref().original_file_range(&db);
            let actual = format!(
                "expected {}, got {}",
                mismatch.expected.display_test(&db),
                mismatch.actual.display_test(&db)
            );
            match mismatches.remove(&range) {
                Some(annotation) => assert_eq!(actual, annotation),
                None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
            }
        }
        for (expr, mismatch) in inference_result.expr_type_mismatches() {
            let node = match body_source_map.expr_syntax(expr) {
                Ok(sp) => {
                    let root = db.parse_or_expand(sp.file_id).unwrap();
                    sp.map(|ptr| ptr.to_node(&root).syntax().clone())
                }
                Err(SyntheticSyntax) => continue,
            };
            let range = node.as_ref().original_file_range(&db);
            let actual = format!(
                "expected {}, got {}",
                mismatch.expected.display_test(&db),
                mismatch.actual.display_test(&db)
            );
            match mismatches.remove(&range) {
                Some(annotation) => assert_eq!(actual, annotation),
                None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
            }
        }
    }

    // Phase 4: anything left in the maps was annotated but never observed —
    // report everything at once rather than failing on the first.
    let mut buf = String::new();
    if !unexpected_type_mismatches.is_empty() {
        format_to!(buf, "Unexpected type mismatches:\n{}", unexpected_type_mismatches);
    }
    if !mismatches.is_empty() {
        format_to!(buf, "Unchecked mismatch annotations:\n");
        for m in mismatches {
            format_to!(buf, "{:?}: {}\n", m.0.range, m.1);
        }
    }
    if !types.is_empty() {
        format_to!(buf, "Unchecked type annotations:\n");
        for t in types {
            format_to!(buf, "{:?}: type {}\n", t.0.range, t.1);
        }
    }
    if !adjustments.is_empty() {
        format_to!(buf, "Unchecked adjustments annotations:\n");
        for t in adjustments {
            format_to!(buf, "{:?}: type {:?}\n", t.0.range, t.1);
        }
    }
    assert!(buf.is_empty(), "{}", buf);
}
+
+fn expr_node(
+ body_source_map: &BodySourceMap,
+ expr: ExprId,
+ db: &TestDB,
+) -> Option<InFile<SyntaxNode>> {
+ Some(match body_source_map.expr_syntax(expr) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
+ }
+ Err(SyntheticSyntax) => return None,
+ })
+}
+
+fn pat_node(
+ body_source_map: &BodySourceMap,
+ pat: PatId,
+ db: &TestDB,
+) -> Option<InFile<SyntaxNode>> {
+ Some(match body_source_map.pat_syntax(pat) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| {
+ ptr.either(
+ |it| it.to_node(&root).syntax().clone(),
+ |it| it.to_node(&root).syntax().clone(),
+ )
+ })
+ }
+ Err(SyntheticSyntax) => return None,
+ })
+}
+
/// Renders the inference results of a single-file fixture as a text snapshot
/// (no mismatch section).
fn infer(ra_fixture: &str) -> String {
    infer_with_mismatches(ra_fixture, false)
}
+
/// Infers every body of a single-file fixture and renders one line per typed
/// pattern/expression (`<range> '<text>': <type>`), sorted by source range;
/// when `include_mismatches` is set, a mismatch section follows. Nodes that
/// come from macro expansions are prefixed with `!`.
fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
    let _tracing = setup_tracing();
    let (db, file_id) = TestDB::with_single_file(content);

    let mut buf = String::new();

    // Renders a single body's results into `buf` (captured mutably).
    let mut infer_def = |inference_result: Arc<InferenceResult>,
                         body_source_map: Arc<BodySourceMap>| {
        let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
        let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();

        for (pat, ty) in inference_result.type_of_pat.iter() {
            let syntax_ptr = match body_source_map.pat_syntax(pat) {
                Ok(sp) => {
                    let root = db.parse_or_expand(sp.file_id).unwrap();
                    sp.map(|ptr| {
                        ptr.either(
                            |it| it.to_node(&root).syntax().clone(),
                            |it| it.to_node(&root).syntax().clone(),
                        )
                    })
                }
                Err(SyntheticSyntax) => continue,
            };
            types.push((syntax_ptr.clone(), ty));
            if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) {
                mismatches.push((syntax_ptr, mismatch));
            }
        }

        for (expr, ty) in inference_result.type_of_expr.iter() {
            let node = match body_source_map.expr_syntax(expr) {
                Ok(sp) => {
                    let root = db.parse_or_expand(sp.file_id).unwrap();
                    sp.map(|ptr| ptr.to_node(&root).syntax().clone())
                }
                Err(SyntheticSyntax) => continue,
            };
            types.push((node.clone(), ty));
            if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
                mismatches.push((node, mismatch));
            }
        }

        // sort ranges for consistency
        types.sort_by_key(|(node, _)| {
            let range = node.value.text_range();
            (range.start(), range.end())
        });
        for (node, ty) in &types {
            // `self` params get special-cased so the snapshot shows "self"
            // rather than the whole parameter text.
            let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
                (self_param.name().unwrap().syntax().text_range(), "self".to_string())
            } else {
                (node.value.text_range(), node.value.text().to_string().replace('\n', " "))
            };
            let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
            format_to!(
                buf,
                "{}{:?} '{}': {}\n",
                macro_prefix,
                range,
                ellipsize(text, 15),
                ty.display_test(&db)
            );
        }
        if include_mismatches {
            mismatches.sort_by_key(|(node, _)| {
                let range = node.value.text_range();
                (range.start(), range.end())
            });
            for (src_ptr, mismatch) in &mismatches {
                let range = src_ptr.value.text_range();
                let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
                format_to!(
                    buf,
                    "{}{:?}: expected {}, got {}\n",
                    macro_prefix,
                    range,
                    mismatch.expected.display_test(&db),
                    mismatch.actual.display_test(&db),
                );
            }
        }
    };

    let module = db.module_for_file(file_id);
    let def_map = module.def_map(&db);

    // Collect every body and process them in source order so the snapshot is
    // stable across runs.
    let mut defs: Vec<DefWithBodyId> = Vec::new();
    visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
    defs.sort_by_key(|def| match def {
        DefWithBodyId::FunctionId(it) => {
            let loc = it.lookup(&db);
            loc.source(&db).value.syntax().text_range().start()
        }
        DefWithBodyId::ConstId(it) => {
            let loc = it.lookup(&db);
            loc.source(&db).value.syntax().text_range().start()
        }
        DefWithBodyId::StaticId(it) => {
            let loc = it.lookup(&db);
            loc.source(&db).value.syntax().text_range().start()
        }
    });
    for def in defs {
        let (_body, source_map) = db.body_with_source_map(def);
        let infer = db.infer(def);
        infer_def(infer, source_map);
    }

    // Drop the trailing newline(s) of the last entry.
    buf.truncate(buf.trim_end().len());
    buf
}
+
/// Invokes `cb` for every body-owning definition (functions, consts, statics,
/// trait/impl assoc items) reachable from `module_id`, recursing into child
/// modules and into block-level `DefMap`s inside bodies.
fn visit_module(
    db: &TestDB,
    crate_def_map: &DefMap,
    module_id: LocalModuleId,
    cb: &mut dyn FnMut(DefWithBodyId),
) {
    visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
    // Impl items are not part of the module's item scope, so walk them separately.
    for impl_id in crate_def_map[module_id].scope.impls() {
        let impl_data = db.impl_data(impl_id);
        for &item in impl_data.items.iter() {
            match item {
                AssocItemId::FunctionId(it) => {
                    let def = it.into();
                    cb(def);
                    let body = db.body(def);
                    visit_body(db, &body, cb);
                }
                AssocItemId::ConstId(it) => {
                    let def = it.into();
                    cb(def);
                    let body = db.body(def);
                    visit_body(db, &body, cb);
                }
                AssocItemId::TypeAliasId(_) => (),
            }
        }
    }

    // Walks the declarations of one item scope, recursing into child modules.
    fn visit_scope(
        db: &TestDB,
        crate_def_map: &DefMap,
        scope: &ItemScope,
        cb: &mut dyn FnMut(DefWithBodyId),
    ) {
        for decl in scope.declarations() {
            match decl {
                ModuleDefId::FunctionId(it) => {
                    let def = it.into();
                    cb(def);
                    let body = db.body(def);
                    visit_body(db, &body, cb);
                }
                ModuleDefId::ConstId(it) => {
                    let def = it.into();
                    cb(def);
                    let body = db.body(def);
                    visit_body(db, &body, cb);
                }
                ModuleDefId::StaticId(it) => {
                    let def = it.into();
                    cb(def);
                    let body = db.body(def);
                    visit_body(db, &body, cb);
                }
                ModuleDefId::TraitId(it) => {
                    let trait_data = db.trait_data(it);
                    for &(_, item) in trait_data.items.iter() {
                        match item {
                            AssocItemId::FunctionId(it) => cb(it.into()),
                            AssocItemId::ConstId(it) => cb(it.into()),
                            AssocItemId::TypeAliasId(_) => (),
                        }
                    }
                }
                ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb),
                _ => (),
            }
        }
    }

    // Recurses into the `DefMap`s of block expressions inside a body.
    fn visit_body(db: &TestDB, body: &Body, cb: &mut dyn FnMut(DefWithBodyId)) {
        for (_, def_map) in body.blocks(db) {
            for (mod_id, _) in def_map.modules() {
                visit_module(db, &def_map, mod_id, cb);
            }
        }
    }
}
+
/// Truncates `text` to at most `max_len` bytes by replacing its middle with
/// `"..."`. The split points are nudged forward to the nearest char boundary,
/// so the result may slightly exceed `max_len` for multi-byte text.
fn ellipsize(mut text: String, max_len: usize) -> String {
    if text.len() <= max_len {
        return text;
    }
    const ELLIPSIS: &str = "...";
    // Byte budget for kept text, split roughly evenly between prefix and suffix.
    let budget = max_len - ELLIPSIS.len();
    let mut prefix_len = budget / 2;
    while !text.is_char_boundary(prefix_len) {
        prefix_len += 1;
    }
    let mut suffix_len = budget - prefix_len;
    while !text.is_char_boundary(text.len() - suffix_len) {
        suffix_len += 1;
    }
    text.replace_range(prefix_len..text.len() - suffix_len, ELLIPSIS);
    text
}
+
/// Compares the rendered inference snapshot of the fixture against `expect`
/// (update snapshots with `UPDATE_EXPECT=1`).
fn check_infer(ra_fixture: &str, expect: Expect) {
    let mut actual = infer(ra_fixture);
    actual.push('\n');
    expect.assert_eq(&actual);
}
+
/// Like `check_infer`, but the snapshot additionally includes the type-mismatch
/// section.
fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) {
    let mut actual = infer_with_mismatches(ra_fixture, true);
    actual.push('\n');
    expect.assert_eq(&actual);
}
+
#[test]
fn salsa_bug() {
    // Regression test: infer everything, edit the file text, then infer again.
    // The second pass must not panic or produce stale results after the
    // incremental update.
    let (mut db, pos) = TestDB::with_position(
        "
        //- /lib.rs
        trait Index {
            type Output;
        }

        type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;

        pub trait UnificationStoreBase: Index<Output = Key<Self>> {
            type Key;

            fn len(&self) -> usize;
        }

        pub trait UnificationStoreMut: UnificationStoreBase {
            fn push(&mut self, value: Self::Key);
        }

        fn main() {
            let x = 1;
            x.push(1);$0
        }
        ",
    );

    let module = db.module_for_file(pos.file_id);
    let crate_def_map = module.def_map(&db);
    visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
        db.infer(def);
    });

    // Same fixture with an extra blank line inserted in `main`, shifting offsets.
    let new_text = "
        //- /lib.rs
        trait Index {
            type Output;
        }

        type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;

        pub trait UnificationStoreBase: Index<Output = Key<Self>> {
            type Key;

            fn len(&self) -> usize;
        }

        pub trait UnificationStoreMut: UnificationStoreBase {
            fn push(&mut self, value: Self::Key);
        }

        fn main() {

            let x = 1;
            x.push(1);
        }
        "
    .to_string();

    db.set_file_text(pos.file_id, Arc::new(new_text));

    let module = db.module_for_file(pos.file_id);
    let crate_def_map = module.def_map(&db);
    visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
        db.infer(def);
    });
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
new file mode 100644
index 000000000..bf59fadc2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
@@ -0,0 +1,755 @@
+use super::{check, check_no_mismatches, check_types};
+
+#[test]
+fn block_expr_type_mismatch() {
+ check(
+ r"
+fn test() {
+ let a: i32 = { 1i64 };
+ // ^^^^ expected i32, got i64
+}
+ ",
+ );
+}
+
+#[test]
+fn coerce_places() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct S<T> { a: T }
+
+fn f<T>(_: &[T]) -> T { loop {} }
+fn g<T>(_: S<&[T]>) -> T { loop {} }
+
+fn gen<T>() -> *mut [T; 2] { loop {} }
+fn test1<U>() -> *mut [U] {
+ gen()
+}
+
+fn test2() {
+ let arr: &[u8; 1] = &[1];
+
+ let a: &[_] = arr;
+ let b = f(arr);
+ let c: &[_] = { arr };
+ let d = g(S { a: arr });
+ let e: [&[_]; 1] = [arr];
+ let f: [&[_]; 2] = [arr; 2];
+ let g: (&[_], &[_]) = (arr, arr);
+}
+"#,
+ );
+}
+
+#[test]
+fn let_stmt_coerce() {
+ check(
+ r"
+//- minicore: coerce_unsized
+fn test() {
+ let x: &[isize] = &[1];
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ let x: *const [isize] = &[1];
+ // ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
+}
+",
+ );
+}
+
+#[test]
+fn custom_coerce_unsized() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+use core::{marker::Unsize, ops::CoerceUnsized};
+
+struct A<T: ?Sized>(*const T);
+struct B<T: ?Sized>(*const T);
+struct C<T: ?Sized> { inner: *const T }
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<C<U>> for C<T> {}
+
+fn foo1<T>(x: A<[T]>) -> A<[T]> { x }
+fn foo2<T>(x: B<[T]>) -> B<[T]> { x }
+fn foo3<T>(x: C<[T]>) -> C<[T]> { x }
+
+fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) {
+ let d = foo1(a);
+ // ^ expected A<[{unknown}]>, got A<[u8; 2]>
+ let e = foo2(b);
+ // ^ type: B<[u8]>
+ let f = foo3(c);
+ // ^ type: C<[u8]>
+}
+"#,
+ );
+}
+
+#[test]
+fn if_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test() {
+ let x = if true {
+ foo(&[1])
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ } else {
+ &[1]
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn if_else_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test() {
+ let x = if true {
+ &[1]
+ } else {
+ foo(&[1])
+ };
+}
+"#,
+ )
+}
+
+#[test]
+fn match_first_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test(i: i32) {
+ let x = match i {
+ 2 => foo(&[2]),
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ 1 => &[1],
+ _ => &[3],
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn match_second_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { loop {} }
+ // ^^^^^^^ adjustments: NeverToAny
+fn test(i: i32) {
+ let x = match i {
+ 1 => &[1],
+ 2 => foo(&[2]),
+ _ => &[3],
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_merge_one_by_one1() {
+ cov_mark::check!(coerce_merge_fail_fallback);
+
+ check(
+ r"
+fn test() {
+ let t = &mut 1;
+ let x = match 1 {
+ 1 => t as *mut i32,
+ 2 => t as &i32,
+ //^^^^^^^^^ expected *mut i32, got &i32
+ _ => t as *const i32,
+ // ^^^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer)
+
+ };
+ x;
+ //^ type: *const i32
+
+}
+ ",
+ );
+}
+
+#[test]
+fn return_coerce_unknown() {
+ check_types(
+ r"
+fn foo() -> u32 {
+ return unknown;
+ //^^^^^^^ u32
+}
+ ",
+ );
+}
+
+#[test]
+fn coerce_autoderef() {
+ check_no_mismatches(
+ r"
+struct Foo;
+fn takes_ref_foo(x: &Foo) {}
+fn test() {
+ takes_ref_foo(&Foo);
+ takes_ref_foo(&&Foo);
+ takes_ref_foo(&&&Foo);
+}",
+ );
+}
+
+#[test]
+fn coerce_autoderef_generic() {
+ check_no_mismatches(
+ r#"
+struct Foo;
+fn takes_ref<T>(x: &T) -> T { *x }
+fn test() {
+ takes_ref(&Foo);
+ takes_ref(&&Foo);
+ takes_ref(&&&Foo);
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_autoderef_block() {
+ check_no_mismatches(
+ r#"
+//- minicore: deref
+struct String {}
+impl core::ops::Deref for String { type Target = str; }
+fn takes_ref_str(x: &str) {}
+fn returns_string() -> String { loop {} }
+fn test() {
+ takes_ref_str(&{ returns_string() });
+ // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Not))), Borrow(Ref(Not))
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_autoderef_implication_1() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct Foo<T>;
+impl core::ops::Deref for Foo<u32> { type Target = (); }
+
+fn takes_ref_foo<T>(x: &Foo<T>) {}
+fn test() {
+ let foo = Foo;
+ //^^^ type: Foo<{unknown}>
+ takes_ref_foo(&foo);
+
+ let foo = Foo;
+ //^^^ type: Foo<u32>
+ let _: &() = &foo;
+}",
+ );
+}
+
+#[test]
+fn coerce_autoderef_implication_2() {
+ check(
+ r"
+//- minicore: deref
+struct Foo<T>;
+impl core::ops::Deref for Foo<u32> { type Target = (); }
+
+fn takes_ref_foo<T>(x: &Foo<T>) {}
+fn test() {
+ let foo = Foo;
+ //^^^ type: Foo<{unknown}>
+ let _: &u32 = &Foo;
+ //^^^^ expected &u32, got &Foo<{unknown}>
+}",
+ );
+}
+
+#[test]
+fn closure_return_coerce() {
+ check_no_mismatches(
+ r"
+fn foo() {
+ let x = || {
+ if true {
+ return &1u32;
+ }
+ &&1u32
+ };
+}",
+ );
+}
+
+#[test]
+fn assign_coerce() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct String;
+impl core::ops::Deref for String { type Target = str; }
+fn g(_text: &str) {}
+fn f(text: &str) {
+ let mut text = text;
+ let tmp = String;
+ text = &tmp;
+ g(text);
+}
+",
+ );
+}
+
+#[test]
+fn destructuring_assign_coerce() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct String;
+impl core::ops::Deref for String { type Target = str; }
+fn g(_text: &str) {}
+fn f(text: &str) {
+ let mut text = text;
+ let tmp = String;
+ [text, _] = [&tmp, &tmp];
+ g(text);
+}
+",
+ );
+}
+
+#[test]
+fn coerce_fn_item_to_fn_ptr() {
+ check_no_mismatches(
+ r"
+fn foo(x: u32) -> isize { 1 }
+fn test() {
+ let f: fn(u32) -> isize = foo;
+ // ^^^ adjustments: Pointer(ReifyFnPointer)
+ let f: unsafe fn(u32) -> isize = foo;
+ // ^^^ adjustments: Pointer(ReifyFnPointer)
+}",
+ );
+}
+
+#[test]
+fn coerce_fn_items_in_match_arms() {
+ cov_mark::check!(coerce_fn_reification);
+
+ check_types(
+ r"
+fn foo1(x: u32) -> isize { 1 }
+fn foo2(x: u32) -> isize { 2 }
+fn foo3(x: u32) -> isize { 3 }
+fn test() {
+ let x = match 1 {
+ 1 => foo1,
+ 2 => foo2,
+ _ => foo3,
+ };
+ x;
+ //^ fn(u32) -> isize
+}",
+ );
+}
+
+#[test]
+fn coerce_closure_to_fn_ptr() {
+ check_no_mismatches(
+ r"
+fn test() {
+ let f: fn(u32) -> isize = |x| { 1 };
+}",
+ );
+}
+
+#[test]
+fn coerce_placeholder_ref() {
+ // placeholders should unify, even behind references
+ check_no_mismatches(
+ r"
+struct S<T> { t: T }
+impl<TT> S<TT> {
+ fn get(&self) -> &TT {
+ &self.t
+ }
+}",
+ );
+}
+
+#[test]
+fn coerce_unsize_array() {
+ check_types(
+ r#"
+//- minicore: coerce_unsized
+fn test() {
+ let f: &[usize] = &[1, 2, 3];
+ //^ usize
+}"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_trait_object_simple() {
+ check_types(
+ r#"
+//- minicore: coerce_unsized
+trait Foo<T, U> {}
+trait Bar<U, T, X>: Foo<T, U> {}
+trait Baz<T, X>: Bar<usize, T, X> {}
+
+struct S<T, X>;
+impl<T, X> Foo<T, usize> for S<T, X> {}
+impl<T, X> Bar<usize, T, X> for S<T, X> {}
+impl<T, X> Baz<T, X> for S<T, X> {}
+
+fn test() {
+ let obj: &dyn Baz<i8, i16> = &S;
+ //^ S<i8, i16>
+ let obj: &dyn Bar<_, i8, i16> = &S;
+ //^ S<i8, i16>
+ let obj: &dyn Foo<i8, _> = &S;
+ //^ S<i8, {unknown}>
+}"#,
+ );
+}
+
#[test]
fn coerce_unsize_super_trait_cycle() {
    // Unsizing to a trait object must not hang or mis-resolve when the
    // supertrait graph contains a cycle (B -> C -> B).
    //
    // Fix: the fixture declared `trait D: C` without a body, which is a parse
    // error in the fixture source; it now reads `trait D: C {}` so the test
    // exercises a well-formed trait hierarchy.
    check_no_mismatches(
        r#"
//- minicore: coerce_unsized
trait A {}
trait B: C + A {}
trait C: B {}
trait D: C {}

struct S;
impl A for S {}
impl B for S {}
impl C for S {}
impl D for S {}

fn test() {
    let obj: &dyn D = &S;
    let obj: &dyn A = &S;
}
"#,
    );
}
+
+#[test]
+fn coerce_unsize_generic() {
+ // FIXME: fix the type mismatches here
+ check(
+ r#"
+//- minicore: coerce_unsized
+struct Foo<T> { t: T };
+struct Bar<T>(Foo<T>);
+
+fn test() {
+ let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
+ //^^^^^^^^^ expected [usize], got [usize; 3]
+ let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
+ //^^^^^^^^^ expected [usize], got [usize; 3]
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_apit() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+trait Foo {}
+
+fn test(f: impl Foo, g: &(impl Foo + ?Sized)) {
+ let _: &dyn Foo = &f;
+ let _: &dyn Foo = g;
+ //^ expected &dyn Foo, got &impl Foo + ?Sized
+}
+ "#,
+ );
+}
+
+#[test]
+fn two_closures_lub() {
+ check_types(
+ r#"
+fn foo(c: i32) {
+ let add = |a: i32, b: i32| a + b;
+ let sub = |a, b| a - b;
+ //^^^^^^^^^^^^ |i32, i32| -> i32
+ if c > 42 { add } else { sub };
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ fn(i32, i32) -> i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn match_diverging_branch_1() {
+ check_types(
+ r#"
+enum Result<T> { Ok(T), Err }
+fn parse<T>() -> T { loop {} }
+
+fn test() -> i32 {
+ let a = match parse() {
+ Ok(val) => val,
+ Err => return 0,
+ };
+ a
+ //^ i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn match_diverging_branch_2() {
+ // same as 1 except for order of branches
+ check_types(
+ r#"
+enum Result<T> { Ok(T), Err }
+fn parse<T>() -> T { loop {} }
+
+fn test() -> i32 {
+ let a = match parse() {
+ Err => return 0,
+ Ok(val) => val,
+ };
+ a
+ //^ i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn panic_macro() {
+ check_no_mismatches(
+ r#"
+mod panic {
+ #[macro_export]
+ pub macro panic_2015 {
+ () => (
+ $crate::panicking::panic()
+ ),
+ }
+}
+
+mod panicking {
+ pub fn panic() -> ! { loop {} }
+}
+
+#[rustc_builtin_macro = "core_panic"]
+macro_rules! panic {
+ // Expands to either `$crate::panic::panic_2015` or `$crate::panic::panic_2021`
+ // depending on the edition of the caller.
+ ($($arg:tt)*) => {
+ /* compiler built-in */
+ };
+}
+
+fn main() {
+ panic!()
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_1() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn main() {
+ let foo: &[u32] = &[1, 2];
+ let foo: &[u32] = match true {
+ true => &[1, 2],
+ false => &[1, 2, 3],
+ };
+ let foo: &[u32] = if true {
+ &[1, 2]
+ } else {
+ &[1, 2, 3]
+ };
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_2() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct InFile<T>;
+impl<T> InFile<T> {
+ fn with_value<U>(self, value: U) -> InFile<U> { InFile }
+}
+struct RecordField;
+trait AstNode {}
+impl AstNode for RecordField {}
+
+fn takes_dyn(it: InFile<&dyn AstNode>) {}
+
+fn test() {
+ let x: InFile<()> = InFile;
+ let n = &RecordField;
+ takes_dyn(x.with_value(n));
+}
+ "#,
+ );
+}
+
#[test]
fn coerce_unsize_expected_type_3() {
    // NOTE(review): the fixture's `let x: InFile<()> = InFile;` refers to a type
    // this fixture never defines (it appears carried over from the test above),
    // so `x` infers to an unknown type. Confirm whether that binding is
    // intentional or leftover.
    check_no_mismatches(
        r#"
//- minicore: coerce_unsized
enum Option<T> { Some(T), None }
struct RecordField;
trait AstNode {}
impl AstNode for RecordField {}

fn takes_dyn(it: Option<&dyn AstNode>) {}

fn test() {
    let x: InFile<()> = InFile;
    let n = &RecordField;
    takes_dyn(Option::Some(n));
}
    "#,
    );
}
+
+#[test]
+fn coerce_unsize_expected_type_4() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+use core::{marker::Unsize, ops::CoerceUnsized};
+
+struct B<T: ?Sized>(*const T);
+impl<T: ?Sized> B<T> {
+ fn new(t: T) -> Self { B(&t) }
+}
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
+
+fn test() {
+ let _: B<[isize]> = B::new({ [1, 2, 3] });
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_array_elems_lub() {
+ check_no_mismatches(
+ r#"
+fn f() {}
+fn g() {}
+
+fn test() {
+ [f, g];
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_type_var() {
+ check_types(
+ r#"
+//- minicore: from, coerce_unsized
+fn test() {
+ let x = ();
+ let _: &() = &x.into();
+} //^^^^^^^^ ()
+"#,
+ )
+}
+
+#[test]
+fn coerce_overloaded_binary_op_rhs() {
+ check_types(
+ r#"
+//- minicore: deref, add
+
+struct String {}
+impl core::ops::Deref for String { type Target = str; }
+
+impl core::ops::Add<&str> for String {
+ type Output = String;
+}
+
+fn test() {
+ let s1 = String {};
+ let s2 = String {};
+ s1 + &s2;
+ //^^^^^^^^ String
+}
+
+ "#,
+ );
+}
+
+#[test]
+fn assign_coerce_struct_fields() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct S;
+trait Tr {}
+impl Tr for S {}
+struct V<T> { t: T }
+
+fn main() {
+ let a: V<&dyn Tr>;
+ a = V { t: &S };
+
+ let mut a: V<&dyn Tr> = V { t: &S };
+ a = V { t: &S };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assign_coerce_struct_fields() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+struct S;
+trait Tr {}
+impl Tr for S {}
+struct V<T> { t: T }
+
+fn main() {
+ let a: V<&dyn Tr>;
+ (a,) = V { t: &S };
+ //^^^^expected V<&S>, got (V<&dyn Tr>,)
+
+ let mut a: V<&dyn Tr> = V { t: &S };
+ (a,) = V { t: &S };
+ //^^^^expected V<&S>, got (V<&dyn Tr>,)
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
new file mode 100644
index 000000000..f00fa9729
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
@@ -0,0 +1,75 @@
+use super::check;
+
+#[test]
+fn function_return_type_mismatch_1() {
+ check(
+ r#"
+fn test() -> &'static str {
+ 5
+ //^ expected &str, got i32
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_2() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ return 1;
+ //^ expected &str, got i32
+ }
+ "ok"
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_3() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ return "ok";
+ }
+ 1
+ //^ expected &str, got i32
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_4() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ "ok"
+ } else {
+ 1
+ //^ expected &str, got i32
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_5() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ 1
+ //^ expected &str, got i32
+ } else {
+ "ok"
+ }
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
new file mode 100644
index 000000000..240942e48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
@@ -0,0 +1,176 @@
+use super::check_types_source_code;
+
+#[test]
+fn qualify_path_to_submodule() {
+ check_types_source_code(
+ r#"
+mod foo {
+ pub struct Foo;
+}
+
+fn bar() {
+ let foo: foo::Foo = foo::Foo;
+ foo;
+} //^^^ foo::Foo
+
+"#,
+ );
+}
+
+#[test]
+fn omit_default_type_parameters() {
+ check_types_source_code(
+ r#"
+struct Foo<T = u8> { t: T }
+fn main() {
+ let foo = Foo { t: 5u8 };
+ foo;
+} //^^^ Foo
+"#,
+ );
+
+ check_types_source_code(
+ r#"
+struct Foo<K, T = u8> { k: K, t: T }
+fn main() {
+ let foo = Foo { k: 400, t: 5u8 };
+ foo;
+} //^^^ Foo<i32>
+"#,
+ );
+}
+
+#[test]
+fn render_raw_ptr_impl_ty() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Unpin {}
+fn foo() -> *const (impl Unpin + Sized) { loop {} }
+fn main() {
+ let foo = foo();
+ foo;
+} //^^^ *const impl Unpin
+"#,
+ );
+}
+
+#[test]
+fn render_dyn_for_ty() {
+ // FIXME
+ check_types_source_code(
+ r#"
+trait Foo<'a> {}
+
+fn foo(foo: &dyn for<'a> Foo<'a>) {}
+ // ^^^ &dyn Foo
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_apit() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+trait Bar<T> {}
+struct S<T>;
+fn test(
+ a: impl Foo,
+ b: impl Foo + Sized,
+ c: &(impl Foo + ?Sized),
+ d: S<impl Foo>,
+ ref_any: &impl ?Sized,
+ empty: impl,
+) {
+ a;
+ //^ impl Foo
+ b;
+ //^ impl Foo
+ c;
+ //^ &impl Foo + ?Sized
+ d;
+ //^ S<impl Foo>
+ ref_any;
+ //^^^^^^^ &impl ?Sized
+ empty;
+} //^^^^^ impl Sized
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_rpit() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo1() -> impl Foo { loop {} }
+fn foo2() -> impl Foo + Sized { loop {} }
+fn foo3() -> impl Foo + ?Sized { loop {} }
+fn test() {
+ let foo = foo1();
+ foo;
+ //^^^ impl Foo
+ let foo = foo2();
+ foo;
+ //^^^ impl Foo
+ let foo = foo3();
+ foo;
+} //^^^ impl Foo + ?Sized
+"#,
+ );
+}
+
+#[test]
+fn parenthesize_ptr_rpit_sized_bounds() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo1() -> *const impl Foo { loop {} }
+fn foo2() -> *const (impl Foo + Sized) { loop {} }
+fn foo3() -> *const (impl Sized + Foo) { loop {} }
+fn foo4() -> *const (impl Foo + ?Sized) { loop {} }
+fn foo5() -> *const (impl ?Sized + Foo) { loop {} }
+fn test() {
+ let foo = foo1();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo2();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo3();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo4();
+ foo;
+ //^^^ *const (impl Foo + ?Sized)
+ let foo = foo5();
+ foo;
+} //^^^ *const (impl Foo + ?Sized)
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_impl_traits_in_fn_signature() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn test(
+ a: fn(impl Foo) -> impl Foo,
+ b: fn(impl Foo + Sized) -> impl Foo + Sized,
+ c: fn(&(impl Foo + ?Sized)) -> &(impl Foo + ?Sized),
+) {
+ a;
+ //^ fn(impl Foo) -> impl Foo
+ b;
+ //^ fn(impl Foo) -> impl Foo
+ c;
+} //^ fn(&impl Foo + ?Sized) -> &impl Foo + ?Sized
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
new file mode 100644
index 000000000..3e08e83e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -0,0 +1,51 @@
+use std::sync::Arc;
+
+use base_db::{fixture::WithFixture, SourceDatabaseExt};
+
+use crate::{db::HirDatabase, test_db::TestDB};
+
+use super::visit_module;
+
+#[test]
+fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+ //- /lib.rs
+ fn foo() -> i32 {
+ $01 + 1
+ }
+ ",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{:?}", events).contains("infer"))
+ }
+
+ let new_text = "
+ fn foo() -> i32 {
+ 1
+ +
+ 1
+ }
+ "
+ .to_string();
+
+ db.set_file_text(pos.file_id, Arc::new(new_text));
+
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
new file mode 100644
index 000000000..a1ab6060e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
@@ -0,0 +1,1338 @@
+use expect_test::expect;
+use test_utils::{bench, bench_fixture, skip_slow_tests};
+
+use crate::tests::check_infer_with_mismatches;
+
+use super::{check_infer, check_types};
+
+#[test]
+fn cfg_impl_def() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo cfg:test
+use foo::S as T;
+struct S;
+
+#[cfg(test)]
+impl S {
+ fn foo1(&self) -> i32 { 0 }
+}
+
+#[cfg(not(test))]
+impl S {
+ fn foo2(&self) -> i32 { 0 }
+}
+
+fn test() {
+ let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4());
+ t;
+} //^ (i32, {unknown}, i32, {unknown})
+
+//- /foo.rs crate:foo
+pub struct S;
+
+#[cfg(not(test))]
+impl S {
+ pub fn foo3(&self) -> i32 { 0 }
+}
+
+#[cfg(test)]
+impl S {
+ pub fn foo4(&self) -> i32 { 0 }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_macros_expanded() {
+ check_infer(
+ r#"
+ struct Foo(Vec<i32>);
+
+ macro_rules! foo {
+ ($($item:expr),*) => {
+ {
+ Foo(vec![$($item,)*])
+ }
+ };
+ }
+
+ fn main() {
+ let x = foo!(1,2);
+ }
+ "#,
+ expect![[r#"
+ !0..17 '{Foo(v...,2,])}': Foo
+ !1..4 'Foo': Foo({unknown}) -> Foo
+ !1..16 'Foo(vec![1,2,])': Foo
+ !5..15 'vec![1,2,]': {unknown}
+ 155..181 '{ ...,2); }': ()
+ 165..166 'x': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn infer_legacy_textual_scoped_macros_expanded() {
+ check_infer(
+ r#"
+ struct Foo(Vec<i32>);
+
+ #[macro_use]
+ mod m {
+ macro_rules! foo {
+ ($($item:expr),*) => {
+ {
+ Foo(vec![$($item,)*])
+ }
+ };
+ }
+ }
+
+ fn main() {
+ let x = foo!(1,2);
+ let y = crate::foo!(1,2);
+ }
+ "#,
+ expect![[r#"
+ !0..17 '{Foo(v...,2,])}': Foo
+ !1..4 'Foo': Foo({unknown}) -> Foo
+ !1..16 'Foo(vec![1,2,])': Foo
+ !5..15 'vec![1,2,]': {unknown}
+ 194..250 '{ ...,2); }': ()
+ 204..205 'x': Foo
+ 227..228 'y': {unknown}
+ 231..247 'crate:...!(1,2)': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_qualified_macros_expanded() {
+ check_infer(
+ r#"
+ #[macro_export]
+ macro_rules! foo {
+ () => { 42i32 }
+ }
+
+ mod m {
+ pub use super::foo as bar;
+ }
+
+ fn main() {
+ let x = crate::foo!();
+ let y = m::bar!();
+ }
+ "#,
+ expect![[r#"
+ !0..5 '42i32': i32
+ !0..5 '42i32': i32
+ 110..163 '{ ...!(); }': ()
+ 120..121 'x': i32
+ 147..148 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_def_expanded_in_various_places() {
+ check_infer(
+ r#"
+ macro spam() {
+ 1isize
+ }
+
+ fn spam() {
+ spam!();
+ (spam!());
+ spam!().spam(spam!());
+ for _ in spam!() {}
+ || spam!();
+ while spam!() {}
+ break spam!();
+ return spam!();
+ match spam!() {
+ _ if spam!() => spam!(),
+ }
+ spam!()(spam!());
+ Spam { spam: spam!() };
+ spam!()[spam!()];
+ await spam!();
+ spam!() as usize;
+ &spam!();
+ -spam!();
+ spam!()..spam!();
+ spam!() + spam!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ 39..442 '{ ...!(); }': ()
+ 73..94 'spam!(...am!())': {unknown}
+ 100..119 'for _ ...!() {}': ()
+ 104..105 '_': {unknown}
+ 117..119 '{}': ()
+ 124..134 '|| spam!()': || -> isize
+ 140..156 'while ...!() {}': ()
+ 154..156 '{}': ()
+ 161..174 'break spam!()': !
+ 180..194 'return spam!()': !
+ 200..254 'match ... }': isize
+ 224..225 '_': isize
+ 259..275 'spam!(...am!())': {unknown}
+ 281..303 'Spam {...m!() }': {unknown}
+ 309..325 'spam!(...am!()]': {unknown}
+ 350..366 'spam!(... usize': usize
+ 372..380 '&spam!()': &isize
+ 386..394 '-spam!()': isize
+ 400..416 'spam!(...pam!()': {unknown}
+ 422..439 'spam!(...pam!()': isize
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_rules_expanded_in_various_places() {
+ check_infer(
+ r#"
+ macro_rules! spam {
+ () => (1isize);
+ }
+
+ fn spam() {
+ spam!();
+ (spam!());
+ spam!().spam(spam!());
+ for _ in spam!() {}
+ || spam!();
+ while spam!() {}
+ break spam!();
+ return spam!();
+ match spam!() {
+ _ if spam!() => spam!(),
+ }
+ spam!()(spam!());
+ Spam { spam: spam!() };
+ spam!()[spam!()];
+ await spam!();
+ spam!() as usize;
+ &spam!();
+ -spam!();
+ spam!()..spam!();
+ spam!() + spam!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ 53..456 '{ ...!(); }': ()
+ 87..108 'spam!(...am!())': {unknown}
+ 114..133 'for _ ...!() {}': ()
+ 118..119 '_': {unknown}
+ 131..133 '{}': ()
+ 138..148 '|| spam!()': || -> isize
+ 154..170 'while ...!() {}': ()
+ 168..170 '{}': ()
+ 175..188 'break spam!()': !
+ 194..208 'return spam!()': !
+ 214..268 'match ... }': isize
+ 238..239 '_': isize
+ 273..289 'spam!(...am!())': {unknown}
+ 295..317 'Spam {...m!() }': {unknown}
+ 323..339 'spam!(...am!()]': {unknown}
+ 364..380 'spam!(... usize': usize
+ 386..394 '&spam!()': &isize
+ 400..408 '-spam!()': isize
+ 414..430 'spam!(...pam!()': {unknown}
+ 436..453 'spam!(...pam!()': isize
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_expanded_in_stmts() {
+ check_infer(
+ r#"
+ macro_rules! id { ($($es:tt)*) => { $($es)* } }
+ fn foo() {
+ id! { let a = (); }
+ }
+ "#,
+ expect![[r#"
+ !0..8 'leta=();': ()
+ !3..4 'a': ()
+ !5..7 '()': ()
+ 57..84 '{ ...); } }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn recurisve_macro_expanded_in_stmts() {
+ check_infer(
+ r#"
+ macro_rules! ng {
+ ([$($tts:tt)*]) => {
+ $($tts)*;
+ };
+ ([$($tts:tt)*] $head:tt $($rest:tt)*) => {
+ ng! {
+ [$($tts)* $head] $($rest)*
+ }
+ };
+ }
+ fn foo() {
+ ng!([] let a = 3);
+ let b = a;
+ }
+ "#,
+ expect![[r#"
+ !0..7 'leta=3;': ()
+ !0..13 'ng!{[leta=3]}': ()
+ !0..13 'ng!{[leta=]3}': ()
+ !0..13 'ng!{[leta]=3}': ()
+ !0..13 'ng!{[let]a=3}': ()
+ !3..4 'a': i32
+ !5..6 '3': i32
+ 196..237 '{ ...= a; }': ()
+ 229..230 'b': i32
+ 233..234 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_inner_item_macro_rules() {
+ check_infer(
+ r#"
+ macro_rules! mac {
+ () => { mac!($)};
+ ($x:tt) => { macro_rules! blub { () => { 1 }; } };
+ }
+ fn foo() {
+ mac!();
+ let a = blub!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '1': i32
+ !0..7 'mac!($)': ()
+ !0..26 'macro_...>{1};}': ()
+ 107..143 '{ ...!(); }': ()
+ 129..130 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_macro_defining_block_with_items() {
+ check_infer(
+ r#"
+ macro_rules! foo {
+ () => {{
+ fn bar() -> usize { 0 }
+ bar()
+ }};
+ }
+ fn main() {
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ !15..18 '{0}': usize
+ !16..17 '0': usize
+ !0..24 '{fnbar...bar()}': usize
+ !18..21 'bar': fn bar() -> usize
+ !18..23 'bar()': usize
+ 98..122 '{ ...!(); }': ()
+ 108..110 '_a': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_value_macro_having_same_name() {
+ check_infer(
+ r#"
+ #[macro_export]
+ macro_rules! foo {
+ () => {
+ mod foo {
+ pub use super::foo;
+ }
+ };
+ ($x:tt) => {
+ $x
+ };
+ }
+
+ foo!();
+
+ fn foo() {
+ let foo = foo::foo!(42i32);
+ }
+ "#,
+ expect![[r#"
+ !0..5 '42i32': i32
+ 170..205 '{ ...32); }': ()
+ 180..183 'foo': i32
+ "#]],
+ );
+}
+
+#[test]
+fn processes_impls_generated_by_macros() {
+ check_types(
+ r#"
+macro_rules! m {
+ ($ident:ident) => (impl Trait for $ident {})
+}
+trait Trait { fn foo(self) -> u128 { 0 } }
+struct S;
+m!(S);
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_assoc_items_generated_by_macros() {
+ check_types(
+ r#"
+macro_rules! m {
+ () => (fn foo(&self) -> u128 {0})
+}
+struct S;
+impl S {
+ m!();
+}
+
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_assoc_items_generated_by_macros_chain() {
+ check_types(
+ r#"
+macro_rules! m_inner {
+ () => {fn foo(&self) -> u128 {0}}
+}
+macro_rules! m {
+ () => {m_inner!();}
+}
+
+struct S;
+impl S {
+ m!();
+}
+
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_expr() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+fn test() {
+ let x = (foo::foo!(1), foo::foo!(2));
+ x;
+} //^ (i32, usize)
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! foo {
+ (1) => { $crate::bar!() };
+ (2) => { 1 + $crate::baz() };
+}
+
+#[macro_export]
+macro_rules! bar {
+ () => { 42 }
+}
+
+pub fn baz() -> usize { 31usize }
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::Trait;
+
+fn test() {
+ let msg = foo::Message(foo::MessageRef);
+ let r = msg.deref();
+ r;
+ //^ &MessageRef
+}
+
+//- /lib.rs crate:foo
+pub struct MessageRef;
+pub struct Message(MessageRef);
+
+pub trait Trait {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+}
+
+#[macro_export]
+macro_rules! expand {
+ () => {
+ impl Trait for Message {
+ type Target = $crate::MessageRef;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ }
+}
+
+expand!();
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_in_def_site() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::expand;
+
+macro_rules! list {
+ ($($tt:tt)*) => { $($tt)* }
+}
+
+fn test() {
+ let r = expand!();
+ r;
+ //^ u128
+}
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! expand {
+ () => { list!($crate::m!()) };
+}
+
+#[macro_export]
+macro_rules! m {
+ () => { 0u128 };
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_type_value_non_legacy_macro_use_as() {
+ check_infer(
+ r#"
+ mod m {
+ macro_rules! _foo {
+ ($x:ident) => { type $x = u64; }
+ }
+ pub(crate) use _foo as foo;
+ }
+
+ m::foo!(foo);
+ use foo as bar;
+ fn f() -> bar { 0 }
+ fn main() {
+ let _a = f();
+ }
+ "#,
+ expect![[r#"
+ 158..163 '{ 0 }': u64
+ 160..161 '0': u64
+ 174..196 '{ ...f(); }': ()
+ 184..186 '_a': u64
+ 190..191 'f': fn f() -> u64
+ 190..193 'f()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_local_macro() {
+ check_infer(
+ r#"
+ fn main() {
+ macro_rules! foo {
+ () => { 1usize }
+ }
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1usize': usize
+ 10..89 '{ ...!(); }': ()
+ 74..76 '_a': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_local_inner_macros() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+fn test() {
+ let x = foo::foo!(1);
+ x;
+} //^ i32
+
+//- /lib.rs crate:foo
+#[macro_export(local_inner_macros)]
+macro_rules! foo {
+ (1) => { bar!() };
+}
+
+#[macro_export]
+macro_rules! bar {
+ () => { 42 }
+}
+
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_line() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! line {() => {}}
+
+ fn main() {
+ let x = line!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '0': i32
+ 63..87 '{ ...!(); }': ()
+ 73..74 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_file() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! file {() => {}}
+
+ fn main() {
+ let x = file!();
+ }
+ "#,
+ expect![[r#"
+ !0..2 '""': &str
+ 63..87 '{ ...!(); }': ()
+ 73..74 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_column() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! column {() => {}}
+
+ fn main() {
+ let x = column!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '0': i32
+ 65..91 '{ ...!(); }': ()
+ 75..76 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_concat() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! concat {() => {}}
+
+ fn main() {
+ let x = concat!("hello", concat!("world", "!"));
+ }
+ "#,
+ expect![[r#"
+ !0..13 '"helloworld!"': &str
+ 65..121 '{ ...")); }': ()
+ 75..76 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("foo.rs");
+
+fn main() {
+ bar();
+} //^^^^^ u32
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_expression() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+fn main() {
+ let i = include!("bla.rs");
+ i;
+ //^ i32
+}
+//- /bla.rs
+0
+ "#,
+ )
+}
+
+#[test]
+fn infer_builtin_macros_include_child_mod() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("f/foo.rs");
+
+fn main() {
+ bar::bar();
+} //^^^^^^^^^^ u32
+
+//- /f/foo.rs
+pub mod bar;
+
+//- /f/bar.rs
+pub fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_str() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include_str {() => {}}
+
+fn main() {
+ let a = include_str!("foo.rs");
+ a;
+} //^ &str
+
+//- /foo.rs
+hello
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_str_with_lazy_nested() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+#[rustc_builtin_macro]
+macro_rules! include_str {() => {}}
+
+macro_rules! m {
+ ($x:expr) => {
+ concat!("foo", $x)
+ };
+}
+
+fn main() {
+ let a = include_str!(m!(".rs"));
+ a;
+} //^ &str
+
+//- /foo.rs
+hello
+"#,
+ );
+}
+
+#[test]
+fn benchmark_include_macro() {
+ if skip_slow_tests() {
+ return;
+ }
+ let data = bench_fixture::big_struct();
+ let fixture = r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("foo.rs");
+
+fn main() {
+ RegisterBlock { };
+ //^^^^^^^^^^^^^^^^^ RegisterBlock
+}
+ "#;
+ let fixture = format!("{}\n//- /foo.rs\n{}", fixture, data);
+
+ {
+ let _b = bench("include macro");
+ check_types(&fixture);
+ }
+}
+
+#[test]
+fn infer_builtin_macros_include_concat() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+
+include!(concat!("f", "oo.rs"));
+
+fn main() {
+ bar();
+} //^^^^^ u32
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! env {() => {}}
+
+include!(concat!(env!("OUT_DIR"), "/foo.rs"));
+
+fn main() {
+ bar();
+} //^^^^^ {unknown}
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_itself_should_failed() {
+ check_types(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("main.rs");
+
+fn main() {
+ 0;
+} //^ i32
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_concat_with_lazy() {
+ check_infer(
+ r#"
+ macro_rules! hello {() => {"hello"}}
+
+ #[rustc_builtin_macro]
+ macro_rules! concat {() => {}}
+
+ fn main() {
+ let x = concat!(hello!(), concat!("world", "!"));
+ }
+ "#,
+ expect![[r#"
+ !0..13 '"helloworld!"': &str
+ 103..160 '{ ...")); }': ()
+ 113..114 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_env() {
+ check_infer(
+ r#"
+ //- /main.rs env:foo=bar
+ #[rustc_builtin_macro]
+ macro_rules! env {() => {}}
+
+ fn main() {
+ let x = env!("foo");
+ }
+ "#,
+ expect![[r#"
+ !0..22 '"__RA_...TED__"': &str
+ 62..90 '{ ...o"); }': ()
+ 72..73 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_derive_clone_simple() {
+ check_types(
+ r#"
+//- minicore: derive, clone
+#[derive(Clone)]
+struct S;
+fn test() {
+ S.clone();
+} //^^^^^^^^^ S
+"#,
+ );
+}
+
+#[test]
+fn infer_derive_clone_with_params() {
+ check_types(
+ r#"
+//- minicore: clone, derive
+#[derive(Clone)]
+struct S;
+#[derive(Clone)]
+struct Wrapper<T>(T);
+struct NonClone;
+fn test() {
+ let x = (Wrapper(S).clone(), Wrapper(NonClone).clone());
+ x;
+ //^ (Wrapper<S>, {unknown})
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_custom_derive_simple() {
+ // FIXME: this test current now do nothing
+ check_types(
+ r#"
+//- minicore: derive
+use foo::Foo;
+
+#[derive(Foo)]
+struct S{}
+
+fn test() {
+ S{};
+} //^^^ S
+"#,
+ );
+}
+
+#[test]
+fn macro_in_arm() {
+ check_infer(
+ r#"
+ macro_rules! unit {
+ () => { () };
+ }
+
+ fn main() {
+ let x = match () {
+ unit!() => 92u32,
+ };
+ }
+ "#,
+ expect![[r#"
+ !0..2 '()': ()
+ 51..110 '{ ... }; }': ()
+ 61..62 'x': u32
+ 65..107 'match ... }': u32
+ 71..73 '()': ()
+ 95..100 '92u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn macro_in_type_alias_position() {
+ check_infer(
+ r#"
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ trait Foo {
+ type Ty;
+ }
+
+ impl<T> Foo for T {
+ type Ty = U32!();
+ }
+
+ type TayTo = U32!();
+
+ fn testy() {
+ let a: <() as Foo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 147..196 '{ ...yTo; }': ()
+ 157..158 'a': u32
+ 185..186 'b': u32
+ "#]],
+ );
+}
+
+#[test]
+fn nested_macro_in_type_alias_position() {
+ check_infer(
+ r#"
+ macro_rules! U32Inner2 {
+ () => { u32 };
+ }
+
+ macro_rules! U32Inner1 {
+ () => { U32Inner2!() };
+ }
+
+ macro_rules! U32 {
+ () => { U32Inner1!() };
+ }
+
+ trait Foo {
+ type Ty;
+ }
+
+ impl<T> Foo for T {
+ type Ty = U32!();
+ }
+
+ type TayTo = U32!();
+
+ fn testy() {
+ let a: <() as Foo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 259..308 '{ ...yTo; }': ()
+ 269..270 'a': u32
+ 297..298 'b': u32
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_alias_position_generics() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ trait Moo {
+ type Ty;
+ }
+
+ impl<T> Moo for T {
+ type Ty = Bar!();
+ }
+
+ type TayTo = Bar!();
+
+ fn main() {
+ let a: <() as Moo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 228..277 '{ ...yTo; }': ()
+ 238..239 'a': Foo<u32, u32>
+ 266..267 'b': Foo<u32, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_position() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ fn main() {
+ let a: Bar!();
+ }
+ "#,
+ expect![[r#"
+ 133..155 '{ ...!(); }': ()
+ 143..144 'a': Foo<u32, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_generics() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ trait Moo {
+ type Ty;
+ }
+
+ impl<T> Moo for T {
+ type Ty = Foo<Bar!(), Bar!()>;
+ }
+
+ type TayTo = Foo<Bar!(), U32!()>;
+
+ fn main() {
+ let a: <() as Moo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 254..303 '{ ...yTo; }': ()
+ 264..265 'a': Foo<Foo<u32, u32>, Foo<u32, u32>>
+ 292..293 'b': Foo<Foo<u32, u32>, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infinitely_recursive_macro_type() {
+ check_infer(
+ r#"
+ struct Bar<T, X>(T, X);
+
+ macro_rules! Foo {
+ () => { Foo!() }
+ }
+
+ macro_rules! U32 {
+ () => { u32 }
+ }
+
+ type A = Foo!();
+ type B = Bar<Foo!(), U32!()>;
+
+ fn main() {
+ let a: A;
+ let b: B;
+ }
+ "#,
+ expect![[r#"
+ 166..197 '{ ...: B; }': ()
+ 176..177 'a': {unknown}
+ 190..191 'b': Bar<{unknown}, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_tails() {
+ check_infer_with_mismatches(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct S {}
+
+impl S {
+ fn new2(bar: u32) -> Self {
+ #[cfg(feature = "foo")]
+ { Self { } }
+ #[cfg(not(feature = "foo"))]
+ { Self { } }
+ }
+}
+"#,
+ expect![[r#"
+ 34..37 'bar': u32
+ 52..170 '{ ... }': S
+ 62..106 '#[cfg(... { } }': S
+ 96..104 'Self { }': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_1() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+#[mac::attr_macro]
+fn foo() {
+ let xxx = 1;
+ //^^^ i32
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_in_impl() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+struct Foo;
+impl Foo {
+ #[mac::attr_macro]
+ fn foo() {
+ let xxx = 1;
+ //^^^ i32
+ }
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_in_trait() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+trait Foo {
+ #[mac::attr_macro]
+ fn foo() {
+ let xxx = 1;
+ //^^^ i32
+ }
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
new file mode 100644
index 000000000..68463dc06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
@@ -0,0 +1,1792 @@
+use expect_test::expect;
+
+use crate::tests::check;
+
+use super::{check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn infer_slice_method() {
+ check_types(
+ r#"
+impl<T> [T] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+}
+
+fn test(x: &[u8]) {
+ <[_]>::foo(x);
+ //^^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn cross_crate_primitive_method() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = 1f32;
+ x.foo();
+} //^^^^^^^ f32
+
+//- /lib.rs crate:other_crate
+mod foo {
+ impl f32 {
+ pub fn foo(self) -> f32 { 0. }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_array_inherent_impl() {
+ check_types(
+ r#"
+impl<T, const N: usize> [T; N] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+}
+fn test(x: &[u8; 0]) {
+ <[_; 0]>::foo(x);
+ //^^^^^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_associated_method_struct() {
+ check_infer(
+ r#"
+ struct A { x: u32 }
+
+ impl A {
+ fn new() -> A {
+ A { x: 0 }
+ }
+ }
+ fn test() {
+ let a = A::new();
+ a.x;
+ }
+ "#,
+ expect![[r#"
+ 48..74 '{ ... }': A
+ 58..68 'A { x: 0 }': A
+ 65..66 '0': u32
+ 87..121 '{ ...a.x; }': ()
+ 97..98 'a': A
+ 101..107 'A::new': fn new() -> A
+ 101..109 'A::new()': A
+ 115..116 'a': A
+ 115..118 'a.x': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_struct_in_local_scope() {
+ check_infer(
+ r#"
+ fn mismatch() {
+ struct A;
+
+ impl A {
+ fn from(_: i32, _: i32) -> Self {
+ A
+ }
+ }
+
+ let _a = A::from(1, 2);
+ }
+ "#,
+ expect![[r#"
+ 14..146 '{ ... 2); }': ()
+ 125..127 '_a': A
+ 130..137 'A::from': fn from(i32, i32) -> A
+ 130..143 'A::from(1, 2)': A
+ 138..139 '1': i32
+ 141..142 '2': i32
+ 60..61 '_': i32
+ 68..69 '_': i32
+ 84..109 '{ ... }': A
+ 98..99 'A': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_enum() {
+ check_infer(
+ r#"
+ enum A { B, C }
+
+ impl A {
+ pub fn b() -> A {
+ A::B
+ }
+ pub fn c() -> A {
+ A::C
+ }
+ }
+ fn test() {
+ let a = A::b();
+ a;
+ let c = A::c();
+ c;
+ }
+ "#,
+ expect![[r#"
+ 46..66 '{ ... }': A
+ 56..60 'A::B': A
+ 87..107 '{ ... }': A
+ 97..101 'A::C': A
+ 120..177 '{ ... c; }': ()
+ 130..131 'a': A
+ 134..138 'A::b': fn b() -> A
+ 134..140 'A::b()': A
+ 146..147 'a': A
+ 157..158 'c': A
+ 161..165 'A::c': fn c() -> A
+ 161..167 'A::c()': A
+ 173..174 'c': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_with_modules() {
+ check_infer(
+ r#"
+ mod a {
+ struct A;
+ impl A { pub fn thing() -> A { A {} }}
+ }
+
+ mod b {
+ struct B;
+ impl B { pub fn thing() -> u32 { 99 }}
+
+ mod c {
+ struct C;
+ impl C { pub fn thing() -> C { C {} }}
+ }
+ }
+ use b::c;
+
+ fn test() {
+ let x = a::A::thing();
+ let y = b::B::thing();
+ let z = c::C::thing();
+ }
+ "#,
+ expect![[r#"
+ 55..63 '{ A {} }': A
+ 57..61 'A {}': A
+ 125..131 '{ 99 }': u32
+ 127..129 '99': u32
+ 201..209 '{ C {} }': C
+ 203..207 'C {}': C
+ 240..324 '{ ...g(); }': ()
+ 250..251 'x': A
+ 254..265 'a::A::thing': fn thing() -> A
+ 254..267 'a::A::thing()': A
+ 277..278 'y': u32
+ 281..292 'b::B::thing': fn thing() -> u32
+ 281..294 'b::B::thing()': u32
+ 304..305 'z': C
+ 308..319 'c::C::thing': fn thing() -> C
+ 308..321 'c::C::thing()': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make(val: T) -> Gen<T> {
+ Gen { val }
+ }
+ }
+
+ fn test() {
+ let a = Gen::make(0u32);
+ }
+ "#,
+ expect![[r#"
+ 63..66 'val': T
+ 81..108 '{ ... }': Gen<T>
+ 91..102 'Gen { val }': Gen<T>
+ 97..100 'val': T
+ 122..154 '{ ...32); }': ()
+ 132..133 'a': Gen<u32>
+ 136..145 'Gen::make': fn make<u32>(u32) -> Gen<u32>
+ 136..151 'Gen::make(0u32)': Gen<u32>
+ 146..150 '0u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make() -> Gen<T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32>::make();
+ }
+ "#,
+ expect![[r#"
+ 75..99 '{ ... }': Gen<T>
+ 85..93 'loop { }': !
+ 90..93 '{ }': ()
+ 113..148 '{ ...e(); }': ()
+ 123..124 'a': Gen<u32>
+ 127..143 'Gen::<...::make': fn make<u32>() -> Gen<u32>
+ 127..145 'Gen::<...make()': Gen<u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_2_type_params_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T, U> {
+ val: T,
+ val2: U,
+ }
+
+ impl<T> Gen<u32, T> {
+ pub fn make() -> Gen<u32,T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32, u64>::make();
+ }
+ "#,
+ expect![[r#"
+ 101..125 '{ ... }': Gen<u32, T>
+ 111..119 'loop { }': !
+ 116..119 '{ }': ()
+ 139..179 '{ ...e(); }': ()
+ 149..150 'a': Gen<u32, u64>
+ 153..174 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64>
+ 153..176 'Gen::<...make()': Gen<u32, u64>
+ "#]],
+ );
+}
+
+#[test]
+fn cross_crate_associated_method_call() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = other_crate::foo::S::thing();
+ x;
+} //^ i128
+
+//- /lib.rs crate:other_crate
+pub mod foo {
+ pub struct S;
+ impl S {
+ pub fn thing() -> i128 { 0 }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_trait_method_simple() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait1 {
+ fn method(&self) -> u32;
+}
+struct S1;
+impl Trait1 for S1 {}
+trait Trait2 {
+ fn method(&self) -> i128;
+}
+struct S2;
+impl Trait2 for S2 {}
+fn test() {
+ S1.method();
+ //^^^^^^^^^^^ u32
+ S2.method(); // -> i128
+ //^^^^^^^^^^^ i128
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_scoped() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+struct S;
+mod foo {
+ pub trait Trait1 {
+ fn method(&self) -> u32;
+ }
+ impl Trait1 for super::S {}
+}
+mod bar {
+ pub trait Trait2 {
+ fn method(&self) -> i128;
+ }
+ impl Trait2 for super::S {}
+}
+
+mod foo_test {
+ use super::S;
+ use super::foo::Trait1;
+ fn test() {
+ S.method();
+ //^^^^^^^^^^ u32
+ }
+}
+
+mod bar_test {
+ use super::S;
+ use super::bar::Trait2;
+ fn test() {
+ S.method();
+ //^^^^^^^^^^ i128
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_1() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+struct S;
+impl Trait<u32> for S {}
+fn test() {
+ S.method();
+ //^^^^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_more_params() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T1, T2, T3> {
+ fn method1(&self) -> (T1, T2, T3);
+ fn method2(&self) -> (T3, T2, T1);
+}
+struct S1;
+impl Trait<u8, u16, u32> for S1 {}
+struct S2;
+impl<T> Trait<i8, i16, T> for S2 {}
+fn test() {
+ S1.method1();
+ //^^^^^^^^^^^^ (u8, u16, u32)
+ S1.method2();
+ //^^^^^^^^^^^^ (u32, u16, u8)
+ S2.method1();
+ //^^^^^^^^^^^^ (i8, i16, {unknown})
+ S2.method2();
+ //^^^^^^^^^^^^ ({unknown}, i16, i8)
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_2() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn test() {
+ S(1u32).method();
+ //^^^^^^^^^^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method() {
+ check_infer(
+ r#"
+ trait Default {
+ fn default() -> Self;
+ }
+ struct S;
+ impl Default for S {}
+ fn test() {
+ let s1: S = Default::default();
+ let s2 = S::default();
+ let s3 = <S as Default>::default();
+ }
+ "#,
+ expect![[r#"
+ 86..192 '{ ...t(); }': ()
+ 96..98 's1': S
+ 104..120 'Defaul...efault': fn default<S>() -> S
+ 104..122 'Defaul...ault()': S
+ 132..134 's2': S
+ 137..147 'S::default': fn default<S>() -> S
+ 137..149 'S::default()': S
+ 159..161 's3': S
+ 164..187 '<S as ...efault': fn default<S>() -> S
+ 164..189 '<S as ...ault()': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_1() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> T;
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make();
+ let b = G::<u64>::make();
+ let c: f64 = G::make();
+ }
+ "#,
+ expect![[r#"
+ 126..210 '{ ...e(); }': ()
+ 136..137 'a': u32
+ 140..147 'S::make': fn make<S, u32>() -> u32
+ 140..149 'S::make()': u32
+ 159..160 'b': u64
+ 163..177 'G::<u64>::make': fn make<G<u64>, u64>() -> u64
+ 163..179 'G::<u6...make()': u64
+ 189..190 'c': f64
+ 198..205 'G::make': fn make<G<f64>, f64>() -> f64
+ 198..207 'G::make()': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (T, U);
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make::<i64>();
+ let b: (_, i64) = S::make();
+ let c = G::<u32>::make::<i64>();
+ let d: (u32, _) = G::make::<i64>();
+ let e: (u32, i64) = G::make();
+ }
+ "#,
+ expect![[r#"
+ 134..312 '{ ...e(); }': ()
+ 144..145 'a': (u32, i64)
+ 148..162 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64)
+ 148..164 'S::mak...i64>()': (u32, i64)
+ 174..175 'b': (u32, i64)
+ 188..195 'S::make': fn make<S, u32, i64>() -> (u32, i64)
+ 188..197 'S::make()': (u32, i64)
+ 207..208 'c': (u32, i64)
+ 211..232 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 211..234 'G::<u3...i64>()': (u32, i64)
+ 244..245 'd': (u32, i64)
+ 258..272 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 258..274 'G::mak...i64>()': (u32, i64)
+ 284..285 'e': (u32, i64)
+ 300..307 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 300..309 'G::make()': (u32, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_3() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<i32> {}
+ fn test() {
+ let a = S::make();
+ }
+ "#,
+ expect![[r#"
+ 100..126 '{ ...e(); }': ()
+ 110..111 'a': (S<i32>, i64)
+ 114..121 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
+ 114..123 'S::make()': (S<i32>, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_4() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ impl Trait<i32> for S<u32> {}
+ fn test() {
+ let a: (S<u64>, _) = S::make();
+ let b: (_, i32) = S::make();
+ }
+ "#,
+ expect![[r#"
+ 130..202 '{ ...e(); }': ()
+ 140..141 'a': (S<u64>, i64)
+ 157..164 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
+ 157..166 'S::make()': (S<u64>, i64)
+ 176..177 'b': (S<u32>, i32)
+ 190..197 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
+ 190..199 'S::make()': (S<u32>, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_5() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (Self, T, U);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ fn test() {
+ let a = <S as Trait<i64>>::make::<u8>();
+ let b: (S<u64>, _, _) = Trait::<i64>::make::<u8>();
+ }
+ "#,
+ expect![[r#"
+ 106..210 '{ ...>(); }': ()
+ 116..117 'a': (S<u64>, i64, u8)
+ 120..149 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 120..151 '<S as ...<u8>()': (S<u64>, i64, u8)
+ 161..162 'b': (S<u64>, i64, u8)
+ 181..205 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 181..207 'Trait:...<u8>()': (S<u64>, i64, u8)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_1() {
+ check_infer(
+ r#"
+ trait Trait {
+ fn method(&self) -> u32;
+ }
+ fn test<T: Trait>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 63..64 't': T
+ 69..88 '{ ...d(); }': ()
+ 75..76 't': T
+ 75..85 't.method()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn method(&self) -> T;
+ }
+ fn test<U, T: Trait<U>>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 70..71 't': T
+ 76..95 '{ ...d(); }': ()
+ 82..83 't': T
+ 82..92 't.method()': U
+ "#]],
+ );
+}
+
+#[test]
+fn infer_with_multiple_trait_impls() {
+ check_infer(
+ r#"
+ trait Into<T> {
+ fn into(self) -> T;
+ }
+ struct S;
+ impl Into<u32> for S {}
+ impl Into<u64> for S {}
+ fn test() {
+ let x: u32 = S.into();
+ let y: u64 = S.into();
+ let z = Into::<u64>::into(S);
+ }
+ "#,
+ expect![[r#"
+ 28..32 'self': Self
+ 110..201 '{ ...(S); }': ()
+ 120..121 'x': u32
+ 129..130 'S': S
+ 129..137 'S.into()': u32
+ 147..148 'y': u64
+ 156..157 'S': S
+ 156..164 'S.into()': u64
+ 174..175 'z': u64
+ 178..195 'Into::...::into': fn into<S, u64>(S) -> u64
+ 178..198 'Into::...nto(S)': u64
+ 196..197 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_unify_impl_self_type() {
+ check_types(
+ r#"
+struct S<T>;
+impl S<u32> { fn foo(&self) -> u8 { 0 } }
+impl S<i32> { fn foo(&self) -> i8 { 0 } }
+fn test() { (S::<u32>.foo(), S::<i32>.foo()); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (u8, i8)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoref() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_by_value_before_autoref() {
+ check_types(
+ r#"
+trait Clone { fn clone(&self) -> Self; }
+struct S;
+impl Clone for S {}
+impl Clone for &S {}
+fn test() { (S.clone(), (&S).clone(), (&&S).clone()); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (S, S, &S)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_ref_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_unsize_array() {
+ check_types(
+ r#"
+//- minicore: slice
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^^^^^^^ usize
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_from_prelude() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+struct S;
+impl Clone for S {}
+
+fn test() {
+ S.clone();
+ //^^^^^^^^^ S
+}
+
+//- /lib.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub trait Clone {
+ fn clone(&self) -> Self;
+ }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_for_unknown_trait() {
+ // The blanket impl currently applies because we ignore the unresolved where clause
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: UnknownTrait {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ // This is also to make sure that we don't resolve to the foo method just
+ // because that's the only method named foo we can find, which would make
+ // the below tests not work
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: Clone {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T: Clone> Trait for T {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_2() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U> Into<U> for T where U: From<T> {}
+fn test() { S2.into(); }
+ //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U: From<T>> Into<U> for T {}
+fn test() { S2.into(); }
+ //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_method() {
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub fn new(foo_: T) -> Self {
+ Wrapper(Foo(foo_))
+ }
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub fn new(bar_: T) -> Self {
+ Wrapper(Bar(bar_))
+ }
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::new(1.0);
+ let b = Wrapper::<Bar<f32>>::new(1.0);
+ (a, b);
+ //^^^^^^ (Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_const() {
+ cov_mark::check!(const_candidate_self_type_mismatch);
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub const VALUE: Foo<T>;
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub const VALUE: Bar<T>;
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::VALUE;
+ let b = Wrapper::<Bar<f32>>::VALUE;
+ (a, b);
+ //^^^^^^ (Foo<f32>, Bar<f32>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_encountering_fn_type() {
+ check_types(
+ r#"
+//- /main.rs
+fn foo() {}
+trait FnOnce { fn call(self); }
+fn test() { foo.call(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn super_trait_impl_return_trait_method_resolution() {
+ check_infer(
+ r#"
+ //- minicore: sized
+ trait Base {
+ fn foo(self) -> usize;
+ }
+
+ trait Super : Base {}
+
+ fn base1() -> impl Base { loop {} }
+ fn super1() -> impl Super { loop {} }
+
+ fn test(base2: impl Base, super2: impl Super) {
+ base1().foo();
+ super1().foo();
+ base2.foo();
+ super2.foo();
+ }
+ "#,
+ expect![[r#"
+ 24..28 'self': Self
+ 90..101 '{ loop {} }': !
+ 92..99 'loop {}': !
+ 97..99 '{}': ()
+ 128..139 '{ loop {} }': !
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 149..154 'base2': impl Base
+ 167..173 'super2': impl Super
+ 187..264 '{ ...o(); }': ()
+ 193..198 'base1': fn base1() -> impl Base
+ 193..200 'base1()': impl Base
+ 193..206 'base1().foo()': usize
+ 212..218 'super1': fn super1() -> impl Super
+ 212..220 'super1()': impl Super
+ 212..226 'super1().foo()': usize
+ 232..237 'base2': impl Base
+ 232..243 'base2.foo()': usize
+ 249..255 'super2': impl Super
+ 249..261 'super2.foo()': usize
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_non_parameter_type() {
+ check_types(
+ r#"
+mod a {
+ pub trait Foo {
+ fn foo(&self);
+ }
+}
+
+struct Wrapper<T>(T);
+fn foo<T>(t: Wrapper<T>)
+where
+ Wrapper<T>: a::Foo,
+{
+ t.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_3373() {
+ check_types(
+ r#"
+struct A<T>(T);
+
+impl A<i32> {
+ fn from(v: i32) -> A<i32> { A(v) }
+}
+
+fn main() {
+ A::from(3);
+} //^^^^^^^^^^ A<i32>
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_slow() {
+ // this can get quite slow if we set the solver size limit too high
+ check_types(
+ r#"
+trait SendX {}
+
+struct S1; impl SendX for S1 {}
+struct S2; impl SendX for S2 {}
+struct U1;
+
+trait Trait { fn method(self); }
+
+struct X1<A, B> {}
+impl<A, B> SendX for X1<A, B> where A: SendX, B: SendX {}
+
+struct S<B, C> {}
+
+trait FnX {}
+
+impl<B, C> Trait for S<B, C> where C: FnX, B: SendX {}
+
+fn test() { (S {}).method(); }
+ //^^^^^^^^^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_super_trait_not_in_scope() {
+ check_infer(
+ r#"
+ mod m {
+ pub trait SuperTrait {
+ fn foo(&self) -> u32 { 0 }
+ }
+ }
+ trait Trait: m::SuperTrait {}
+
+ struct S;
+ impl m::SuperTrait for S {}
+ impl Trait for S {}
+
+ fn test(d: &dyn Trait) {
+ d.foo();
+ }
+ "#,
+ expect![[r#"
+ 51..55 'self': &Self
+ 64..69 '{ 0 }': u32
+ 66..67 '0': u32
+ 176..177 'd': &dyn Trait
+ 191..207 '{ ...o(); }': ()
+ 197..198 'd': &dyn Trait
+ 197..204 'd.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_foreign_opaque_type() {
+ check_infer(
+ r#"
+extern "C" {
+ type S;
+ fn f() -> &'static S;
+}
+
+impl S {
+ fn foo(&self) -> bool {
+ true
+ }
+}
+
+fn test() {
+ let s = unsafe { f() };
+ s.foo();
+}
+"#,
+ expect![[r#"
+ 75..79 'self': &S
+ 89..109 '{ ... }': bool
+ 99..103 'true': bool
+ 123..167 '{ ...o(); }': ()
+ 133..134 's': &S
+ 137..151 'unsafe { f() }': &S
+ 137..151 'unsafe { f() }': &S
+ 146..147 'f': fn f() -> &S
+ 146..149 'f()': &S
+ 157..158 's': &S
+ 157..164 's.foo()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn method_with_allocator_box_self_type() {
+ check_types(
+ r#"
+struct Slice<T> {}
+struct Box<T, A> {}
+
+impl<T> Slice<T> {
+ pub fn into_vec<A>(self: Box<Self, A>) { }
+}
+
+fn main() {
+ let foo: Slice<u32>;
+ foo.into_vec(); // we shouldn't crash on this at least
+} //^^^^^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_on_dyn_impl() {
+ check_types(
+ r#"
+trait Foo {}
+
+impl Foo for u32 {}
+impl dyn Foo + '_ {
+ pub fn dyn_foo(&self) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let f = &42u32 as &dyn Foo;
+ f.dyn_foo();
+ // ^^^^^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn autoderef_visibility_field() {
+ check(
+ r#"
+//- minicore: deref
+mod a {
+ pub struct Foo(pub char);
+ pub struct Bar(i32);
+ impl Bar {
+ pub fn new() -> Self {
+ Self(0)
+ }
+ }
+ impl core::ops::Deref for Bar {
+ type Target = Foo;
+ fn deref(&self) -> &Foo {
+ &Foo('z')
+ }
+ }
+}
+mod b {
+ fn foo() {
+ let x = super::a::Bar::new().0;
+ // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not)))
+ // ^^^^^^^^^^^^^^^^^^^^^^ type: char
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn autoderef_visibility_method() {
+ cov_mark::check!(autoderef_candidate_not_visible);
+ check(
+ r#"
+//- minicore: deref
+mod a {
+ pub struct Foo(pub char);
+ impl Foo {
+ pub fn mango(&self) -> char {
+ self.0
+ }
+ }
+ pub struct Bar(i32);
+ impl Bar {
+ pub fn new() -> Self {
+ Self(0)
+ }
+ fn mango(&self) -> i32 {
+ self.0
+ }
+ }
+ impl core::ops::Deref for Bar {
+ type Target = Foo;
+ fn deref(&self) -> &Foo {
+ &Foo('z')
+ }
+ }
+}
+mod b {
+ fn foo() {
+ let x = super::a::Bar::new().mango();
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: char
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn trait_vs_private_inherent_const() {
+ cov_mark::check!(const_candidate_not_visible);
+ check(
+ r#"
+mod a {
+ pub struct Foo;
+ impl Foo {
+ const VALUE: u32 = 2;
+ }
+ pub trait Trait {
+ const VALUE: usize;
+ }
+ impl Trait for Foo {
+ const VALUE: usize = 3;
+ }
+
+ fn foo() {
+ let x = Foo::VALUE;
+ // ^^^^^^^^^^ type: u32
+ }
+}
+use a::Trait;
+fn foo() {
+ let x = a::Foo::VALUE;
+ // ^^^^^^^^^^^^^ type: usize
+}
+"#,
+ )
+}
+
+#[test]
+fn trait_impl_in_unnamed_const() {
+ check_types(
+ r#"
+struct S;
+
+trait Tr {
+ fn method(&self) -> u16;
+}
+
+const _: () = {
+ impl Tr for S {}
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_impl_in_synstructure_const() {
+ check_types(
+ r#"
+struct S;
+
+trait Tr {
+ fn method(&self) -> u16;
+}
+
+const _DERIVE_Tr_: () = {
+ impl Tr for S {}
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn inherent_impl_in_unnamed_const() {
+ check_types(
+ r#"
+struct S;
+
+const _: () = {
+ impl S {
+ fn method(&self) -> u16 { 0 }
+
+ pub(super) fn super_method(&self) -> u16 { 0 }
+
+ pub(crate) fn crate_method(&self) -> u16 { 0 }
+
+ pub fn pub_method(&self) -> u16 { 0 }
+ }
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+
+ S.super_method();
+ //^^^^^^^^^^^^^^^^ u16
+
+ S.crate_method();
+ //^^^^^^^^^^^^^^^^ u16
+
+ S.pub_method();
+ //^^^^^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn resolve_const_generic_array_methods() {
+ check_types(
+ r#"
+#[lang = "array"]
+impl<T, const N: usize> [T; N] {
+ pub fn map<F, U>(self, f: F) -> [U; N]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+#[lang = "slice"]
+impl<T> [T] {
+ pub fn map<F, U>(self, f: F) -> &[U]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+fn f() {
+ let v = [1, 2].map::<_, usize>(|x| -> x * 2);
+ v;
+ //^ [usize; 2]
+}
+ "#,
+ );
+}
+
+#[test]
+fn resolve_const_generic_method() {
+ check_types(
+ r#"
+struct Const<const N: usize>;
+
+#[lang = "array"]
+impl<T, const N: usize> [T; N] {
+ pub fn my_map<F, U, const X: usize>(self, f: F, c: Const<X>) -> [U; X]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+#[lang = "slice"]
+impl<T> [T] {
+ pub fn my_map<F, const X: usize, U>(self, f: F, c: Const<X>) -> &[U]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+fn f<const C: usize, P>() {
+ let v = [1, 2].my_map::<_, (), 12>(|x| -> x * 2, Const::<12>);
+ v;
+ //^ [(); 12]
+ let v = [1, 2].my_map::<_, P, C>(|x| -> x * 2, Const::<C>);
+ v;
+ //^ [P; C]
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_type_alias() {
+ check_types(
+ r#"
+struct Const<const N: usize>;
+type U2 = Const<2>;
+type U5 = Const<5>;
+
+impl U2 {
+ fn f(self) -> Const<12> {
+ loop {}
+ }
+}
+
+impl U5 {
+ fn f(self) -> Const<15> {
+ loop {}
+ }
+}
+
+fn f(x: U2) {
+ let y = x.f();
+ //^ Const<12>
+}
+ "#,
+ );
+}
+
+#[test]
+fn skip_array_during_method_dispatch() {
+ check_types(
+ r#"
+//- /main2018.rs crate:main2018 deps:core
+use core::IntoIterator;
+
+fn f() {
+ let v = [4].into_iter();
+ v;
+ //^ &i32
+
+ let a = [0, 1].into_iter();
+ a;
+ //^ &i32
+}
+
+//- /main2021.rs crate:main2021 deps:core edition:2021
+use core::IntoIterator;
+
+fn f() {
+ let v = [4].into_iter();
+ v;
+ //^ i32
+
+ let a = [0, 1].into_iter();
+ a;
+ //^ &i32
+}
+
+//- /core.rs crate:core
+#[rustc_skip_array_during_method_dispatch]
+pub trait IntoIterator {
+ type Out;
+ fn into_iter(self) -> Self::Out;
+}
+
+impl<T> IntoIterator for [T; 1] {
+ type Out = T;
+ fn into_iter(self) -> Self::Out { loop {} }
+}
+impl<'a, T> IntoIterator for &'a [T] {
+ type Out = &'a T;
+ fn into_iter(self) -> Self::Out { loop {} }
+}
+ "#,
+ );
+}
+
+#[test]
+fn sized_blanket_impl() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Foo { fn foo() -> u8; }
+impl<T: Sized> Foo for T {}
+fn f<S: Sized, T, U: ?Sized>() {
+ u32::foo;
+ S::foo;
+ T::foo;
+ U::foo;
+ <[u32]>::foo;
+}
+"#,
+ expect![[r#"
+ 89..160 '{ ...foo; }': ()
+ 95..103 'u32::foo': fn foo<u32>() -> u8
+ 109..115 'S::foo': fn foo<S>() -> u8
+ 121..127 'T::foo': fn foo<T>() -> u8
+ 133..139 'U::foo': {unknown}
+ 145..157 '<[u32]>::foo': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn local_impl() {
+ check_types(
+ r#"
+fn main() {
+ struct SomeStruct(i32);
+
+ impl SomeStruct {
+ fn is_even(&self) -> bool {
+ self.0 % 2 == 0
+ }
+ }
+
+ let o = SomeStruct(3);
+ let is_even = o.is_even();
+ // ^^^^^^^ bool
+}
+ "#,
+ );
+}
+
+#[test]
+fn deref_fun_1() {
+ check_types(
+ r#"
+//- minicore: deref
+
+struct A<T, U>(T, U);
+struct B<T>(T);
+struct C<T>(T);
+
+impl<T> core::ops::Deref for A<B<T>, u32> {
+ type Target = B<T>;
+ fn deref(&self) -> &B<T> { &self.0 }
+}
+impl core::ops::Deref for B<isize> {
+ type Target = C<isize>;
+ fn deref(&self) -> &C<isize> { loop {} }
+}
+
+impl<T: Copy> C<T> {
+ fn thing(&self) -> T { self.0 }
+}
+
+fn make<T>() -> T { loop {} }
+
+fn test() {
+ let a1 = A(make(), make());
+ let _: usize = (*a1).0;
+ a1;
+ //^^ A<B<usize>, u32>
+
+ let a2 = A(make(), make());
+ a2.thing();
+ //^^^^^^^^^^ isize
+ a2;
+ //^^ A<B<isize>, u32>
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_fun_2() {
+ check_types(
+ r#"
+//- minicore: deref
+
+struct A<T, U>(T, U);
+struct B<T>(T);
+struct C<T>(T);
+
+impl<T> core::ops::Deref for A<B<T>, u32> {
+ type Target = B<T>;
+ fn deref(&self) -> &B<T> { &self.0 }
+}
+impl core::ops::Deref for B<isize> {
+ type Target = C<isize>;
+ fn deref(&self) -> &C<isize> { loop {} }
+}
+
+impl<T> core::ops::Deref for A<C<T>, i32> {
+ type Target = C<T>;
+ fn deref(&self) -> &C<T> { &self.0 }
+}
+
+impl<T: Copy> C<T> {
+ fn thing(&self) -> T { self.0 }
+}
+
+fn make<T>() -> T { loop {} }
+
+fn test() {
+ let a1 = A(make(), 1u32);
+ a1.thing();
+ a1;
+ //^^ A<B<isize>, u32>
+
+ let a2 = A(make(), 1i32);
+ let _: &str = a2.thing();
+ a2;
+ //^^ A<C<&str>, i32>
+}
+"#,
+ );
+}
+
+#[test]
+fn receiver_adjustment_autoref() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn test() {
+ Foo.foo();
+ //^^^ adjustments: Borrow(Ref(Not))
+ (&Foo).foo();
+ // ^^^^ adjustments: ,
+}
+"#,
+ );
+}
+
+#[test]
+fn receiver_adjustment_unsize_array() {
+ // FIXME not quite correct
+ check(
+ r#"
+//- minicore: slice
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^ adjustments: Pointer(Unsize), Borrow(Ref(Not))
+"#,
+ );
+}
+
+#[test]
+fn bad_inferred_reference_1() {
+ check_no_mismatches(
+ r#"
+//- minicore: sized
+pub trait Into<T>: Sized {
+ fn into(self) -> T;
+}
+impl<T> Into<T> for T {
+ fn into(self) -> T { self }
+}
+
+trait ExactSizeIterator {
+ fn len(&self) -> usize;
+}
+
+pub struct Foo;
+impl Foo {
+ fn len(&self) -> usize { 0 }
+}
+
+pub fn test(generic_args: impl Into<Foo>) {
+ let generic_args = generic_args.into();
+ generic_args.len();
+ let _: Foo = generic_args;
+}
+"#,
+ );
+}
+
+#[test]
+fn bad_inferred_reference_2() {
+ check_no_mismatches(
+ r#"
+//- minicore: deref
+trait ExactSizeIterator {
+ fn len(&self) -> usize;
+}
+
+pub struct Foo;
+impl Foo {
+ fn len(&self) -> usize { 0 }
+}
+
+pub fn test() {
+ let generic_args;
+ generic_args.len();
+ let _: Foo = generic_args;
+}
+"#,
+ );
+}
+
+#[test]
+fn resolve_minicore_iterator() {
+ check_types(
+ r#"
+//- minicore: iterators, sized
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Option<i32>
+"#,
+ );
+}
+
+#[test]
+fn primitive_assoc_fn_shadowed_by_use() {
+ check_types(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::u16;
+
+fn f() -> u16 {
+ let x = u16::from_le_bytes();
+ x
+ //^ u16
+}
+
+//- /core.rs crate:core
+pub mod u16 {}
+
+impl u16 {
+ pub fn from_le_bytes() -> Self { 0 }
+}
+ "#,
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
new file mode 100644
index 000000000..fbdc8209f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
@@ -0,0 +1,485 @@
+use expect_test::expect;
+
+use super::{check_infer_with_mismatches, check_no_mismatches, check_types};
+
+#[test]
+fn infer_never1() {
+ check_types(
+ r#"
+fn test() {
+ let t = return;
+ t;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn infer_never2() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { a } else { loop {} };
+ a;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn infer_never3() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { loop {} } else { a };
+ a;
+ //^ !
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_in_generic_args() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+} //^ Option<!>
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred1() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { loop {} } else { a };
+ a;
+ //^ ()
+ if false { a };
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred2() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+ //^ Option<i32>
+ match 42 {
+ 42 => a,
+ _ => Option::Some(42),
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred3() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+ //^ Option<&str>
+ match 42 {
+ 42 => a,
+ _ => Option::Some("str"),
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn match_no_arm() {
+ check_types(
+ r#"
+enum Void {}
+
+fn test(a: Void) {
+ let t = match a {};
+ t;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn match_unknown_arm() {
+ check_types(
+ r#"
+fn test(a: Option) {
+ let t = match 0 {
+ _ => unknown,
+ };
+ t;
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn if_never() {
+ check_types(
+ r#"
+fn test() {
+ let i = if true {
+ loop {}
+ } else {
+ 3.0
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn if_else_never() {
+ check_types(
+ r#"
+fn test(input: bool) {
+ let i = if input {
+ 2.0
+ } else {
+ return
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_first_arm_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 1 => return,
+ 2 => 2.0,
+ 3 => loop {},
+ _ => 3.0,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_second_arm_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 1 => 3.0,
+ 2 => loop {},
+ 3 => 3.0,
+ _ => return,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_all_arms_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 2 => return,
+ _ => loop {},
+ };
+ i;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn match_no_never_arms() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 2 => 2.0,
+ _ => 3.0,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn diverging_expression_1() {
+ check_infer_with_mismatches(
+ r"
+ //- /main.rs
+ fn test1() {
+ let x: u32 = return;
+ }
+ fn test2() {
+ let x: u32 = { return; };
+ }
+ fn test3() {
+ let x: u32 = loop {};
+ }
+ fn test4() {
+ let x: u32 = { loop {} };
+ }
+ fn test5() {
+ let x: u32 = { if true { loop {}; } else { loop {}; } };
+ }
+ fn test6() {
+ let x: u32 = { let y: u32 = { loop {}; }; };
+ }
+ ",
+ expect![[r"
+ 11..39 '{ ...urn; }': ()
+ 21..22 'x': u32
+ 30..36 'return': !
+ 51..84 '{ ...; }; }': ()
+ 61..62 'x': u32
+ 70..81 '{ return; }': u32
+ 72..78 'return': !
+ 96..125 '{ ... {}; }': ()
+ 106..107 'x': u32
+ 115..122 'loop {}': !
+ 120..122 '{}': ()
+ 137..170 '{ ...} }; }': ()
+ 147..148 'x': u32
+ 156..167 '{ loop {} }': u32
+ 158..165 'loop {}': !
+ 163..165 '{}': ()
+ 182..246 '{ ...} }; }': ()
+ 192..193 'x': u32
+ 201..243 '{ if t...}; } }': u32
+ 203..241 'if tru... {}; }': u32
+ 206..210 'true': bool
+ 211..223 '{ loop {}; }': u32
+ 213..220 'loop {}': !
+ 218..220 '{}': ()
+ 229..241 '{ loop {}; }': u32
+ 231..238 'loop {}': !
+ 236..238 '{}': ()
+ 258..310 '{ ...; }; }': ()
+ 268..269 'x': u32
+ 277..307 '{ let ...; }; }': u32
+ 283..284 'y': u32
+ 292..304 '{ loop {}; }': u32
+ 294..301 'loop {}': !
+ 299..301 '{}': ()
+ "]],
+ );
+}
+
+#[test]
+fn diverging_expression_2() {
+ check_infer_with_mismatches(
+ r#"
+ //- /main.rs
+ fn test1() {
+ // should give type mismatch
+ let x: u32 = { loop {}; "foo" };
+ }
+ "#,
+ expect![[r#"
+ 11..84 '{ ..." }; }': ()
+ 54..55 'x': u32
+ 63..81 '{ loop...foo" }': u32
+ 65..72 'loop {}': !
+ 70..72 '{}': ()
+ 74..79 '"foo"': &str
+ 74..79: expected u32, got &str
+ "#]],
+ );
+}
+
+#[test]
+fn diverging_expression_3_break() {
+ check_infer_with_mismatches(
+ r"
+ //- /main.rs
+ fn test1() {
+ // should give type mismatch
+ let x: u32 = { loop { break; } };
+ }
+ fn test2() {
+ // should give type mismatch
+ let x: u32 = { for a in b { break; }; };
+ // should give type mismatch as well
+ let x: u32 = { for a in b {}; };
+ // should give type mismatch as well
+ let x: u32 = { for a in b { return; }; };
+ }
+ fn test3() {
+ // should give type mismatch
+ let x: u32 = { while true { break; }; };
+ // should give type mismatch as well -- there's an implicit break, even if it's never hit
+ let x: u32 = { while true {}; };
+ // should give type mismatch as well
+ let x: u32 = { while true { return; }; };
+ }
+ ",
+ expect![[r#"
+ 11..85 '{ ...} }; }': ()
+ 54..55 'x': u32
+ 63..82 '{ loop...k; } }': u32
+ 65..80 'loop { break; }': ()
+ 70..80 '{ break; }': ()
+ 72..77 'break': !
+ 65..80: expected u32, got ()
+ 97..343 '{ ...; }; }': ()
+ 140..141 'x': u32
+ 149..175 '{ for ...; }; }': u32
+ 151..172 'for a ...eak; }': ()
+ 155..156 'a': {unknown}
+ 160..161 'b': {unknown}
+ 162..172 '{ break; }': ()
+ 164..169 'break': !
+ 226..227 'x': u32
+ 235..253 '{ for ... {}; }': u32
+ 237..250 'for a in b {}': ()
+ 241..242 'a': {unknown}
+ 246..247 'b': {unknown}
+ 248..250 '{}': ()
+ 304..305 'x': u32
+ 313..340 '{ for ...; }; }': u32
+ 315..337 'for a ...urn; }': ()
+ 319..320 'a': {unknown}
+ 324..325 'b': {unknown}
+ 326..337 '{ return; }': ()
+ 328..334 'return': !
+ 149..175: expected u32, got ()
+ 235..253: expected u32, got ()
+ 313..340: expected u32, got ()
+ 355..654 '{ ...; }; }': ()
+ 398..399 'x': u32
+ 407..433 '{ whil...; }; }': u32
+ 409..430 'while ...eak; }': ()
+ 415..419 'true': bool
+ 420..430 '{ break; }': ()
+ 422..427 'break': !
+ 537..538 'x': u32
+ 546..564 '{ whil... {}; }': u32
+ 548..561 'while true {}': ()
+ 554..558 'true': bool
+ 559..561 '{}': ()
+ 615..616 'x': u32
+ 624..651 '{ whil...; }; }': u32
+ 626..648 'while ...urn; }': ()
+ 632..636 'true': bool
+ 637..648 '{ return; }': ()
+ 639..645 'return': !
+ 407..433: expected u32, got ()
+ 546..564: expected u32, got ()
+ 624..651: expected u32, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn let_else_must_diverge() {
+ check_infer_with_mismatches(
+ r#"
+ fn f() {
+ let 1 = 2 else {
+ return;
+ };
+ }
+ "#,
+ expect![[r#"
+ 7..54 '{ ... }; }': ()
+ 17..18 '1': i32
+ 17..18 '1': i32
+ 21..22 '2': i32
+ 28..51 '{ ... }': !
+ 38..44 'return': !
+ "#]],
+ );
+ check_infer_with_mismatches(
+ r#"
+ fn f() {
+ let 1 = 2 else {};
+ }
+ "#,
+ expect![[r#"
+ 7..33 '{ ... {}; }': ()
+ 17..18 '1': i32
+ 17..18 '1': i32
+ 21..22 '2': i32
+ 28..30 '{}': !
+ 28..30: expected !, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_11837() {
+ check_no_mismatches(
+ r#"
+//- minicore: result
+enum MyErr {
+ Err1,
+ Err2,
+}
+
+fn example_ng() {
+ let value: Result<i32, MyErr> = Ok(3);
+
+ loop {
+ let ret = match value {
+ Ok(value) => value,
+ Err(ref err) => {
+ match err {
+ MyErr::Err1 => break,
+ MyErr::Err2 => continue,
+ };
+ }
+ };
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn issue_11814() {
+ check_no_mismatches(
+ r#"
+fn example() -> bool {
+ match 1 {
+ _ => return true,
+ };
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
new file mode 100644
index 000000000..399553356
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
@@ -0,0 +1,991 @@
+use expect_test::expect;
+
+use super::{check, check_infer, check_infer_with_mismatches, check_types};
+
+#[test]
+fn infer_pattern() {
+ check_infer(
+ r#"
+ fn test(x: &i32) {
+ let y = x;
+ let &z = x;
+ let a = z;
+ let (c, d) = (1, "hello");
+
+ for (e, f) in some_iter {
+ let g = e;
+ }
+
+ if let [val] = opt {
+ let h = val;
+ }
+
+ if let x @ true = &true {}
+
+ let lambda = |a: u64, b, c: i32| { a + b; c };
+
+ let ref ref_to_x = x;
+ let mut mut_x = x;
+ let ref mut mut_ref_to_x = x;
+ let k = mut_ref_to_x;
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &i32
+ 17..400 '{ ...o_x; }': ()
+ 27..28 'y': &i32
+ 31..32 'x': &i32
+ 42..44 '&z': &i32
+ 43..44 'z': i32
+ 47..48 'x': &i32
+ 58..59 'a': i32
+ 62..63 'z': i32
+ 73..79 '(c, d)': (i32, &str)
+ 74..75 'c': i32
+ 77..78 'd': &str
+ 82..94 '(1, "hello")': (i32, &str)
+ 83..84 '1': i32
+ 86..93 '"hello"': &str
+ 101..151 'for (e... }': ()
+ 105..111 '(e, f)': ({unknown}, {unknown})
+ 106..107 'e': {unknown}
+ 109..110 'f': {unknown}
+ 115..124 'some_iter': {unknown}
+ 125..151 '{ ... }': ()
+ 139..140 'g': {unknown}
+ 143..144 'e': {unknown}
+ 157..204 'if let... }': ()
+ 160..175 'let [val] = opt': bool
+ 164..169 '[val]': [{unknown}]
+ 165..168 'val': {unknown}
+ 172..175 'opt': [{unknown}]
+ 176..204 '{ ... }': ()
+ 190..191 'h': {unknown}
+ 194..197 'val': {unknown}
+ 210..236 'if let...rue {}': ()
+ 213..233 'let x ... &true': bool
+ 217..225 'x @ true': &bool
+ 221..225 'true': bool
+ 221..225 'true': bool
+ 228..233 '&true': &bool
+ 229..233 'true': bool
+ 234..236 '{}': ()
+ 246..252 'lambda': |u64, u64, i32| -> i32
+ 255..287 '|a: u6...b; c }': |u64, u64, i32| -> i32
+ 256..257 'a': u64
+ 264..265 'b': u64
+ 267..268 'c': i32
+ 275..287 '{ a + b; c }': i32
+ 277..278 'a': u64
+ 277..282 'a + b': u64
+ 281..282 'b': u64
+ 284..285 'c': i32
+ 298..310 'ref ref_to_x': &&i32
+ 313..314 'x': &i32
+ 324..333 'mut mut_x': &i32
+ 336..337 'x': &i32
+ 347..367 'ref mu...f_to_x': &mut &i32
+ 370..371 'x': &i32
+ 381..382 'k': &mut &i32
+ 385..397 'mut_ref_to_x': &mut &i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literal_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ fn any<T>() -> T { loop {} }
+ fn test(x: &i32) {
+ if let "foo" = any() {}
+ if let 1 = any() {}
+ if let 1u32 = any() {}
+ if let 1f32 = any() {}
+ if let 1.0 = any() {}
+ if let true = any() {}
+ }
+ "#,
+ expect![[r#"
+ 17..28 '{ loop {} }': T
+ 19..26 'loop {}': !
+ 24..26 '{}': ()
+ 37..38 'x': &i32
+ 46..208 '{ ...) {} }': ()
+ 52..75 'if let...y() {}': ()
+ 55..72 'let "f... any()': bool
+ 59..64 '"foo"': &str
+ 59..64 '"foo"': &str
+ 67..70 'any': fn any<&str>() -> &str
+ 67..72 'any()': &str
+ 73..75 '{}': ()
+ 80..99 'if let...y() {}': ()
+ 83..96 'let 1 = any()': bool
+ 87..88 '1': i32
+ 87..88 '1': i32
+ 91..94 'any': fn any<i32>() -> i32
+ 91..96 'any()': i32
+ 97..99 '{}': ()
+ 104..126 'if let...y() {}': ()
+ 107..123 'let 1u... any()': bool
+ 111..115 '1u32': u32
+ 111..115 '1u32': u32
+ 118..121 'any': fn any<u32>() -> u32
+ 118..123 'any()': u32
+ 124..126 '{}': ()
+ 131..153 'if let...y() {}': ()
+ 134..150 'let 1f... any()': bool
+ 138..142 '1f32': f32
+ 138..142 '1f32': f32
+ 145..148 'any': fn any<f32>() -> f32
+ 145..150 'any()': f32
+ 151..153 '{}': ()
+ 158..179 'if let...y() {}': ()
+ 161..176 'let 1.0 = any()': bool
+ 165..168 '1.0': f64
+ 165..168 '1.0': f64
+ 171..174 'any': fn any<f64>() -> f64
+ 171..176 'any()': f64
+ 177..179 '{}': ()
+ 184..206 'if let...y() {}': ()
+ 187..203 'let tr... any()': bool
+ 191..195 'true': bool
+ 191..195 'true': bool
+ 198..201 'any': fn any<bool>() -> bool
+ 198..203 'any()': bool
+ 204..206 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_range_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ fn test(x: &i32) {
+ if let 1..76 = 2u32 {}
+ if let 1..=76 = 2u32 {}
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &i32
+ 17..75 '{ ...2 {} }': ()
+ 23..45 'if let...u32 {}': ()
+ 26..42 'let 1....= 2u32': bool
+ 30..35 '1..76': u32
+ 38..42 '2u32': u32
+ 43..45 '{}': ()
+ 50..73 'if let...u32 {}': ()
+ 53..70 'let 1....= 2u32': bool
+ 57..63 '1..=76': u32
+ 66..70 '2u32': u32
+ 71..73 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_ergonomics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+
+ fn test() {
+ let A(n) = &A(1);
+ let A(n) = &mut A(1);
+ }
+ "#,
+ expect![[r#"
+ 27..78 '{ ...(1); }': ()
+ 37..41 'A(n)': A<i32>
+ 39..40 'n': &i32
+ 44..49 '&A(1)': &A<i32>
+ 45..46 'A': A<i32>(i32) -> A<i32>
+ 45..49 'A(1)': A<i32>
+ 47..48 '1': i32
+ 59..63 'A(n)': A<i32>
+ 61..62 'n': &mut i32
+ 66..75 '&mut A(1)': &mut A<i32>
+ 71..72 'A': A<i32>(i32) -> A<i32>
+ 71..75 'A(1)': A<i32>
+ 73..74 '1': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_ergonomics_ref() {
+ cov_mark::check!(match_ergonomics_ref);
+ check_infer(
+ r#"
+ fn test() {
+ let v = &(1, &2);
+ let (_, &w) = v;
+ }
+ "#,
+ expect![[r#"
+ 10..56 '{ ...= v; }': ()
+ 20..21 'v': &(i32, &i32)
+ 24..32 '&(1, &2)': &(i32, &i32)
+ 25..32 '(1, &2)': (i32, &i32)
+ 26..27 '1': i32
+ 29..31 '&2': &i32
+ 30..31 '2': i32
+ 42..49 '(_, &w)': (i32, &i32)
+ 43..44 '_': i32
+ 46..48 '&w': &i32
+ 47..48 'w': i32
+ 52..53 'v': &(i32, &i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_slice() {
+ check_infer(
+ r#"
+ fn test() {
+ let slice: &[f64] = &[0.0];
+ match slice {
+ &[] => {},
+ &[a] => {
+ a;
+ },
+ &[b, c] => {
+ b;
+ c;
+ }
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..209 '{ ... } }': ()
+ 20..25 'slice': &[f64]
+ 36..42 '&[0.0]': &[f64; 1]
+ 37..42 '[0.0]': [f64; 1]
+ 38..41 '0.0': f64
+ 48..207 'match ... }': ()
+ 54..59 'slice': &[f64]
+ 70..73 '&[]': &[f64]
+ 71..73 '[]': [f64]
+ 77..79 '{}': ()
+ 89..93 '&[a]': &[f64]
+ 90..93 '[a]': [f64]
+ 91..92 'a': f64
+ 97..123 '{ ... }': ()
+ 111..112 'a': f64
+ 133..140 '&[b, c]': &[f64]
+ 134..140 '[b, c]': [f64]
+ 135..136 'b': f64
+ 138..139 'c': f64
+ 144..185 '{ ... }': ()
+ 158..159 'b': f64
+ 173..174 'c': f64
+ 194..195 '_': &[f64]
+ 199..201 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_string_literal() {
+ check_infer_with_mismatches(
+ r#"
+ fn test() {
+ let s: &str = "hello";
+ match s {
+ "hello" => {}
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..98 '{ ... } }': ()
+ 20..21 's': &str
+ 30..37 '"hello"': &str
+ 43..96 'match ... }': ()
+ 49..50 's': &str
+ 61..68 '"hello"': &str
+ 61..68 '"hello"': &str
+ 72..74 '{}': ()
+ 83..84 '_': &str
+ 88..90 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_or() {
+ check_infer_with_mismatches(
+ r#"
+ fn test() {
+ let s: &str = "hello";
+ match s {
+ "hello" | "world" => {}
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..108 '{ ... } }': ()
+ 20..21 's': &str
+ 30..37 '"hello"': &str
+ 43..106 'match ... }': ()
+ 49..50 's': &str
+ 61..68 '"hello"': &str
+ 61..68 '"hello"': &str
+ 61..78 '"hello...world"': &str
+ 71..78 '"world"': &str
+ 71..78 '"world"': &str
+ 82..84 '{}': ()
+ 93..94 '_': &str
+ 98..100 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_arr() {
+ check_infer(
+ r#"
+ fn test() {
+ let arr: [f64; 2] = [0.0, 1.0];
+ match arr {
+ [1.0, a] => {
+ a;
+ },
+ [b, c] => {
+ b;
+ c;
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..179 '{ ... } }': ()
+ 20..23 'arr': [f64; 2]
+ 36..46 '[0.0, 1.0]': [f64; 2]
+ 37..40 '0.0': f64
+ 42..45 '1.0': f64
+ 52..177 'match ... }': ()
+ 58..61 'arr': [f64; 2]
+ 72..80 '[1.0, a]': [f64; 2]
+ 73..76 '1.0': f64
+ 73..76 '1.0': f64
+ 78..79 'a': f64
+ 84..110 '{ ... }': ()
+ 98..99 'a': f64
+ 120..126 '[b, c]': [f64; 2]
+ 121..122 'b': f64
+ 124..125 'c': f64
+ 130..171 '{ ... }': ()
+ 144..145 'b': f64
+ 159..160 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_adt_pattern() {
+ check_infer(
+ r#"
+ enum E {
+ A { x: usize },
+ B
+ }
+
+ struct S(u32, E);
+
+ fn test() {
+ let e = E::A { x: 3 };
+
+ let S(y, z) = foo;
+ let E::A { x: new_var } = e;
+
+ match e {
+ E::A { x } => x,
+ E::B if foo => 1,
+ E::B => 10,
+ };
+
+ let ref d @ E::A { .. } = e;
+ d;
+ }
+ "#,
+ expect![[r#"
+ 67..288 '{ ... d; }': ()
+ 77..78 'e': E
+ 81..94 'E::A { x: 3 }': E
+ 91..92 '3': usize
+ 105..112 'S(y, z)': S
+ 107..108 'y': u32
+ 110..111 'z': E
+ 115..118 'foo': S
+ 128..147 'E::A {..._var }': E
+ 138..145 'new_var': usize
+ 150..151 'e': E
+ 158..244 'match ... }': usize
+ 164..165 'e': E
+ 176..186 'E::A { x }': E
+ 183..184 'x': usize
+ 190..191 'x': usize
+ 201..205 'E::B': E
+ 209..212 'foo': bool
+ 216..217 '1': usize
+ 227..231 'E::B': E
+ 235..237 '10': usize
+ 255..274 'ref d ...{ .. }': &E
+ 263..274 'E::A { .. }': E
+ 277..278 'e': E
+ 284..285 'd': &E
+ "#]],
+ );
+}
+
+#[test]
+fn enum_variant_through_self_in_pattern() {
+ check_infer(
+ r#"
+ enum E {
+ A { x: usize },
+ B(usize),
+ C
+ }
+
+ impl E {
+ fn test() {
+ match (loop {}) {
+ Self::A { x } => { x; },
+ Self::B(x) => { x; },
+ Self::C => {},
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 75..217 '{ ... }': ()
+ 85..210 'match ... }': ()
+ 92..99 'loop {}': !
+ 97..99 '{}': ()
+ 115..128 'Self::A { x }': E
+ 125..126 'x': usize
+ 132..138 '{ x; }': ()
+ 134..135 'x': usize
+ 152..162 'Self::B(x)': E
+ 160..161 'x': usize
+ 166..172 '{ x; }': ()
+ 168..169 'x': usize
+ 186..193 'Self::C': E
+ 197..199 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generics_in_patterns() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+
+ fn test(a1: A<u32>, o: Option<u64>) {
+ let A { x: x2 } = a1;
+ let A::<i64> { x: x3 } = A { x: 1 };
+ match o {
+ Option::Some(t) => t,
+ _ => 1,
+ };
+ }
+ "#,
+ expect![[r#"
+ 78..80 'a1': A<u32>
+ 90..91 'o': Option<u64>
+ 106..243 '{ ... }; }': ()
+ 116..127 'A { x: x2 }': A<u32>
+ 123..125 'x2': u32
+ 130..132 'a1': A<u32>
+ 142..160 'A::<i6...: x3 }': A<i64>
+ 156..158 'x3': i64
+ 163..173 'A { x: 1 }': A<i64>
+ 170..171 '1': i64
+ 179..240 'match ... }': u64
+ 185..186 'o': Option<u64>
+ 197..212 'Option::Some(t)': Option<u64>
+ 210..211 't': u64
+ 216..217 't': u64
+ 227..228 '_': Option<u64>
+ 232..233 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const_pattern() {
+ check(
+ r#"
+enum Option<T> { None }
+use Option::None;
+struct Foo;
+const Bar: usize = 1;
+
+fn test() {
+ let a: Option<u32> = None;
+ let b: Option<i64> = match a {
+ None => None,
+ };
+ let _: () = match () { Foo => () };
+ // ^^^ expected (), got Foo
+ let _: () = match () { Bar => () };
+ // ^^^ expected (), got usize
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_guard() {
+ check_infer(
+ r#"
+struct S;
+impl S { fn foo(&self) -> bool { false } }
+
+fn main() {
+ match S {
+ s if s.foo() => (),
+ }
+}
+ "#,
+ expect![[r#"
+ 27..31 'self': &S
+ 41..50 '{ false }': bool
+ 43..48 'false': bool
+ 64..115 '{ ... } }': ()
+ 70..113 'match ... }': ()
+ 76..77 'S': S
+ 88..89 's': S
+ 93..94 's': S
+ 93..100 's.foo()': bool
+ 104..106 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn match_ergonomics_in_closure_params() {
+ check_infer(
+ r#"
+//- minicore: fn
+fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
+
+fn test() {
+ foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
+ foo(&(1, "a"), |(x, y)| x);
+}
+"#,
+ expect![[r#"
+ 32..33 't': T
+ 38..39 'f': F
+ 49..60 '{ loop {} }': U
+ 51..58 'loop {}': !
+ 56..58 '{}': ()
+ 72..171 '{ ... x); }': ()
+ 78..81 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
+ 78..105 'foo(&(...y)| x)': i32
+ 82..91 '&(1, "a")': &(i32, &str)
+ 83..91 '(1, "a")': (i32, &str)
+ 84..85 '1': i32
+ 87..90 '"a"': &str
+ 93..104 '|&(x, y)| x': |&(i32, &str)| -> i32
+ 94..101 '&(x, y)': &(i32, &str)
+ 95..101 '(x, y)': (i32, &str)
+ 96..97 'x': i32
+ 99..100 'y': &str
+ 103..104 'x': i32
+ 142..145 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
+ 142..168 'foo(&(...y)| x)': &i32
+ 146..155 '&(1, "a")': &(i32, &str)
+ 147..155 '(1, "a")': (i32, &str)
+ 148..149 '1': i32
+ 151..154 '"a"': &str
+ 157..167 '|(x, y)| x': |&(i32, &str)| -> &i32
+ 158..164 '(x, y)': (i32, &str)
+ 159..160 'x': &i32
+ 162..163 'y': &&str
+ 166..167 'x': &i32
+ "#]],
+ );
+}
+
+#[test]
+fn slice_tail_pattern() {
+ check_infer(
+ r#"
+ fn foo(params: &[i32]) {
+ match params {
+ [head, tail @ ..] => {
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[i32]
+ 23..92 '{ ... } }': ()
+ 29..90 'match ... }': ()
+ 35..41 'params': &[i32]
+ 52..69 '[head,... @ ..]': [i32]
+ 53..57 'head': &i32
+ 59..68 'tail @ ..': &[i32]
+ 66..68 '..': [i32]
+ 73..84 '{ }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn box_pattern() {
+ check_infer(
+ r#"
+ pub struct Global;
+ #[lang = "owned_box"]
+ pub struct Box<T, A = Global>(T);
+
+ fn foo(params: Box<i32>) {
+ match params {
+ box integer => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 83..89 'params': Box<i32, Global>
+ 101..155 '{ ... } }': ()
+ 107..153 'match ... }': ()
+ 113..119 'params': Box<i32, Global>
+ 130..141 'box integer': Box<i32, Global>
+ 134..141 'integer': i32
+ 145..147 '{}': ()
+ "#]],
+ );
+ check_infer(
+ r#"
+ #[lang = "owned_box"]
+ pub struct Box<T>(T);
+
+ fn foo(params: Box<i32>) {
+ match params {
+ box integer => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 52..58 'params': Box<i32>
+ 70..124 '{ ... } }': ()
+ 76..122 'match ... }': ()
+ 82..88 'params': Box<i32>
+ 99..110 'box integer': Box<i32>
+ 103..110 'integer': i32
+ 114..116 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_ellipsis_pattern() {
+ check_infer_with_mismatches(
+ r#"
+fn foo(tuple: (u8, i16, f32)) {
+ match tuple {
+ (.., b, c) => {},
+ (a, .., c) => {},
+ (a, b, ..) => {},
+ (a, b) => {/*too short*/}
+ (a, b, c, d) => {/*too long*/}
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 7..12 'tuple': (u8, i16, f32)
+ 30..224 '{ ... } }': ()
+ 36..222 'match ... }': ()
+ 42..47 'tuple': (u8, i16, f32)
+ 58..68 '(.., b, c)': (u8, i16, f32)
+ 63..64 'b': i16
+ 66..67 'c': f32
+ 72..74 '{}': ()
+ 84..94 '(a, .., c)': (u8, i16, f32)
+ 85..86 'a': u8
+ 92..93 'c': f32
+ 98..100 '{}': ()
+ 110..120 '(a, b, ..)': (u8, i16, f32)
+ 111..112 'a': u8
+ 114..115 'b': i16
+ 124..126 '{}': ()
+ 136..142 '(a, b)': (u8, i16)
+ 137..138 'a': u8
+ 140..141 'b': i16
+ 146..161 '{/*too short*/}': ()
+ 170..182 '(a, b, c, d)': (u8, i16, f32, {unknown})
+ 171..172 'a': u8
+ 174..175 'b': i16
+ 177..178 'c': f32
+ 180..181 'd': {unknown}
+ 186..200 '{/*too long*/}': ()
+ 209..210 '_': (u8, i16, f32)
+ 214..216 '{}': ()
+ 136..142: expected (u8, i16, f32), got (u8, i16)
+ 170..182: expected (u8, i16, f32), got (u8, i16, f32, {unknown})
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_ellipsis_pattern() {
+ check_infer(
+ r#"
+struct Tuple(u8, i16, f32);
+fn foo(tuple: Tuple) {
+ match tuple {
+ Tuple(.., b, c) => {},
+ Tuple(a, .., c) => {},
+ Tuple(a, b, ..) => {},
+ Tuple(a, b) => {/*too short*/}
+ Tuple(a, b, c, d) => {/*too long*/}
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 35..40 'tuple': Tuple
+ 49..268 '{ ... } }': ()
+ 55..266 'match ... }': ()
+ 61..66 'tuple': Tuple
+ 77..92 'Tuple(.., b, c)': Tuple
+ 87..88 'b': i16
+ 90..91 'c': f32
+ 96..98 '{}': ()
+ 108..123 'Tuple(a, .., c)': Tuple
+ 114..115 'a': u8
+ 121..122 'c': f32
+ 127..129 '{}': ()
+ 139..154 'Tuple(a, b, ..)': Tuple
+ 145..146 'a': u8
+ 148..149 'b': i16
+ 158..160 '{}': ()
+ 170..181 'Tuple(a, b)': Tuple
+ 176..177 'a': u8
+ 179..180 'b': i16
+ 185..200 '{/*too short*/}': ()
+ 209..226 'Tuple(... c, d)': Tuple
+ 215..216 'a': u8
+ 218..219 'b': i16
+ 221..222 'c': f32
+ 224..225 'd': {unknown}
+ 230..244 '{/*too long*/}': ()
+ 253..254 '_': Tuple
+ 258..260 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn const_block_pattern() {
+ check_infer(
+ r#"
+struct Foo(usize);
+fn foo(foo: Foo) {
+ match foo {
+ const { Foo(15 + 32) } => {},
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 26..29 'foo': Foo
+ 36..115 '{ ... } }': ()
+ 42..113 'match ... }': ()
+ 48..51 'foo': Foo
+ 62..84 'const ... 32) }': Foo
+ 68..84 '{ Foo(... 32) }': Foo
+ 70..73 'Foo': Foo(usize) -> Foo
+ 70..82 'Foo(15 + 32)': Foo
+ 74..76 '15': usize
+ 74..81 '15 + 32': usize
+ 79..81 '32': usize
+ 88..90 '{}': ()
+ 100..101 '_': Foo
+ 105..107 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn macro_pat() {
+ check_types(
+ r#"
+macro_rules! pat {
+ ($name:ident) => { Enum::Variant1($name) }
+}
+
+enum Enum {
+ Variant1(u8),
+ Variant2,
+}
+
+fn f(e: Enum) {
+ match e {
+ pat!(bind) => {
+ bind;
+ //^^^^ u8
+ }
+ Enum::Variant2 => {}
+ }
+}
+ "#,
+ )
+}
+
+#[test]
+fn type_mismatch_in_or_pattern() {
+ check_infer_with_mismatches(
+ r#"
+fn main() {
+ match (false,) {
+ (true | (),) => {}
+ (() | true,) => {}
+ (_ | (),) => {}
+ (() | _,) => {}
+ }
+}
+"#,
+ expect![[r#"
+ 10..142 '{ ... } }': ()
+ 16..140 'match ... }': ()
+ 22..30 '(false,)': (bool,)
+ 23..28 'false': bool
+ 41..53 '(true | (),)': (bool,)
+ 42..46 'true': bool
+ 42..46 'true': bool
+ 42..51 'true | ()': bool
+ 49..51 '()': ()
+ 57..59 '{}': ()
+ 68..80 '(() | true,)': ((),)
+ 69..71 '()': ()
+ 69..78 '() | true': ()
+ 74..78 'true': bool
+ 74..78 'true': bool
+ 84..86 '{}': ()
+ 95..104 '(_ | (),)': (bool,)
+ 96..97 '_': bool
+ 96..102 '_ | ()': bool
+ 100..102 '()': ()
+ 108..110 '{}': ()
+ 119..128 '(() | _,)': ((),)
+ 120..122 '()': ()
+ 120..126 '() | _': ()
+ 125..126 '_': bool
+ 132..134 '{}': ()
+ 49..51: expected bool, got ()
+ 68..80: expected (bool,), got ((),)
+ 69..71: expected bool, got ()
+ 69..78: expected bool, got ()
+ 100..102: expected bool, got ()
+ 119..128: expected (bool,), got ((),)
+ 120..122: expected bool, got ()
+ 120..126: expected bool, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn slice_pattern_correctly_handles_array_length() {
+ check_infer(
+ r#"
+fn main() {
+ let [head, middle @ .., tail, tail2] = [1, 2, 3, 4, 5];
+}
+ "#,
+ expect![[r#"
+ 10..73 '{ ... 5]; }': ()
+ 20..52 '[head,...tail2]': [i32; 5]
+ 21..25 'head': i32
+ 27..38 'middle @ ..': [i32; 2]
+ 36..38 '..': [i32; 2]
+ 40..44 'tail': i32
+ 46..51 'tail2': i32
+ 55..70 '[1, 2, 3, 4, 5]': [i32; 5]
+ 56..57 '1': i32
+ 59..60 '2': i32
+ 62..63 '3': i32
+ 65..66 '4': i32
+ 68..69 '5': i32
+ "#]],
+ );
+}
+
+#[test]
+fn pattern_lookup_in_value_ns() {
+ check_types(
+ r#"
+use self::Constructor::*;
+struct IntRange {
+ range: (),
+}
+enum Constructor {
+ IntRange(IntRange),
+}
+fn main() {
+ match Constructor::IntRange(IntRange { range: () }) {
+ IntRange(x) => {
+ x;
+ //^ IntRange
+ }
+ Constructor::IntRange(x) => {
+ x;
+ //^ IntRange
+ }
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn if_let_guards() {
+ check_types(
+ r#"
+fn main() {
+ match (0,) {
+ opt if let (x,) = opt => {
+ x;
+ //^ i32
+ }
+ _ => {}
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn tuple_wildcard() {
+ check_types(
+ r#"
+fn main() {
+ enum Option<T> {Some(T), None}
+ use Option::*;
+
+ let mut x = None;
+ x;
+ //^ Option<(i32, i32)>
+
+ if let Some((_, _a)) = x {}
+
+ x = Some((1, 2));
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
new file mode 100644
index 000000000..93a88ab58
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -0,0 +1,1650 @@
+use expect_test::expect;
+
+use super::{check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn bug_484() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = if true {};
+ }
+ "#,
+ expect![[r#"
+ 10..37 '{ ... {}; }': ()
+ 20..21 'x': ()
+ 24..34 'if true {}': ()
+ 27..31 'true': bool
+ 32..34 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn no_panic_on_field_of_enum() {
+ check_infer(
+ r#"
+ enum X {}
+
+ fn test(x: X) {
+ x.some_field;
+ }
+ "#,
+ expect![[r#"
+ 19..20 'x': X
+ 25..46 '{ ...eld; }': ()
+ 31..32 'x': X
+ 31..43 'x.some_field': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn bug_585() {
+ check_infer(
+ r#"
+ fn test() {
+ X {};
+ match x {
+ A::B {} => (),
+ A::Y() => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..88 '{ ... } }': ()
+ 16..20 'X {}': {unknown}
+ 26..86 'match ... }': ()
+ 32..33 'x': {unknown}
+ 44..51 'A::B {}': {unknown}
+ 55..57 '()': ()
+ 67..73 'A::Y()': {unknown}
+ 77..79 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn bug_651() {
+ check_infer(
+ r#"
+ fn quux() {
+ let y = 92;
+ 1 + y;
+ }
+ "#,
+ expect![[r#"
+ 10..40 '{ ...+ y; }': ()
+ 20..21 'y': i32
+ 24..26 '92': i32
+ 32..33 '1': i32
+ 32..37 '1 + y': i32
+ 36..37 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars() {
+ check_infer(
+ r#"
+ fn test() {
+ let y = unknown;
+ [y, &y];
+ }
+ "#,
+ expect![[r#"
+ 10..47 '{ ...&y]; }': ()
+ 20..21 'y': {unknown}
+ 24..31 'unknown': {unknown}
+ 37..44 '[y, &y]': [{unknown}; 2]
+ 38..39 'y': {unknown}
+ 41..43 '&y': &{unknown}
+ 42..43 'y': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars_2() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = unknown;
+ let y = unknown;
+ [(x, y), (&y, &x)];
+ }
+ "#,
+ expect![[r#"
+ 10..79 '{ ...x)]; }': ()
+ 20..21 'x': &{unknown}
+ 24..31 'unknown': &{unknown}
+ 41..42 'y': {unknown}
+ 45..52 'unknown': {unknown}
+ 58..76 '[(x, y..., &x)]': [(&{unknown}, {unknown}); 2]
+ 59..65 '(x, y)': (&{unknown}, {unknown})
+ 60..61 'x': &{unknown}
+ 63..64 'y': {unknown}
+ 67..75 '(&y, &x)': (&{unknown}, {unknown})
+ 68..70 '&y': &{unknown}
+ 69..70 'y': {unknown}
+ 72..74 '&x': &&{unknown}
+ 73..74 'x': &{unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn array_elements_expected_type() {
+ check_no_mismatches(
+ r#"
+ fn test() {
+ let x: [[u32; 2]; 2] = [[1, 2], [3, 4]];
+ }
+ "#,
+ );
+}
+
+#[test]
+fn infer_std_crash_1() {
+ // caused stack overflow, taken from std
+ check_infer(
+ r#"
+ enum Maybe<T> {
+ Real(T),
+ Fake,
+ }
+
+ fn write() {
+ match something_unknown {
+ Maybe::Real(ref mut something) => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 53..138 '{ ... } }': ()
+ 59..136 'match ... }': ()
+ 65..82 'someth...nknown': Maybe<{unknown}>
+ 93..123 'Maybe:...thing)': Maybe<{unknown}>
+ 105..122 'ref mu...ething': &mut {unknown}
+ 127..129 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_2() {
+ // caused "equating two type variables, ...", taken from std
+ check_infer(
+ r#"
+ fn test_line_buffer() {
+ &[0, b'\n', 1, b'\n'];
+ }
+ "#,
+ expect![[r#"
+ 22..52 '{ ...n']; }': ()
+ 28..49 '&[0, b...b'\n']': &[u8; 4]
+ 29..49 '[0, b'...b'\n']': [u8; 4]
+ 30..31 '0': u8
+ 33..38 'b'\n'': u8
+ 40..41 '1': u8
+ 43..48 'b'\n'': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_3() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn compute() {
+ match nope!() {
+ SizeSkeleton::Pointer { non_zero: true, tail } => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 17..107 '{ ... } }': ()
+ 23..105 'match ... }': ()
+ 29..36 'nope!()': {unknown}
+ 47..93 'SizeSk...tail }': {unknown}
+ 81..85 'true': bool
+ 81..85 'true': bool
+ 87..91 'tail': {unknown}
+ 97..99 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_4() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn primitive_type() {
+ match *self {
+ BorrowedRef { type_: Primitive(p), ..} => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 24..105 '{ ... } }': ()
+ 30..103 'match ... }': ()
+ 36..41 '*self': {unknown}
+ 37..41 'self': {unknown}
+ 52..90 'Borrow...), ..}': {unknown}
+ 73..85 'Primitive(p)': {unknown}
+ 83..84 'p': {unknown}
+ 94..96 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_5() {
+ // taken from rustc
+ check_infer(
+ r#"
+ fn extra_compiler_flags() {
+ for content in doesnt_matter {
+ let name = if doesnt_matter {
+ first
+ } else {
+ &content
+ };
+
+ let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
+ name
+ } else {
+ content
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 26..322 '{ ... } }': ()
+ 32..320 'for co... }': ()
+ 36..43 'content': {unknown}
+ 47..60 'doesnt_matter': {unknown}
+ 61..320 '{ ... }': ()
+ 75..79 'name': &{unknown}
+ 82..166 'if doe... }': &{unknown}
+ 85..98 'doesnt_matter': bool
+ 99..128 '{ ... }': &{unknown}
+ 113..118 'first': &{unknown}
+ 134..166 '{ ... }': &{unknown}
+ 148..156 '&content': &{unknown}
+ 149..156 'content': {unknown}
+ 181..188 'content': &{unknown}
+ 191..313 'if ICE... }': &{unknown}
+ 194..231 'ICE_RE..._VALUE': {unknown}
+ 194..247 'ICE_RE...&name)': bool
+ 241..246 '&name': &&{unknown}
+ 242..246 'name': &{unknown}
+ 248..276 '{ ... }': &{unknown}
+ 262..266 'name': &{unknown}
+ 282..313 '{ ... }': {unknown}
+ 296..303 'content': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_generics_crash() {
+ // another crash found typechecking rustc
+ check_infer(
+ r#"
+ struct Canonical<V> {
+ value: V,
+ }
+ struct QueryResponse<V> {
+ value: V,
+ }
+ fn test<R>(query_response: Canonical<QueryResponse<R>>) {
+ &query_response.value;
+ }
+ "#,
+ expect![[r#"
+ 91..105 'query_response': Canonical<QueryResponse<R>>
+ 136..166 '{ ...lue; }': ()
+ 142..163 '&query....value': &QueryResponse<R>
+ 143..157 'query_response': Canonical<QueryResponse<R>>
+ 143..163 'query_....value': QueryResponse<R>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paren_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = (bar!());
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()); }': ()
+ 54..55 'a': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = [bar!()];
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()]; }': ()
+ 54..55 'a': [u32; 1]
+ 58..66 '[bar!()]': [u32; 1]
+ "#]],
+ );
+}
+
+#[test]
+fn bug_1030() {
+ check_infer(
+ r#"
+ struct HashSet<T, H>;
+ struct FxHasher;
+ type FxHashSet<T> = HashSet<T, FxHasher>;
+
+ impl<T, H> HashSet<T, H> {
+ fn default() -> HashSet<T, H> {}
+ }
+
+ pub fn main_loop() {
+ FxHashSet::default();
+ }
+ "#,
+ expect![[r#"
+ 143..145 '{}': HashSet<T, H>
+ 168..197 '{ ...t(); }': ()
+ 174..192 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
+ 174..194 'FxHash...ault()': HashSet<{unknown}, FxHasher>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2669() {
+ check_infer(
+ r#"
+ trait A {}
+ trait Write {}
+ struct Response<T> {}
+
+ trait D {
+ fn foo();
+ }
+
+ impl<T:A> D for Response<T> {
+ fn foo() {
+ end();
+ fn end<W: Write>() {
+ let _x: T = loop {};
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 119..214 '{ ... }': ()
+ 129..132 'end': fn end<{unknown}>()
+ 129..134 'end()': ()
+ 163..208 '{ ... }': ()
+ 181..183 '_x': !
+ 190..197 'loop {}': !
+ 195..197 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn issue_2705() {
+ check_infer(
+ r#"
+ trait Trait {}
+ fn test() {
+ <Trait<u32>>::foo()
+ }
+ "#,
+ expect![[r#"
+ 25..52 '{ ...oo() }': ()
+ 31..48 '<Trait...>::foo': {unknown}
+ 31..50 '<Trait...:foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2683_chars_impl() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub struct Chars<'a> {}
+impl<'a> Iterator for Chars<'a> {
+ type Item = char;
+ fn next(&mut self) -> Option<char> { loop {} }
+}
+
+fn test() {
+ let chars: Chars<'_>;
+ (chars.next(), chars.nth(1));
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (Option<char>, Option<char>)
+"#,
+ );
+}
+
+#[test]
+fn issue_3999_slice() {
+ check_infer(
+ r#"
+ fn foo(params: &[usize]) {
+ match params {
+ [ps @ .., _] => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[usize]
+ 25..80 '{ ... } }': ()
+ 31..78 'match ... }': ()
+ 37..43 'params': &[usize]
+ 54..66 '[ps @ .., _]': [usize]
+ 55..62 'ps @ ..': &[usize]
+ 60..62 '..': [usize]
+ 64..65 '_': usize
+ 70..72 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_3999_struct() {
+ // rust-analyzer should not panic on seeing this malformed
+ // record pattern.
+ check_infer(
+ r#"
+ struct Bar {
+ a: bool,
+ }
+ fn foo(b: Bar) {
+ match b {
+ Bar { a: .. } => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 35..36 'b': Bar
+ 43..95 '{ ... } }': ()
+ 49..93 'match ... }': ()
+ 55..56 'b': Bar
+ 67..80 'Bar { a: .. }': Bar
+ 76..78 '..': bool
+ 84..86 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4235_name_conflicts() {
+ check_infer(
+ r#"
+ struct FOO {}
+ static FOO:FOO = FOO {};
+
+ impl FOO {
+ fn foo(&self) {}
+ }
+
+ fn main() {
+ let a = &FOO;
+ a.foo();
+ }
+ "#,
+ expect![[r#"
+ 31..37 'FOO {}': FOO
+ 63..67 'self': &FOO
+ 69..71 '{}': ()
+ 85..119 '{ ...o(); }': ()
+ 95..96 'a': &FOO
+ 99..103 '&FOO': &FOO
+ 100..103 'FOO': FOO
+ 109..110 'a': &FOO
+ 109..116 'a.foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4465_dollar_crate_at_type() {
+ check_infer(
+ r#"
+ pub struct Foo {}
+ pub fn anything<T>() -> T {
+ loop {}
+ }
+ macro_rules! foo {
+ () => {{
+ let r: $crate::Foo = anything();
+ r
+ }};
+ }
+ fn main() {
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ 44..59 '{ loop {} }': T
+ 50..57 'loop {}': !
+ 55..57 '{}': ()
+ !0..31 '{letr:...g();r}': Foo
+ !4..5 'r': Foo
+ !18..26 'anything': fn anything<Foo>() -> Foo
+ !18..28 'anything()': Foo
+ !29..30 'r': Foo
+ 163..187 '{ ...!(); }': ()
+ 173..175 '_a': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6811() {
+ check_infer(
+ r#"
+ macro_rules! profile_function {
+ () => {
+ let _a = 1;
+ let _b = 1;
+ };
+ }
+ fn main() {
+ profile_function!();
+ }
+ "#,
+ expect![[r#"
+ !0..16 'let_a=...t_b=1;': ()
+ !3..5 '_a': i32
+ !6..7 '1': i32
+ !11..13 '_b': i32
+ !14..15 '1': i32
+ 103..131 '{ ...!(); }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4053_diesel_where_clauses() {
+ check_infer(
+ r#"
+ trait BoxedDsl<DB> {
+ type Output;
+ fn internal_into_boxed(self) -> Self::Output;
+ }
+
+ struct SelectStatement<From, Select, Distinct, Where, Order, LimitOffset, GroupBy, Locking> {
+ order: Order,
+ }
+
+ trait QueryFragment<DB: Backend> {}
+
+ trait Into<T> { fn into(self) -> T; }
+
+ impl<F, S, D, W, O, LOf, DB> BoxedDsl<DB>
+ for SelectStatement<F, S, D, W, O, LOf, G>
+ where
+ O: Into<dyn QueryFragment<DB>>,
+ {
+ type Output = XXX;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ self.order.into();
+ }
+ }
+ "#,
+ expect![[r#"
+ 65..69 'self': Self
+ 267..271 'self': Self
+ 466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 488..522 '{ ... }': ()
+ 498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 498..508 'self.order': O
+ 498..515 'self.o...into()': dyn QueryFragment<DB>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4953() {
+ check_infer(
+ r#"
+ pub struct Foo(pub i64);
+ impl Foo {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 58..72 '{ Self(0i64) }': Foo
+ 60..64 'Self': Foo(i64) -> Foo
+ 60..70 'Self(0i64)': Foo
+ 65..69 '0i64': i64
+ "#]],
+ );
+ check_infer(
+ r#"
+ pub struct Foo<T>(pub T);
+ impl Foo<i64> {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 64..78 '{ Self(0i64) }': Foo<i64>
+ 66..70 'Self': Foo<i64>(i64) -> Foo<i64>
+ 66..76 'Self(0i64)': Foo<i64>
+ 71..75 '0i64': i64
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4931() {
+ check_infer(
+ r#"
+ trait Div<T> {
+ type Output;
+ }
+
+ trait CheckedDiv: Div<()> {}
+
+ trait PrimInt: CheckedDiv<Output = ()> {
+ fn pow(self);
+ }
+
+ fn check<T: PrimInt>(i: T) {
+ i.pow();
+ }
+ "#,
+ expect![[r#"
+ 117..121 'self': Self
+ 148..149 'i': T
+ 154..170 '{ ...w(); }': ()
+ 160..161 'i': T
+ 160..167 'i.pow()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4885() {
+ check_infer(
+ r#"
+ //- minicore: coerce_unsized, future
+ use core::future::Future;
+ trait Foo<R> {
+ type Bar;
+ }
+ fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ bar(key)
+ }
+ fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ }
+ "#,
+ expect![[r#"
+ 70..73 'key': &K
+ 132..148 '{ ...key) }': impl Future<Output = <K as Foo<R>>::Bar>
+ 138..141 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar>
+ 138..146 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
+ 142..145 'key': &K
+ 162..165 'key': &K
+ 224..227 '{ }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4800() {
+ check_infer(
+ r#"
+ trait Debug {}
+
+ struct Foo<T>;
+
+ type E1<T> = (T, T, T);
+ type E2<T> = E1<E1<E1<(T, T, T)>>>;
+
+ impl Debug for Foo<E2<()>> {}
+
+ struct Request;
+
+ pub trait Future {
+ type Output;
+ }
+
+ pub struct PeerSet<D>;
+
+ impl<D> Service<Request> for PeerSet<D>
+ where
+ D: Discover,
+ D::Key: Debug,
+ {
+ type Error = ();
+ type Future = dyn Future<Output = Self::Error>;
+
+ fn call(&mut self) -> Self::Future {
+ loop {}
+ }
+ }
+
+ pub trait Discover {
+ type Key;
+ }
+
+ pub trait Service<Request> {
+ type Error;
+ type Future: Future<Output = Self::Error>;
+ fn call(&mut self) -> Self::Future;
+ }
+ "#,
+ expect![[r#"
+ 379..383 'self': &mut PeerSet<D>
+ 401..424 '{ ... }': dyn Future<Output = ()>
+ 411..418 'loop {}': !
+ 416..418 '{}': ()
+ 575..579 'self': &mut Self
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4966() {
+ check_infer(
+ r#"
+ //- minicore: deref
+ pub trait IntoIterator {
+ type Item;
+ }
+
+ struct Repeat<A> { element: A }
+
+ struct Map<F> { f: F }
+
+ struct Vec<T> {}
+
+ impl<T> core::ops::Deref for Vec<T> {
+ type Target = [T];
+ }
+
+ fn from_iter<A, T: IntoIterator<Item = A>>(iter: T) -> Vec<A> {}
+
+ fn main() {
+ let inner = Map { f: |_: &f64| 0.0 };
+
+ let repeat = Repeat { element: inner };
+
+ let vec = from_iter(repeat);
+
+ vec.foo_bar();
+ }
+ "#,
+ expect![[r#"
+ 225..229 'iter': T
+ 244..246 '{}': Vec<A>
+ 258..402 '{ ...r(); }': ()
+ 268..273 'inner': Map<|&f64| -> f64>
+ 276..300 'Map { ... 0.0 }': Map<|&f64| -> f64>
+ 285..298 '|_: &f64| 0.0': |&f64| -> f64
+ 286..287 '_': &f64
+ 295..298 '0.0': f64
+ 311..317 'repeat': Repeat<Map<|&f64| -> f64>>
+ 320..345 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
+ 338..343 'inner': Map<|&f64| -> f64>
+ 356..359 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 362..371 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 362..379 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 372..378 'repeat': Repeat<Map<|&f64| -> f64>>
+ 386..389 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 386..399 'vec.foo_bar()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6628() {
+ check_infer(
+ r#"
+//- minicore: fn
+struct S<T>();
+impl<T> S<T> {
+ fn f(&self, _t: T) {}
+ fn g<F: FnOnce(&T)>(&self, _f: F) {}
+}
+fn main() {
+ let s = S();
+ s.g(|_x| {});
+ s.f(10);
+}
+"#,
+ expect![[r#"
+ 40..44 'self': &S<T>
+ 46..48 '_t': T
+ 53..55 '{}': ()
+ 81..85 'self': &S<T>
+ 87..89 '_f': F
+ 94..96 '{}': ()
+ 109..160 '{ ...10); }': ()
+ 119..120 's': S<i32>
+ 123..124 'S': S<i32>() -> S<i32>
+ 123..126 'S()': S<i32>
+ 132..133 's': S<i32>
+ 132..144 's.g(|_x| {})': ()
+ 136..143 '|_x| {}': |&i32| -> ()
+ 137..139 '_x': &i32
+ 141..143 '{}': ()
+ 150..151 's': S<i32>
+ 150..157 's.f(10)': ()
+ 154..156 '10': i32
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6852() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct BufWriter {}
+
+struct Mutex<T> {}
+struct MutexGuard<'a, T> {}
+impl<T> Mutex<T> {
+ fn lock(&self) -> MutexGuard<'_, T> {}
+}
+impl<'a, T: 'a> Deref for MutexGuard<'a, T> {
+ type Target = T;
+}
+fn flush(&self) {
+ let w: &Mutex<BufWriter>;
+ *(w.lock());
+}
+"#,
+ expect![[r#"
+ 123..127 'self': &Mutex<T>
+ 150..152 '{}': MutexGuard<T>
+ 234..238 'self': &{unknown}
+ 240..290 '{ ...()); }': ()
+ 250..251 'w': &Mutex<BufWriter>
+ 276..287 '*(w.lock())': BufWriter
+ 278..279 'w': &Mutex<BufWriter>
+ 278..286 'w.lock()': MutexGuard<BufWriter>
+ "#]],
+ );
+}
+
+#[test]
+fn param_overrides_fn() {
+ check_types(
+ r#"
+ fn example(example: i32) {
+ fn f() {}
+ example;
+ //^^^^^^^ i32
+ }
+ "#,
+ )
+}
+
+#[test]
+fn lifetime_from_chalk_during_deref() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Box<T: ?Sized> {}
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ loop {}
+ }
+}
+
+trait Iterator {
+ type Item;
+}
+
+pub struct Iter<'a, T: 'a> {
+ inner: Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>,
+}
+
+trait IterTrait<'a, T: 'a>: Iterator<Item = &'a T> {
+ fn clone_box(&self);
+}
+
+fn clone_iter<T>(s: Iter<T>) {
+ s.inner.clone_box();
+ //^^^^^^^^^^^^^^^^^^^ ()
+}
+"#,
+ )
+}
+
+#[test]
+fn issue_8686() {
+ check_infer(
+ r#"
+pub trait Try: FromResidual {
+ type Output;
+ type Residual;
+}
+pub trait FromResidual<R = <Self as Try>::Residual> {
+ fn from_residual(residual: R) -> Self;
+}
+
+struct ControlFlow<B, C>;
+impl<B, C> Try for ControlFlow<B, C> {
+ type Output = C;
+ type Residual = ControlFlow<B, !>;
+}
+impl<B, C> FromResidual for ControlFlow<B, C> {
+ fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow }
+}
+
+fn test() {
+ ControlFlow::from_residual(ControlFlow::<u32, !>);
+}
+ "#,
+ expect![[r#"
+ 144..152 'residual': R
+ 365..366 'r': ControlFlow<B, !>
+ 395..410 '{ ControlFlow }': ControlFlow<B, C>
+ 397..408 'ControlFlow': ControlFlow<B, C>
+ 424..482 '{ ...!>); }': ()
+ 430..456 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}>
+ 430..479 'Contro...2, !>)': ControlFlow<u32, {unknown}>
+ 457..478 'Contro...32, !>': ControlFlow<u32, !>
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_tail() {
+ // https://github.com/rust-lang/rust-analyzer/issues/8378
+ check_infer(
+ r#"
+ fn fake_tail(){
+ { "first" }
+ #[cfg(never)] 9
+ }
+ fn multiple_fake(){
+ { "fake" }
+ { "fake" }
+ { "second" }
+ #[cfg(never)] { 11 }
+ #[cfg(never)] 12;
+ #[cfg(never)] 13
+ }
+ fn no_normal_tail(){
+ { "third" }
+ #[cfg(never)] 14;
+ #[cfg(never)] 15;
+ }
+ fn no_actual_tail(){
+ { "fourth" };
+ #[cfg(never)] 14;
+ #[cfg(never)] 15
+ }
+ "#,
+ expect![[r#"
+ 14..53 '{ ...)] 9 }': ()
+ 20..31 '{ "first" }': ()
+ 22..29 '"first"': &str
+ 72..190 '{ ...] 13 }': ()
+ 78..88 '{ "fake" }': &str
+ 80..86 '"fake"': &str
+ 93..103 '{ "fake" }': &str
+ 95..101 '"fake"': &str
+ 108..120 '{ "second" }': ()
+ 110..118 '"second"': &str
+ 210..273 '{ ... 15; }': ()
+ 216..227 '{ "third" }': ()
+ 218..225 '"third"': &str
+ 293..357 '{ ...] 15 }': ()
+ 299..311 '{ "fourth" }': &str
+ 301..309 '"fourth"': &str
+ "#]],
+ )
+}
+
+#[test]
+fn impl_trait_in_option_9530() {
+ check_types(
+ r#"
+//- minicore: sized
+struct Option<T>;
+impl<T> Option<T> {
+ fn unwrap(self) -> T { loop {} }
+}
+fn make() -> Option<impl Copy> { Option }
+trait Copy {}
+fn test() {
+ let o = make();
+ o.unwrap();
+ //^^^^^^^^^^ impl Copy
+}
+ "#,
+ )
+}
+
+#[test]
+fn bare_dyn_trait_binders_9639() {
+ check_no_mismatches(
+ r#"
+//- minicore: fn, coerce_unsized
+fn infix_parse<T, S>(_state: S, _level_code: &Fn(S)) -> T {
+ loop {}
+}
+
+fn parse_arule() {
+ infix_parse((), &(|_recurse| ()))
+}
+ "#,
+ )
+}
+
+#[test]
+fn call_expected_type_closure() {
+ check_types(
+ r#"
+//- minicore: fn, option
+
+fn map<T, U>(o: Option<T>, f: impl FnOnce(T) -> U) -> Option<U> { loop {} }
+struct S {
+ field: u32
+}
+
+fn test() {
+ let o = Some(S { field: 2 });
+ let _: Option<()> = map(o, |s| { s.field; });
+ // ^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_diesel_panic() {
+ check_no_mismatches(
+ r#"
+//- minicore: option
+
+trait TypeMetadata {
+ type MetadataLookup;
+}
+
+pub struct Output<'a, T, DB>
+where
+ DB: TypeMetadata,
+ DB::MetadataLookup: 'a,
+{
+ out: T,
+ metadata_lookup: Option<&'a DB::MetadataLookup>,
+}
+
+impl<'a, T, DB: TypeMetadata> Output<'a, T, DB> {
+ pub fn new(out: T, metadata_lookup: &'a DB::MetadataLookup) -> Self {
+ Output {
+ out,
+ metadata_lookup: Some(metadata_lookup),
+ }
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn bitslice_panic() {
+ check_no_mismatches(
+ r#"
+//- minicore: option, deref
+
+pub trait BitView {
+ type Store;
+}
+
+pub struct Lsb0;
+
+pub struct BitArray<V: BitView> { }
+
+pub struct BitSlice<T> { }
+
+impl<V: BitView> core::ops::Deref for BitArray<V> {
+ type Target = BitSlice<V::Store>;
+}
+
+impl<T> BitSlice<T> {
+ pub fn split_first(&self) -> Option<(T, &Self)> { loop {} }
+}
+
+fn multiexp_inner() {
+ let exp: &BitArray<Foo>;
+ exp.split_first();
+}
+ "#,
+ );
+}
+
+#[test]
+fn macro_expands_to_impl_trait() {
+ check_no_mismatches(
+ r#"
+trait Foo {}
+
+macro_rules! ty {
+ () => {
+ impl Foo
+ }
+}
+
+fn foo(_: ty!()) {}
+
+fn bar() {
+ foo(());
+}
+ "#,
+ )
+}
+
+#[test]
+fn nested_macro_in_fn_params() {
+ check_no_mismatches(
+ r#"
+macro_rules! U32Inner {
+ () => {
+ u32
+ };
+}
+
+macro_rules! U32 {
+ () => {
+ U32Inner!()
+ };
+}
+
+fn mamba(a: U32!(), p: u32) -> u32 {
+ a
+}
+ "#,
+ )
+}
+
+#[test]
+fn for_loop_block_expr_iterable() {
+ check_infer(
+ r#"
+fn test() {
+ for _ in { let x = 0; } {
+ let y = 0;
+ }
+}
+ "#,
+ expect![[r#"
+ 10..68 '{ ... } }': ()
+ 16..66 'for _ ... }': ()
+ 20..21 '_': {unknown}
+ 25..39 '{ let x = 0; }': ()
+ 31..32 'x': i32
+ 35..36 '0': i32
+ 40..66 '{ ... }': ()
+ 54..55 'y': i32
+ 58..59 '0': i32
+ "#]],
+ );
+}
+
+#[test]
+fn while_loop_block_expr_iterable() {
+ check_infer(
+ r#"
+fn test() {
+ while { true } {
+ let y = 0;
+ }
+}
+ "#,
+ expect![[r#"
+ 10..59 '{ ... } }': ()
+ 16..57 'while ... }': ()
+ 22..30 '{ true }': bool
+ 24..28 'true': bool
+ 31..57 '{ ... }': ()
+ 45..46 'y': i32
+ 49..50 '0': i32
+ "#]],
+ );
+}
+
+#[test]
+fn bug_11242() {
+ // FIXME: wrong, should be u32
+ check_types(
+ r#"
+fn foo<A, B>()
+where
+ A: IntoIterator<Item = u32>,
+ B: IntoIterator<Item = usize>,
+{
+ let _x: <A as IntoIterator>::Item;
+ // ^^ {unknown}
+}
+
+pub trait Iterator {
+ type Item;
+}
+
+pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+}
+
+impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+}
+"#,
+ );
+}
+
+#[test]
+fn bug_11659() {
+ check_no_mismatches(
+ r#"
+struct LinkArray<const N: usize, LD>(LD);
+fn f<const N: usize, LD>(x: LD) -> LinkArray<N, LD> {
+ let r = LinkArray::<N, LD>(x);
+ r
+}
+
+fn test() {
+ let x = f::<2, i32>(5);
+ let y = LinkArray::<52, LinkArray<2, i32>>(x);
+}
+ "#,
+ );
+ check_no_mismatches(
+ r#"
+struct LinkArray<LD, const N: usize>(LD);
+fn f<const N: usize, LD>(x: LD) -> LinkArray<LD, N> {
+ let r = LinkArray::<LD, N>(x);
+ r
+}
+
+fn test() {
+ let x = f::<i32, 2>(5);
+ let y = LinkArray::<LinkArray<i32, 2>, 52>(x);
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_error_tolerance() {
+ check_no_mismatches(
+ r#"
+#[lang = "sized"]
+pub trait Sized {}
+
+struct CT<const N: usize, T>(T);
+struct TC<T, const N: usize>(T);
+fn f<const N: usize, T>(x: T) -> (CT<N, T>, TC<T, N>) {
+ let l = CT::<N, T>(x);
+ let r = TC::<N, T>(x);
+ (l, r)
+}
+
+trait TR1<const N: usize>;
+trait TR2<const N: usize>;
+
+impl<const N: usize, T> TR1<N> for CT<N, T>;
+impl<const N: usize, T> TR1<5> for TC<T, N>;
+impl<const N: usize, T> TR2<N> for CT<T, N>;
+
+trait TR3<const N: usize> {
+ fn tr3(&self) -> &Self;
+}
+
+impl<const N: usize, T> TR3<5> for TC<T, N> {
+ fn tr3(&self) -> &Self {
+ self
+ }
+}
+
+impl<const N: usize, T> TR3<Item = 5> for TC<T, N> {}
+impl<const N: usize, T> TR3<T> for TC<T, N> {}
+
+fn impl_trait<const N: usize>(inp: impl TR1<N>) {}
+fn dyn_trait<const N: usize>(inp: &dyn TR2<N>) {}
+fn impl_trait_bad<'a, const N: usize>(inp: impl TR1<i32>) -> impl TR1<'a, i32> {}
+fn impl_trait_very_bad<const N: usize>(inp: impl TR1<Item = i32>) -> impl TR1<'a, Item = i32, 5, Foo = N> {}
+
+fn test() {
+ f::<2, i32>(5);
+ f::<2, 2>(5);
+ f(5);
+ f::<i32>(5);
+ CT::<52, CT<2, i32>>(x);
+ CT::<CT<2, i32>>(x);
+ impl_trait_bad(5);
+ impl_trait_bad(12);
+ TR3<5>::tr3();
+ TR3<{ 2+3 }>::tr3();
+ TC::<i32, 10>(5).tr3();
+ TC::<i32, 20>(5).tr3();
+ TC::<i32, i32>(5).tr3();
+ TC::<i32, { 7 + 3 }>(5).tr3();
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_impl_trait() {
+ check_no_mismatches(
+ r#"
+ //- minicore: from
+
+ struct Foo<T, const M: usize>;
+
+ trait Tr<T> {
+ fn f(T) -> Self;
+ }
+
+ impl<T, const M: usize> Tr<[T; M]> for Foo<T, M> {
+ fn f(_: [T; M]) -> Self {
+ Self
+ }
+ }
+
+ fn test() {
+ Foo::f([1, 2, 7, 10]);
+ }
+ "#,
+ );
+}
+
+#[test]
+fn nalgebra_factorial() {
+ check_no_mismatches(
+ r#"
+ const FACTORIAL: [u128; 4] = [1, 1, 2, 6];
+
+ fn factorial(n: usize) -> u128 {
+ match FACTORIAL.get(n) {
+ Some(f) => *f,
+ None => panic!("{}! is greater than u128::MAX", n),
+ }
+ }
+ "#,
+ )
+}
+
+#[test]
+fn regression_11688_1() {
+ check_no_mismatches(
+ r#"
+ pub struct Buffer<T>(T);
+ type Writer = Buffer<u8>;
+ impl<T> Buffer<T> {
+ fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
+ loop {}
+ }
+ }
+ trait Encode<S> {
+ fn encode(self, w: &mut Writer, s: &mut S);
+ }
+ impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_2() {
+ check_types(
+ r#"
+ union MaybeUninit<T> {
+ uninit: (),
+ value: T,
+ }
+
+ impl<T> MaybeUninit<T> {
+ fn uninit_array<const LEN: usize>() -> [Self; LEN] {
+ loop {}
+ }
+ }
+
+ fn main() {
+ let x = MaybeUninit::<i32>::uninit_array::<1>();
+ //^ [MaybeUninit<i32>; 1]
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_3() {
+ check_types(
+ r#"
+ //- minicore: iterator
+ struct Ar<T, const N: u8>(T);
+ fn f<const LEN: usize, T, const BASE: u8>(
+ num_zeros: usize,
+ ) -> dyn Iterator<Item = [Ar<T, BASE>; LEN]> {
+ loop {}
+ }
+ fn dynamic_programming() {
+ for board in f::<9, u8, 7>(1) {
+ //^^^^^ [Ar<u8, 7>; 9]
+ }
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_4() {
+ check_types(
+ r#"
+ trait Bar<const C: usize> {
+ fn baz(&self) -> [i32; C];
+ }
+
+ fn foo(x: &dyn Bar<2>) {
+ x.baz();
+ //^^^^^^^ [i32; 2]
+ }
+ "#,
+ )
+}
+
+#[test]
+fn gat_crash_1() {
+ cov_mark::check!(ignore_gats);
+ check_no_mismatches(
+ r#"
+trait ATrait {}
+
+trait Crash {
+ type Member<const N: usize>: ATrait;
+ fn new<const N: usize>() -> Self::Member<N>;
+}
+
+fn test<T: Crash>() {
+ T::new();
+}
+"#,
+ );
+}
+
+#[test]
+fn gat_crash_2() {
+ check_no_mismatches(
+ r#"
+pub struct InlineStorage {}
+
+pub struct InlineStorageHandle<T: ?Sized> {}
+
+pub unsafe trait Storage {
+ type Handle<T: ?Sized>;
+ fn create<T: ?Sized>() -> Self::Handle<T>;
+}
+
+unsafe impl Storage for InlineStorage {
+ type Handle<T: ?Sized> = InlineStorageHandle<T>;
+}
+"#,
+ );
+}
+
+#[test]
+fn cfgd_out_self_param() {
+ cov_mark::check!(cfgd_out_self_param);
+ check_no_mismatches(
+ r#"
+struct S;
+impl S {
+ fn f(#[cfg(never)] &self) {}
+}
+
+fn f(s: S) {
+ s.f();
+}
+"#,
+ );
+}
+
+#[test]
+fn rust_161_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Drop + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+
+pub trait Destruct {}
+
+impl<T: ?Sized> const Destruct for T {}
+"#,
+ );
+}
+
+#[test]
+fn rust_162_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+
+#[lang = "destruct"]
+pub trait Destruct {}
+"#,
+ );
+}
+
+#[test]
+fn tuple_struct_pattern_with_unmatched_args_crash() {
+ check_infer(
+ r#"
+struct S(usize);
+fn main() {
+ let S(.., a, b) = S(1);
+ let (.., a, b) = (1,);
+}
+ "#,
+ expect![[r#"
+ 27..85 '{ ...1,); }': ()
+ 37..48 'S(.., a, b)': S
+ 43..44 'a': usize
+ 46..47 'b': {unknown}
+ 51..52 'S': S(usize) -> S
+ 51..55 'S(1)': S
+ 53..54 '1': usize
+ 65..75 '(.., a, b)': (i32, {unknown})
+ 70..71 'a': i32
+ 73..74 'b': {unknown}
+ 78..82 '(1,)': (i32,)
+ 79..80 '1': i32
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
new file mode 100644
index 000000000..5b08f5521
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -0,0 +1,3072 @@
+use expect_test::expect;
+
+use super::{check, check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn infer_box() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; 1]>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod prelude {}
+
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized> {
+ inner: *mut T,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_box_with_allocator() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32, {unknown}>, Box<Box<i32, {unknown}>, {unknown}>, Box<&i32, {unknown}>, Box<[i32; 1], {unknown}>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized, A: Allocator> {
+ inner: *mut T,
+ allocator: A,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_adt_self() {
+ check_types(
+ r#"
+enum Nat { Succ(Self), Demo(Nat), Zero }
+
+fn test() {
+ let foo: Nat = Nat::Zero;
+ if let Nat::Succ(x) = foo {
+ x;
+ } //^ Nat
+}
+"#,
+ );
+}
+
+#[test]
+fn self_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ impl S<u32> {
+ fn foo() {
+ Self { x: 1 };
+ }
+ }
+ "#,
+ expect![[r#"
+ 49..79 '{ ... }': ()
+ 59..72 'Self { x: 1 }': S<u32>
+ 69..70 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn type_alias_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ type SS = S<u32>;
+
+ fn foo() {
+ SS { x: 1 };
+ }
+ "#,
+ expect![[r#"
+ 50..70 '{ ...1 }; }': ()
+ 56..67 'SS { x: 1 }': S<u32>
+ 64..65 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_ranges() {
+ check_types(
+ r#"
+//- minicore: range
+fn test() {
+ let a = ..;
+ let b = 1..;
+ let c = ..2u32;
+ let d = 1..2usize;
+ let e = ..=10;
+ let f = 'a'..='z';
+
+ let t = (a, b, c, d, e, f);
+ t;
+} //^ (RangeFull, RangeFrom<i32>, RangeTo<u32>, Range<usize>, RangeToInclusive<i32>, RangeInclusive<char>)
+"#,
+ );
+}
+
+#[test]
+fn infer_while_let() {
+ check_types(
+ r#"
+enum Option<T> { Some(T), None }
+
+fn test() {
+ let foo: Option<f32> = None;
+ while let Option::Some(x) = foo {
+ x;
+ } //^ f32
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_basics() {
+ check_infer(
+ r#"
+fn test(a: u32, b: isize, c: !, d: &str) {
+ a;
+ b;
+ c;
+ d;
+ 1usize;
+ 1isize;
+ "test";
+ 1.0f32;
+}
+"#,
+ expect![[r#"
+ 8..9 'a': u32
+ 16..17 'b': isize
+ 26..27 'c': !
+ 32..33 'd': &str
+ 41..120 '{ ...f32; }': ()
+ 47..48 'a': u32
+ 54..55 'b': isize
+ 61..62 'c': !
+ 68..69 'd': &str
+ 75..81 '1usize': usize
+ 87..93 '1isize': isize
+ 99..105 '"test"': &str
+ 111..117 '1.0f32': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_let() {
+ check_infer(
+ r#"
+fn test() {
+ let a = 1isize;
+ let b: usize = 1;
+ let c = b;
+ let d: u32;
+ let e;
+ let f: i32 = e;
+}
+"#,
+ expect![[r#"
+ 10..117 '{ ...= e; }': ()
+ 20..21 'a': isize
+ 24..30 '1isize': isize
+ 40..41 'b': usize
+ 51..52 '1': usize
+ 62..63 'c': usize
+ 66..67 'b': usize
+ 77..78 'd': u32
+ 93..94 'e': i32
+ 104..105 'f': i32
+ 113..114 'e': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paths() {
+ check_infer(
+ r#"
+fn a() -> u32 { 1 }
+
+mod b {
+ fn c() -> u32 { 1 }
+}
+
+fn test() {
+ a();
+ b::c();
+}
+"#,
+ expect![[r#"
+ 14..19 '{ 1 }': u32
+ 16..17 '1': u32
+ 47..52 '{ 1 }': u32
+ 49..50 '1': u32
+ 66..90 '{ ...c(); }': ()
+ 72..73 'a': fn a() -> u32
+ 72..75 'a()': u32
+ 81..85 'b::c': fn c() -> u32
+ 81..87 'b::c()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_type() {
+ check_infer(
+ r#"
+struct S;
+
+impl S {
+ fn foo() -> i32 { 1 }
+}
+
+fn test() {
+ S::foo();
+ <S>::foo();
+}
+"#,
+ expect![[r#"
+ 40..45 '{ 1 }': i32
+ 42..43 '1': i32
+ 59..92 '{ ...o(); }': ()
+ 65..71 'S::foo': fn foo() -> i32
+ 65..73 'S::foo()': i32
+ 79..87 '<S>::foo': fn foo() -> i32
+ 79..89 '<S>::foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct() {
+ check_infer(
+ r#"
+struct A {
+ b: B,
+ c: C,
+}
+struct B;
+struct C(usize);
+
+fn test() {
+ let c = C(1);
+ B;
+ let a: A = A { b: B, c: C(1) };
+ a.b;
+ a.c;
+}
+"#,
+ expect![[r#"
+ 71..153 '{ ...a.c; }': ()
+ 81..82 'c': C
+ 85..86 'C': C(usize) -> C
+ 85..89 'C(1)': C
+ 87..88 '1': usize
+ 95..96 'B': B
+ 106..107 'a': A
+ 113..132 'A { b:...C(1) }': A
+ 120..121 'B': B
+ 126..127 'C': C(usize) -> C
+ 126..130 'C(1)': C
+ 128..129 '1': usize
+ 138..139 'a': A
+ 138..141 'a.b': B
+ 147..148 'a': A
+ 147..150 'a.c': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_enum() {
+ check_infer(
+ r#"
+enum E {
+ V1 { field: u32 },
+ V2
+}
+fn test() {
+ E::V1 { field: 1 };
+ E::V2;
+}
+"#,
+ expect![[r#"
+ 51..89 '{ ...:V2; }': ()
+ 57..75 'E::V1 ...d: 1 }': E
+ 72..73 '1': u32
+ 81..86 'E::V2': E
+ "#]],
+ );
+}
+
+#[test]
+fn infer_union() {
+ check_infer(
+ r#"
+union MyUnion {
+ foo: u32,
+ bar: f32,
+}
+
+fn test() {
+ let u = MyUnion { foo: 0 };
+ unsafe { baz(u); }
+ let u = MyUnion { bar: 0.0 };
+ unsafe { baz(u); }
+}
+
+unsafe fn baz(u: MyUnion) {
+ let inner = u.foo;
+ let inner = u.bar;
+}
+"#,
+ expect![[r#"
+ 57..172 '{ ...); } }': ()
+ 67..68 'u': MyUnion
+ 71..89 'MyUnio...o: 0 }': MyUnion
+ 86..87 '0': u32
+ 95..113 'unsafe...(u); }': ()
+ 95..113 'unsafe...(u); }': ()
+ 104..107 'baz': fn baz(MyUnion)
+ 104..110 'baz(u)': ()
+ 108..109 'u': MyUnion
+ 122..123 'u': MyUnion
+ 126..146 'MyUnio... 0.0 }': MyUnion
+ 141..144 '0.0': f32
+ 152..170 'unsafe...(u); }': ()
+ 152..170 'unsafe...(u); }': ()
+ 161..164 'baz': fn baz(MyUnion)
+ 161..167 'baz(u)': ()
+ 165..166 'u': MyUnion
+ 188..189 'u': MyUnion
+ 200..249 '{ ...bar; }': ()
+ 210..215 'inner': u32
+ 218..219 'u': MyUnion
+ 218..223 'u.foo': u32
+ 233..238 'inner': f32
+ 241..242 'u': MyUnion
+ 241..246 'u.bar': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_refs() {
+ check_infer(
+ r#"
+fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
+ a;
+ *a;
+ &a;
+ &mut a;
+ b;
+ *b;
+ &b;
+ c;
+ *c;
+ d;
+ *d;
+}
+ "#,
+ expect![[r#"
+ 8..9 'a': &u32
+ 17..18 'b': &mut u32
+ 30..31 'c': *const u32
+ 45..46 'd': *mut u32
+ 58..149 '{ ... *d; }': ()
+ 64..65 'a': &u32
+ 71..73 '*a': u32
+ 72..73 'a': &u32
+ 79..81 '&a': &&u32
+ 80..81 'a': &u32
+ 87..93 '&mut a': &mut &u32
+ 92..93 'a': &u32
+ 99..100 'b': &mut u32
+ 106..108 '*b': u32
+ 107..108 'b': &mut u32
+ 114..116 '&b': &&mut u32
+ 115..116 'b': &mut u32
+ 122..123 'c': *const u32
+ 129..131 '*c': u32
+ 130..131 'c': *const u32
+ 137..138 'd': *mut u32
+ 144..146 '*d': u32
+ 145..146 'd': *mut u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_raw_ref() {
+ check_infer(
+ r#"
+fn test(a: i32) {
+ &raw mut a;
+ &raw const a;
+}
+"#,
+ expect![[r#"
+ 8..9 'a': i32
+ 16..53 '{ ...t a; }': ()
+ 22..32 '&raw mut a': *mut i32
+ 31..32 'a': i32
+ 38..50 '&raw const a': *const i32
+ 49..50 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literals() {
+ check_infer(
+ r##"
+ fn test() {
+ 5i32;
+ 5f32;
+ 5f64;
+ "hello";
+ b"bytes";
+ 'c';
+ b'b';
+ 3.14;
+ 5000;
+ false;
+ true;
+ r#"
+ //! doc
+ // non-doc
+ mod foo {}
+ "#;
+ br#"yolo"#;
+ let a = b"a\x20b\
+ c";
+ let b = br"g\
+h";
+ let c = br#"x"\"yb"#;
+ }
+ "##,
+ expect![[r##"
+ 18..478 '{ ... }': ()
+ 32..36 '5i32': i32
+ 50..54 '5f32': f32
+ 68..72 '5f64': f64
+ 86..93 '"hello"': &str
+ 107..115 'b"bytes"': &[u8; 5]
+ 129..132 ''c'': char
+ 146..150 'b'b'': u8
+ 164..168 '3.14': f64
+ 182..186 '5000': i32
+ 200..205 'false': bool
+ 219..223 'true': bool
+ 237..333 'r#" ... "#': &str
+ 347..357 'br#"yolo"#': &[u8; 4]
+ 375..376 'a': &[u8; 4]
+ 379..403 'b"a\x2... c"': &[u8; 4]
+ 421..422 'b': &[u8; 4]
+ 425..433 'br"g\ h"': &[u8; 4]
+ 451..452 'c': &[u8; 6]
+ 455..467 'br#"x"\"yb"#': &[u8; 6]
+ "##]],
+ );
+}
+
+#[test]
+fn infer_unary_op() {
+ check_infer(
+ r#"
+enum SomeType {}
+
+fn test(x: SomeType) {
+ let b = false;
+ let c = !b;
+ let a = 100;
+ let d: i128 = -a;
+ let e = -100;
+ let f = !!!true;
+ let g = !42;
+ let h = !10u32;
+ let j = !a;
+ -3.14;
+ !3;
+ -x;
+ !x;
+ -"hello";
+ !"hello";
+}
+"#,
+ expect![[r#"
+ 26..27 'x': SomeType
+ 39..271 '{ ...lo"; }': ()
+ 49..50 'b': bool
+ 53..58 'false': bool
+ 68..69 'c': bool
+ 72..74 '!b': bool
+ 73..74 'b': bool
+ 84..85 'a': i128
+ 88..91 '100': i128
+ 101..102 'd': i128
+ 111..113 '-a': i128
+ 112..113 'a': i128
+ 123..124 'e': i32
+ 127..131 '-100': i32
+ 128..131 '100': i32
+ 141..142 'f': bool
+ 145..152 '!!!true': bool
+ 146..152 '!!true': bool
+ 147..152 '!true': bool
+ 148..152 'true': bool
+ 162..163 'g': i32
+ 166..169 '!42': i32
+ 167..169 '42': i32
+ 179..180 'h': u32
+ 183..189 '!10u32': u32
+ 184..189 '10u32': u32
+ 199..200 'j': i128
+ 203..205 '!a': i128
+ 204..205 'a': i128
+ 211..216 '-3.14': f64
+ 212..216 '3.14': f64
+ 222..224 '!3': i32
+ 223..224 '3': i32
+ 230..232 '-x': {unknown}
+ 231..232 'x': SomeType
+ 238..240 '!x': {unknown}
+ 239..240 'x': SomeType
+ 246..254 '-"hello"': {unknown}
+ 247..254 '"hello"': &str
+ 260..268 '!"hello"': {unknown}
+ 261..268 '"hello"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_backwards() {
+ check_infer(
+ r#"
+fn takes_u32(x: u32) {}
+
+struct S { i32_field: i32 }
+
+fn test() -> &mut &f64 {
+ let a = unknown_function();
+ takes_u32(a);
+ let b = unknown_function();
+ S { i32_field: b };
+ let c = unknown_function();
+ &mut &c
+}
+"#,
+ expect![[r#"
+ 13..14 'x': u32
+ 21..23 '{}': ()
+ 77..230 '{ ...t &c }': &mut &f64
+ 87..88 'a': u32
+ 91..107 'unknow...nction': {unknown}
+ 91..109 'unknow...tion()': u32
+ 115..124 'takes_u32': fn takes_u32(u32)
+ 115..127 'takes_u32(a)': ()
+ 125..126 'a': u32
+ 137..138 'b': i32
+ 141..157 'unknow...nction': {unknown}
+ 141..159 'unknow...tion()': i32
+ 165..183 'S { i3...d: b }': S
+ 180..181 'b': i32
+ 193..194 'c': f64
+ 197..213 'unknow...nction': {unknown}
+ 197..215 'unknow...tion()': f64
+ 221..228 '&mut &c': &mut &f64
+ 226..228 '&c': &f64
+ 227..228 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self() {
+ check_infer(
+ r#"
+struct S;
+
+impl S {
+ fn test(&self) {
+ self;
+ }
+ fn test2(self: &Self) {
+ self;
+ }
+ fn test3() -> Self {
+ S {}
+ }
+ fn test4() -> Self {
+ Self {}
+ }
+}
+"#,
+ expect![[r#"
+ 33..37 'self': &S
+ 39..60 '{ ... }': ()
+ 49..53 'self': &S
+ 74..78 'self': &S
+ 87..108 '{ ... }': ()
+ 97..101 'self': &S
+ 132..152 '{ ... }': S
+ 142..146 'S {}': S
+ 176..199 '{ ... }': S
+ 186..193 'Self {}': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self_as_path() {
+ check_infer(
+ r#"
+struct S1;
+struct S2(isize);
+enum E {
+ V1,
+ V2(u32),
+}
+
+impl S1 {
+ fn test() {
+ Self;
+ }
+}
+impl S2 {
+ fn test() {
+ Self(1);
+ }
+}
+impl E {
+ fn test() {
+ Self::V1;
+ Self::V2(1);
+ }
+}
+"#,
+ expect![[r#"
+ 86..107 '{ ... }': ()
+ 96..100 'Self': S1
+ 134..158 '{ ... }': ()
+ 144..148 'Self': S2(isize) -> S2
+ 144..151 'Self(1)': S2
+ 149..150 '1': isize
+ 184..230 '{ ... }': ()
+ 194..202 'Self::V1': E
+ 212..220 'Self::V2': V2(u32) -> E
+ 212..223 'Self::V2(1)': E
+ 221..222 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_binary_op() {
+ check_infer(
+ r#"
+fn f(x: bool) -> i32 {
+ 0i32
+}
+
+fn test() -> bool {
+ let x = a && b;
+ let y = true || false;
+ let z = x == y;
+ let t = x != y;
+ let minus_forty: isize = -40isize;
+ let h = minus_forty <= CONST_2;
+ let c = f(z || y) + 5;
+ let d = b;
+ let g = minus_forty ^= i;
+ let ten: usize = 10;
+ let ten_is_eleven = ten == some_num;
+
+ ten < 3
+}
+"#,
+ expect![[r#"
+ 5..6 'x': bool
+ 21..33 '{ 0i32 }': i32
+ 27..31 '0i32': i32
+ 53..369 '{ ... < 3 }': bool
+ 63..64 'x': bool
+ 67..68 'a': bool
+ 67..73 'a && b': bool
+ 72..73 'b': bool
+ 83..84 'y': bool
+ 87..91 'true': bool
+ 87..100 'true || false': bool
+ 95..100 'false': bool
+ 110..111 'z': bool
+ 114..115 'x': bool
+ 114..120 'x == y': bool
+ 119..120 'y': bool
+ 130..131 't': bool
+ 134..135 'x': bool
+ 134..140 'x != y': bool
+ 139..140 'y': bool
+ 150..161 'minus_forty': isize
+ 171..179 '-40isize': isize
+ 172..179 '40isize': isize
+ 189..190 'h': bool
+ 193..204 'minus_forty': isize
+ 193..215 'minus_...ONST_2': bool
+ 208..215 'CONST_2': isize
+ 225..226 'c': i32
+ 229..230 'f': fn f(bool) -> i32
+ 229..238 'f(z || y)': i32
+ 229..242 'f(z || y) + 5': i32
+ 231..232 'z': bool
+ 231..237 'z || y': bool
+ 236..237 'y': bool
+ 241..242 '5': i32
+ 252..253 'd': {unknown}
+ 256..257 'b': {unknown}
+ 267..268 'g': ()
+ 271..282 'minus_forty': isize
+ 271..287 'minus_...y ^= i': ()
+ 286..287 'i': isize
+ 297..300 'ten': usize
+ 310..312 '10': usize
+ 322..335 'ten_is_eleven': bool
+ 338..341 'ten': usize
+ 338..353 'ten == some_num': bool
+ 345..353 'some_num': usize
+ 360..363 'ten': usize
+ 360..367 'ten < 3': bool
+ 366..367 '3': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_shift_op() {
+ check_infer(
+ r#"
+fn test() {
+ 1u32 << 5u8;
+ 1u32 >> 5u8;
+}
+"#,
+ expect![[r#"
+ 10..47 '{ ...5u8; }': ()
+ 16..20 '1u32': u32
+ 16..27 '1u32 << 5u8': u32
+ 24..27 '5u8': u8
+ 33..37 '1u32': u32
+ 33..44 '1u32 >> 5u8': u32
+ 41..44 '5u8': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_field_autoderef() {
+ check_infer(
+ r#"
+struct A {
+ b: B,
+}
+struct B;
+
+fn test1(a: A) {
+ let a1 = a;
+ a1.b;
+ let a2 = &a;
+ a2.b;
+ let a3 = &mut a;
+ a3.b;
+ let a4 = &&&&&&&a;
+ a4.b;
+ let a5 = &mut &&mut &&mut a;
+ a5.b;
+}
+
+fn test2(a1: *const A, a2: *mut A) {
+ a1.b;
+ a2.b;
+}
+"#,
+ expect![[r#"
+ 43..44 'a': A
+ 49..212 '{ ...5.b; }': ()
+ 59..61 'a1': A
+ 64..65 'a': A
+ 71..73 'a1': A
+ 71..75 'a1.b': B
+ 85..87 'a2': &A
+ 90..92 '&a': &A
+ 91..92 'a': A
+ 98..100 'a2': &A
+ 98..102 'a2.b': B
+ 112..114 'a3': &mut A
+ 117..123 '&mut a': &mut A
+ 122..123 'a': A
+ 129..131 'a3': &mut A
+ 129..133 'a3.b': B
+ 143..145 'a4': &&&&&&&A
+ 148..156 '&&&&&&&a': &&&&&&&A
+ 149..156 '&&&&&&a': &&&&&&A
+ 150..156 '&&&&&a': &&&&&A
+ 151..156 '&&&&a': &&&&A
+ 152..156 '&&&a': &&&A
+ 153..156 '&&a': &&A
+ 154..156 '&a': &A
+ 155..156 'a': A
+ 162..164 'a4': &&&&&&&A
+ 162..166 'a4.b': B
+ 176..178 'a5': &mut &&mut &&mut A
+ 181..199 '&mut &...&mut a': &mut &&mut &&mut A
+ 186..199 '&&mut &&mut a': &&mut &&mut A
+ 187..199 '&mut &&mut a': &mut &&mut A
+ 192..199 '&&mut a': &&mut A
+ 193..199 '&mut a': &mut A
+ 198..199 'a': A
+ 205..207 'a5': &mut &&mut &&mut A
+ 205..209 'a5.b': B
+ 223..225 'a1': *const A
+ 237..239 'a2': *mut A
+ 249..272 '{ ...2.b; }': ()
+ 255..257 'a1': *const A
+ 255..259 'a1.b': B
+ 265..267 'a2': *mut A
+ 265..269 'a2.b': B
+ "#]],
+ );
+}
+
+#[test]
+fn infer_argument_autoderef() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A<T>(T);
+
+impl<T> A<T> {
+ fn foo(&self) -> &T {
+ &self.0
+ }
+}
+
+struct B<T>(T);
+
+impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test() {
+ let t = A::foo(&&B(B(A(42))));
+}
+"#,
+ expect![[r#"
+ 66..70 'self': &A<T>
+ 78..101 '{ ... }': &T
+ 88..95 '&self.0': &T
+ 89..93 'self': &A<T>
+ 89..95 'self.0': T
+ 182..186 'self': &B<T>
+ 205..228 '{ ... }': &T
+ 215..222 '&self.0': &T
+ 216..220 'self': &B<T>
+ 216..222 'self.0': T
+ 242..280 '{ ...))); }': ()
+ 252..253 't': &i32
+ 256..262 'A::foo': fn foo<i32>(&A<i32>) -> &i32
+ 256..277 'A::foo...42))))': &i32
+ 263..276 '&&B(B(A(42)))': &&B<B<A<i32>>>
+ 264..276 '&B(B(A(42)))': &B<B<A<i32>>>
+ 265..266 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 265..276 'B(B(A(42)))': B<B<A<i32>>>
+ 267..268 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 267..275 'B(A(42))': B<A<i32>>
+ 269..270 'A': A<i32>(i32) -> A<i32>
+ 269..274 'A(42)': A<i32>
+ 271..273 '42': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_method_argument_autoderef() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A<T>(*mut T);
+
+impl<T> A<T> {
+ fn foo(&self, x: &A<T>) -> &T {
+ &*x.0
+ }
+}
+
+struct B<T>(T);
+
+impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test(a: A<i32>) {
+ let t = A(0 as *mut _).foo(&&B(B(a)));
+}
+"#,
+ expect![[r#"
+ 71..75 'self': &A<T>
+ 77..78 'x': &A<T>
+ 93..114 '{ ... }': &T
+ 103..108 '&*x.0': &T
+ 104..108 '*x.0': T
+ 105..106 'x': &A<T>
+ 105..108 'x.0': *mut T
+ 195..199 'self': &B<T>
+ 218..241 '{ ... }': &T
+ 228..235 '&self.0': &T
+ 229..233 'self': &B<T>
+ 229..235 'self.0': T
+ 253..254 'a': A<i32>
+ 264..310 '{ ...))); }': ()
+ 274..275 't': &i32
+ 278..279 'A': A<i32>(*mut i32) -> A<i32>
+ 278..292 'A(0 as *mut _)': A<i32>
+ 278..307 'A(0 as...B(a)))': &i32
+ 280..281 '0': i32
+ 280..291 '0 as *mut _': *mut i32
+ 297..306 '&&B(B(a))': &&B<B<A<i32>>>
+ 298..306 '&B(B(a))': &B<B<A<i32>>>
+ 299..300 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 299..306 'B(B(a))': B<B<A<i32>>>
+ 301..302 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 301..305 'B(a)': B<A<i32>>
+ 303..304 'a': A<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_in_elseif() {
+ check_infer(
+ r#"
+struct Foo { field: i32 }
+fn main(foo: Foo) {
+ if true {
+
+ } else if false {
+ foo.field
+ }
+}
+"#,
+ expect![[r#"
+ 34..37 'foo': Foo
+ 44..108 '{ ... } }': ()
+ 50..106 'if tru... }': ()
+ 53..57 'true': bool
+ 58..66 '{ }': ()
+ 72..106 'if fal... }': ()
+ 75..80 'false': bool
+ 81..106 '{ ... }': ()
+ 91..94 'foo': Foo
+ 91..100 'foo.field': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_if_match_with_return() {
+ check_infer(
+ r#"
+fn foo() {
+ let _x1 = if true {
+ 1
+ } else {
+ return;
+ };
+ let _x2 = if true {
+ 2
+ } else {
+ return
+ };
+ let _x3 = match true {
+ true => 3,
+ _ => {
+ return;
+ }
+ };
+ let _x4 = match true {
+ true => 4,
+ _ => return
+ };
+}
+"#,
+ expect![[r#"
+ 9..322 '{ ... }; }': ()
+ 19..22 '_x1': i32
+ 25..79 'if tru... }': i32
+ 28..32 'true': bool
+ 33..50 '{ ... }': i32
+ 43..44 '1': i32
+ 56..79 '{ ... }': i32
+ 66..72 'return': !
+ 89..92 '_x2': i32
+ 95..148 'if tru... }': i32
+ 98..102 'true': bool
+ 103..120 '{ ... }': i32
+ 113..114 '2': i32
+ 126..148 '{ ... }': !
+ 136..142 'return': !
+ 158..161 '_x3': i32
+ 164..246 'match ... }': i32
+ 170..174 'true': bool
+ 185..189 'true': bool
+ 185..189 'true': bool
+ 193..194 '3': i32
+ 204..205 '_': bool
+ 209..240 '{ ... }': i32
+ 223..229 'return': !
+ 256..259 '_x4': i32
+ 262..319 'match ... }': i32
+ 268..272 'true': bool
+ 283..287 'true': bool
+ 283..287 'true': bool
+ 291..292 '4': i32
+ 302..303 '_': bool
+ 307..313 'return': !
+ "#]],
+ )
+}
+
+#[test]
+fn infer_inherent_method() {
+ check_infer(
+ r#"
+ struct A;
+
+ impl A {
+ fn foo(self, x: u32) -> i32 {}
+ }
+
+ mod b {
+ impl super::A {
+ pub fn bar(&self, x: u64) -> i64 {}
+ }
+ }
+
+ fn test(a: A) {
+ a.foo(1);
+ (&a).bar(1);
+ a.bar(1);
+ }
+ "#,
+ expect![[r#"
+ 31..35 'self': A
+ 37..38 'x': u32
+ 52..54 '{}': i32
+ 106..110 'self': &A
+ 112..113 'x': u64
+ 127..129 '{}': i64
+ 147..148 'a': A
+ 153..201 '{ ...(1); }': ()
+ 159..160 'a': A
+ 159..167 'a.foo(1)': i32
+ 165..166 '1': u32
+ 173..184 '(&a).bar(1)': i64
+ 174..176 '&a': &A
+ 175..176 'a': A
+ 182..183 '1': u64
+ 190..191 'a': A
+ 190..198 'a.bar(1)': i64
+ 196..197 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inherent_method_str() {
+ check_infer(
+ r#"
+ #[lang = "str"]
+ impl str {
+ fn foo(&self) -> i32 {}
+ }
+
+ fn test() {
+ "foo".foo();
+ }
+ "#,
+ expect![[r#"
+ 39..43 'self': &str
+ 52..54 '{}': i32
+ 68..88 '{ ...o(); }': ()
+ 74..79 '"foo"': &str
+ 74..85 '"foo".foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a: (u32, &str) = (1, "a");
+ let b = (a, x);
+ let c = (y, x);
+ let d = (c, x);
+ let e = (1, "e");
+ let f = (e, "d");
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..169 '{ ...d"); }': ()
+ 37..38 'a': (u32, &str)
+ 54..62 '(1, "a")': (u32, &str)
+ 55..56 '1': u32
+ 58..61 '"a"': &str
+ 72..73 'b': ((u32, &str), &str)
+ 76..82 '(a, x)': ((u32, &str), &str)
+ 77..78 'a': (u32, &str)
+ 80..81 'x': &str
+ 92..93 'c': (isize, &str)
+ 96..102 '(y, x)': (isize, &str)
+ 97..98 'y': isize
+ 100..101 'x': &str
+ 112..113 'd': ((isize, &str), &str)
+ 116..122 '(c, x)': ((isize, &str), &str)
+ 117..118 'c': (isize, &str)
+ 120..121 'x': &str
+ 132..133 'e': (i32, &str)
+ 136..144 '(1, "e")': (i32, &str)
+ 137..138 '1': i32
+ 140..143 '"e"': &str
+ 154..155 'f': ((i32, &str), &str)
+ 158..166 '(e, "d")': ((i32, &str), &str)
+ 159..160 'e': (i32, &str)
+ 162..165 '"d"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a = [x];
+ let b = [a, a];
+ let c = [b, b];
+
+ let d = [y, 1, 2, 3];
+ let d = [1, y, 2, 3];
+ let e = [y];
+ let f = [d, d];
+ let g = [e, e];
+
+ let h = [1, 2];
+ let i = ["a", "b"];
+
+ let b = [a, ["b"]];
+ let x: [u8; 0] = [];
+ let y: [u8; 2+2] = [1,2,3,4];
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..326 '{ ...,4]; }': ()
+ 37..38 'a': [&str; 1]
+ 41..44 '[x]': [&str; 1]
+ 42..43 'x': &str
+ 54..55 'b': [[&str; 1]; 2]
+ 58..64 '[a, a]': [[&str; 1]; 2]
+ 59..60 'a': [&str; 1]
+ 62..63 'a': [&str; 1]
+ 74..75 'c': [[[&str; 1]; 2]; 2]
+ 78..84 '[b, b]': [[[&str; 1]; 2]; 2]
+ 79..80 'b': [[&str; 1]; 2]
+ 82..83 'b': [[&str; 1]; 2]
+ 95..96 'd': [isize; 4]
+ 99..111 '[y, 1, 2, 3]': [isize; 4]
+ 100..101 'y': isize
+ 103..104 '1': isize
+ 106..107 '2': isize
+ 109..110 '3': isize
+ 121..122 'd': [isize; 4]
+ 125..137 '[1, y, 2, 3]': [isize; 4]
+ 126..127 '1': isize
+ 129..130 'y': isize
+ 132..133 '2': isize
+ 135..136 '3': isize
+ 147..148 'e': [isize; 1]
+ 151..154 '[y]': [isize; 1]
+ 152..153 'y': isize
+ 164..165 'f': [[isize; 4]; 2]
+ 168..174 '[d, d]': [[isize; 4]; 2]
+ 169..170 'd': [isize; 4]
+ 172..173 'd': [isize; 4]
+ 184..185 'g': [[isize; 1]; 2]
+ 188..194 '[e, e]': [[isize; 1]; 2]
+ 189..190 'e': [isize; 1]
+ 192..193 'e': [isize; 1]
+ 205..206 'h': [i32; 2]
+ 209..215 '[1, 2]': [i32; 2]
+ 210..211 '1': i32
+ 213..214 '2': i32
+ 225..226 'i': [&str; 2]
+ 229..239 '["a", "b"]': [&str; 2]
+ 230..233 '"a"': &str
+ 235..238 '"b"': &str
+ 250..251 'b': [[&str; 1]; 2]
+ 254..264 '[a, ["b"]]': [[&str; 1]; 2]
+ 255..256 'a': [&str; 1]
+ 258..263 '["b"]': [&str; 1]
+ 259..262 '"b"': &str
+ 274..275 'x': [u8; 0]
+ 287..289 '[]': [u8; 0]
+ 299..300 'y': [u8; 4]
+ 314..323 '[1,2,3,4]': [u8; 4]
+ 315..316 '1': u8
+ 317..318 '2': u8
+ 319..320 '3': u8
+ 321..322 '4': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ fn test(a1: A<u32>, i: i32) {
+ a1.x;
+ let a2 = A { x: i };
+ a2.x;
+ let a3 = A::<i128> { x: 1 };
+ a3.x;
+ }
+ "#,
+ expect![[r#"
+ 35..37 'a1': A<u32>
+ 47..48 'i': i32
+ 55..146 '{ ...3.x; }': ()
+ 61..63 'a1': A<u32>
+ 61..65 'a1.x': u32
+ 75..77 'a2': A<i32>
+ 80..90 'A { x: i }': A<i32>
+ 87..88 'i': i32
+ 96..98 'a2': A<i32>
+ 96..100 'a2.x': i32
+ 110..112 'a3': A<i128>
+ 115..133 'A::<i1...x: 1 }': A<i128>
+ 130..131 '1': i128
+ 139..141 'a3': A<i128>
+ 139..143 'a3.x': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ A(42);
+ A(42u128);
+ Some("x");
+ Option::Some("x");
+ None;
+ let x: Option<i64> = None;
+ }
+ "#,
+ expect![[r#"
+ 75..183 '{ ...one; }': ()
+ 81..82 'A': A<i32>(i32) -> A<i32>
+ 81..86 'A(42)': A<i32>
+ 83..85 '42': i32
+ 92..93 'A': A<u128>(u128) -> A<u128>
+ 92..101 'A(42u128)': A<u128>
+ 94..100 '42u128': u128
+ 107..111 'Some': Some<&str>(&str) -> Option<&str>
+ 107..116 'Some("x")': Option<&str>
+ 112..115 '"x"': &str
+ 122..134 'Option::Some': Some<&str>(&str) -> Option<&str>
+ 122..139 'Option...e("x")': Option<&str>
+ 135..138 '"x"': &str
+ 145..149 'None': Option<{unknown}>
+ 159..160 'x': Option<i64>
+ 176..180 'None': Option<i64>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_function_generics() {
+ check_infer(
+ r#"
+ fn id<T>(t: T) -> T { t }
+
+ fn test() {
+ id(1u32);
+ id::<i128>(1);
+ let x: u64 = id(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 't': T
+ 20..25 '{ t }': T
+ 22..23 't': T
+ 37..97 '{ ...(1); }': ()
+ 43..45 'id': fn id<u32>(u32) -> u32
+ 43..51 'id(1u32)': u32
+ 46..50 '1u32': u32
+ 57..67 'id::<i128>': fn id<i128>(i128) -> i128
+ 57..70 'id::<i128>(1)': i128
+ 68..69 '1': i128
+ 80..81 'x': u64
+ 89..91 'id': fn id<u64>(u64) -> u64
+ 89..94 'id(1)': u64
+ 92..93 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_basic() {
+ check_infer(
+ r#"
+ struct A<T1, T2> {
+ x: T1,
+ y: T2,
+ }
+ impl<Y, X> A<X, Y> {
+ fn x(self) -> X {
+ self.x
+ }
+ fn y(self) -> Y {
+ self.y
+ }
+ fn z<T>(self, t: T) -> (X, Y, T) {
+ (self.x, self.y, t)
+ }
+ }
+
+ fn test() -> i128 {
+ let a = A { x: 1u64, y: 1i64 };
+ a.x();
+ a.y();
+ a.z(1i128);
+ a.z::<u128>(1);
+ }
+ "#,
+ expect![[r#"
+ 73..77 'self': A<X, Y>
+ 84..106 '{ ... }': X
+ 94..98 'self': A<X, Y>
+ 94..100 'self.x': X
+ 116..120 'self': A<X, Y>
+ 127..149 '{ ... }': Y
+ 137..141 'self': A<X, Y>
+ 137..143 'self.y': Y
+ 162..166 'self': A<X, Y>
+ 168..169 't': T
+ 187..222 '{ ... }': (X, Y, T)
+ 197..216 '(self.....y, t)': (X, Y, T)
+ 198..202 'self': A<X, Y>
+ 198..204 'self.x': X
+ 206..210 'self': A<X, Y>
+ 206..212 'self.y': Y
+ 214..215 't': T
+ 244..341 '{ ...(1); }': i128
+ 254..255 'a': A<u64, i64>
+ 258..280 'A { x:...1i64 }': A<u64, i64>
+ 265..269 '1u64': u64
+ 274..278 '1i64': i64
+ 286..287 'a': A<u64, i64>
+ 286..291 'a.x()': u64
+ 297..298 'a': A<u64, i64>
+ 297..302 'a.y()': i64
+ 308..309 'a': A<u64, i64>
+ 308..318 'a.z(1i128)': (u64, i64, i128)
+ 312..317 '1i128': i128
+ 324..325 'a': A<u64, i64>
+ 324..338 'a.z::<u128>(1)': (u64, i64, u128)
+ 336..337 '1': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_with_autoderef() {
+ check_infer(
+ r#"
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+ impl<T> Option<T> {
+ fn as_ref(&self) -> Option<&T> {}
+ }
+ fn test(o: Option<u32>) {
+ (&o).as_ref();
+ o.as_ref();
+ }
+ "#,
+ expect![[r#"
+ 77..81 'self': &Option<T>
+ 97..99 '{}': Option<&T>
+ 110..111 'o': Option<u32>
+ 126..164 '{ ...f(); }': ()
+ 132..145 '(&o).as_ref()': Option<&u32>
+ 133..135 '&o': &Option<u32>
+ 134..135 'o': Option<u32>
+ 151..152 'o': Option<u32>
+ 151..161 'o.as_ref()': Option<&u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generic_chain() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+ impl<T2> A<T2> {
+ fn x(self) -> T2 {
+ self.x
+ }
+ }
+ fn id<T>(t: T) -> T { t }
+
+ fn test() -> i128 {
+ let x = 1;
+ let y = id(x);
+ let a = A { x: id(y) };
+ let z = id(a.x);
+ let b = A { x: z };
+ b.x()
+ }
+ "#,
+ expect![[r#"
+ 52..56 'self': A<T2>
+ 64..86 '{ ... }': T2
+ 74..78 'self': A<T2>
+ 74..80 'self.x': T2
+ 98..99 't': T
+ 109..114 '{ t }': T
+ 111..112 't': T
+ 134..254 '{ ....x() }': i128
+ 144..145 'x': i128
+ 148..149 '1': i128
+ 159..160 'y': i128
+ 163..165 'id': fn id<i128>(i128) -> i128
+ 163..168 'id(x)': i128
+ 166..167 'x': i128
+ 178..179 'a': A<i128>
+ 182..196 'A { x: id(y) }': A<i128>
+ 189..191 'id': fn id<i128>(i128) -> i128
+ 189..194 'id(y)': i128
+ 192..193 'y': i128
+ 206..207 'z': i128
+ 210..212 'id': fn id<i128>(i128) -> i128
+ 210..217 'id(a.x)': i128
+ 213..214 'a': A<i128>
+ 213..216 'a.x': i128
+ 227..228 'b': A<i128>
+ 231..241 'A { x: z }': A<i128>
+ 238..239 'z': i128
+ 247..248 'b': A<i128>
+ 247..252 'b.x()': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_const() {
+ check_infer(
+ r#"
+ struct Struct;
+
+ impl Struct {
+ const FOO: u32 = 1;
+ }
+
+ enum Enum {}
+
+ impl Enum {
+ const BAR: u32 = 2;
+ }
+
+ trait Trait {
+ const ID: u32;
+ }
+
+ struct TraitTest;
+
+ impl Trait for TraitTest {
+ const ID: u32 = 5;
+ }
+
+ fn test() {
+ let x = Struct::FOO;
+ let y = Enum::BAR;
+ let z = TraitTest::ID;
+ }
+ "#,
+ expect![[r#"
+ 51..52 '1': u32
+ 104..105 '2': u32
+ 212..213 '5': u32
+ 228..306 '{ ...:ID; }': ()
+ 238..239 'x': u32
+ 242..253 'Struct::FOO': u32
+ 263..264 'y': u32
+ 267..276 'Enum::BAR': u32
+ 286..287 'z': u32
+ 290..303 'TraitTest::ID': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_alias() {
+ check_infer(
+ r#"
+ struct A<X, Y> { x: X, y: Y }
+ type Foo = A<u32, i128>;
+ type Bar<T> = A<T, u128>;
+ type Baz<U, V> = A<V, U>;
+ fn test(x: Foo, y: Bar<&str>, z: Baz<i8, u8>) {
+ x.x;
+ x.y;
+ y.x;
+ y.y;
+ z.x;
+ z.y;
+ }
+ mod m {
+ pub enum Enum {
+ Foo(u8),
+ }
+ pub type Alias = Enum;
+ }
+ fn f() {
+ let e = m::Alias::Foo(0);
+ let m::Alias::Foo(x) = &e;
+ }
+ "#,
+ expect![[r#"
+ 115..116 'x': A<u32, i128>
+ 123..124 'y': A<&str, u128>
+ 137..138 'z': A<u8, i8>
+ 153..210 '{ ...z.y; }': ()
+ 159..160 'x': A<u32, i128>
+ 159..162 'x.x': u32
+ 168..169 'x': A<u32, i128>
+ 168..171 'x.y': i128
+ 177..178 'y': A<&str, u128>
+ 177..180 'y.x': &str
+ 186..187 'y': A<&str, u128>
+ 186..189 'y.y': u128
+ 195..196 'z': A<u8, i8>
+ 195..198 'z.x': u8
+ 204..205 'z': A<u8, i8>
+ 204..207 'z.y': i8
+ 298..362 '{ ... &e; }': ()
+ 308..309 'e': Enum
+ 312..325 'm::Alias::Foo': Foo(u8) -> Enum
+ 312..328 'm::Ali...Foo(0)': Enum
+ 326..327 '0': u8
+ 338..354 'm::Ali...Foo(x)': Enum
+ 352..353 'x': &u8
+ 357..359 '&e': &Enum
+ 358..359 'e': Enum
+ "#]],
+ )
+}
+
+#[test]
+fn recursive_type_alias() {
+ check_infer(
+ r#"
+ struct A<X> {}
+ type Foo = Foo;
+ type Bar = A<Bar>;
+ fn test(x: Foo) {}
+ "#,
+ expect![[r#"
+ 58..59 'x': {unknown}
+ 66..68 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_type_param() {
+ check_infer(
+ r#"
+ fn id<T>(x: T) -> T {
+ x
+ }
+
+ fn clone<T>(x: &T) -> T {
+ *x
+ }
+
+ fn test() {
+ let y = 10u32;
+ id(y);
+ let x: bool = clone(z);
+ id::<i128>(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 'x': T
+ 20..29 '{ x }': T
+ 26..27 'x': T
+ 43..44 'x': &T
+ 55..65 '{ *x }': T
+ 61..63 '*x': T
+ 62..63 'x': &T
+ 77..157 '{ ...(1); }': ()
+ 87..88 'y': u32
+ 91..96 '10u32': u32
+ 102..104 'id': fn id<u32>(u32) -> u32
+ 102..107 'id(y)': u32
+ 105..106 'y': u32
+ 117..118 'x': bool
+ 127..132 'clone': fn clone<bool>(&bool) -> bool
+ 127..135 'clone(z)': bool
+ 133..134 'z': &bool
+ 141..151 'id::<i128>': fn id<i128>(i128) -> i128
+ 141..154 'id::<i128>(1)': i128
+ 152..153 '1': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const() {
+ check_infer(
+ r#"
+ struct Foo;
+ impl Foo { const ASSOC_CONST: u32 = 0; }
+ const GLOBAL_CONST: u32 = 101;
+ fn test() {
+ const LOCAL_CONST: u32 = 99;
+ let x = LOCAL_CONST;
+ let z = GLOBAL_CONST;
+ let id = Foo::ASSOC_CONST;
+ }
+ "#,
+ expect![[r#"
+ 48..49 '0': u32
+ 79..82 '101': u32
+ 94..212 '{ ...NST; }': ()
+ 137..138 'x': u32
+ 141..152 'LOCAL_CONST': u32
+ 162..163 'z': u32
+ 166..178 'GLOBAL_CONST': u32
+ 188..190 'id': u32
+ 193..209 'Foo::A..._CONST': u32
+ 125..127 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_static() {
+ check_infer(
+ r#"
+ static GLOBAL_STATIC: u32 = 101;
+ static mut GLOBAL_STATIC_MUT: u32 = 101;
+ fn test() {
+ static LOCAL_STATIC: u32 = 99;
+ static mut LOCAL_STATIC_MUT: u32 = 99;
+ let x = LOCAL_STATIC;
+ let y = LOCAL_STATIC_MUT;
+ let z = GLOBAL_STATIC;
+ let w = GLOBAL_STATIC_MUT;
+ }
+ "#,
+ expect![[r#"
+ 28..31 '101': u32
+ 69..72 '101': u32
+ 84..279 '{ ...MUT; }': ()
+ 172..173 'x': u32
+ 176..188 'LOCAL_STATIC': u32
+ 198..199 'y': u32
+ 202..218 'LOCAL_...IC_MUT': u32
+ 228..229 'z': u32
+ 232..245 'GLOBAL_STATIC': u32
+ 255..256 'w': u32
+ 259..276 'GLOBAL...IC_MUT': u32
+ 117..119 '99': u32
+ 160..162 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn shadowing_primitive() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ let x: i32 = i32;
+ x.foo();
+ //^^^^^^^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn const_eval_array_repeat_expr() {
+ check_types(
+ r#"
+fn main() {
+ const X: usize = 6 - 1;
+ let t = [(); X + 2];
+ //^ [(); 7]
+}"#,
+ );
+}
+
+#[test]
+fn shadowing_primitive_with_inner_items() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ fn inner() {}
+ let x: i32 = i32;
+ x.foo();
+ //^^^^^^^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_primitive_by_module() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() {}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ foo();
+ //^^^^^ &str
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_module_by_primitive() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() -> u32 {0}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ str::foo();
+ //^^^^^^^^^^ u32
+}"#,
+ );
+}
+
+// This test is actually testing the shadowing behavior within hir_def. It
+// lives here because the testing infrastructure in hir_def isn't currently
+// capable of asserting the necessary conditions.
+#[test]
+fn should_be_shadowing_imports() {
+ check_types(
+ r#"
+mod a {
+ pub fn foo() -> i8 {0}
+ pub struct foo { a: i8 }
+}
+mod b { pub fn foo () -> u8 {0} }
+mod c { pub struct foo { a: u8 } }
+mod d {
+ pub use super::a::*;
+ pub use super::c::foo;
+ pub use super::b::foo;
+}
+
+fn main() {
+ d::foo();
+ //^^^^^^^^ u8
+ d::foo{a:0};
+ //^^^^^^^^^^^ foo
+}"#,
+ );
+}
+
+#[test]
+fn closure_return() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || -> usize { return 1; };
+ }
+ "#,
+ expect![[r#"
+ 16..58 '{ ...; }; }': u32
+ 26..27 'x': || -> usize
+ 30..55 '|| -> ...n 1; }': || -> usize
+ 42..55 '{ return 1; }': usize
+ 44..52 'return 1': !
+ 51..52 '1': usize
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_unit() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { return; };
+ }
+ "#,
+ expect![[r#"
+ 16..47 '{ ...; }; }': u32
+ 26..27 'x': || -> ()
+ 30..44 '|| { return; }': || -> ()
+ 33..44 '{ return; }': ()
+ 35..41 'return': !
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_inferred() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { "test" };
+ }
+ "#,
+ expect![[r#"
+ 16..46 '{ ..." }; }': u32
+ 26..27 'x': || -> &str
+ 30..43 '|| { "test" }': || -> &str
+ 33..43 '{ "test" }': &str
+ 35..41 '"test"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn fn_pointer_return() {
+ check_infer(
+ r#"
+ struct Vtable {
+ method: fn(),
+ }
+
+ fn main() {
+ let vtable = Vtable { method: || {} };
+ let m = vtable.method;
+ }
+ "#,
+ expect![[r#"
+ 47..120 '{ ...hod; }': ()
+ 57..63 'vtable': Vtable
+ 66..90 'Vtable...| {} }': Vtable
+ 83..88 '|| {}': || -> ()
+ 86..88 '{}': ()
+ 100..101 'm': fn()
+ 104..110 'vtable': Vtable
+ 104..117 'vtable.method': fn()
+ "#]],
+ );
+}
+
+#[test]
+fn block_modifiers_smoke_test() {
+ check_infer(
+ r#"
+//- minicore: future
+async fn main() {
+ let x = unsafe { 92 };
+ let y = async { async { () }.await };
+ let z = try { () };
+ let w = const { 92 };
+ let t = 'a: { 92 };
+}
+ "#,
+ expect![[r#"
+ 16..162 '{ ...2 }; }': ()
+ 26..27 'x': i32
+ 30..43 'unsafe { 92 }': i32
+ 30..43 'unsafe { 92 }': i32
+ 39..41 '92': i32
+ 53..54 'y': impl Future<Output = ()>
+ 57..85 'async ...wait }': ()
+ 57..85 'async ...wait }': impl Future<Output = ()>
+ 65..77 'async { () }': ()
+ 65..77 'async { () }': impl Future<Output = ()>
+ 65..83 'async ....await': ()
+ 73..75 '()': ()
+ 95..96 'z': {unknown}
+ 99..109 'try { () }': ()
+ 99..109 'try { () }': {unknown}
+ 105..107 '()': ()
+ 119..120 'w': i32
+ 123..135 'const { 92 }': i32
+ 123..135 'const { 92 }': i32
+ 131..133 '92': i32
+ 145..146 't': i32
+ 149..159 ''a: { 92 }': i32
+ 155..157 '92': i32
+ "#]],
+ )
+}
+#[test]
+fn async_block_early_return() {
+ check_infer(
+ r#"
+//- minicore: future, result, fn
+fn test<I, E, F: FnMut() -> Fut, Fut: core::future::Future<Output = Result<I, E>>>(f: F) {}
+
+fn main() {
+ async {
+ return Err(());
+ Ok(())
+ };
+ test(|| async {
+ return Err(());
+ Ok(())
+ });
+}
+ "#,
+ expect![[r#"
+ 83..84 'f': F
+ 89..91 '{}': ()
+ 103..231 '{ ... }); }': ()
+ 109..161 'async ... }': Result<(), ()>
+ 109..161 'async ... }': impl Future<Output = Result<(), ()>>
+ 125..139 'return Err(())': !
+ 132..135 'Err': Err<(), ()>(()) -> Result<(), ()>
+ 132..139 'Err(())': Result<(), ()>
+ 136..138 '()': ()
+ 149..151 'Ok': Ok<(), ()>(()) -> Result<(), ()>
+ 149..155 'Ok(())': Result<(), ()>
+ 152..154 '()': ()
+ 167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
+ 167..228 'test(|... })': ()
+ 172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
+ 175..227 'async ... }': Result<(), ()>
+ 175..227 'async ... }': impl Future<Output = Result<(), ()>>
+ 191..205 'return Err(())': !
+ 198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
+ 198..205 'Err(())': Result<(), ()>
+ 202..204 '()': ()
+ 215..217 'Ok': Ok<(), ()>(()) -> Result<(), ()>
+ 215..221 'Ok(())': Result<(), ()>
+ 218..220 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_generic_from_later_assignment() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let mut end = None;
+ loop {
+ end = Some(true);
+ }
+ }
+ "#,
+ expect![[r#"
+ 59..129 '{ ... } }': ()
+ 69..76 'mut end': Option<bool>
+ 79..83 'None': Option<bool>
+ 89..127 'loop {... }': !
+ 94..127 '{ ... }': ()
+ 104..107 'end': Option<bool>
+ 104..120 'end = ...(true)': ()
+ 110..114 'Some': Some<bool>(bool) -> Option<bool>
+ 110..120 'Some(true)': Option<bool>
+ 115..119 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_with_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break None;
+ }
+
+ break Some(true);
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..168 '{ ... }; }': ()
+ 69..70 'x': Option<bool>
+ 73..165 'loop {... }': Option<bool>
+ 78..165 '{ ... }': ()
+ 88..132 'if fal... }': ()
+ 91..96 'false': bool
+ 97..132 '{ ... }': ()
+ 111..121 'break None': !
+ 117..121 'None': Option<bool>
+ 142..158 'break ...(true)': !
+ 148..152 'Some': Some<bool>(bool) -> Option<bool>
+ 148..158 'Some(true)': Option<bool>
+ 153..157 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_without_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break;
+ }
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..136 '{ ... }; }': ()
+ 69..70 'x': ()
+ 73..133 'loop {... }': ()
+ 78..133 '{ ... }': ()
+ 88..127 'if fal... }': ()
+ 91..96 'false': bool
+ 97..127 '{ ... }': ()
+ 111..116 'break': !
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_break_with_val() {
+ check_infer(
+ r#"
+ fn foo() {
+ let _x = || 'outer: loop {
+ let inner = 'inner: loop {
+ let i = Default::default();
+ if (break 'outer i) {
+ loop { break 'inner 5i8; };
+ } else if true {
+ break 'inner 6;
+ }
+ break 7;
+ };
+ break inner < 8;
+ };
+ }
+ "#,
+ expect![[r#"
+ 9..335 '{ ... }; }': ()
+ 19..21 '_x': || -> bool
+ 24..332 '|| 'ou... }': || -> bool
+ 27..332 ''outer... }': bool
+ 40..332 '{ ... }': ()
+ 54..59 'inner': i8
+ 62..300 ''inner... }': i8
+ 75..300 '{ ... }': ()
+ 93..94 'i': bool
+ 97..113 'Defaul...efault': {unknown}
+ 97..115 'Defaul...ault()': bool
+ 129..269 'if (br... }': ()
+ 133..147 'break 'outer i': !
+ 146..147 'i': bool
+ 149..208 '{ ... }': ()
+ 167..193 'loop {...5i8; }': !
+ 172..193 '{ brea...5i8; }': ()
+ 174..190 'break ...er 5i8': !
+ 187..190 '5i8': i8
+ 214..269 'if tru... }': ()
+ 217..221 'true': bool
+ 222..269 '{ ... }': ()
+ 240..254 'break 'inner 6': !
+ 253..254 '6': i8
+ 282..289 'break 7': !
+ 288..289 '7': i8
+ 310..325 'break inner < 8': !
+ 316..321 'inner': i8
+ 316..325 'inner < 8': bool
+ 324..325 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_block_break_with_val() {
+ check_infer(
+ r#"
+fn default<T>() -> T { loop {} }
+fn foo() {
+ let _x = 'outer: {
+ let inner = 'inner: {
+ let i = default();
+ if (break 'outer i) {
+ break 'inner 5i8;
+ } else if true {
+ break 'inner 6;
+ }
+ break 'inner 'innermost: { 0 };
+ 42
+ };
+ break 'outer inner < 8;
+ };
+}
+"#,
+ expect![[r#"
+ 21..32 '{ loop {} }': T
+ 23..30 'loop {}': !
+ 28..30 '{}': ()
+ 42..381 '{ ... }; }': ()
+ 52..54 '_x': bool
+ 57..378 ''outer... }': bool
+ 79..84 'inner': i8
+ 87..339 ''inner... }': i8
+ 113..114 'i': bool
+ 117..124 'default': fn default<bool>() -> bool
+ 117..126 'default()': bool
+ 140..270 'if (br... }': ()
+ 144..158 'break 'outer i': !
+ 157..158 'i': bool
+ 160..209 '{ ... }': ()
+ 178..194 'break ...er 5i8': !
+ 191..194 '5i8': i8
+ 215..270 'if tru... }': ()
+ 218..222 'true': bool
+ 223..270 '{ ... }': ()
+ 241..255 'break 'inner 6': !
+ 254..255 '6': i8
+ 283..313 'break ... { 0 }': !
+ 296..313 ''inner... { 0 }': i8
+ 310..311 '0': i8
+ 327..329 '42': i8
+ 349..371 'break ...er < 8': !
+ 362..367 'inner': i8
+ 362..371 'inner < 8': bool
+ 370..371 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test(t1: Thing, t2: OtherThing, t3: Thing<i32>, t4: OtherThing<i32>) {
+ t1.t;
+ t3.t;
+ match t2 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ match t4 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ }
+ "#,
+ expect![[r#"
+ 97..99 't1': Thing<()>
+ 108..110 't2': OtherThing<()>
+ 124..126 't3': Thing<i32>
+ 140..142 't4': OtherThing<i32>
+ 161..384 '{ ... } }': ()
+ 167..169 't1': Thing<()>
+ 167..171 't1.t': ()
+ 177..179 't3': Thing<i32>
+ 177..181 't3.t': i32
+ 187..282 'match ... }': ()
+ 193..195 't2': OtherThing<()>
+ 206..227 'OtherT... { t }': OtherThing<()>
+ 224..225 't': ()
+ 231..237 '{ t; }': ()
+ 233..234 't': ()
+ 247..265 'OtherT...Two(t)': OtherThing<()>
+ 263..264 't': ()
+ 269..275 '{ t; }': ()
+ 271..272 't': ()
+ 287..382 'match ... }': ()
+ 293..295 't4': OtherThing<i32>
+ 306..327 'OtherT... { t }': OtherThing<i32>
+ 324..325 't': i32
+ 331..337 '{ t; }': ()
+ 333..334 't': i32
+ 347..365 'OtherT...Two(t)': OtherThing<i32>
+ 363..364 't': i32
+ 369..375 '{ t; }': ()
+ 371..372 't': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_in_struct_literal() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test() {
+ let x = Thing { t: loop {} };
+ let y = Thing { t: () };
+ let z = Thing { t: 1i32 };
+ if let Thing { t } = z {
+ t;
+ }
+
+ let a = OtherThing::One { t: 1i32 };
+ let b = OtherThing::Two(1i32);
+ }
+ "#,
+ expect![[r#"
+ 99..319 '{ ...32); }': ()
+ 109..110 'x': Thing<!>
+ 113..133 'Thing ...p {} }': Thing<!>
+ 124..131 'loop {}': !
+ 129..131 '{}': ()
+ 143..144 'y': Thing<()>
+ 147..162 'Thing { t: () }': Thing<()>
+ 158..160 '()': ()
+ 172..173 'z': Thing<i32>
+ 176..193 'Thing ...1i32 }': Thing<i32>
+ 187..191 '1i32': i32
+ 199..240 'if let... }': ()
+ 202..221 'let Th... } = z': bool
+ 206..217 'Thing { t }': Thing<i32>
+ 214..215 't': i32
+ 220..221 'z': Thing<i32>
+ 222..240 '{ ... }': ()
+ 232..233 't': i32
+ 250..251 'a': OtherThing<i32>
+ 254..281 'OtherT...1i32 }': OtherThing<i32>
+ 275..279 '1i32': i32
+ 291..292 'b': OtherThing<i32>
+ 295..310 'OtherThing::Two': Two<i32>(i32) -> OtherThing<i32>
+ 295..316 'OtherT...(1i32)': OtherThing<i32>
+ 311..315 '1i32': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg() {
+ // FIXME: the {unknown} is a bug
+ check_infer(
+ r#"
+ struct Thing<T = u128, F = fn() -> T> { t: T }
+
+ fn test(t1: Thing<u32>, t2: Thing) {
+ t1;
+ t2;
+ Thing::<_> { t: 1u32 };
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<u32, fn() -> u32>
+ 72..74 't2': Thing<u128, fn() -> u128>
+ 83..130 '{ ...2 }; }': ()
+ 89..91 't1': Thing<u32, fn() -> u32>
+ 97..99 't2': Thing<u128, fn() -> u128>
+ 105..127 'Thing:...1u32 }': Thing<u32, fn() -> {unknown}>
+ 121..125 '1u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg_forward() {
+ // the {unknown} here is intentional, as defaults are not allowed to
+ // refer to type parameters coming later
+ check_infer(
+ r#"
+ struct Thing<F = fn() -> T, T = u128> { t: T }
+
+ fn test(t1: Thing) {
+ t1;
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<fn() -> {unknown}, u128>
+ 67..78 '{ t1; }': ()
+ 73..75 't1': Thing<fn() -> {unknown}, u128>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_operator_overload() {
+ check_types(
+ r#"
+//- minicore: add
+struct V2([f32; 2]);
+
+impl core::ops::Add<V2> for V2 {
+ type Output = V2;
+}
+
+fn test() {
+ let va = V2([0.0, 1.0]);
+ let vb = V2([0.0, 1.0]);
+
+ let r = va + vb;
+ // ^^^^^^^ V2
+}
+
+ "#,
+ );
+}
+
+#[test]
+fn infer_const_params() {
+ check_infer(
+ r#"
+ fn foo<const FOO: usize>() {
+ let bar = FOO;
+ }
+ "#,
+ expect![[r#"
+ 27..49 '{ ...FOO; }': ()
+ 37..40 'bar': usize
+ 43..46 'FOO': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inner_type() {
+ check_infer(
+ r#"
+ fn foo() {
+ struct S { field: u32 }
+ let s = S { field: 0 };
+ let f = s.field;
+ }
+ "#,
+ expect![[r#"
+ 9..89 '{ ...eld; }': ()
+ 47..48 's': S
+ 51..65 'S { field: 0 }': S
+ 62..63 '0': u32
+ 75..76 'f': u32
+ 79..80 's': S
+ 79..86 's.field': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_inner_type() {
+ check_infer(
+ r#"
+ fn foo() {
+ {
+ let s = S { field: 0 };
+ let f = s.field;
+ }
+ struct S { field: u32 }
+ }
+ "#,
+ expect![[r#"
+ 9..109 '{ ...32 } }': ()
+ 15..79 '{ ... }': ()
+ 29..30 's': S
+ 33..47 'S { field: 0 }': S
+ 44..45 '0': u32
+ 61..62 'f': u32
+ 65..66 's': S
+ 65..72 's.field': u32
+ "#]],
+ );
+}
+
+#[test]
+fn inner_use_enum_rename() {
+ check_infer(
+ r#"
+ enum Request {
+ Info
+ }
+
+ fn f() {
+ use Request as R;
+
+ let r = R::Info;
+ match r {
+ R::Info => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 34..123 '{ ... } }': ()
+ 67..68 'r': Request
+ 71..78 'R::Info': Request
+ 84..121 'match ... }': ()
+ 90..91 'r': Request
+ 102..109 'R::Info': Request
+ 113..115 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn box_into_vec() {
+ check_infer(
+ r#"
+#[lang = "sized"]
+pub trait Sized {}
+
+#[lang = "unsize"]
+pub trait Unsize<T: ?Sized> {}
+
+#[lang = "coerce_unsized"]
+pub trait CoerceUnsized<T> {}
+
+pub unsafe trait Allocator {}
+
+pub struct Global;
+unsafe impl Allocator for Global {}
+
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T: ?Sized, A: Allocator = Global>;
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
+
+pub struct Vec<T, A: Allocator = Global> {}
+
+#[lang = "slice"]
+impl<T> [T] {}
+
+#[lang = "slice_alloc"]
+impl<T> [T] {
+ pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
+ unimplemented!()
+ }
+}
+
+fn test() {
+ let vec = <[_]>::into_vec(box [1i32]);
+ let v: Vec<Box<dyn B>> = <[_]> :: into_vec(box [box Astruct]);
+}
+
+trait B{}
+struct Astruct;
+impl B for Astruct {}
+"#,
+ expect![[r#"
+ 569..573 'self': Box<[T], A>
+ 602..634 '{ ... }': Vec<T, A>
+ 612..628 'unimpl...ted!()': Vec<T, A>
+ 648..761 '{ ...t]); }': ()
+ 658..661 'vec': Vec<i32, Global>
+ 664..679 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
+ 664..691 '<[_]>:...1i32])': Vec<i32, Global>
+ 680..690 'box [1i32]': Box<[i32; 1], Global>
+ 684..690 '[1i32]': [i32; 1]
+ 685..689 '1i32': i32
+ 701..702 'v': Vec<Box<dyn B, Global>, Global>
+ 722..739 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
+ 722..758 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
+ 740..757 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
+ 744..757 '[box Astruct]': [Box<dyn B, Global>; 1]
+ 745..756 'box Astruct': Box<Astruct, Global>
+ 749..756 'Astruct': Astruct
+ "#]],
+ )
+}
+
+#[test]
+fn cfgd_out_assoc_items() {
+ check_types(
+ r#"
+struct S;
+
+impl S {
+ #[cfg(FALSE)]
+ const C: S = S;
+}
+
+fn f() {
+ S::C;
+ //^^^^ {unknown}
+}
+ "#,
+ )
+}
+
+#[test]
+fn infer_missing_type() {
+ check_types(
+ r#"
+struct S;
+
+fn f() {
+ let s: = S;
+ //^ S
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_type_alias_variant() {
+ check_infer(
+ r#"
+type Qux = Foo;
+enum Foo {
+ Bar(i32),
+ Baz { baz: f32 }
+}
+
+fn f() {
+ match Foo::Bar(3) {
+ Qux::Bar(bar) => (),
+ Qux::Baz { baz } => (),
+ }
+}
+ "#,
+ expect![[r#"
+ 72..166 '{ ... } }': ()
+ 78..164 'match ... }': ()
+ 84..92 'Foo::Bar': Bar(i32) -> Foo
+ 84..95 'Foo::Bar(3)': Foo
+ 93..94 '3': i32
+ 106..119 'Qux::Bar(bar)': Foo
+ 115..118 'bar': i32
+ 123..125 '()': ()
+ 135..151 'Qux::B... baz }': Foo
+ 146..149 'baz': f32
+ 155..157 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_boxed_self_receiver() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct Box<T>(T);
+
+impl<T> Deref for Box<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target;
+}
+
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+ fn get_inner<'a>(self: &'a Box<Self>) -> &'a T {}
+
+ fn get_self<'a>(self: &'a Box<Self>) -> &'a Self {}
+
+ fn into_inner(self: Box<Self>) -> Self {}
+}
+
+fn main() {
+ let boxed = Box(Foo(0_i32));
+
+ let bad1 = boxed.get_inner();
+ let good1 = Foo::get_inner(&boxed);
+
+ let bad2 = boxed.get_self();
+ let good2 = Foo::get_self(&boxed);
+
+ let inner = boxed.into_inner();
+}
+ "#,
+ expect![[r#"
+ 104..108 'self': &Box<T>
+ 188..192 'self': &Box<Foo<T>>
+ 218..220 '{}': &T
+ 242..246 'self': &Box<Foo<T>>
+ 275..277 '{}': &Foo<T>
+ 297..301 'self': Box<Foo<T>>
+ 322..324 '{}': Foo<T>
+ 338..559 '{ ...r(); }': ()
+ 348..353 'boxed': Box<Foo<i32>>
+ 356..359 'Box': Box<Foo<i32>>(Foo<i32>) -> Box<Foo<i32>>
+ 356..371 'Box(Foo(0_i32))': Box<Foo<i32>>
+ 360..363 'Foo': Foo<i32>(i32) -> Foo<i32>
+ 360..370 'Foo(0_i32)': Foo<i32>
+ 364..369 '0_i32': i32
+ 382..386 'bad1': &i32
+ 389..394 'boxed': Box<Foo<i32>>
+ 389..406 'boxed....nner()': &i32
+ 416..421 'good1': &i32
+ 424..438 'Foo::get_inner': fn get_inner<i32>(&Box<Foo<i32>>) -> &i32
+ 424..446 'Foo::g...boxed)': &i32
+ 439..445 '&boxed': &Box<Foo<i32>>
+ 440..445 'boxed': Box<Foo<i32>>
+ 457..461 'bad2': &Foo<i32>
+ 464..469 'boxed': Box<Foo<i32>>
+ 464..480 'boxed....self()': &Foo<i32>
+ 490..495 'good2': &Foo<i32>
+ 498..511 'Foo::get_self': fn get_self<i32>(&Box<Foo<i32>>) -> &Foo<i32>
+ 498..519 'Foo::g...boxed)': &Foo<i32>
+ 512..518 '&boxed': &Box<Foo<i32>>
+ 513..518 'boxed': Box<Foo<i32>>
+ 530..535 'inner': Foo<i32>
+ 538..543 'boxed': Box<Foo<i32>>
+ 538..556 'boxed....nner()': Foo<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn prelude_2015() {
+ check_types(
+ r#"
+//- /main.rs edition:2015 crate:main deps:core
+fn f() {
+ Rust;
+ //^^^^ Rust
+}
+
+//- /core.rs crate:core
+pub mod prelude {
+ pub mod rust_2015 {
+ pub struct Rust;
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn legacy_const_generics() {
+ check_no_mismatches(
+ r#"
+#[rustc_legacy_const_generics(1, 3)]
+fn mixed<const N1: &'static str, const N2: bool>(
+ a: u8,
+ b: i8,
+) {}
+
+fn f() {
+ mixed(0, "", -1, true);
+ mixed::<"", true>(0, -1);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_slice() {
+ check_types(
+ r#"
+fn main() {
+ let a;
+ //^usize
+ [a,] = [0usize];
+
+ let a;
+ //^usize
+ [a, ..] = [0usize; 5];
+
+ let a;
+ //^usize
+ [.., a] = [0usize; 5];
+
+ let a;
+ //^usize
+ [.., a, _] = [0usize; 5];
+
+ let a;
+ //^usize
+ [_, a, ..] = [0usize; 5];
+
+ let a: &mut i64 = &mut 0;
+ [*a, ..] = [1, 2, 3];
+
+ let a: usize;
+ let b;
+ //^usize
+ [a, _, b] = [3, 4, 5];
+ //^usize
+
+ let a;
+ //^i64
+ let b;
+ //^i64
+ [[a, ..], .., [.., b]] = [[1, 2], [3i64, 4], [5, 6], [7, 8]];
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_tuple() {
+ check_types(
+ r#"
+fn main() {
+ let a;
+ //^char
+ let b;
+ //^i64
+ (a, b) = ('c', 0i64);
+
+ let a;
+ //^char
+ (a, ..) = ('c', 0i64);
+
+ let a;
+ //^i64
+ (.., a) = ('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^i64
+ (a, .., b) = ('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^bool
+ (a, .., b) = ('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^bool
+ (_, a, .., b) = ('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^usize
+ (_, a, .., b) = ('c', 0i64, true, 0usize);
+
+ let mut a = 1;
+ //^^^^^i64
+ let mut b: i64 = 0;
+ (a, b) = (b, a);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_tuple_struct() {
+ check_types(
+ r#"
+struct S2(char, i64);
+struct S3(char, i64, bool);
+struct S4(char, i64, bool, usize);
+fn main() {
+ let a;
+ //^char
+ let b;
+ //^i64
+ S2(a, b) = S2('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^i64
+ S2(a, .., b) = S2('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^bool
+ S3(a, .., b) = S3('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^bool
+ S3(_, a, .., b) = S3('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^usize
+ S4(_, a, .., b) = S4('c', 0i64, true, 0usize);
+
+ struct Swap(i64, i64);
+
+ let mut a = 1;
+ //^^^^^i64
+ let mut b = 0;
+ //^^^^^i64
+ Swap(a, b) = Swap(b, a);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_struct() {
+ check_types(
+ r#"
+struct S {
+ a: usize,
+ b: char,
+}
+struct T {
+ s: S,
+ t: i64,
+}
+
+fn main() {
+ let a;
+ //^usize
+ let c;
+ //^char
+ S { a, b: c } = S { a: 3, b: 'b' };
+
+ let a;
+ //^char
+ S { b: a, .. } = S { a: 3, b: 'b' };
+
+ let a;
+ //^char
+ S { b: a, _ } = S { a: 3, b: 'b' };
+
+ let a;
+ //^usize
+ let c;
+ //^char
+ let t;
+ //^i64
+ T { s: S { a, b: c }, t } = T { s: S { a: 3, b: 'b' }, t: 0 };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_nested() {
+ check_types(
+ r#"
+struct S {
+ a: TS,
+ b: [char; 3],
+}
+struct TS(usize, i64);
+
+fn main() {
+ let a;
+ //^i32
+ let b;
+ //^bool
+ ([.., a], .., b, _) = ([0, 1, 2], true, 'c');
+
+ let a;
+ //^i32
+ let b;
+ //^i32
+ [(.., a, _), .., (b, ..)] = [(1, 2); 5];
+
+ let a;
+ //^usize
+ let b;
+ //^char
+ S { a: TS(a, ..), b: [_, b, ..] } = S { a: TS(0, 0), b: ['a'; 3] };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_unit_struct() {
+ // taken from rustc; see https://github.com/rust-lang/rust/pull/95380
+ check_no_mismatches(
+ r#"
+struct S;
+enum E { V, }
+type A = E;
+
+fn main() {
+ let mut a;
+
+ (S, a) = (S, ());
+
+ (E::V, a) = (E::V, ());
+
+ (<E>::V, a) = (E::V, ());
+ (A::V, a) = (E::V, ());
+}
+
+impl S {
+ fn check() {
+ let a;
+ (Self, a) = (S, ());
+ }
+}
+
+impl E {
+ fn check() {
+ let a;
+ (Self::V, a) = (E::V, ());
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_no_default_binding_mode() {
+ check(
+ r#"
+struct S { a: usize }
+struct TS(usize);
+fn main() {
+ let x;
+ [x,] = &[1,];
+ //^^^^expected &[i32; 1], got [{unknown}; _]
+
+ // FIXME we only want the outermost error, but this matches the current
+ // behavior of slice patterns
+ let x;
+ [(x,),] = &[(1,),];
+ // ^^^^expected {unknown}, got ({unknown},)
+ //^^^^^^^expected &[(i32,); 1], got [{unknown}; _]
+
+ let x;
+ ((x,),) = &((1,),);
+ //^^^^^^^expected &((i32,),), got (({unknown},),)
+
+ let x;
+ (x,) = &(1,);
+ //^^^^expected &(i32,), got ({unknown},)
+
+ let x;
+ (S { a: x },) = &(S { a: 42 },);
+ //^^^^^^^^^^^^^expected &(S,), got (S,)
+
+ let x;
+ S { a: x } = &S { a: 42 };
+ //^^^^^^^^^^expected &S, got S
+
+ let x;
+ TS(x) = &TS(42);
+ //^^^^^expected &TS, got TS
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_type_mismatch_on_identifier() {
+ check(
+ r#"
+struct S { v: i64 }
+struct TS(i64);
+fn main() {
+ let mut a: usize = 0;
+ (a,) = (0i64,);
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ [a,] = [0i64,];
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ S { v: a } = S { v: 0 };
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ TS(a) = TS(0);
+ //^expected i64, got usize
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
new file mode 100644
index 000000000..75802a5eb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -0,0 +1,3782 @@
+use cov_mark::check;
+use expect_test::expect;
+
+use super::{check, check_infer, check_infer_with_mismatches, check_no_mismatches, check_types};
+
+#[test]
+fn infer_await() {
+ check_types(
+ r#"
+//- minicore: future
+struct IntFuture;
+
+impl core::future::Future for IntFuture {
+ type Output = u64;
+}
+
+fn test() {
+ let r = IntFuture;
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_async() {
+ check_types(
+ r#"
+//- minicore: future
+async fn foo() -> u64 { 128 }
+
+fn test() {
+ let r = foo();
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_desugar_async() {
+ check_types(
+ r#"
+//- minicore: future, sized
+async fn foo() -> u64 { 128 }
+
+fn test() {
+ let r = foo();
+ r;
+} //^ impl Future<Output = u64>
+"#,
+ );
+}
+
+#[test]
+fn infer_async_block() {
+ check_types(
+ r#"
+//- minicore: future, option
+async fn test() {
+ let a = async { 42 };
+ a;
+// ^ impl Future<Output = i32>
+ let x = a.await;
+ x;
+// ^ i32
+ let b = async {}.await;
+ b;
+// ^ ()
+ let c = async {
+ let y = None;
+ y
+ // ^ Option<u64>
+ };
+ let _: Option<u64> = c.await;
+ c;
+// ^ impl Future<Output = Option<u64>>
+}
+"#,
+ );
+}
+
+#[test]
+fn auto_sized_async_block() {
+ check_no_mismatches(
+ r#"
+//- minicore: future, sized
+
+use core::future::Future;
+struct MyFut<Fut>(Fut);
+
+impl<Fut> Future for MyFut<Fut>
+where Fut: Future
+{
+ type Output = Fut::Output;
+}
+async fn reproduction() -> usize {
+ let f = async {999usize};
+ MyFut(f).await
+}
+ "#,
+ );
+ check_no_mismatches(
+ r#"
+//- minicore: future
+//#11815
+#[lang = "sized"]
+pub trait Sized {}
+
+#[lang = "unsize"]
+pub trait Unsize<T: ?Sized> {}
+
+#[lang = "coerce_unsized"]
+pub trait CoerceUnsized<T> {}
+
+pub unsafe trait Allocator {}
+
+pub struct Global;
+unsafe impl Allocator for Global {}
+
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T: ?Sized, A: Allocator = Global>;
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
+
+fn send() -> Box<dyn Future<Output = ()> + Send + 'static>{
+ box async move {}
+}
+
+fn not_send() -> Box<dyn Future<Output = ()> + 'static> {
+ box async move {}
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_try() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub trait Try {
+ type Ok;
+ type Error;
+ }
+}
+
+pub mod result {
+ pub enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> crate::ops::Try for Result<O, E> {
+ type Ok = O;
+ type Error = E;
+ }
+}
+
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::{result::*, ops::*};
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_try_trait_v2() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+mod ops {
+ mod try_trait {
+ pub trait Try: FromResidual {
+ type Output;
+ type Residual;
+ }
+ pub trait FromResidual<R = <Self as Try>::Residual> {}
+ }
+
+ pub use self::try_trait::FromResidual;
+ pub use self::try_trait::Try;
+}
+
+mod convert {
+ pub trait From<T> {}
+ impl<T> From<T> for T {}
+}
+
+pub mod result {
+ use crate::convert::From;
+ use crate::ops::{Try, FromResidual};
+
+ pub enum Infallible {}
+ pub enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> Try for Result<O, E> {
+ type Output = O;
+ type Error = Result<Infallible, E>;
+ }
+
+ impl<T, E, F: From<E>> FromResidual<Result<Infallible, E>> for Result<T, F> {}
+}
+
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::result::*;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_for_loop() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core,alloc
+#![no_std]
+use alloc::collections::Vec;
+
+fn test() {
+ let v = Vec::new();
+ v.push("foo");
+ for x in v {
+ x;
+ } //^ &str
+}
+
+//- /core.rs crate:core
+pub mod iter {
+ pub trait IntoIterator {
+ type Item;
+ }
+}
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::iter::*;
+ }
+}
+
+//- /alloc.rs crate:alloc deps:core
+#![no_std]
+pub mod collections {
+ pub struct Vec<T> {}
+ impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+ pub fn push(&mut self, t: T) { }
+ }
+
+ impl<T> IntoIterator for Vec<T> {
+ type Item=T;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_neg() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Neg for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = -a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "neg"]
+ pub trait Neg {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_not() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Not for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = !a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "not"]
+ pub trait Not {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_1() {
+ check_types(
+ r#"
+trait Trait<T> {}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn foo<T: Trait<u32>>(t: T) {}
+fn test() {
+ let s = S(unknown);
+ // ^^^^^^^ u32
+ foo(s);
+}"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_2() {
+ check_types(
+ r#"
+trait Trait<T> {}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn foo<U, T: Trait<U>>(t: T) -> U { loop {} }
+fn test() {
+ let s = S(unknown);
+ // ^^^^^^^ u32
+ let x: u32 = foo(s);
+}"#,
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_trait() {
+ cov_mark::check!(trait_self_implements_self);
+ check(
+ r#"
+trait Trait {
+ fn foo(&self) -> i64;
+ fn bar(&self) -> () {
+ self.foo();
+ // ^^^^^^^^^^ type: i64
+ }
+}"#,
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_super_trait() {
+ check(
+ r#"
+trait SuperTrait {
+ fn foo(&self) -> i64;
+}
+trait Trait: SuperTrait {
+ fn bar(&self) -> () {
+ self.foo();
+ // ^^^^^^^^^^ type: i64
+ }
+}"#,
+ );
+}
+
+#[test]
+fn infer_project_associated_type() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S;
+impl Iterable for S { type Item = u32; }
+fn test<T: Iterable>() {
+ let x: <S as Iterable>::Item = 1;
+ // ^ u32
+ let y: <T as Iterable>::Item = u;
+ // ^ Iterable::Item<T>
+ let z: T::Item = u;
+ // ^ Iterable::Item<T>
+ let a: <T>::Item = u;
+ // ^ Iterable::Item<T>
+}"#,
+ );
+}
+
+#[test]
+fn infer_return_associated_type() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S;
+impl Iterable for S { type Item = u32; }
+fn foo1<T: Iterable>(t: T) -> T::Item { loop {} }
+fn foo2<T: Iterable>(t: T) -> <T as Iterable>::Item { loop {} }
+fn foo3<T: Iterable>(t: T) -> <T>::Item { loop {} }
+fn test() {
+ foo1(S);
+ // ^^^^^^^ u32
+ foo2(S);
+ // ^^^^^^^ u32
+ foo3(S);
+ // ^^^^^^^ u32
+}"#,
+ );
+}
+
+#[test]
+fn associated_type_shorthand_from_method_bound() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S<T>;
+impl<T> S<T> {
+ fn foo(self) -> T::Item where T: Iterable { loop {} }
+}
+fn test<T: Iterable>() {
+ let s: S<T>;
+ s.foo();
+ // ^^^^^^^ Iterable::Item<T>
+}"#,
+ );
+}
+
+#[test]
+fn associated_type_shorthand_from_self_issue_12484() {
+ check_types(
+ r#"
+trait Bar {
+ type A;
+}
+trait Foo {
+ type A;
+ fn test(a: Self::A, _: impl Bar) {
+ a;
+ //^ Foo::A<Self>
+ }
+}"#,
+ );
+}
+
+#[test]
+fn infer_associated_type_bound() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+fn test<T: Iterable<Item=u32>>() {
+ let y: T::Item = unknown;
+ // ^^^^^^^ u32
+}"#,
+ );
+}
+
+#[test]
+fn infer_const_body() {
+ // FIXME make check_types work with other bodies
+ check_infer(
+ r#"
+const A: u32 = 1 + 1;
+static B: u64 = { let x = 1; x };
+"#,
+ expect![[r#"
+ 15..16 '1': u32
+ 15..20 '1 + 1': u32
+ 19..20 '1': u32
+ 38..54 '{ let ...1; x }': u64
+ 44..45 'x': u64
+ 48..49 '1': u64
+ 51..52 'x': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_fields() {
+ check_infer(
+ r#"
+struct S(i32, u64);
+fn test() -> u64 {
+ let a = S(4, 6);
+ let b = a.0;
+ a.1
+}"#,
+ expect![[r#"
+ 37..86 '{ ... a.1 }': u64
+ 47..48 'a': S
+ 51..52 'S': S(i32, u64) -> S
+ 51..58 'S(4, 6)': S
+ 53..54 '4': i32
+ 56..57 '6': u64
+ 68..69 'b': i32
+ 72..73 'a': S
+ 72..75 'a.0': i32
+ 81..82 'a': S
+ 81..84 'a.1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_with_fn() {
+ check_infer(
+ r#"
+struct S(fn(u32) -> u64);
+fn test() -> u64 {
+ let a = S(|i| 2*i);
+ let b = a.0(4);
+ a.0(2)
+}"#,
+ expect![[r#"
+ 43..101 '{ ...0(2) }': u64
+ 53..54 'a': S
+ 57..58 'S': S(fn(u32) -> u64) -> S
+ 57..67 'S(|i| 2*i)': S
+ 59..66 '|i| 2*i': |u32| -> u64
+ 60..61 'i': u32
+ 63..64 '2': u32
+ 63..66 '2*i': u32
+ 65..66 'i': u32
+ 77..78 'b': u64
+ 81..82 'a': S
+ 81..84 'a.0': fn(u32) -> u64
+ 81..87 'a.0(4)': u64
+ 85..86 '4': u32
+ 93..94 'a': S
+ 93..96 'a.0': fn(u32) -> u64
+ 93..99 'a.0(2)': u64
+ 97..98 '2': u32
+ "#]],
+ );
+}
+
+#[test]
+fn indexing_arrays() {
+ check_infer(
+ "fn main() { &mut [9][2]; }",
+ expect![[r#"
+ 10..26 '{ &mut...[2]; }': ()
+ 12..23 '&mut [9][2]': &mut {unknown}
+ 17..20 '[9]': [i32; 1]
+ 17..23 '[9][2]': {unknown}
+ 18..19 '9': i32
+ 21..22 '2': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_ops_index() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo;
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32];
+ b;
+} //^ Foo
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_field() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo {
+    field: u32,
+}
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32].field;
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_field_autoderef() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo {
+    field: u32,
+}
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = (&a[1u32]).field;
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_int() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo;
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+struct Range;
+impl core::ops::Index<Range> for Bar {
+ type Output = Bar;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1];
+ b;
+ //^ Foo
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_autoderef() {
+ check_types(
+ r#"
+//- minicore: index, slice
+fn test() {
+ let a = &[1u32, 2, 3];
+ let b = a[1];
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn deref_trait() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 { 0 }
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^^^^^^^^^^^^^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_inference_var() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+fn new_arc<T: ?Sized>() -> Arc<T> { Arc }
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+fn foo(a: Arc<S>) {}
+
+fn test() {
+ let a = new_arc();
+ let b = *a;
+ //^^ S
+ foo(a);
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_infinite_recursion() {
+ check_types(
+ r#"
+//- minicore: deref
+struct S;
+
+impl core::ops::Deref for S {
+ type Target = S;
+}
+
+fn test(s: S) {
+ s.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_question_mark_size() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 { 0 }
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^^^^^^^^^^^^^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_implicit_sized_requirement_on_inference_var() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Foo<T>;
+impl<T> core::ops::Deref for Foo<T> {
+ type Target = ();
+}
+fn test() {
+ let foo = Foo;
+ *foo;
+ //^^^^ ()
+ let _: Foo<u8> = foo;
+}
+"#,
+ )
+}
+
+#[test]
+fn obligation_from_function_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<u32> for S {}
+
+fn foo<T: Trait<U>, U>(t: T) -> U { loop {} }
+
+fn test(s: S) {
+ foo(s);
+} //^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_method_clause() {
+ check_types(
+ r#"
+//- /main.rs
+struct S;
+
+trait Trait<T> {}
+impl Trait<isize> for S {}
+
+struct O;
+impl O {
+ fn foo<T: Trait<U>, U>(&self, t: T) -> U { loop {} }
+}
+
+fn test() {
+ O.foo(S);
+} //^^^^^^^^ isize
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_self_method_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<i64> for S {}
+
+impl S {
+ fn foo<U>(&self) -> U where Self: Trait<U> { loop {} }
+}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ i64
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_impl_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<&str> for S {}
+
+struct O<T>;
+impl<U, T: Trait<U>> O<T> {
+ fn foo(&self) -> U { loop {} }
+}
+
+fn test(o: O<S>) {
+ o.foo();
+} //^^^^^^^ &str
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T: Clone>(t: T) { t.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1_not_met() {
+ check_types(
+ r#"
+//- /main.rs
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T>(t: T) { t.foo(); }
+ //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T: Trait>(t: T) { t.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2_not_met() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T>(t: T) { t.foo(); }
+ //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_deref() {
+ check_types(
+ r#"
+//- minicore: deref
+trait Trait {}
+impl<T> core::ops::Deref for T where T: Trait {
+ type Target = i128;
+}
+fn test<T: Trait>(t: T) { *t; }
+ //^^ i128
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder() {
+ // inside the generic function, the associated type gets normalized to a placeholder `ApplL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types].
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+
+pub struct RefMutL<T>;
+
+impl<T> ApplyL for RefMutL<T> {
+ type Out = <T as ApplyL>::Out;
+}
+
+fn test<T: ApplyL>() {
+ let y: <RefMutL<T> as ApplyL>::Out = no_matter;
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder_2() {
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+fn foo<T: ApplyL>(t: T) -> <T as ApplyL>::Out;
+
+fn test<T: ApplyL>(t: T) {
+ let y = foo(t);
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn argument_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar(x: impl Trait<u16>) {}
+struct S<T>(T);
+impl<T> Trait<T> for S<T> {}
+
+fn test(x: impl Trait<u64>, y: &impl Trait<u32>) {
+ x;
+ y;
+ let z = S(1);
+ bar(z);
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 77..78 'x': impl Trait<u16>
+ 97..99 '{}': ()
+ 154..155 'x': impl Trait<u64>
+ 174..175 'y': &impl Trait<u32>
+ 195..323 '{ ...2(); }': ()
+ 201..202 'x': impl Trait<u64>
+ 208..209 'y': &impl Trait<u32>
+ 219..220 'z': S<u16>
+ 223..224 'S': S<u16>(u16) -> S<u16>
+ 223..227 'S(1)': S<u16>
+ 225..226 '1': u16
+ 233..236 'bar': fn bar(S<u16>)
+ 233..239 'bar(z)': ()
+ 237..238 'z': S<u16>
+ 245..246 'x': impl Trait<u64>
+ 245..252 'x.foo()': u64
+ 258..259 'y': &impl Trait<u32>
+ 258..265 'y.foo()': u32
+ 271..272 'z': S<u16>
+ 271..278 'z.foo()': u16
+ 284..285 'x': impl Trait<u64>
+ 284..292 'x.foo2()': i64
+ 298..299 'y': &impl Trait<u32>
+ 298..306 'y.foo2()': i64
+ 312..313 'z': S<u16>
+ 312..320 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_1() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+trait Foo {
+ // this function has an implicit Self param, an explicit type param,
+ // and an implicit impl Trait param!
+ fn bar<T>(x: impl Trait) -> T { loop {} }
+}
+fn foo<T>(x: impl Trait) -> T { loop {} }
+struct S;
+impl Trait for S {}
+struct F;
+impl Foo for F {}
+
+fn test() {
+ Foo::bar(S);
+ <F as Foo>::bar(S);
+ F::bar(S);
+ Foo::bar::<u32>(S);
+ <F as Foo>::bar::<u32>(S);
+
+ foo(S);
+ foo::<u32>(S);
+ foo::<u32, i32>(S); // we should ignore the extraneous i32
+}"#,
+ expect![[r#"
+ 155..156 'x': impl Trait
+ 175..186 '{ loop {} }': T
+ 177..184 'loop {}': !
+ 182..184 '{}': ()
+ 199..200 'x': impl Trait
+ 219..230 '{ loop {} }': T
+ 221..228 'loop {}': !
+ 226..228 '{}': ()
+ 300..509 '{ ... i32 }': ()
+ 306..314 'Foo::bar': fn bar<{unknown}, {unknown}>(S) -> {unknown}
+ 306..317 'Foo::bar(S)': {unknown}
+ 315..316 'S': S
+ 323..338 '<F as Foo>::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 323..341 '<F as ...bar(S)': {unknown}
+ 339..340 'S': S
+ 347..353 'F::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 347..356 'F::bar(S)': {unknown}
+ 354..355 'S': S
+ 362..377 'Foo::bar::<u32>': fn bar<{unknown}, u32>(S) -> u32
+ 362..380 'Foo::b...32>(S)': u32
+ 378..379 'S': S
+ 386..408 '<F as ...:<u32>': fn bar<F, u32>(S) -> u32
+ 386..411 '<F as ...32>(S)': u32
+ 409..410 'S': S
+ 418..421 'foo': fn foo<{unknown}>(S) -> {unknown}
+ 418..424 'foo(S)': {unknown}
+ 422..423 'S': S
+ 430..440 'foo::<u32>': fn foo<u32>(S) -> u32
+ 430..443 'foo::<u32>(S)': u32
+ 441..442 'S': S
+ 449..464 'foo::<u32, i32>': fn foo<u32>(S) -> u32
+ 449..467 'foo::<...32>(S)': u32
+ 465..466 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_2() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct S;
+impl Trait for S {}
+struct F<T>;
+impl<T> F<T> {
+ fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
+}
+
+fn test() {
+ F.foo(S);
+ F::<u32>.foo(S);
+ F::<u32>.foo::<i32>(S);
+ F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
+}"#,
+ expect![[r#"
+ 87..91 'self': F<T>
+ 93..94 'x': impl Trait
+ 118..129 '{ loop {} }': (T, U)
+ 120..127 'loop {}': !
+ 125..127 '{}': ()
+ 143..283 '{ ...ored }': ()
+ 149..150 'F': F<{unknown}>
+ 149..157 'F.foo(S)': ({unknown}, {unknown})
+ 155..156 'S': S
+ 163..171 'F::<u32>': F<u32>
+ 163..178 'F::<u32>.foo(S)': (u32, {unknown})
+ 176..177 'S': S
+ 184..192 'F::<u32>': F<u32>
+ 184..206 'F::<u3...32>(S)': (u32, i32)
+ 204..205 'S': S
+ 212..220 'F::<u32>': F<u32>
+ 212..239 'F::<u3...32>(S)': (u32, i32)
+ 237..238 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_to_fn_pointer() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo(x: impl Trait) { loop {} }
+struct S;
+impl Trait for S {}
+
+fn test() {
+ let f: fn(S) -> () = foo;
+}"#,
+ expect![[r#"
+ 22..23 'x': impl Trait
+ 37..48 '{ loop {} }': ()
+ 39..46 'loop {}': !
+ 44..46 '{}': ()
+ 90..123 '{ ...foo; }': ()
+ 100..101 'f': fn(S)
+ 117..120 'foo': fn foo(S)
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar() -> impl Trait<u64> {}
+
+fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 98..100 '{}': ()
+ 110..111 'x': impl Trait<u64>
+ 130..131 'y': &impl Trait<u64>
+ 151..268 '{ ...2(); }': ()
+ 157..158 'x': impl Trait<u64>
+ 164..165 'y': &impl Trait<u64>
+ 175..176 'z': impl Trait<u64>
+ 179..182 'bar': fn bar() -> impl Trait<u64>
+ 179..184 'bar()': impl Trait<u64>
+ 190..191 'x': impl Trait<u64>
+ 190..197 'x.foo()': u64
+ 203..204 'y': &impl Trait<u64>
+ 203..210 'y.foo()': u64
+ 216..217 'z': impl Trait<u64>
+ 216..223 'z.foo()': u64
+ 229..230 'x': impl Trait<u64>
+ 229..237 'x.foo2()': i64
+ 243..244 'y': &impl Trait<u64>
+ 243..251 'y.foo2()': i64
+ 257..258 'z': impl Trait<u64>
+ 257..265 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn simple_return_pos_impl_trait() {
+ cov_mark::check!(lower_rpit);
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+fn bar() -> impl Trait<u64> { loop {} }
+
+fn test() {
+ let a = bar();
+ a.foo();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 71..82 '{ loop {} }': !
+ 73..80 'loop {}': !
+ 78..80 '{}': ()
+ 94..129 '{ ...o(); }': ()
+ 104..105 'a': impl Trait<u64>
+ 108..111 'bar': fn bar() -> impl Trait<u64>
+ 108..113 'bar()': impl Trait<u64>
+ 119..120 'a': impl Trait<u64>
+ 119..126 'a.foo()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn more_return_pos_impl_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Iterator {
+ type Item;
+ fn next(&mut self) -> Self::Item;
+}
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>) { loop {} }
+fn baz<T>(t: T) -> (impl Iterator<Item = impl Trait<T>>, impl Trait<T>) { loop {} }
+
+fn test() {
+ let (a, b) = bar();
+ a.next().foo();
+ b.foo();
+ let (c, d) = baz(1u128);
+ c.next().foo();
+ d.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &mut Self
+ 101..105 'self': &Self
+ 184..195 '{ loop {} }': ({unknown}, {unknown})
+ 186..193 'loop {}': !
+ 191..193 '{}': ()
+ 206..207 't': T
+ 268..279 '{ loop {} }': ({unknown}, {unknown})
+ 270..277 'loop {}': !
+ 275..277 '{}': ()
+ 291..413 '{ ...o(); }': ()
+ 301..307 '(a, b)': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 302..303 'a': impl Iterator<Item = impl Trait<u32>>
+ 305..306 'b': impl Trait<u64>
+ 310..313 'bar': fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 310..315 'bar()': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 321..322 'a': impl Iterator<Item = impl Trait<u32>>
+ 321..329 'a.next()': impl Trait<u32>
+ 321..335 'a.next().foo()': u32
+ 341..342 'b': impl Trait<u64>
+ 341..348 'b.foo()': u64
+ 358..364 '(c, d)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 359..360 'c': impl Iterator<Item = impl Trait<u128>>
+ 362..363 'd': impl Trait<u128>
+ 367..370 'baz': fn baz<u128>(u128) -> (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 367..377 'baz(1u128)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 371..376 '1u128': u128
+ 383..384 'c': impl Iterator<Item = impl Trait<u128>>
+ 383..391 'c.next()': impl Trait<u128>
+ 383..397 'c.next().foo()': u128
+ 403..404 'd': impl Trait<u128>
+ 403..410 'd.foo()': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_from_return_pos_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, sized
+trait Trait<T> {}
+struct Bar<T>(T);
+impl<T> Trait<T> for Bar<T> {}
+fn foo<const C: u8, T>() -> (impl FnOnce(&str, T), impl Trait<u8>) {
+ (|input, t| {}, Bar(C))
+}
+"#,
+ expect![[r#"
+ 134..165 '{ ...(C)) }': (|&str, T| -> (), Bar<u8>)
+ 140..163 '(|inpu...ar(C))': (|&str, T| -> (), Bar<u8>)
+ 141..154 '|input, t| {}': |&str, T| -> ()
+ 142..147 'input': &str
+ 149..150 't': T
+ 152..154 '{}': ()
+ 156..159 'Bar': Bar<u8>(u8) -> Bar<u8>
+ 156..162 'Bar(C)': Bar<u8>
+ 160..161 'C': u8
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar() -> dyn Trait<u64> {}
+
+fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 97..99 '{}': dyn Trait<u64>
+ 109..110 'x': dyn Trait<u64>
+ 128..129 'y': &dyn Trait<u64>
+ 148..265 '{ ...2(); }': ()
+ 154..155 'x': dyn Trait<u64>
+ 161..162 'y': &dyn Trait<u64>
+ 172..173 'z': dyn Trait<u64>
+ 176..179 'bar': fn bar() -> dyn Trait<u64>
+ 176..181 'bar()': dyn Trait<u64>
+ 187..188 'x': dyn Trait<u64>
+ 187..194 'x.foo()': u64
+ 200..201 'y': &dyn Trait<u64>
+ 200..207 'y.foo()': u64
+ 213..214 'z': dyn Trait<u64>
+ 213..220 'z.foo()': u64
+ 226..227 'x': dyn Trait<u64>
+ 226..234 'x.foo2()': i64
+ 240..241 'y': &dyn Trait<u64>
+ 240..248 'y.foo2()': i64
+ 254..255 'z': dyn Trait<u64>
+ 254..262 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_in_impl() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T, U> {
+ fn foo(&self) -> (T, U);
+}
+struct S<T, U> {}
+impl<T, U> S<T, U> {
+ fn bar(&self) -> &dyn Trait<T, U> { loop {} }
+}
+trait Trait2<T, U> {
+ fn baz(&self) -> (T, U);
+}
+impl<T, U> Trait2<T, U> for dyn Trait<T, U> { }
+
+fn test(s: S<u32, i32>) {
+ s.bar().baz();
+}"#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 102..106 'self': &S<T, U>
+ 128..139 '{ loop {} }': &dyn Trait<T, U>
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 175..179 'self': &Self
+ 251..252 's': S<u32, i32>
+ 267..289 '{ ...z(); }': ()
+ 273..274 's': S<u32, i32>
+ 273..280 's.bar()': &dyn Trait<u32, i32>
+ 273..286 's.bar().baz()': (u32, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_bare() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {
+ fn foo(&self) -> u64;
+}
+fn bar() -> Trait {}
+
+fn test(x: Trait, y: &Trait) -> u64 {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+}"#,
+ expect![[r#"
+ 26..30 'self': &Self
+ 60..62 '{}': dyn Trait
+ 72..73 'x': dyn Trait
+ 82..83 'y': &dyn Trait
+ 100..175 '{ ...o(); }': u64
+ 106..107 'x': dyn Trait
+ 113..114 'y': &dyn Trait
+ 124..125 'z': dyn Trait
+ 128..131 'bar': fn bar() -> dyn Trait
+ 128..133 'bar()': dyn Trait
+ 139..140 'x': dyn Trait
+ 139..146 'x.foo()': u64
+ 152..153 'y': &dyn Trait
+ 152..159 'y.foo()': u64
+ 165..166 'z': dyn Trait
+ 165..172 'z.foo()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn weird_bounds() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn test(
+ a: impl Trait + 'lifetime,
+ b: impl 'lifetime,
+ c: impl (Trait),
+ d: impl ('lifetime),
+ e: impl ?Sized,
+ f: impl Trait + ?Sized
+) {}
+"#,
+ expect![[r#"
+ 28..29 'a': impl Trait
+ 59..60 'b': impl Sized
+ 82..83 'c': impl Trait
+ 103..104 'd': impl Sized
+ 128..129 'e': impl ?Sized
+ 148..149 'f': impl Trait + ?Sized
+ 173..175 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn error_bound_chalk() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self) -> u32 { 0 }
+}
+
+fn test(x: (impl Trait + UnknownTrait)) {
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn assoc_type_bindings() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {
+ type Type;
+}
+
+fn get<T: Trait>(t: T) -> <T as Trait>::Type {}
+fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+struct S<T>;
+impl<T> Trait for S<T> { type Type = T; }
+
+fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
+ get(x);
+ get2(x);
+ get(y);
+ get2(y);
+ get(set(S));
+ get2(set(S));
+ get2(S::<str>);
+}"#,
+ expect![[r#"
+ 49..50 't': T
+ 77..79 '{}': Trait::Type<T>
+ 111..112 't': T
+ 122..124 '{}': U
+ 154..155 't': T
+ 165..168 '{t}': T
+ 166..167 't': T
+ 256..257 'x': T
+ 262..263 'y': impl Trait<Type = i64>
+ 289..397 '{ ...r>); }': ()
+ 295..298 'get': fn get<T>(T) -> <T as Trait>::Type
+ 295..301 'get(x)': u32
+ 299..300 'x': T
+ 307..311 'get2': fn get2<u32, T>(T) -> u32
+ 307..314 'get2(x)': u32
+ 312..313 'x': T
+ 320..323 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
+ 320..326 'get(y)': i64
+ 324..325 'y': impl Trait<Type = i64>
+ 332..336 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
+ 332..339 'get2(y)': i64
+ 337..338 'y': impl Trait<Type = i64>
+ 345..348 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
+ 345..356 'get(set(S))': u64
+ 349..352 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 349..355 'set(S)': S<u64>
+ 353..354 'S': S<u64>
+ 362..366 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 362..374 'get2(set(S))': u64
+ 367..370 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 367..373 'set(S)': S<u64>
+ 371..372 'S': S<u64>
+ 380..384 'get2': fn get2<str, S<str>>(S<str>) -> str
+ 380..394 'get2(S::<str>)': str
+ 385..393 'S::<str>': S<str>
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait_assoc_binding_projection_bug() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub trait Language {
+ type Kind;
+}
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+ type Kind = SyntaxKind;
+}
+struct SyntaxNode<L> {}
+fn foo() -> impl Iterator<Item = SyntaxNode<RustLanguage>> {}
+
+trait Clone {
+ fn clone(&self) -> Self;
+}
+
+fn api_walkthrough() {
+ for node in foo() {
+ node.clone();
+ } //^^^^^^^^^^^^ {unknown}
+}
+"#,
+ );
+}
+
+#[test]
+fn projection_eq_within_chalk() {
+ check_infer(
+ r#"
+trait Trait1 {
+ type Type;
+}
+trait Trait2<T> {
+ fn foo(self) -> T;
+}
+impl<T, U> Trait2<T> for U where U: Trait1<Type = T> {}
+
+fn test<T: Trait1<Type = u32>>(x: T) {
+ x.foo();
+}"#,
+ expect![[r#"
+ 61..65 'self': Self
+ 163..164 'x': T
+ 169..185 '{ ...o(); }': ()
+ 175..176 'x': T
+ 175..182 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn where_clause_trait_in_scope_for_method_resolution() {
+ check_types(
+ r#"
+mod foo {
+ trait Trait {
+ fn foo(&self) -> u32 { 0 }
+ }
+}
+
+fn test<T: foo::Trait>(x: T) {
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn super_trait_method_resolution() {
+ check_infer(
+ r#"
+mod foo {
+ trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+}
+trait Trait1: foo::SuperTrait {}
+trait Trait2 where Self: foo::SuperTrait {}
+
+fn test<T: Trait1, U: Trait2>(x: T, y: U) {
+ x.foo();
+ y.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &Self
+ 62..64 '{}': u32
+ 181..182 'x': T
+ 187..188 'y': U
+ 193..222 '{ ...o(); }': ()
+ 199..200 'x': T
+ 199..206 'x.foo()': u32
+ 212..213 'y': U
+ 212..219 'y.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_impl_trait_method_resolution() {
+ check_infer(
+ r#"
+//- minicore: sized
+mod foo {
+ trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+}
+trait Trait1: foo::SuperTrait {}
+
+fn test(x: &impl Trait1) {
+ x.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &Self
+ 62..64 '{}': u32
+ 115..116 'x': &impl Trait1
+ 132..148 '{ ...o(); }': ()
+ 138..139 'x': &impl Trait1
+ 138..145 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_cycle() {
+ // This just needs to not crash
+ check_infer(
+ r#"
+ trait A: B {}
+ trait B: A {}
+
+ fn test<T: A>(x: T) {
+ x.foo();
+ }
+ "#,
+ expect![[r#"
+ 43..44 'x': T
+ 49..65 '{ ...o(); }': ()
+ 55..56 'x': T
+ 55..62 'x.foo()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_assoc_type_bounds() {
+ check_infer(
+ r#"
+trait SuperTrait { type Type; }
+trait Trait where Self: SuperTrait {}
+
+fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+struct S<T>;
+impl<T> SuperTrait for S<T> { type Type = T; }
+impl<T> Trait for S<T> {}
+
+fn test() {
+ get2(set(S));
+}"#,
+ expect![[r#"
+ 102..103 't': T
+ 113..115 '{}': U
+ 145..146 't': T
+ 156..159 '{t}': T
+ 157..158 't': T
+ 258..279 '{ ...S)); }': ()
+ 264..268 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 264..276 'get2(set(S))': u64
+ 269..272 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 269..275 'set(S)': S<u64>
+ 273..274 'S': S<u64>
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait() {
+ check_infer_with_mismatches(
+ r#"
+trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> <Self as FnOnce<Args>>::Output;
+}
+
+fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
+ f.call_once((1, 2));
+}"#,
+ expect![[r#"
+ 56..60 'self': Self
+ 62..66 'args': Args
+ 149..150 'f': F
+ 155..183 '{ ...2)); }': ()
+ 161..162 'f': F
+ 161..180 'f.call...1, 2))': u128
+ 173..179 '(1, 2)': (u32, u64)
+ 174..175 '1': u32
+ 177..178 '2': u64
+ "#]],
+ );
+}
+
+#[test]
+fn fn_ptr_and_item() {
+ check_infer_with_mismatches(
+ r#"
+#[lang="fn_once"]
+trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> Self::Output;
+}
+
+trait Foo<T> {
+ fn foo(&self) -> T;
+}
+
+struct Bar<T>(T);
+
+impl<A1, R, F: FnOnce(A1) -> R> Foo<(A1, R)> for Bar<F> {
+ fn foo(&self) -> (A1, R) { loop {} }
+}
+
+enum Opt<T> { None, Some(T) }
+impl<T> Opt<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Opt<U> { loop {} }
+}
+
+fn test() {
+ let bar: Bar<fn(u8) -> u32>;
+ bar.foo();
+
+ let opt: Opt<u8>;
+ let f: fn(u8) -> u32;
+ opt.map(f);
+}"#,
+ expect![[r#"
+ 74..78 'self': Self
+ 80..84 'args': Args
+ 139..143 'self': &Self
+ 243..247 'self': &Bar<F>
+ 260..271 '{ loop {} }': (A1, R)
+ 262..269 'loop {}': !
+ 267..269 '{}': ()
+ 355..359 'self': Opt<T>
+ 361..362 'f': F
+ 377..388 '{ loop {} }': Opt<U>
+ 379..386 'loop {}': !
+ 384..386 '{}': ()
+ 402..518 '{ ...(f); }': ()
+ 412..415 'bar': Bar<fn(u8) -> u32>
+ 441..444 'bar': Bar<fn(u8) -> u32>
+ 441..450 'bar.foo()': (u8, u32)
+ 461..464 'opt': Opt<u8>
+ 483..484 'f': fn(u8) -> u32
+ 505..508 'opt': Opt<u8>
+ 505..515 'opt.map(f)': Opt<u32>
+ 513..514 'f': fn(u8) -> u32
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait_deref_with_ty_default() {
+ check_infer(
+ r#"
+//- minicore: deref, fn
+struct Foo;
+
+impl Foo {
+ fn foo(&self) -> usize {}
+}
+
+struct Lazy<T, F = fn() -> T>(F);
+
+impl<T, F> Lazy<T, F> {
+ pub fn new(f: F) -> Lazy<T, F> {}
+}
+
+impl<T, F: FnOnce() -> T> core::ops::Deref for Lazy<T, F> {
+ type Target = T;
+}
+
+fn test() {
+ let lazy1: Lazy<Foo, _> = Lazy::new(|| Foo);
+ let r1 = lazy1.foo();
+
+ fn make_foo_fn() -> Foo {}
+ let make_foo_fn_ptr: fn() -> Foo = make_foo_fn;
+ let lazy2: Lazy<Foo, _> = Lazy::new(make_foo_fn_ptr);
+ let r2 = lazy2.foo();
+}"#,
+ expect![[r#"
+ 36..40 'self': &Foo
+ 51..53 '{}': usize
+ 131..132 'f': F
+ 151..153 '{}': Lazy<T, F>
+ 251..497 '{ ...o(); }': ()
+ 261..266 'lazy1': Lazy<Foo, || -> Foo>
+ 283..292 'Lazy::new': fn new<Foo, || -> Foo>(|| -> Foo) -> Lazy<Foo, || -> Foo>
+ 283..300 'Lazy::...| Foo)': Lazy<Foo, || -> Foo>
+ 293..299 '|| Foo': || -> Foo
+ 296..299 'Foo': Foo
+ 310..312 'r1': usize
+ 315..320 'lazy1': Lazy<Foo, || -> Foo>
+ 315..326 'lazy1.foo()': usize
+ 368..383 'make_foo_fn_ptr': fn() -> Foo
+ 399..410 'make_foo_fn': fn make_foo_fn() -> Foo
+ 420..425 'lazy2': Lazy<Foo, fn() -> Foo>
+ 442..451 'Lazy::new': fn new<Foo, fn() -> Foo>(fn() -> Foo) -> Lazy<Foo, fn() -> Foo>
+ 442..468 'Lazy::...n_ptr)': Lazy<Foo, fn() -> Foo>
+ 452..467 'make_foo_fn_ptr': fn() -> Foo
+ 478..480 'r2': usize
+ 483..488 'lazy2': Lazy<Foo, fn() -> Foo>
+ 483..494 'lazy2.foo()': usize
+ 357..359 '{}': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn closure_1() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+enum Option<T> { Some(T), None }
+impl<T> Option<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Option<U> { loop {} }
+}
+
+fn test() {
+ let x = Option::Some(1u32);
+ x.map(|v| v + 1);
+ x.map(|_v| 1u64);
+ let y: Option<i64> = x.map(|_v| 1);
+}"#,
+ expect![[r#"
+ 86..90 'self': Option<T>
+ 92..93 'f': F
+ 111..122 '{ loop {} }': Option<U>
+ 113..120 'loop {}': !
+ 118..120 '{}': ()
+ 136..255 '{ ... 1); }': ()
+ 146..147 'x': Option<u32>
+ 150..162 'Option::Some': Some<u32>(u32) -> Option<u32>
+ 150..168 'Option...(1u32)': Option<u32>
+ 163..167 '1u32': u32
+ 174..175 'x': Option<u32>
+ 174..190 'x.map(...v + 1)': Option<u32>
+ 180..189 '|v| v + 1': |u32| -> u32
+ 181..182 'v': u32
+ 184..185 'v': u32
+ 184..189 'v + 1': u32
+ 188..189 '1': u32
+ 196..197 'x': Option<u32>
+ 196..212 'x.map(... 1u64)': Option<u64>
+ 202..211 '|_v| 1u64': |u32| -> u64
+ 203..205 '_v': u32
+ 207..211 '1u64': u64
+ 222..223 'y': Option<i64>
+ 239..240 'x': Option<u32>
+ 239..252 'x.map(|_v| 1)': Option<i64>
+ 245..251 '|_v| 1': |u32| -> i64
+ 246..248 '_v': u32
+ 250..251 '1': i64
+ "#]],
+ );
+}
+
+#[test]
+fn closure_2() {
+ check_types(
+ r#"
+//- minicore: add, fn
+
+impl core::ops::Add for u64 {
+ type Output = Self;
+ fn add(self, rhs: u64) -> Self::Output {0}
+}
+
+impl core::ops::Add for u128 {
+ type Output = Self;
+ fn add(self, rhs: u128) -> Self::Output {0}
+}
+
+fn test<F: FnOnce(u32) -> u64>(f: F) {
+ f(1);
+ // ^ u32
+ //^^^^ u64
+ let g = |v| v + 1;
+ //^^^^^ u64
+ //^^^^^^^^^ |u64| -> u64
+ g(1u64);
+ //^^^^^^^ u64
+ let h = |v| 1u128 + v;
+ //^^^^^^^^^^^^^ |u128| -> u128
+}"#,
+ );
+}
+
+#[test]
+fn closure_as_argument_inference_order() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+fn foo1<T, U, F: FnOnce(T) -> U>(x: T, f: F) -> U { loop {} }
+fn foo2<T, U, F: FnOnce(T) -> U>(f: F, x: T) -> U { loop {} }
+
+struct S;
+impl S {
+ fn method(self) -> u64;
+
+ fn foo1<T, U, F: FnOnce(T) -> U>(self, x: T, f: F) -> U { loop {} }
+ fn foo2<T, U, F: FnOnce(T) -> U>(self, f: F, x: T) -> U { loop {} }
+}
+
+fn test() {
+ let x1 = foo1(S, |s| s.method());
+ let x2 = foo2(|s| s.method(), S);
+ let x3 = S.foo1(S, |s| s.method());
+ let x4 = S.foo2(|s| s.method(), S);
+}"#,
+ expect![[r#"
+ 33..34 'x': T
+ 39..40 'f': F
+ 50..61 '{ loop {} }': U
+ 52..59 'loop {}': !
+ 57..59 '{}': ()
+ 95..96 'f': F
+ 101..102 'x': T
+ 112..123 '{ loop {} }': U
+ 114..121 'loop {}': !
+ 119..121 '{}': ()
+ 158..162 'self': S
+ 210..214 'self': S
+ 216..217 'x': T
+ 222..223 'f': F
+ 233..244 '{ loop {} }': U
+ 235..242 'loop {}': !
+ 240..242 '{}': ()
+ 282..286 'self': S
+ 288..289 'f': F
+ 294..295 'x': T
+ 305..316 '{ loop {} }': U
+ 307..314 'loop {}': !
+ 312..314 '{}': ()
+ 330..489 '{ ... S); }': ()
+ 340..342 'x1': u64
+ 345..349 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64
+ 345..368 'foo1(S...hod())': u64
+ 350..351 'S': S
+ 353..367 '|s| s.method()': |S| -> u64
+ 354..355 's': S
+ 357..358 's': S
+ 357..367 's.method()': u64
+ 378..380 'x2': u64
+ 383..387 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64
+ 383..406 'foo2(|...(), S)': u64
+ 388..402 '|s| s.method()': |S| -> u64
+ 389..390 's': S
+ 392..393 's': S
+ 392..402 's.method()': u64
+ 404..405 'S': S
+ 416..418 'x3': u64
+ 421..422 'S': S
+ 421..446 'S.foo1...hod())': u64
+ 428..429 'S': S
+ 431..445 '|s| s.method()': |S| -> u64
+ 432..433 's': S
+ 435..436 's': S
+ 435..445 's.method()': u64
+ 456..458 'x4': u64
+ 461..462 'S': S
+ 461..486 'S.foo2...(), S)': u64
+ 468..482 '|s| s.method()': |S| -> u64
+ 469..470 's': S
+ 472..473 's': S
+ 472..482 's.method()': u64
+ 484..485 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn fn_item_fn_trait() {
+ check_types(
+ r#"
+//- minicore: fn
+struct S;
+
+fn foo() -> S { S }
+
+fn takes_closure<U, F: FnOnce() -> U>(f: F) -> U { f() }
+
+fn test() {
+ takes_closure(foo);
+} //^^^^^^^^^^^^^^^^^^ S
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_1() {
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T: Trait>() where T::Item: Trait2 {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_2() {
+ check_types(
+ r#"
+trait Trait<T> {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_on_impl_self() {
+ check_infer(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+
+ fn f(&self, x: Self::Item);
+}
+
+struct S;
+
+impl Trait for S {
+ type Item = u32;
+ fn f(&self, x: Self::Item) { let y = x; }
+}
+
+struct S2;
+
+impl Trait for S2 {
+ type Item = i32;
+ fn f(&self, x: <Self>::Item) { let y = x; }
+}"#,
+ expect![[r#"
+ 40..44 'self': &Self
+ 46..47 'x': Trait::Item<Self>
+ 126..130 'self': &S
+ 132..133 'x': u32
+ 147..161 '{ let y = x; }': ()
+ 153..154 'y': u32
+ 157..158 'x': u32
+ 228..232 'self': &S2
+ 234..235 'x': i32
+ 251..265 '{ let y = x; }': ()
+ 257..258 'y': i32
+ 261..262 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn unselected_projection_on_trait_self() {
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+
+ fn f(&self) -> Self::Item { loop {} }
+}
+
+struct S;
+impl Trait for S {
+ type Item = u32;
+}
+
+fn test() {
+ S.f();
+} //^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_chalk_fold() {
+ check_types(
+ r#"
+trait Interner {}
+trait Fold<I: Interner, TI = I> {
+ type Result;
+}
+
+struct Ty<I: Interner> {}
+impl<I: Interner, TI: Interner> Fold<I, TI> for Ty<I> {
+ type Result = Ty<TI>;
+}
+
+fn fold<I: Interner, T>(interner: &I, t: T) -> T::Result
+where
+ T: Fold<I, I>,
+{
+ loop {}
+}
+
+fn foo<I: Interner>(interner: &I, t: Ty<I>) {
+ fold(interner, t);
+} //^^^^^^^^^^^^^^^^^ Ty<I>
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty() {
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self);
+}
+
+struct S;
+
+impl Trait<Self> for S {}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty_cycle() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self);
+}
+
+struct S<T>;
+
+impl Trait for S<Self> {}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_1() {
+ // This is not a cycle, because the `T: Trait2<T::Item>` bound depends only on the `T: Trait`
+ // bound, not on itself (since only `Trait` can define `Item`).
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+}
+
+trait Trait2<T> {}
+
+fn test<T: Trait>() where T: Trait2<T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ Trait::Item<T>
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_2() {
+ // this is a legitimate cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Trait<T> {
+ type Item;
+}
+
+fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_3() {
+ // this is a cycle for rustc; we currently accept it
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+ type OtherItem;
+}
+
+fn test<T>() where T: Trait<OtherItem = T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ Trait::Item<T>
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_no_cycle() {
+ // this is not a cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Index {
+ type Output;
+}
+
+type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
+
+pub trait UnificationStoreBase: Index<Output = Key<Self>> {
+ type Key;
+
+ fn len(&self) -> usize;
+}
+
+pub trait UnificationStoreMut: UnificationStoreBase {
+ fn push(&mut self, value: Self::Key);
+}
+
+fn test<T>(t: T) where T: UnificationStoreMut {
+ let x;
+ t.push(x);
+ let y: Key<T>;
+ (x, y);
+} //^^^^^^ (UnificationStoreBase::Key<T>, UnificationStoreBase::Key<T>)
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_1() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// workaround for Chalk assoc type normalization problems
+pub struct S<T>;
+impl<T: Iterator> Iterator for S<T> {
+ type Item = <T as Iterator>::Item;
+}
+
+fn test<I: Iterator<Item: OtherTrait<u32>>>() {
+ let x: <S<I> as Iterator>::Item;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_2() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+
+fn test<I: Iterator<Item: Iterator<Item = u32>>>() {
+ let x: <<I as Iterator>::Item as Iterator>::Item;
+ x;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn proc_macro_server_types() {
+ check_infer(
+ r#"
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ TokenStream {
+ fn new() -> $S::TokenStream;
+ },
+ Group {
+ },
+ }
+ };
+}
+macro_rules! associated_item {
+ (type TokenStream) =>
+ (type TokenStream: 'static;);
+ (type Group) =>
+ (type Group: 'static;);
+ ($($item:tt)*) => ($($item)*;)
+}
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ pub trait Types {
+ $(associated_item!(type $name);)*
+ }
+
+ $(pub trait $name: Types {
+ $(associated_item!(fn $method($($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+
+with_api!(Self, self_, declare_server_traits);
+struct G {}
+struct T {}
+struct RustAnalyzer;
+impl Types for RustAnalyzer {
+ type TokenStream = T;
+ type Group = G;
+}
+
+fn make<T>() -> T { loop {} }
+impl TokenStream for RustAnalyzer {
+ fn new() -> Self::TokenStream {
+ let group: Self::Group = make();
+ make()
+ }
+}"#,
+ expect![[r#"
+ 1075..1086 '{ loop {} }': T
+ 1077..1084 'loop {}': !
+ 1082..1084 '{}': ()
+ 1157..1220 '{ ... }': T
+ 1171..1176 'group': G
+ 1192..1196 'make': fn make<G>() -> G
+ 1192..1198 'make()': G
+ 1208..1212 'make': fn make<T>() -> T
+ 1208..1214 'make()': T
+ "#]],
+ );
+}
+
+#[test]
+fn unify_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait<T> {}
+
+fn foo(x: impl Trait<u32>) { loop {} }
+fn bar<T>(x: impl Trait<T>) -> T { loop {} }
+
+struct S<T>(T);
+impl<T> Trait<T> for S<T> {}
+
+fn default<T>() -> T { loop {} }
+
+fn test() -> impl Trait<i32> {
+ let s1 = S(default());
+ foo(s1);
+ let x: i32 = bar(S(default()));
+ S(default())
+}"#,
+ expect![[r#"
+ 26..27 'x': impl Trait<u32>
+ 46..57 '{ loop {} }': ()
+ 48..55 'loop {}': !
+ 53..55 '{}': ()
+ 68..69 'x': impl Trait<T>
+ 91..102 '{ loop {} }': T
+ 93..100 'loop {}': !
+ 98..100 '{}': ()
+ 171..182 '{ loop {} }': T
+ 173..180 'loop {}': !
+ 178..180 '{}': ()
+ 213..309 '{ ...t()) }': S<i32>
+ 223..225 's1': S<u32>
+ 228..229 'S': S<u32>(u32) -> S<u32>
+ 228..240 'S(default())': S<u32>
+ 230..237 'default': fn default<u32>() -> u32
+ 230..239 'default()': u32
+ 246..249 'foo': fn foo(S<u32>)
+ 246..253 'foo(s1)': ()
+ 250..252 's1': S<u32>
+ 263..264 'x': i32
+ 272..275 'bar': fn bar<i32>(S<i32>) -> i32
+ 272..289 'bar(S(...lt()))': i32
+ 276..277 'S': S<i32>(i32) -> S<i32>
+ 276..288 'S(default())': S<i32>
+ 278..285 'default': fn default<i32>() -> i32
+ 278..287 'default()': i32
+ 295..296 'S': S<i32>(i32) -> S<i32>
+ 295..307 'S(default())': S<i32>
+ 297..304 'default': fn default<i32>() -> i32
+ 297..306 'default()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn assoc_types_from_bounds() {
+ check_infer(
+ r#"
+//- minicore: fn
+trait T {
+ type O;
+}
+
+impl T for () {
+ type O = ();
+}
+
+fn f<X, F>(_v: F)
+where
+ X: T,
+ F: FnOnce(&X::O),
+{ }
+
+fn main() {
+ f::<(), _>(|z| { z; });
+}"#,
+ expect![[r#"
+ 72..74 '_v': F
+ 117..120 '{ }': ()
+ 132..163 '{ ... }); }': ()
+ 138..148 'f::<(), _>': fn f<(), |&()| -> ()>(|&()| -> ())
+ 138..160 'f::<()... z; })': ()
+ 149..159 '|z| { z; }': |&()| -> ()
+ 150..151 'z': &()
+ 153..159 '{ z; }': ()
+ 155..156 'z': &()
+ "#]],
+ );
+}
+
+#[test]
+fn associated_type_bound() {
+ check_types(
+ r#"
+pub trait Trait {
+ type Item: OtherTrait<u32>;
+}
+pub trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// this is just a workaround for chalk#234
+pub struct S<T>;
+impl<T: Trait> Trait for S<T> {
+ type Item = <T as Trait>::Item;
+}
+
+fn test<T: Trait>() {
+ let y: <S<T> as Trait>::Item = no_matter;
+ y.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_through_chalk() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Box<T: ?Sized> {}
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+}
+trait Trait {
+ fn foo(&self);
+}
+
+fn test(x: Box<dyn Trait>) {
+ x.foo();
+} //^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn string_to_owned() {
+ check_types(
+ r#"
+struct String {}
+pub trait ToOwned {
+ type Owned;
+ fn to_owned(&self) -> Self::Owned;
+}
+impl ToOwned for str {
+ type Owned = String;
+}
+fn test() {
+ "foo".to_owned();
+} //^^^^^^^^^^^^^^^^ String
+"#,
+ );
+}
+
+#[test]
+fn iterator_chain() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, option
+pub trait Iterator {
+ type Item;
+
+ fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
+ where
+ F: FnMut(Self::Item) -> Option<B>,
+ { loop {} }
+
+ fn for_each<F>(self, f: F)
+ where
+ F: FnMut(Self::Item),
+ { loop {} }
+}
+
+pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ fn into_iter(self) -> Self::IntoIter;
+}
+
+pub struct FilterMap<I, F> { }
+impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
+where
+ F: FnMut(I::Item) -> Option<B>,
+{
+ type Item = B;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+
+ fn into_iter(self) -> I {
+ self
+ }
+}
+
+struct Vec<T> {}
+impl<T> Vec<T> {
+ fn new() -> Self { loop {} }
+}
+
+impl<T> IntoIterator for Vec<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+}
+
+pub struct IntoIter<T> { }
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+}
+
+fn main() {
+ Vec::<i32>::new().into_iter()
+ .filter_map(|x| if x > 0 { Some(x as u32) } else { None })
+ .for_each(|y| { y; });
+}"#,
+ expect![[r#"
+ 61..65 'self': Self
+ 67..68 'f': F
+ 152..163 '{ loop {} }': FilterMap<Self, F>
+ 154..161 'loop {}': !
+ 159..161 '{}': ()
+ 184..188 'self': Self
+ 190..191 'f': F
+ 240..251 '{ loop {} }': ()
+ 242..249 'loop {}': !
+ 247..249 '{}': ()
+ 360..364 'self': Self
+ 689..693 'self': I
+ 700..720 '{ ... }': I
+ 710..714 'self': I
+ 779..790 '{ loop {} }': Vec<T>
+ 781..788 'loop {}': !
+ 786..788 '{}': ()
+ 977..1104 '{ ... }); }': ()
+ 983..998 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
+ 983..1000 'Vec::<...:new()': Vec<i32>
+ 983..1012 'Vec::<...iter()': IntoIter<i32>
+ 983..1075 'Vec::<...one })': FilterMap<IntoIter<i32>, |i32| -> Option<u32>>
+ 983..1101 'Vec::<... y; })': ()
+ 1029..1074 '|x| if...None }': |i32| -> Option<u32>
+ 1030..1031 'x': i32
+ 1033..1074 'if x >...None }': Option<u32>
+ 1036..1037 'x': i32
+ 1036..1041 'x > 0': bool
+ 1040..1041 '0': i32
+ 1042..1060 '{ Some...u32) }': Option<u32>
+ 1044..1048 'Some': Some<u32>(u32) -> Option<u32>
+ 1044..1058 'Some(x as u32)': Option<u32>
+ 1049..1050 'x': i32
+ 1049..1057 'x as u32': u32
+ 1066..1074 '{ None }': Option<u32>
+ 1068..1072 'None': Option<u32>
+ 1090..1100 '|y| { y; }': |u32| -> ()
+ 1091..1092 'y': u32
+ 1094..1100 '{ y; }': ()
+ 1096..1097 'y': u32
+ "#]],
+ );
+}
+
+#[test]
+fn nested_assoc() {
+ check_types(
+ r#"
+struct Bar;
+struct Foo;
+
+trait A {
+ type OutputA;
+}
+
+impl A for Bar {
+ type OutputA = Foo;
+}
+
+trait B {
+ type Output;
+ fn foo() -> Self::Output;
+}
+
+impl<T:A> B for T {
+ type Output = T::OutputA;
+ fn foo() -> Self::Output { loop {} }
+}
+
+fn main() {
+ Bar::foo();
+} //^^^^^^^^^^ Foo
+"#,
+ );
+}
+
+#[test]
+fn trait_object_no_coercion() {
+ check_infer_with_mismatches(
+ r#"
+trait Foo {}
+
+fn foo(x: &dyn Foo) {}
+
+fn test(x: &dyn Foo) {
+ foo(x);
+}"#,
+ expect![[r#"
+ 21..22 'x': &dyn Foo
+ 34..36 '{}': ()
+ 46..47 'x': &dyn Foo
+ 59..74 '{ foo(x); }': ()
+ 65..68 'foo': fn foo(&dyn Foo)
+ 65..71 'foo(x)': ()
+ 69..70 'x': &dyn Foo
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+struct IsCopy;
+impl Copy for IsCopy {}
+struct NotCopy;
+
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test() {
+ IsCopy.test();
+ NotCopy.test();
+ (IsCopy, IsCopy).test();
+ (IsCopy, NotCopy).test();
+}"#,
+ expect![[r#"
+ 78..82 'self': &Self
+ 134..235 '{ ...t(); }': ()
+ 140..146 'IsCopy': IsCopy
+ 140..153 'IsCopy.test()': bool
+ 159..166 'NotCopy': NotCopy
+ 159..173 'NotCopy.test()': {unknown}
+ 179..195 '(IsCop...sCopy)': (IsCopy, IsCopy)
+ 179..202 '(IsCop...test()': bool
+ 180..186 'IsCopy': IsCopy
+ 188..194 'IsCopy': IsCopy
+ 208..225 '(IsCop...tCopy)': (IsCopy, NotCopy)
+ 208..232 '(IsCop...test()': {unknown}
+ 209..215 'IsCopy': IsCopy
+ 217..224 'NotCopy': NotCopy
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_def_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+fn foo() {}
+fn bar<T: Copy>(T) -> T {}
+struct Struct(usize);
+enum Enum { Variant(usize) }
+
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test() {
+ foo.test();
+ bar.test();
+ Struct.test();
+ Enum::Variant.test();
+}"#,
+ expect![[r#"
+ 9..11 '{}': ()
+ 28..29 'T': {unknown}
+ 36..38 '{}': T
+ 36..38: expected T, got ()
+ 113..117 'self': &Self
+ 169..249 '{ ...t(); }': ()
+ 175..178 'foo': fn foo()
+ 175..185 'foo.test()': bool
+ 191..194 'bar': fn bar<{unknown}>({unknown}) -> {unknown}
+ 191..201 'bar.test()': bool
+ 207..213 'Struct': Struct(usize) -> Struct
+ 207..220 'Struct.test()': bool
+ 226..239 'Enum::Variant': Variant(usize) -> Enum
+ 226..246 'Enum::...test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_ptr_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test(f1: fn(), f2: fn(usize) -> u8, f3: fn(u8, u8) -> &u8) {
+ f1.test();
+ f2.test();
+ f3.test();
+}"#,
+ expect![[r#"
+ 22..26 'self': &Self
+ 76..78 'f1': fn()
+ 86..88 'f2': fn(usize) -> u8
+ 107..109 'f3': fn(u8, u8) -> &u8
+ 130..178 '{ ...t(); }': ()
+ 136..138 'f1': fn()
+ 136..145 'f1.test()': bool
+ 151..153 'f2': fn(usize) -> u8
+ 151..160 'f2.test()': bool
+ 166..168 'f3': fn(u8, u8) -> &u8
+ 166..175 'f3.test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_sized() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Test { fn test(&self) -> bool; }
+impl<T: Sized> Test for T {}
+
+fn test() {
+ 1u8.test();
+ (*"foo").test(); // not Sized
+ (1u8, 1u8).test();
+ (1u8, *"foo").test(); // not Sized
+}"#,
+ expect![[r#"
+ 22..26 'self': &Self
+ 79..194 '{ ...ized }': ()
+ 85..88 '1u8': u8
+ 85..95 '1u8.test()': bool
+ 101..116 '(*"foo").test()': {unknown}
+ 102..108 '*"foo"': str
+ 103..108 '"foo"': &str
+ 135..145 '(1u8, 1u8)': (u8, u8)
+ 135..152 '(1u8, ...test()': bool
+ 136..139 '1u8': u8
+ 141..144 '1u8': u8
+ 158..171 '(1u8, *"foo")': (u8, str)
+ 158..178 '(1u8, ...test()': {unknown}
+ 159..162 '1u8': u8
+ 164..170 '*"foo"': str
+ 165..170 '"foo"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn integer_range_iterate() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ for x in 0..100 { x; }
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+}
+
+pub mod iter {
+ pub trait Iterator {
+ type Item;
+ }
+
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ }
+
+ impl<T> IntoIterator for T where T: Iterator {
+ type Item = <T as Iterator>::Item;
+ type IntoIter = Self;
+ }
+}
+
+trait Step {}
+impl Step for i32 {}
+impl Step for i64 {}
+
+impl<A: Step> iter::Iterator for ops::Range<A> {
+ type Item = A;
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_closure_arg() {
+ check_infer(
+ r#"
+//- /lib.rs
+
+enum Option<T> {
+ None,
+ Some(T)
+}
+
+fn foo() {
+ let s = Option::None;
+ let f = |x: Option<i32>| {};
+ (&f)(s)
+}"#,
+ expect![[r#"
+ 52..126 '{ ...)(s) }': ()
+ 62..63 's': Option<i32>
+ 66..78 'Option::None': Option<i32>
+ 88..89 'f': |Option<i32>| -> ()
+ 92..111 '|x: Op...2>| {}': |Option<i32>| -> ()
+ 93..94 'x': Option<i32>
+ 109..111 '{}': ()
+ 117..124 '(&f)(s)': ()
+ 118..120 '&f': &|Option<i32>| -> ()
+ 119..120 'f': |Option<i32>| -> ()
+ 122..123 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_fn_param_informs_call_site_closure_signature() {
+ cov_mark::check!(dyn_fn_param_informs_call_site_closure_signature);
+ check_types(
+ r#"
+//- minicore: fn, coerce_unsized
+struct S;
+impl S {
+ fn inherent(&self) -> u8 { 0 }
+}
+fn take_dyn_fn(f: &dyn Fn(S)) {}
+
+fn f() {
+ take_dyn_fn(&|x| { x.inherent(); });
+ //^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_fn_trait_arg() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, option
+fn foo<F, T>(f: F) -> T
+where
+ F: Fn(Option<i32>) -> T,
+{
+ let s = None;
+ f(s)
+}
+"#,
+ expect![[r#"
+ 13..14 'f': F
+ 59..89 '{ ...f(s) }': T
+ 69..70 's': Option<i32>
+ 73..77 'None': Option<i32>
+ 83..84 'f': F
+ 83..87 'f(s)': T
+ 85..86 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_box_fn_arg() {
+ // The type mismatch is because we don't define Unsize and CoerceUnsized
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, deref, option
+#[lang = "owned_box"]
+pub struct Box<T: ?Sized> {
+ inner: *mut T,
+}
+
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ &self.inner
+ }
+}
+
+fn foo() {
+ let s = None;
+ let f: Box<dyn FnOnce(&Option<i32>)> = box (|ps| {});
+ f(&s);
+}"#,
+ expect![[r#"
+ 154..158 'self': &Box<T>
+ 166..193 '{ ... }': &T
+ 176..187 '&self.inner': &*mut T
+ 177..181 'self': &Box<T>
+ 177..187 'self.inner': *mut T
+ 206..296 '{ ...&s); }': ()
+ 216..217 's': Option<i32>
+ 220..224 'None': Option<i32>
+ 234..235 'f': Box<dyn FnOnce(&Option<i32>)>
+ 269..282 'box (|ps| {})': Box<|&Option<i32>| -> ()>
+ 274..281 '|ps| {}': |&Option<i32>| -> ()
+ 275..277 'ps': &Option<i32>
+ 279..281 '{}': ()
+ 288..289 'f': Box<dyn FnOnce(&Option<i32>)>
+ 288..293 'f(&s)': ()
+ 290..292 '&s': &Option<i32>
+ 291..292 's': Option<i32>
+ 269..282: expected Box<dyn FnOnce(&Option<i32>)>, got Box<|&Option<i32>| -> ()>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_dyn_fn_output() {
+ check_types(
+ r#"
+//- minicore: fn
+fn foo() {
+ let f: &dyn Fn() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn infer_dyn_fn_once_output() {
+ check_types(
+ r#"
+//- minicore: fn
+fn foo() {
+ let f: dyn FnOnce() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn variable_kinds_1() {
+ check_types(
+ r#"
+trait Trait<T> { fn get(self, t: T) -> T; }
+struct S;
+impl Trait<u128> for S {}
+impl Trait<f32> for S {}
+fn test() {
+ S.get(1);
+ //^^^^^^^^ u128
+ S.get(1.);
+ //^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn variable_kinds_2() {
+ check_types(
+ r#"
+trait Trait { fn get(self) -> Self; }
+impl Trait for u128 {}
+impl Trait for f32 {}
+fn test() {
+ 1.get();
+ //^^^^^^^ u128
+ (1.).get();
+ //^^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn underscore_import() {
+ check_types(
+ r#"
+mod tr {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+}
+
+struct Tr;
+impl crate::tr::Tr for Tr {}
+
+use crate::tr::Tr as _;
+fn test() {
+ Tr.method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn inner_use() {
+ check_types(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+fn f() {
+ use m::Tr;
+
+ ().method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_in_scope_with_inner_item() {
+ check_infer(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+use m::Tr;
+
+fn f() {
+ fn inner() {
+ ().method();
+ //^^^^^^^^^^^ u8
+ }
+}"#,
+ expect![[r#"
+ 46..50 'self': &Self
+ 58..63 '{ 0 }': u8
+ 60..61 '0': u8
+ 115..185 '{ ... } }': ()
+ 132..183 '{ ... }': ()
+ 142..144 '()': ()
+ 142..153 '().method()': u8
+ "#]],
+ );
+}
+
+#[test]
+fn inner_use_in_block() {
+ check_types(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+fn f() {
+ {
+ use m::Tr;
+
+ ().method();
+ //^^^^^^^^^^^ u8
+ }
+
+ {
+ ().method();
+ //^^^^^^^^^^^ {unknown}
+ }
+
+ ().method();
+ //^^^^^^^^^^^ {unknown}
+}
+ "#,
+ );
+}
+
+#[test]
+fn nested_inner_function_calling_self() {
+ check_infer(
+ r#"
+struct S;
+fn f() {
+ fn inner() -> S {
+ let s = inner();
+ }
+}"#,
+ expect![[r#"
+ 17..73 '{ ... } }': ()
+ 39..71 '{ ... }': S
+ 53..54 's': S
+ 57..62 'inner': fn inner() -> S
+ 57..64 'inner()': S
+ "#]],
+ )
+}
+
+#[test]
+fn infer_default_trait_type_parameter() {
+ check_infer(
+ r#"
+struct A;
+
+trait Op<RHS=Self> {
+ type Output;
+
+ fn do_op(self, rhs: RHS) -> Self::Output;
+}
+
+impl Op for A {
+ type Output = bool;
+
+ fn do_op(self, rhs: Self) -> Self::Output {
+ true
+ }
+}
+
+fn test() {
+ let x = A;
+ let y = A;
+ let r = x.do_op(y);
+}"#,
+ expect![[r#"
+ 63..67 'self': Self
+ 69..72 'rhs': RHS
+ 153..157 'self': A
+ 159..162 'rhs': A
+ 186..206 '{ ... }': bool
+ 196..200 'true': bool
+ 220..277 '{ ...(y); }': ()
+ 230..231 'x': A
+ 234..235 'A': A
+ 245..246 'y': A
+ 249..250 'A': A
+ 260..261 'r': bool
+ 264..265 'x': A
+ 264..274 'x.do_op(y)': bool
+ 272..273 'y': A
+ "#]],
+ )
+}
+
+#[test]
+fn qualified_path_as_qualified_trait() {
+ check_infer(
+ r#"
+mod foo {
+
+ pub trait Foo {
+ type Target;
+ }
+ pub trait Bar {
+ type Output;
+ fn boo() -> Self::Output {
+ loop {}
+ }
+ }
+}
+
+struct F;
+impl foo::Foo for F {
+ type Target = ();
+}
+impl foo::Bar for F {
+ type Output = <F as foo::Foo>::Target;
+}
+
+fn foo() {
+ use foo::Bar;
+ let x = <F as Bar>::boo();
+}"#,
+ expect![[r#"
+ 132..163 '{ ... }': Bar::Output<Self>
+ 146..153 'loop {}': !
+ 151..153 '{}': ()
+ 306..358 '{ ...o(); }': ()
+ 334..335 'x': ()
+ 338..353 '<F as Bar>::boo': fn boo<F>() -> <F as Bar>::Output
+ 338..355 '<F as ...:boo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn renamed_extern_crate_in_block() {
+ check_types(
+ r#"
+//- /lib.rs crate:lib deps:serde
+use serde::Deserialize;
+
+struct Foo {}
+
+const _ : () = {
+ extern crate serde as _serde;
+ impl _serde::Deserialize for Foo {
+ fn deserialize() -> u8 { 0 }
+ }
+};
+
+fn foo() {
+ Foo::deserialize();
+ //^^^^^^^^^^^^^^^^^^ u8
+}
+
+//- /serde.rs crate:serde
+
+pub trait Deserialize {
+ fn deserialize() -> u8;
+}"#,
+ );
+}
+
+#[test]
+fn bin_op_with_rhs_is_self_for_assoc_bound() {
+ check_no_mismatches(
+ r#"//- minicore: eq
+ fn repro<T>(t: T) -> bool
+where
+ T: Request,
+ T::Output: Convertable,
+{
+ let a = execute(&t).convert();
+ let b = execute(&t).convert();
+ a.eq(&b);
+ let a = execute(&t).convert2();
+ let b = execute(&t).convert2();
+ a.eq(&b)
+}
+fn execute<T>(t: &T) -> T::Output
+where
+ T: Request,
+{
+ <T as Request>::output()
+}
+trait Convertable {
+ type TraitSelf: PartialEq<Self::TraitSelf>;
+ type AssocAsDefaultSelf: PartialEq;
+ fn convert(self) -> Self::AssocAsDefaultSelf;
+ fn convert2(self) -> Self::TraitSelf;
+}
+trait Request {
+ type Output;
+ fn output() -> Self::Output;
+}
+ "#,
+ );
+}
+
+#[test]
+fn bin_op_adt_with_rhs_primitive() {
+ check_infer_with_mismatches(
+ r#"
+#[lang = "add"]
+pub trait Add<Rhs = Self> {
+ type Output;
+ fn add(self, rhs: Rhs) -> Self::Output;
+}
+
+struct Wrapper(u32);
+impl Add<u32> for Wrapper {
+ type Output = Self;
+ fn add(self, rhs: u32) -> Wrapper {
+ Wrapper(rhs)
+ }
+}
+fn main(){
+ let wrapped = Wrapper(10);
+ let num: u32 = 2;
+ let res = wrapped + num;
+
+}"#,
+ expect![[r#"
+ 72..76 'self': Self
+ 78..81 'rhs': Rhs
+ 192..196 'self': Wrapper
+ 198..201 'rhs': u32
+ 219..247 '{ ... }': Wrapper
+ 229..236 'Wrapper': Wrapper(u32) -> Wrapper
+ 229..241 'Wrapper(rhs)': Wrapper
+ 237..240 'rhs': u32
+ 259..345 '{ ...um; }': ()
+ 269..276 'wrapped': Wrapper
+ 279..286 'Wrapper': Wrapper(u32) -> Wrapper
+ 279..290 'Wrapper(10)': Wrapper
+ 287..289 '10': u32
+ 300..303 'num': u32
+ 311..312 '2': u32
+ 322..325 'res': Wrapper
+ 328..335 'wrapped': Wrapper
+ 328..341 'wrapped + num': Wrapper
+ 338..341 'num': u32
+ "#]],
+ )
+}
+
+#[test]
+fn array_length() {
+ check_infer(
+ r#"
+trait T {
+ type Output;
+ fn do_thing(&self) -> Self::Output;
+}
+
+impl T for [u8; 4] {
+ type Output = usize;
+ fn do_thing(&self) -> Self::Output {
+ 2
+ }
+}
+
+impl T for [u8; 2] {
+ type Output = u8;
+ fn do_thing(&self) -> Self::Output {
+ 2
+ }
+}
+
+fn main() {
+ let v = [0u8; 2];
+ let v2 = v.do_thing();
+ let v3 = [0u8; 4];
+ let v4 = v3.do_thing();
+}
+"#,
+ expect![[r#"
+ 44..48 'self': &Self
+ 133..137 'self': &[u8; 4]
+ 155..172 '{ ... }': usize
+ 165..166 '2': usize
+ 236..240 'self': &[u8; 2]
+ 258..275 '{ ... }': u8
+ 268..269 '2': u8
+ 289..392 '{ ...g(); }': ()
+ 299..300 'v': [u8; 2]
+ 303..311 '[0u8; 2]': [u8; 2]
+ 304..307 '0u8': u8
+ 309..310 '2': usize
+ 321..323 'v2': u8
+ 326..327 'v': [u8; 2]
+ 326..338 'v.do_thing()': u8
+ 348..350 'v3': [u8; 4]
+ 353..361 '[0u8; 4]': [u8; 4]
+ 354..357 '0u8': u8
+ 359..360 '4': usize
+ 371..373 'v4': usize
+ 376..378 'v3': [u8; 4]
+ 376..389 'v3.do_thing()': usize
+ "#]],
+ )
+}
+
+#[test]
+fn const_generics() {
+ check_infer(
+ r#"
+trait T {
+ type Output;
+ fn do_thing(&self) -> Self::Output;
+}
+
+impl<const L: usize> T for [u8; L] {
+ type Output = [u8; L];
+ fn do_thing(&self) -> Self::Output {
+ *self
+ }
+}
+
+fn main() {
+ let v = [0u8; 2];
+ let v2 = v.do_thing();
+}
+"#,
+ expect![[r#"
+ 44..48 'self': &Self
+ 151..155 'self': &[u8; L]
+ 173..194 '{ ... }': [u8; L]
+ 183..188 '*self': [u8; L]
+ 184..188 'self': &[u8; L]
+ 208..260 '{ ...g(); }': ()
+ 218..219 'v': [u8; 2]
+ 222..230 '[0u8; 2]': [u8; 2]
+ 223..226 '0u8': u8
+ 228..229 '2': usize
+ 240..242 'v2': [u8; 2]
+ 245..246 'v': [u8; 2]
+ 245..257 'v.do_thing()': [u8; 2]
+ "#]],
+ )
+}
+
+#[test]
+fn fn_returning_unit() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+fn test<F: FnOnce()>(f: F) {
+ let _: () = f();
+}"#,
+ expect![[r#"
+ 21..22 'f': F
+ 27..51 '{ ...f(); }': ()
+ 37..38 '_': ()
+ 45..46 'f': F
+ 45..48 'f()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn trait_in_scope_of_trait_impl() {
+ check_infer(
+ r#"
+mod foo {
+ pub trait Foo {
+ fn foo(self);
+ fn bar(self) -> usize { 0 }
+ }
+}
+impl foo::Foo for u32 {
+ fn foo(self) {
+ let _x = self.bar();
+ }
+}
+ "#,
+ expect![[r#"
+ 45..49 'self': Self
+ 67..71 'self': Self
+ 82..87 '{ 0 }': usize
+ 84..85 '0': usize
+ 131..135 'self': u32
+ 137..173 '{ ... }': ()
+ 151..153 '_x': usize
+ 156..160 'self': u32
+ 156..166 'self.bar()': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_async_ret_type() {
+ check_types(
+ r#"
+//- minicore: future, result
+struct Fooey;
+
+impl Fooey {
+ fn collect<B: Convert>(self) -> B {
+ B::new()
+ }
+}
+
+trait Convert {
+ fn new() -> Self;
+}
+impl Convert for u32 {
+ fn new() -> Self { 0 }
+}
+
+async fn get_accounts() -> Result<u32, ()> {
+ let ret = Fooey.collect();
+ // ^^^^^^^^^^^^^^^ u32
+ Ok(ret)
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_1() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+
+fn test() {
+ struct S;
+ impl Trait<u32> for S {
+ fn foo(&self) -> u32 { 0 }
+ }
+
+ S.foo();
+ // ^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_2() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+struct S;
+
+fn test() {
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ }
+ impl Trait<u32> for S {
+ fn foo(&self) -> u32 { 0 }
+ }
+
+ S.foo();
+ // ^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_3() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+
+fn test() {
+ struct S1;
+ {
+ struct S2;
+
+ impl Trait<S1> for S2 {
+ fn foo(&self) -> S1 { S1 }
+ }
+
+ S2.foo();
+ // ^^^^^^^^ S1
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn associated_type_sized_bounds() {
+ check_infer(
+ r#"
+//- minicore: sized
+struct Yes;
+trait IsSized { const IS_SIZED: Yes; }
+impl<T: Sized> IsSized for T { const IS_SIZED: Yes = Yes; }
+
+trait Foo {
+ type Explicit: Sized;
+ type Implicit;
+ type Relaxed: ?Sized;
+}
+fn f<F: Foo>() {
+ F::Explicit::IS_SIZED;
+ F::Implicit::IS_SIZED;
+ F::Relaxed::IS_SIZED;
+}
+"#,
+ expect![[r#"
+ 104..107 'Yes': Yes
+ 212..295 '{ ...ZED; }': ()
+ 218..239 'F::Exp..._SIZED': Yes
+ 245..266 'F::Imp..._SIZED': Yes
+ 272..292 'F::Rel..._SIZED': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_map() {
+ check_types(
+ r#"
+pub struct Key<K, V, P = (K, V)> {}
+
+pub trait Policy {
+ type K;
+ type V;
+}
+
+impl<K, V> Policy for (K, V) {
+ type K = K;
+ type V = V;
+}
+
+pub struct KeyMap<KEY> {}
+
+impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
+ pub fn get(&self, key: &P::K) -> P::V {
+ loop {}
+ }
+}
+
+struct Fn {}
+struct FunctionId {}
+
+fn test() {
+ let key_map: &KeyMap<Key<Fn, FunctionId>> = loop {};
+ let key;
+ let result = key_map.get(key);
+ //^^^^^^ FunctionId
+}
+"#,
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
new file mode 100644
index 000000000..547850b02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
@@ -0,0 +1,133 @@
+//! Implementation of Chalk debug helper functions using TLS.
+use std::fmt::{self, Display};
+
+use itertools::Itertools;
+
+use crate::{
+ chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk,
+ CallableDefId, Interner,
+};
+use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId};
+
+pub(crate) use unsafe_tls::{set_current_program, with_current_program};
+
+pub(crate) struct DebugContext<'a>(&'a dyn HirDatabase);
+
+impl DebugContext<'_> {
+ pub(crate) fn debug_struct_id(
+ &self,
+ id: chalk_db::AdtId,
+ f: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let name = match id.0 {
+ AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
+ AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
+ AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
+ };
+ name.fmt(f)
+ }
+
+ pub(crate) fn debug_trait_id(
+ &self,
+ id: chalk_db::TraitId,
+ f: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let trait_: hir_def::TraitId = from_chalk_trait_id(id);
+ let trait_data = self.0.trait_data(trait_);
+ trait_data.name.fmt(f)
+ }
+
+ pub(crate) fn debug_assoc_type_id(
+ &self,
+ id: chalk_db::AssocTypeId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_alias: TypeAliasId = from_assoc_type_id(id);
+ let type_alias_data = self.0.type_alias_data(type_alias);
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ ItemContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let trait_data = self.0.trait_data(trait_);
+ write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
+ }
+
+ pub(crate) fn debug_projection_ty(
+ &self,
+ projection_ty: &chalk_ir::ProjectionTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
+ let type_alias_data = self.0.type_alias_data(type_alias);
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ ItemContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let trait_data = self.0.trait_data(trait_);
+ let params = projection_ty.substitution.as_slice(Interner);
+ write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
+ if params.len() > 1 {
+ write!(
+ fmt,
+ "<{}>",
+ &params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
+ )?;
+ }
+ write!(fmt, ">::{}", type_alias_data.name)
+ }
+
+ pub(crate) fn debug_fn_def_id(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let def: CallableDefId = from_chalk(self.0, fn_def_id);
+ let name = match def {
+ CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
+ CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
+ CallableDefId::EnumVariantId(e) => {
+ let enum_data = self.0.enum_data(e.parent);
+ enum_data.variants[e.local_id].name.clone()
+ }
+ };
+ match def {
+ CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name),
+ CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
+ write!(fmt, "{{ctor {}}}", name)
+ }
+ }
+ }
+}
+
+mod unsafe_tls {
+ use super::DebugContext;
+ use crate::db::HirDatabase;
+ use scoped_tls::scoped_thread_local;
+
+ scoped_thread_local!(static PROGRAM: DebugContext<'_>);
+
+ pub(crate) fn with_current_program<R>(
+ op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
+ ) -> R {
+ if PROGRAM.is_set() {
+ PROGRAM.with(|prog| op(Some(prog)))
+ } else {
+ op(None)
+ }
+ }
+
+ pub(crate) fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
+ where
+ OP: FnOnce() -> R,
+ {
+ let ctx = DebugContext(p);
+ // we're transmuting the lifetime in the DebugContext to static. This is
+ // fine because we only keep the reference for the lifetime of this
+ // function, *and* the only way to access the context is through
+ // `with_current_program`, which hides the lifetime through the `for`
+ // type.
+ let static_p: &DebugContext<'static> =
+ unsafe { std::mem::transmute::<&DebugContext<'_>, &DebugContext<'static>>(&ctx) };
+ PROGRAM.set(static_p, op)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
new file mode 100644
index 000000000..77afeb321
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -0,0 +1,187 @@
+//! Trait solving using Chalk.
+
+use std::env::var;
+
+use chalk_ir::GoalData;
+use chalk_recursive::Cache;
+use chalk_solve::{logging_db::LoggingRustIrDatabase, Solver};
+
+use base_db::CrateId;
+use hir_def::{lang_item::LangItemTarget, TraitId};
+use stdx::panic_context;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, AliasEq, AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment,
+ Interner, Solution, TraitRefExt, Ty, TyKind, WhereClause,
+};
+
+/// This controls how much 'time' we give the Chalk solver before giving up.
+const CHALK_SOLVER_FUEL: i32 = 100;
+
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct ChalkContext<'a> {
+ pub(crate) db: &'a dyn HirDatabase,
+ pub(crate) krate: CrateId,
+}
+
+fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
+ let overflow_depth =
+ var("CHALK_OVERFLOW_DEPTH").ok().and_then(|s| s.parse().ok()).unwrap_or(500);
+ let max_size = var("CHALK_SOLVER_MAX_SIZE").ok().and_then(|s| s.parse().ok()).unwrap_or(150);
+ chalk_recursive::RecursiveSolver::new(overflow_depth, max_size, Some(Cache::new()))
+}
+
+/// A set of clauses that we assume to be true. E.g. if we are inside this function:
+/// ```rust
+/// fn foo<T: Default>(t: T) {}
+/// ```
+/// we assume that `T: Default`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TraitEnvironment {
+ pub krate: CrateId,
+ // FIXME make this a BTreeMap
+ pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>,
+ pub env: chalk_ir::Environment<Interner>,
+}
+
+impl TraitEnvironment {
+ pub fn empty(krate: CrateId) -> Self {
+ TraitEnvironment {
+ krate,
+ traits_from_clauses: Vec::new(),
+ env: chalk_ir::Environment::new(Interner),
+ }
+ }
+
+ pub fn traits_in_scope_from_clauses<'a>(
+ &'a self,
+ ty: Ty,
+ ) -> impl Iterator<Item = TraitId> + 'a {
+ self.traits_from_clauses
+ .iter()
+ .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id))
+ }
+}
+
+/// Solve a trait goal using Chalk.
+pub(crate) fn trait_solve_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: Canonical<InEnvironment<Goal>>,
+) -> Option<Solution> {
+ let _p = profile::span("trait_solve_query").detail(|| match &goal.value.goal.data(Interner) {
+ GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
+ db.trait_data(it.hir_trait_id()).name.to_string()
+ }
+ GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
+ _ => "??".to_string(),
+ });
+ tracing::info!("trait_solve_query({:?})", goal.value.goal);
+
+ if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(projection_ty),
+ ..
+ }))) = &goal.value.goal.data(Interner)
+ {
+ if let TyKind::BoundVar(_) = projection_ty.self_type_parameter(Interner).kind(Interner) {
+ // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible
+ return Some(Solution::Ambig(Guidance::Unknown));
+ }
+ }
+
+ // We currently don't deal with universes (I think / hope they're not yet
+ // relevant for our use cases?)
+ let u_canonical = chalk_ir::UCanonical { canonical: goal, universes: 1 };
+ solve(db, krate, &u_canonical)
+}
+
+fn solve(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
+) -> Option<chalk_solve::Solution<Interner>> {
+ let context = ChalkContext { db, krate };
+ tracing::debug!("solve goal: {:?}", goal);
+ let mut solver = create_chalk_solver();
+
+ let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
+
+ let should_continue = || {
+ db.unwind_if_cancelled();
+ let remaining = fuel.get();
+ fuel.set(remaining - 1);
+ if remaining == 0 {
+ tracing::debug!("fuel exhausted");
+ }
+ remaining > 0
+ };
+
+ let mut solve = || {
+ let _ctx = if is_chalk_debug() || is_chalk_print() {
+ Some(panic_context::enter(format!("solving {:?}", goal)))
+ } else {
+ None
+ };
+ let solution = if is_chalk_print() {
+ let logging_db =
+ LoggingRustIrDatabaseLoggingOnDrop(LoggingRustIrDatabase::new(context));
+ solver.solve_limited(&logging_db.0, goal, &should_continue)
+ } else {
+ solver.solve_limited(&context, goal, &should_continue)
+ };
+
+ tracing::debug!("solve({:?}) => {:?}", goal, solution);
+
+ solution
+ };
+
+ // don't set the TLS for Chalk unless Chalk debugging is active, to make
+ // extra sure we only use it for debugging
+ if is_chalk_debug() {
+ crate::tls::set_current_program(db, solve)
+ } else {
+ solve()
+ }
+}
+
+struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, ChalkContext<'a>>);
+
+impl<'a> Drop for LoggingRustIrDatabaseLoggingOnDrop<'a> {
+ fn drop(&mut self) {
+ eprintln!("chalk program:\n{}", self.0);
+ }
+}
+
+fn is_chalk_debug() -> bool {
+ std::env::var("CHALK_DEBUG").is_ok()
+}
+
+fn is_chalk_print() -> bool {
+ std::env::var("CHALK_PRINT").is_ok()
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum FnTrait {
+ FnOnce,
+ FnMut,
+ Fn,
+}
+
+impl FnTrait {
+ const fn lang_item_name(self) -> &'static str {
+ match self {
+ FnTrait::FnOnce => "fn_once",
+ FnTrait::FnMut => "fn_mut",
+ FnTrait::Fn => "fn",
+ }
+ }
+
+ pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
+ let target = db.lang_item(krate, SmolStr::new_inline(self.lang_item_name()))?;
+ match target {
+ LangItemTarget::TraitId(t) => Some(t),
+ _ => None,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
new file mode 100644
index 000000000..83319755d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -0,0 +1,408 @@
+//! Helper functions for working with def, which don't need to be a separate
+//! query, but can't be computed directly from `*Data` (i.e., they need a `db`).
+
+use std::iter;
+
+use base_db::CrateId;
+use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex};
+use hir_def::{
+ db::DefDatabase,
+ generics::{
+ GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
+ WherePredicateTypeTarget,
+ },
+ intern::Interned,
+ resolver::{HasResolver, TypeNs},
+ type_ref::{TraitBoundModifier, TypeRef},
+ ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId,
+ TypeOrConstParamId, TypeParamId,
+};
+use hir_expand::name::{known, Name};
+use itertools::Either;
+use rustc_hash::FxHashSet;
+use smallvec::{smallvec, SmallVec};
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, ChalkTraitId, ConstData, ConstValue, GenericArgData, Interner, Substitution,
+ TraitRef, TraitRefExt, TyKind, WhereClause,
+};
+
+pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator<Item = TraitId> {
+ [
+ db.lang_item(krate, SmolStr::new_inline("fn")),
+ db.lang_item(krate, SmolStr::new_inline("fn_mut")),
+ db.lang_item(krate, SmolStr::new_inline("fn_once")),
+ ]
+ .into_iter()
+ .flatten()
+ .flat_map(|it| it.as_trait())
+}
+
+fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
+ let resolver = trait_.resolver(db);
+    // Returning the iterator directly doesn't easily work because of
+    // lifetime problems; since there usually shouldn't be more than a
+    // few direct super traits, collecting them into the returned
+    // SmallVec should be fine (and avoids heap allocation in practice).
+ let generic_params = db.generic_params(trait_.into());
+ let trait_self = generic_params.find_trait_self_param();
+ generic_params
+ .where_predicates
+ .iter()
+ .filter_map(|pred| match pred {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound } => {
+ let is_trait = match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => match &**type_ref {
+ TypeRef::Path(p) => p.is_self_type(),
+ _ => false,
+ },
+ WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+ Some(*local_id) == trait_self
+ }
+ };
+ match is_trait {
+ true => bound.as_path(),
+ false => None,
+ }
+ }
+ WherePredicate::Lifetime { .. } => None,
+ })
+ .filter(|(_, bound_modifier)| matches!(bound_modifier, TraitBoundModifier::None))
+ .filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
+ Some(TypeNs::TraitId(t)) => Some(t),
+ _ => None,
+ })
+ .collect()
+}
+
+fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<TraitRef> {
+ // returning the iterator directly doesn't easily work because of
+ // lifetime problems, but since there usually shouldn't be more than a
+ // few direct traits this should be fine (we could even use some kind of
+ // SmallVec if performance is a concern)
+ let generic_params = db.generic_params(trait_ref.hir_trait_id().into());
+ let trait_self = match generic_params.find_trait_self_param() {
+ Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p },
+ None => return Vec::new(),
+ };
+ db.generic_predicates_for_param(trait_self.parent, trait_self, None)
+ .iter()
+ .filter_map(|pred| {
+ pred.as_ref().filter_map(|pred| match pred.skip_binders() {
+ // FIXME: how to correctly handle higher-ranked bounds here?
+ WhereClause::Implemented(tr) => Some(
+ tr.clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("FIXME unexpected higher-ranked trait bound"),
+ ),
+ _ => None,
+ })
+ })
+ .map(|pred| pred.substitute(Interner, &trait_ref.substitution))
+ .collect()
+}
+
+/// Returns an iterator over the whole super trait hierarchy (including the
+/// trait itself).
+pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
+ // we need to take care a bit here to avoid infinite loops in case of cycles
+ // (i.e. if we have `trait A: B; trait B: A;`)
+
+ let mut result = smallvec![trait_];
+ let mut i = 0;
+ while let Some(&t) = result.get(i) {
+ // yeah this is quadratic, but trait hierarchies should be flat
+ // enough that this doesn't matter
+ for tt in direct_super_traits(db, t) {
+ if !result.contains(&tt) {
+ result.push(tt);
+ }
+ }
+ i += 1;
+ }
+ result
+}
+
+/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
+/// super traits. The original trait ref will be included. So the difference to
+/// `all_super_traits` is that we keep track of type parameters; for example if
+/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
+/// `Self: OtherTrait<i32>`.
+pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> SuperTraits<'_> {
+ SuperTraits { db, seen: iter::once(trait_ref.trait_id).collect(), stack: vec![trait_ref] }
+}
+
+pub(super) struct SuperTraits<'a> {
+ db: &'a dyn HirDatabase,
+ stack: Vec<TraitRef>,
+ seen: FxHashSet<ChalkTraitId>,
+}
+
+impl<'a> SuperTraits<'a> {
+ fn elaborate(&mut self, trait_ref: &TraitRef) {
+ let mut trait_refs = direct_super_trait_refs(self.db, trait_ref);
+ trait_refs.retain(|tr| !self.seen.contains(&tr.trait_id));
+ self.stack.extend(trait_refs);
+ }
+}
+
+impl<'a> Iterator for SuperTraits<'a> {
+ type Item = TraitRef;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(next) = self.stack.pop() {
+ self.elaborate(&next);
+ Some(next)
+ } else {
+ None
+ }
+ }
+}
+
+pub(super) fn associated_type_by_name_including_super_traits(
+ db: &dyn HirDatabase,
+ trait_ref: TraitRef,
+ name: &Name,
+) -> Option<(TraitRef, TypeAliasId)> {
+ all_super_trait_refs(db, trait_ref).find_map(|t| {
+ let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?;
+ Some((t, assoc_type))
+ })
+}
+
+pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
+ let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
+ if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) {
+ let params = db.generic_params(def);
+ let has_consts =
+ params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
+ return if has_consts {
+ // XXX: treat const generic associated types as not existing to avoid crashes (#11769)
+ //
+ // Chalk expects the inner associated type's parameters to come
+ // *before*, not after the trait's generics as we've always done it.
+ // Adapting to this requires a larger refactoring
+ cov_mark::hit!(ignore_gats);
+ Generics { def, params: Interned::new(Default::default()), parent_generics }
+ } else {
+ Generics { def, params, parent_generics }
+ };
+ }
+ Generics { def, params: db.generic_params(def), parent_generics }
+}
+
+#[derive(Debug)]
+pub(crate) struct Generics {
+ def: GenericDefId,
+ pub(crate) params: Interned<GenericParams>,
+ parent_generics: Option<Box<Generics>>,
+}
+
+impl Generics {
+ pub(crate) fn iter_id<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = Either<TypeParamId, ConstParamId>> + 'a {
+ self.iter().map(|(id, data)| match data {
+ TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)),
+ TypeOrConstParamData::ConstParamData(_) => {
+ Either::Right(ConstParamId::from_unchecked(id))
+ }
+ })
+ }
+
+ /// Iterator over types and const params of parent, then self.
+ pub(crate) fn iter<'a>(
+ &'a self,
+ ) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
+ let to_toc_id = |it: &'a Generics| {
+ move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p)
+ };
+ self.parent_generics()
+ .into_iter()
+ .flat_map(move |it| it.params.iter().map(to_toc_id(it)))
+ .chain(self.params.iter().map(to_toc_id(self)))
+ }
+
+ /// Iterator over types and const params of parent.
+ pub(crate) fn iter_parent<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
+ self.parent_generics().into_iter().flat_map(|it| {
+ let to_toc_id =
+ move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p);
+ it.params.iter().map(to_toc_id)
+ })
+ }
+
+ pub(crate) fn len(&self) -> usize {
+ let parent = self.parent_generics().map_or(0, Generics::len);
+ let child = self.params.type_or_consts.len();
+ parent + child
+ }
+
+ /// (parent total, self param, type param list, const param list, impl trait)
+ pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) {
+ let ty_iter = || self.params.iter().filter_map(|x| x.1.type_param());
+
+ let self_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::TraitSelf).count();
+ let type_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::TypeParamList).count();
+ let impl_trait_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::ArgumentImplTrait).count();
+ let const_params = self.params.iter().filter_map(|x| x.1.const_param()).count();
+
+ let parent_len = self.parent_generics().map_or(0, Generics::len);
+ (parent_len, self_params, type_params, const_params, impl_trait_params)
+ }
+
+ pub(crate) fn param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
+ Some(self.find_param(param)?.0)
+ }
+
+ fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> {
+ if param.parent == self.def {
+ let (idx, (_local_id, data)) = self
+ .params
+ .iter()
+ .enumerate()
+ .find(|(_, (idx, _))| *idx == param.local_id)
+ .unwrap();
+ let parent_len = self.parent_generics().map_or(0, Generics::len);
+ Some((parent_len + idx, data))
+ } else {
+ self.parent_generics().and_then(|g| g.find_param(param))
+ }
+ }
+
+ fn parent_generics(&self) -> Option<&Generics> {
+ self.parent_generics.as_ref().map(|it| &**it)
+ }
+
+ /// Returns a Substitution that replaces each parameter by a bound variable.
+ pub(crate) fn bound_vars_subst(
+ &self,
+ db: &dyn HirDatabase,
+ debruijn: DebruijnIndex,
+ ) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.iter_id().enumerate().map(|(idx, id)| match id {
+ Either::Left(_) => GenericArgData::Ty(
+ TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner),
+ )
+ .intern(Interner),
+ Either::Right(id) => GenericArgData::Const(
+ ConstData {
+ value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
+ ty: db.const_param_ty(id),
+ }
+ .intern(Interner),
+ )
+ .intern(Interner),
+ }),
+ )
+ }
+
+ /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
+ pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.iter_id().map(|id| match id {
+ Either::Left(id) => GenericArgData::Ty(
+ TyKind::Placeholder(crate::to_placeholder_idx(db, id.into())).intern(Interner),
+ )
+ .intern(Interner),
+ Either::Right(id) => GenericArgData::Const(
+ ConstData {
+ value: ConstValue::Placeholder(crate::to_placeholder_idx(db, id.into())),
+ ty: db.const_param_ty(id),
+ }
+ .intern(Interner),
+ )
+ .intern(Interner),
+ }),
+ )
+ }
+}
+
+fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
+ let container = match def {
+ GenericDefId::FunctionId(it) => it.lookup(db).container,
+ GenericDefId::TypeAliasId(it) => it.lookup(db).container,
+ GenericDefId::ConstId(it) => it.lookup(db).container,
+ GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
+ GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
+ };
+
+ match container {
+ ItemContainerId::ImplId(it) => Some(it.into()),
+ ItemContainerId::TraitId(it) => Some(it.into()),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+}
+
+pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
+ let data = db.function_data(func);
+ if data.has_unsafe_kw() {
+ return true;
+ }
+
+ match func.lookup(db.upcast()).container {
+ hir_def::ItemContainerId::ExternBlockId(block) => {
+            // Functions in an `extern` block are always unsafe to call, except
+            // for a few known-safe intrinsics when the ABI is `"rust-intrinsic"`.
+ let id = block.lookup(db.upcast()).id;
+ !matches!(
+ id.item_tree(db.upcast())[id.value].abi.as_deref(),
+ Some("rust-intrinsic") if !is_intrinsic_fn_unsafe(&data.name)
+ )
+ }
+ _ => false,
+ }
+}
+
+/// Returns `true` if the given intrinsic is unsafe to call, or false otherwise.
+fn is_intrinsic_fn_unsafe(name: &Name) -> bool {
+ // Should be kept in sync with https://github.com/rust-lang/rust/blob/532d2b14c05f9bc20b2d27cbb5f4550d28343a36/compiler/rustc_typeck/src/check/intrinsic.rs#L72-L106
+ ![
+ known::abort,
+ known::add_with_overflow,
+ known::bitreverse,
+ known::black_box,
+ known::bswap,
+ known::caller_location,
+ known::ctlz,
+ known::ctpop,
+ known::cttz,
+ known::discriminant_value,
+ known::forget,
+ known::likely,
+ known::maxnumf32,
+ known::maxnumf64,
+ known::min_align_of,
+ known::minnumf32,
+ known::minnumf64,
+ known::mul_with_overflow,
+ known::needs_drop,
+ known::ptr_guaranteed_eq,
+ known::ptr_guaranteed_ne,
+ known::rotate_left,
+ known::rotate_right,
+ known::rustc_peek,
+ known::saturating_add,
+ known::saturating_sub,
+ known::size_of,
+ known::sub_with_overflow,
+ known::type_id,
+ known::type_name,
+ known::unlikely,
+ known::variant_count,
+ known::wrapping_add,
+ known::wrapping_mul,
+ known::wrapping_sub,
+ ]
+ .contains(name)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs b/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs
new file mode 100644
index 000000000..c47689455
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs
@@ -0,0 +1,147 @@
+//! The `TypeWalk` trait (probably to be replaced by Chalk's `Fold` and
+//! `Visit`).
+
+use chalk_ir::interner::HasInterner;
+
+use crate::{
+ AliasEq, AliasTy, Binders, CallableSig, FnSubst, GenericArg, GenericArgData, Interner,
+ OpaqueTy, ProjectionTy, Substitution, TraitRef, Ty, TyKind, WhereClause,
+};
+
+/// This allows walking structures that contain types to do something with those
+/// types, similar to Chalk's `Fold` trait.
+pub trait TypeWalk {
+ fn walk(&self, f: &mut impl FnMut(&Ty));
+}
+
+impl TypeWalk for Ty {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ match self.kind(Interner) {
+ TyKind::Alias(AliasTy::Projection(p_ty)) => {
+ for t in p_ty.substitution.iter(Interner) {
+ t.walk(f);
+ }
+ }
+ TyKind::Alias(AliasTy::Opaque(o_ty)) => {
+ for t in o_ty.substitution.iter(Interner) {
+ t.walk(f);
+ }
+ }
+ TyKind::Dyn(dyn_ty) => {
+ for p in dyn_ty.bounds.skip_binders().interned().iter() {
+ p.walk(f);
+ }
+ }
+ TyKind::Slice(ty)
+ | TyKind::Array(ty, _)
+ | TyKind::Ref(_, _, ty)
+ | TyKind::Raw(_, ty) => {
+ ty.walk(f);
+ }
+ TyKind::Function(fn_pointer) => {
+ fn_pointer.substitution.0.walk(f);
+ }
+ TyKind::Adt(_, substs)
+ | TyKind::FnDef(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::OpaqueType(_, substs)
+ | TyKind::AssociatedType(_, substs)
+ | TyKind::Closure(.., substs) => {
+ substs.walk(f);
+ }
+ _ => {}
+ }
+ f(self);
+ }
+}
+
+impl<T: TypeWalk> TypeWalk for Vec<T> {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self {
+ t.walk(f);
+ }
+ }
+}
+
+impl TypeWalk for OpaqueTy {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.substitution.walk(f);
+ }
+}
+
+impl TypeWalk for ProjectionTy {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.substitution.walk(f);
+ }
+}
+
+impl TypeWalk for AliasTy {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ match self {
+ AliasTy::Projection(it) => it.walk(f),
+ AliasTy::Opaque(it) => it.walk(f),
+ }
+ }
+}
+
+impl TypeWalk for GenericArg {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ if let GenericArgData::Ty(ty) = &self.interned() {
+ ty.walk(f);
+ }
+ }
+}
+
+impl TypeWalk for Substitution {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self.iter(Interner) {
+ t.walk(f);
+ }
+ }
+}
+
+impl<T: TypeWalk + HasInterner<Interner = Interner>> TypeWalk for Binders<T> {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.skip_binders().walk(f);
+ }
+}
+
+impl TypeWalk for TraitRef {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.substitution.walk(f);
+ }
+}
+
+impl TypeWalk for WhereClause {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ match self {
+ WhereClause::Implemented(trait_ref) => trait_ref.walk(f),
+ WhereClause::AliasEq(alias_eq) => alias_eq.walk(f),
+ _ => {}
+ }
+ }
+}
+
+impl TypeWalk for CallableSig {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self.params_and_return.iter() {
+ t.walk(f);
+ }
+ }
+}
+
+impl TypeWalk for AliasEq {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.ty.walk(f);
+ match &self.alias {
+ AliasTy::Projection(projection_ty) => projection_ty.walk(f),
+ AliasTy::Opaque(opaque) => opaque.walk(f),
+ }
+ }
+}
+
+impl TypeWalk for FnSubst<Interner> {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.0.walk(f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
new file mode 100644
index 000000000..8e6a2441b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "hir"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+either = "1.7.0"
+arrayvec = "0.7.2"
+itertools = "0.10.3"
+smallvec = "1.9.0"
+once_cell = "1.12.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-ty = { path = "../hir-ty", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
new file mode 100644
index 000000000..0bd379340
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -0,0 +1,177 @@
+//! Attributes & documentation for hir types.
+
+use hir_def::{
+ attr::{AttrsWithOwner, Documentation},
+ item_scope::ItemInNs,
+ path::ModPath,
+ per_ns::PerNs,
+ resolver::HasResolver,
+ AttrDefId, GenericParamId, ModuleDefId,
+};
+use hir_expand::hygiene::Hygiene;
+use hir_ty::db::HirDatabase;
+use syntax::{ast, AstNode};
+
+use crate::{
+ Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
+ Macro, Module, ModuleDef, Static, Struct, Trait, TypeAlias, TypeParam, Union, Variant,
+};
+
+pub trait HasAttrs {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
+ fn resolve_doc_path(
+ self,
+ db: &dyn HirDatabase,
+ link: &str,
+ ns: Option<Namespace>,
+ ) -> Option<ModuleDef>;
+}
+
+#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
+pub enum Namespace {
+ Types,
+ Values,
+ Macros,
+}
+
+macro_rules! impl_has_attrs {
+ ($(($def:ident, $def_id:ident),)*) => {$(
+ impl HasAttrs for $def {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ let def = AttrDefId::$def_id(self.into());
+ db.attrs(def)
+ }
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ let def = AttrDefId::$def_id(self.into());
+ db.attrs(def).docs()
+ }
+ fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
+ let def = AttrDefId::$def_id(self.into());
+ resolve_doc_path(db, def, link, ns).map(ModuleDef::from)
+ }
+ }
+ )*};
+}
+
+impl_has_attrs![
+ (Field, FieldId),
+ (Variant, EnumVariantId),
+ (Static, StaticId),
+ (Const, ConstId),
+ (Trait, TraitId),
+ (TypeAlias, TypeAliasId),
+ (Macro, MacroId),
+ (Function, FunctionId),
+ (Adt, AdtId),
+ (Module, ModuleId),
+ (GenericParam, GenericParamId),
+ (Impl, ImplId),
+];
+
+macro_rules! impl_has_attrs_enum {
+ ($($variant:ident),* for $enum:ident) => {$(
+ impl HasAttrs for $variant {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ $enum::$variant(self).attrs(db)
+ }
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ $enum::$variant(self).docs(db)
+ }
+ fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
+ $enum::$variant(self).resolve_doc_path(db, link, ns)
+ }
+ }
+ )*};
+}
+
+impl_has_attrs_enum![Struct, Union, Enum for Adt];
+impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam];
+
+impl HasAttrs for AssocItem {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ match self {
+ AssocItem::Function(it) => it.attrs(db),
+ AssocItem::Const(it) => it.attrs(db),
+ AssocItem::TypeAlias(it) => it.attrs(db),
+ }
+ }
+
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ match self {
+ AssocItem::Function(it) => it.docs(db),
+ AssocItem::Const(it) => it.docs(db),
+ AssocItem::TypeAlias(it) => it.docs(db),
+ }
+ }
+
+ fn resolve_doc_path(
+ self,
+ db: &dyn HirDatabase,
+ link: &str,
+ ns: Option<Namespace>,
+ ) -> Option<ModuleDef> {
+ match self {
+ AssocItem::Function(it) => it.resolve_doc_path(db, link, ns),
+ AssocItem::Const(it) => it.resolve_doc_path(db, link, ns),
+ AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
+ }
+ }
+}
+
+fn resolve_doc_path(
+ db: &dyn HirDatabase,
+ def: AttrDefId,
+ link: &str,
+ ns: Option<Namespace>,
+) -> Option<ModuleDefId> {
+ let resolver = match def {
+ AttrDefId::ModuleId(it) => it.resolver(db.upcast()),
+ AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
+ AttrDefId::AdtId(it) => it.resolver(db.upcast()),
+ AttrDefId::FunctionId(it) => it.resolver(db.upcast()),
+ AttrDefId::EnumVariantId(it) => it.parent.resolver(db.upcast()),
+ AttrDefId::StaticId(it) => it.resolver(db.upcast()),
+ AttrDefId::ConstId(it) => it.resolver(db.upcast()),
+ AttrDefId::TraitId(it) => it.resolver(db.upcast()),
+ AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
+ AttrDefId::ImplId(it) => it.resolver(db.upcast()),
+ AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
+ AttrDefId::MacroId(it) => it.resolver(db.upcast()),
+ AttrDefId::GenericParamId(it) => match it {
+ GenericParamId::TypeParamId(it) => it.parent(),
+ GenericParamId::ConstParamId(it) => it.parent(),
+ GenericParamId::LifetimeParamId(it) => it.parent,
+ }
+ .resolver(db.upcast()),
+ };
+
+ let modpath = {
+ // FIXME: this is not how we should get a mod path here
+ let ast_path = ast::SourceFile::parse(&format!("type T = {};", link))
+ .syntax_node()
+ .descendants()
+ .find_map(ast::Path::cast)?;
+ if ast_path.to_string() != link {
+ return None;
+ }
+ ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())?
+ };
+
+ let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
+ let resolved = if resolved == PerNs::none() {
+ resolver.resolve_module_path_in_trait_assoc_items(db.upcast(), &modpath)?
+ } else {
+ resolved
+ };
+ match ns {
+ Some(Namespace::Types) => resolved.take_types(),
+ Some(Namespace::Values) => resolved.take_values(),
+ Some(Namespace::Macros) => resolved.take_macros().map(ModuleDefId::MacroId),
+ None => resolved.iter_items().next().map(|it| match it {
+ ItemInNs::Types(it) => it,
+ ItemInNs::Values(it) => it,
+ ItemInNs::Macros(it) => ModuleDefId::MacroId(it),
+ }),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
new file mode 100644
index 000000000..e25d86784
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -0,0 +1,16 @@
+//! Re-exports various subcrates databases so that the calling code can depend
+//! only on `hir`. This breaks abstraction boundary a bit, it would be cool if
+//! we didn't do that.
+//!
+//! But we need this for at least LRU caching at the query level.
+pub use hir_def::db::*;
+pub use hir_expand::db::{
+ AstDatabase, AstDatabaseStorage, AstIdMapQuery, HygieneFrameQuery, InternMacroCallQuery,
+ MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroExpansionQuery,
+};
+pub use hir_ty::db::*;
+
+#[test]
+fn hir_database_is_object_safe() {
+ fn _assert_object_safe(_: &dyn HirDatabase) {}
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
new file mode 100644
index 000000000..6c6c11ea4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -0,0 +1,170 @@
+//! Re-export diagnostics such that clients of `hir` don't have to depend on
+//! low-level crates.
+//!
+//! This probably isn't the best way to do this -- ideally, diagnostics should
+//! be expressed in terms of hir types themselves.
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_def::path::ModPath;
+use hir_expand::{name::Name, HirFileId, InFile};
+use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange};
+
+use crate::{MacroKind, Type};
+
+macro_rules! diagnostics {
+ ($($diag:ident,)*) => {
+ pub enum AnyDiagnostic {$(
+ $diag(Box<$diag>),
+ )*}
+
+ $(
+ impl From<$diag> for AnyDiagnostic {
+ fn from(d: $diag) -> AnyDiagnostic {
+ AnyDiagnostic::$diag(Box::new(d))
+ }
+ }
+ )*
+ };
+}
+
+diagnostics![
+ BreakOutsideOfLoop,
+ InactiveCode,
+ IncorrectCase,
+ InvalidDeriveTarget,
+ MacroError,
+ MalformedDerive,
+ MismatchedArgCount,
+ MissingFields,
+ MissingMatchArms,
+ MissingUnsafe,
+ NoSuchField,
+ ReplaceFilterMapNextWithFindMap,
+ TypeMismatch,
+ UnimplementedBuiltinMacro,
+ UnresolvedExternCrate,
+ UnresolvedImport,
+ UnresolvedMacroCall,
+ UnresolvedModule,
+ UnresolvedProcMacro,
+];
+
+#[derive(Debug)]
+pub struct UnresolvedModule {
+ pub decl: InFile<AstPtr<ast::Module>>,
+ pub candidates: Box<[String]>,
+}
+
+#[derive(Debug)]
+pub struct UnresolvedExternCrate {
+ pub decl: InFile<AstPtr<ast::ExternCrate>>,
+}
+
+#[derive(Debug)]
+pub struct UnresolvedImport {
+ pub decl: InFile<AstPtr<ast::UseTree>>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UnresolvedMacroCall {
+ pub macro_call: InFile<SyntaxNodePtr>,
+ pub precise_location: Option<TextRange>,
+ pub path: ModPath,
+ pub is_bang: bool,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct InactiveCode {
+ pub node: InFile<SyntaxNodePtr>,
+ pub cfg: CfgExpr,
+ pub opts: CfgOptions,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UnresolvedProcMacro {
+ pub node: InFile<SyntaxNodePtr>,
+ /// If the diagnostic can be pinpointed more accurately than via `node`, this is the `TextRange`
+ /// to use instead.
+ pub precise_location: Option<TextRange>,
+ pub macro_name: Option<String>,
+ pub kind: MacroKind,
+ /// The crate id of the proc-macro this macro belongs to, or `None` if the proc-macro can't be found.
+ pub krate: CrateId,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroError {
+ pub node: InFile<SyntaxNodePtr>,
+ pub precise_location: Option<TextRange>,
+ pub message: String,
+}
+
+#[derive(Debug)]
+pub struct UnimplementedBuiltinMacro {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct InvalidDeriveTarget {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct MalformedDerive {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct NoSuchField {
+ pub field: InFile<AstPtr<ast::RecordExprField>>,
+}
+
+#[derive(Debug)]
+pub struct BreakOutsideOfLoop {
+ pub expr: InFile<AstPtr<ast::Expr>>,
+}
+
+#[derive(Debug)]
+pub struct MissingUnsafe {
+ pub expr: InFile<AstPtr<ast::Expr>>,
+}
+
+#[derive(Debug)]
+pub struct MissingFields {
+ pub file: HirFileId,
+ pub field_list_parent: Either<AstPtr<ast::RecordExpr>, AstPtr<ast::RecordPat>>,
+ pub field_list_parent_path: Option<AstPtr<ast::Path>>,
+ pub missed_fields: Vec<Name>,
+}
+
+#[derive(Debug)]
+pub struct ReplaceFilterMapNextWithFindMap {
+ pub file: HirFileId,
+ /// This expression is the whole method chain up to and including `.filter_map(..).next()`.
+ pub next_expr: AstPtr<ast::Expr>,
+}
+
+#[derive(Debug)]
+pub struct MismatchedArgCount {
+ pub call_expr: InFile<AstPtr<ast::Expr>>,
+ pub expected: usize,
+ pub found: usize,
+}
+
+#[derive(Debug)]
+pub struct MissingMatchArms {
+ pub file: HirFileId,
+ pub match_expr: AstPtr<ast::Expr>,
+ pub uncovered_patterns: String,
+}
+
+#[derive(Debug)]
+pub struct TypeMismatch {
+ // FIXME: add mismatches in patterns as well
+ pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expected: Type,
+ pub actual: Type,
+}
+
+pub use hir_ty::diagnostics::IncorrectCase;
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
new file mode 100644
index 000000000..0e29c52ad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -0,0 +1,530 @@
+//! HirDisplay implementations for various hir types.
+use hir_def::{
+ adt::VariantData,
+ generics::{
+ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
+ },
+ type_ref::{TypeBound, TypeRef},
+ AdtId, GenericDefId,
+};
+use hir_ty::{
+ display::{
+ write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError,
+ HirFormatter, SizedByDefault,
+ },
+ Interner, TraitRefExt, WhereClause,
+};
+use syntax::SmolStr;
+
+use crate::{
+ Adt, Const, ConstParam, Enum, Field, Function, GenericParam, HasCrate, HasVisibility,
+ LifetimeParam, Macro, Module, Static, Struct, Trait, TyBuilder, Type, TypeAlias,
+ TypeOrConstParam, TypeParam, Union, Variant,
+};
+
+impl HirDisplay for Function {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let data = f.db.function_data(self.id);
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ if data.has_default_kw() {
+ f.write_str("default ")?;
+ }
+ if data.has_const_kw() {
+ f.write_str("const ")?;
+ }
+ if data.has_async_kw() {
+ f.write_str("async ")?;
+ }
+ if self.is_unsafe_to_call(f.db) {
+ f.write_str("unsafe ")?;
+ }
+ if let Some(abi) = &data.abi {
+ // FIXME: String escape?
+ write!(f, "extern \"{}\" ", &**abi)?;
+ }
+ write!(f, "fn {}", data.name)?;
+
+ write_generic_params(GenericDefId::FunctionId(self.id), f)?;
+
+ f.write_char('(')?;
+
+ let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty {
+ TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
+ TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner,TypeRef::Path(p) if p.is_self_type()) =>
+ {
+ f.write_char('&')?;
+ if let Some(lifetime) = lifetime {
+ write!(f, "{} ", lifetime.name)?;
+ }
+ if let hir_def::type_ref::Mutability::Mut = mut_ {
+ f.write_str("mut ")?;
+ }
+ f.write_str("self")
+ }
+ _ => {
+ f.write_str("self: ")?;
+ ty.hir_fmt(f)
+ }
+ };
+
+ let mut first = true;
+ for (name, type_ref) in &data.params {
+ if !first {
+ f.write_str(", ")?;
+ } else {
+ first = false;
+ if data.has_self_param() {
+ write_self_param(type_ref, f)?;
+ continue;
+ }
+ }
+ match name {
+ Some(name) => write!(f, "{}: ", name)?,
+ None => f.write_str("_: ")?,
+ }
+ // FIXME: Use resolved `param.ty` or raw `type_ref`?
+ // The former will ignore lifetime arguments currently.
+ type_ref.hir_fmt(f)?;
+ }
+
+ if data.is_varargs() {
+ f.write_str(", ...")?;
+ }
+
+ f.write_char(')')?;
+
+ // `FunctionData::ret_type` will be `::core::future::Future<Output = ...>` for async fns.
+ // Use ugly pattern match to strip the Future trait.
+ // Better way?
+ let ret_type = if !data.has_async_kw() {
+ &data.ret_type
+ } else {
+ match &*data.ret_type {
+ TypeRef::ImplTrait(bounds) => match bounds[0].as_ref() {
+ TypeBound::Path(path, _) => {
+ path.segments().iter().last().unwrap().args_and_bindings.unwrap().bindings
+ [0]
+ .type_ref
+ .as_ref()
+ .unwrap()
+ }
+ _ => panic!("Async fn ret_type should be impl Future"),
+ },
+ _ => panic!("Async fn ret_type should be impl Future"),
+ }
+ };
+
+ match ret_type {
+ TypeRef::Tuple(tup) if tup.is_empty() => {}
+ ty => {
+ f.write_str(" -> ")?;
+ ty.hir_fmt(f)?;
+ }
+ }
+
+ write_where_clause(GenericDefId::FunctionId(self.id), f)?;
+
+ Ok(())
+ }
+}
+
+impl HirDisplay for Adt {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ Adt::Struct(it) => it.hir_fmt(f),
+ Adt::Union(it) => it.hir_fmt(f),
+ Adt::Enum(it) => it.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for Struct {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("struct ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Enum {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("enum ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Union {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("union ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Field {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
+ write!(f, "{}: ", self.name(f.db))?;
+ self.ty(f.db).hir_fmt(f)
+ }
+}
+
+impl HirDisplay for Variant {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))?;
+ let data = self.variant_data(f.db);
+ match &*data {
+ VariantData::Unit => {}
+ VariantData::Tuple(fields) => {
+ f.write_char('(')?;
+ let mut first = true;
+ for (_, field) in fields.iter() {
+ if first {
+ first = false;
+ } else {
+ f.write_str(", ")?;
+ }
+ // Enum variant fields must be pub.
+ field.type_ref.hir_fmt(f)?;
+ }
+ f.write_char(')')?;
+ }
+ VariantData::Record(fields) => {
+ f.write_str(" {")?;
+ let mut first = true;
+ for (_, field) in fields.iter() {
+ if first {
+ first = false;
+ f.write_char(' ')?;
+ } else {
+ f.write_str(", ")?;
+ }
+ // Enum variant fields must be pub.
+ write!(f, "{}: ", field.name)?;
+ field.type_ref.hir_fmt(f)?;
+ }
+ f.write_str(" }")?;
+ }
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for Type {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ self.ty.hir_fmt(f)
+ }
+}
+
+impl HirDisplay for GenericParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ GenericParam::TypeParam(it) => it.hir_fmt(f),
+ GenericParam::ConstParam(it) => it.hir_fmt(f),
+ GenericParam::LifetimeParam(it) => it.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for TypeOrConstParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self.split(f.db) {
+ either::Either::Left(x) => x.hir_fmt(f),
+ either::Either::Right(x) => x.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for TypeParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))?;
+ if f.omit_verbose_types() {
+ return Ok(());
+ }
+
+ let bounds = f.db.generic_predicates_for_param(self.id.parent(), self.id.into(), None);
+ let substs = TyBuilder::placeholder_subst(f.db, self.id.parent());
+ let predicates: Vec<_> =
+ bounds.iter().cloned().map(|b| b.substitute(Interner, &substs)).collect();
+ let krate = self.id.parent().krate(f.db).id;
+ let sized_trait =
+ f.db.lang_item(krate, SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait());
+ let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() {
+ WhereClause::Implemented(it) => Some(it.hir_trait_id()) == sized_trait,
+ _ => false,
+ });
+ let has_only_not_sized_bound = predicates.is_empty();
+ if !has_only_sized_bound || has_only_not_sized_bound {
+ let default_sized = SizedByDefault::Sized { anchor: krate };
+ write_bounds_like_dyn_trait_with_prefix(":", &predicates, default_sized, f)?;
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for LifetimeParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))
+ }
+}
+
+impl HirDisplay for ConstParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "const {}: ", self.name(f.db))?;
+ self.ty(f.db).hir_fmt(f)
+ }
+}
+
+fn write_generic_params(
+ def: GenericDefId,
+ f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+ let params = f.db.generic_params(def);
+ if params.lifetimes.is_empty()
+ && params.type_or_consts.iter().all(|x| x.1.const_param().is_none())
+ && params
+ .type_or_consts
+ .iter()
+ .filter_map(|x| x.1.type_param())
+ .all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList))
+ {
+ return Ok(());
+ }
+ f.write_char('<')?;
+
+ let mut first = true;
+ let mut delim = |f: &mut HirFormatter<'_>| {
+ if first {
+ first = false;
+ Ok(())
+ } else {
+ f.write_str(", ")
+ }
+ };
+ for (_, lifetime) in params.lifetimes.iter() {
+ delim(f)?;
+ write!(f, "{}", lifetime.name)?;
+ }
+ for (_, ty) in params.type_or_consts.iter() {
+ if let Some(name) = &ty.name() {
+ match ty {
+ TypeOrConstParamData::TypeParamData(ty) => {
+ if ty.provenance != TypeParamProvenance::TypeParamList {
+ continue;
+ }
+ delim(f)?;
+ write!(f, "{}", name)?;
+ if let Some(default) = &ty.default {
+ f.write_str(" = ")?;
+ default.hir_fmt(f)?;
+ }
+ }
+ TypeOrConstParamData::ConstParamData(c) => {
+ delim(f)?;
+ write!(f, "const {}: ", name)?;
+ c.ty.hir_fmt(f)?;
+ }
+ }
+ }
+ }
+
+ f.write_char('>')?;
+ Ok(())
+}
+
+fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let params = f.db.generic_params(def);
+
+ // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
+ let is_unnamed_type_target = |target: &WherePredicateTypeTarget| match target {
+ WherePredicateTypeTarget::TypeRef(_) => false,
+ WherePredicateTypeTarget::TypeOrConstParam(id) => {
+ params.type_or_consts[*id].name().is_none()
+ }
+ };
+
+ let has_displayable_predicate = params
+ .where_predicates
+ .iter()
+ .any(|pred| {
+ !matches!(pred, WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target))
+ });
+
+ if !has_displayable_predicate {
+ return Ok(());
+ }
+
+ let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
+ WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
+ WherePredicateTypeTarget::TypeOrConstParam(id) => {
+ match &params.type_or_consts[*id].name() {
+ Some(name) => write!(f, "{}", name),
+ None => f.write_str("{unnamed}"),
+ }
+ }
+ };
+
+ f.write_str("\nwhere")?;
+
+ for (pred_idx, pred) in params.where_predicates.iter().enumerate() {
+ let prev_pred =
+ if pred_idx == 0 { None } else { Some(&params.where_predicates[pred_idx - 1]) };
+
+ let new_predicate = |f: &mut HirFormatter<'_>| {
+ f.write_str(if pred_idx == 0 { "\n " } else { ",\n " })
+ };
+
+ match pred {
+ WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target) => {}
+ WherePredicate::TypeBound { target, bound } => {
+ if matches!(prev_pred, Some(WherePredicate::TypeBound { target: target_, .. }) if target_ == target)
+ {
+ f.write_str(" + ")?;
+ } else {
+ new_predicate(f)?;
+ write_target(target, f)?;
+ f.write_str(": ")?;
+ }
+ bound.hir_fmt(f)?;
+ }
+ WherePredicate::Lifetime { target, bound } => {
+ if matches!(prev_pred, Some(WherePredicate::Lifetime { target: target_, .. }) if target_ == target)
+ {
+ write!(f, " + {}", bound.name)?;
+ } else {
+ new_predicate(f)?;
+ write!(f, "{}: {}", target.name, bound.name)?;
+ }
+ }
+ WherePredicate::ForLifetime { lifetimes, target, bound } => {
+ if matches!(
+ prev_pred,
+ Some(WherePredicate::ForLifetime { lifetimes: lifetimes_, target: target_, .. })
+ if lifetimes_ == lifetimes && target_ == target,
+ ) {
+ f.write_str(" + ")?;
+ } else {
+ new_predicate(f)?;
+ f.write_str("for<")?;
+ for (idx, lifetime) in lifetimes.iter().enumerate() {
+ if idx != 0 {
+ f.write_str(", ")?;
+ }
+ write!(f, "{}", lifetime)?;
+ }
+ f.write_str("> ")?;
+ write_target(target, f)?;
+ f.write_str(": ")?;
+ }
+ bound.hir_fmt(f)?;
+ }
+ }
+ }
+
+ // End of final predicate. There must be at least one predicate here.
+ f.write_char(',')?;
+
+ Ok(())
+}
+
+impl HirDisplay for Const {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.const_data(self.id);
+ f.write_str("const ")?;
+ match &data.name {
+ Some(name) => write!(f, "{}: ", name)?,
+ None => f.write_str("_: ")?,
+ }
+ data.type_ref.hir_fmt(f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Static {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.static_data(self.id);
+ f.write_str("static ")?;
+ if data.mutable {
+ f.write_str("mut ")?;
+ }
+ write!(f, "{}: ", &data.name)?;
+ data.type_ref.hir_fmt(f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Trait {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.trait_data(self.id);
+ if data.is_unsafe {
+ f.write_str("unsafe ")?;
+ }
+ if data.is_auto {
+ f.write_str("auto ")?;
+ }
+ write!(f, "trait {}", data.name)?;
+ let def_id = GenericDefId::TraitId(self.id);
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for TypeAlias {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.type_alias_data(self.id);
+ write!(f, "type {}", data.name)?;
+ if !data.bounds.is_empty() {
+ f.write_str(": ")?;
+ f.write_joined(&data.bounds, " + ")?;
+ }
+ if let Some(ty) = &data.type_ref {
+ f.write_str(" = ")?;
+ ty.hir_fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for Module {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ // FIXME: Module doesn't have visibility saved in data.
+ match self.name(f.db) {
+ Some(name) => write!(f, "mod {}", name),
+ None if self.is_crate_root(f.db) => match self.krate(f.db).display_name(f.db) {
+ Some(name) => write!(f, "extern crate {}", name),
+ None => f.write_str("extern crate {unknown}"),
+ },
+ None => f.write_str("mod {unnamed}"),
+ }
+ }
+}
+
+impl HirDisplay for Macro {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self.id {
+ hir_def::MacroId::Macro2Id(_) => f.write_str("macro"),
+ hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"),
+ hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"),
+ }?;
+ write!(f, " {}", self.name(f.db))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
new file mode 100644
index 000000000..9c7558d19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
@@ -0,0 +1,293 @@
+//! Utility module for converting between hir_def ids and code_model wrappers.
+//!
+//! It's unclear if we need this long-term, but it's definitely useful while we
+//! are splitting the hir.
+
+use hir_def::{
+ expr::{LabelId, PatId},
+ AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId,
+ ModuleDefId, VariantId,
+};
+
+use crate::{
+ Adt, AssocItem, BuiltinType, DefWithBody, Field, GenericDef, GenericParam, ItemInNs, Label,
+ Local, ModuleDef, Variant, VariantDef,
+};
+
+macro_rules! from_id {
+ ($(($id:path, $ty:path)),*) => {$(
+ impl From<$id> for $ty {
+ fn from(id: $id) -> $ty {
+ $ty { id }
+ }
+ }
+ impl From<$ty> for $id {
+ fn from(ty: $ty) -> $id {
+ ty.id
+ }
+ }
+ )*}
+}
+
+from_id![
+ (base_db::CrateId, crate::Crate),
+ (hir_def::ModuleId, crate::Module),
+ (hir_def::StructId, crate::Struct),
+ (hir_def::UnionId, crate::Union),
+ (hir_def::EnumId, crate::Enum),
+ (hir_def::TypeAliasId, crate::TypeAlias),
+ (hir_def::TraitId, crate::Trait),
+ (hir_def::StaticId, crate::Static),
+ (hir_def::ConstId, crate::Const),
+ (hir_def::FunctionId, crate::Function),
+ (hir_def::ImplId, crate::Impl),
+ (hir_def::TypeOrConstParamId, crate::TypeOrConstParam),
+ (hir_def::TypeParamId, crate::TypeParam),
+ (hir_def::ConstParamId, crate::ConstParam),
+ (hir_def::LifetimeParamId, crate::LifetimeParam),
+ (hir_def::MacroId, crate::Macro)
+];
+
+impl From<AdtId> for Adt {
+ fn from(id: AdtId) -> Self {
+ match id {
+ AdtId::StructId(it) => Adt::Struct(it.into()),
+ AdtId::UnionId(it) => Adt::Union(it.into()),
+ AdtId::EnumId(it) => Adt::Enum(it.into()),
+ }
+ }
+}
+
+impl From<Adt> for AdtId {
+ fn from(id: Adt) -> Self {
+ match id {
+ Adt::Struct(it) => AdtId::StructId(it.id),
+ Adt::Union(it) => AdtId::UnionId(it.id),
+ Adt::Enum(it) => AdtId::EnumId(it.id),
+ }
+ }
+}
+
+impl From<GenericParamId> for GenericParam {
+ fn from(id: GenericParamId) -> Self {
+ match id {
+ GenericParamId::TypeParamId(it) => GenericParam::TypeParam(it.into()),
+ GenericParamId::ConstParamId(it) => GenericParam::ConstParam(it.into()),
+ GenericParamId::LifetimeParamId(it) => GenericParam::LifetimeParam(it.into()),
+ }
+ }
+}
+
+impl From<GenericParam> for GenericParamId {
+ fn from(id: GenericParam) -> Self {
+ match id {
+ GenericParam::LifetimeParam(it) => GenericParamId::LifetimeParamId(it.id),
+ GenericParam::ConstParam(it) => GenericParamId::ConstParamId(it.id),
+ GenericParam::TypeParam(it) => GenericParamId::TypeParamId(it.id),
+ }
+ }
+}
+
+impl From<EnumVariantId> for Variant {
+ fn from(id: EnumVariantId) -> Self {
+ Variant { parent: id.parent.into(), id: id.local_id }
+ }
+}
+
+impl From<Variant> for EnumVariantId {
+ fn from(def: Variant) -> Self {
+ EnumVariantId { parent: def.parent.id, local_id: def.id }
+ }
+}
+
+impl From<ModuleDefId> for ModuleDef {
+ fn from(id: ModuleDefId) -> Self {
+ match id {
+ ModuleDefId::ModuleId(it) => ModuleDef::Module(it.into()),
+ ModuleDefId::FunctionId(it) => ModuleDef::Function(it.into()),
+ ModuleDefId::AdtId(it) => ModuleDef::Adt(it.into()),
+ ModuleDefId::EnumVariantId(it) => ModuleDef::Variant(it.into()),
+ ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()),
+ ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()),
+ ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()),
+ ModuleDefId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
+ ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it.into()),
+ ModuleDefId::MacroId(it) => ModuleDef::Macro(it.into()),
+ }
+ }
+}
+
+impl From<ModuleDef> for ModuleDefId {
+ fn from(id: ModuleDef) -> Self {
+ match id {
+ ModuleDef::Module(it) => ModuleDefId::ModuleId(it.into()),
+ ModuleDef::Function(it) => ModuleDefId::FunctionId(it.into()),
+ ModuleDef::Adt(it) => ModuleDefId::AdtId(it.into()),
+ ModuleDef::Variant(it) => ModuleDefId::EnumVariantId(it.into()),
+ ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()),
+ ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()),
+ ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()),
+ ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()),
+ ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it.into()),
+ ModuleDef::Macro(it) => ModuleDefId::MacroId(it.into()),
+ }
+ }
+}
+
+impl From<DefWithBody> for DefWithBodyId {
+ fn from(def: DefWithBody) -> Self {
+ match def {
+ DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id),
+ DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id),
+ DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
+ }
+ }
+}
+
+impl From<DefWithBodyId> for DefWithBody {
+ fn from(def: DefWithBodyId) -> Self {
+ match def {
+ DefWithBodyId::FunctionId(it) => DefWithBody::Function(it.into()),
+ DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()),
+ DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()),
+ }
+ }
+}
+
+impl From<AssocItemId> for AssocItem {
+ fn from(def: AssocItemId) -> Self {
+ match def {
+ AssocItemId::FunctionId(it) => AssocItem::Function(it.into()),
+ AssocItemId::TypeAliasId(it) => AssocItem::TypeAlias(it.into()),
+ AssocItemId::ConstId(it) => AssocItem::Const(it.into()),
+ }
+ }
+}
+
+impl From<GenericDef> for GenericDefId {
+ fn from(def: GenericDef) -> Self {
+ match def {
+ GenericDef::Function(it) => GenericDefId::FunctionId(it.id),
+ GenericDef::Adt(it) => GenericDefId::AdtId(it.into()),
+ GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
+ GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
+ GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
+ GenericDef::Variant(it) => {
+ GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id })
+ }
+ GenericDef::Const(it) => GenericDefId::ConstId(it.id),
+ }
+ }
+}
+
+impl From<GenericDefId> for GenericDef {
+ fn from(def: GenericDefId) -> Self {
+ match def {
+ GenericDefId::FunctionId(it) => GenericDef::Function(it.into()),
+ GenericDefId::AdtId(it) => GenericDef::Adt(it.into()),
+ GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
+ GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
+ GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
+ GenericDefId::EnumVariantId(it) => {
+ GenericDef::Variant(Variant { parent: it.parent.into(), id: it.local_id })
+ }
+ GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
+ }
+ }
+}
+
+impl From<Adt> for GenericDefId {
+ fn from(id: Adt) -> Self {
+ match id {
+ Adt::Struct(it) => it.id.into(),
+ Adt::Union(it) => it.id.into(),
+ Adt::Enum(it) => it.id.into(),
+ }
+ }
+}
+
+impl From<VariantId> for VariantDef {
+ fn from(def: VariantId) -> Self {
+ match def {
+ VariantId::StructId(it) => VariantDef::Struct(it.into()),
+ VariantId::EnumVariantId(it) => VariantDef::Variant(it.into()),
+ VariantId::UnionId(it) => VariantDef::Union(it.into()),
+ }
+ }
+}
+
+impl From<VariantDef> for VariantId {
+ fn from(def: VariantDef) -> Self {
+ match def {
+ VariantDef::Struct(it) => VariantId::StructId(it.id),
+ VariantDef::Variant(it) => VariantId::EnumVariantId(it.into()),
+ VariantDef::Union(it) => VariantId::UnionId(it.id),
+ }
+ }
+}
+
+impl From<Field> for FieldId {
+ fn from(def: Field) -> Self {
+ FieldId { parent: def.parent.into(), local_id: def.id }
+ }
+}
+
+impl From<FieldId> for Field {
+ fn from(def: FieldId) -> Self {
+ Field { parent: def.parent.into(), id: def.local_id }
+ }
+}
+
+impl From<AssocItem> for GenericDefId {
+ fn from(item: AssocItem) -> Self {
+ match item {
+ AssocItem::Function(f) => f.id.into(),
+ AssocItem::Const(c) => c.id.into(),
+ AssocItem::TypeAlias(t) => t.id.into(),
+ }
+ }
+}
+
+impl From<(DefWithBodyId, PatId)> for Local {
+ fn from((parent, pat_id): (DefWithBodyId, PatId)) -> Self {
+ Local { parent, pat_id }
+ }
+}
+
+impl From<(DefWithBodyId, LabelId)> for Label {
+ fn from((parent, label_id): (DefWithBodyId, LabelId)) -> Self {
+ Label { parent, label_id }
+ }
+}
+
+impl From<hir_def::item_scope::ItemInNs> for ItemInNs {
+ fn from(it: hir_def::item_scope::ItemInNs) -> Self {
+ match it {
+ hir_def::item_scope::ItemInNs::Types(it) => ItemInNs::Types(it.into()),
+ hir_def::item_scope::ItemInNs::Values(it) => ItemInNs::Values(it.into()),
+ hir_def::item_scope::ItemInNs::Macros(it) => ItemInNs::Macros(it.into()),
+ }
+ }
+}
+
+impl From<ItemInNs> for hir_def::item_scope::ItemInNs {
+ fn from(it: ItemInNs) -> Self {
+ match it {
+ ItemInNs::Types(it) => Self::Types(it.into()),
+ ItemInNs::Values(it) => Self::Values(it.into()),
+ ItemInNs::Macros(it) => Self::Macros(it.into()),
+ }
+ }
+}
+
+impl From<hir_def::builtin_type::BuiltinType> for BuiltinType {
+ fn from(inner: hir_def::builtin_type::BuiltinType) -> Self {
+ Self { inner }
+ }
+}
+
+impl From<BuiltinType> for hir_def::builtin_type::BuiltinType {
+ fn from(it: BuiltinType) -> Self {
+ it.inner
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
new file mode 100644
index 000000000..f8b01db3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
@@ -0,0 +1,174 @@
+//! Provides set of implementation for hir's objects that allows get back location in file.
+
+use either::Either;
+use hir_def::{
+ nameres::{ModuleOrigin, ModuleSource},
+ src::{HasChildSource, HasSource as _},
+ Lookup, MacroId, VariantId,
+};
+use hir_expand::InFile;
+use syntax::ast;
+
+use crate::{
+ db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam, Macro,
+ Module, Static, Struct, Trait, TypeAlias, TypeOrConstParam, Union, Variant,
+};
+
+pub trait HasSource {
+ type Ast;
+ /// Fetches the definition's source node.
+ /// Using [`crate::Semantics::source`] is preferred when working with [`crate::Semantics`],
+ /// as that caches the parsed file in the semantics' cache.
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
+}
+
+/// NB: Module is !HasSource, because it has two source nodes at the same time:
+/// definition and declaration.
+impl Module {
+ /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
+ pub fn definition_source(self, db: &dyn HirDatabase) -> InFile<ModuleSource> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].definition_source(db.upcast())
+ }
+
+ pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool {
+ let def_map = self.id.def_map(db.upcast());
+ match def_map[self.id.local_id].origin {
+ ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs,
+ _ => false,
+ }
+ }
+
+ pub fn is_inline(self, db: &dyn HirDatabase) -> bool {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].origin.is_inline()
+ }
+
+ /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
+ /// `None` for the crate root.
+ pub fn declaration_source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Module>> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].declaration_source(db.upcast())
+ }
+}
+
+impl HasSource for Field {
+ type Ast = FieldSource;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let var = VariantId::from(self.parent);
+ let src = var.child_source(db.upcast());
+ let field_source = src.map(|it| match it[self.id].clone() {
+ Either::Left(it) => FieldSource::Pos(it),
+ Either::Right(it) => FieldSource::Named(it),
+ });
+ Some(field_source)
+ }
+}
+impl HasSource for Adt {
+ type Ast = ast::Adt;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ match self {
+ Adt::Struct(s) => Some(s.source(db)?.map(ast::Adt::Struct)),
+ Adt::Union(u) => Some(u.source(db)?.map(ast::Adt::Union)),
+ Adt::Enum(e) => Some(e.source(db)?.map(ast::Adt::Enum)),
+ }
+ }
+}
+impl HasSource for Struct {
+ type Ast = ast::Struct;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Union {
+ type Ast = ast::Union;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Enum {
+ type Ast = ast::Enum;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Variant {
+ type Ast = ast::Variant;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Variant>> {
+ Some(self.parent.id.child_source(db.upcast()).map(|map| map[self.id].clone()))
+ }
+}
+impl HasSource for Function {
+ type Ast = ast::Fn;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Const {
+ type Ast = ast::Const;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Static {
+ type Ast = ast::Static;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Trait {
+ type Ast = ast::Trait;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for TypeAlias {
+ type Ast = ast::TypeAlias;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Macro {
+ type Ast = Either<ast::Macro, ast::Fn>;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ match self.id {
+ MacroId::Macro2Id(it) => Some(
+ it.lookup(db.upcast())
+ .source(db.upcast())
+ .map(ast::Macro::MacroDef)
+ .map(Either::Left),
+ ),
+ MacroId::MacroRulesId(it) => Some(
+ it.lookup(db.upcast())
+ .source(db.upcast())
+ .map(ast::Macro::MacroRules)
+ .map(Either::Left),
+ ),
+ MacroId::ProcMacroId(it) => {
+ Some(it.lookup(db.upcast()).source(db.upcast()).map(Either::Right))
+ }
+ }
+ }
+}
+impl HasSource for Impl {
+ type Ast = ast::Impl;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+
+impl HasSource for TypeOrConstParam {
+ type Ast = Either<ast::TypeOrConstParam, ast::Trait>;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let child_source = self.id.parent.child_source(db.upcast());
+ Some(child_source.map(|it| it[self.id.local_id].clone()))
+ }
+}
+
+impl HasSource for LifetimeParam {
+ type Ast = ast::LifetimeParam;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let child_source = self.id.parent.child_source(db.upcast());
+ Some(child_source.map(|it| it[self.id.local_id].clone()))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
new file mode 100644
index 000000000..8f984210e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -0,0 +1,3639 @@
+//! HIR (previously known as descriptors) provides a high-level object oriented
+//! access to Rust code.
+//!
+//! The principal difference between HIR and syntax trees is that HIR is bound
+//! to a particular crate instance. That is, it has cfg flags and features
+//! applied. So, the relation between syntax and HIR is many-to-one.
+//!
+//! HIR is the public API of the all of the compiler logic above syntax trees.
+//! It is written in "OO" style. Each type is self contained (as in, it knows it's
+//! parents and full context). It should be "clean code".
+//!
+//! `hir_*` crates are the implementation of the compiler logic.
+//! They are written in "ECS" style, with relatively little abstractions.
+//! Many types are not self-contained, and explicitly use local indexes, arenas, etc.
+//!
+//! `hir` is what insulates the "we don't know how to actually write an incremental compiler"
+//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
+//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![recursion_limit = "512"]
+
+mod semantics;
+mod source_analyzer;
+
+mod from_id;
+mod attrs;
+mod has_source;
+
+pub mod diagnostics;
+pub mod db;
+pub mod symbols;
+
+mod display;
+
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
+use either::Either;
+use hir_def::{
+ adt::{ReprKind, VariantData},
+ body::{BodyDiagnostic, SyntheticSyntax},
+ expr::{BindingAnnotation, LabelId, Pat, PatId},
+ generics::{TypeOrConstParamData, TypeParamProvenance},
+ item_tree::ItemTreeNode,
+ lang_item::LangItemTarget,
+ nameres::{self, diagnostics::DefDiagnostic},
+ per_ns::PerNs,
+ resolver::{HasResolver, Resolver},
+ src::HasSource as _,
+ AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+ LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
+ TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+};
+use hir_expand::{name::name, MacroCallKind};
+use hir_ty::{
+ all_super_traits, autoderef,
+ consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt},
+ diagnostics::BodyValidationDiagnostic,
+ method_resolution::{self, TyFingerprint},
+ primitive::UintTy,
+ subst_prefix,
+ traits::FnTrait,
+ AliasEq, AliasTy, BoundVar, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast,
+ ClosureId, DebruijnIndex, GenericArgData, InEnvironment, Interner, ParamKind,
+ QuantifiedWhereClause, Scalar, Solution, Substitution, TraitEnvironment, TraitRefExt, Ty,
+ TyBuilder, TyDefId, TyExt, TyKind, TyVariableKind, WhereClause,
+};
+use itertools::Itertools;
+use nameres::diagnostics::DefDiagnosticKind;
+use once_cell::unsync::Lazy;
+use rustc_hash::FxHashSet;
+use stdx::{format_to, impl_from, never};
+use syntax::{
+ ast::{self, HasAttrs as _, HasDocComments, HasName},
+ AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
+};
+
+use crate::db::{DefDatabase, HirDatabase};
+
+pub use crate::{
+ attrs::{HasAttrs, Namespace},
+ diagnostics::{
+ AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget,
+ MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms,
+ MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch,
+ UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall,
+ UnresolvedModule, UnresolvedProcMacro,
+ },
+ has_source::HasSource,
+ semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
+};
+
+// Be careful with these re-exports.
+//
+// `hir` is the boundary between the compiler and the IDE. It should try hard to
+// isolate the compiler from the ide, to allow the two to be refactored
+// independently. Re-exporting something from the compiler is the sure way to
+// breach the boundary.
+//
+// Generally, a refactoring which *removes* a name from this list is a good
+// idea!
+pub use {
+ cfg::{CfgAtom, CfgExpr, CfgOptions},
+ hir_def::{
+ adt::StructKind,
+ attr::{Attr, Attrs, AttrsWithOwner, Documentation},
+ builtin_attr::AttributeTemplate,
+ find_path::PrefixKind,
+ import_map,
+ nameres::ModuleSource,
+ path::{ModPath, PathKind},
+ type_ref::{Mutability, TypeRef},
+ visibility::Visibility,
+ },
+ hir_expand::{
+ name::{known, Name},
+ ExpandResult, HirFileId, InFile, MacroFile, Origin,
+ },
+ hir_ty::display::HirDisplay,
+};
+
+// These are negative re-exports: pub using these names is forbidden, they
+// should remain private to hir internals.
+#[allow(unused)]
+use {
+ hir_def::path::Path,
+ hir_expand::{hygiene::Hygiene, name::AsName},
+};
+
+/// hir::Crate describes a single crate. It's the main interface with which
+/// a crate's dependencies interact. Mostly, it should be just a proxy for the
+/// root module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Crate {
+ pub(crate) id: CrateId,
+}
+
+#[derive(Debug)]
+pub struct CrateDependency {
+ pub krate: Crate,
+ pub name: Name,
+}
+
+impl Crate {
+ pub fn origin(self, db: &dyn HirDatabase) -> CrateOrigin {
+ db.crate_graph()[self.id].origin.clone()
+ }
+
+ pub fn is_builtin(self, db: &dyn HirDatabase) -> bool {
+ matches!(self.origin(db), CrateOrigin::Lang(_))
+ }
+
+ pub fn dependencies(self, db: &dyn HirDatabase) -> Vec<CrateDependency> {
+ db.crate_graph()[self.id]
+ .dependencies
+ .iter()
+ .map(|dep| {
+ let krate = Crate { id: dep.crate_id };
+ let name = dep.as_name();
+ CrateDependency { krate, name }
+ })
+ .collect()
+ }
+
+ pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
+ let crate_graph = db.crate_graph();
+ crate_graph
+ .iter()
+ .filter(|&krate| {
+ crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id)
+ })
+ .map(|id| Crate { id })
+ .collect()
+ }
+
+ pub fn transitive_reverse_dependencies(
+ self,
+ db: &dyn HirDatabase,
+ ) -> impl Iterator<Item = Crate> {
+ db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
+ }
+
+ pub fn root_module(self, db: &dyn HirDatabase) -> Module {
+ let def_map = db.crate_def_map(self.id);
+ Module { id: def_map.module_id(def_map.root()) }
+ }
+
+ pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let def_map = db.crate_def_map(self.id);
+ def_map.modules().map(|(id, _)| def_map.module_id(id).into()).collect()
+ }
+
+ pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
+ db.crate_graph()[self.id].root_file_id
+ }
+
+ pub fn edition(self, db: &dyn HirDatabase) -> Edition {
+ db.crate_graph()[self.id].edition
+ }
+
+ pub fn version(self, db: &dyn HirDatabase) -> Option<String> {
+ db.crate_graph()[self.id].version.clone()
+ }
+
+ pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateDisplayName> {
+ db.crate_graph()[self.id].display_name.clone()
+ }
+
+ pub fn query_external_importables(
+ self,
+ db: &dyn DefDatabase,
+ query: import_map::Query,
+ ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
+ let _p = profile::span("query_external_importables");
+ import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
+ match ItemInNs::from(item) {
+ ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
+ ItemInNs::Macros(mac_id) => Either::Right(mac_id),
+ }
+ })
+ }
+
+ pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
+ db.crate_graph().iter().map(|id| Crate { id }).collect()
+ }
+
+ /// Try to get the root URL of the documentation of a crate.
+ pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
+ // Look for #![doc(html_root_url = "...")]
+ let attrs = db.attrs(AttrDefId::ModuleId(self.root_module(db).into()));
+ let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url");
+ doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
+ }
+
+ pub fn cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ db.crate_graph()[self.id].cfg_options.clone()
+ }
+
+ pub fn potential_cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ db.crate_graph()[self.id].potential_cfg_options.clone()
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Module {
+ pub(crate) id: ModuleId,
+}
+
+/// The defs which can be visible in the module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ModuleDef {
+ Module(Module),
+ Function(Function),
+ Adt(Adt),
+ // Can't be directly declared, but can be imported.
+ Variant(Variant),
+ Const(Const),
+ Static(Static),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ BuiltinType(BuiltinType),
+ Macro(Macro),
+}
+impl_from!(
+ Module,
+ Function,
+ Adt(Struct, Enum, Union),
+ Variant,
+ Const,
+ Static,
+ Trait,
+ TypeAlias,
+ BuiltinType,
+ Macro
+ for ModuleDef
+);
+
+impl From<VariantDef> for ModuleDef {
+ fn from(var: VariantDef) -> Self {
+ match var {
+ VariantDef::Struct(t) => Adt::from(t).into(),
+ VariantDef::Union(t) => Adt::from(t).into(),
+ VariantDef::Variant(t) => t.into(),
+ }
+ }
+}
+
+impl ModuleDef {
+ pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
+ match self {
+ ModuleDef::Module(it) => it.parent(db),
+ ModuleDef::Function(it) => Some(it.module(db)),
+ ModuleDef::Adt(it) => Some(it.module(db)),
+ ModuleDef::Variant(it) => Some(it.module(db)),
+ ModuleDef::Const(it) => Some(it.module(db)),
+ ModuleDef::Static(it) => Some(it.module(db)),
+ ModuleDef::Trait(it) => Some(it.module(db)),
+ ModuleDef::TypeAlias(it) => Some(it.module(db)),
+ ModuleDef::Macro(it) => Some(it.module(db)),
+ ModuleDef::BuiltinType(_) => None,
+ }
+ }
+
+ pub fn canonical_path(&self, db: &dyn HirDatabase) -> Option<String> {
+ let mut segments = vec![self.name(db)?];
+ for m in self.module(db)?.path_to_root(db) {
+ segments.extend(m.name(db))
+ }
+ segments.reverse();
+ Some(segments.into_iter().join("::"))
+ }
+
+ pub fn canonical_module_path(
+ &self,
+ db: &dyn HirDatabase,
+ ) -> Option<impl Iterator<Item = Module>> {
+ self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let name = match self {
+ ModuleDef::Module(it) => it.name(db)?,
+ ModuleDef::Const(it) => it.name(db)?,
+ ModuleDef::Adt(it) => it.name(db),
+ ModuleDef::Trait(it) => it.name(db),
+ ModuleDef::Function(it) => it.name(db),
+ ModuleDef::Variant(it) => it.name(db),
+ ModuleDef::TypeAlias(it) => it.name(db),
+ ModuleDef::Static(it) => it.name(db),
+ ModuleDef::Macro(it) => it.name(db),
+ ModuleDef::BuiltinType(it) => it.name(),
+ };
+ Some(name)
+ }
+
+ pub fn diagnostics(self, db: &dyn HirDatabase) -> Vec<AnyDiagnostic> {
+ let id = match self {
+ ModuleDef::Adt(it) => match it {
+ Adt::Struct(it) => it.id.into(),
+ Adt::Enum(it) => it.id.into(),
+ Adt::Union(it) => it.id.into(),
+ },
+ ModuleDef::Trait(it) => it.id.into(),
+ ModuleDef::Function(it) => it.id.into(),
+ ModuleDef::TypeAlias(it) => it.id.into(),
+ ModuleDef::Module(it) => it.id.into(),
+ ModuleDef::Const(it) => it.id.into(),
+ ModuleDef::Static(it) => it.id.into(),
+ _ => return Vec::new(),
+ };
+
+ let module = match self.module(db) {
+ Some(it) => it,
+ None => return Vec::new(),
+ };
+
+ let mut acc = Vec::new();
+
+ match self.as_def_with_body() {
+ Some(def) => {
+ def.diagnostics(db, &mut acc);
+ }
+ None => {
+ for diag in hir_ty::diagnostics::incorrect_case(db, module.id.krate(), id) {
+ acc.push(diag.into())
+ }
+ }
+ }
+
+ acc
+ }
+
+ pub fn as_def_with_body(self) -> Option<DefWithBody> {
+ match self {
+ ModuleDef::Function(it) => Some(it.into()),
+ ModuleDef::Const(it) => Some(it.into()),
+ ModuleDef::Static(it) => Some(it.into()),
+
+ ModuleDef::Module(_)
+ | ModuleDef::Adt(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Trait(_)
+ | ModuleDef::TypeAlias(_)
+ | ModuleDef::Macro(_)
+ | ModuleDef::BuiltinType(_) => None,
+ }
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ Some(match self {
+ ModuleDef::Module(it) => it.attrs(db),
+ ModuleDef::Function(it) => it.attrs(db),
+ ModuleDef::Adt(it) => it.attrs(db),
+ ModuleDef::Variant(it) => it.attrs(db),
+ ModuleDef::Const(it) => it.attrs(db),
+ ModuleDef::Static(it) => it.attrs(db),
+ ModuleDef::Trait(it) => it.attrs(db),
+ ModuleDef::TypeAlias(it) => it.attrs(db),
+ ModuleDef::Macro(it) => it.attrs(db),
+ ModuleDef::BuiltinType(_) => return None,
+ })
+ }
+}
+
+impl HasVisibility for ModuleDef {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match *self {
+ ModuleDef::Module(it) => it.visibility(db),
+ ModuleDef::Function(it) => it.visibility(db),
+ ModuleDef::Adt(it) => it.visibility(db),
+ ModuleDef::Const(it) => it.visibility(db),
+ ModuleDef::Static(it) => it.visibility(db),
+ ModuleDef::Trait(it) => it.visibility(db),
+ ModuleDef::TypeAlias(it) => it.visibility(db),
+ ModuleDef::Variant(it) => it.visibility(db),
+ ModuleDef::Macro(it) => it.visibility(db),
+ ModuleDef::BuiltinType(_) => Visibility::Public,
+ }
+ }
+}
+
+impl Module {
+ /// Name of this module.
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let def_map = self.id.def_map(db.upcast());
+ let parent = def_map[self.id.local_id].parent?;
+ def_map[parent].children.iter().find_map(|(name, module_id)| {
+ if *module_id == self.id.local_id {
+ Some(name.clone())
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Returns the crate this module is part of.
+ pub fn krate(self) -> Crate {
+ Crate { id: self.id.krate() }
+ }
+
+ /// Topmost parent of this module. Every module has a `crate_root`, but some
+ /// might be missing `krate`. This can happen if a module's file is not included
+ /// in the module tree of any target in `Cargo.toml`.
+ pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
+ let def_map = db.crate_def_map(self.id.krate());
+ Module { id: def_map.module_id(def_map.root()) }
+ }
+
+ pub fn is_crate_root(self, db: &dyn HirDatabase) -> bool {
+ let def_map = db.crate_def_map(self.id.krate());
+ def_map.root() == self.id.local_id
+ }
+
+ /// Iterates over all child modules.
+ pub fn children(self, db: &dyn HirDatabase) -> impl Iterator<Item = Module> {
+ let def_map = self.id.def_map(db.upcast());
+ let children = def_map[self.id.local_id]
+ .children
+ .iter()
+ .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) })
+ .collect::<Vec<_>>();
+ children.into_iter()
+ }
+
+ /// Finds a parent module.
+ pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
+ // FIXME: handle block expressions as modules (their parent is in a different DefMap)
+ let def_map = self.id.def_map(db.upcast());
+ let parent_id = def_map[self.id.local_id].parent?;
+ Some(Module { id: def_map.module_id(parent_id) })
+ }
+
+ pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let mut res = vec![self];
+ let mut curr = self;
+ while let Some(next) = curr.parent(db) {
+ res.push(next);
+ curr = next
+ }
+ res
+ }
+
+ /// Returns a `ModuleScope`: a set of items, visible in this module.
+ pub fn scope(
+ self,
+ db: &dyn HirDatabase,
+ visible_from: Option<Module>,
+ ) -> Vec<(Name, ScopeDef)> {
+ self.id.def_map(db.upcast())[self.id.local_id]
+ .scope
+ .entries()
+ .filter_map(|(name, def)| {
+ if let Some(m) = visible_from {
+ let filtered =
+ def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id));
+ if filtered.is_none() && !def.is_none() {
+ None
+ } else {
+ Some((name, filtered))
+ }
+ } else {
+ Some((name, def))
+ }
+ })
+ .flat_map(|(name, def)| {
+ ScopeDef::all_items(def).into_iter().map(move |item| (name.clone(), item))
+ })
+ .collect()
+ }
+
+ pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ let _p = profile::span("Module::diagnostics").detail(|| {
+ format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string()))
+ });
+ let def_map = self.id.def_map(db.upcast());
+ for diag in def_map.diagnostics() {
+ if diag.in_module != self.id.local_id {
+ // FIXME: This is accidentally quadratic.
+ continue;
+ }
+ emit_def_diagnostic(db, acc, diag);
+ }
+ for decl in self.declarations(db) {
+ match decl {
+ ModuleDef::Module(m) => {
+ // Only add diagnostics from inline modules
+ if def_map[m.id.local_id].origin.is_inline() {
+ m.diagnostics(db, acc)
+ }
+ }
+ _ => acc.extend(decl.diagnostics(db)),
+ }
+ }
+
+ for impl_def in self.impl_defs(db) {
+ for item in impl_def.items(db) {
+ let def: DefWithBody = match item {
+ AssocItem::Function(it) => it.into(),
+ AssocItem::Const(it) => it.into(),
+ AssocItem::TypeAlias(_) => continue,
+ };
+
+ def.diagnostics(db, acc);
+ }
+ }
+ }
+
+ pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
+ let def_map = self.id.def_map(db.upcast());
+ let scope = &def_map[self.id.local_id].scope;
+ scope
+ .declarations()
+ .map(ModuleDef::from)
+ .chain(scope.unnamed_consts().map(|id| ModuleDef::Const(Const::from(id))))
+ .collect()
+ }
+
+    /// Returns the legacy (textual-scope, `macro_rules!`) macros visible in
+    /// this module.
+    pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
+        let def_map = self.id.def_map(db.upcast());
+        let scope = &def_map[self.id.local_id].scope;
+        scope
+            .legacy_macros()
+            .flat_map(|(_name, defs)| defs.iter().copied())
+            .map(|id| Macro::from(MacroId::from(id)))
+            .collect()
+    }
+
+    /// All `impl` blocks defined directly in this module.
+    pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
+        let def_map = self.id.def_map(db.upcast());
+        let mut impls = Vec::new();
+        for impl_id in def_map[self.id.local_id].scope.impls() {
+            impls.push(Impl::from(impl_id));
+        }
+        impls
+    }
+
+    /// Finds a path that can be used to refer to the given item from within
+    /// this module, if possible.
+    pub fn find_use_path(self, db: &dyn DefDatabase, item: impl Into<ItemInNs>) -> Option<ModPath> {
+        let item = item.into();
+        hir_def::find_path::find_path(db, item.into(), self.into())
+    }
+
+    /// Finds a path that can be used to refer to the given item from within
+    /// this module, if possible. This is used for returning import paths for use-statements.
+    pub fn find_use_path_prefixed(
+        self,
+        db: &dyn DefDatabase,
+        item: impl Into<ItemInNs>,
+        prefix_kind: PrefixKind,
+    ) -> Option<ModPath> {
+        let item = item.into();
+        hir_def::find_path::find_path_prefixed(db, item.into(), self.into(), prefix_kind)
+    }
+}
+
+/// Converts a single def-map diagnostic into its public `AnyDiagnostic`
+/// counterpart and pushes it onto `acc`. Each arm recovers the syntax node
+/// the diagnostic points at from its stable `AstId`/pointer.
+fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
+    match &diag.kind {
+        DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
+            let decl = declaration.to_node(db.upcast());
+            acc.push(
+                UnresolvedModule {
+                    decl: InFile::new(declaration.file_id, AstPtr::new(&decl)),
+                    candidates: candidates.clone(),
+                }
+                .into(),
+            )
+        }
+        DefDiagnosticKind::UnresolvedExternCrate { ast } => {
+            let item = ast.to_node(db.upcast());
+            acc.push(
+                UnresolvedExternCrate { decl: InFile::new(ast.file_id, AstPtr::new(&item)) }.into(),
+            );
+        }
+
+        DefDiagnosticKind::UnresolvedImport { id, index } => {
+            let file_id = id.file_id();
+            let item_tree = id.item_tree(db.upcast());
+            let import = &item_tree[id.value];
+
+            // One `use` item can contain several trees; `index` selects the
+            // specific tree that failed to resolve.
+            let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index);
+            acc.push(
+                UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(),
+            );
+        }
+
+        DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
+            let item = ast.to_node(db.upcast());
+            acc.push(
+                InactiveCode {
+                    node: ast.with_value(AstPtr::new(&item).into()),
+                    cfg: cfg.clone(),
+                    opts: opts.clone(),
+                }
+                .into(),
+            );
+        }
+
+        DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => {
+            let (node, precise_location, macro_name, kind) = precise_macro_call_location(ast, db);
+            acc.push(
+                UnresolvedProcMacro { node, precise_location, macro_name, kind, krate: *krate }
+                    .into(),
+            );
+        }
+
+        DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
+            let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+            acc.push(
+                UnresolvedMacroCall {
+                    macro_call: node,
+                    precise_location,
+                    path: path.clone(),
+                    is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
+                }
+                .into(),
+            );
+        }
+
+        DefDiagnosticKind::MacroError { ast, message } => {
+            let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+            acc.push(MacroError { node, precise_location, message: message.clone() }.into());
+        }
+
+        DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => {
+            let node = ast.to_node(db.upcast());
+            // Must have a name, otherwise we wouldn't emit it.
+            let name = node.name().expect("unimplemented builtin macro with no name");
+            acc.push(
+                UnimplementedBuiltinMacro {
+                    node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&name))),
+                }
+                .into(),
+            );
+        }
+        DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
+            let node = ast.to_node(db.upcast());
+            // `id` is the index of the offending `#[derive]` attribute.
+            let derive = node.attrs().nth(*id as usize);
+            match derive {
+                Some(derive) => {
+                    acc.push(
+                        InvalidDeriveTarget {
+                            node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+                        }
+                        .into(),
+                    );
+                }
+                None => stdx::never!("derive diagnostic on item without derive attribute"),
+            }
+        }
+        DefDiagnosticKind::MalformedDerive { ast, id } => {
+            let node = ast.to_node(db.upcast());
+            let derive = node.attrs().nth(*id as usize);
+            match derive {
+                Some(derive) => {
+                    acc.push(
+                        MalformedDerive {
+                            node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+                        }
+                        .into(),
+                    );
+                }
+                None => stdx::never!("derive diagnostic on item without derive attribute"),
+            }
+        }
+    }
+}
+
+/// Computes, for a macro call, the node to attach a diagnostic to, a more
+/// precise sub-range within it (e.g. just the macro name), the macro's name
+/// if recoverable, and the macro kind.
+fn precise_macro_call_location(
+    ast: &MacroCallKind,
+    db: &dyn HirDatabase,
+) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
+    // FIXME: maaybe we actually want slightly different ranges for the different macro diagnostics
+    // - e.g. the full attribute for macro errors, but only the name for name resolution
+    match ast {
+        MacroCallKind::FnLike { ast_id, .. } => {
+            let node = ast_id.to_node(db.upcast());
+            (
+                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+                // Narrow to the final name segment of the macro path, if present.
+                node.path()
+                    .and_then(|it| it.segment())
+                    .and_then(|it| it.name_ref())
+                    .map(|it| it.syntax().text_range()),
+                node.path().and_then(|it| it.segment()).map(|it| it.to_string()),
+                MacroKind::ProcMacro,
+            )
+        }
+        MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
+            let node = ast_id.to_node(db.upcast());
+            // Compute the precise location of the macro name's token in the derive
+            // list.
+            let token = (|| {
+                let derive_attr = node
+                    .doc_comments_and_attrs()
+                    .nth(*derive_attr_index as usize)
+                    .and_then(Either::left)?;
+                let token_tree = derive_attr.meta()?.token_tree()?;
+                // Split the `#[derive(A, B, ...)]` token tree on commas and
+                // take the `derive_index`-th group, i.e. the failing derive.
+                let group_by = token_tree
+                    .syntax()
+                    .children_with_tokens()
+                    .filter_map(|elem| match elem {
+                        syntax::NodeOrToken::Token(tok) => Some(tok),
+                        _ => None,
+                    })
+                    .group_by(|t| t.kind() == T![,]);
+                let (_, mut group) = group_by
+                    .into_iter()
+                    .filter(|&(comma, _)| !comma)
+                    .nth(*derive_index as usize)?;
+                group.find(|t| t.kind() == T![ident])
+            })();
+            (
+                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+                token.as_ref().map(|tok| tok.text_range()),
+                token.as_ref().map(ToString::to_string),
+                MacroKind::Derive,
+            )
+        }
+        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+            let node = ast_id.to_node(db.upcast());
+            // The invoking attribute was recorded by index; it must still be
+            // there, hence the panic on a stale index.
+            let attr = node
+                .doc_comments_and_attrs()
+                .nth((*invoc_attr_index) as usize)
+                .and_then(Either::left)
+                .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
+
+            (
+                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
+                Some(attr.syntax().text_range()),
+                attr.path()
+                    .and_then(|path| path.segment())
+                    .and_then(|seg| seg.name_ref())
+                    .as_ref()
+                    .map(ToString::to_string),
+                MacroKind::Attr,
+            )
+        }
+    }
+}
+
+impl HasVisibility for Module {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        // A module's visibility is stored directly in its def-map entry.
+        let def_map = self.id.def_map(db.upcast());
+        def_map[self.id.local_id].visibility
+    }
+}
+
+/// A field of a struct, union, or enum variant.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Field {
+    pub(crate) parent: VariantDef,
+    pub(crate) id: LocalFieldId,
+}
+
+/// The syntax a field originates from: a named record field or a
+/// positional tuple field.
+#[derive(Debug, PartialEq, Eq)]
+pub enum FieldSource {
+    Named(ast::RecordField),
+    Pos(ast::TupleField),
+}
+
+impl Field {
+    /// The field's name as written in the definition.
+    pub fn name(&self, db: &dyn HirDatabase) -> Name {
+        let variant_data = self.parent.variant_data(db);
+        variant_data.fields()[self.id].name.clone()
+    }
+
+    /// Returns the type as in the signature of the struct (i.e., with
+    /// placeholder types for type parameters). Only use this in the context of
+    /// the field definition.
+    pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+        let var_id = self.parent.into();
+        let generic_def_id: GenericDefId = match self.parent {
+            VariantDef::Struct(it) => it.id.into(),
+            VariantDef::Union(it) => it.id.into(),
+            VariantDef::Variant(it) => it.parent.id.into(),
+        };
+        let substs = TyBuilder::placeholder_subst(db, generic_def_id);
+        let field_ty = db.field_types(var_id)[self.id].clone();
+        Type::new(db, var_id, field_ty.substitute(Interner, &substs))
+    }
+
+    /// The struct, union, or enum variant this field belongs to.
+    pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
+        self.parent
+    }
+}
+
+impl HasVisibility for Field {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        // Resolve the written visibility relative to the parent variant's scope.
+        let parent_id: hir_def::VariantId = self.parent.into();
+        let resolver = parent_id.resolver(db.upcast());
+        let variant_data = self.parent.variant_data(db);
+        variant_data.fields()[self.id].visibility.resolve(db.upcast(), &resolver)
+    }
+}
+
+/// A `struct` definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Struct {
+    pub(crate) id: StructId,
+}
+
+impl Struct {
+    /// The module this struct is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).container }
+    }
+
+    /// The struct's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.struct_data(self.id).name.clone()
+    }
+
+    /// All fields of this struct, in declaration order.
+    pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+        let data = db.struct_data(self.id);
+        let mut fields = Vec::new();
+        for (id, _field_data) in data.variant_data.fields().iter() {
+            fields.push(Field { parent: self.into(), id });
+        }
+        fields
+    }
+
+    /// This struct as a `Type` (type parameters become unknown types).
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::from_def(db, self.id)
+    }
+
+    /// The `#[repr(..)]` attribute of this struct, if any.
+    pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprKind> {
+        db.struct_data(self.id).repr.clone()
+    }
+
+    /// Whether this is a record, tuple, or unit struct.
+    pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+        self.variant_data(db).kind()
+    }
+
+    fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+        db.struct_data(self.id).variant_data.clone()
+    }
+}
+
+impl HasVisibility for Struct {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        let resolver = self.id.resolver(db.upcast());
+        db.struct_data(self.id).visibility.resolve(db.upcast(), &resolver)
+    }
+}
+
+/// A `union` definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Union {
+    pub(crate) id: UnionId,
+}
+
+impl Union {
+    /// The union's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.union_data(self.id).name.clone()
+    }
+
+    /// The module this union is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).container }
+    }
+
+    /// This union as a `Type` (type parameters become unknown types).
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::from_def(db, self.id)
+    }
+
+    /// All fields of this union, in declaration order.
+    pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+        let data = db.union_data(self.id);
+        let mut fields = Vec::new();
+        for (id, _field_data) in data.variant_data.fields().iter() {
+            fields.push(Field { parent: self.into(), id });
+        }
+        fields
+    }
+
+    fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+        db.union_data(self.id).variant_data.clone()
+    }
+}
+
+impl HasVisibility for Union {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        let resolver = self.id.resolver(db.upcast());
+        db.union_data(self.id).visibility.resolve(db.upcast(), &resolver)
+    }
+}
+
+/// An `enum` definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Enum {
+    pub(crate) id: EnumId,
+}
+
+impl Enum {
+    /// The module this enum is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).container }
+    }
+
+    /// The enum's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.enum_data(self.id).name.clone()
+    }
+
+    /// All variants of this enum, in declaration order.
+    pub fn variants(self, db: &dyn HirDatabase) -> Vec<Variant> {
+        let data = db.enum_data(self.id);
+        let mut variants = Vec::new();
+        for (id, _variant_data) in data.variants.iter() {
+            variants.push(Variant { parent: self, id });
+        }
+        variants
+    }
+
+    /// This enum as a `Type` (type parameters become unknown types).
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::from_def(db, self.id)
+    }
+}
+
+impl HasVisibility for Enum {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        let resolver = self.id.resolver(db.upcast());
+        db.enum_data(self.id).visibility.resolve(db.upcast(), &resolver)
+    }
+}
+
+/// A single variant of an `enum`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Variant {
+    pub(crate) parent: Enum,
+    pub(crate) id: LocalEnumVariantId,
+}
+
+impl Variant {
+    /// The module the parent enum is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.parent.module(db)
+    }
+
+    /// The enum this variant belongs to.
+    pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum {
+        self.parent
+    }
+
+    /// The variant's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.enum_data(self.parent.id).variants[self.id].name.clone()
+    }
+
+    /// All fields of this variant, in declaration order.
+    pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+        let mut fields = Vec::new();
+        for (id, _field_data) in self.variant_data(db).fields().iter() {
+            fields.push(Field { parent: self.into(), id });
+        }
+        fields
+    }
+
+    /// Whether this is a record, tuple, or unit variant.
+    pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+        self.variant_data(db).kind()
+    }
+
+    pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+        db.enum_data(self.parent.id).variants[self.id].variant_data.clone()
+    }
+}
+
+/// Variants inherit visibility from the parent enum.
+impl HasVisibility for Variant {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        let parent = self.parent_enum(db);
+        parent.visibility(db)
+    }
+}
+
+/// A Data Type: any item that introduces a nominal type with fields or
+/// variants (`struct`, `union`, or `enum`).
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum Adt {
+    Struct(Struct),
+    Union(Union),
+    Enum(Enum),
+}
+impl_from!(Struct, Union, Enum for Adt);
+
+impl Adt {
+    /// Whether any generic parameter of this ADT is missing a usable default
+    /// (an "unknown" default counts as missing).
+    pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
+        let subst = db.generic_defaults(self.into());
+        subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+            GenericArgData::Ty(x) => x.is_unknown(),
+            _ => false,
+        })
+    }
+
+    /// Turns this ADT into a type. Any type parameters of the ADT will be
+    /// turned into unknown types, which is good for e.g. finding the most
+    /// general set of completions, but will not look very nice when printed.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let id = AdtId::from(self);
+        Type::from_def(db, id)
+    }
+
+    /// Turns this ADT into a type with the given type parameters. This isn't
+    /// the greatest API, FIXME find a better one.
+    pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type {
+        let id = AdtId::from(self);
+        let mut it = args.iter().map(|t| t.ty.clone());
+        let ty = TyBuilder::def_ty(db, id.into())
+            .fill(|x| {
+                // Consume the caller-supplied types positionally; missing
+                // arguments are filled with the error type.
+                let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
+                match x {
+                    ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+                    ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+                }
+            })
+            .build();
+        Type::new(db, id, ty)
+    }
+
+    /// The module this ADT is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        match self {
+            Adt::Struct(s) => s.module(db),
+            Adt::Union(s) => s.module(db),
+            Adt::Enum(e) => e.module(db),
+        }
+    }
+
+    /// The ADT's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        match self {
+            Adt::Struct(s) => s.name(db),
+            Adt::Union(u) => u.name(db),
+            Adt::Enum(e) => e.name(db),
+        }
+    }
+
+    /// Returns the inner `Enum` if this ADT is an enum.
+    pub fn as_enum(&self) -> Option<Enum> {
+        if let Self::Enum(v) = self {
+            Some(*v)
+        } else {
+            None
+        }
+    }
+}
+
+impl HasVisibility for Adt {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        // Delegate to the concrete item's visibility.
+        match *self {
+            Adt::Struct(it) => it.visibility(db),
+            Adt::Union(it) => it.visibility(db),
+            Adt::Enum(it) => it.visibility(db),
+        }
+    }
+}
+
+/// Anything that owns a list of fields: a struct, a union, or one enum
+/// variant.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum VariantDef {
+    Struct(Struct),
+    Union(Union),
+    Variant(Variant),
+}
+impl_from!(Struct, Union, Variant for VariantDef);
+
+impl VariantDef {
+    /// All fields of the underlying item, in declaration order.
+    pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+        match self {
+            VariantDef::Struct(it) => it.fields(db),
+            VariantDef::Union(it) => it.fields(db),
+            VariantDef::Variant(it) => it.fields(db),
+        }
+    }
+
+    /// The module the underlying item is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        match self {
+            VariantDef::Struct(it) => it.module(db),
+            VariantDef::Union(it) => it.module(db),
+            VariantDef::Variant(it) => it.module(db),
+        }
+    }
+
+    /// The underlying item's name.
+    pub fn name(&self, db: &dyn HirDatabase) -> Name {
+        match *self {
+            VariantDef::Struct(s) => s.name(db),
+            VariantDef::Union(u) => u.name(db),
+            VariantDef::Variant(e) => e.name(db),
+        }
+    }
+
+    pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+        match self {
+            VariantDef::Struct(it) => it.variant_data(db),
+            VariantDef::Union(it) => it.variant_data(db),
+            VariantDef::Variant(it) => it.variant_data(db),
+        }
+    }
+}
+
+/// The defs which have a body (an expression tree that gets type-inferred):
+/// functions, statics, and consts.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum DefWithBody {
+    Function(Function),
+    Static(Static),
+    Const(Const),
+}
+impl_from!(Function, Const, Static for DefWithBody);
+
+impl DefWithBody {
+    /// The module the body's owner is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        match self {
+            DefWithBody::Const(c) => c.module(db),
+            DefWithBody::Function(f) => f.module(db),
+            DefWithBody::Static(s) => s.module(db),
+        }
+    }
+
+    /// The owner's name; `None` only for unnamed consts (`const _: () = ..;`).
+    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+        match self {
+            DefWithBody::Function(f) => Some(f.name(db)),
+            DefWithBody::Static(s) => Some(s.name(db)),
+            DefWithBody::Const(c) => c.name(db),
+        }
+    }
+
+    /// Returns the type this def's body has to evaluate to.
+    pub fn body_type(self, db: &dyn HirDatabase) -> Type {
+        match self {
+            DefWithBody::Function(it) => it.ret_type(db),
+            DefWithBody::Static(it) => it.ty(db),
+            DefWithBody::Const(it) => it.ty(db),
+        }
+    }
+
+    /// Collects all diagnostics of this body into `acc`, in several passes:
+    /// inner-block def maps, body-lowering diagnostics, type-inference
+    /// diagnostics, type mismatches, missing-unsafe errors, body validation
+    /// (missing fields / match arms, filter_map+next lint), and finally
+    /// case-style lints for the owning item.
+    pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+        let krate = self.module(db).id.krate();
+
+        let (body, source_map) = db.body_with_source_map(self.into());
+
+        // Items declared in block expressions get their own def maps; surface
+        // their diagnostics here.
+        for (_, def_map) in body.blocks(db.upcast()) {
+            for diag in def_map.diagnostics() {
+                emit_def_diagnostic(db, acc, diag);
+            }
+        }
+
+        // Diagnostics recorded while lowering the body to HIR.
+        for diag in source_map.diagnostics() {
+            match diag {
+                BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
+                    InactiveCode { node: node.clone(), cfg: cfg.clone(), opts: opts.clone() }
+                        .into(),
+                ),
+                BodyDiagnostic::MacroError { node, message } => acc.push(
+                    MacroError {
+                        node: node.clone().map(|it| it.into()),
+                        precise_location: None,
+                        message: message.to_string(),
+                    }
+                    .into(),
+                ),
+                BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push(
+                    UnresolvedProcMacro {
+                        node: node.clone().map(|it| it.into()),
+                        precise_location: None,
+                        macro_name: None,
+                        kind: MacroKind::ProcMacro,
+                        krate: *krate,
+                    }
+                    .into(),
+                ),
+                BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push(
+                    UnresolvedMacroCall {
+                        macro_call: node.clone().map(|ast_ptr| ast_ptr.into()),
+                        precise_location: None,
+                        path: path.clone(),
+                        is_bang: true,
+                    }
+                    .into(),
+                ),
+            }
+        }
+
+        let infer = db.infer(self.into());
+        // Intentionally shadows the eager `source_map` above: from here on it
+        // is only needed for mapping inference results back to syntax.
+        let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
+        for d in &infer.diagnostics {
+            match d {
+                hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
+                    let field = source_map.field_syntax(*expr);
+                    acc.push(NoSuchField { field }.into())
+                }
+                hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr } => {
+                    let expr = source_map
+                        .expr_syntax(*expr)
+                        .expect("break outside of loop in synthetic syntax");
+                    acc.push(BreakOutsideOfLoop { expr }.into())
+                }
+                hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
+                    match source_map.expr_syntax(*call_expr) {
+                        Ok(source_ptr) => acc.push(
+                            MismatchedArgCount {
+                                call_expr: source_ptr,
+                                expected: *expected,
+                                found: *found,
+                            }
+                            .into(),
+                        ),
+                        Err(SyntheticSyntax) => (),
+                    }
+                }
+            }
+        }
+        // Type mismatches found during inference; desugared expressions
+        // (`SyntheticSyntax`) have no source to point at and are skipped.
+        for (expr, mismatch) in infer.expr_type_mismatches() {
+            let expr = match source_map.expr_syntax(expr) {
+                Ok(expr) => expr,
+                Err(SyntheticSyntax) => continue,
+            };
+            acc.push(
+                TypeMismatch {
+                    expr,
+                    expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.clone()),
+                    actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.clone()),
+                }
+                .into(),
+            );
+        }
+
+        for expr in hir_ty::diagnostics::missing_unsafe(db, self.into()) {
+            match source_map.expr_syntax(expr) {
+                Ok(expr) => acc.push(MissingUnsafe { expr }.into()),
+                Err(SyntheticSyntax) => {
+                    // FIXME: Here and eslwhere in this file, the `expr` was
+                    // desugared, report or assert that this doesn't happen.
+                }
+            }
+        }
+
+        for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
+            match diagnostic {
+                BodyValidationDiagnostic::RecordMissingFields {
+                    record,
+                    variant,
+                    missed_fields,
+                } => {
+                    let variant_data = variant.variant_data(db.upcast());
+                    let missed_fields = missed_fields
+                        .into_iter()
+                        .map(|idx| variant_data.fields()[idx].name.clone())
+                        .collect();
+
+                    // `record` is either a record expression or a record pattern.
+                    match record {
+                        Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
+                            Ok(source_ptr) => {
+                                let root = source_ptr.file_syntax(db.upcast());
+                                if let ast::Expr::RecordExpr(record_expr) =
+                                    &source_ptr.value.to_node(&root)
+                                {
+                                    if record_expr.record_expr_field_list().is_some() {
+                                        acc.push(
+                                            MissingFields {
+                                                file: source_ptr.file_id,
+                                                field_list_parent: Either::Left(AstPtr::new(
+                                                    record_expr,
+                                                )),
+                                                field_list_parent_path: record_expr
+                                                    .path()
+                                                    .map(|path| AstPtr::new(&path)),
+                                                missed_fields,
+                                            }
+                                            .into(),
+                                        )
+                                    }
+                                }
+                            }
+                            Err(SyntheticSyntax) => (),
+                        },
+                        Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
+                            Ok(source_ptr) => {
+                                if let Some(expr) = source_ptr.value.as_ref().left() {
+                                    let root = source_ptr.file_syntax(db.upcast());
+                                    if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
+                                        if record_pat.record_pat_field_list().is_some() {
+                                            acc.push(
+                                                MissingFields {
+                                                    file: source_ptr.file_id,
+                                                    field_list_parent: Either::Right(AstPtr::new(
+                                                        &record_pat,
+                                                    )),
+                                                    field_list_parent_path: record_pat
+                                                        .path()
+                                                        .map(|path| AstPtr::new(&path)),
+                                                    missed_fields,
+                                                }
+                                                .into(),
+                                            )
+                                        }
+                                    }
+                                }
+                            }
+                            Err(SyntheticSyntax) => (),
+                        },
+                    }
+                }
+                BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
+                    if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
+                        acc.push(
+                            ReplaceFilterMapNextWithFindMap {
+                                file: next_source_ptr.file_id,
+                                next_expr: next_source_ptr.value,
+                            }
+                            .into(),
+                        );
+                    }
+                }
+                BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
+                    match source_map.expr_syntax(match_expr) {
+                        Ok(source_ptr) => {
+                            let root = source_ptr.file_syntax(db.upcast());
+                            if let ast::Expr::MatchExpr(match_expr) =
+                                &source_ptr.value.to_node(&root)
+                            {
+                                if let Some(match_expr) = match_expr.expr() {
+                                    acc.push(
+                                        MissingMatchArms {
+                                            file: source_ptr.file_id,
+                                            match_expr: AstPtr::new(&match_expr),
+                                            uncovered_patterns,
+                                        }
+                                        .into(),
+                                    );
+                                }
+                            }
+                        }
+                        Err(SyntheticSyntax) => (),
+                    }
+                }
+            }
+        }
+
+        // Finally, naming-convention lints for the item itself.
+        let def: ModuleDef = match self {
+            DefWithBody::Function(it) => it.into(),
+            DefWithBody::Static(it) => it.into(),
+            DefWithBody::Const(it) => it.into(),
+        };
+        for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
+            acc.push(diag.into())
+        }
+    }
+}
+
+/// A free-standing function or an associated function/method.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Function {
+    pub(crate) id: FunctionId,
+}
+
+impl Function {
+    /// The module this function is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.lookup(db.upcast()).module(db.upcast()).into()
+    }
+
+    /// The function's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.function_data(self.id).name.clone()
+    }
+
+    /// Get this function's return type
+    pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
+        let resolver = self.id.resolver(db.upcast());
+        // Instantiate the signature with placeholders for the function's own
+        // generic parameters.
+        let substs = TyBuilder::placeholder_subst(db, self.id);
+        let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+        let ty = callable_sig.ret().clone();
+        Type::new_with_resolver_inner(db, &resolver, ty)
+    }
+
+    /// For an `async fn`, the type the awaited future resolves to
+    /// (the `Future::Output` of the written return type); `None` for
+    /// non-async functions.
+    pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
+        if !self.is_async(db) {
+            return None;
+        }
+        let resolver = self.id.resolver(db.upcast());
+        let substs = TyBuilder::placeholder_subst(db, self.id);
+        let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+        let ret_ty = callable_sig.ret().clone();
+        // The desugared return type is `impl Future<Output = T>`; find the
+        // `Output = T` bound and return `T`.
+        for pred in ret_ty.impl_trait_bounds(db).into_iter().flatten() {
+            if let WhereClause::AliasEq(output_eq) = pred.into_value_and_skipped_binders().0 {
+                return Type::new_with_resolver_inner(db, &resolver, output_eq.ty).into();
+            }
+        }
+        never!("Async fn ret_type should be impl Future");
+        None
+    }
+
+    /// Whether the function has a `self` parameter.
+    pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
+        db.function_data(self.id).has_self_param()
+    }
+
+    /// The `self` parameter, if present.
+    pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
+        self.has_self_param(db).then(|| SelfParam { func: self.id })
+    }
+
+    /// All parameters, including `self` if present.
+    pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
+        let environment = db.trait_environment(self.id.into());
+        let substs = TyBuilder::placeholder_subst(db, self.id);
+        let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+        callable_sig
+            .params()
+            .iter()
+            .enumerate()
+            .map(|(idx, ty)| {
+                let ty = Type { env: environment.clone(), ty: ty.clone() };
+                Param { func: self, ty, idx }
+            })
+            .collect()
+    }
+
+    /// The non-`self` parameters, or `None` if this is not a method.
+    pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
+        if self.self_param(db).is_none() {
+            return None;
+        }
+        Some(self.params_without_self(db))
+    }
+
+    /// All parameters except `self` (indices still count `self`, so they
+    /// match `assoc_fn_params`).
+    pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param> {
+        let environment = db.trait_environment(self.id.into());
+        let substs = TyBuilder::placeholder_subst(db, self.id);
+        let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+        let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
+        callable_sig
+            .params()
+            .iter()
+            .enumerate()
+            .skip(skip)
+            .map(|(idx, ty)| {
+                let ty = Type { env: environment.clone(), ty: ty.clone() };
+                Param { func: self, ty, idx }
+            })
+            .collect()
+    }
+
+    /// Whether the function is declared `const`.
+    pub fn is_const(self, db: &dyn HirDatabase) -> bool {
+        db.function_data(self.id).has_const_kw()
+    }
+
+    /// Whether the function is declared `async`.
+    pub fn is_async(self, db: &dyn HirDatabase) -> bool {
+        db.function_data(self.id).has_async_kw()
+    }
+
+    /// Whether calling this function requires an `unsafe` context.
+    pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
+        hir_ty::is_fn_unsafe_to_call(db, self.id)
+    }
+
+    /// Whether this function declaration has a definition.
+    ///
+    /// This is false in the case of required (not provided) trait methods.
+    pub fn has_body(self, db: &dyn HirDatabase) -> bool {
+        db.function_data(self.id).has_body()
+    }
+
+    /// If this function is a proc-macro entry point (`#[proc_macro]`,
+    /// `#[proc_macro_attribute]`, or `#[proc_macro_derive]`), the macro it
+    /// defines.
+    pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
+        let function_data = db.function_data(self.id);
+        let attrs = &function_data.attrs;
+        // FIXME: Store this in FunctionData flags?
+        if !(attrs.is_proc_macro()
+            || attrs.is_proc_macro_attribute()
+            || attrs.is_proc_macro_derive())
+        {
+            return None;
+        }
+        let loc = self.id.lookup(db.upcast());
+        let def_map = db.crate_def_map(loc.krate(db).into());
+        def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
+    }
+
+    /// A textual representation of the HIR of this function for debugging purposes.
+    pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
+        let body = db.body(self.id.into());
+
+        let mut result = String::new();
+        format_to!(result, "HIR expressions in the body of `{}`:\n", self.name(db));
+        for (id, expr) in body.exprs.iter() {
+            format_to!(result, "{:?}: {:?}\n", id, expr);
+        }
+
+        result
+    }
+}
+
+// Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
+/// How a parameter takes its value: by shared reference, exclusive
+/// reference, or by value.
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum Access {
+    Shared,
+    Exclusive,
+    Owned,
+}
+
+impl From<hir_ty::Mutability> for Access {
+    fn from(mutability: hir_ty::Mutability) -> Access {
+        // `&mut T` grants exclusive access; `&T` only shared access.
+        match mutability {
+            hir_ty::Mutability::Mut => Access::Exclusive,
+            hir_ty::Mutability::Not => Access::Shared,
+        }
+    }
+}
+
+/// A (non-`self`) parameter of a `Function`, together with its type.
+#[derive(Clone, Debug)]
+pub struct Param {
+    func: Function,
+    /// The index in parameter list, including self parameter.
+    idx: usize,
+    ty: Type,
+}
+
+impl Param {
+    /// The parameter's type as captured at construction time.
+    pub fn ty(&self) -> &Type {
+        &self.ty
+    }
+
+    /// The parameter's name, if the declaration binds one.
+    pub fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
+        db.function_data(self.func.id).params[self.idx].0.clone()
+    }
+
+    /// Returns the local bound by this parameter, but only if the parameter
+    /// pattern is a plain binding (`fn f(x: T)`, not e.g. `fn f((a, b): T)`).
+    pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> {
+        let parent = DefWithBodyId::FunctionId(self.func.into());
+        let body = db.body(parent);
+        let pat_id = body.params[self.idx];
+        if let Pat::Bind { .. } = &body[pat_id] {
+            // Reuse the already-fetched `pat_id` instead of indexing
+            // `body.params` a second time.
+            Some(Local { parent, pat_id })
+        } else {
+            None
+        }
+    }
+
+    /// The parameter's pattern in the source, if available.
+    pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> {
+        self.source(db).and_then(|p| p.value.pat())
+    }
+
+    /// The parameter's syntax node. `idx` counts the `self` parameter while
+    /// `ParamList::params()` does not, hence the `checked_sub(1)` shift
+    /// (which also yields `None` when `self` itself is asked for).
+    pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::Param>> {
+        let InFile { file_id, value } = self.func.source(db)?;
+        let params = value.param_list()?;
+        if params.self_param().is_some() {
+            params.params().nth(self.idx.checked_sub(1)?)
+        } else {
+            params.params().nth(self.idx)
+        }
+        .map(|value| InFile { file_id, value })
+    }
+}
+
+/// The `self` parameter of a method.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+    func: FunctionId,
+}
+
+impl SelfParam {
+    /// How `self` is taken: `self` → `Owned`, `&self` → `Shared`,
+    /// `&mut self` → `Exclusive`. Derived from the written type of the first
+    /// parameter.
+    pub fn access(self, db: &dyn HirDatabase) -> Access {
+        let func_data = db.function_data(self.func);
+        func_data
+            .params
+            .first()
+            .map(|(_, param)| match &**param {
+                TypeRef::Reference(.., mutability) => match mutability {
+                    hir_def::type_ref::Mutability::Shared => Access::Shared,
+                    hir_def::type_ref::Mutability::Mut => Access::Exclusive,
+                },
+                _ => Access::Owned,
+            })
+            .unwrap_or(Access::Owned)
+    }
+
+    /// The canonical source text for this kind of `self` parameter.
+    pub fn display(self, db: &dyn HirDatabase) -> &'static str {
+        match self.access(db) {
+            Access::Shared => "&self",
+            Access::Exclusive => "&mut self",
+            Access::Owned => "self",
+        }
+    }
+
+    /// The `self` parameter's syntax node, if available.
+    pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
+        let InFile { file_id, value } = Function::from(self.func).source(db)?;
+        value
+            .param_list()
+            .and_then(|params| params.self_param())
+            .map(|value| InFile { file_id, value })
+    }
+
+    /// The type of `self`, with placeholders for the function's generics.
+    pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+        let substs = TyBuilder::placeholder_subst(db, self.func);
+        let callable_sig =
+            db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
+        let environment = db.trait_environment(self.func.into());
+        // A `SelfParam` only exists for methods, so the signature is
+        // guaranteed to have at least one parameter.
+        let ty = callable_sig.params()[0].clone();
+        Type { env: environment, ty }
+    }
+}
+
+impl HasVisibility for Function {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        // Functions have a dedicated visibility query (it accounts for
+        // trait-item rules), so no manual resolver is needed here.
+        db.function_visibility(self.id)
+    }
+}
+
+/// A `const` item (named or unnamed, free or associated).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Const {
+    pub(crate) id: ConstId,
+}
+
+impl Const {
+    /// The module this const is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+    }
+
+    /// The const's name; `None` for `const _: () = ..;`.
+    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+        db.const_data(self.id).name.clone()
+    }
+
+    /// The initializer expression, if the source is available.
+    pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
+        let src = self.source(db)?;
+        src.value.body()
+    }
+
+    /// The const's declared type.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let resolver = self.id.resolver(db.upcast());
+        let data = db.const_data(self.id);
+        let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+        let lowered = ctx.lower_ty(&data.type_ref);
+        Type::new_with_resolver_inner(db, &resolver, lowered)
+    }
+
+    /// Evaluates the const's value at compile time.
+    pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
+        db.const_eval(self.id)
+    }
+}
+
+impl HasVisibility for Const {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        // Consts have a dedicated visibility query (it accounts for
+        // trait-item rules), so no manual resolver is needed here.
+        db.const_visibility(self.id)
+    }
+}
+
+/// A `static` item.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Static {
+    pub(crate) id: StaticId,
+}
+
+impl Static {
+    /// The module this static is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+    }
+
+    /// The static's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.static_data(self.id).name.clone()
+    }
+
+    /// Whether this is a `static mut`.
+    pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
+        db.static_data(self.id).mutable
+    }
+
+    /// The initializer expression, if the source is available.
+    pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
+        let src = self.source(db)?;
+        src.value.body()
+    }
+
+    /// The static's declared type.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let resolver = self.id.resolver(db.upcast());
+        let data = db.static_data(self.id);
+        let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+        let lowered = ctx.lower_ty(&data.type_ref);
+        Type::new_with_resolver_inner(db, &resolver, lowered)
+    }
+}
+
+impl HasVisibility for Static {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        let resolver = self.id.resolver(db.upcast());
+        db.static_data(self.id).visibility.resolve(db.upcast(), &resolver)
+    }
+}
+
+/// A `trait` definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Trait {
+    pub(crate) id: TraitId,
+}
+
+impl Trait {
+    /// Looks up a lang-item trait (e.g. `Deref`) by its lang name in `krate`.
+    pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
+        db.lang_item(krate.into(), name.to_smol_str())
+            .and_then(LangItemTarget::as_trait)
+            .map(Into::into)
+    }
+
+    /// The module this trait is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).container }
+    }
+
+    /// The trait's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.trait_data(self.id).name.clone()
+    }
+
+    /// The trait's own associated items.
+    pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+        db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
+    }
+
+    /// Associated items of this trait and all of its supertraits.
+    pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+        let traits = all_super_traits(db.upcast(), self.into());
+        traits.iter().flat_map(|tr| Trait::from(*tr).items(db)).collect()
+    }
+
+    /// Whether this is an `auto trait`.
+    pub fn is_auto(self, db: &dyn HirDatabase) -> bool {
+        db.trait_data(self.id).is_auto
+    }
+
+    /// Whether this is an `unsafe trait`.
+    pub fn is_unsafe(&self, db: &dyn HirDatabase) -> bool {
+        db.trait_data(self.id).is_unsafe
+    }
+
+    /// Counts the trait's explicitly written type/const parameters (implicit
+    /// parameters such as `Self` are excluded via their provenance). With
+    /// `count_required_only`, parameters that have a default are skipped too.
+    pub fn type_or_const_param_count(
+        &self,
+        db: &dyn HirDatabase,
+        count_required_only: bool,
+    ) -> usize {
+        db.generic_params(GenericDefId::from(self.id))
+            .type_or_consts
+            .iter()
+            .filter(|(_, ty)| match ty {
+                TypeOrConstParamData::TypeParamData(ty)
+                    if ty.provenance != TypeParamProvenance::TypeParamList =>
+                {
+                    false
+                }
+                _ => true,
+            })
+            .filter(|(_, ty)| !count_required_only || !ty.has_default())
+            .count()
+    }
+}
+
+impl HasVisibility for Trait {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        // Same pattern as the other item kinds: written visibility resolved
+        // in the item's own scope.
+        let data = db.trait_data(self.id);
+        let resolver = self.id.resolver(db.upcast());
+        data.visibility.resolve(db.upcast(), &resolver)
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+    pub(crate) id: TypeAliasId,
+}
+
+impl TypeAlias {
+    /// True if any generic default of this alias lowers to an unknown type,
+    /// i.e. some type parameter is left without a usable default.
+    pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
+        let subst = db.generic_defaults(self.id.into());
+        subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+            GenericArgData::Ty(x) => x.is_unknown(),
+            _ => false,
+        })
+    }
+
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+    }
+
+    /// The aliased type as written (`type Foo = <this>;`), if present.
+    pub fn type_ref(self, db: &dyn HirDatabase) -> Option<TypeRef> {
+        db.type_alias_data(self.id).type_ref.as_deref().cloned()
+    }
+
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::from_def(db, self.id)
+    }
+
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.type_alias_data(self.id).name.clone()
+    }
+}
+
+impl HasVisibility for TypeAlias {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        // The local was previously (misleadingly) named `function_data`, a
+        // copy-paste leftover: this is the *type alias'* data.
+        let type_alias_data = db.type_alias_data(self.id);
+        type_alias_data.visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+    }
+}
+
+/// A primitive (builtin) type such as `u32`, `bool` or `str`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct BuiltinType {
+    pub(crate) inner: hir_def::builtin_type::BuiltinType,
+}
+
+impl BuiltinType {
+    /// The `str` builtin.
+    pub fn str() -> BuiltinType {
+        BuiltinType { inner: hir_def::builtin_type::BuiltinType::Str }
+    }
+
+    /// The semantic `Type` for this builtin.
+    ///
+    /// NOTE(review): uses an arbitrary crate (the first one in the crate
+    /// graph) for the environment, and panics if the crate graph is empty.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::new_for_crate(db.crate_graph().iter().next().unwrap(), TyBuilder::builtin(self.inner))
+    }
+
+    pub fn name(self) -> Name {
+        self.inner.as_name()
+    }
+
+    /// Signed integer type (`i8`..`i128`, `isize`).
+    pub fn is_int(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Int(_))
+    }
+
+    /// Unsigned integer type (`u8`..`u128`, `usize`).
+    pub fn is_uint(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Uint(_))
+    }
+
+    pub fn is_float(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Float(_))
+    }
+
+    pub fn is_char(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Char)
+    }
+
+    pub fn is_bool(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Bool)
+    }
+
+    pub fn is_str(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Str)
+    }
+}
+
+/// How a macro is defined and invoked.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroKind {
+    /// `macro_rules!` or Macros 2.0 macro.
+    Declarative,
+    /// A built-in or custom derive.
+    Derive,
+    /// A built-in function-like macro.
+    BuiltIn,
+    /// A procedural attribute macro.
+    Attr,
+    /// A function-like procedural macro.
+    ProcMacro,
+}
+
+/// A macro of any kind (declarative, built-in, or procedural).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Macro {
+    pub(crate) id: MacroId,
+}
+
+impl Macro {
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.module(db.upcast()) }
+    }
+
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        match self.id {
+            MacroId::Macro2Id(id) => db.macro2_data(id).name.clone(),
+            MacroId::MacroRulesId(id) => db.macro_rules_data(id).name.clone(),
+            MacroId::ProcMacroId(id) => db.proc_macro_data(id).name.clone(),
+        }
+    }
+
+    /// Whether this is a `macro_rules!` macro marked `#[macro_export]`.
+    pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
+        matches!(self.id, MacroId::MacroRulesId(id) if db.macro_rules_data(id).macro_export)
+    }
+
+    /// Classifies this macro by its definition form and expander.
+    pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind {
+        // Macros 2.0 and `macro_rules!` items are classified by their
+        // expander in exactly the same way; keep the mapping in one place
+        // instead of duplicating the match per id variant.
+        let from_expander = |expander| match expander {
+            MacroExpander::Declarative => MacroKind::Declarative,
+            MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
+            MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
+            MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
+        };
+        match self.id {
+            MacroId::Macro2Id(it) => from_expander(it.lookup(db.upcast()).expander),
+            MacroId::MacroRulesId(it) => from_expander(it.lookup(db.upcast()).expander),
+            MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind {
+                ProcMacroKind::CustomDerive => MacroKind::Derive,
+                ProcMacroKind::FuncLike => MacroKind::ProcMacro,
+                ProcMacroKind::Attr => MacroKind::Attr,
+            },
+        }
+    }
+
+    /// Whether the macro is invoked with function-like syntax (`m!(...)`).
+    pub fn is_fn_like(&self, db: &dyn HirDatabase) -> bool {
+        match self.kind(db) {
+            MacroKind::Declarative | MacroKind::BuiltIn | MacroKind::ProcMacro => true,
+            MacroKind::Attr | MacroKind::Derive => false,
+        }
+    }
+
+    /// Whether this is one of the compiler's built-in derives (proc-macro
+    /// derives are excluded).
+    pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {
+        let expander = match self.id {
+            MacroId::Macro2Id(it) => it.lookup(db.upcast()).expander,
+            MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
+            MacroId::ProcMacroId(_) => return false,
+        };
+        matches!(expander, MacroExpander::BuiltInDerive(_))
+    }
+
+    pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
+        matches!(self.kind(db), MacroKind::Attr)
+    }
+
+    pub fn is_derive(&self, db: &dyn HirDatabase) -> bool {
+        matches!(self.kind(db), MacroKind::Derive)
+    }
+}
+
+impl HasVisibility for Macro {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        match self.id {
+            // Macros 2.0 carry real visibility syntax; resolve it.
+            MacroId::Macro2Id(id) => {
+                let data = db.macro2_data(id);
+                let visibility = &data.visibility;
+                visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+            }
+            // `macro_rules!` and proc macros have no visibility syntax;
+            // they are treated as public here.
+            MacroId::MacroRulesId(_) => Visibility::Public,
+            MacroId::ProcMacroId(_) => Visibility::Public,
+        }
+    }
+}
+
+/// An item, tagged with the namespace (types / values / macros) it lives in.
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum ItemInNs {
+    Types(ModuleDef),
+    Values(ModuleDef),
+    Macros(Macro),
+}
+
+impl From<Macro> for ItemInNs {
+    fn from(it: Macro) -> Self {
+        Self::Macros(it)
+    }
+}
+
+impl From<ModuleDef> for ItemInNs {
+    /// Sorts a `ModuleDef` into the namespace it inhabits: statics, consts
+    /// and functions live in the value namespace, everything else is put in
+    /// the type namespace.
+    fn from(module_def: ModuleDef) -> Self {
+        let in_value_ns = matches!(
+            module_def,
+            ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_)
+        );
+        if in_value_ns {
+            ItemInNs::Values(module_def)
+        } else {
+            ItemInNs::Types(module_def)
+        }
+    }
+}
+
+impl ItemInNs {
+    /// The wrapped `ModuleDef`, unless this is a macro.
+    pub fn as_module_def(self) -> Option<ModuleDef> {
+        match self {
+            ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
+            ItemInNs::Macros(_) => None,
+        }
+    }
+
+    /// Returns the crate defining this item (or `None` if `self` is built-in).
+    pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
+        match self {
+            ItemInNs::Types(did) | ItemInNs::Values(did) => did.module(db).map(|m| m.krate()),
+            ItemInNs::Macros(id) => Some(id.module(db).krate()),
+        }
+    }
+
+    /// The attributes attached to the item, if it can carry any.
+    pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+        match self {
+            ItemInNs::Types(it) | ItemInNs::Values(it) => it.attrs(db),
+            ItemInNs::Macros(it) => Some(it.attrs(db)),
+        }
+    }
+}
+
+/// Invariant: `inner.as_assoc_item(db).is_some()`
+/// We do not actively enforce this invariant.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+    Function(Function),
+    Const(Const),
+    TypeAlias(TypeAlias),
+}
+/// The item (trait or impl block) an associated item belongs to.
+#[derive(Debug)]
+pub enum AssocItemContainer {
+    Trait(Trait),
+    Impl(Impl),
+}
+/// Conversion into an [`AssocItem`]; succeeds only for items that actually
+/// live inside a trait or an impl block.
+pub trait AsAssocItem {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem>;
+}
+
+impl AsAssocItem for Function {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        as_assoc_item(db, AssocItem::Function, self.id)
+    }
+}
+impl AsAssocItem for Const {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        as_assoc_item(db, AssocItem::Const, self.id)
+    }
+}
+impl AsAssocItem for TypeAlias {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        as_assoc_item(db, AssocItem::TypeAlias, self.id)
+    }
+}
+impl AsAssocItem for ModuleDef {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        match self {
+            ModuleDef::Function(it) => it.as_assoc_item(db),
+            ModuleDef::Const(it) => it.as_assoc_item(db),
+            ModuleDef::TypeAlias(it) => it.as_assoc_item(db),
+            // All other module defs can never be associated items.
+            _ => None,
+        }
+    }
+}
+/// Shared helper: wraps `id` via `ctor` when its container is a trait or an
+/// impl block, and returns `None` for free (module / extern-block) items.
+fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem>
+where
+    ID: Lookup<Data = AssocItemLoc<AST>>,
+    DEF: From<ID>,
+    CTOR: FnOnce(DEF) -> AssocItem,
+    AST: ItemTreeNode,
+{
+    match id.lookup(db.upcast()).container {
+        ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
+        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+    }
+}
+
+impl AssocItem {
+    /// The item's name; `Const`s may be unnamed, hence the `Option`.
+    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+        match self {
+            AssocItem::Function(it) => Some(it.name(db)),
+            AssocItem::Const(it) => it.name(db),
+            AssocItem::TypeAlias(it) => Some(it.name(db)),
+        }
+    }
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        match self {
+            AssocItem::Function(f) => f.module(db),
+            AssocItem::Const(c) => c.module(db),
+            AssocItem::TypeAlias(t) => t.module(db),
+        }
+    }
+    /// The trait or impl this item is declared in.
+    ///
+    /// Panics if the invariant documented on [`AssocItem`] is violated,
+    /// i.e. the item is actually a free item.
+    pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer {
+        let container = match self {
+            AssocItem::Function(it) => it.id.lookup(db.upcast()).container,
+            AssocItem::Const(it) => it.id.lookup(db.upcast()).container,
+            AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container,
+        };
+        match container {
+            ItemContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()),
+            ItemContainerId::ImplId(id) => AssocItemContainer::Impl(id.into()),
+            ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+                panic!("invalid AssocItem")
+            }
+        }
+    }
+
+    /// The trait this item is declared in, if it is a trait item.
+    pub fn containing_trait(self, db: &dyn HirDatabase) -> Option<Trait> {
+        match self.container(db) {
+            AssocItemContainer::Trait(t) => Some(t),
+            _ => None,
+        }
+    }
+
+    /// The implemented trait, if this item sits in a trait impl block.
+    pub fn containing_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
+        match self.container(db) {
+            AssocItemContainer::Impl(i) => i.trait_(db),
+            _ => None,
+        }
+    }
+
+    /// The trait associated with this item, via either its declaration or a
+    /// trait impl block.
+    pub fn containing_trait_or_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
+        match self.container(db) {
+            AssocItemContainer::Trait(t) => Some(t),
+            AssocItemContainer::Impl(i) => i.trait_(db),
+        }
+    }
+}
+
+impl HasVisibility for AssocItem {
+    /// Delegates to the wrapped item's visibility.
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        match self {
+            AssocItem::Function(f) => f.visibility(db),
+            AssocItem::Const(c) => c.visibility(db),
+            AssocItem::TypeAlias(t) => t.visibility(db),
+        }
+    }
+}
+
+impl From<AssocItem> for ModuleDef {
+    fn from(assoc: AssocItem) -> Self {
+        match assoc {
+            AssocItem::Function(it) => ModuleDef::Function(it),
+            AssocItem::Const(it) => ModuleDef::Const(it),
+            AssocItem::TypeAlias(it) => ModuleDef::TypeAlias(it),
+        }
+    }
+}
+
+/// Any definition that can carry generic parameters.
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum GenericDef {
+    Function(Function),
+    Adt(Adt),
+    Trait(Trait),
+    TypeAlias(TypeAlias),
+    Impl(Impl),
+    // enum variants cannot have generics themselves, but their parent enums
+    // can, and this makes some code easier to write
+    Variant(Variant),
+    // consts can have type parameters from their parents (i.e. associated consts of traits)
+    Const(Const),
+}
+impl_from!(
+    Function,
+    Adt(Struct, Enum, Union),
+    Trait,
+    TypeAlias,
+    Impl,
+    Variant,
+    Const
+    for GenericDef
+);
+
+impl GenericDef {
+    /// All generic parameters: lifetimes first, then type/const parameters.
+    pub fn params(self, db: &dyn HirDatabase) -> Vec<GenericParam> {
+        let generics = db.generic_params(self.into());
+        let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
+            let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
+            // A `TypeOrConstParam` splits into either a const or a type param.
+            match toc.split(db) {
+                Either::Left(x) => GenericParam::ConstParam(x),
+                Either::Right(x) => GenericParam::TypeParam(x),
+            }
+        });
+        let lt_params = generics
+            .lifetimes
+            .iter()
+            .map(|(local_id, _)| LifetimeParam {
+                id: LifetimeParamId { parent: self.into(), local_id },
+            })
+            .map(GenericParam::LifetimeParam);
+        lt_params.chain(ty_params).collect()
+    }
+
+    /// Only the type and const parameters (lifetimes are excluded).
+    pub fn type_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
+        let generics = db.generic_params(self.into());
+        generics
+            .type_or_consts
+            .iter()
+            .map(|(local_id, _)| TypeOrConstParam {
+                id: TypeOrConstParamId { parent: self.into(), local_id },
+            })
+            .collect()
+    }
+}
+
+/// A single local definition.
+///
+/// If the definition of this local is part of a "MultiLocal" — a local that
+/// has multiple declarations due to or-patterns — then this references only a
+/// single one of those declarations.
+/// To retrieve the other locals you should use [`Local::associated_locals`].
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Local {
+    pub(crate) parent: DefWithBodyId,
+    pub(crate) pat_id: PatId,
+}
+
+impl Local {
+    /// Whether this local is a function parameter (including `self`).
+    pub fn is_param(self, db: &dyn HirDatabase) -> bool {
+        let src = self.source(db);
+        match src.value {
+            // Walk up through the pattern nodes: if we reach a `Param` node
+            // before leaving pattern territory, the binding belongs to a
+            // parameter pattern.
+            Either::Left(pat) => pat
+                .syntax()
+                .ancestors()
+                .map(|it| it.kind())
+                .take_while(|&kind| ast::Pat::can_cast(kind) || ast::Param::can_cast(kind))
+                .any(ast::Param::can_cast),
+            // A `SelfParam` source is by definition a parameter.
+            Either::Right(_) => true,
+        }
+    }
+
+    /// This local viewed as a `self` parameter, if it is one.
+    pub fn as_self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
+        match self.parent {
+            DefWithBodyId::FunctionId(func) if self.is_self(db) => Some(SelfParam { func }),
+            _ => None,
+        }
+    }
+
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let body = db.body(self.parent);
+        match &body[self.pat_id] {
+            Pat::Bind { name, .. } => name.clone(),
+            // Locals should always be binding patterns; fall back gracefully.
+            _ => {
+                stdx::never!("hir::Local is missing a name!");
+                Name::missing()
+            }
+        }
+    }
+
+    /// Whether this local is named `self`.
+    pub fn is_self(self, db: &dyn HirDatabase) -> bool {
+        self.name(db) == name![self]
+    }
+
+    /// Whether the binding is `mut` (`let mut x`).
+    pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
+        let body = db.body(self.parent);
+        matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
+    }
+
+    /// Whether the binding is by reference (`ref x` / `ref mut x`).
+    pub fn is_ref(self, db: &dyn HirDatabase) -> bool {
+        let body = db.body(self.parent);
+        matches!(
+            &body[self.pat_id],
+            Pat::Bind { mode: BindingAnnotation::Ref | BindingAnnotation::RefMut, .. }
+        )
+    }
+
+    /// The body owner this local is defined in.
+    pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+        self.parent.into()
+    }
+
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.parent(db).module(db)
+    }
+
+    /// The inferred type of this local.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let def = self.parent;
+        let infer = db.infer(def);
+        let ty = infer[self.pat_id].clone();
+        Type::new(db, def, ty)
+    }
+
+    /// All locals that bind the same name across the branches of an
+    /// or-pattern, including this one.
+    pub fn associated_locals(self, db: &dyn HirDatabase) -> Box<[Local]> {
+        let body = db.body(self.parent);
+        body.ident_patterns_for(&self.pat_id)
+            .iter()
+            .map(|&pat_id| Local { parent: self.parent, pat_id })
+            .collect()
+    }
+
+    /// If this local is part of a multi-local, retrieve the representative local.
+    /// That is the local that references are being resolved to.
+    pub fn representative(self, db: &dyn HirDatabase) -> Local {
+        let body = db.body(self.parent);
+        Local { pat_id: body.pattern_representative(self.pat_id), ..self }
+    }
+
+    /// Maps this local back to its syntax: either an `IdentPat` or a
+    /// `SelfParam`.
+    pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
+        let (_body, source_map) = db.body_with_source_map(self.parent);
+        // NOTE(review): assumes every local has a source pattern ("Hmm...");
+        // presumably this can fail for synthetic/macro-generated patterns —
+        // worth confirming.
+        let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
+        let root = src.file_syntax(db.upcast());
+        src.map(|ast| match ast {
+            // Suspicious unwrap: assumes the stored pattern pointer casts to
+            // `IdentPat`, i.e. that `Local`s are only ever created for ident
+            // patterns — TODO confirm.
+            Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
+            Either::Right(it) => Either::Right(it.to_node(&root)),
+        })
+    }
+}
+
+/// A helper attribute declared by a derive macro.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct DeriveHelper {
+    // The derive macro that declares the helper.
+    pub(crate) derive: MacroId,
+    // Index into the derive's helper list.
+    pub(crate) idx: usize,
+}
+
+impl DeriveHelper {
+    /// The derive macro this helper attribute belongs to.
+    pub fn derive(&self) -> Macro {
+        // `self.derive` already is a `MacroId`; no conversion needed.
+        Macro { id: self.derive }
+    }
+
+    /// The helper's name, or `Name::missing()` if it cannot be resolved.
+    pub fn name(&self, db: &dyn HirDatabase) -> Name {
+        match self.derive {
+            // Declarative macros do not declare helper attributes.
+            MacroId::Macro2Id(_) | MacroId::MacroRulesId(_) => None,
+            MacroId::ProcMacroId(proc_macro) => db
+                .proc_macro_data(proc_macro)
+                .helpers
+                .as_ref()
+                .and_then(|it| it.get(self.idx))
+                .cloned(),
+        }
+        .unwrap_or_else(Name::missing)
+    }
+}
+
+// FIXME: Wrong name? This is could also be a registered attribute
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct BuiltinAttr {
+    // `None` for attributes from the compiler's inert builtin list,
+    // `Some` for attributes registered by a crate.
+    krate: Option<CrateId>,
+    // Index into either `INERT_ATTRIBUTES` or the crate's registered list.
+    idx: usize,
+}
+
+impl BuiltinAttr {
+    // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
+    /// Looks `name` up among the builtin attributes first, then among the
+    /// attributes registered by `krate`.
+    pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
+        if let builtin @ Some(_) = Self::builtin(name) {
+            return builtin;
+        }
+        let idx = db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)?;
+        Some(BuiltinAttr { krate: Some(krate.id), idx })
+    }
+
+    fn builtin(name: &str) -> Option<Self> {
+        hir_def::builtin_attr::INERT_ATTRIBUTES
+            .iter()
+            .position(|tool| tool.name == name)
+            .map(|idx| BuiltinAttr { krate: None, idx })
+    }
+
+    pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
+        // FIXME: Return a `Name` here
+        match self.krate {
+            Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx].clone(),
+            None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].name),
+        }
+    }
+
+    /// The attribute's argument template; only known for builtin attributes.
+    pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> {
+        match self.krate {
+            Some(_) => None,
+            None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].template),
+        }
+    }
+}
+
+/// A tool module usable in tool attributes (see
+/// `hir_def::builtin_attr::TOOL_MODULES`), or one registered by a crate.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct ToolModule {
+    // `None` for builtin tool modules, `Some` for crate-registered ones.
+    krate: Option<CrateId>,
+    // Index into the builtin table or the crate's registered-tool list.
+    idx: usize,
+}
+
+impl ToolModule {
+    // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
+    /// Looks `name` up among the builtin tool modules first, then among the
+    /// tools registered by `krate`.
+    pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
+        Self::builtin(name).or_else(|| {
+            let def_map = db.crate_def_map(krate.id);
+            let idx = def_map.registered_tools().iter().position(|it| it == name)?;
+            Some(ToolModule { krate: Some(krate.id), idx })
+        })
+    }
+
+    fn builtin(name: &str) -> Option<Self> {
+        let tools = &hir_def::builtin_attr::TOOL_MODULES;
+        tools.iter().position(|&tool| tool == name).map(|idx| ToolModule { krate: None, idx })
+    }
+
+    pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
+        // FIXME: Return a `Name` here
+        if let Some(krate) = self.krate {
+            db.crate_def_map(krate).registered_tools()[self.idx].clone()
+        } else {
+            SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx])
+        }
+    }
+}
+
+/// A loop/block label (`'label: ...`) inside a body.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Label {
+    pub(crate) parent: DefWithBodyId,
+    pub(crate) label_id: LabelId,
+}
+
+impl Label {
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.parent(db).module(db)
+    }
+
+    /// The body owner this label occurs in.
+    pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+        self.parent.into()
+    }
+
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let body = db.body(self.parent);
+        body[self.label_id].name.clone()
+    }
+
+    /// Maps this label back to its syntax node.
+    pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
+        let (_body, source_map) = db.body_with_source_map(self.parent);
+        let src = source_map.label_syntax(self.label_id);
+        let root = src.file_syntax(db.upcast());
+        src.map(|ast| ast.to_node(&root))
+    }
+}
+
+/// Any kind of generic parameter: type, const, or lifetime.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+    TypeParam(TypeParam),
+    ConstParam(ConstParam),
+    LifetimeParam(LifetimeParam),
+}
+impl_from!(TypeParam, ConstParam, LifetimeParam for GenericParam);
+
+impl GenericParam {
+    /// Delegates to the wrapped parameter's module.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        match self {
+            GenericParam::TypeParam(it) => it.module(db),
+            GenericParam::ConstParam(it) => it.module(db),
+            GenericParam::LifetimeParam(it) => it.module(db),
+        }
+    }
+
+    /// Delegates to the wrapped parameter's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        match self {
+            GenericParam::TypeParam(it) => it.name(db),
+            GenericParam::ConstParam(it) => it.name(db),
+            GenericParam::LifetimeParam(it) => it.name(db),
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+    pub(crate) id: TypeParamId,
+}
+
+impl TypeParam {
+    /// Widens this into the combined type-or-const parameter view.
+    pub fn merge(self) -> TypeOrConstParam {
+        TypeOrConstParam { id: self.id.into() }
+    }
+
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        self.merge().name(db)
+    }
+
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.parent().module(db.upcast()).into()
+    }
+
+    /// Is this type parameter implicitly introduced (eg. `Self` in a trait or an `impl Trait`
+    /// argument)?
+    pub fn is_implicit(self, db: &dyn HirDatabase) -> bool {
+        let params = db.generic_params(self.id.parent());
+        let data = &params.type_or_consts[self.id.local_id()];
+        match data.type_param().unwrap().provenance {
+            hir_def::generics::TypeParamProvenance::TypeParamList => false,
+            hir_def::generics::TypeParamProvenance::TraitSelf
+            | hir_def::generics::TypeParamProvenance::ArgumentImplTrait => true,
+        }
+    }
+
+    /// The placeholder type standing for this parameter.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let resolver = self.id.parent().resolver(db.upcast());
+        let ty =
+            TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner);
+        Type::new_with_resolver_inner(db, &resolver, ty)
+    }
+
+    /// FIXME: this only lists trait bounds from the item defining the type
+    /// parameter, not additional bounds that might be added e.g. by a method if
+    /// the parameter comes from an impl!
+    pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
+        db.generic_predicates_for_param(self.id.parent(), self.id.into(), None)
+            .iter()
+            .filter_map(|pred| match &pred.skip_binders().skip_binders() {
+                hir_ty::WhereClause::Implemented(trait_ref) => {
+                    Some(Trait::from(trait_ref.hir_trait_id()))
+                }
+                _ => None,
+            })
+            .collect()
+    }
+
+    /// The declared default type of this parameter, if any.
+    pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
+        let params = db.generic_defaults(self.id.parent());
+        let local_idx = hir_ty::param_idx(db, self.id.into())?;
+        let resolver = self.id.parent().resolver(db.upcast());
+        let ty = params.get(local_idx)?.clone();
+        // A default may refer to earlier parameters; substitute those with
+        // placeholders so the resulting type stands on its own.
+        let subst = TyBuilder::placeholder_subst(db, self.id.parent());
+        let ty = ty.substitute(Interner, &subst_prefix(&subst, local_idx));
+        match ty.data(Interner) {
+            GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
+            _ => None,
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+    pub(crate) id: LifetimeParamId,
+}
+
+impl LifetimeParam {
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let params = db.generic_params(self.id.parent);
+        params.lifetimes[self.id.local_id].name.clone()
+    }
+
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.parent.module(db.upcast()).into()
+    }
+
+    /// The generic definition that declares this lifetime.
+    pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+        self.id.parent.into()
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+    pub(crate) id: ConstParamId,
+}
+
+impl ConstParam {
+    /// Widens this into the combined type-or-const parameter view.
+    pub fn merge(self) -> TypeOrConstParam {
+        TypeOrConstParam { id: self.id.into() }
+    }
+
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let params = db.generic_params(self.id.parent());
+        match params.type_or_consts[self.id.local_id()].name() {
+            Some(x) => x.clone(),
+            // Const params are expected to always carry a name; `never!`
+            // flags the broken invariant and we degrade gracefully.
+            None => {
+                never!();
+                Name::missing()
+            }
+        }
+    }
+
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.parent().module(db.upcast()).into()
+    }
+
+    /// The generic definition that declares this const parameter.
+    pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+        self.id.parent().into()
+    }
+
+    /// The declared type of the const parameter (`const N: <this>`).
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::new(db, self.id.parent(), db.const_param_ty(self.id))
+    }
+}
+
+/// A generic parameter that is either a type or a const parameter.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct TypeOrConstParam {
+    pub(crate) id: TypeOrConstParamId,
+}
+
+impl TypeOrConstParam {
+    /// The parameter's name, or `Name::missing()` if it has none.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let params = db.generic_params(self.id.parent);
+        match params.type_or_consts[self.id.local_id].name() {
+            Some(n) => n.clone(),
+            _ => Name::missing(),
+        }
+    }
+
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.parent.module(db.upcast()).into()
+    }
+
+    /// The generic definition that declares this parameter.
+    pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+        self.id.parent.into()
+    }
+
+    /// Narrows to the concrete parameter kind:
+    /// `Left` for const params, `Right` for type params.
+    pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> {
+        let params = db.generic_params(self.id.parent);
+        match &params.type_or_consts[self.id.local_id] {
+            hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
+                Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) })
+            }
+            hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
+                Either::Left(ConstParam { id: ConstParamId::from_unchecked(self.id) })
+            }
+        }
+    }
+
+    /// Delegates to the concrete parameter's `ty`.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        match self.split(db) {
+            Either::Left(x) => x.ty(db),
+            Either::Right(x) => x.ty(db),
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Impl {
+    pub(crate) id: ImplId,
+}
+
+impl Impl {
+    /// All inherent and trait impls defined in `krate`.
+    pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<Impl> {
+        let inherent = db.inherent_impls_in_crate(krate.id);
+        let trait_ = db.trait_impls_in_crate(krate.id);
+
+        inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
+    }
+
+    /// All impls whose self type matches the given type (compared by type
+    /// constructor, looking through one level of references).
+    pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
+        // Crates in which impls for this type's constructor may be defined.
+        let def_crates = match method_resolution::def_crates(db, &ty, env.krate) {
+            Some(def_crates) => def_crates,
+            None => return Vec::new(),
+        };
+
+        let filter = |impl_def: &Impl| {
+            let self_ty = impl_def.self_ty(db);
+            let rref = self_ty.remove_ref();
+            ty.equals_ctor(rref.as_ref().map_or(&self_ty.ty, |it| &it.ty))
+        };
+
+        // Fingerprint used to prune trait-impl candidates; without one we
+        // cannot search, so bail out.
+        let fp = TyFingerprint::for_inherent_impl(&ty);
+        let fp = match fp {
+            Some(fp) => fp,
+            None => return Vec::new(),
+        };
+
+        // Inherent impls can only live in the defining crates.
+        let mut all = Vec::new();
+        def_crates.iter().for_each(|&id| {
+            all.extend(
+                db.inherent_impls_in_crate(id)
+                    .for_self_ty(&ty)
+                    .iter()
+                    .cloned()
+                    .map(Self::from)
+                    .filter(filter),
+            )
+        });
+        // Trait impls may also live in any crate that (transitively) depends
+        // on a defining crate; blanket impls are explicitly excluded.
+        for id in def_crates
+            .iter()
+            .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
+            .map(|Crate { id }| id)
+            .chain(def_crates.iter().copied())
+            .unique()
+        {
+            all.extend(
+                db.trait_impls_in_crate(id)
+                    .for_self_ty_without_blanket_impls(fp)
+                    .map(Self::from)
+                    .filter(filter),
+            );
+        }
+        all
+    }
+
+    /// All impls of `trait_`, collected from every transitive reverse
+    /// dependency of its defining crate.
+    pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
+        let krate = trait_.module(db).krate();
+        let mut all = Vec::new();
+        for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
+            let impls = db.trait_impls_in_crate(id);
+            all.extend(impls.for_trait(trait_.id).map(Self::from))
+        }
+        all
+    }
+
+    // FIXME: the return type is wrong. This should be a hir version of
+    // `TraitRef` (to account for parameters and qualifiers)
+    pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
+        let trait_ref = db.impl_trait(self.id)?.skip_binders().clone();
+        let id = hir_ty::from_chalk_trait_id(trait_ref.trait_id);
+        Some(Trait { id })
+    }
+
+    /// The impl's self type (`impl Trait for <this>`), with its parameters
+    /// substituted by placeholders.
+    pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
+        let resolver = self.id.resolver(db.upcast());
+        let substs = TyBuilder::placeholder_subst(db, self.id);
+        let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
+        Type::new_with_resolver_inner(db, &resolver, ty)
+    }
+
+    /// The associated items defined in this impl block.
+    pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+        db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect()
+    }
+
+    /// Whether this is a negative impl (`impl !Trait for ...`).
+    pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
+        db.impl_data(self.id).is_negative
+    }
+
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.lookup(db.upcast()).container.into()
+    }
+
+    /// If this impl was produced by expanding a builtin derive, returns the
+    /// originating attribute.
+    pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+        let src = self.source(db)?;
+        src.file_id.is_builtin_derive(db.upcast())
+    }
+}
+
+/// A semantic type, paired with the trait environment it is interpreted in.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct Type {
+    env: Arc<TraitEnvironment>,
+    ty: Ty,
+}
+
+impl Type {
+ pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver, ty: Ty) -> Type {
+ Type::new_with_resolver_inner(db, resolver, ty)
+ }
+
+ pub(crate) fn new_with_resolver_inner(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ ty: Ty,
+ ) -> Type {
+ let environment = resolver.generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(resolver.krate())),
+ |d| db.trait_environment(d),
+ );
+ Type { env: environment, ty }
+ }
+
+ pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
+ Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+ }
+
+ pub fn reference(inner: &Type, m: Mutability) -> Type {
+ inner.derived(
+ TyKind::Ref(
+ if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not },
+ hir_ty::static_lifetime(),
+ inner.ty.clone(),
+ )
+ .intern(Interner),
+ )
+ }
+
+ fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
+ let resolver = lexical_env.resolver(db.upcast());
+ let environment = resolver.generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(resolver.krate())),
+ |d| db.trait_environment(d),
+ );
+ Type { env: environment, ty }
+ }
+
+ fn from_def(db: &dyn HirDatabase, def: impl HasResolver + Into<TyDefId>) -> Type {
+ let ty = TyBuilder::def_ty(db, def.into()).fill_with_unknown().build();
+ Type::new(db, def, ty)
+ }
+
+ pub fn new_slice(ty: Type) -> Type {
+ Type { env: ty.env, ty: TyBuilder::slice(ty.ty) }
+ }
+
+ pub fn is_unit(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Tuple(0, ..))
+ }
+
+ pub fn is_bool(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Bool))
+ }
+
+ pub fn is_never(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Never)
+ }
+
+ pub fn is_mutable_reference(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Ref(hir_ty::Mutability::Mut, ..))
+ }
+
+ pub fn is_reference(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Ref(..))
+ }
+
+ pub fn as_reference(&self) -> Option<(Type, Mutability)> {
+ let (ty, _lt, m) = self.ty.as_reference()?;
+ let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
+ Some((self.derived(ty.clone()), m))
+ }
+
+ pub fn is_slice(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Slice(..))
+ }
+
+ pub fn is_usize(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize)))
+ }
+
+ pub fn remove_ref(&self) -> Option<Type> {
+ match &self.ty.kind(Interner) {
+ TyKind::Ref(.., ty) => Some(self.derived(ty.clone())),
+ _ => None,
+ }
+ }
+
+ pub fn strip_references(&self) -> Type {
+ self.derived(self.ty.strip_references().clone())
+ }
+
+ pub fn strip_reference(&self) -> Type {
+ self.derived(self.ty.strip_reference().clone())
+ }
+
+ pub fn is_unknown(&self) -> bool {
+ self.ty.is_unknown()
+ }
+
+ /// Checks that particular type `ty` implements `std::future::Future`.
+ /// This function is used in `.await` syntax completion.
+ pub fn impls_future(&self, db: &dyn HirDatabase) -> bool {
+ let std_future_trait = db
+ .lang_item(self.env.krate, SmolStr::new_inline("future_trait"))
+ .and_then(|it| it.as_trait());
+ let std_future_trait = match std_future_trait {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), std_future_trait)
+ }
+
+ /// Checks that particular type `ty` implements `std::ops::FnOnce`.
+ ///
+ /// This function can be used to check if a particular type is callable, since FnOnce is a
+ /// supertrait of Fn and FnMut, so all callable types implements at least FnOnce.
+ pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool {
+ let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.env.krate) {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait_unique(
+ &canonical_ty,
+ db,
+ self.env.clone(),
+ fnonce_trait,
+ )
+ }
+
+ pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
+ let mut it = args.iter().map(|t| t.ty.clone());
+ let trait_ref = TyBuilder::trait_ref(db, trait_.id)
+ .push(self.ty.clone())
+ .fill(|x| {
+ let r = it.next().unwrap();
+ match x {
+ ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+ ParamKind::Const(ty) => {
+ // FIXME: this code is not covered in tests.
+ unknown_const_as_generic(ty.clone())
+ }
+ }
+ })
+ .build();
+
+ let goal = Canonical {
+ value: hir_ty::InEnvironment::new(&self.env.env, trait_ref.cast(Interner)),
+ binders: CanonicalVarKinds::empty(Interner),
+ };
+
+ db.trait_solve(self.env.krate, goal).is_some()
+ }
+
+ pub fn normalize_trait_assoc_type(
+ &self,
+ db: &dyn HirDatabase,
+ args: &[Type],
+ alias: TypeAlias,
+ ) -> Option<Type> {
+ let mut args = args.iter();
+ let projection = TyBuilder::assoc_type_projection(db, alias.id)
+ .push(self.ty.clone())
+ .fill(|x| {
+ // FIXME: this code is not covered in tests.
+ match x {
+ ParamKind::Type => {
+ GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
+ }
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ }
+ })
+ .build();
+ let goal = hir_ty::make_canonical(
+ InEnvironment::new(
+ &self.env.env,
+ AliasEq {
+ alias: AliasTy::Projection(projection),
+ ty: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+ .intern(Interner),
+ }
+ .cast(Interner),
+ ),
+ [TyVariableKind::General].into_iter(),
+ );
+
+ match db.trait_solve(self.env.krate, goal)? {
+ Solution::Unique(s) => s
+ .value
+ .subst
+ .as_slice(Interner)
+ .first()
+ .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone())),
+ Solution::Ambig(_) => None,
+ }
+ }
+
+ pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
+ let lang_item = db.lang_item(self.env.krate, SmolStr::new_inline("copy"));
+ let copy_trait = match lang_item {
+ Some(LangItemTarget::TraitId(it)) => it,
+ _ => return false,
+ };
+ self.impls_trait(db, copy_trait.into(), &[])
+ }
+
+ pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
+ let callee = match self.ty.kind(Interner) {
+ TyKind::Closure(id, _) => Callee::Closure(*id),
+ TyKind::Function(_) => Callee::FnPtr,
+ _ => Callee::Def(self.ty.callable_def(db)?),
+ };
+
+ let sig = self.ty.callable_sig(db)?;
+ Some(Callable { ty: self.clone(), sig, callee, is_bound_method: false })
+ }
+
+ pub fn is_closure(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Closure { .. })
+ }
+
+ pub fn is_fn(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. })
+ }
+
+ pub fn is_array(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Array(..))
+ }
+
+ pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
+ let adt_id = match *self.ty.kind(Interner) {
+ TyKind::Adt(hir_ty::AdtId(adt_id), ..) => adt_id,
+ _ => return false,
+ };
+
+ let adt = adt_id.into();
+ match adt {
+ Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)),
+ _ => false,
+ }
+ }
+
+ pub fn is_raw_ptr(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Raw(..))
+ }
+
+ pub fn contains_unknown(&self) -> bool {
+ return go(&self.ty);
+
+ fn go(ty: &Ty) -> bool {
+ match ty.kind(Interner) {
+ TyKind::Error => true,
+
+ TyKind::Adt(_, substs)
+ | TyKind::AssociatedType(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::OpaqueType(_, substs)
+ | TyKind::FnDef(_, substs)
+ | TyKind::Closure(_, substs) => {
+ substs.iter(Interner).filter_map(|a| a.ty(Interner)).any(go)
+ }
+
+ TyKind::Array(_ty, len) if len.is_unknown() => true,
+ TyKind::Array(ty, _)
+ | TyKind::Slice(ty)
+ | TyKind::Raw(_, ty)
+ | TyKind::Ref(_, _, ty) => go(ty),
+
+ TyKind::Scalar(_)
+ | TyKind::Str
+ | TyKind::Never
+ | TyKind::Placeholder(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _)
+ | TyKind::Dyn(_)
+ | TyKind::Function(_)
+ | TyKind::Alias(_)
+ | TyKind::Foreign(_)
+ | TyKind::Generator(..)
+ | TyKind::GeneratorWitness(..) => false,
+ }
+ }
+ }
+
+ pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
+ let (variant_id, substs) = match self.ty.kind(Interner) {
+ TyKind::Adt(hir_ty::AdtId(AdtId::StructId(s)), substs) => ((*s).into(), substs),
+ TyKind::Adt(hir_ty::AdtId(AdtId::UnionId(u)), substs) => ((*u).into(), substs),
+ _ => return Vec::new(),
+ };
+
+ db.field_types(variant_id)
+ .iter()
+ .map(|(local_id, ty)| {
+ let def = Field { parent: variant_id.into(), id: local_id };
+ let ty = ty.clone().substitute(Interner, substs);
+ (def, self.derived(ty))
+ })
+ .collect()
+ }
+
+ pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
+ if let TyKind::Tuple(_, substs) = &self.ty.kind(Interner) {
+ substs
+ .iter(Interner)
+ .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone()))
+ .collect()
+ } else {
+ Vec::new()
+ }
+ }
+
+ pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
+ self.autoderef_(db).map(move |ty| self.derived(ty))
+ }
+
+ fn autoderef_<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Ty> + 'a {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+ let environment = self.env.clone();
+ autoderef(db, environment, canonical).map(|canonical| canonical.value)
+ }
+
+ // This would be nicer if it just returned an iterator, but that runs into
+ // lifetime problems, because we need to borrow temp `CrateImplDefs`.
+ pub fn iterate_assoc_items<T>(
+ &self,
+ db: &dyn HirDatabase,
+ krate: Crate,
+ mut callback: impl FnMut(AssocItem) -> Option<T>,
+ ) -> Option<T> {
+ let mut slot = None;
+ self.iterate_assoc_items_dyn(db, krate, &mut |assoc_item_id| {
+ slot = callback(assoc_item_id.into());
+ slot.is_some()
+ });
+ slot
+ }
+
+ fn iterate_assoc_items_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ krate: Crate,
+ callback: &mut dyn FnMut(AssocItemId) -> bool,
+ ) {
+ let def_crates = match method_resolution::def_crates(db, &self.ty, krate.id) {
+ Some(it) => it,
+ None => return,
+ };
+ for krate in def_crates {
+ let impls = db.inherent_impls_in_crate(krate);
+
+ for impl_def in impls.for_self_ty(&self.ty) {
+ for &item in db.impl_data(*impl_def).items.iter() {
+ if callback(item) {
+ return;
+ }
+ }
+ }
+ }
+ }
+
+ pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
+ self.ty
+ .strip_references()
+ .as_adt()
+ .into_iter()
+ .flat_map(|(_, substs)| substs.iter(Interner))
+ .filter_map(|arg| arg.ty(Interner).cloned())
+ .map(move |ty| self.derived(ty))
+ }
+
+ pub fn iterate_method_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ // FIXME this can be retrieved from `scope`, except autoimport uses this
+ // to specify a different set, so the method needs to be split
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ mut callback: impl FnMut(Function) -> Option<T>,
+ ) -> Option<T> {
+ let _p = profile::span("iterate_method_candidates");
+ let mut slot = None;
+
+ self.iterate_method_candidates_dyn(
+ db,
+ scope,
+ traits_in_scope,
+ with_local_impls,
+ name,
+ &mut |assoc_item_id| {
+ if let AssocItemId::FunctionId(func) = assoc_item_id {
+ if let Some(res) = callback(func.into()) {
+ slot = Some(res);
+ return ControlFlow::Break(());
+ }
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+ }
+
+ fn iterate_method_candidates_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+ ) {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+
+ let krate = scope.krate();
+ let environment = scope.resolver().generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(krate.id)),
+ |d| db.trait_environment(d),
+ );
+
+ method_resolution::iterate_method_candidates_dyn(
+ &canonical,
+ db,
+ environment,
+ traits_in_scope,
+ with_local_impls.and_then(|b| b.id.containing_block()).into(),
+ name,
+ method_resolution::LookupMode::MethodCall,
+ &mut |_adj, id| callback(id),
+ );
+ }
+
+ pub fn iterate_path_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ mut callback: impl FnMut(AssocItem) -> Option<T>,
+ ) -> Option<T> {
+ let _p = profile::span("iterate_path_candidates");
+ let mut slot = None;
+ self.iterate_path_candidates_dyn(
+ db,
+ scope,
+ traits_in_scope,
+ with_local_impls,
+ name,
+ &mut |assoc_item_id| {
+ if let Some(res) = callback(assoc_item_id.into()) {
+ slot = Some(res);
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+ }
+
+ fn iterate_path_candidates_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+ ) {
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+
+ let krate = scope.krate();
+ let environment = scope.resolver().generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(krate.id)),
+ |d| db.trait_environment(d),
+ );
+
+ method_resolution::iterate_path_candidates(
+ &canonical,
+ db,
+ environment,
+ traits_in_scope,
+ with_local_impls.and_then(|b| b.id.containing_block()).into(),
+ name,
+ &mut |id| callback(id),
+ );
+ }
+
+ pub fn as_adt(&self) -> Option<Adt> {
+ let (adt, _subst) = self.ty.as_adt()?;
+ Some(adt.into())
+ }
+
+ pub fn as_builtin(&self) -> Option<BuiltinType> {
+ self.ty.as_builtin().map(|inner| BuiltinType { inner })
+ }
+
+ pub fn as_dyn_trait(&self) -> Option<Trait> {
+ self.ty.dyn_trait().map(Into::into)
+ }
+
+ /// If a type can be represented as `dyn Trait`, returns all traits accessible via this type,
+ /// or an empty iterator otherwise.
+ pub fn applicable_inherent_traits<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ ) -> impl Iterator<Item = Trait> + 'a {
+ let _p = profile::span("applicable_inherent_traits");
+ self.autoderef_(db)
+ .filter_map(|ty| ty.dyn_trait())
+ .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
+ .map(Trait::from)
+ }
+
+ pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
+ let _p = profile::span("env_traits");
+ self.autoderef_(db)
+ .filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
+ .flat_map(|ty| {
+ self.env
+ .traits_in_scope_from_clauses(ty)
+ .flat_map(|t| hir_ty::all_super_traits(db.upcast(), t))
+ })
+ .map(Trait::from)
+ }
+
+ pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option<impl Iterator<Item = Trait>> {
+ self.ty.impl_trait_bounds(db).map(|it| {
+ it.into_iter().filter_map(|pred| match pred.skip_binders() {
+ hir_ty::WhereClause::Implemented(trait_ref) => {
+ Some(Trait::from(trait_ref.hir_trait_id()))
+ }
+ _ => None,
+ })
+ })
+ }
+
+ pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<Trait> {
+ self.ty.associated_type_parent_trait(db).map(Into::into)
+ }
+
+ fn derived(&self, ty: Ty) -> Type {
+ Type { env: self.env.clone(), ty }
+ }
+
+ pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) {
+ // TypeWalk::walk for a Ty at first visits parameters and only after that the Ty itself.
+ // We need a different order here.
+
+ fn walk_substs(
+ db: &dyn HirDatabase,
+ type_: &Type,
+ substs: &Substitution,
+ cb: &mut impl FnMut(Type),
+ ) {
+ for ty in substs.iter(Interner).filter_map(|a| a.ty(Interner)) {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+ }
+
+ fn walk_bounds(
+ db: &dyn HirDatabase,
+ type_: &Type,
+ bounds: &[QuantifiedWhereClause],
+ cb: &mut impl FnMut(Type),
+ ) {
+ for pred in bounds {
+ if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
+ cb(type_.clone());
+ // skip the self type. it's likely the type we just got the bounds from
+ for ty in
+ trait_ref.substitution.iter(Interner).skip(1).filter_map(|a| a.ty(Interner))
+ {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+ }
+ }
+ }
+
+ fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
+ let ty = type_.ty.strip_references();
+ match ty.kind(Interner) {
+ TyKind::Adt(_, substs) => {
+ cb(type_.derived(ty.clone()));
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::AssociatedType(_, substs) => {
+ if ty.associated_type_parent_trait(db).is_some() {
+ cb(type_.derived(ty.clone()));
+ }
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::OpaqueType(_, subst) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+
+ walk_substs(db, type_, subst, cb);
+ }
+ TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+
+ walk_substs(db, type_, &opaque_ty.substitution, cb);
+ }
+ TyKind::Placeholder(_) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+ }
+ TyKind::Dyn(bounds) => {
+ walk_bounds(
+ db,
+ &type_.derived(ty.clone()),
+ bounds.bounds.skip_binders().interned(),
+ cb,
+ );
+ }
+
+ TyKind::Ref(_, _, ty)
+ | TyKind::Raw(_, ty)
+ | TyKind::Array(ty, _)
+ | TyKind::Slice(ty) => {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+
+ TyKind::FnDef(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::Closure(.., substs) => {
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
+ walk_substs(db, type_, &substitution.0, cb);
+ }
+
+ _ => {}
+ }
+ }
+
+ walk_type(db, self, &mut cb);
+ }
+
+ pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
+ hir_ty::could_unify(db, self.env.clone(), &tys)
+ }
+
+ pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
+ hir_ty::could_coerce(db, self.env.clone(), &tys)
+ }
+
+ pub fn as_type_param(&self, db: &dyn HirDatabase) -> Option<TypeParam> {
+ match self.ty.kind(Interner) {
+ TyKind::Placeholder(p) => Some(TypeParam {
+ id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p)),
+ }),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct Callable {
+ ty: Type,
+ sig: CallableSig,
+ callee: Callee,
+ pub(crate) is_bound_method: bool,
+}
+
+#[derive(Debug)]
+enum Callee {
+ Def(CallableDefId),
+ Closure(ClosureId),
+ FnPtr,
+}
+
+pub enum CallableKind {
+ Function(Function),
+ TupleStruct(Struct),
+ TupleEnumVariant(Variant),
+ Closure,
+ FnPtr,
+}
+
+impl Callable {
+ pub fn kind(&self) -> CallableKind {
+ use Callee::*;
+ match self.callee {
+ Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
+ Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
+ Def(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()),
+ Closure(_) => CallableKind::Closure,
+ FnPtr => CallableKind::FnPtr,
+ }
+ }
+ pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {
+ let func = match self.callee {
+ Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
+ _ => return None,
+ };
+ let src = func.lookup(db.upcast()).source(db.upcast());
+ let param_list = src.value.param_list()?;
+ param_list.self_param()
+ }
+ pub fn n_params(&self) -> usize {
+ self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
+ }
+ pub fn params(
+ &self,
+ db: &dyn HirDatabase,
+ ) -> Vec<(Option<Either<ast::SelfParam, ast::Pat>>, Type)> {
+ let types = self
+ .sig
+ .params()
+ .iter()
+ .skip(if self.is_bound_method { 1 } else { 0 })
+ .map(|ty| self.ty.derived(ty.clone()));
+ let map_param = |it: ast::Param| it.pat().map(Either::Right);
+ let patterns = match self.callee {
+ Callee::Def(CallableDefId::FunctionId(func)) => {
+ let src = func.lookup(db.upcast()).source(db.upcast());
+ src.value.param_list().map(|param_list| {
+ param_list
+ .self_param()
+ .map(|it| Some(Either::Left(it)))
+ .filter(|_| !self.is_bound_method)
+ .into_iter()
+ .chain(param_list.params().map(map_param))
+ })
+ }
+ Callee::Closure(closure_id) => match closure_source(db, closure_id) {
+ Some(src) => src.param_list().map(|param_list| {
+ param_list
+ .self_param()
+ .map(|it| Some(Either::Left(it)))
+ .filter(|_| !self.is_bound_method)
+ .into_iter()
+ .chain(param_list.params().map(map_param))
+ }),
+ None => None,
+ },
+ _ => None,
+ };
+ patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect()
+ }
+ pub fn return_type(&self) -> Type {
+ self.ty.derived(self.sig.ret().clone())
+ }
+}
+
+fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {
+ let (owner, expr_id) = db.lookup_intern_closure(closure.into());
+ let (_, source_map) = db.body_with_source_map(owner);
+ let ast = source_map.expr_syntax(expr_id).ok()?;
+ let root = ast.file_syntax(db.upcast());
+ let expr = ast.value.to_node(&root);
+ match expr {
+ ast::Expr::ClosureExpr(it) => Some(it),
+ _ => None,
+ }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum BindingMode {
+ Move,
+ Ref(Mutability),
+}
+
+/// For IDE only
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum ScopeDef {
+ ModuleDef(ModuleDef),
+ GenericParam(GenericParam),
+ ImplSelfType(Impl),
+ AdtSelfType(Adt),
+ Local(Local),
+ Label(Label),
+ Unknown,
+}
+
+impl ScopeDef {
+ pub fn all_items(def: PerNs) -> ArrayVec<Self, 3> {
+ let mut items = ArrayVec::new();
+
+ match (def.take_types(), def.take_values()) {
+ (Some(m1), None) => items.push(ScopeDef::ModuleDef(m1.into())),
+ (None, Some(m2)) => items.push(ScopeDef::ModuleDef(m2.into())),
+ (Some(m1), Some(m2)) => {
+ // Some items, like unit structs and enum variants, are
+ // returned as both a type and a value. Here we want
+ // to de-duplicate them.
+ if m1 != m2 {
+ items.push(ScopeDef::ModuleDef(m1.into()));
+ items.push(ScopeDef::ModuleDef(m2.into()));
+ } else {
+ items.push(ScopeDef::ModuleDef(m1.into()));
+ }
+ }
+ (None, None) => {}
+ };
+
+ if let Some(macro_def_id) = def.take_macros() {
+ items.push(ScopeDef::ModuleDef(ModuleDef::Macro(macro_def_id.into())));
+ }
+
+ if items.is_empty() {
+ items.push(ScopeDef::Unknown);
+ }
+
+ items
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ match self {
+ ScopeDef::ModuleDef(it) => it.attrs(db),
+ ScopeDef::GenericParam(it) => Some(it.attrs(db)),
+ ScopeDef::ImplSelfType(_)
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Local(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown => None,
+ }
+ }
+
+ pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
+ match self {
+ ScopeDef::ModuleDef(it) => it.module(db).map(|m| m.krate()),
+ ScopeDef::GenericParam(it) => Some(it.module(db).krate()),
+ ScopeDef::ImplSelfType(_) => None,
+ ScopeDef::AdtSelfType(it) => Some(it.module(db).krate()),
+ ScopeDef::Local(it) => Some(it.module(db).krate()),
+ ScopeDef::Label(it) => Some(it.module(db).krate()),
+ ScopeDef::Unknown => None,
+ }
+ }
+}
+
+impl From<ItemInNs> for ScopeDef {
+ fn from(item: ItemInNs) -> Self {
+ match item {
+ ItemInNs::Types(id) => ScopeDef::ModuleDef(id),
+ ItemInNs::Values(id) => ScopeDef::ModuleDef(id),
+ ItemInNs::Macros(id) => ScopeDef::ModuleDef(ModuleDef::Macro(id)),
+ }
+ }
+}
+
+pub trait HasVisibility {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility;
+ fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool {
+ let vis = self.visibility(db);
+ vis.is_visible_from(db.upcast(), module.id)
+ }
+}
+
+/// Trait for obtaining the defining crate of an item.
+pub trait HasCrate {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate;
+}
+
+impl<T: hir_def::HasModule> HasCrate for T {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db.upcast()).krate().into()
+ }
+}
+
+impl HasCrate for AssocItem {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Struct {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Union {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Field {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.parent_def(db).module(db).krate()
+ }
+}
+
+impl HasCrate for Variant {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Function {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Const {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for TypeAlias {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Type {
+ fn krate(&self, _db: &dyn HirDatabase) -> Crate {
+ self.env.krate.into()
+ }
+}
+
+impl HasCrate for Macro {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Trait {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Static {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Adt {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Module {
+ fn krate(&self, _: &dyn HirDatabase) -> Crate {
+ Module::krate(*self)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
new file mode 100644
index 000000000..c84318b2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -0,0 +1,1540 @@
+//! See `Semantics`.
+
+mod source_to_def;
+
+use std::{cell::RefCell, fmt, iter, ops};
+
+use base_db::{FileId, FileRange};
+use hir_def::{
+ body, macro_id_to_def_id,
+ resolver::{self, HasResolver, Resolver, TypeNs},
+ type_ref::Mutability,
+ AsMacroCall, FunctionId, MacroId, TraitId, VariantId,
+};
+use hir_expand::{
+ db::AstDatabase,
+ name::{known, AsName},
+ ExpansionInfo, MacroCallId,
+};
+use itertools::Itertools;
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
+use syntax::{
+ algo::skip_trivia_token,
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
+ match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+};
+
+use crate::{
+ db::HirDatabase,
+ semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
+ source_analyzer::{resolve_hir_path, SourceAnalyzer},
+ Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function,
+ HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
+ Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathResolution {
+ /// An item
+ Def(ModuleDef),
+ /// A local binding (only value namespace)
+ Local(Local),
+ /// A type parameter
+ TypeParam(TypeParam),
+ /// A const parameter
+ ConstParam(ConstParam),
+ SelfType(Impl),
+ BuiltinAttr(BuiltinAttr),
+ ToolModule(ToolModule),
+ DeriveHelper(DeriveHelper),
+}
+
+impl PathResolution {
+ pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
+ match self {
+ PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
+ PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
+ Some(TypeNs::BuiltinType((*builtin).into()))
+ }
+ PathResolution::Def(
+ ModuleDef::Const(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Macro(_)
+ | ModuleDef::Function(_)
+ | ModuleDef::Module(_)
+ | ModuleDef::Static(_)
+ | ModuleDef::Trait(_),
+ ) => None,
+ PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
+ Some(TypeNs::TypeAliasId((*alias).into()))
+ }
+ PathResolution::BuiltinAttr(_)
+ | PathResolution::ToolModule(_)
+ | PathResolution::Local(_)
+ | PathResolution::DeriveHelper(_)
+ | PathResolution::ConstParam(_) => None,
+ PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
+ PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct TypeInfo {
+ /// The original type of the expression or pattern.
+ pub original: Type,
+ /// The adjusted type, if an adjustment happened.
+ pub adjusted: Option<Type>,
+}
+
+impl TypeInfo {
+ pub fn original(self) -> Type {
+ self.original
+ }
+
+ pub fn has_adjustment(&self) -> bool {
+ self.adjusted.is_some()
+ }
+
+ /// The adjusted type, or the original in case no adjustments occurred.
+ pub fn adjusted(self) -> Type {
+ self.adjusted.unwrap_or(self.original)
+ }
+}
+
+/// Primary API to get semantic information, like types, from syntax trees.
+pub struct Semantics<'db, DB> {
+ pub db: &'db DB,
+ imp: SemanticsImpl<'db>,
+}
+
+pub struct SemanticsImpl<'db> {
+ pub db: &'db dyn HirDatabase,
+ s2d_cache: RefCell<SourceToDefCache>,
+ expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
+ // Rootnode to HirFileId cache
+ cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+ // MacroCall to its expansion's HirFileId cache
+ macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+}
+
+impl<DB> fmt::Debug for Semantics<'_, DB> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Semantics {{ ... }}")
+ }
+}
+
+impl<'db, DB: HirDatabase> Semantics<'db, DB> {
+ pub fn new(db: &DB) -> Semantics<'_, DB> {
+ let impl_ = SemanticsImpl::new(db);
+ Semantics { db, imp: impl_ }
+ }
+
+ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ self.imp.parse(file_id)
+ }
+
+ pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ self.imp.parse_or_expand(file_id)
+ }
+
+ pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ self.imp.expand(macro_call)
+ }
+
+ /// If `item` has an attribute macro attached to it, expands it.
+ pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ self.imp.expand_attr_macro(item)
+ }
+
+ pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ self.imp.expand_derive_as_pseudo_attr_macro(attr)
+ }
+
+ pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ self.imp.resolve_derive_macro(derive)
+ }
+
+ pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ self.imp.expand_derive_macro(derive)
+ }
+
+ pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ self.imp.is_attr_macro_call(item)
+ }
+
+ pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
+ self.imp.is_derive_annotated(item)
+ }
+
+ pub fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
+ }
+
+ pub fn speculative_expand_attr_macro(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
+ }
+
+ pub fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand_derive_as_pseudo_attr_macro(
+ actual_macro_call,
+ speculative_args,
+ token_to_map,
+ )
+ }
+
+ /// Descend the token into macrocalls to its first mapped counterpart.
+ pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_single(token)
+ }
+
+ /// Descend the token into macrocalls to all its mapped counterparts.
+ pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros(token)
+ }
+
+ /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
+ ///
+ /// Returns the original non descended token if none of the mapped counterparts have the same text.
+ pub fn descend_into_macros_with_same_text(
+ &self,
+ token: SyntaxToken,
+ ) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros_with_same_text(token)
+ }
+
+ pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_with_kind_preference(token)
+ }
+
+ /// Maps a node down by mapping its first and last token down.
+ pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ self.imp.descend_node_into_attributes(node)
+ }
+
+ /// Search for a definition's source and cache its syntax tree
+ pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ self.imp.source(def)
+ }
+
+ pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
+ self.imp.find_file(syntax_node).file_id
+ }
+
+ /// Attempts to map the node out of macro expanded files returning the original file range.
+ /// If upmapping is not possible, this will fall back to the range of the macro call of the
+ /// macro file the node resides in.
+ pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ self.imp.original_range(node)
+ }
+
+ /// Attempts to map the node out of macro expanded files returning the original file range.
+ pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ self.imp.original_range_opt(node)
+ }
+
+ /// Attempts to map the node out of macro expanded files.
+ /// This only work for attribute expansions, as other ones do not have nodes as input.
+ pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.imp.original_ast_node(node)
+ }
+
+ pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
+ self.imp.diagnostics_display_range(diagnostics)
+ }
+
+ pub fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
+ }
+
+ /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+ pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+ self.imp.ancestors_with_macros(node)
+ }
+
+ pub fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ self.imp.ancestors_at_offset_with_macros(node, offset)
+ }
+
+ /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
+ /// search up until it is of the target AstNode type
+ pub fn find_node_at_offset_with_macros<N: AstNode>(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<N> {
+ self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
+ }
+
+ /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+ /// descend it and find again
+ pub fn find_node_at_offset_with_descend<N: AstNode>(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<N> {
+ self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
+ }
+
+ /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+ /// descend it and find again
+ pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
+ &'slf self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = N> + 'slf {
+ self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
+ }
+
+ pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ self.imp.resolve_lifetime_param(lifetime)
+ }
+
+ pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ self.imp.resolve_label(lifetime)
+ }
+
+ pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ self.imp.resolve_type(ty)
+ }
+
+ pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
+ self.imp.resolve_trait(trait_)
+ }
+
+ // FIXME: Figure out a nice interface to inspect adjustments
+ pub fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.imp.is_implicit_reborrow(expr)
+ }
+
+ pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.imp.type_of_expr(expr)
+ }
+
+ pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.imp.type_of_pat(pat)
+ }
+
+ pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.imp.type_of_self(param)
+ }
+
+ pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.imp.pattern_adjustments(pat)
+ }
+
+ pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.imp.binding_mode_of_pat(pat)
+ }
+
+ pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+ self.imp.resolve_method_call(call).map(Function::from)
+ }
+
+ pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.imp.resolve_method_call_as_callable(call)
+ }
+
+ pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.imp.resolve_field(field)
+ }
+
+ pub fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.imp.resolve_record_field(field)
+ }
+
+ pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.imp.resolve_record_pat_field(field)
+ }
+
+ pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ self.imp.resolve_macro_call(macro_call)
+ }
+
+ pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ self.imp.is_unsafe_macro_call(macro_call)
+ }
+
+ pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ self.imp.resolve_attr_macro_call(item)
+ }
+
+ pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.imp.resolve_path(path)
+ }
+
+ pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ self.imp.resolve_extern_crate(extern_crate)
+ }
+
+ pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
+ self.imp.resolve_variant(record_lit).map(VariantDef::from)
+ }
+
+ pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.imp.resolve_bind_pat_to_const(pat)
+ }
+
+ pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.imp.record_literal_missing_fields(literal)
+ }
+
+ pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.imp.record_pattern_missing_fields(pattern)
+ }
+
+ pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+ let src = self.imp.find_file(src.syntax()).with_value(src).cloned();
+ T::to_def(&self.imp, src)
+ }
+
+ pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+ self.imp.to_module_def(file).next()
+ }
+
+ pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.imp.to_module_def(file)
+ }
+
+ pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.imp.scope(node)
+ }
+
+ pub fn scope_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SemanticsScope<'db>> {
+ self.imp.scope_at_offset(node, offset)
+ }
+
+ pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ self.imp.scope_for_def(def)
+ }
+
+ pub fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.imp.assert_contains_node(node)
+ }
+
+ pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ self.imp.is_unsafe_method_call(method_call_expr)
+ }
+
+ pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ self.imp.is_unsafe_ref_expr(ref_expr)
+ }
+
+ pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ self.imp.is_unsafe_ident_pat(ident_pat)
+ }
+}
+
+impl<'db> SemanticsImpl<'db> {
+ fn new(db: &'db dyn HirDatabase) -> Self {
+ SemanticsImpl {
+ db,
+ s2d_cache: Default::default(),
+ cache: Default::default(),
+ expansion_info_cache: Default::default(),
+ macro_call_cache: Default::default(),
+ }
+ }
+
+ fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ let tree = self.db.parse(file_id).tree();
+ self.cache(tree.syntax().clone(), file_id.into());
+ tree
+ }
+
+ fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ }
+
+ fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ let sa = self.analyze_no_infer(macro_call.syntax())?;
+ let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
+ let node = self.parse_or_expand(file_id)?;
+ Some(node)
+ }
+
+ fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(item.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
+ self.parse_or_expand(macro_call_id.as_file())
+ }
+
+ fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(attr.clone());
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
+ })?;
+ self.parse_or_expand(call_id.as_file())
+ }
+
+ fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ let calls = self.derive_macro_calls(attr)?;
+ self.with_ctx(|ctx| {
+ Some(
+ calls
+ .into_iter()
+ .map(|call| {
+ macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
+ })
+ .collect(),
+ )
+ })
+ }
+
+ fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ let res: Vec<_> = self
+ .derive_macro_calls(attr)?
+ .into_iter()
+ .flat_map(|call| {
+ let file_id = call?.as_file();
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ })
+ .collect();
+ Some(res)
+ }
+
+ fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, &adt);
+ let src = InFile::new(file_id, attr.clone());
+ self.with_ctx(|ctx| {
+ let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
+ Some(res.to_vec())
+ })
+ }
+
+ fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, adt);
+ self.with_ctx(|ctx| ctx.has_derives(adt))
+ }
+
+ fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ let file_id = self.find_file(item.syntax()).file_id;
+ let src = InFile::new(file_id, item.clone());
+ self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
+ }
+
+ fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let SourceAnalyzer { file_id, resolver, .. } =
+ self.analyze_no_infer(actual_macro_call.syntax())?;
+ let macro_call = InFile::new(file_id, actual_macro_call);
+ let krate = resolver.krate();
+ let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
+ resolver
+ .resolve_path_as_macro(self.db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(self.db.upcast(), it))
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ fn speculative_expand_attr(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let macro_call = self.wrap_node_infile(actual_macro_call.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let attr = self.wrap_node_infile(actual_macro_call.clone());
+ let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
+ let macro_call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ // This might not be the correct way to do this, but it works for now
+ fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ let mut res = smallvec![];
+ let tokens = (|| {
+ let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
+ let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
+ Some((first, last))
+ })();
+ let (first, last) = match tokens {
+ Some(it) => it,
+ None => return res,
+ };
+
+ if first == last {
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ res.push(node)
+ }
+ false
+ });
+ } else {
+ // Descend first and last token, then zip them to look for the node they belong to
+ let mut scratch: SmallVec<[_; 1]> = smallvec![];
+ self.descend_into_macros_impl(first, &mut |token| {
+ scratch.push(token);
+ false
+ });
+
+ let mut scratch = scratch.into_iter();
+ self.descend_into_macros_impl(
+ last,
+ &mut |InFile { value: last, file_id: last_fid }| {
+ if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+ if first_fid == last_fid {
+ if let Some(p) = first.parent() {
+ let range = first.text_range().cover(last.text_range());
+ let node = find_root(&p)
+ .covering_element(range)
+ .ancestors()
+ .take_while(|it| it.text_range() == range)
+ .find_map(N::cast);
+ if let Some(node) = node {
+ res.push(node);
+ }
+ }
+ }
+ }
+ false
+ },
+ );
+ }
+ res
+ }
+
+ /// Returns every token the input token maps to inside macro expansions;
+ /// tokens not consumed by any macro are reported as themselves.
+ fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res.push(value);
+ false
+ });
+ res
+ }
+
+ /// Descends the token into macro expansions, keeping only the mapped tokens
+ /// whose text equals the input token's text. Falls back to the input token
+ /// itself when no such token is found.
+ fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let needle = token.text();
+ let mut matches: SmallVec<[SyntaxToken; 1]> = smallvec![];
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ let is_same_text = value.text() == needle;
+ if is_same_text {
+ matches.push(value);
+ }
+ false
+ });
+ if matches.is_empty() {
+ matches.push(token);
+ }
+ matches
+ }
+
+ /// Descends the token into macro expansions, preferring a mapped token whose
+ /// (grand)parent has the same [`SyntaxKind`] as the input token's; falls back
+ /// to the first mapped token, and to the input token when nothing maps.
+ fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ // For NAME/NAME_REF tokens the interesting kind is the grandparent's
+ // (the path/pattern the name sits in), not the name node itself.
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
+ node.parent().map_or(kind, |it| it.kind())
+ }
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let preferred_kind = fetch_kind(&token);
+ let mut res = None;
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if fetch_kind(&value) == preferred_kind {
+ // Exact kind match: take it and stop descending.
+ res = Some(value);
+ true
+ } else {
+ // Remember the first candidate as a fallback and keep looking.
+ if res.is_none() {
+ res = Some(value)
+ }
+ false
+ }
+ });
+ res.unwrap_or(token)
+ }
+
+ fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ let mut res = token.clone();
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res = value;
+ true
+ });
+ res
+ }
+
+ /// Core of all `descend_into_macros*` flavors: walks `token` down through
+ /// every macro expansion it participates in — attribute macros, function-like
+ /// macros, derives and derive helpers — invoking `f` on each token that can
+ /// be descended no further. `f` returning `true` stops the traversal early.
+ fn descend_into_macros_impl(
+ &self,
+ token: SyntaxToken,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ ) {
+ let _p = profile::span("descend_into_macros");
+ let parent = match token.parent() {
+ Some(it) => it,
+ None => return,
+ };
+ let sa = match self.analyze_no_infer(&parent) {
+ Some(it) => it,
+ None => return,
+ };
+ let def_map = sa.resolver.def_map();
+
+ // Worklist of tokens still to be mapped down, seeded with the input token.
+ let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
+ let mut cache = self.expansion_info_cache.borrow_mut();
+ let mut mcache = self.macro_call_cache.borrow_mut();
+
+ // Maps `token` into the expansion `macro_file`, pushing all resulting
+ // tokens onto the stack; yields `Some(())` iff at least one token mapped.
+ let mut process_expansion_for_token =
+ |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+ .as_ref()?;
+
+ {
+ let InFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id);
+ }
+
+ let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+ let len = stack.len();
+
+ // requeue the tokens we got from mapping our current token down
+ stack.extend(mapped_tokens);
+ // if the length changed we have found a mapping for the token
+ (stack.len() != len).then(|| ())
+ };
+
+ // Remap the next token in the queue into a macro call its in, if it is not being remapped
+ // either due to not being in a macro-call or because its unused push it into the result vec,
+ // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
+ while let Some(token) = stack.pop() {
+ self.db.unwind_if_cancelled();
+ let was_not_remapped = (|| {
+ // First expand into attribute invocations
+ let containing_attribute_macro_call = self.with_ctx(|ctx| {
+ token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ if item.attrs().next().is_none() {
+ // Don't force populate the dyn cache for items that don't have an attribute anyways
+ return None;
+ }
+ Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
+ })
+ });
+ if let Some((call_id, item)) = containing_attribute_macro_call {
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(item),
+ token.as_ref(),
+ );
+ }
+
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
+
+ // Delimiter tokens themselves never map into an expansion.
+ if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall = token.with_value(macro_call);
+ // Expansions of macro calls are memoized in `macro_call_cache`.
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
+ }
+ };
+ process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
+ } else if let Some(meta) = ast::Meta::cast(parent.clone()) {
+ // attribute we failed expansion for earlier, this might be a derive invocation
+ // or derive helper attribute
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ctx.attr_to_derive_macro_call(
+ token.with_value(&adt),
+ token.with_value(attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(adt.into()),
+ token.as_ref(),
+ );
+ }
+ None => Some(adt),
+ }
+ } else {
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
+ return None;
+ }
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
+ let helpers =
+ def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
+ let item = Some(adt.into());
+ let mut res = None;
+ // A helper name may be registered by several derives; try each one.
+ for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_file(),
+ item.clone(),
+ token.as_ref(),
+ ));
+ }
+ res
+ } else {
+ None
+ }
+ })()
+ .is_none();
+
+ // Token could not be mapped any further: report it to the callback.
+ if was_not_remapped && f(token) {
+ break;
+ }
+ }
+ }
+
+ // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
+ // traversing the inner iterator when it finds a node.
+ // The outer iterator is over the tokens descendants
+ // The inner iterator is the ancestors of a descendant
+ /// For each token at `offset`, descends it into macros and yields the
+ /// macro-aware ancestor chains of every descendant, merged smallest-first.
+ fn descend_node_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
+ node.token_at_offset(offset)
+ .map(move |token| self.descend_into_macros(token))
+ .map(|descendants| {
+ descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
+ })
+ // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first
+ // See algo::ancestors_at_offset, which uses the same approach
+ .kmerge_by(|left, right| {
+ left.clone()
+ .map(|node| node.text_range().len())
+ .lt(right.clone().map(|node| node.text_range().len()))
+ })
+ }
+
+ fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ let node = self.find_file(node);
+ node.original_file_range(self.db.upcast())
+ }
+
+ fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ let node = self.find_file(node);
+ node.original_file_range_opt(self.db.upcast())
+ }
+
+ fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
+ |InFile { file_id, value }| {
+ self.cache(find_root(value.syntax()), file_id);
+ value
+ },
+ )
+ }
+
+ fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+ let root = self.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|it| it.to_node(&root));
+ node.as_ref().original_file_range(self.db.upcast())
+ }
+
+ fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
+ }
+
+ /// Iterates the ancestors of `node`; when the root of the current file is
+ /// reached, continues from the macro call node that produced that file (if
+ /// any), so the walk transparently crosses expansion boundaries.
+ fn ancestors_with_macros(
+ &self,
+ node: SyntaxNode,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ let node = self.find_file(&node);
+ let db = self.db.upcast();
+ iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
+ match value.parent() {
+ Some(parent) => Some(InFile::new(file_id, parent)),
+ None => {
+ // Hit the root: register it, then climb out into the caller file.
+ self.cache(value.clone(), file_id);
+ file_id.call_node(db)
+ }
+ }
+ })
+ .map(|it| it.value)
+ }
+
+ fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ node.token_at_offset(offset)
+ .map(|token| self.token_ancestors_with_macros(token))
+ .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+ }
+
+ fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ let text = lifetime.text();
+ let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
+ let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
+ gpl.lifetime_params()
+ .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
+ })?;
+ let src = self.wrap_node_infile(lifetime_param);
+ ToDef::to_def(self, src)
+ }
+
+ fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ let text = lifetime.text();
+ let label = lifetime.syntax().ancestors().find_map(|syn| {
+ let label = match_ast! {
+ match syn {
+ ast::ForExpr(it) => it.label(),
+ ast::WhileExpr(it) => it.label(),
+ ast::LoopExpr(it) => it.label(),
+ ast::BlockExpr(it) => it.label(),
+ _ => None,
+ }
+ };
+ label.filter(|l| {
+ l.lifetime()
+ .and_then(|lt| lt.lifetime_ident_token())
+ .map_or(false, |lt| lt.text() == text)
+ })
+ })?;
+ let src = self.wrap_node_infile(label);
+ ToDef::to_def(self, src)
+ }
+
+ fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ let analyze = self.analyze(ty.syntax())?;
+ let ctx = body::LowerCtx::new(self.db.upcast(), analyze.file_id);
+ let ty = hir_ty::TyLoweringContext::new(self.db, &analyze.resolver)
+ .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
+ Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
+ }
+
+ fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+ let analyze = self.analyze(path.syntax())?;
+ let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
+ let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+ match analyze
+ .resolver
+ .resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())?
+ {
+ TypeNs::TraitId(id) => Some(Trait { id }),
+ _ => None,
+ }
+ }
+
+ fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr)
+ }
+
+ fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.analyze(expr.syntax())?
+ .type_of_expr(self.db, expr)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.analyze(pat.syntax())?
+ .type_of_pat(self.db, pat)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.analyze(param.syntax())?.type_of_self(self.db, param)
+ }
+
+ fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.analyze(pat.syntax())
+ .and_then(|it| it.pattern_adjustments(self.db, pat))
+ .unwrap_or_default()
+ }
+
+ fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
+ }
+
+ fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+ self.analyze(call.syntax())?.resolve_method_call(self.db, call)
+ }
+
+ fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
+ }
+
+ fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_field(self.db, field)
+ }
+
+ fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.analyze(field.syntax())?.resolve_record_field(self.db, field)
+ }
+
+ fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
+ }
+
+ fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ let sa = self.analyze(macro_call.syntax())?;
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.resolve_macro_call(self.db, macro_call)
+ }
+
+ fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ let sa = match self.analyze(macro_call.syntax()) {
+ Some(it) => it,
+ None => return false,
+ };
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.is_unsafe_macro_call(self.db, macro_call)
+ }
+
+ fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ let item_in_file = self.wrap_node_infile(item.clone());
+ let id = self.with_ctx(|ctx| {
+ let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
+ macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id)
+ })?;
+ Some(Macro { id })
+ }
+
+ fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.analyze(path.syntax())?.resolve_path(self.db, path)
+ }
+
+ /// Resolves an `extern crate` declaration to the crate it names: the current
+ /// crate for `extern crate self`, otherwise a matching direct dependency.
+ fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ let krate = self.scope(extern_crate.syntax())?.krate();
+ let name = extern_crate.name_ref()?.as_name();
+ // `extern crate self` refers back to the declaring crate itself.
+ if name == known::SELF_PARAM {
+ return Some(krate);
+ }
+ krate
+ .dependencies(self.db)
+ .into_iter()
+ .find_map(|dep| (dep.name == name).then(|| dep.krate))
+ }
+
+ fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
+ self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
+ }
+
+ fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
+ }
+
+ fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.analyze(literal.syntax())
+ .and_then(|it| it.record_literal_missing_fields(self.db, literal))
+ .unwrap_or_default()
+ }
+
+ fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.analyze(pattern.syntax())
+ .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
+ .unwrap_or_default()
+ }
+
+ fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
+ let mut cache = self.s2d_cache.borrow_mut();
+ let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
+ f(&mut ctx)
+ }
+
+ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
+ }
+
+ fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ })
+ }
+
+ fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+ self.analyze_with_offset_no_infer(node, offset).map(
+ |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ },
+ )
+ }
+
+ fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ let file_id = self.db.lookup_intern_trait(def.id).id.file_id();
+ let resolver = def.id.resolver(self.db.upcast());
+ SemanticsScope { db: self.db, file_id, resolver }
+ }
+
+ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ let res = def.source(self.db)?;
+ self.cache(find_root(res.value.syntax()), res.file_id);
+ Some(res)
+ }
+
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, true)
+ }
+
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, false)
+ }
+
+ fn analyze_with_offset_no_infer(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, Some(offset), false)
+ }
+
+ /// Shared implementation behind the `analyze*` entry points: finds the
+ /// container of `node` and builds a [`SourceAnalyzer`] for it.
+ ///
+ /// `offset` (when set) disambiguates the scope position inside a body;
+ /// `infer_body` controls whether type inference runs for body containers.
+ /// Returns `None` if the file of the node is not part of a crate.
+ fn analyze_impl(
+ &self,
+ node: &SyntaxNode,
+ offset: Option<TextSize>,
+ infer_body: bool,
+ ) -> Option<SourceAnalyzer> {
+ let _p = profile::span("Semantics::analyze_impl");
+ let node = self.find_file(node);
+
+ let container = self.with_ctx(|ctx| ctx.find_container(node))?;
+
+ let resolver = match container {
+ ChildContainer::DefWithBodyId(def) => {
+ // Bodies get a dedicated analyzer so inference results can be attached.
+ return Some(if infer_body {
+ SourceAnalyzer::new_for_body(self.db, def, node, offset)
+ } else {
+ SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+ })
+ }
+ ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
+ };
+ Some(SourceAnalyzer::new_for_resolver(resolver, node))
+ }
+
+ /// Registers `root_node` (which must be a tree root) as belonging to
+ /// `file_id`, so later `lookup`/`find_file` calls can resolve its nodes.
+ fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+ assert!(root_node.parent().is_none());
+ let mut cache = self.cache.borrow_mut();
+ let prev = cache.insert(root_node, file_id);
+ // Re-caching the same root is fine, but never under two different files.
+ assert!(prev.is_none() || prev == Some(file_id))
+ }
+
+ fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.find_file(node);
+ }
+
+ fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
+ let cache = self.cache.borrow();
+ cache.get(root_node).copied()
+ }
+
+ fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
+ let InFile { file_id, .. } = self.find_file(node.syntax());
+ InFile::new(file_id, node)
+ }
+
+ /// Wraps the node in a [`InFile`] with the file id it belongs to.
+ ///
+ /// Panics if the node's tree root was never registered via `cache`, i.e. the
+ /// node does not originate from this `Semantics` instance.
+ fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
+ let root_node = find_root(node);
+ let file_id = self.lookup(&root_node).unwrap_or_else(|| {
+ panic!(
+ "\n\nFailed to lookup {:?} in this Semantics.\n\
+ Make sure to use only query nodes, derived from this instance of Semantics.\n\
+ root node: {:?}\n\
+ known nodes: {}\n\n",
+ node,
+ root_node,
+ self.cache
+ .borrow()
+ .keys()
+ .map(|it| format!("{:?}", it))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ });
+ InFile::new(file_id, node)
+ }
+
+ fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ method_call_expr
+ .receiver()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let ty = self.type_of_expr(&field_expr.expr()?)?.original;
+ if !ty.is_packed(self.db) {
+ return None;
+ }
+
+ let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+ let res = match func.self_param(self.db)?.access(self.db) {
+ Access::Shared | Access::Exclusive => true,
+ Access::Owned => false,
+ };
+ Some(res)
+ })
+ .unwrap_or(false)
+ }
+
+ fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ ref_expr
+ .expr()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let expr = field_expr.expr()?;
+ self.type_of_expr(&expr)
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+
+ // FIXME This needs layout computation to be correct. It will highlight
+ // more than it should with the current implementation.
+ }
+
+    /// Returns `true` when a `ref` binding pattern is possibly unsafe, i.e.
+    /// when it would bind a reference to a field of a `#[repr(packed)]` type —
+    /// either by destructuring such a type in a record pattern, or by being
+    /// bound in a `let` whose initializer is a field expression.
+    fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+        // Only `ref` bindings create references; plain bindings are fine.
+        if ident_pat.ref_token().is_none() {
+            return false;
+        }
+
+        ident_pat
+            .syntax()
+            .parent()
+            .and_then(|parent| {
+                // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or
+                // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`,
+                // so this tries to lookup the `IdentPat` anywhere along that structure to the
+                // `RecordPat` so we can get the containing type.
+                let record_pat = ast::RecordPatField::cast(parent.clone())
+                    .and_then(|record_pat| record_pat.syntax().parent())
+                    .or_else(|| Some(parent.clone()))
+                    .and_then(|parent| {
+                        ast::RecordPatFieldList::cast(parent)?
+                            .syntax()
+                            .parent()
+                            .and_then(ast::RecordPat::cast)
+                    });
+
+                // If this doesn't match a `RecordPat`, fallback to a `LetStmt` to see if
+                // this is initialized from a `FieldExpr`.
+                if let Some(record_pat) = record_pat {
+                    self.type_of_pat(&ast::Pat::RecordPat(record_pat))
+                } else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
+                    let field_expr = match let_stmt.initializer()? {
+                        ast::Expr::FieldExpr(field_expr) => field_expr,
+                        _ => return None,
+                    };
+
+                    // Check the type of the base the field is projected out of.
+                    self.type_of_expr(&field_expr.expr()?)
+                } else {
+                    None
+                }
+            })
+            // Binding a reference to a packed type is possibly unsafe.
+            .map(|ty| ty.original.is_packed(self.db))
+            .unwrap_or(false)
+    }
+}
+
+/// Maps an expanded macro call back to the [`MacroId`] of the macro it
+/// invokes: looks up the interned call, then resolves the macro definition's
+/// source node back to a def via the `SourceToDefCtx`.
+fn macro_call_to_macro_id(
+    ctx: &mut SourceToDefCtx<'_, '_>,
+    db: &dyn AstDatabase,
+    macro_call_id: MacroCallId,
+) -> Option<MacroId> {
+    let loc = db.lookup_intern_macro_call(macro_call_id);
+    match loc.def.kind {
+        // Declarative and built-in kinds all carry an AST pointer (`it`) to
+        // the defining `ast::Macro` node.
+        hir_expand::MacroDefKind::Declarative(it)
+        | hir_expand::MacroDefKind::BuiltIn(_, it)
+        | hir_expand::MacroDefKind::BuiltInAttr(_, it)
+        | hir_expand::MacroDefKind::BuiltInDerive(_, it)
+        | hir_expand::MacroDefKind::BuiltInEager(_, it) => {
+            ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+        }
+        // Proc macros are defined by an `ast::Fn` item, so resolve through that.
+        hir_expand::MacroDefKind::ProcMacro(_, _, it) => {
+            ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+        }
+    }
+}
+
+/// Maps the AST node of a definition (e.g. [`ast::Fn`]) to the corresponding
+/// HIR def (e.g. `crate::Function`). Implementations are generated by the
+/// `to_def_impls!` macro in this file.
+pub trait ToDef: AstNode + Clone {
+    type Def;
+
+    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
+}
+
+// Generates one `ToDef` impl per `(Def, Ast, method)` triple: the lookup is
+// delegated to the named `SourceToDefCtx` method and the raw id it returns is
+// converted into the public wrapper type via `From`.
+macro_rules! to_def_impls {
+    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
+        impl ToDef for $ast {
+            type Def = $def;
+            fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
+                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
+            }
+        }
+    )*}
+}
+
+// The concrete `ToDef` wiring: (HIR def type, AST node type, lookup method).
+to_def_impls![
+    (crate::Module, ast::Module, module_to_def),
+    (crate::Module, ast::SourceFile, source_file_to_def),
+    (crate::Struct, ast::Struct, struct_to_def),
+    (crate::Enum, ast::Enum, enum_to_def),
+    (crate::Union, ast::Union, union_to_def),
+    (crate::Trait, ast::Trait, trait_to_def),
+    (crate::Impl, ast::Impl, impl_to_def),
+    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
+    (crate::Const, ast::Const, const_to_def),
+    (crate::Static, ast::Static, static_to_def),
+    (crate::Function, ast::Fn, fn_to_def),
+    (crate::Field, ast::RecordField, record_field_to_def),
+    (crate::Field, ast::TupleField, tuple_field_to_def),
+    (crate::Variant, ast::Variant, enum_variant_to_def),
+    (crate::TypeParam, ast::TypeParam, type_param_to_def),
+    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
+    (crate::ConstParam, ast::ConstParam, const_param_to_def),
+    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
+    (crate::Macro, ast::Macro, macro_to_def),
+    (crate::Local, ast::IdentPat, bind_pat_to_def),
+    (crate::Local, ast::SelfParam, self_param_to_def),
+    (crate::Label, ast::Label, label_to_def),
+    (crate::Adt, ast::Adt, adt_to_def),
+];
+
+/// Returns the root (file) node of the syntax tree `node` belongs to.
+/// `ancestors()` yields `node` itself first, so `last()` can never panic.
+fn find_root(node: &SyntaxNode) -> SyntaxNode {
+    node.ancestors().last().unwrap()
+}
+
+/// `SemanticScope` encapsulates the notion of a scope (the set of visible
+/// names) at a particular program point.
+///
+/// It is a bit tricky, as scopes do not really exist inside the compiler.
+/// Rather, the compiler directly computes for each reference the definition it
+/// refers to. It might transiently compute the explicit scope map while doing
+/// so, but, generally, this is not something left after the analysis.
+///
+/// However, we do very much need explicit scopes for IDE purposes --
+/// completion, at its core, lists the contents of the current scope. The notion
+/// of scope is also useful to answer questions like "what would be the meaning
+/// of this piece of code if we inserted it into this position?".
+///
+/// So `SemanticsScope` is constructed from a specific program point (a syntax
+/// node or just a raw offset) and provides access to the set of visible names
+/// on a somewhat best-effort basis.
+///
+/// Note that if you are wondering "what does this specific existing name mean?",
+/// you'd better use the `resolve_` family of methods.
+#[derive(Debug)]
+pub struct SemanticsScope<'a> {
+    pub db: &'a dyn HirDatabase,
+    // File the scope was constructed in; used when lowering paths
+    // (see `speculative_resolve`).
+    file_id: HirFileId,
+    // Name-resolution state for the program point this scope represents.
+    resolver: Resolver,
+}
+
+impl<'a> SemanticsScope<'a> {
+    /// The module this scope's resolver is positioned in.
+    pub fn module(&self) -> Module {
+        Module { id: self.resolver.module() }
+    }
+
+    /// The crate containing this scope.
+    pub fn krate(&self) -> Crate {
+        Crate { id: self.resolver.krate() }
+    }
+
+    pub(crate) fn resolver(&self) -> &Resolver {
+        &self.resolver
+    }
+
+    /// Note: `VisibleTraits` should be treated as an opaque type, passed into
+    /// `Type` methods that expect the set of traits visible at this scope.
+    pub fn visible_traits(&self) -> VisibleTraits {
+        let resolver = &self.resolver;
+        VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
+    }
+
+    /// Calls `f` once for every (name, definition) pair visible in this scope.
+    /// Locals and labels are skipped when the resolver has no body owner.
+    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+        let scope = self.resolver.names_in_scope(self.db.upcast());
+        for (name, entries) in scope {
+            for entry in entries {
+                let def = match entry {
+                    resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
+                    resolver::ScopeDef::Unknown => ScopeDef::Unknown,
+                    resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
+                    resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
+                    resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
+                    resolver::ScopeDef::Local(pat_id) => match self.resolver.body_owner() {
+                        Some(parent) => ScopeDef::Local(Local { parent, pat_id }),
+                        None => continue,
+                    },
+                    resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
+                        Some(parent) => ScopeDef::Label(Label { parent, label_id }),
+                        None => continue,
+                    },
+                };
+                f(name.clone(), def)
+            }
+        }
+    }
+
+    /// Resolve a path as-if it was written at the given scope. This is
+    /// necessarily a heuristic, as it doesn't take hygiene into account.
+    pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
+        let ctx = body::LowerCtx::new(self.db.upcast(), self.file_id);
+        let path = Path::from_src(path.clone(), &ctx)?;
+        resolve_hir_path(self.db, &self.resolver, &path)
+    }
+
+    /// Iterates over associated types that may be specified after the given path (using
+    /// `Ty::Assoc` syntax).
+    pub fn assoc_type_shorthand_candidates<R>(
+        &self,
+        resolution: &PathResolution,
+        mut cb: impl FnMut(&Name, TypeAlias) -> Option<R>,
+    ) -> Option<R> {
+        let def = self.resolver.generic_def()?;
+        hir_ty::associated_type_shorthand_candidates(
+            self.db,
+            def,
+            resolution.in_type_ns()?,
+            |name, _, id| cb(name, id.into()),
+        )
+    }
+}
+
+/// The set of trait ids visible at some scope; produced by
+/// `SemanticsScope::visible_traits` and intended to be passed around opaquely.
+pub struct VisibleTraits(pub FxHashSet<TraitId>);
+
+// Read-only access to the underlying set without exposing mutation.
+impl ops::Deref for VisibleTraits {
+    type Target = FxHashSet<TraitId>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
new file mode 100644
index 000000000..ba9a1cfb6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -0,0 +1,473 @@
+//! Maps *syntax* of various definitions to their semantic ids.
+//!
+//! This is a very interesting module, and, in some sense, can be considered the
+//! heart of the IDE parts of rust-analyzer.
+//!
+//! This module solves the following problem:
+//!
+//! Given a piece of syntax, find the corresponding semantic definition (def).
+//!
+//! This problem is a part of more-or-less every IDE feature implemented. Every
+//! IDE functionality (like goto to definition), conceptually starts with a
+//! specific cursor position in a file. Starting with this text offset, we first
+//! figure out what syntactic construct we are at: is this a pattern, an
+//! expression, an item definition.
+//!
+//! Knowing only the syntax gives us relatively little info. For example,
+//! looking at the syntax of the function we can realise that it is a part of an
+//! `impl` block, but we won't be able to tell what trait function the current
+//! function overrides, and whether it does that correctly. For that, we need to
+//! go from [`ast::Fn`] to [`crate::Function`], and that's exactly what this
+//! module does.
+//!
+//! As syntax trees are values and don't know their place of origin/identity,
+//! this module also requires [`InFile`] wrappers to understand which specific
+//! real or macro-expanded file the tree comes from.
+//!
+//! The actual algorithm to resolve syntax to def is curious in two aspects:
+//!
+//! * It is recursive
+//! * It uses the inverse algorithm (what is the syntax for this def?)
+//!
+//! Specifically, the algorithm goes like this:
+//!
+//! 1. Find the syntactic container for the syntax. For example, field's
+//! container is the struct, and structs container is a module.
+//! 2. Recursively get the def corresponding to container.
+//! 3. Ask the container def for all child defs. These child defs contain
+//! the answer and answer's siblings.
+//! 4. For each child def, ask for its source.
+//! 5. The child def whose source is the syntax node we've started with
+//! is the answer.
+//!
+//! It's interesting that both Roslyn and Kotlin contain very similar code
+//! shape.
+//!
+//! Let's take a look at Roslyn:
+//!
+//! <https://github.com/dotnet/roslyn/blob/36a0c338d6621cc5fe34b79d414074a95a6a489c/src/Compilers/CSharp/Portable/Compilation/SyntaxTreeSemanticModel.cs#L1403-L1429>
+//! <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1403>
+//!
+//! The `GetDeclaredType` takes `Syntax` as input, and returns `Symbol` as
+//! output. First, it retrieves a `Symbol` for parent `Syntax`:
+//!
+//! * <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1423>
+//!
+//! Then, it iterates parent symbol's children, looking for one which has the
+//! same text span as the original node:
+//!
+//! <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1786>
+//!
+//! Now, let's look at Kotlin:
+//!
+//! <https://github.com/JetBrains/kotlin/blob/a288b8b00e4754a1872b164999c6d3f3b8c8994a/idea/idea-frontend-fir/idea-fir-low-level-api/src/org/jetbrains/kotlin/idea/fir/low/level/api/FirModuleResolveStateImpl.kt#L93-L125>
+//!
+//! This function starts with a syntax node (`KtExpression` is syntax, like all
+//! `Kt` nodes), and returns a def. It uses
+//! `getNonLocalContainingOrThisDeclaration` to get syntactic container for a
+//! current node. Then, `findSourceNonLocalFirDeclaration` gets `Fir` for this
+//! parent. Finally, `findElementIn` function traverses `Fir` children to find
+//! one with the same source we originally started with.
+//!
+//! One question is left though -- where does the recursion stop? This happens
+//! when we get to the file syntax node, which doesn't have a syntactic parent.
+//! In that case, we loop through all the crates that might contain this file
+//! and look for a module whose source is the given file.
+//!
+//! Note that the logic in this module is somewhat fundamentally imprecise --
+//! due to conditional compilation and `#[path]` attributes, there's no
+//! injective mapping from syntax nodes to defs. This is not an edge case --
+//! more or less every item in a `lib.rs` is a part of two distinct crates: a
+//! library with `--cfg test` and a library without.
+//!
+//! At the moment, we don't really handle this well and return the first answer
+//! that works. Ideally, we should first let the caller pick a specific
+//! active crate for a given position, and then provide an API to resolve all
+//! syntax nodes against this specific crate.
+
+use base_db::FileId;
+use hir_def::{
+ attr::AttrId,
+ child_by_source::ChildBySource,
+ dyn_map::DynMap,
+ expr::{LabelId, PatId},
+ keys::{self, Key},
+ AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId,
+ GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
+ TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
+};
+use hir_expand::{name::AsName, HirFileId, MacroCallId};
+use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
+use stdx::impl_from;
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SyntaxNode,
+};
+
+use crate::{db::HirDatabase, InFile};
+
+/// Cache of `ChildBySource` dyn maps, keyed by the container def and the file
+/// whose (possibly macro-expanded) children are being looked up.
+pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap>;
+
+/// Context for syntax-to-def lookups: bundles the database with the
+/// per-session [`SourceToDefCache`] of child-by-source maps.
+pub(super) struct SourceToDefCtx<'a, 'b> {
+    pub(super) db: &'b dyn HirDatabase,
+    pub(super) cache: &'a mut SourceToDefCache,
+}
+
+impl SourceToDefCtx<'_, '_> {
+    /// All modules whose source is `file` — one candidate per relevant crate
+    /// containing the file (see the module docs on why there can be several).
+    pub(super) fn file_to_def(&mut self, file: FileId) -> SmallVec<[ModuleId; 1]> {
+        let _p = profile::span("SourceBinder::to_module_def");
+        let mut mods = SmallVec::new();
+        for &crate_id in self.db.relevant_crates(file).iter() {
+            // FIXME: inner items
+            let crate_def_map = self.db.crate_def_map(crate_id);
+            mods.extend(
+                crate_def_map
+                    .modules_for_file(file)
+                    .map(|local_id| crate_def_map.module_id(local_id)),
+            )
+        }
+        mods
+    }
+
+    /// Resolves a `mod` item: first resolves the parent module (recursively
+    /// for nested modules, or via the file for top-level ones), then looks the
+    /// child up by name in the parent's def map.
+    pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
+        let _p = profile::span("module_to_def");
+        let parent_declaration = src
+            .syntax()
+            .ancestors_with_macros_skip_attr_item(self.db.upcast())
+            .find_map(|it| it.map(ast::Module::cast).transpose());
+
+        let parent_module = match parent_declaration {
+            Some(parent_declaration) => self.module_to_def(parent_declaration),
+            None => {
+                // No enclosing `mod`: take the first module candidate for the file.
+                let file_id = src.file_id.original_file(self.db.upcast());
+                self.file_to_def(file_id).get(0).copied()
+            }
+        }?;
+
+        let child_name = src.value.name()?.as_name();
+        let def_map = parent_module.def_map(self.db.upcast());
+        let &child_id = def_map[parent_module.local_id].children.get(&child_name)?;
+        Some(def_map.module_id(child_id))
+    }
+
+    /// Module for a whole source file; picks the first candidate crate.
+    pub(super) fn source_file_to_def(&mut self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
+        let _p = profile::span("source_file_to_def");
+        let file_id = src.file_id.original_file(self.db.upcast());
+        self.file_to_def(file_id).get(0).copied()
+    }
+
+    // Simple wrappers: look the node up in the syntactic container's
+    // child-by-source dyn map under the matching key.
+    pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
+        self.to_def(src, keys::TRAIT)
+    }
+    pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> {
+        self.to_def(src, keys::IMPL)
+    }
+    pub(super) fn fn_to_def(&mut self, src: InFile<ast::Fn>) -> Option<FunctionId> {
+        self.to_def(src, keys::FUNCTION)
+    }
+    pub(super) fn struct_to_def(&mut self, src: InFile<ast::Struct>) -> Option<StructId> {
+        self.to_def(src, keys::STRUCT)
+    }
+    pub(super) fn enum_to_def(&mut self, src: InFile<ast::Enum>) -> Option<EnumId> {
+        self.to_def(src, keys::ENUM)
+    }
+    pub(super) fn union_to_def(&mut self, src: InFile<ast::Union>) -> Option<UnionId> {
+        self.to_def(src, keys::UNION)
+    }
+    pub(super) fn static_to_def(&mut self, src: InFile<ast::Static>) -> Option<StaticId> {
+        self.to_def(src, keys::STATIC)
+    }
+    pub(super) fn const_to_def(&mut self, src: InFile<ast::Const>) -> Option<ConstId> {
+        self.to_def(src, keys::CONST)
+    }
+    pub(super) fn type_alias_to_def(&mut self, src: InFile<ast::TypeAlias>) -> Option<TypeAliasId> {
+        self.to_def(src, keys::TYPE_ALIAS)
+    }
+    pub(super) fn record_field_to_def(&mut self, src: InFile<ast::RecordField>) -> Option<FieldId> {
+        self.to_def(src, keys::RECORD_FIELD)
+    }
+    pub(super) fn tuple_field_to_def(&mut self, src: InFile<ast::TupleField>) -> Option<FieldId> {
+        self.to_def(src, keys::TUPLE_FIELD)
+    }
+    pub(super) fn enum_variant_to_def(
+        &mut self,
+        src: InFile<ast::Variant>,
+    ) -> Option<EnumVariantId> {
+        self.to_def(src, keys::VARIANT)
+    }
+    /// Dispatches an `ast::Adt` to the struct/enum/union lookup.
+    pub(super) fn adt_to_def(
+        &mut self,
+        InFile { file_id, value }: InFile<ast::Adt>,
+    ) -> Option<AdtId> {
+        match value {
+            ast::Adt::Enum(it) => self.enum_to_def(InFile::new(file_id, it)).map(AdtId::EnumId),
+            ast::Adt::Struct(it) => {
+                self.struct_to_def(InFile::new(file_id, it)).map(AdtId::StructId)
+            }
+            ast::Adt::Union(it) => self.union_to_def(InFile::new(file_id, it)).map(AdtId::UnionId),
+        }
+    }
+    /// Binding pattern -> (owning body, pattern id). Returns `None` when the
+    /// "binding" is actually a path to a constant.
+    pub(super) fn bind_pat_to_def(
+        &mut self,
+        src: InFile<ast::IdentPat>,
+    ) -> Option<(DefWithBodyId, PatId)> {
+        let container = self.find_pat_or_label_container(src.syntax())?;
+        let (body, source_map) = self.db.body_with_source_map(container);
+        let src = src.map(ast::Pat::from);
+        let pat_id = source_map.node_pat(src.as_ref())?;
+        // the pattern could resolve to a constant, verify that that is not the case
+        if let crate::Pat::Bind { .. } = body[pat_id] {
+            Some((container, pat_id))
+        } else {
+            None
+        }
+    }
+    /// `self` parameter -> (owning body, pattern id of the `self` binding).
+    pub(super) fn self_param_to_def(
+        &mut self,
+        src: InFile<ast::SelfParam>,
+    ) -> Option<(DefWithBodyId, PatId)> {
+        let container = self.find_pat_or_label_container(src.syntax())?;
+        let (_body, source_map) = self.db.body_with_source_map(container);
+        let pat_id = source_map.node_self_param(src.as_ref())?;
+        Some((container, pat_id))
+    }
+    /// Loop/block label -> (owning body, label id).
+    pub(super) fn label_to_def(
+        &mut self,
+        src: InFile<ast::Label>,
+    ) -> Option<(DefWithBodyId, LabelId)> {
+        let container = self.find_pat_or_label_container(src.syntax())?;
+        let (_body, source_map) = self.db.body_with_source_map(container);
+        let label_id = source_map.node_label(src.as_ref())?;
+        Some((container, label_id))
+    }
+
+    /// The attribute-macro call expanding on `src`, if the item has one.
+    pub(super) fn item_to_macro_call(&mut self, src: InFile<ast::Item>) -> Option<MacroCallId> {
+        let map = self.dyn_map(src.as_ref())?;
+        map[keys::ATTR_MACRO_CALL].get(&src.value).copied()
+    }
+
+    /// Maps a derive attribute on `item` to its expansion info:
+    /// (AttrId, derive attribute call id, derive call ids)
+    pub(super) fn attr_to_derive_macro_call(
+        &mut self,
+        item: InFile<&ast::Adt>,
+        src: InFile<ast::Attr>,
+    ) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
+        let map = self.dyn_map(item)?;
+        map[keys::DERIVE_MACRO_CALL]
+            .get(&src.value)
+            .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
+    }
+
+    /// Whether any derive-macro calls are recorded for the given ADT.
+    pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
+        self.dyn_map(adt).as_ref().map_or(false, |map| !map[keys::DERIVE_MACRO_CALL].is_empty())
+    }
+
+    /// Generic lookup: find the node's container, then read its dyn map under `key`.
+    fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
+        &mut self,
+        src: InFile<Ast>,
+        key: Key<Ast, ID>,
+    ) -> Option<ID> {
+        self.dyn_map(src.as_ref())?[key].get(&src.value).copied()
+    }
+
+    /// Child-by-source map of the node's syntactic container, if any.
+    fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> {
+        let container = self.find_container(src.map(|it| it.syntax()))?;
+        Some(self.cache_for(container, src.file_id))
+    }
+
+    /// Lazily computes and caches the `ChildBySource` map for `container` in `file_id`.
+    fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
+        let db = self.db;
+        self.cache
+            .entry((container, file_id))
+            .or_insert_with(|| container.child_by_source(db, file_id))
+    }
+
+    pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
+        let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+        let dyn_map = self.cache_for(container, src.file_id);
+        dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|x| TypeParamId::from_unchecked(x))
+    }
+
+    pub(super) fn lifetime_param_to_def(
+        &mut self,
+        src: InFile<ast::LifetimeParam>,
+    ) -> Option<LifetimeParamId> {
+        let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+        let dyn_map = self.cache_for(container, src.file_id);
+        dyn_map[keys::LIFETIME_PARAM].get(&src.value).copied()
+    }
+
+    pub(super) fn const_param_to_def(
+        &mut self,
+        src: InFile<ast::ConstParam>,
+    ) -> Option<ConstParamId> {
+        let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+        let dyn_map = self.cache_for(container, src.file_id);
+        dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(|x| ConstParamId::from_unchecked(x))
+    }
+
+    /// Dispatches an `ast::GenericParam` to the type/lifetime/const lookup.
+    pub(super) fn generic_param_to_def(
+        &mut self,
+        InFile { file_id, value }: InFile<ast::GenericParam>,
+    ) -> Option<GenericParamId> {
+        match value {
+            ast::GenericParam::ConstParam(it) => {
+                self.const_param_to_def(InFile::new(file_id, it)).map(GenericParamId::ConstParamId)
+            }
+            ast::GenericParam::LifetimeParam(it) => self
+                .lifetime_param_to_def(InFile::new(file_id, it))
+                .map(GenericParamId::LifetimeParamId),
+            ast::GenericParam::TypeParam(it) => {
+                self.type_param_to_def(InFile::new(file_id, it)).map(GenericParamId::TypeParamId)
+            }
+        }
+    }
+
+    /// `macro_rules!` and `macro` definitions live under different keys.
+    pub(super) fn macro_to_def(&mut self, src: InFile<ast::Macro>) -> Option<MacroId> {
+        self.dyn_map(src.as_ref()).and_then(|it| match &src.value {
+            ast::Macro::MacroRules(value) => {
+                it[keys::MACRO_RULES].get(value).copied().map(MacroId::from)
+            }
+            ast::Macro::MacroDef(value) => it[keys::MACRO2].get(value).copied().map(MacroId::from),
+        })
+    }
+
+    /// Proc macros are defined by `fn` items, hence the `ast::Fn` input.
+    pub(super) fn proc_macro_to_def(&mut self, src: InFile<ast::Fn>) -> Option<MacroId> {
+        self.dyn_map(src.as_ref())
+            .and_then(|it| it[keys::PROC_MACRO].get(&src.value).copied().map(MacroId::from))
+    }
+
+    /// Walks ancestors (through macros, skipping attribute-expanded items) to
+    /// the nearest container that maps to a def; falls back to the file's module.
+    pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
+        for container in src.ancestors_with_macros_skip_attr_item(self.db.upcast()) {
+            if let Some(res) = self.container_to_def(container) {
+                return Some(res);
+            }
+        }
+
+        let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).get(0).copied()?;
+        Some(def.into())
+    }
+
+    /// Maps a single ancestor node to a `ChildContainer`, if it is one.
+    fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> {
+        let cont = if let Some(item) = ast::Item::cast(container.value.clone()) {
+            match item {
+                ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
+                ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
+                ast::Item::Impl(it) => self.impl_to_def(container.with_value(it))?.into(),
+                ast::Item::Enum(it) => self.enum_to_def(container.with_value(it))?.into(),
+                ast::Item::TypeAlias(it) => {
+                    self.type_alias_to_def(container.with_value(it))?.into()
+                }
+                // Structs and unions act as `VariantId` containers: their
+                // children (fields) are looked up through that id.
+                ast::Item::Struct(it) => {
+                    let def = self.struct_to_def(container.with_value(it))?;
+                    VariantId::from(def).into()
+                }
+                ast::Item::Union(it) => {
+                    let def = self.union_to_def(container.with_value(it))?;
+                    VariantId::from(def).into()
+                }
+                ast::Item::Fn(it) => {
+                    let def = self.fn_to_def(container.with_value(it))?;
+                    DefWithBodyId::from(def).into()
+                }
+                ast::Item::Static(it) => {
+                    let def = self.static_to_def(container.with_value(it))?;
+                    DefWithBodyId::from(def).into()
+                }
+                ast::Item::Const(it) => {
+                    let def = self.const_to_def(container.with_value(it))?;
+                    DefWithBodyId::from(def).into()
+                }
+                _ => return None,
+            }
+        } else {
+            // Not an item: enum variants also contain children (their fields).
+            let it = ast::Variant::cast(container.value)?;
+            let def = self.enum_variant_to_def(InFile::new(container.file_id, it))?;
+            VariantId::from(def).into()
+        };
+        Some(cont)
+    }
+
+    /// Nearest ancestor item that can own generic parameters.
+    fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
+        let ancestors = src.ancestors_with_macros_skip_attr_item(self.db.upcast());
+        for InFile { file_id, value } in ancestors {
+            let item = match ast::Item::cast(value) {
+                Some(it) => it,
+                None => continue,
+            };
+            let res: GenericDefId = match item {
+                ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
+                ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
+                ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
+                ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
+                ast::Item::TypeAlias(it) => {
+                    self.type_alias_to_def(InFile::new(file_id, it))?.into()
+                }
+                ast::Item::Impl(it) => self.impl_to_def(InFile::new(file_id, it))?.into(),
+                _ => continue,
+            };
+            return Some(res);
+        }
+        None
+    }
+
+    /// Nearest ancestor item that has a body (const/static/fn), i.e. can
+    /// contain patterns and labels.
+    fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
+        let ancestors = src.ancestors_with_macros_skip_attr_item(self.db.upcast());
+        for InFile { file_id, value } in ancestors {
+            let item = match ast::Item::cast(value) {
+                Some(it) => it,
+                None => continue,
+            };
+            let res: DefWithBodyId = match item {
+                ast::Item::Const(it) => self.const_to_def(InFile::new(file_id, it))?.into(),
+                ast::Item::Static(it) => self.static_to_def(InFile::new(file_id, it))?.into(),
+                ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
+                _ => continue,
+            };
+            return Some(res);
+        }
+        None
+    }
+}
+
+/// A syntactic container whose children can be looked up by their source
+/// (via `ChildBySource`); the "parent def" step of the source-to-def
+/// algorithm resolves to one of these.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub(crate) enum ChildContainer {
+    DefWithBodyId(DefWithBodyId),
+    ModuleId(ModuleId),
+    TraitId(TraitId),
+    ImplId(ImplId),
+    EnumId(EnumId),
+    VariantId(VariantId),
+    TypeAliasId(TypeAliasId),
+    /// XXX: this might be the same def as, for example an `EnumId`. However,
+    /// here the children are generic parameters, and not, eg enum variants.
+    GenericDefId(GenericDefId),
+}
+// Generates `From<Id> for ChildContainer` conversions for each variant's
+// payload type, enabling the `.into()` calls in `container_to_def`.
+impl_from! {
+    DefWithBodyId,
+    ModuleId,
+    TraitId,
+    ImplId,
+    EnumId,
+    VariantId,
+    TypeAliasId,
+    GenericDefId
+    for ChildContainer
+}
+
+impl ChildContainer {
+    /// Builds the syntax-to-def map of this container's children for the given
+    /// file. Type aliases have no lookupable children, hence the empty map.
+    fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap {
+        let db = db.upcast();
+        match self {
+            ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
+            ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
+            ChildContainer::TraitId(it) => it.child_by_source(db, file_id),
+            ChildContainer::ImplId(it) => it.child_by_source(db, file_id),
+            ChildContainer::EnumId(it) => it.child_by_source(db, file_id),
+            ChildContainer::VariantId(it) => it.child_by_source(db, file_id),
+            ChildContainer::TypeAliasId(_) => DynMap::default(),
+            ChildContainer::GenericDefId(it) => it.child_by_source(db, file_id),
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
new file mode 100644
index 000000000..1eb51b20c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -0,0 +1,915 @@
+//! Lookup hir elements using positions in the source code. This is a lossy
+//! transformation: in general, a single source might correspond to several
+//! modules, functions, etc, due to macros, cfgs and `#[path=]` attributes on
+//! modules.
+//!
+//! So, this module should not be used during hir construction; it exists
+//! purely for "IDE needs".
+use std::{
+ iter::{self, once},
+ sync::Arc,
+};
+
+use hir_def::{
+ body::{
+ self,
+ scope::{ExprScopes, ScopeId},
+ Body, BodySourceMap,
+ },
+ expr::{ExprId, Pat, PatId},
+ macro_id_to_def_id,
+ path::{ModPath, Path, PathKind},
+ resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
+ type_ref::Mutability,
+ AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId,
+ Lookup, ModuleDefId, VariantId,
+};
+use hir_expand::{
+ builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, name::AsName, HirFileId, InFile,
+};
+use hir_ty::{
+ diagnostics::{
+ record_literal_missing_fields, record_pattern_missing_fields, unsafe_expressions,
+ UnsafeExpr,
+ },
+ method_resolution, Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution,
+ TyExt, TyKind, TyLoweringContext,
+};
+use itertools::Itertools;
+use smallvec::SmallVec;
+use syntax::{
+ ast::{self, AstNode},
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
+ BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
+ Struct, ToolModule, Trait, Type, TypeAlias, Variant,
+};
+
+/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
+/// original source files. It should not be used inside the HIR itself.
+#[derive(Debug)]
+pub(crate) struct SourceAnalyzer {
+ pub(crate) file_id: HirFileId,
+ pub(crate) resolver: Resolver,
+ // Body and source map of the analyzed definition; `None` when constructed
+ // via `new_for_resolver`.
+ def: Option<(DefWithBodyId, Arc<Body>, Arc<BodySourceMap>)>,
+ // Inference result; `None` for `new_for_body_no_infer` / `new_for_resolver`.
+ infer: Option<Arc<InferenceResult>>,
+}
+
+impl SourceAnalyzer {
+ /// Creates an analyzer for `def`'s body and eagerly runs type inference.
+ /// The scope is chosen from `offset` when given, otherwise from `node`.
+ pub(crate) fn new_for_body(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer {
+ resolver,
+ def: Some((def, body, source_map)),
+ infer: Some(db.infer(def)),
+ file_id,
+ }
+ }
+
+ /// Like [`Self::new_for_body`], but skips type inference; cheaper when
+ /// only name resolution is required.
+ pub(crate) fn new_for_body_no_infer(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id }
+ }
+
+ /// Creates an analyzer from a plain resolver: no body, source map, or
+ /// inference is available in this mode.
+ pub(crate) fn new_for_resolver(
+ resolver: Resolver,
+ node: InFile<&SyntaxNode>,
+ ) -> SourceAnalyzer {
+ SourceAnalyzer { resolver, def: None, infer: None, file_id: node.file_id }
+ }
+
+ // Accessor for the body's source map, if a body was loaded.
+ fn body_source_map(&self) -> Option<&BodySourceMap> {
+ self.def.as_ref().map(|(.., source_map)| &**source_map)
+ }
+ // Accessor for the lowered body, if one was loaded.
+ fn body(&self) -> Option<&Body> {
+ self.def.as_ref().map(|(_, body, _)| &**body)
+ }
+
+ /// Maps an AST expression to its HIR [`ExprId`]. Macro expressions are
+ /// first expanded to the expression they produce.
+ fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<ExprId> {
+ let src = match expr {
+ ast::Expr::MacroExpr(expr) => {
+ self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?.clone()))?
+ }
+ _ => InFile::new(self.file_id, expr.clone()),
+ };
+ let sm = self.body_source_map()?;
+ sm.node_expr(src.as_ref())
+ }
+
+ /// Maps an AST pattern to its HIR [`PatId`].
+ fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
+ // FIXME: macros, see `expr_id`
+ let src = InFile { file_id: self.file_id, value: pat };
+ self.body_source_map()?.node_pat(src)
+ }
+
+ /// Expands a macro call, recursing while the expansion is itself a macro
+ /// call, until a plain expression is produced.
+ fn expand_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: InFile<ast::MacroCall>,
+ ) -> Option<InFile<ast::Expr>> {
+ let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?;
+ let expanded = db.parse_or_expand(macro_file)?;
+
+ let res = match ast::MacroCall::cast(expanded.clone()) {
+ Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?,
+ _ => InFile::new(macro_file, ast::Expr::cast(expanded)?),
+ };
+ Some(res)
+ }
+
+ /// Returns the mutability of an implicit reborrow of `expr`, detected as a
+ /// `Deref` adjustment immediately followed by a `Borrow` adjustment in the
+ /// inference results; `None` if there is no such adjacent pair.
+ pub(crate) fn is_implicit_reborrow(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<Mutability> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let adjustments = infer.expr_adjustments.get(&expr_id)?;
+ adjustments.windows(2).find_map(|slice| match slice {
+ &[Adjustment {kind: Adjust::Deref(None), ..}, Adjustment {kind: Adjust::Borrow(AutoBorrow::Ref(m)), ..}] => Some(match m {
+ hir_ty::Mutability::Mut => Mutability::Mut,
+ hir_ty::Mutability::Not => Mutability::Shared,
+ }),
+ _ => None,
+ })
+ }
+
+ /// Returns the expression's inferred type, plus the final coerced type
+ /// (target of the last adjustment) when any adjustments were recorded.
+ pub(crate) fn type_of_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<(Type, Option<Type>)> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .expr_adjustments
+ .get(&expr_id)
+ .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
+ let ty = infer[expr_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ /// Returns the pattern's inferred type, plus the last recorded pattern
+ /// adjustment type, if any.
+ pub(crate) fn type_of_pat(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<(Type, Option<Type>)> {
+ let pat_id = self.pat_id(pat)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .pat_adjustments
+ .get(&pat_id)
+ .and_then(|adjusts| adjusts.last().map(|adjust| adjust.clone()));
+ let ty = infer[pat_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ /// Returns the inferred type of a `self` parameter.
+ pub(crate) fn type_of_self(
+ &self,
+ db: &dyn HirDatabase,
+ param: &ast::SelfParam,
+ ) -> Option<Type> {
+ let src = InFile { file_id: self.file_id, value: param };
+ let pat_id = self.body_source_map()?.node_self_param(src)?;
+ let ty = self.infer.as_ref()?[pat_id].clone();
+ Some(Type::new_with_resolver(db, &self.resolver, ty))
+ }
+
+ /// Returns the inferred binding mode (move / by-shared-ref / by-mut-ref)
+ /// of an identifier pattern.
+ pub(crate) fn binding_mode_of_pat(
+ &self,
+ _db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<BindingMode> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let infer = self.infer.as_ref()?;
+ infer.pat_binding_modes.get(&pat_id).map(|bm| match bm {
+ hir_ty::BindingMode::Move => BindingMode::Move,
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
+ BindingMode::Ref(Mutability::Shared)
+ }
+ })
+ }
+ /// Returns the types recorded as adjustments for this pattern, if any.
+ pub(crate) fn pattern_adjustments(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<SmallVec<[Type; 1]>> {
+ let pat_id = self.pat_id(&pat)?;
+ let infer = self.infer.as_ref()?;
+ Some(
+ infer
+ .pat_adjustments
+ .get(&pat_id)?
+ .iter()
+ .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
+ .collect(),
+ )
+ }
+
+ /// Resolves a method call into a [`Callable`], marked as a bound method
+ /// (the receiver is already accounted for).
+ pub(crate) fn resolve_method_call_as_callable(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Callable> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (func, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+ let ty = db.value_ty(func.into()).substitute(Interner, &substs);
+ let ty = Type::new_with_resolver(db, &self.resolver, ty);
+ let mut res = ty.as_callable(db)?;
+ res.is_bound_method = true;
+ Some(res)
+ }
+
+ /// Resolves a method call to a function: the concrete impl method when it
+ /// can be found, otherwise the trait method inference resolved to.
+ pub(crate) fn resolve_method_call(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<FunctionId> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+ let f_in_impl = self.resolve_impl_method(db, f_in_trait, &substs);
+ f_in_impl.or(Some(f_in_trait))
+ }
+
+ /// Resolves a field-access expression to its field.
+ pub(crate) fn resolve_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Field> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+ }
+
+ /// Resolves a record-literal field to `(field, shorthand local, field type)`.
+ /// The local is only `Some` for shorthand init (`S { field }`), where the
+ /// initializer is a same-named local binding.
+ pub(crate) fn resolve_record_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let expr = ast::Expr::from(record_expr);
+ let expr_id = self.body_source_map()?.node_expr(InFile::new(self.file_id, &expr))?;
+
+ let local_name = field.field_name()?.as_name();
+ // An explicit `name_ref` means `name: value` syntax, so no shorthand local.
+ let local = if field.name_ref().is_some() {
+ None
+ } else {
+ let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone()));
+ match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+ Some(ValueNs::LocalBinding(pat_id)) => {
+ Some(Local { pat_id, parent: self.resolver.body_owner()? })
+ }
+ _ => None,
+ }
+ };
+ let (_, subst) = self.infer.as_ref()?.type_of_expr.get(expr_id)?.as_adt()?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_expr(expr_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
+ let field_ty =
+ db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
+ Some((field.into(), local, Type::new_with_resolver(db, &self.resolver, field_ty)))
+ }
+
+ /// Resolves a record-pattern field to its field.
+ pub(crate) fn resolve_record_pat_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordPatField,
+ ) -> Option<Field> {
+ let field_name = field.field_name()?.as_name();
+ let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let pat_id = self.pat_id(&record_pat.into())?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
+ Some(field.into())
+ }
+
+ /// Resolves the path of a macro call to the macro it invokes.
+ pub(crate) fn resolve_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<Macro> {
+ let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id);
+ let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+ self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into())
+ }
+
+ /// An identifier pattern may actually name a constant; returns that
+ /// definition when the lowered pattern is a path.
+ pub(crate) fn resolve_bind_pat_to_const(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<ModuleDef> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let body = self.body()?;
+ let path = match &body[pat_id] {
+ Pat::Path(path) => path,
+ _ => return None,
+ };
+ let res = resolve_hir_path(db, &self.resolver, path)?;
+ match res {
+ PathResolution::Def(def) => Some(def),
+ _ => None,
+ }
+ }
+
+ /// Resolves an arbitrary source path: in expressions, patterns, record
+ /// literals/patterns, use trees, attributes (incl. derive helpers and tool
+ /// modules) and visibility modifiers. Inference results are consulted
+ /// first; plain name resolution is the fallback.
+ pub(crate) fn resolve_path(
+ &self,
+ db: &dyn HirDatabase,
+ path: &ast::Path,
+ ) -> Option<PathResolution> {
+ let parent = path.syntax().parent();
+ let parent = || parent.clone();
+
+ let mut prefer_value_ns = false;
+ let resolved = (|| {
+ if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) {
+ let expr_id = self.expr_id(db, &path_expr.into())?;
+ let infer = self.infer.as_ref()?;
+ if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) {
+ // For trait functions, prefer the concrete impl's method when it
+ // can be recovered from the `FnDef` substitutions.
+ let assoc = match assoc {
+ AssocItemId::FunctionId(f_in_trait) => {
+ match infer.type_of_expr.get(expr_id) {
+ None => assoc,
+ Some(func_ty) => {
+ if let TyKind::FnDef(_fn_def, subs) = func_ty.kind(Interner) {
+ self.resolve_impl_method(db, f_in_trait, subs)
+ .map(AssocItemId::FunctionId)
+ .unwrap_or(assoc)
+ } else {
+ assoc
+ }
+ }
+ }
+ }
+
+ _ => assoc,
+ };
+
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ infer.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ prefer_value_ns = true;
+ } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
+ let pat_id = self.pat_id(&path_pat.into())?;
+ if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) {
+ let expr_id = self.expr_id(db, &rec_lit.into())?;
+ if let Some(VariantId::EnumVariantId(variant)) =
+ self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else {
+ let record_pat = parent().and_then(ast::RecordPat::cast).map(ast::Pat::from);
+ let tuple_struct_pat =
+ || parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
+ if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
+ let pat_id = self.pat_id(&pat)?;
+ let variant_res_for_pat =
+ self.infer.as_ref()?.variant_resolution_for_pat(pat_id);
+ if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ }
+ }
+ None
+ })();
+ if let Some(_) = resolved {
+ return resolved;
+ }
+
+ // This must be a normal source file rather than macro file.
+ let hygiene = Hygiene::new(db.upcast(), self.file_id);
+ let ctx = body::LowerCtx::with_hygiene(db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+
+ // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
+ // trying to resolve foo::bar.
+ if let Some(use_tree) = parent().and_then(ast::UseTree::cast) {
+ if use_tree.coloncolon_token().is_some() {
+ return resolve_hir_path_qualifier(db, &self.resolver, &hir_path);
+ }
+ }
+
+ // Walk up through nested path segments to the enclosing `Meta`, if the
+ // path is part of an attribute.
+ let meta_path = path
+ .syntax()
+ .ancestors()
+ .take_while(|it| {
+ let kind = it.kind();
+ ast::Path::can_cast(kind) || ast::Meta::can_cast(kind)
+ })
+ .last()
+ .and_then(ast::Meta::cast);
+
+ // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
+ // trying to resolve foo::bar.
+ if path.parent_path().is_some() {
+ return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) {
+ None if meta_path.is_some() => {
+ path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ })
+ }
+ res => res,
+ };
+ } else if let Some(meta_path) = meta_path {
+ // Case where we are resolving the final path segment of a path in an attribute
+ // in this case we have to check for inert/builtin attributes and tools and prioritize
+ // resolution of attributes over other namespaces
+ if let Some(name_ref) = path.as_single_name_ref() {
+ let builtin =
+ BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text());
+ if let Some(_) = builtin {
+ return builtin.map(PathResolution::BuiltinAttr);
+ }
+
+ if let Some(attr) = meta_path.parent_attr() {
+ // Locate the ADT the attribute is (transitively) attached to, so
+ // that in-scope derive helpers can be considered.
+ let adt = if let Some(field) =
+ attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ };
+ if let Some(adt) = adt {
+ let ast_id = db.ast_id_map(self.file_id).ast_id(&adt);
+ if let Some(helpers) = self
+ .resolver
+ .def_map()
+ .derive_helpers_in_scope(InFile::new(self.file_id, ast_id))
+ {
+ // FIXME: Multiple derives can have the same helper
+ let name_ref = name_ref.as_name();
+ for (macro_id, mut helpers) in
+ helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter()
+ {
+ if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref)
+ {
+ return Some(PathResolution::DeriveHelper(DeriveHelper {
+ derive: *macro_id,
+ idx,
+ }));
+ }
+ }
+ }
+ }
+ }
+ }
+ return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) {
+ Some(m) => Some(PathResolution::Def(ModuleDef::Macro(m))),
+ // this labels any path that starts with a tool module as the tool itself, this is technically wrong
+ // but there is no benefit in differentiating these two cases for the time being
+ None => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ }),
+ };
+ }
+ if parent().map_or(false, |it| ast::Visibility::can_cast(it.kind())) {
+ resolve_hir_path_qualifier(db, &self.resolver, &hir_path)
+ } else {
+ resolve_hir_path_(db, &self.resolver, &hir_path, prefer_value_ns)
+ }
+ }
+
+ /// Returns the fields missing from a record literal, with their types.
+ pub(crate) fn record_literal_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ literal: &ast::RecordExpr,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let expr_id = self.expr_id(db, &literal.clone().into())?;
+ let substs = infer.type_of_expr[expr_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ /// Returns the fields missing from a record pattern, with their types.
+ pub(crate) fn record_pattern_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ pattern: &ast::RecordPat,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let pat_id = self.pat_id(&pattern.clone().into())?;
+ let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ /// Shared helper: materializes `(Field, Type)` pairs for the given missing
+ /// fields of `variant`, applying `substs` to the declared field types.
+ fn missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ substs: &Substitution,
+ variant: VariantId,
+ missing_fields: Vec<LocalFieldId>,
+ ) -> Vec<(Field, Type)> {
+ let field_types = db.field_types(variant);
+
+ missing_fields
+ .into_iter()
+ .map(|local_id| {
+ let field = FieldId { parent: variant, local_id };
+ let ty = field_types[local_id].clone().substitute(Interner, substs);
+ (field.into(), Type::new_with_resolver_inner(db, &self.resolver, ty))
+ })
+ .collect()
+ }
+
+ /// Expands a macro call to the file of its expansion. Expansions nested
+ /// deeper than 64 levels are rejected (recursion guard).
+ pub(crate) fn expand(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<HirFileId> {
+ let krate = self.resolver.krate();
+ let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
+ self.resolver
+ .resolve_path_as_macro(db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(db.upcast(), it))
+ })?;
+ Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+ }
+
+ /// Resolves the variant a record literal constructs.
+ pub(crate) fn resolve_variant(
+ &self,
+ db: &dyn HirDatabase,
+ record_lit: ast::RecordExpr,
+ ) -> Option<VariantId> {
+ let infer = self.infer.as_ref()?;
+ let expr_id = self.expr_id(db, &record_lit.into())?;
+ infer.variant_resolution_for_expr(expr_id)
+ }
+
+ /// Whether this macro call is unsafe: either it is the builtin `asm!` /
+ /// `global_asm!`, or its expansion contains unsafe operations outside an
+ /// `unsafe` block.
+ pub(crate) fn is_unsafe_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> bool {
+ // check for asm/global_asm
+ if let Some(mac) = self.resolve_macro_call(db, macro_call) {
+ let ex = match mac.id {
+ hir_def::MacroId::Macro2Id(it) => it.lookup(db.upcast()).expander,
+ hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
+ _ => hir_def::MacroExpander::Declarative,
+ };
+ match ex {
+ hir_def::MacroExpander::BuiltIn(e)
+ if e == BuiltinFnLikeExpander::Asm || e == BuiltinFnLikeExpander::GlobalAsm =>
+ {
+ return true
+ }
+ _ => (),
+ }
+ }
+ let macro_expr = match macro_call
+ .map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
+ .transpose()
+ {
+ Some(it) => it,
+ None => return false,
+ };
+
+ if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) {
+ if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr.as_ref()) {
+ // Scan the expanded expression for unsafe operations that are not
+ // themselves wrapped in an `unsafe` block.
+ let mut is_unsafe = false;
+ unsafe_expressions(
+ db,
+ infer,
+ *def,
+ body,
+ expanded_expr,
+ &mut |UnsafeExpr { inside_unsafe_block, .. }| is_unsafe |= !inside_unsafe_block,
+ );
+ return is_unsafe;
+ }
+ }
+ false
+ }
+
+ /// Given a trait function and the substitutions of a call, looks up the
+ /// matching method on the self type's concrete impl. Returns `None` when
+ /// `func` is not a trait method or no impl method can be found.
+ fn resolve_impl_method(
+ &self,
+ db: &dyn HirDatabase,
+ func: FunctionId,
+ substs: &Substitution,
+ ) -> Option<FunctionId> {
+ let impled_trait = match func.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(trait_id) => trait_id,
+ _ => return None,
+ };
+ if substs.is_empty(Interner) {
+ return None;
+ }
+ // The first parameter of the substitution is the method's self type.
+ let self_ty = substs.at(Interner, 0).ty(Interner)?;
+ let krate = self.resolver.krate();
+ let trait_env = self.resolver.body_owner()?.as_generic_def_id().map_or_else(
+ || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
+ |d| db.trait_environment(d),
+ );
+
+ let fun_data = db.function_data(func);
+ method_resolution::lookup_impl_method(self_ty, db, trait_env, impled_trait, &fun_data.name)
+ }
+}
+
+/// Finds the innermost expression scope for `node` by walking its ancestor
+/// expressions and returning the first one that has a scope recorded.
+fn scope_for(
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ node: InFile<&SyntaxNode>,
+) -> Option<ScopeId> {
+ for ancestor in node.value.ancestors() {
+ let expr = match ast::Expr::cast(ancestor) {
+ Some(it) => it,
+ None => continue,
+ };
+ let expr_id = match source_map.node_expr(InFile::new(node.file_id, &expr)) {
+ Some(it) => it,
+ None => continue,
+ };
+ if let Some(scope) = scopes.scope_for(expr_id) {
+ return Some(scope);
+ }
+ }
+ None
+}
+
+/// Finds the narrowest expression scope whose text range contains `offset`
+/// in `from_file`, then refines it via [`adjust`] for completion positions.
+fn scope_for_offset(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ if from_file == file_id {
+ return Some((value.text_range(), scope));
+ }
+
+ // FIXME handle attribute expansion
+ // The expression comes from a macro expansion: walk the chain of
+ // macro-call nodes up to `from_file` and use that macro call's
+ // range as a stand-in for the expression's range.
+ let source = iter::successors(file_id.call_node(db.upcast()), |it| {
+ it.file_id.call_node(db.upcast())
+ })
+ .find(|it| it.file_id == from_file)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ Some((source.value.text_range(), scope))
+ })
+ .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
+ // find containing scope
+ .min_by_key(|(expr_range, _scope)| expr_range.len())
+ .map(|(expr_range, scope)| {
+ adjust(db, scopes, source_map, expr_range, from_file, offset).unwrap_or(*scope)
+ })
+}
+
+// XXX: during completion, cursor might be outside of any particular
+// expression. Try to figure out the correct scope...
+fn adjust(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ expr_range: TextRange,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ // Collect scopes of child expressions strictly inside `expr_range` that
+ // start before the cursor.
+ let child_scopes = scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let source = source_map.expr_syntax(*id).ok()?;
+ // FIXME: correctly handle macro expansion
+ if source.file_id != from_file {
+ return None;
+ }
+ let root = source.file_syntax(db.upcast());
+ let node = source.value.to_node(&root);
+ Some((node.syntax().text_range(), scope))
+ })
+ .filter(|&(range, _)| {
+ range.start() <= offset && expr_range.contains_range(range) && range != expr_range
+ });
+
+ // Pick the "largest" candidate: an enclosing range wins over its contents,
+ // and among disjoint ranges the later-starting one (closest before the
+ // cursor) wins.
+ child_scopes
+ .max_by(|&(r1, _), &(r2, _)| {
+ if r1.contains_range(r2) {
+ std::cmp::Ordering::Greater
+ } else if r2.contains_range(r1) {
+ std::cmp::Ordering::Less
+ } else {
+ r1.start().cmp(&r2.start())
+ }
+ })
+ .map(|(_ptr, scope)| *scope)
+}
+
+/// Resolves `path` across namespaces, preferring the type namespace over the
+/// value namespace (see [`resolve_hir_path_`]).
+#[inline]
+pub(crate) fn resolve_hir_path(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolve_hir_path_(db, resolver, path, false)
+}
+
+/// Resolves `path` strictly as a macro, ignoring all other namespaces.
+#[inline]
+pub(crate) fn resolve_hir_path_as_macro(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<Macro> {
+ resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(Into::into)
+}
+
+/// Shared implementation of HIR path resolution.
+///
+/// Tries the type and value namespaces (order controlled by
+/// `prefer_value_ns`), then module items, then macros — returning the first
+/// hit.
+fn resolve_hir_path_(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+ prefer_value_ns: bool,
+) -> Option<PathResolution> {
+ let types = || {
+ let (ty, unresolved) = match path.type_anchor() {
+ Some(type_ref) => {
+ // `<T>::foo`-style path: lower the anchor type directly.
+ let (_, res) = TyLoweringContext::new(db, resolver).lower_ty_ext(type_ref);
+ res.map(|ty_ns| (ty_ns, path.segments().first()))
+ }
+ None => {
+ let (ty, remaining) =
+ resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?;
+ match remaining {
+ Some(remaining) if remaining > 1 => {
+ if remaining + 1 == path.segments().len() {
+ Some((ty, path.segments().last()))
+ } else {
+ None
+ }
+ }
+ _ => Some((ty, path.segments().get(1))),
+ }
+ }
+ }?;
+
+ // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
+ // within the trait's associated types.
+ if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
+ if let Some(type_alias_id) =
+ db.trait_data(trait_id).associated_type_by_name(unresolved.name)
+ {
+ return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
+ }
+ }
+
+ let res = match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ };
+ match unresolved {
+ // A trailing unresolved segment may be an associated-type shorthand
+ // (`T::Assoc`); try the generic def's candidates.
+ Some(unresolved) => resolver
+ .generic_def()
+ .and_then(|def| {
+ hir_ty::associated_type_shorthand_candidates(
+ db,
+ def,
+ res.in_type_ns()?,
+ |name, _, id| (name == unresolved.name).then(|| id),
+ )
+ })
+ .map(TypeAlias::from)
+ .map(Into::into)
+ .map(PathResolution::Def),
+ None => Some(res),
+ }
+ };
+
+ let body_owner = resolver.body_owner();
+ let values = || {
+ resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
+ let res = match val {
+ ValueNs::LocalBinding(pat_id) => {
+ let var = Local { parent: body_owner?, pat_id };
+ PathResolution::Local(var)
+ }
+ ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+ ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+ ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+ ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+ ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
+ ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
+ };
+ Some(res)
+ })
+ };
+
+ let items = || {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ };
+
+ let macros = || {
+ resolver
+ .resolve_path_as_macro(db.upcast(), path.mod_path())
+ .map(|def| PathResolution::Def(ModuleDef::Macro(def.into())))
+ };
+
+ if prefer_value_ns { values().or_else(types) } else { types().or_else(values) }
+ .or_else(items)
+ .or_else(macros)
+}
+
+/// Resolves a path where we know it is a qualifier of another path.
+///
+/// For example, if we have:
+/// ```
+/// mod my {
+/// pub mod foo {
+/// struct Bar;
+/// }
+///
+/// pub fn foo() {}
+/// }
+/// ```
+/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function.
+fn resolve_hir_path_qualifier(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ // Try the type namespace first; fall back to module items.
+ resolver
+ .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+ .map(|ty| match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ })
+ .or_else(|| {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ })
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
new file mode 100644
index 000000000..616a406c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -0,0 +1,348 @@
+//! File symbol extraction.
+
+use base_db::FileRange;
+use hir_def::{
+ item_tree::ItemTreeNode, src::HasSource, AdtId, AssocItemId, AssocItemLoc, DefWithBodyId,
+ HasModule, ImplId, ItemContainerId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId,
+};
+use hir_expand::{HirFileId, InFile};
+use hir_ty::db::HirDatabase;
+use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
+
+use crate::{Module, Semantics};
+
+/// The actual data that is stored in the index. It should be as compact as
+/// possible.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FileSymbol {
+ pub name: SmolStr,
+ pub loc: DeclarationLocation,
+ pub kind: FileSymbolKind,
+ pub container_name: Option<SmolStr>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DeclarationLocation {
+ /// The file id for both the `ptr` and `name_ptr`.
+ pub hir_file_id: HirFileId,
+ /// This points to the whole syntax node of the declaration.
+ pub ptr: SyntaxNodePtr,
+ /// This points to the [`syntax::ast::Name`] identifier of the declaration.
+ pub name_ptr: SyntaxNodePtr,
+}
+
+impl DeclarationLocation {
+ pub fn syntax<DB: HirDatabase>(&self, sema: &Semantics<'_, DB>) -> Option<SyntaxNode> {
+ let root = sema.parse_or_expand(self.hir_file_id)?;
+ Some(self.ptr.to_node(&root))
+ }
+
+ pub fn original_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
+ let node = resolve_node(db, self.hir_file_id, &self.ptr)?;
+ Some(node.as_ref().original_file_range(db.upcast()))
+ }
+
+ pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
+ let node = resolve_node(db, self.hir_file_id, &self.name_ptr)?;
+ node.as_ref().original_file_range_opt(db.upcast())
+ }
+}
+
+fn resolve_node(
+ db: &dyn HirDatabase,
+ file_id: HirFileId,
+ ptr: &SyntaxNodePtr,
+) -> Option<InFile<SyntaxNode>> {
+ let root = db.parse_or_expand(file_id)?;
+ let node = ptr.to_node(&root);
+ Some(InFile::new(file_id, node))
+}
+
+#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
+pub enum FileSymbolKind {
+ Const,
+ Enum,
+ Function,
+ Macro,
+ Module,
+ Static,
+ Struct,
+ Trait,
+ TypeAlias,
+ Union,
+}
+
+impl FileSymbolKind {
+ pub fn is_type(self: FileSymbolKind) -> bool {
+ matches!(
+ self,
+ FileSymbolKind::Struct
+ | FileSymbolKind::Enum
+ | FileSymbolKind::Trait
+ | FileSymbolKind::TypeAlias
+ | FileSymbolKind::Union
+ )
+ }
+}
+
+/// Represents an outstanding module that the symbol collector must collect symbols from.
+struct SymbolCollectorWork {
+ module_id: ModuleId,
+ parent: Option<DefWithBodyId>,
+}
+
+pub struct SymbolCollector<'a> {
+ db: &'a dyn HirDatabase,
+ symbols: Vec<FileSymbol>,
+ work: Vec<SymbolCollectorWork>,
+ current_container_name: Option<SmolStr>,
+}
+
+/// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect
+/// all symbols that should be indexed for the given module.
+impl<'a> SymbolCollector<'a> {
+ pub fn collect(db: &dyn HirDatabase, module: Module) -> Vec<FileSymbol> {
+ let mut symbol_collector = SymbolCollector {
+ db,
+ symbols: Default::default(),
+ current_container_name: None,
+ // The initial work is the root module we're collecting, additional work will
+ // be populated as we traverse the module's definitions.
+ work: vec![SymbolCollectorWork { module_id: module.into(), parent: None }],
+ };
+
+ while let Some(work) = symbol_collector.work.pop() {
+ symbol_collector.do_work(work);
+ }
+
+ symbol_collector.symbols
+ }
+
+ fn do_work(&mut self, work: SymbolCollectorWork) {
+ self.db.unwind_if_cancelled();
+
+ let parent_name = work.parent.and_then(|id| self.def_with_body_id_name(id));
+ self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id));
+ }
+
+ fn collect_from_module(&mut self, module_id: ModuleId) {
+ let def_map = module_id.def_map(self.db.upcast());
+ let scope = &def_map[module_id.local_id].scope;
+
+ for module_def_id in scope.declarations() {
+ match module_def_id {
+ ModuleDefId::ModuleId(id) => self.push_module(id),
+ ModuleDefId::FunctionId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Function);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::AdtId(AdtId::StructId(id)) => {
+ self.push_decl(id, FileSymbolKind::Struct)
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, FileSymbolKind::Enum),
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, FileSymbolKind::Union),
+ ModuleDefId::ConstId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Const);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::StaticId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Static);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::TraitId(id) => {
+ self.push_decl(id, FileSymbolKind::Trait);
+ self.collect_from_trait(id);
+ }
+ ModuleDefId::TypeAliasId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::TypeAlias);
+ }
+ ModuleDefId::MacroId(id) => match id {
+ MacroId::Macro2Id(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::MacroRulesId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::ProcMacroId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ },
+ // Don't index these.
+ ModuleDefId::BuiltinType(_) => {}
+ ModuleDefId::EnumVariantId(_) => {}
+ }
+ }
+
+ for impl_id in scope.impls() {
+ self.collect_from_impl(impl_id);
+ }
+
+ for const_id in scope.unnamed_consts() {
+ self.collect_from_body(const_id);
+ }
+
+ for (_, id) in scope.legacy_macros() {
+ for &id in id {
+ if id.module(self.db.upcast()) == module_id {
+ match id {
+ MacroId::Macro2Id(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::MacroRulesId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::ProcMacroId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_from_body(&mut self, body_id: impl Into<DefWithBodyId>) {
+ let body_id = body_id.into();
+ let body = self.db.body(body_id);
+
+ // Descend into the blocks and enqueue collection of all modules within.
+ for (_, def_map) in body.blocks(self.db.upcast()) {
+ for (id, _) in def_map.modules() {
+ self.work.push(SymbolCollectorWork {
+ module_id: def_map.module_id(id),
+ parent: Some(body_id),
+ });
+ }
+ }
+ }
+
+ fn collect_from_impl(&mut self, impl_id: ImplId) {
+ let impl_data = self.db.impl_data(impl_id);
+ for &assoc_item_id in &impl_data.items {
+ self.push_assoc_item(assoc_item_id)
+ }
+ }
+
+ fn collect_from_trait(&mut self, trait_id: TraitId) {
+ let trait_data = self.db.trait_data(trait_id);
+ self.with_container_name(trait_data.name.as_text(), |s| {
+ for &(_, assoc_item_id) in &trait_data.items {
+ s.push_assoc_item(assoc_item_id);
+ }
+ });
+ }
+
+ fn with_container_name(&mut self, container_name: Option<SmolStr>, f: impl FnOnce(&mut Self)) {
+ if let Some(container_name) = container_name {
+ let prev = self.current_container_name.replace(container_name);
+ f(self);
+ self.current_container_name = prev;
+ } else {
+ f(self);
+ }
+ }
+
+ fn current_container_name(&self) -> Option<SmolStr> {
+ self.current_container_name.clone()
+ }
+
+ fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> {
+ match body_id {
+ DefWithBodyId::FunctionId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ DefWithBodyId::StaticId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ DefWithBodyId::ConstId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ }
+ }
+
+ fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
+ match assoc_item_id {
+ AssocItemId::FunctionId(id) => self.push_decl_assoc(id, FileSymbolKind::Function),
+ AssocItemId::ConstId(id) => self.push_decl_assoc(id, FileSymbolKind::Const),
+ AssocItemId::TypeAliasId(id) => self.push_decl_assoc(id, FileSymbolKind::TypeAlias),
+ }
+ }
+
+ fn push_decl_assoc<L, T>(&mut self, id: L, kind: FileSymbolKind)
+ where
+ L: Lookup<Data = AssocItemLoc<T>>,
+ T: ItemTreeNode,
+ <T as ItemTreeNode>::Source: HasName,
+ {
+ fn container_name(db: &dyn HirDatabase, container: ItemContainerId) -> Option<SmolStr> {
+ match container {
+ ItemContainerId::ModuleId(module_id) => {
+ let module = Module::from(module_id);
+ module.name(db).and_then(|name| name.as_text())
+ }
+ ItemContainerId::TraitId(trait_id) => {
+ let trait_data = db.trait_data(trait_id);
+ trait_data.name.as_text()
+ }
+ ItemContainerId::ImplId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+ }
+
+ self.push_file_symbol(|s| {
+ let loc = id.lookup(s.db.upcast());
+ let source = loc.source(s.db.upcast());
+ let name_node = source.value.name()?;
+ let container_name =
+ container_name(s.db, loc.container).or_else(|| s.current_container_name());
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind,
+ container_name,
+ loc: DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(source.value.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_decl<L>(&mut self, id: L, kind: FileSymbolKind)
+ where
+ L: Lookup,
+ <L as Lookup>::Data: HasSource,
+ <<L as Lookup>::Data as HasSource>::Value: HasName,
+ {
+ self.push_file_symbol(|s| {
+ let loc = id.lookup(s.db.upcast());
+ let source = loc.source(s.db.upcast());
+ let name_node = source.value.name()?;
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(source.value.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_module(&mut self, module_id: ModuleId) {
+ self.push_file_symbol(|s| {
+ let def_map = module_id.def_map(s.db.upcast());
+ let module_data = &def_map[module_id.local_id];
+ let declaration = module_data.origin.declaration()?;
+ let module = declaration.to_node(s.db.upcast());
+ let name_node = module.name()?;
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind: FileSymbolKind::Module,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation {
+ hir_file_id: declaration.file_id,
+ ptr: SyntaxNodePtr::new(module.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_file_symbol(&mut self, f: impl FnOnce(&Self) -> Option<FileSymbol>) {
+ if let Some(file_symbol) = f(self) {
+ self.symbols.push(file_symbol);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
new file mode 100644
index 000000000..fca09d384
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
@@ -0,0 +1,31 @@
+[package]
+name = "ide-assists"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+
+itertools = "0.10.3"
+either = "1.7.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+expect-test = "1.4.0"
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
new file mode 100644
index 000000000..d4d148c77
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
@@ -0,0 +1,16 @@
+//! Settings for tweaking assists.
+//!
+//! The fun thing here is `SnippetCap` -- this type can only be created in this
+//! module, and we use it to statically check that we only produce snippet
+//! assists if we are allowed to.
+
+use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
+
+use crate::AssistKind;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct AssistConfig {
+    pub snippet_cap: Option<SnippetCap>,
+    pub allowed: Option<Vec<AssistKind>>,
+    pub insert_use: InsertUseConfig,
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
new file mode 100644
index 000000000..f9b426614
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
@@ -0,0 +1,347 @@
+//! See [`AssistContext`].
+
+use std::mem;
+
+use hir::Semantics;
+use ide_db::{
+ base_db::{AnchoredPathBuf, FileId, FileRange},
+ SnippetCap,
+};
+use ide_db::{
+ label::Label,
+ source_change::{FileSystemEdit, SourceChange},
+ RootDatabase,
+};
+use syntax::{
+ algo::{self, find_node_at_offset, find_node_at_range},
+ AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr,
+ SyntaxToken, TextRange, TextSize, TokenAtOffset,
+};
+use text_edit::{TextEdit, TextEditBuilder};
+
+use crate::{
+ assist_config::AssistConfig, Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel,
+};
+
+/// `AssistContext` allows to apply an assist or check if it could be applied.
+///
+/// Assists use a somewhat over-engineered approach, given the current needs.
+/// The assists workflow consists of two phases. In the first phase, a user asks
+/// for the list of available assists. In the second phase, the user picks a
+/// particular assist and it gets applied.
+///
+/// There are two peculiarities here:
+///
+/// * first, we ideally avoid computing more things than necessary to answer "is
+///   assist applicable" in the first phase.
+/// * second, when we are applying an assist, we don't have a guarantee that there
+///   weren't any changes between the point when user asked for assists and when
+///   they applied a particular assist. So, when applying an assist, we need to do
+///   all the checks from scratch.
+///
+/// To avoid repeating the same code twice for both "check" and "apply"
+/// functions, we use an approach reminiscent of that of Django's function based
+/// views dealing with forms. Each assist receives a runtime parameter,
+/// `resolve`. It first checks if an edit is applicable (potentially computing
+/// info required to compute the actual edit). If it is applicable, and
+/// `resolve` is `true`, it then computes the actual edit.
+///
+/// So, to implement the original assists workflow, we can first apply each edit
+/// with `resolve = false`, and then apply the selected edit again, with
+/// `resolve = true` this time.
+///
+/// Note, however, that we don't actually use such two-phase logic at the
+/// moment, because the LSP API is pretty awkward in this place, and it's much
+/// easier to just compute the edit eagerly :-)
+pub(crate) struct AssistContext<'a> {
+    pub(crate) config: &'a AssistConfig,
+    pub(crate) sema: Semantics<'a, RootDatabase>,
+    frange: FileRange,
+    trimmed_range: TextRange,
+    source_file: SourceFile,
+}
+
+impl<'a> AssistContext<'a> {
+ pub(crate) fn new(
+ sema: Semantics<'a, RootDatabase>,
+ config: &'a AssistConfig,
+ frange: FileRange,
+ ) -> AssistContext<'a> {
+ let source_file = sema.parse(frange.file_id);
+
+ let start = frange.range.start();
+ let end = frange.range.end();
+ let left = source_file.syntax().token_at_offset(start);
+ let right = source_file.syntax().token_at_offset(end);
+ let left =
+ left.right_biased().and_then(|t| algo::skip_whitespace_token(t, Direction::Next));
+ let right =
+ right.left_biased().and_then(|t| algo::skip_whitespace_token(t, Direction::Prev));
+ let left = left.map(|t| t.text_range().start().clamp(start, end));
+ let right = right.map(|t| t.text_range().end().clamp(start, end));
+
+ let trimmed_range = match (left, right) {
+ (Some(left), Some(right)) if left <= right => TextRange::new(left, right),
+ // Selection solely consists of whitespace so just fall back to the original
+ _ => frange.range,
+ };
+
+ AssistContext { config, sema, frange, source_file, trimmed_range }
+ }
+
+ pub(crate) fn db(&self) -> &RootDatabase {
+ self.sema.db
+ }
+
+ // NB, this ignores active selection.
+ pub(crate) fn offset(&self) -> TextSize {
+ self.frange.range.start()
+ }
+
+ pub(crate) fn file_id(&self) -> FileId {
+ self.frange.file_id
+ }
+
+ pub(crate) fn has_empty_selection(&self) -> bool {
+ self.trimmed_range.is_empty()
+ }
+
+ /// Returns the selected range trimmed for whitespace tokens, that is the range will be snapped
+ /// to the nearest enclosed token.
+ pub(crate) fn selection_trimmed(&self) -> TextRange {
+ self.trimmed_range
+ }
+
+ pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
+ self.source_file.syntax().token_at_offset(self.offset())
+ }
+ pub(crate) fn find_token_syntax_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
+ self.token_at_offset().find(|it| it.kind() == kind)
+ }
+ pub(crate) fn find_token_at_offset<T: AstToken>(&self) -> Option<T> {
+ self.token_at_offset().find_map(T::cast)
+ }
+ pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> {
+ find_node_at_offset(self.source_file.syntax(), self.offset())
+ }
+ pub(crate) fn find_node_at_range<N: AstNode>(&self) -> Option<N> {
+ find_node_at_range(self.source_file.syntax(), self.trimmed_range)
+ }
+ pub(crate) fn find_node_at_offset_with_descend<N: AstNode>(&self) -> Option<N> {
+ self.sema.find_node_at_offset_with_descend(self.source_file.syntax(), self.offset())
+ }
+ /// Returns the element covered by the selection range, this excludes trailing whitespace in the selection.
+ pub(crate) fn covering_element(&self) -> SyntaxElement {
+ self.source_file.syntax().covering_element(self.selection_trimmed())
+ }
+}
+
+pub(crate) struct Assists {
+ file: FileId,
+ resolve: AssistResolveStrategy,
+ buf: Vec<Assist>,
+ allowed: Option<Vec<AssistKind>>,
+}
+
+impl Assists {
+ pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists {
+ Assists {
+ resolve,
+ file: ctx.frange.file_id,
+ buf: Vec::new(),
+ allowed: ctx.config.allowed.clone(),
+ }
+ }
+
+ pub(crate) fn finish(mut self) -> Vec<Assist> {
+ self.buf.sort_by_key(|assist| assist.target.len());
+ self.buf
+ }
+
+ pub(crate) fn add(
+ &mut self,
+ id: AssistId,
+ label: impl Into<String>,
+ target: TextRange,
+ f: impl FnOnce(&mut AssistBuilder),
+ ) -> Option<()> {
+ let mut f = Some(f);
+ self.add_impl(None, id, label.into(), target, &mut |it| f.take().unwrap()(it))
+ }
+
+ pub(crate) fn add_group(
+ &mut self,
+ group: &GroupLabel,
+ id: AssistId,
+ label: impl Into<String>,
+ target: TextRange,
+ f: impl FnOnce(&mut AssistBuilder),
+ ) -> Option<()> {
+ let mut f = Some(f);
+ self.add_impl(Some(group), id, label.into(), target, &mut |it| f.take().unwrap()(it))
+ }
+
+ fn add_impl(
+ &mut self,
+ group: Option<&GroupLabel>,
+ id: AssistId,
+ label: String,
+ target: TextRange,
+ f: &mut dyn FnMut(&mut AssistBuilder),
+ ) -> Option<()> {
+ if !self.is_allowed(&id) {
+ return None;
+ }
+
+ let mut trigger_signature_help = false;
+ let source_change = if self.resolve.should_resolve(&id) {
+ let mut builder = AssistBuilder::new(self.file);
+ f(&mut builder);
+ trigger_signature_help = builder.trigger_signature_help;
+ Some(builder.finish())
+ } else {
+ None
+ };
+
+ let label = Label::new(label);
+ let group = group.cloned();
+ self.buf.push(Assist { id, label, group, target, source_change, trigger_signature_help });
+ Some(())
+ }
+
+ fn is_allowed(&self, id: &AssistId) -> bool {
+ match &self.allowed {
+ Some(allowed) => allowed.iter().any(|kind| kind.contains(id.1)),
+ None => true,
+ }
+ }
+}
+
+pub(crate) struct AssistBuilder {
+ edit: TextEditBuilder,
+ file_id: FileId,
+ source_change: SourceChange,
+ trigger_signature_help: bool,
+
+ /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin.
+ mutated_tree: Option<TreeMutator>,
+}
+
+pub(crate) struct TreeMutator {
+ immutable: SyntaxNode,
+ mutable_clone: SyntaxNode,
+}
+
+impl TreeMutator {
+ pub(crate) fn new(immutable: &SyntaxNode) -> TreeMutator {
+ let immutable = immutable.ancestors().last().unwrap();
+ let mutable_clone = immutable.clone_for_update();
+ TreeMutator { immutable, mutable_clone }
+ }
+
+ pub(crate) fn make_mut<N: AstNode>(&self, node: &N) -> N {
+ N::cast(self.make_syntax_mut(node.syntax())).unwrap()
+ }
+
+ pub(crate) fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode {
+ let ptr = SyntaxNodePtr::new(node);
+ ptr.to_node(&self.mutable_clone)
+ }
+}
+
+impl AssistBuilder {
+    pub(crate) fn new(file_id: FileId) -> AssistBuilder {
+        AssistBuilder {
+            edit: TextEdit::builder(),
+            file_id,
+            source_change: SourceChange::default(),
+            trigger_signature_help: false,
+            mutated_tree: None,
+        }
+    }
+
+    pub(crate) fn edit_file(&mut self, file_id: FileId) {
+        self.commit();
+        self.file_id = file_id;
+    }
+
+    fn commit(&mut self) {
+        if let Some(tm) = self.mutated_tree.take() {
+            algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
+        }
+
+        let edit = mem::take(&mut self.edit).finish();
+        if !edit.is_empty() {
+            self.source_change.insert_source_edit(self.file_id, edit);
+        }
+    }
+
+    pub(crate) fn make_mut<N: AstNode>(&mut self, node: N) -> N {
+        self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
+    }
+    /// Returns a copy of the `node`, suitable for mutation.
+    ///
+    /// Syntax trees in rust-analyzer are typically immutable, and mutating
+    /// operations panic at runtime. However, it is possible to make a copy of
+    /// the tree and mutate the copy freely. Mutation is based on interior
+    /// mutability, and different nodes in the same tree see the same mutations.
+    ///
+    /// The typical pattern for an assist is to find specific nodes in the read
+    /// phase, and then get their mutable counterparts using `make_mut` in the
+    /// mutable state.
+    pub(crate) fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
+        self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)
+    }
+
+    /// Remove specified `range` of text.
+    pub(crate) fn delete(&mut self, range: TextRange) {
+        self.edit.delete(range)
+    }
+    /// Append specified `text` at the given `offset`
+    pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
+        self.edit.insert(offset, text.into())
+    }
+    /// Append specified `snippet` at the given `offset`
+    pub(crate) fn insert_snippet(
+        &mut self,
+        _cap: SnippetCap,
+        offset: TextSize,
+        snippet: impl Into<String>,
+    ) {
+        self.source_change.is_snippet = true;
+        self.insert(offset, snippet);
+    }
+    /// Replaces specified `range` of text with a given string.
+    pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
+        self.edit.replace(range, replace_with.into())
+    }
+    /// Replaces specified `range` of text with a given `snippet`.
+    pub(crate) fn replace_snippet(
+        &mut self,
+        _cap: SnippetCap,
+        range: TextRange,
+        snippet: impl Into<String>,
+    ) {
+        self.source_change.is_snippet = true;
+        self.replace(range, snippet);
+    }
+    pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
+        algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
+    }
+    pub(crate) fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into<String>) {
+        let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() };
+        self.source_change.push_file_system_edit(file_system_edit);
+    }
+    pub(crate) fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) {
+        let file_system_edit = FileSystemEdit::MoveFile { src, dst };
+        self.source_change.push_file_system_edit(file_system_edit);
+    }
+    pub(crate) fn trigger_signature_help(&mut self) {
+        self.trigger_signature_help = true;
+    }
+
+    fn finish(mut self) -> SourceChange {
+        self.commit();
+        mem::take(&mut self.source_change)
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
new file mode 100644
index 000000000..bfa9759ec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
@@ -0,0 +1,325 @@
+use hir::HirDisplay;
+use ide_db::syntax_helpers::node_ext::walk_ty;
+use syntax::ast::{self, AstNode, LetStmt, Param};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_explicit_type
+//
+// Specify type for a let binding.
+//
+// ```
+// fn main() {
+// let x$0 = 92;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let x: i32 = 92;
+// }
+// ```
+pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (ascribed_ty, expr, pat) = if let Some(let_stmt) = ctx.find_node_at_offset::<LetStmt>() {
+ let cursor_in_range = {
+ let eq_range = let_stmt.eq_token()?.text_range();
+ ctx.offset() < eq_range.start()
+ };
+ if !cursor_in_range {
+ cov_mark::hit!(add_explicit_type_not_applicable_if_cursor_after_equals);
+ return None;
+ }
+
+ (let_stmt.ty(), let_stmt.initializer(), let_stmt.pat()?)
+ } else if let Some(param) = ctx.find_node_at_offset::<Param>() {
+ if param.syntax().ancestors().nth(2).and_then(ast::ClosureExpr::cast).is_none() {
+ cov_mark::hit!(add_explicit_type_not_applicable_in_fn_param);
+ return None;
+ }
+ (param.ty(), None, param.pat()?)
+ } else {
+ return None;
+ };
+
+ let module = ctx.sema.scope(pat.syntax())?.module();
+ let pat_range = pat.syntax().text_range();
+
+ // Don't enable the assist if there is a type ascription without any placeholders
+ if let Some(ty) = &ascribed_ty {
+ let mut contains_infer_ty = false;
+ walk_ty(ty, &mut |ty| contains_infer_ty |= matches!(ty, ast::Type::InferType(_)));
+ if !contains_infer_ty {
+ cov_mark::hit!(add_explicit_type_not_applicable_if_ty_already_specified);
+ return None;
+ }
+ }
+
+ let ty = match (pat, expr) {
+ (ast::Pat::IdentPat(_), Some(expr)) => ctx.sema.type_of_expr(&expr)?,
+ (pat, _) => ctx.sema.type_of_pat(&pat)?,
+ }
+ .adjusted();
+
+ // Fully unresolved or unnameable types can't be annotated
+ if (ty.contains_unknown() && ty.type_arguments().count() == 0) || ty.is_closure() {
+ cov_mark::hit!(add_explicit_type_not_applicable_if_ty_not_inferred);
+ return None;
+ }
+
+ let inferred_type = ty.display_source_code(ctx.db(), module.into()).ok()?;
+ acc.add(
+ AssistId("add_explicit_type", AssistKind::RefactorRewrite),
+ format!("Insert explicit type `{}`", inferred_type),
+ pat_range,
+ |builder| match ascribed_ty {
+ Some(ascribed_ty) => {
+ builder.replace(ascribed_ty.syntax().text_range(), inferred_type);
+ }
+ None => {
+ builder.insert(pat_range.end(), format!(": {}", inferred_type));
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn add_explicit_type_target() {
+ check_assist_target(add_explicit_type, r#"fn f() { let a$0 = 1; }"#, "a");
+ }
+
+ #[test]
+ fn add_explicit_type_simple() {
+ check_assist(
+ add_explicit_type,
+ r#"fn f() { let a$0 = 1; }"#,
+ r#"fn f() { let a: i32 = 1; }"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_simple_on_infer_ty() {
+ check_assist(
+ add_explicit_type,
+ r#"fn f() { let a$0: _ = 1; }"#,
+ r#"fn f() { let a: i32 = 1; }"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_simple_nested_infer_ty() {
+ check_assist(
+ add_explicit_type,
+ r#"
+//- minicore: option
+fn f() {
+ let a$0: Option<_> = Option::Some(1);
+}
+"#,
+ r#"
+fn f() {
+ let a: Option<i32> = Option::Some(1);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_macro_call_expr() {
+ check_assist(
+ add_explicit_type,
+ r"macro_rules! v { () => {0u64} } fn f() { let a$0 = v!(); }",
+ r"macro_rules! v { () => {0u64} } fn f() { let a: u64 = v!(); }",
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_for_fully_unresolved() {
+ cov_mark::check!(add_explicit_type_not_applicable_if_ty_not_inferred);
+ check_assist_not_applicable(add_explicit_type, r#"fn f() { let a$0 = None; }"#);
+ }
+
+ #[test]
+ fn add_explicit_type_applicable_for_partially_unresolved() {
+ check_assist(
+ add_explicit_type,
+ r#"
+ struct Vec<T, V> { t: T, v: V }
+ impl<T> Vec<T, Vec<ZZZ, i32>> {
+ fn new() -> Self {
+ panic!()
+ }
+ }
+ fn f() { let a$0 = Vec::new(); }"#,
+ r#"
+ struct Vec<T, V> { t: T, v: V }
+ impl<T> Vec<T, Vec<ZZZ, i32>> {
+ fn new() -> Self {
+ panic!()
+ }
+ }
+ fn f() { let a: Vec<_, Vec<_, i32>> = Vec::new(); }"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_closure_expr() {
+ check_assist_not_applicable(add_explicit_type, r#"fn f() { let a$0 = || {}; }"#);
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_ty_already_specified() {
+ cov_mark::check!(add_explicit_type_not_applicable_if_ty_already_specified);
+ check_assist_not_applicable(add_explicit_type, r#"fn f() { let a$0: i32 = 1; }"#);
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_cursor_after_equals_of_let() {
+ cov_mark::check!(add_explicit_type_not_applicable_if_cursor_after_equals);
+ check_assist_not_applicable(
+ add_explicit_type,
+ r#"fn f() {let a =$0 match 1 {2 => 3, 3 => 5};}"#,
+ )
+ }
+
+ /// https://github.com/rust-lang/rust-analyzer/issues/2922
+ #[test]
+ fn regression_issue_2922() {
+ check_assist(
+ add_explicit_type,
+ r#"
+fn main() {
+ let $0v = [0.0; 2];
+}
+"#,
+ r#"
+fn main() {
+ let v: [f64; 2] = [0.0; 2];
+}
+"#,
+ );
+ // note: this may break later if we add more consteval. it just needs to be something that our
+ // consteval engine doesn't understand
+ check_assist_not_applicable(
+ add_explicit_type,
+ r#"
+//- minicore: option
+
+fn main() {
+ let $0l = [0.0; Some(2).unwrap()];
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn default_generics_should_not_be_added() {
+ check_assist(
+ add_explicit_type,
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let test$0 = Test { t: 23u8, k: 33 };
+}
+"#,
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let test: Test<i32> = Test { t: 23u8, k: 33 };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn type_should_be_added_after_pattern() {
+ // LetStmt = Attr* 'let' Pat (':' Type)? '=' initializer:Expr ';'
+ check_assist(
+ add_explicit_type,
+ r#"
+fn main() {
+ let $0test @ () = ();
+}
+"#,
+ r#"
+fn main() {
+ let test @ (): () = ();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_inserts_coercions() {
+ check_assist(
+ add_explicit_type,
+ r#"
+//- minicore: coerce_unsized
+fn f() {
+ let $0x: *const [_] = &[3];
+}
+"#,
+ r#"
+fn f() {
+ let x: *const [i32] = &[3];
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_fn_param() {
+ cov_mark::check!(add_explicit_type_not_applicable_in_fn_param);
+ check_assist_not_applicable(add_explicit_type, r#"fn f(x$0: ()) {}"#);
+ }
+
+ #[test]
+ fn add_explicit_type_ascribes_closure_param() {
+ check_assist(
+ add_explicit_type,
+ r#"
+fn f() {
+ |y$0| {
+ let x: i32 = y;
+ };
+}
+"#,
+ r#"
+fn f() {
+ |y: i32| {
+ let x: i32 = y;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_ascribes_closure_param_already_ascribed() {
+ check_assist(
+ add_explicit_type,
+ r#"
+//- minicore: option
+fn f() {
+ |mut y$0: Option<_>| {
+ y = Some(3);
+ };
+}
+"#,
+ r#"
+fn f() {
+ |mut y: Option<i32>| {
+ y = Some(3);
+ };
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs
new file mode 100644
index 000000000..001f1e8bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs
@@ -0,0 +1,164 @@
+use ide_db::syntax_helpers::node_ext::for_each_break_and_continue_expr;
+use syntax::{
+ ast::{self, AstNode, HasLoopBody},
+ T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_label_to_loop
+//
+// Adds a label to a loop.
+//
+// ```
+// fn main() {
+// loop$0 {
+// break;
+// continue;
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// 'l: loop {
+// break 'l;
+// continue 'l;
+// }
+// }
+// ```
+// Entry point for the `add_label_to_loop` assist.
+//
+// Applicable only when the cursor is on the `loop` keyword of an unlabeled
+// `loop` expression. Inserts the label `'l: ` before the keyword and appends
+// ` 'l` to every `break`/`continue` that targets this loop.
+pub(crate) fn add_label_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // Require the cursor to sit on the `loop` token itself.
+    let loop_kw = ctx.find_token_syntax_at_offset(T![loop])?;
+    let loop_expr = loop_kw.parent().and_then(ast::LoopExpr::cast)?;
+    // Not applicable if the loop is already labeled (see `do_not_add_label_if_exists` test).
+    if loop_expr.label().is_some() {
+        return None;
+    }
+
+    acc.add(
+        AssistId("add_label_to_loop", AssistKind::Generate),
+        "Add Label",
+        loop_expr.syntax().text_range(),
+        |builder| {
+            // Prefix the keyword: `loop` -> `'l: loop`.
+            builder.insert(loop_kw.text_range().start(), "'l: ");
+
+            let loop_body = loop_expr.loop_body().and_then(|it| it.stmt_list());
+            // Helper from ide_db; per the tests below it visits only the
+            // break/continue expressions belonging to THIS loop, leaving
+            // nested loops untouched — TODO confirm against its definition.
+            for_each_break_and_continue_expr(
+                loop_expr.label(),
+                loop_body,
+                &mut |expr| match expr {
+                    ast::Expr::BreakExpr(break_expr) => {
+                        if let Some(break_token) = break_expr.break_token() {
+                            // `break` -> `break 'l`
+                            builder.insert(break_token.text_range().end(), " 'l")
+                        }
+                    }
+                    ast::Expr::ContinueExpr(continue_expr) => {
+                        if let Some(continue_token) = continue_expr.continue_token() {
+                            // `continue` -> `continue 'l`
+                            builder.insert(continue_token.text_range().end(), " 'l")
+                        }
+                    }
+                    _ => {}
+                },
+            );
+        },
+    )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_label() {
+ check_assist(
+ add_label_to_loop,
+ r#"
+fn main() {
+ loop$0 {
+ break;
+ continue;
+ }
+}"#,
+ r#"
+fn main() {
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn add_label_to_outer_loop() {
+ check_assist(
+ add_label_to_loop,
+ r#"
+fn main() {
+ loop$0 {
+ break;
+ continue;
+ loop {
+ break;
+ continue;
+ }
+ }
+}"#,
+ r#"
+fn main() {
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ loop {
+ break;
+ continue;
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn add_label_to_inner_loop() {
+ check_assist(
+ add_label_to_loop,
+ r#"
+fn main() {
+ loop {
+ break;
+ continue;
+ loop$0 {
+ break;
+ continue;
+ }
+ }
+}"#,
+ r#"
+fn main() {
+ loop {
+ break;
+ continue;
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn do_not_add_label_if_exists() {
+ check_assist_not_applicable(
+ add_label_to_loop,
+ r#"
+fn main() {
+ 'l: loop$0 {
+ break 'l;
+ continue 'l;
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs
new file mode 100644
index 000000000..12213c845
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs
@@ -0,0 +1,229 @@
+use syntax::ast::{self, AstNode, HasGenericParams, HasName};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_lifetime_to_type
+//
+// Adds a new lifetime to a struct, enum or union.
+//
+// ```
+// struct Point {
+// x: &$0u32,
+// y: u32,
+// }
+// ```
+// ->
+// ```
+// struct Point<'a> {
+// x: &'a u32,
+// y: u32,
+// }
+// ```
+// Entry point for the `add_lifetime_to_type` assist.
+//
+// When the cursor is on a lifetime-less `&` type inside a struct/enum/union
+// that declares no lifetime parameters, adds a `'a` parameter to the ADT and
+// annotates every lifetime-less reference field with `'a`.
+pub(crate) fn add_lifetime_to_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // The cursor must be on a reference type that has no lifetime yet.
+    let ref_type_focused = ctx.find_node_at_offset::<ast::RefType>()?;
+    if ref_type_focused.lifetime().is_some() {
+        return None;
+    }
+
+    // Enclosing ADT (struct, enum, or union) that will receive the `'a` param.
+    let node = ctx.find_node_at_offset::<ast::Adt>()?;
+    // Bail if the ADT already declares any lifetime parameter — inserting a
+    // second hard-coded `'a` could clash with it.
+    let has_lifetime = node
+        .generic_param_list()
+        .map_or(false, |gen_list| gen_list.lifetime_params().next().is_some());
+
+    if has_lifetime {
+        return None;
+    }
+
+    // All lifetime-less `&` field types in the ADT; None means nothing to do.
+    let ref_types = fetch_borrowed_types(&node)?;
+    let target = node.syntax().text_range();
+
+    acc.add(
+        AssistId("add_lifetime_to_type", AssistKind::Generate),
+        "Add lifetime",
+        target,
+        |builder| {
+            match node.generic_param_list() {
+                // `Foo<T>` -> `Foo<'a, T>`: splice after the `<`.
+                Some(gen_param) => {
+                    if let Some(left_angle) = gen_param.l_angle_token() {
+                        builder.insert(left_angle.text_range().end(), "'a, ");
+                    }
+                }
+                // `Foo` -> `Foo<'a>`: append a fresh param list after the name.
+                None => {
+                    if let Some(name) = node.name() {
+                        builder.insert(name.syntax().text_range().end(), "<'a>");
+                    }
+                }
+            }
+
+            // Annotate every collected reference: `&T` -> `&'a T`.
+            for ref_type in ref_types {
+                if let Some(amp_token) = ref_type.amp_token() {
+                    builder.insert(amp_token.text_range().end(), "'a ");
+                }
+            }
+        },
+    )
+}
+
+/// Collects every lifetime-less reference type (`&T` without `'x`) appearing
+/// in the fields of the given ADT.
+///
+/// Returns `None` when the ADT has no such reference fields (also when a
+/// struct/enum variant/union has no field list at all), so the assist can
+/// treat "nothing to annotate" as not applicable.
+fn fetch_borrowed_types(node: &ast::Adt) -> Option<Vec<ast::RefType>> {
+    let ref_types: Vec<ast::RefType> = match node {
+        ast::Adt::Enum(enum_) => {
+            let variant_list = enum_.variant_list()?;
+            // Gather refs from every variant's field list (record or tuple).
+            variant_list
+                .variants()
+                .filter_map(|variant| {
+                    let field_list = variant.field_list()?;
+
+                    find_ref_types_from_field_list(&field_list)
+                })
+                .flatten()
+                .collect()
+        }
+        ast::Adt::Struct(strukt) => {
+            let field_list = strukt.field_list()?;
+            find_ref_types_from_field_list(&field_list)?
+        }
+        // NOTE(review): this arm re-implements the record-field filter from
+        // `find_ref_types_from_field_list` inline (unions only have record
+        // fields) — a refactor could share that helper.
+        ast::Adt::Union(un) => {
+            let record_field_list = un.record_field_list()?;
+            record_field_list
+                .fields()
+                .filter_map(|r_field| {
+                    if let ast::Type::RefType(ref_type) = r_field.ty()? {
+                        if ref_type.lifetime().is_none() {
+                            return Some(ref_type);
+                        }
+                    }
+
+                    None
+                })
+                .collect()
+        }
+    };
+
+    // Normalize "found nothing" to None for the caller's `?`.
+    if ref_types.is_empty() {
+        None
+    } else {
+        Some(ref_types)
+    }
+}
+
+/// Extracts all lifetime-less reference types from a single field list,
+/// handling both record (`{ a: &T }`) and tuple (`(&T)`) field syntax.
+///
+/// Returns `None` if the list contains no such references.
+// NOTE(review): the two match arms run an identical filter over different
+// field iterators; they differ only in the `fields()` item type, which is why
+// the closure is duplicated rather than shared.
+fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option<Vec<ast::RefType>> {
+    let ref_types: Vec<ast::RefType> = match field_list {
+        ast::FieldList::RecordFieldList(record_list) => record_list
+            .fields()
+            .filter_map(|f| {
+                // Keep only `&T` types that do not already carry a lifetime.
+                if let ast::Type::RefType(ref_type) = f.ty()? {
+                    if ref_type.lifetime().is_none() {
+                        return Some(ref_type);
+                    }
+                }
+
+                None
+            })
+            .collect(),
+        ast::FieldList::TupleFieldList(tuple_field_list) => tuple_field_list
+            .fields()
+            .filter_map(|f| {
+                if let ast::Type::RefType(ref_type) = f.ty()? {
+                    if ref_type.lifetime().is_none() {
+                        return Some(ref_type);
+                    }
+                }
+
+                None
+            })
+            .collect(),
+    };
+
+    // Normalize empty result to None so callers can chain with `?`.
+    if ref_types.is_empty() {
+        None
+    } else {
+        Some(ref_types)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_lifetime_to_struct() {
+ check_assist(
+ add_lifetime_to_type,
+ r#"struct Foo { a: &$0i32 }"#,
+ r#"struct Foo<'a> { a: &'a i32 }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"struct Foo { a: &$0i32, b: &usize }"#,
+ r#"struct Foo<'a> { a: &'a i32, b: &'a usize }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"struct Foo { a: &$0i32, b: usize }"#,
+ r#"struct Foo<'a> { a: &'a i32, b: usize }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"struct Foo<T> { a: &$0T, b: usize }"#,
+ r#"struct Foo<'a, T> { a: &'a T, b: usize }"#,
+ );
+
+ check_assist_not_applicable(add_lifetime_to_type, r#"struct Foo<'a> { a: &$0'a i32 }"#);
+ check_assist_not_applicable(add_lifetime_to_type, r#"struct Foo { a: &'a$0 i32 }"#);
+ }
+
+ #[test]
+ fn add_lifetime_to_enum() {
+ check_assist(
+ add_lifetime_to_type,
+ r#"enum Foo { Bar { a: i32 }, Other, Tuple(u32, &$0u32)}"#,
+ r#"enum Foo<'a> { Bar { a: i32 }, Other, Tuple(u32, &'a u32)}"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"enum Foo { Bar { a: &$0i32 }}"#,
+ r#"enum Foo<'a> { Bar { a: &'a i32 }}"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"enum Foo<T> { Bar { a: &$0i32, b: &T }}"#,
+ r#"enum Foo<'a, T> { Bar { a: &'a i32, b: &'a T }}"#,
+ );
+
+ check_assist_not_applicable(
+ add_lifetime_to_type,
+ r#"enum Foo<'a> { Bar { a: &$0'a i32 }}"#,
+ );
+ check_assist_not_applicable(add_lifetime_to_type, r#"enum Foo { Bar, $0Misc }"#);
+ }
+
+ #[test]
+ fn add_lifetime_to_union() {
+ check_assist(
+ add_lifetime_to_type,
+ r#"union Foo { a: &$0i32 }"#,
+ r#"union Foo<'a> { a: &'a i32 }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"union Foo { a: &$0i32, b: &usize }"#,
+ r#"union Foo<'a> { a: &'a i32, b: &'a usize }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"union Foo<T> { a: &$0T, b: usize }"#,
+ r#"union Foo<'a, T> { a: &'a T, b: usize }"#,
+ );
+
+ check_assist_not_applicable(add_lifetime_to_type, r#"struct Foo<'a> { a: &'a $0i32 }"#);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
new file mode 100644
index 000000000..c808c010c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -0,0 +1,1340 @@
+use hir::HasSource;
+use ide_db::{
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into, traits::resolve_target_trait,
+};
+use syntax::ast::{self, make, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::{
+ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, render_snippet,
+ Cursor, DefaultMethods,
+ },
+ AssistId, AssistKind,
+};
+
+// Assist: add_impl_missing_members
+//
+// Adds scaffold for required impl members.
+//
+// ```
+// trait Trait<T> {
+// type X;
+// fn foo(&self) -> T;
+// fn bar(&self) {}
+// }
+//
+// impl Trait<u32> for () {$0
+//
+// }
+// ```
+// ->
+// ```
+// trait Trait<T> {
+// type X;
+// fn foo(&self) -> T;
+// fn bar(&self) {}
+// }
+//
+// impl Trait<u32> for () {
+// $0type X;
+//
+// fn foo(&self) -> u32 {
+// todo!()
+// }
+// }
+// ```
+// Entry point for the `add_impl_missing_members` assist: thin wrapper that
+// runs the shared implementation with `DefaultMethods::No`, i.e. it inserts
+// stubs only for REQUIRED trait items missing from the impl.
+pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    add_missing_impl_members_inner(
+        acc,
+        ctx,
+        DefaultMethods::No,
+        "add_impl_missing_members",
+        "Implement missing members",
+    )
+}
+
+// Assist: add_impl_default_members
+//
+// Adds scaffold for overriding default impl members.
+//
+// ```
+// trait Trait {
+// type X;
+// fn foo(&self);
+// fn bar(&self) {}
+// }
+//
+// impl Trait for () {
+// type X = ();
+// fn foo(&self) {}$0
+// }
+// ```
+// ->
+// ```
+// trait Trait {
+// type X;
+// fn foo(&self);
+// fn bar(&self) {}
+// }
+//
+// impl Trait for () {
+// type X = ();
+// fn foo(&self) {}
+//
+// $0fn bar(&self) {}
+// }
+// ```
+// Entry point for the `add_impl_default_members` assist: thin wrapper that
+// runs the shared implementation with `DefaultMethods::Only`, i.e. it copies
+// in the trait's DEFAULT (provided) items so they can be overridden.
+pub(crate) fn add_missing_default_members(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
+    add_missing_impl_members_inner(
+        acc,
+        ctx,
+        DefaultMethods::Only,
+        "add_impl_default_members",
+        "Implement default members",
+    )
+}
+
+/// Shared implementation behind both `add_impl_missing_members` and
+/// `add_impl_default_members`.
+///
+/// `mode` selects which trait items count as "missing" (required vs default);
+/// `assist_id`/`label` parameterize the assist registration. Returns `None`
+/// when the cursor is not in a trait impl or nothing is missing.
+fn add_missing_impl_members_inner(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+    mode: DefaultMethods,
+    assist_id: &'static str,
+    label: &'static str,
+) -> Option<()> {
+    // Profiling span for this (potentially expensive) resolution-heavy path.
+    let _p = profile::span("add_missing_impl_members_inner");
+    let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
+    let target_scope = ctx.sema.scope(impl_def.syntax())?;
+    // Must be a trait impl; inherent impls are not applicable.
+    let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
+
+    // Items the trait declares but the impl lacks, filtered by `mode`.
+    let missing_items = filter_assoc_items(
+        &ctx.sema,
+        &ide_db::traits::get_missing_assoc_items(&ctx.sema, &impl_def),
+        mode,
+    );
+
+    if missing_items.is_empty() {
+        return None;
+    }
+
+    let target = impl_def.syntax().text_range();
+    acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |builder| {
+        let missing_items = missing_items
+            .into_iter()
+            .map(|it| {
+                // Items originating from macro expansions lose their original
+                // whitespace; re-insert spacing so the stub is readable (see
+                // the `test_from_macro` test for the resulting formatting).
+                if ctx.sema.hir_file_for(it.syntax()).is_macro() {
+                    if let Some(it) = ast::AssocItem::cast(insert_ws_into(it.syntax().clone())) {
+                        return it;
+                    }
+                }
+                // Mutable copy so it can be spliced into the new impl tree.
+                it.clone_for_update()
+            })
+            .collect();
+        let (new_impl_def, first_new_item) = add_trait_assoc_items_to_impl(
+            &ctx.sema,
+            missing_items,
+            trait_,
+            impl_def.clone(),
+            target_scope,
+        );
+        match ctx.config.snippet_cap {
+            // Client without snippet support: plain text replacement.
+            None => builder.replace(target, new_impl_def.to_string()),
+            Some(cap) => {
+                // Default cursor position: before the first inserted item.
+                let mut cursor = Cursor::Before(first_new_item.syntax());
+                let placeholder;
+                if let DefaultMethods::No = mode {
+                    if let ast::AssocItem::Fn(func) = &first_new_item {
+                        // If we could not generate a real body, the stub
+                        // contains `todo!()`; turn that macro call into the
+                        // snippet placeholder (`${0:todo!()}`) instead.
+                        if try_gen_trait_body(ctx, func, &trait_, &impl_def).is_none() {
+                            if let Some(m) =
+                                func.syntax().descendants().find_map(ast::MacroCall::cast)
+                            {
+                                if m.syntax().text() == "todo!()" {
+                                    placeholder = m;
+                                    cursor = Cursor::Replace(placeholder.syntax());
+                                }
+                            }
+                        }
+                    }
+                }
+                builder.replace_snippet(
+                    cap,
+                    target,
+                    render_snippet(cap, new_impl_def.syntax(), cursor),
+                )
+            }
+        };
+    })
+}
+
+/// Attempts to fill `func`'s body with a real implementation for well-known
+/// traits (e.g. the `PartialEq::ne` / `Default::default` bodies seen in the
+/// tests) instead of a `todo!()` stub.
+///
+/// Returns `None` when the self type is not an ADT with source available, or
+/// when `gen_trait_fn_body` has no generator for this trait.
+fn try_gen_trait_body(
+    ctx: &AssistContext<'_>,
+    func: &ast::Fn,
+    trait_: &hir::Trait,
+    impl_def: &ast::Impl,
+) -> Option<()> {
+    // Unqualified path built from the trait's name, used by the generator to
+    // identify which trait it is dealing with.
+    let trait_path = make::ext::ident_path(&trait_.name(ctx.db()).to_string());
+    // Resolve `impl ... for <SelfTy>` to the ADT definition it names.
+    let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?;
+    let adt = hir_ty.as_adt()?.source(ctx.db())?;
+    gen_trait_fn_body(func, &trait_path, &adt.value)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_add_missing_impl_members() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+
+ fn foo(&self);
+ fn bar(&self);
+ fn baz(&self);
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+$0
+}"#,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+
+ fn foo(&self);
+ fn bar(&self);
+ fn baz(&self);
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+
+ $0type Output;
+
+ const CONST: usize = 42;
+
+ fn foo(&self) {
+ todo!()
+ }
+
+ fn baz(&self) {
+ todo!()
+ }
+
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_copied_overriden_members() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ fn foo(&self);
+ fn bar(&self) -> bool { true }
+ fn baz(&self) -> u32 { 42 }
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+$0
+}"#,
+ r#"
+trait Foo {
+ fn foo(&self);
+ fn bar(&self) -> bool { true }
+ fn baz(&self) -> u32 { 42 }
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_empty_impl_def() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_impl_def_without_braces() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S$0"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn fill_in_type_params_1() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl Foo<u32> for S { $0 }"#,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl Foo<u32> for S {
+ fn foo(&self, t: u32) -> &u32 {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn fill_in_type_params_2() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl<U> Foo<U> for S { $0 }"#,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl<U> Foo<U> for S {
+ fn foo(&self, t: U) -> &U {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_cursor_after_empty_impl_def() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {}$0"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_qualify_path_1() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar;
+ trait Foo { fn foo(&self, bar: Bar); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar;
+ trait Foo { fn foo(&self, bar: Bar); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_2() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub trait Foo { fn foo(&self, bar: Bar); }
+ }
+}
+
+use foo::bar;
+
+struct S;
+impl bar::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub trait Foo { fn foo(&self, bar: Bar); }
+ }
+}
+
+use foo::bar;
+
+struct S;
+impl bar::Foo for S {
+ fn foo(&self, bar: bar::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_generic() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ trait Foo { fn foo(&self, bar: Bar<u32>); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ trait Foo { fn foo(&self, bar: Bar<u32>); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<u32>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_and_substitute_param() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ trait Foo<T> { fn foo(&self, bar: Bar<T>); }
+}
+struct S;
+impl foo::Foo<u32> for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ trait Foo<T> { fn foo(&self, bar: Bar<T>); }
+}
+struct S;
+impl foo::Foo<u32> for S {
+ fn foo(&self, bar: foo::Bar<u32>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_substitute_param_no_qualify() {
+ // when substituting params, the substituted param should not be qualified!
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ trait Foo<T> { fn foo(&self, bar: T); }
+ pub struct Param;
+}
+struct Param;
+struct S;
+impl foo::Foo<Param> for S { $0 }"#,
+ r#"
+mod foo {
+ trait Foo<T> { fn foo(&self, bar: T); }
+ pub struct Param;
+}
+struct Param;
+struct S;
+impl foo::Foo<Param> for S {
+ fn foo(&self, bar: Param) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_associated_item() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ impl Bar<T> { type Assoc = u32; }
+ trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ impl Bar<T> { type Assoc = u32; }
+ trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<u32>::Assoc) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_nested() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub struct Baz;
+ trait Foo { fn foo(&self, bar: Bar<Baz>); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub struct Baz;
+ trait Foo { fn foo(&self, bar: Bar<Baz>); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<foo::Baz>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_fn_trait_notation() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub trait Fn<Args> { type Output; }
+ trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub trait Fn<Args> { type Output; }
+ trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: dyn Fn(u32) -> i32) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_empty_trait() {
+ check_assist_not_applicable(
+ add_missing_impl_members,
+ r#"
+trait Foo;
+struct S;
+impl Foo for S { $0 }"#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_unnamed_trait_members_and_default_methods() {
+ check_assist_not_applicable(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ fn (arg: u32);
+ fn valid(some: u32) -> bool { false }
+}
+struct S;
+impl Foo for S { $0 }"#,
+ )
+ }
+
+ #[test]
+ fn test_with_docstring_and_attrs() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+#[doc(alias = "test alias")]
+trait Foo {
+ /// doc string
+ type Output;
+
+ #[must_use]
+ fn foo(&self);
+}
+struct S;
+impl Foo for S {}$0"#,
+ r#"
+#[doc(alias = "test alias")]
+trait Foo {
+ /// doc string
+ type Output;
+
+ #[must_use]
+ fn foo(&self);
+}
+struct S;
+impl Foo for S {
+ $0type Output;
+
+ fn foo(&self) {
+ todo!()
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_default_methods() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+
+ fn valid(some: u32) -> bool { false }
+ fn foo(some: u32) -> bool;
+}
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+
+ fn valid(some: u32) -> bool { false }
+ fn foo(some: u32) -> bool;
+}
+struct S;
+impl Foo for S {
+ $0fn valid(some: u32) -> bool { false }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_generic_single_default_parameter() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T = Self> {
+ fn bar(&self, other: &T);
+}
+
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo<T = Self> {
+ fn bar(&self, other: &T);
+}
+
+struct S;
+impl Foo for S {
+ fn bar(&self, other: &Self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_generic_default_parameter_is_second() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T1, T2 = Self> {
+ fn bar(&self, this: &T1, that: &T2);
+}
+
+struct S<T>;
+impl Foo<T> for S<T> { $0 }"#,
+ r#"
+trait Foo<T1, T2 = Self> {
+ fn bar(&self, this: &T1, that: &T2);
+}
+
+struct S<T>;
+impl Foo<T> for S<T> {
+ fn bar(&self, this: &T, that: &Self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_type_bounds_are_removed() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ type Ty: Copy + 'static;
+}
+
+impl Tr for ()$0 {
+}"#,
+ r#"
+trait Tr {
+ type Ty: Copy + 'static;
+}
+
+impl Tr for () {
+ $0type Ty;
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_whitespace_fixup_preserves_bad_tokens() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for ()$0 {
+ +++
+}"#,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for () {
+ fn foo() {
+ ${0:todo!()}
+ }
+ +++
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_whitespace_fixup_preserves_comments() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for ()$0 {
+ // very important
+}"#,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for () {
+ fn foo() {
+ ${0:todo!()}
+ }
+ // very important
+}"#,
+ )
+ }
+
+ #[test]
+ fn weird_path() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Test {
+ fn foo(&self, x: crate)
+}
+impl Test for () {
+ $0
+}
+"#,
+ r#"
+trait Test {
+ fn foo(&self, x: crate)
+}
+impl Test for () {
+ fn foo(&self, x: crate) {
+ ${0:todo!()}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn missing_generic_type() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<BAR> {
+ fn foo(&self, bar: BAR);
+}
+impl Foo for () {
+ $0
+}
+"#,
+ r#"
+trait Foo<BAR> {
+ fn foo(&self, bar: BAR);
+}
+impl Foo for () {
+ fn foo(&self, bar: BAR) {
+ ${0:todo!()}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn does_not_requalify_self_as_crate() {
+ check_assist(
+ add_missing_default_members,
+ r"
+struct Wrapper<T>(T);
+
+trait T {
+ fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+
+impl T for () {
+ $0
+}
+",
+ r"
+struct Wrapper<T>(T);
+
+trait T {
+ fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+
+impl T for () {
+ $0fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_default_body_generation() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+//- minicore: default
+struct Foo(usize);
+
+impl Default for Foo {
+ $0
+}
+"#,
+ r#"
+struct Foo(usize);
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self(Default::default())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_from_macro() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+macro_rules! foo {
+ () => {
+ trait FooB {
+ fn foo<'lt>(&'lt self) {}
+ }
+ }
+}
+foo!();
+struct Foo(usize);
+
+impl FooB for Foo {
+ $0
+}
+"#,
+ r#"
+macro_rules! foo {
+ () => {
+ trait FooB {
+ fn foo<'lt>(&'lt self) {}
+ }
+ }
+}
+foo!();
+struct Foo(usize);
+
+impl FooB for Foo {
+ $0fn foo< 'lt>(& 'lt self){}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_type_when_trait_with_same_name_in_scope() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Foo {}
+
+pub trait Types {
+ type Foo;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl<T: Types> Behavior<T> for Impl { $0 }"#,
+ r#"
+pub trait Foo {}
+
+pub trait Types {
+ type Foo;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl<T: Types> Behavior<T> for Impl {
+ fn reproduce(&self, foo: <T as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_qualified() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for std::string::String {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<std::string::String> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for std::string::String {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<std::string::String> for Impl {
+ fn reproduce(&self, foo: <std::string::String as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option_ambiguous() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Foo = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: <T as Types2>::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Foo = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: <T as Types2>::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types2>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Bar;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Bar;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types2>::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option_foreign() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod bar {
+ pub trait Types2 {
+ type Bar;
+ }
+}
+
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl bar::Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + bar::Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+mod bar {
+ pub trait Types2 {
+ type Bar;
+ }
+}
+
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl bar::Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + bar::Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as bar::Types2>::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_transform_path_in_path_expr() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+pub trait Const {
+ const FOO: u32;
+}
+
+pub trait Trait<T: Const> {
+ fn foo() -> bool {
+ match T::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}
+
+impl Const for u32 {
+ const FOO: u32 = 1;
+}
+
+struct Impl;
+
+impl Trait<u32> for Impl { $0 }"#,
+ r#"
+pub trait Const {
+ const FOO: u32;
+}
+
+pub trait Trait<T: Const> {
+ fn foo() -> bool {
+ match T::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}
+
+impl Const for u32 {
+ const FOO: u32 = 1;
+}
+
+struct Impl;
+
+impl Trait<u32> for Impl {
+ $0fn foo() -> bool {
+ match <u32 as Const>::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_default_partial_eq() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+//- minicore: eq
+struct SomeStruct {
+ data: usize,
+ field: (usize, usize),
+}
+impl PartialEq for SomeStruct {$0}
+"#,
+ r#"
+struct SomeStruct {
+ data: usize,
+ field: (usize, usize),
+}
+impl PartialEq for SomeStruct {
+ $0fn ne(&self, other: &Self) -> bool {
+ !self.eq(other)
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
new file mode 100644
index 000000000..b16f6fe03
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -0,0 +1,1709 @@
+use std::iter::{self, Peekable};
+
+use either::Either;
+use hir::{Adt, Crate, HasAttrs, HasSource, ModuleDef, Semantics};
+use ide_db::RootDatabase;
+use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
+use itertools::Itertools;
+use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat};
+
+use crate::{
+ utils::{self, render_snippet, Cursor},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: add_missing_match_arms
+//
+// Adds missing clauses to a `match` expression.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// $0
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// $0Action::Move { distance } => todo!(),
+// Action::Stop => todo!(),
+// }
+// }
+// ```
+/// Assist handler: computes the enum variants (or `bool` / tuple-of-enum
+/// combinations) not yet covered by the `match` under the cursor and adds
+/// `=> todo!()` arms for them, plus a wildcard arm when hidden or
+/// non-exhaustive variants make the match impossible to cover explicitly.
+///
+/// Returns `None` when the assist is not applicable at the cursor position.
+pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let match_expr = ctx.find_node_at_offset_with_descend::<ast::MatchExpr>()?;
+    let match_arm_list = match_expr.match_arm_list()?;
+    let target_range = ctx.sema.original_range(match_expr.syntax()).range;
+
+    // Unless the cursor sits on a trivial arm list (empty body, end of the
+    // last arm, or a wildcard arm of this match), reject selections that lie
+    // inside the arm list.
+    if cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list).is_none() {
+        let arm_list_range = ctx.sema.original_range(match_arm_list.syntax()).range;
+        let cursor_in_range = arm_list_range.contains_range(ctx.selection_trimmed());
+        if cursor_in_range {
+            cov_mark::hit!(not_applicable_outside_of_range_right);
+            return None;
+        }
+    }
+
+    let expr = match_expr.expr()?;
+
+    let mut has_catch_all_arm = false;
+
+    // Collect the patterns already present, splitting or-patterns into their
+    // alternatives, and note whether an unguarded `_` catch-all arm exists.
+    let top_lvl_pats: Vec<_> = match_arm_list
+        .arms()
+        .filter_map(|arm| Some((arm.pat()?, arm.guard().is_some())))
+        .flat_map(|(pat, has_guard)| {
+            match pat {
+                // Special case OrPat as separate top-level pats
+                Pat::OrPat(or_pat) => Either::Left(or_pat.pats()),
+                _ => Either::Right(iter::once(pat)),
+            }
+            .map(move |pat| (pat, has_guard))
+        })
+        .map(|(pat, has_guard)| {
+            has_catch_all_arm |= !has_guard && matches!(pat, Pat::WildcardPat(_));
+            pat
+        })
+        // Exclude top level wildcards so that they are expanded by this assist, retains status quo in #8129.
+        .filter(|pat| !matches!(pat, Pat::WildcardPat(_)))
+        .collect();
+
+    let module = ctx.sema.scope(expr.syntax())?.module();
+    let (mut missing_pats, is_non_exhaustive): (
+        Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
+        bool,
+    ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) {
+        let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
+
+        let variants = enum_def.variants(ctx.db());
+
+        // Each candidate pattern is paired with a flag saying whether the
+        // variant is `#[doc(hidden)]` from this crate's point of view.
+        let missing_pats = variants
+            .into_iter()
+            .filter_map(|variant| {
+                Some((
+                    build_pat(ctx.db(), module, variant)?,
+                    variant.should_be_hidden(ctx.db(), module.krate()),
+                ))
+            })
+            .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
+
+        let option_enum = FamousDefs(&ctx.sema, module.krate()).core_option_Option().map(lift_enum);
+        let missing_pats: Box<dyn Iterator<Item = _>> = if Some(enum_def) == option_enum {
+            // Match `Some` variant first.
+            cov_mark::hit!(option_order);
+            Box::new(missing_pats.rev())
+        } else {
+            Box::new(missing_pats)
+        };
+        (missing_pats.peekable(), is_non_exhaustive)
+    } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) {
+        let is_non_exhaustive =
+            enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db(), module.krate()));
+
+        let mut n_arms = 1;
+        let variants_of_enums: Vec<Vec<ExtendedVariant>> = enum_defs
+            .into_iter()
+            .map(|enum_def| enum_def.variants(ctx.db()))
+            .inspect(|variants| n_arms *= variants.len())
+            .collect();
+
+        // When calculating the match arms for a tuple of enums, we want
+        // to create a match arm for each possible combination of enum
+        // values. The `multi_cartesian_product` method transforms
+        // Vec<Vec<EnumVariant>> into Vec<(EnumVariant, .., EnumVariant)>
+        // where each tuple represents a proposed match arm.
+
+        // A number of arms grows very fast on even a small tuple of large enums.
+        // We skip the assist beyond an arbitrary threshold.
+        if n_arms > 256 {
+            return None;
+        }
+        let missing_pats = variants_of_enums
+            .into_iter()
+            .multi_cartesian_product()
+            .inspect(|_| cov_mark::hit!(add_missing_match_arms_lazy_computation))
+            .map(|variants| {
+                let is_hidden = variants
+                    .iter()
+                    .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+                let patterns =
+                    variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant));
+
+                (ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
+            })
+            .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
+        ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive)
+    } else {
+        return None;
+    };
+
+    let mut needs_catch_all_arm = is_non_exhaustive && !has_catch_all_arm;
+
+    if !needs_catch_all_arm && missing_pats.peek().is_none() {
+        return None;
+    }
+
+    acc.add(
+        AssistId("add_missing_match_arms", AssistKind::QuickFix),
+        "Fill match arms",
+        target_range,
+        |builder| {
+            let new_match_arm_list = match_arm_list.clone_for_update();
+            let missing_arms = missing_pats
+                .map(|(pat, hidden)| {
+                    (make::match_arm(iter::once(pat), None, make::ext::expr_todo()), hidden)
+                })
+                .map(|(it, hidden)| (it.clone_for_update(), hidden));
+
+            // An existing `_ => {}` / `_ => ()` arm is removed so the new arms
+            // can take its place; a catch-all with a real body is kept.
+            let catch_all_arm = new_match_arm_list
+                .arms()
+                .find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))));
+            if let Some(arm) = catch_all_arm {
+                let is_empty_expr = arm.expr().map_or(true, |e| match e {
+                    ast::Expr::BlockExpr(b) => {
+                        b.statements().next().is_none() && b.tail_expr().is_none()
+                    }
+                    ast::Expr::TupleExpr(t) => t.fields().next().is_none(),
+                    _ => false,
+                });
+                if is_empty_expr {
+                    arm.remove();
+                } else {
+                    cov_mark::hit!(add_missing_match_arms_empty_expr);
+                }
+            }
+            let mut first_new_arm = None;
+            for (arm, hidden) in missing_arms {
+                if hidden {
+                    // Hidden variants are not spelled out; request a wildcard
+                    // arm instead (unless one already exists).
+                    needs_catch_all_arm = !has_catch_all_arm;
+                } else {
+                    first_new_arm.get_or_insert_with(|| arm.clone());
+                    new_match_arm_list.add_arm(arm);
+                }
+            }
+            if needs_catch_all_arm && !has_catch_all_arm {
+                cov_mark::hit!(added_wildcard_pattern);
+                let arm = make::match_arm(
+                    iter::once(make::wildcard_pat().into()),
+                    None,
+                    make::ext::expr_todo(),
+                )
+                .clone_for_update();
+                first_new_arm.get_or_insert_with(|| arm.clone());
+                new_match_arm_list.add_arm(arm);
+            }
+
+            let old_range = ctx.sema.original_range(match_arm_list.syntax()).range;
+            // With snippet support, place the cursor on the first added arm,
+            // replacing its wildcard sub-pattern when there is one.
+            match (first_new_arm, ctx.config.snippet_cap) {
+                (Some(first_new_arm), Some(cap)) => {
+                    let extend_lifetime;
+                    let cursor =
+                        match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast)
+                        {
+                            Some(it) => {
+                                extend_lifetime = it.syntax().clone();
+                                Cursor::Replace(&extend_lifetime)
+                            }
+                            None => Cursor::Before(first_new_arm.syntax()),
+                        };
+                    let snippet = render_snippet(cap, new_match_arm_list.syntax(), cursor);
+                    builder.replace_snippet(cap, old_range, snippet);
+                }
+                _ => builder.replace(old_range, new_match_arm_list.to_string()),
+            }
+        },
+    )
+}
+
+/// Checks whether the cursor position makes the assist trivially applicable:
+/// an empty arm list (`match x { $0 }`), a cursor after the last arm but
+/// still inside the match expression, or a cursor on a wildcard arm that
+/// belongs to this very match. Returns `Some(())` on a hit, `None` otherwise.
+fn cursor_at_trivial_match_arm_list(
+    ctx: &AssistContext<'_>,
+    match_expr: &MatchExpr,
+    match_arm_list: &MatchArmList,
+) -> Option<()> {
+    // match x { $0 }
+    if match_arm_list.arms().next().is_none() {
+        cov_mark::hit!(add_missing_match_arms_empty_body);
+        return Some(());
+    }
+
+    // match x {
+    //     bar => baz,
+    //     $0
+    // }
+    if let Some(last_arm) = match_arm_list.arms().last() {
+        let last_arm_range = last_arm.syntax().text_range();
+        let match_expr_range = match_expr.syntax().text_range();
+        if last_arm_range.end() <= ctx.offset() && ctx.offset() < match_expr_range.end() {
+            cov_mark::hit!(add_missing_match_arms_end_of_last_arm);
+            return Some(());
+        }
+    }
+
+    // match { _$0 => {...} }
+    let wild_pat = ctx.find_node_at_offset_with_descend::<ast::WildcardPat>()?;
+    let arm = wild_pat.syntax().parent().and_then(ast::MatchArm::cast)?;
+    // Walk MatchArm -> MatchArmList -> MatchExpr to verify the wildcard arm
+    // belongs to the match we are operating on, not some nested one.
+    let arm_match_expr = arm.syntax().ancestors().nth(2).and_then(ast::MatchExpr::cast)?;
+    if arm_match_expr == *match_expr {
+        cov_mark::hit!(add_missing_match_arms_trivial_arm);
+        return Some(());
+    }
+
+    None
+}
+
+/// Returns `true` when none of the arms already present in the match
+/// (`existing_pats`) covers the candidate variant pattern `var`.
+fn is_variant_missing(existing_pats: &[Pat], var: &Pat) -> bool {
+    existing_pats.iter().all(|pat| !does_pat_match_variant(pat, var))
+}
+
+// FIXME: this is still somewhat limited, use hir_ty::diagnostics::match_check?
+/// Syntactic check whether the existing pattern `pat` covers the candidate
+/// variant pattern `var`.
+fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool {
+    match (pat, var) {
+        // A wildcard covers any variant.
+        (Pat::WildcardPat(_), _) => true,
+        // Tuples are compared field-by-field, recursively.
+        (Pat::TuplePat(tpat), Pat::TuplePat(tvar)) => {
+            tpat.fields().zip(tvar.fields()).all(|(p, v)| does_pat_match_variant(&p, &v))
+        }
+        _ => utils::does_pat_match_variant(pat, var),
+    }
+}
+
+/// An enum-like type this assist can exhaust: a real `enum`, or `bool`
+/// treated as a two-variant enum (`true` / `false`).
+#[derive(Eq, PartialEq, Clone, Copy)]
+enum ExtendedEnum {
+    Bool,
+    Enum(hir::Enum),
+}
+
+/// A variant of an `ExtendedEnum`: the `bool` literals, or a real enum
+/// variant.
+#[derive(Eq, PartialEq, Clone, Copy)]
+enum ExtendedVariant {
+    True,
+    False,
+    Variant(hir::Variant),
+}
+
+impl ExtendedVariant {
+    /// Whether this variant should not be spelled out in generated arms:
+    /// a `#[doc(hidden)]` variant defined in a crate other than `krate`.
+    /// The `bool` pseudo-variants are never hidden.
+    fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool {
+        if let ExtendedVariant::Variant(var) = self {
+            var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate
+        } else {
+            false
+        }
+    }
+}
+
+/// Wraps a `hir::Enum` into the `ExtendedEnum` abstraction.
+fn lift_enum(e: hir::Enum) -> ExtendedEnum {
+    ExtendedEnum::Enum(e)
+}
+
+impl ExtendedEnum {
+    /// Whether matching on this enum requires a wildcard arm: it carries
+    /// `#[non_exhaustive]` and lives in a crate other than `krate`.
+    /// `bool` is always exhaustive.
+    fn is_non_exhaustive(self, db: &RootDatabase, krate: Crate) -> bool {
+        if let ExtendedEnum::Enum(e) = self {
+            e.attrs(db).by_key("non_exhaustive").exists() && e.module(db).krate() != krate
+        } else {
+            false
+        }
+    }
+
+    /// All variants of this enum; for `bool` these are `True` and `False`,
+    /// in that order.
+    fn variants(self, db: &RootDatabase) -> Vec<ExtendedVariant> {
+        match self {
+            ExtendedEnum::Enum(e) => {
+                e.variants(db).into_iter().map(ExtendedVariant::Variant).collect()
+            }
+            ExtendedEnum::Bool => vec![ExtendedVariant::True, ExtendedVariant::False],
+        }
+    }
+}
+
+/// Resolves the scrutinee's (adjusted) type — looking through references via
+/// autoderef — to an enum or `bool`; returns `None` for anything else.
+fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
+    sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
+        Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
+        _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+    })
+}
+
+/// Resolves the scrutinee's type as a tuple whose every field is an enum or
+/// `bool`; returns `None` if any field is something else (or the type is not
+/// a tuple, in which case the field list is empty and `Some(vec![])` results).
+fn resolve_tuple_of_enum_def(
+    sema: &Semantics<'_, RootDatabase>,
+    expr: &ast::Expr,
+) -> Option<Vec<ExtendedEnum>> {
+    sema.type_of_expr(expr)?
+        .adjusted()
+        .tuple_fields(sema.db)
+        .iter()
+        .map(|ty| {
+            ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
+                Some(Adt::Enum(e)) => Some(lift_enum(e)),
+                // For now we only handle expansion for a tuple of enums. Here
+                // we map non-enum items to None and rely on `collect` to
+                // convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
+                _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+            })
+        })
+        .collect()
+}
+
+/// Builds an AST pattern matching `var`, using a path importable from
+/// `module`. Returns `None` when no use-path or no source for the variant
+/// is available.
+fn build_pat(db: &RootDatabase, module: hir::Module, var: ExtendedVariant) -> Option<ast::Pat> {
+    match var {
+        ExtendedVariant::Variant(var) => {
+            let path = mod_path_to_ast(&module.find_use_path(db, ModuleDef::from(var))?);
+
+            // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
+            let pat: ast::Pat = match var.source(db)?.value.kind() {
+                // Tuple variant: one `_` per field.
+                ast::StructKind::Tuple(field_list) => {
+                    let pats =
+                        iter::repeat(make::wildcard_pat().into()).take(field_list.fields().count());
+                    make::tuple_struct_pat(path, pats).into()
+                }
+                // Record variant: bind each field by its own name.
+                // NOTE(review): `f.name().unwrap()` panics on a nameless
+                // record field — presumably unreachable here; confirm.
+                ast::StructKind::Record(field_list) => {
+                    let pats = field_list
+                        .fields()
+                        .map(|f| make::ext::simple_ident_pat(f.name().unwrap()).into());
+                    make::record_pat(path, pats).into()
+                }
+                ast::StructKind::Unit => make::path_pat(path),
+            };
+
+            Some(pat)
+        }
+        ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
+        ExtendedVariant::False => Some(ast::Pat::from(make::literal_pat("false"))),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{
+ check_assist, check_assist_not_applicable, check_assist_target, check_assist_unresolved,
+ };
+
+ use super::add_missing_match_arms;
+
+ #[test]
+ fn all_match_arms_provided() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum A {
+ As,
+ Bs{x:i32, y:Option<i32>},
+ Cs(i32, Option<i32>),
+}
+fn main() {
+ match A::As$0 {
+ A::As,
+ A::Bs{x,y:Some(_)} => {}
+ A::Cs(_, Some(_)) => {}
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_outside_of_range_left() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum A { X, Y }
+
+fn foo(a: A) {
+ $0 match a {
+ A::X => { }
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_outside_of_range_right() {
+ cov_mark::check!(not_applicable_outside_of_range_right);
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum A { X, Y }
+
+fn foo(a: A) {
+ match a {$0
+ A::X => { }
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn all_boolean_match_arms_provided() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match a$0 {
+ true => {}
+ false => {}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn tuple_of_non_enum() {
+ // for now this case is not handled, although it potentially could be
+ // in the future
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+fn main() {
+ match (0, false)$0 {
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_boolean() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match a$0 {
+ }
+}
+"#,
+ r#"
+fn foo(a: bool) {
+ match a {
+ $0true => todo!(),
+ false => todo!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn partial_fill_boolean() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match a$0 {
+ true => {}
+ }
+}
+"#,
+ r#"
+fn foo(a: bool) {
+ match a {
+ true => {}
+ $0false => todo!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn all_boolean_tuple_arms_provided() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match (a, a)$0 {
+ (true, true) => {}
+ (true, false) => {}
+ (false, true) => {}
+ (false, false) => {}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn fill_boolean_tuple() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match (a, a)$0 {
+ }
+}
+"#,
+ r#"
+fn foo(a: bool) {
+ match (a, a) {
+ $0(true, true) => todo!(),
+ (true, false) => todo!(),
+ (false, true) => todo!(),
+ (false, false) => todo!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn partial_fill_boolean_tuple() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match (a, a)$0 {
+ (false, true) => {}
+ }
+}
+"#,
+ r#"
+fn foo(a: bool) {
+ match (a, a) {
+ (false, true) => {}
+ $0(true, true) => todo!(),
+ (true, false) => todo!(),
+ (false, false) => todo!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn partial_fill_record_tuple() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A {
+ As,
+ Bs { x: i32, y: Option<i32> },
+ Cs(i32, Option<i32>),
+}
+fn main() {
+ match A::As$0 {
+ A::Bs { x, y: Some(_) } => {}
+ A::Cs(_, Some(_)) => {}
+ }
+}
+"#,
+ r#"
+enum A {
+ As,
+ Bs { x: i32, y: Option<i32> },
+ Cs(i32, Option<i32>),
+}
+fn main() {
+ match A::As {
+ A::Bs { x, y: Some(_) } => {}
+ A::Cs(_, Some(_)) => {}
+ $0A::As => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn partial_fill_option() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- minicore: option
+fn main() {
+ match None$0 {
+ None => {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ match None {
+ None => {}
+ Some(${0:_}) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn partial_fill_or_pat() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As, Bs, Cs(Option<i32>) }
+fn main() {
+ match A::As$0 {
+ A::Cs(_) | A::Bs => {}
+ }
+}
+"#,
+ r#"
+enum A { As, Bs, Cs(Option<i32>) }
+fn main() {
+ match A::As {
+ A::Cs(_) | A::Bs => {}
+ $0A::As => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn partial_fill() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As, Bs, Cs, Ds(String), Es(B) }
+enum B { Xs, Ys }
+fn main() {
+ match A::As$0 {
+ A::Bs if 0 < 1 => {}
+ A::Ds(_value) => { let x = 1; }
+ A::Es(B::Xs) => (),
+ }
+}
+"#,
+ r#"
+enum A { As, Bs, Cs, Ds(String), Es(B) }
+enum B { Xs, Ys }
+fn main() {
+ match A::As {
+ A::Bs if 0 < 1 => {}
+ A::Ds(_value) => { let x = 1; }
+ A::Es(B::Xs) => (),
+ $0A::As => todo!(),
+ A::Cs => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn partial_fill_bind_pat() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As, Bs, Cs(Option<i32>) }
+fn main() {
+ match A::As$0 {
+ A::As(_) => {}
+ a @ A::Bs(_) => {}
+ }
+}
+"#,
+ r#"
+enum A { As, Bs, Cs(Option<i32>) }
+fn main() {
+ match A::As {
+ A::As(_) => {}
+ a @ A::Bs(_) => {}
+ A::Cs(${0:_}) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_empty_body() {
+ cov_mark::check!(add_missing_match_arms_empty_body);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } }
+
+fn main() {
+ let a = A::As;
+ match a {$0}
+}
+"#,
+ r#"
+enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } }
+
+fn main() {
+ let a = A::As;
+ match a {
+ $0A::As => todo!(),
+ A::Bs => todo!(),
+ A::Cs(_) => todo!(),
+ A::Ds(_, _) => todo!(),
+ A::Es { x, y } => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_end_of_last_arm() {
+ cov_mark::check!(add_missing_match_arms_end_of_last_arm);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a, b) {
+ (A::Two, B::One) => {},$0
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a, b) {
+ (A::Two, B::One) => {},
+ $0(A::One, B::One) => todo!(),
+ (A::One, B::Two) => todo!(),
+ (A::Two, B::Two) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a$0, b) {}
+}
+"#,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a, b) {
+ $0(A::One, B::One) => todo!(),
+ (A::One, B::Two) => todo!(),
+ (A::Two, B::One) => todo!(),
+ (A::Two, B::Two) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum_ref() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (&a$0, &b) {}
+}
+"#,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (&a, &b) {
+ $0(A::One, B::One) => todo!(),
+ (A::One, B::Two) => todo!(),
+ (A::Two, B::One) => todo!(),
+ (A::Two, B::Two) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum_partial() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a$0, b) {
+ (A::Two, B::One) => {}
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a, b) {
+ (A::Two, B::One) => {}
+ $0(A::One, B::One) => todo!(),
+ (A::One, B::Two) => todo!(),
+ (A::Two, B::Two) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum_partial_with_wildcards() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- minicore: option
+fn main() {
+ let a = Some(1);
+ let b = Some(());
+ match (a$0, b) {
+ (Some(_), _) => {}
+ (None, Some(_)) => {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ let a = Some(1);
+ let b = Some(());
+ match (a, b) {
+ (Some(_), _) => {}
+ (None, Some(_)) => {}
+ $0(None, None) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_partial_with_deep_pattern() {
+        // FIXME: cannot handle deep patterns
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+//- minicore: option
+fn main() {
+ match $0Some(true) {
+ Some(true) => {}
+ None => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum_not_applicable() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a$0, b) {
+ (A::Two, B::One) => {}
+ (A::One, B::One) => {}
+ (A::One, B::Two) => {}
+ (A::Two, B::Two) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_single_element_tuple_of_enum() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+
+fn main() {
+ let a = A::One;
+ match (a$0, ) {
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+
+fn main() {
+ let a = A::One;
+ match (a, ) {
+ $0(A::One,) => todo!(),
+ (A::Two,) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_match_arm_refs() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As }
+
+fn foo(a: &A) {
+ match a$0 {
+ }
+}
+"#,
+ r#"
+enum A { As }
+
+fn foo(a: &A) {
+ match a {
+ $0A::As => todo!(),
+ }
+}
+"#,
+ );
+
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A {
+ Es { x: usize, y: usize }
+}
+
+fn foo(a: &mut A) {
+ match a$0 {
+ }
+}
+"#,
+ r#"
+enum A {
+ Es { x: usize, y: usize }
+}
+
+fn foo(a: &mut A) {
+ match a {
+ $0A::Es { x, y } => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_target_simple() {
+ check_assist_target(
+ add_missing_match_arms,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X$0 {}
+}
+"#,
+ "match E::X {}",
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_target_complex() {
+ check_assist_target(
+ add_missing_match_arms,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X$0 {
+ E::X => {}
+ }
+}
+"#,
+ "match E::X {
+ E::X => {}
+ }",
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_trivial_arm() {
+ cov_mark::check!(add_missing_match_arms_trivial_arm);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X {
+ $0_ => {}
+ }
+}
+"#,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X {
+ $0E::X => todo!(),
+ E::Y => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wildcard_inside_expression_not_applicable() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum E { X, Y }
+
+fn foo(e : E) {
+ match e {
+ _ => {
+ println!("1");$0
+ println!("2");
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_qualifies_path() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+mod foo { pub enum E { X, Y } }
+use foo::E::X;
+
+fn main() {
+ match X {
+ $0
+ }
+}
+"#,
+ r#"
+mod foo { pub enum E { X, Y } }
+use foo::E::X;
+
+fn main() {
+ match X {
+ $0X => todo!(),
+ foo::E::Y => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_preserves_comments() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match a $0 {
+ // foo bar baz
+ A::One => {}
+ // This is where the rest should be
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match a {
+ // foo bar baz
+ A::One => {}
+ $0A::Two => todo!(),
+ // This is where the rest should be
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_preserves_comments_empty() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match a {
+ // foo bar baz$0
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match a {
+ $0A::One => todo!(),
+ A::Two => todo!(),
+ // foo bar baz
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_placeholder() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two, }
+fn foo(a: A) {
+ match a$0 {
+ _ => (),
+ }
+}
+"#,
+ r#"
+enum A { One, Two, }
+fn foo(a: A) {
+ match a {
+ $0A::One => todo!(),
+ A::Two => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn option_order() {
+ cov_mark::check!(option_order);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- minicore: option
+fn foo(opt: Option<i32>) {
+ match opt$0 {
+ }
+}
+"#,
+ r#"
+fn foo(opt: Option<i32>) {
+ match opt {
+ Some(${0:_}) => todo!(),
+ None => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn works_inside_macro_call() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+macro_rules! m { ($expr:expr) => {$expr}}
+enum Test {
+ A,
+ B,
+ C,
+}
+
+fn foo(t: Test) {
+ m!(match t$0 {});
+}"#,
+ r#"
+macro_rules! m { ($expr:expr) => {$expr}}
+enum Test {
+ A,
+ B,
+ C,
+}
+
+fn foo(t: Test) {
+ m!(match t {
+ $0Test::A => todo!(),
+ Test::B => todo!(),
+ Test::C => todo!(),
+});
+}"#,
+ );
+ }
+
+ #[test]
+ fn lazy_computation() {
+ // Computing a single missing arm is enough to determine applicability of the assist.
+ cov_mark::check_count!(add_missing_match_arms_lazy_computation, 1);
+ check_assist_unresolved(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two, }
+fn foo(tuple: (A, A)) {
+ match $0tuple {};
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn adds_comma_before_new_arms() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(t: bool) {
+ match $0t {
+ true => 1 + 2
+ }
+}"#,
+ r#"
+fn foo(t: bool) {
+ match t {
+ true => 1 + 2,
+ $0false => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn does_not_add_extra_comma() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(t: bool) {
+ match $0t {
+ true => 1 + 2,
+ }
+}"#,
+ r#"
+fn foo(t: bool) {
+ match t {
+ true => 1 + 2,
+ $0false => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn does_not_remove_catch_all_with_non_empty_expr() {
+ cov_mark::check!(add_missing_match_arms_empty_expr);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(t: bool) {
+ match $0t {
+ _ => 1 + 2,
+ }
+}"#,
+ r#"
+fn foo(t: bool) {
+ match t {
+ _ => 1 + 2,
+ $0true => todo!(),
+ false => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn does_not_fill_hidden_variants() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+pub enum E { A, #[doc(hidden)] B, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ $0e::E::A => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_fill_hidden_variants_tuple() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: (bool, ::e::E)) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+pub enum E { A, #[doc(hidden)] B, }
+"#,
+ r#"
+fn foo(t: (bool, ::e::E)) {
+ match t {
+ $0(true, e::E::A) => todo!(),
+ (false, e::E::A) => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_with_only_hidden_variants() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ ${0:_} => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_fill_wildcard_when_hidden_variants_are_explicit() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ e::E::A => todo!(),
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }
+"#,
+ );
+ }
+
+ // FIXME: I don't think the assist should be applicable in this case
+ #[test]
+ fn does_not_fill_wildcard_with_wildcard() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ _ => todo!(),
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_on_non_exhaustive_with_explicit_matches() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ e::E::A => todo!(),
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ e::E::A => todo!(),
+ ${0:_} => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_on_non_exhaustive_without_matches() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ $0e::E::A => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_on_non_exhaustive_with_doc_hidden() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, #[doc(hidden)] B }"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ $0e::E::A => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_on_non_exhaustive_with_doc_hidden_with_explicit_arms() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ e::E::A => todo!(),
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, #[doc(hidden)] B }"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ e::E::A => todo!(),
+ ${0:_} => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fill_wildcard_with_partial_wildcard() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E, b: bool) {
+ match $0t {
+ _ if b => todo!(),
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }"#,
+ r#"
+fn foo(t: ::e::E, b: bool) {
+ match t {
+ _ if b => todo!(),
+ ${0:_} => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_fill_wildcard_with_partial_wildcard_and_wildcard() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E, b: bool) {
+ match $0t {
+ _ if b => todo!(),
+ _ => todo!(),
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }"#,
+ r#"
+fn foo(t: ::e::E, b: bool) {
+ match t {
+ _ if b => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn non_exhaustive_doc_hidden_tuple_fills_wildcard() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, #[doc(hidden)] B, }"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ $0e::E::A => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignores_doc_hidden_for_crate_local_enums() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum E { A, #[doc(hidden)] B, }
+
+fn foo(t: E) {
+ match $0t {
+ }
+}"#,
+ r#"
+enum E { A, #[doc(hidden)] B, }
+
+fn foo(t: E) {
+ match t {
+ $0E::A => todo!(),
+ E::B => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn ignores_non_exhaustive_for_crate_local_enums() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+#[non_exhaustive]
+enum E { A, B, }
+
+fn foo(t: E) {
+ match $0t {
+ }
+}"#,
+ r#"
+#[non_exhaustive]
+enum E { A, B, }
+
+fn foo(t: E) {
+ match t {
+ $0E::A => todo!(),
+ E::B => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn ignores_doc_hidden_and_non_exhaustive_for_crate_local_enums() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+#[non_exhaustive]
+enum E { A, #[doc(hidden)] B, }
+
+fn foo(t: E) {
+ match $0t {
+ }
+}"#,
+ r#"
+#[non_exhaustive]
+enum E { A, #[doc(hidden)] B, }
+
+fn foo(t: E) {
+ match t {
+ $0E::A => todo!(),
+ E::B => todo!(),
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs
new file mode 100644
index 000000000..f858d7a15
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs
@@ -0,0 +1,447 @@
+use hir::HirDisplay;
+use syntax::{ast, match_ast, AstNode, SyntaxKind, SyntaxToken, TextRange, TextSize};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_return_type
+//
+// Adds the return type to a function or closure inferred from its tail expression if it doesn't have a return
+// type specified. This assist is usable in a function's or closure's tail expression or return type position.
+//
+// ```
+// fn foo() { 4$02i32 }
+// ```
+// ->
+// ```
+// fn foo() -> i32 { 42i32 }
+// ```
+pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (fn_type, tail_expr, builder_edit_pos) = extract_tail(ctx)?;
+ let module = ctx.sema.scope(tail_expr.syntax())?.module();
+ let ty = ctx.sema.type_of_expr(&peel_blocks(tail_expr.clone()))?.original();
+ if ty.is_unit() {
+ return None;
+ }
+ let ty = ty.display_source_code(ctx.db(), module.into()).ok()?;
+
+ acc.add(
+ AssistId("add_return_type", AssistKind::RefactorRewrite),
+ match fn_type {
+ FnType::Function => "Add this function's return type",
+ FnType::Closure { .. } => "Add this closure's return type",
+ },
+ tail_expr.syntax().text_range(),
+ |builder| {
+ match builder_edit_pos {
+ InsertOrReplace::Insert(insert_pos, needs_whitespace) => {
+ let preceeding_whitespace = if needs_whitespace { " " } else { "" };
+ builder.insert(insert_pos, &format!("{}-> {} ", preceeding_whitespace, ty))
+ }
+ InsertOrReplace::Replace(text_range) => {
+ builder.replace(text_range, &format!("-> {}", ty))
+ }
+ }
+ if let FnType::Closure { wrap_expr: true } = fn_type {
+ cov_mark::hit!(wrap_closure_non_block_expr);
+ // `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block
+ builder.replace(tail_expr.syntax().text_range(), &format!("{{{}}}", tail_expr));
+ }
+ },
+ )
+}
+
+enum InsertOrReplace {
+ Insert(TextSize, bool),
+ Replace(TextRange),
+}
+
+/// Check the potentially already specified return type and reject it or turn it into a builder command
+/// if allowed.
+fn ret_ty_to_action(
+ ret_ty: Option<ast::RetType>,
+ insert_after: SyntaxToken,
+) -> Option<InsertOrReplace> {
+ match ret_ty {
+ Some(ret_ty) => match ret_ty.ty() {
+ Some(ast::Type::InferType(_)) | None => {
+ cov_mark::hit!(existing_infer_ret_type);
+ cov_mark::hit!(existing_infer_ret_type_closure);
+ Some(InsertOrReplace::Replace(ret_ty.syntax().text_range()))
+ }
+ _ => {
+ cov_mark::hit!(existing_ret_type);
+ cov_mark::hit!(existing_ret_type_closure);
+ None
+ }
+ },
+ None => {
+ let insert_after_pos = insert_after.text_range().end();
+ let (insert_pos, needs_whitespace) = match insert_after.next_token() {
+ Some(it) if it.kind() == SyntaxKind::WHITESPACE => {
+ (insert_after_pos + TextSize::from(1), false)
+ }
+ _ => (insert_after_pos, true),
+ };
+
+ Some(InsertOrReplace::Insert(insert_pos, needs_whitespace))
+ }
+ }
+}
+
+enum FnType {
+ Function,
+ Closure { wrap_expr: bool },
+}
+
+/// If we're looking at a block that is supposed to return `()`, type inference
+/// will just tell us it has type `()`. We have to look at the tail expression
+/// to see the mismatched actual type. This 'unpeels' the various blocks to
+/// hopefully let us see the type the user intends. (This still doesn't handle
+/// all situations fully correctly; the 'ideal' way to handle this would be to
+/// run type inference on the function again, but with a variable as the return
+/// type.)
+fn peel_blocks(mut expr: ast::Expr) -> ast::Expr {
+ loop {
+ match_ast! {
+ match (expr.syntax()) {
+ ast::BlockExpr(it) => {
+ if let Some(tail) = it.tail_expr() {
+ expr = tail.clone();
+ } else {
+ break;
+ }
+ },
+ ast::IfExpr(it) => {
+ if let Some(then_branch) = it.then_branch() {
+ expr = ast::Expr::BlockExpr(then_branch.clone());
+ } else {
+ break;
+ }
+ },
+ ast::MatchExpr(it) => {
+ if let Some(arm_expr) = it.match_arm_list().and_then(|l| l.arms().next()).and_then(|a| a.expr()) {
+ expr = arm_expr;
+ } else {
+ break;
+ }
+ },
+ _ => break,
+ }
+ }
+ }
+ expr
+}
+
+fn extract_tail(ctx: &AssistContext<'_>) -> Option<(FnType, ast::Expr, InsertOrReplace)> {
+ let (fn_type, tail_expr, return_type_range, action) =
+ if let Some(closure) = ctx.find_node_at_offset::<ast::ClosureExpr>() {
+ let rpipe = closure.param_list()?.syntax().last_token()?;
+ let rpipe_pos = rpipe.text_range().end();
+
+ let action = ret_ty_to_action(closure.ret_type(), rpipe)?;
+
+ let body = closure.body()?;
+ let body_start = body.syntax().first_token()?.text_range().start();
+ let (tail_expr, wrap_expr) = match body {
+ ast::Expr::BlockExpr(block) => (block.tail_expr()?, false),
+ body => (body, true),
+ };
+
+ let ret_range = TextRange::new(rpipe_pos, body_start);
+ (FnType::Closure { wrap_expr }, tail_expr, ret_range, action)
+ } else {
+ let func = ctx.find_node_at_offset::<ast::Fn>()?;
+
+ let rparen = func.param_list()?.r_paren_token()?;
+ let rparen_pos = rparen.text_range().end();
+ let action = ret_ty_to_action(func.ret_type(), rparen)?;
+
+ let body = func.body()?;
+ let stmt_list = body.stmt_list()?;
+ let tail_expr = stmt_list.tail_expr()?;
+
+ let ret_range_end = stmt_list.l_curly_token()?.text_range().start();
+ let ret_range = TextRange::new(rparen_pos, ret_range_end);
+ (FnType::Function, tail_expr, ret_range, action)
+ };
+ let range = ctx.selection_trimmed();
+ if return_type_range.contains_range(range) {
+ cov_mark::hit!(cursor_in_ret_position);
+ cov_mark::hit!(cursor_in_ret_position_closure);
+ } else if tail_expr.syntax().text_range().contains_range(range) {
+ cov_mark::hit!(cursor_on_tail);
+ cov_mark::hit!(cursor_on_tail_closure);
+ } else {
+ return None;
+ }
+ Some((fn_type, tail_expr, action))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn infer_return_type_specified_inferred() {
+ cov_mark::check!(existing_infer_ret_type);
+ check_assist(
+ add_return_type,
+ r#"fn foo() -> $0_ {
+ 45
+}"#,
+ r#"fn foo() -> i32 {
+ 45
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_specified_inferred_closure() {
+ cov_mark::check!(existing_infer_ret_type_closure);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ || -> _ {$045};
+}"#,
+ r#"fn foo() {
+ || -> i32 {45};
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_cursor_at_return_type_pos() {
+ cov_mark::check!(cursor_in_ret_position);
+ check_assist(
+ add_return_type,
+ r#"fn foo() $0{
+ 45
+}"#,
+ r#"fn foo() -> i32 {
+ 45
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_cursor_at_return_type_pos_closure() {
+ cov_mark::check!(cursor_in_ret_position_closure);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ || $045
+}"#,
+ r#"fn foo() {
+ || -> i32 {45}
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type() {
+ cov_mark::check!(cursor_on_tail);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ 45$0
+}"#,
+ r#"fn foo() -> i32 {
+ 45
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_no_whitespace() {
+ check_assist(
+ add_return_type,
+ r#"fn foo(){
+ 45$0
+}"#,
+ r#"fn foo() -> i32 {
+ 45
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_nested() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ if true {
+ 3$0
+ } else {
+ 5
+ }
+}"#,
+ r#"fn foo() -> i32 {
+ if true {
+ 3
+ } else {
+ 5
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_nested_match() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ match true {
+ true => { 3$0 },
+ false => { 5 },
+ }
+}"#,
+ r#"fn foo() -> i32 {
+ match true {
+ true => { 3 },
+ false => { 5 },
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_ret_type_specified() {
+ cov_mark::check!(existing_ret_type);
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() -> i32 {
+ ( 45$0 + 32 ) * 123
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_tail_expr() {
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() {
+ let x = $03;
+ ( 45 + 32 ) * 123
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_unit_return_type() {
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() {
+ ($0)
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_closure_block() {
+ cov_mark::check!(cursor_on_tail_closure);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ |x: i32| {
+ x$0
+ };
+}"#,
+ r#"fn foo() {
+ |x: i32| -> i32 {
+ x
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_closure() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ |x: i32| { x$0 };
+}"#,
+ r#"fn foo() {
+ |x: i32| -> i32 { x };
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_closure_no_whitespace() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ |x: i32|{ x$0 };
+}"#,
+ r#"fn foo() {
+ |x: i32| -> i32 { x };
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_closure_wrap() {
+ cov_mark::check!(wrap_closure_non_block_expr);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ |x: i32| x$0;
+}"#,
+ r#"fn foo() {
+ |x: i32| -> i32 {x};
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_nested_closure() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ || {
+ if true {
+ 3$0
+ } else {
+ 5
+ }
+ }
+}"#,
+ r#"fn foo() {
+ || -> i32 {
+ if true {
+ 3
+ } else {
+ 5
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_ret_type_specified_closure() {
+ cov_mark::check!(existing_ret_type_closure);
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() {
+ || -> i32 { 3$0 }
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_tail_expr_closure() {
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() {
+ || -> i32 {
+ let x = 3$0;
+ 6
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
new file mode 100644
index 000000000..c0bf238db
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -0,0 +1,400 @@
+use ide_db::defs::{Definition, NameRefClass};
+use itertools::Itertools;
+use syntax::{ast, AstNode, SyntaxKind, T};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: add_turbo_fish
+//
+// Adds `::<_>` to a call of a generic method or function.
+//
+// ```
+// fn make<T>() -> T { todo!() }
+// fn main() {
+// let x = make$0();
+// }
+// ```
+// ->
+// ```
+// fn make<T>() -> T { todo!() }
+// fn main() {
+// let x = make::<${0:_}>();
+// }
+// ```
+pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| {
+ let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?;
+ if arg_list.args().next().is_some() {
+ return None;
+ }
+ cov_mark::hit!(add_turbo_fish_after_call);
+ cov_mark::hit!(add_type_ascription_after_call);
+ arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT)
+ })?;
+ let next_token = ident.next_token()?;
+ if next_token.kind() == T![::] {
+ cov_mark::hit!(add_turbo_fish_one_fish_is_enough);
+ return None;
+ }
+ let name_ref = ast::NameRef::cast(ident.parent()?)?;
+ let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { .. } => return None,
+ };
+ let fun = match def {
+ Definition::Function(it) => it,
+ _ => return None,
+ };
+ let generics = hir::GenericDef::Function(fun).params(ctx.sema.db);
+ if generics.is_empty() {
+ cov_mark::hit!(add_turbo_fish_non_generic);
+ return None;
+ }
+
+ if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
+ if let_stmt.colon_token().is_none() {
+ let type_pos = let_stmt.pat()?.syntax().last_token()?.text_range().end();
+ let semi_pos = let_stmt.syntax().last_token()?.text_range().end();
+
+ acc.add(
+ AssistId("add_type_ascription", AssistKind::RefactorRewrite),
+ "Add `: _` before assignment operator",
+ ident.text_range(),
+ |builder| {
+ if let_stmt.semicolon_token().is_none() {
+ builder.insert(semi_pos, ";");
+ }
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, type_pos, ": ${0:_}"),
+ None => builder.insert(type_pos, ": _"),
+ }
+ },
+ )?
+ } else {
+ cov_mark::hit!(add_type_ascription_already_typed);
+ }
+ }
+
+ let number_of_arguments = generics
+ .iter()
+ .filter(|param| {
+ matches!(param, hir::GenericParam::TypeParam(_) | hir::GenericParam::ConstParam(_))
+ })
+ .count();
+
+ acc.add(
+ AssistId("add_turbo_fish", AssistKind::RefactorRewrite),
+ "Add `::<>`",
+ ident.text_range(),
+ |builder| {
+ builder.trigger_signature_help();
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = format!("::<{}>", get_snippet_fish_head(number_of_arguments));
+ builder.insert_snippet(cap, ident.text_range().end(), snip)
+ }
+ None => {
+ let fish_head = std::iter::repeat("_").take(number_of_arguments).format(", ");
+ let snip = format!("::<{}>", fish_head);
+ builder.insert(ident.text_range().end(), snip);
+ }
+ }
+ },
+ )
+}
+
+/// This will create a snippet string with tabstops marked
+fn get_snippet_fish_head(number_of_arguments: usize) -> String {
+ let mut fish_head = (1..number_of_arguments)
+ .format_with("", |i, f| f(&format_args!("${{{}:_}}, ", i)))
+ .to_string();
+
+ // tabstop 0 is a special case and always the last one
+ fish_head.push_str("${0:_}");
+ fish_head
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_turbo_fish_function() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make$0();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make::<${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_function_multiple_generic_types() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T, A>() -> T {}
+fn main() {
+ make$0();
+}
+"#,
+ r#"
+fn make<T, A>() -> T {}
+fn main() {
+ make::<${1:_}, ${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_function_many_generic_types() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T, A, B, C, D, E, F>() -> T {}
+fn main() {
+ make$0();
+}
+"#,
+ r#"
+fn make<T, A, B, C, D, E, F>() -> T {}
+fn main() {
+ make::<${1:_}, ${2:_}, ${3:_}, ${4:_}, ${5:_}, ${6:_}, ${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_after_call() {
+ cov_mark::check!(add_turbo_fish_after_call);
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make()$0;
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make::<${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_method() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+struct S;
+impl S {
+ fn make<T>(&self) -> T {}
+}
+fn main() {
+ S.make$0();
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn make<T>(&self) -> T {}
+}
+fn main() {
+ S.make::<${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_one_fish_is_enough() {
+ cov_mark::check!(add_turbo_fish_one_fish_is_enough);
+ check_assist_not_applicable(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make$0::<()>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_non_generic() {
+ cov_mark::check!(add_turbo_fish_non_generic);
+ check_assist_not_applicable(
+ add_turbo_fish,
+ r#"
+fn make() -> () {}
+fn main() {
+ make$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_function() {
+ check_assist_by_label(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x = make$0();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: ${0:_} = make();
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_after_call() {
+ cov_mark::check!(add_type_ascription_after_call);
+ check_assist_by_label(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x = make()$0;
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: ${0:_} = make();
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_method() {
+ check_assist_by_label(
+ add_turbo_fish,
+ r#"
+struct S;
+impl S {
+ fn make<T>(&self) -> T {}
+}
+fn main() {
+ let x = S.make$0();
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn make<T>(&self) -> T {}
+}
+fn main() {
+ let x: ${0:_} = S.make();
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_already_typed() {
+ cov_mark::check!(add_type_ascription_already_typed);
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: () = make$0();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: () = make::<${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_append_semicolon() {
+ check_assist_by_label(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x = make$0()
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: ${0:_} = make();
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_function_lifetime_parameter() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<'a, T, A>(t: T, a: A) {}
+fn main() {
+ make$0(5, 2);
+}
+"#,
+ r#"
+fn make<'a, T, A>(t: T, a: A) {}
+fn main() {
+ make::<${1:_}, ${0:_}>(5, 2);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_function_const_parameter() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T, const N: usize>(t: T) {}
+fn main() {
+ make$0(3);
+}
+"#,
+ r#"
+fn make<T, const N: usize>(t: T) {}
+fn main() {
+ make::<${1:_}, ${0:_}>(3);
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
new file mode 100644
index 000000000..2853d1d1b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -0,0 +1,234 @@
+use std::collections::VecDeque;
+
+use syntax::ast::{self, AstNode};
+
+use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: apply_demorgan
+//
+// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law].
+// This transforms expressions of the form `!l || !r` into `!(l && r)`.
+// This also works with `&&`. This assist can only be applied with the cursor
+// on either `||` or `&&`.
+//
+// ```
+// fn main() {
+// if x != 4 ||$0 y < 3.14 {}
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// if !(x == 4 && y >= 3.14) {}
+// }
+// ```
+pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
+ let op = expr.op_kind()?;
+ let op_range = expr.op_token()?.text_range();
+
+ let opposite_op = match op {
+ ast::BinaryOp::LogicOp(ast::LogicOp::And) => "||",
+ ast::BinaryOp::LogicOp(ast::LogicOp::Or) => "&&",
+ _ => return None,
+ };
+
+ let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
+
+ let mut expr = expr;
+
+ // Walk up the tree while we have the same binary operator
+ while let Some(parent_expr) = expr.syntax().parent().and_then(ast::BinExpr::cast) {
+ match expr.op_kind() {
+ Some(parent_op) if parent_op == op => {
+ expr = parent_expr;
+ }
+ _ => break,
+ }
+ }
+
+ let mut expr_stack = vec![expr.clone()];
+ let mut terms = Vec::new();
+ let mut op_ranges = Vec::new();
+
+ // Find all the children with the same binary operator
+ while let Some(expr) = expr_stack.pop() {
+ let mut traverse_bin_expr_arm = |expr| {
+ if let ast::Expr::BinExpr(bin_expr) = expr {
+ if let Some(expr_op) = bin_expr.op_kind() {
+ if expr_op == op {
+ expr_stack.push(bin_expr);
+ } else {
+ terms.push(ast::Expr::BinExpr(bin_expr));
+ }
+ } else {
+ terms.push(ast::Expr::BinExpr(bin_expr));
+ }
+ } else {
+ terms.push(expr);
+ }
+ };
+
+ op_ranges.extend(expr.op_token().map(|t| t.text_range()));
+ traverse_bin_expr_arm(expr.lhs()?);
+ traverse_bin_expr_arm(expr.rhs()?);
+ }
+
+ acc.add(
+ AssistId("apply_demorgan", AssistKind::RefactorRewrite),
+ "Apply De Morgan's law",
+ op_range,
+ |edit| {
+ terms.sort_by_key(|t| t.syntax().text_range().start());
+ let mut terms = VecDeque::from(terms);
+
+ let paren_expr = expr.syntax().parent().and_then(ast::ParenExpr::cast);
+
+ let neg_expr = paren_expr
+ .clone()
+ .and_then(|paren_expr| paren_expr.syntax().parent())
+ .and_then(ast::PrefixExpr::cast)
+ .and_then(|prefix_expr| {
+ if prefix_expr.op_kind().unwrap() == ast::UnaryOp::Not {
+ Some(prefix_expr)
+ } else {
+ None
+ }
+ });
+
+ for op_range in op_ranges {
+ edit.replace(op_range, opposite_op);
+ }
+
+ if let Some(paren_expr) = paren_expr {
+ for term in terms {
+ let range = term.syntax().text_range();
+ let not_term = invert_boolean_expression(term);
+
+ edit.replace(range, not_term.syntax().text());
+ }
+
+ if let Some(neg_expr) = neg_expr {
+ cov_mark::hit!(demorgan_double_negation);
+ edit.replace(neg_expr.op_token().unwrap().text_range(), "");
+ } else {
+ cov_mark::hit!(demorgan_double_parens);
+ edit.replace(paren_expr.l_paren_token().unwrap().text_range(), "!(");
+ }
+ } else {
+ if let Some(lhs) = terms.pop_front() {
+ let lhs_range = lhs.syntax().text_range();
+ let not_lhs = invert_boolean_expression(lhs);
+
+ edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text()));
+ }
+
+ if let Some(rhs) = terms.pop_back() {
+ let rhs_range = rhs.syntax().text_range();
+ let not_rhs = invert_boolean_expression(rhs);
+
+ edit.replace(rhs_range, format!("{})", not_rhs.syntax().text()));
+ }
+
+ for term in terms {
+ let term_range = term.syntax().text_range();
+ let not_term = invert_boolean_expression(term);
+ edit.replace(term_range, not_term.syntax().text());
+ }
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn demorgan_handles_leq() {
+ check_assist(
+ apply_demorgan,
+ r#"
+struct S;
+fn f() { S < S &&$0 S <= S }
+"#,
+ r#"
+struct S;
+fn f() { !(S >= S || S > S) }
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_handles_geq() {
+ check_assist(
+ apply_demorgan,
+ r#"
+struct S;
+fn f() { S > S &&$0 S >= S }
+"#,
+ r#"
+struct S;
+fn f() { !(S <= S || S < S) }
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_turns_and_into_or() {
+ check_assist(apply_demorgan, "fn f() { !x &&$0 !x }", "fn f() { !(x || x) }")
+ }
+
+ #[test]
+ fn demorgan_turns_or_into_and() {
+ check_assist(apply_demorgan, "fn f() { !x ||$0 !x }", "fn f() { !(x && x) }")
+ }
+
+ #[test]
+ fn demorgan_removes_inequality() {
+ check_assist(apply_demorgan, "fn f() { x != x ||$0 !x }", "fn f() { !(x == x && x) }")
+ }
+
+ #[test]
+ fn demorgan_general_case() {
+ check_assist(apply_demorgan, "fn f() { x ||$0 x }", "fn f() { !(!x && !x) }")
+ }
+
+ #[test]
+ fn demorgan_multiple_terms() {
+ check_assist(apply_demorgan, "fn f() { x ||$0 y || z }", "fn f() { !(!x && !y && !z) }");
+ check_assist(apply_demorgan, "fn f() { x || y ||$0 z }", "fn f() { !(!x && !y && !z) }");
+ }
+
+ #[test]
+ fn demorgan_doesnt_apply_with_cursor_not_on_op() {
+ check_assist_not_applicable(apply_demorgan, "fn f() { $0 !x || !x }")
+ }
+
+ #[test]
+ fn demorgan_doesnt_double_negation() {
+ cov_mark::check!(demorgan_double_negation);
+ check_assist(apply_demorgan, "fn f() { !(x ||$0 x) }", "fn f() { (!x && !x) }")
+ }
+
+ #[test]
+ fn demorgan_doesnt_double_parens() {
+ cov_mark::check!(demorgan_double_parens);
+ check_assist(apply_demorgan, "fn f() { (x ||$0 x) }", "fn f() { !(!x && !x) }")
+ }
+
+ // https://github.com/rust-lang/rust-analyzer/issues/10963
+ #[test]
+ fn demorgan_doesnt_hang() {
+ check_assist(
+ apply_demorgan,
+ "fn f() { 1 || 3 &&$0 4 || 5 }",
+ "fn f() { !(!1 || !3 || !4) || 5 }",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
new file mode 100644
index 000000000..949cf3167
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
@@ -0,0 +1,1292 @@
+use std::cmp::Reverse;
+
+use hir::{db::HirDatabase, Module};
+use ide_db::{
+ helpers::mod_path_to_ast,
+ imports::{
+ import_assets::{ImportAssets, ImportCandidate, LocatedImport},
+ insert_use::{insert_use, ImportScope},
+ },
+};
+use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+
+// Feature: Auto Import
+//
+// Using the `auto-import` assist it is possible to insert missing imports for unresolved items.
+// When inserting an import it will do so in a structured manner by keeping imports grouped,
+// separated by a newline in the following order:
+//
+// - `std` and `core`
+// - External Crates
+// - Current Crate, paths prefixed by `crate`
+// - Current Module, paths prefixed by `self`
+// - Super Module, paths prefixed by `super`
+//
+// Example:
+// ```rust
+// use std::fs::File;
+//
+// use itertools::Itertools;
+// use syntax::ast;
+//
+// use crate::utils::insert_use;
+//
+// use self::auto_import;
+//
+// use super::AssistContext;
+// ```
+//
+// .Import Granularity
+//
+// It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
+// It has the following configurations:
+//
+// - `crate`: Merge imports from the same crate into a single use statement. This kind of
+// nesting is only supported in Rust versions later than 1.24.
+// - `module`: Merge imports from the same module into a single use statement.
+// - `item`: Don't merge imports at all, creating one import per item.
+// - `preserve`: Do not change the granularity of any imports. For auto-import this has the same
+// effect as `item`.
+//
+// In `VS Code` the configuration for this is `rust-analyzer.imports.granularity.group`.
+//
+// .Import Prefix
+//
+// The style of imports in the same crate is configurable through the `imports.prefix` setting.
+// It has the following configurations:
+//
+// - `crate`: This setting will force paths to be always absolute, starting with the `crate`
+// prefix, unless the item is defined outside of the current crate.
+// - `self`: This setting will force paths that are relative to the current module to always
+// start with `self`. This will result in paths that always start with either `crate`, `self`,
+// `super` or an extern crate identifier.
+// - `plain`: This setting does not impose any restrictions in imports.
+//
+// In `VS Code` the configuration for this is `rust-analyzer.imports.prefix`.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020673-b85be580-917a-11eb-9022-59585f35d4f8.gif[]
+
+// Assist: auto_import
+//
+// If the name is unresolved, provides all possible imports for it.
+//
+// ```
+// fn main() {
+// let map = HashMap$0::new();
+// }
+// # pub mod std { pub mod collections { pub struct HashMap { } } }
+// ```
+// ->
+// ```
+// use std::collections::HashMap;
+//
+// fn main() {
+// let map = HashMap::new();
+// }
+// # pub mod std { pub mod collections { pub struct HashMap { } } }
+// ```
+pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
+ let mut proposed_imports =
+ import_assets.search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind);
+ if proposed_imports.is_empty() {
+ return None;
+ }
+
+ let range = match &syntax_under_caret {
+ NodeOrToken::Node(node) => ctx.sema.original_range(node).range,
+ NodeOrToken::Token(token) => token.text_range(),
+ };
+ let group_label = group_label(import_assets.import_candidate());
+ let scope = ImportScope::find_insert_use_container(
+ &match syntax_under_caret {
+ NodeOrToken::Node(it) => it,
+ NodeOrToken::Token(it) => it.parent()?,
+ },
+ &ctx.sema,
+ )?;
+
+ // we aren't interested in different namespaces
+ proposed_imports.dedup_by(|a, b| a.import_path == b.import_path);
+
+ let current_node = match ctx.covering_element() {
+ NodeOrToken::Node(node) => Some(node),
+ NodeOrToken::Token(token) => token.parent(),
+ };
+
+ let current_module =
+ current_node.as_ref().and_then(|node| ctx.sema.scope(node)).map(|scope| scope.module());
+
+ // prioritize more relevant imports
+ proposed_imports
+ .sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref())));
+
+ for import in proposed_imports {
+ acc.add_group(
+ &group_label,
+ AssistId("auto_import", AssistKind::QuickFix),
+ format!("Import `{}`", import.import_path),
+ range,
+ |builder| {
+ let scope = match scope.clone() {
+ ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
+ };
+ insert_use(&scope, mod_path_to_ast(&import.import_path), &ctx.config.insert_use);
+ },
+ );
+ }
+ Some(())
+}
+
+pub(super) fn find_importable_node(
+ ctx: &AssistContext<'_>,
+) -> Option<(ImportAssets, SyntaxElement)> {
+ if let Some(path_under_caret) = ctx.find_node_at_offset_with_descend::<ast::Path>() {
+ ImportAssets::for_exact_path(&path_under_caret, &ctx.sema)
+ .zip(Some(path_under_caret.syntax().clone().into()))
+ } else if let Some(method_under_caret) =
+ ctx.find_node_at_offset_with_descend::<ast::MethodCallExpr>()
+ {
+ ImportAssets::for_method_call(&method_under_caret, &ctx.sema)
+ .zip(Some(method_under_caret.syntax().clone().into()))
+ } else if let Some(pat) = ctx
+ .find_node_at_offset_with_descend::<ast::IdentPat>()
+ .filter(ast::IdentPat::is_simple_ident)
+ {
+ ImportAssets::for_ident_pat(&ctx.sema, &pat).zip(Some(pat.syntax().clone().into()))
+ } else {
+ None
+ }
+}
+
+fn group_label(import_candidate: &ImportCandidate) -> GroupLabel {
+ let name = match import_candidate {
+ ImportCandidate::Path(candidate) => format!("Import {}", candidate.name.text()),
+ ImportCandidate::TraitAssocItem(candidate) => {
+ format!("Import a trait for item {}", candidate.assoc_item_name.text())
+ }
+ ImportCandidate::TraitMethod(candidate) => {
+ format!("Import a trait for method {}", candidate.assoc_item_name.text())
+ }
+ };
+ GroupLabel(name)
+}
+
+/// Determine how relevant a given import is in the current context. Higher scores are more
+/// relevant.
+fn relevance_score(
+ ctx: &AssistContext<'_>,
+ import: &LocatedImport,
+ current_module: Option<&Module>,
+) -> i32 {
+ let mut score = 0;
+
+ let db = ctx.db();
+
+ let item_module = match import.item_to_import {
+ hir::ItemInNs::Types(item) | hir::ItemInNs::Values(item) => item.module(db),
+ hir::ItemInNs::Macros(makro) => Some(makro.module(db)),
+ };
+
+ match item_module.zip(current_module) {
+ // get the distance between the imported path and the current module
+ // (prefer items that are more local)
+ Some((item_module, current_module)) => {
+ score -= module_distance_hueristic(db, &current_module, &item_module) as i32;
+ }
+
+ // could not find relevant modules, so just use the length of the path as an estimate
+ None => return -(2 * import.import_path.len() as i32),
+ }
+
+ score
+}
+
+/// A heuristic that gives a higher score to modules that are more separated.
+fn module_distance_hueristic(db: &dyn HirDatabase, current: &Module, item: &Module) -> usize {
+ // get the path starting from the item to the respective crate roots
+ let mut current_path = current.path_to_root(db);
+ let mut item_path = item.path_to_root(db);
+
+ // we want paths going from the root to the item
+ current_path.reverse();
+ item_path.reverse();
+
+ // length of the common prefix of the two paths
+ let prefix_length = current_path.iter().zip(&item_path).take_while(|(a, b)| a == b).count();
+
+ // how many modules differ between the two paths (all modules, removing any duplicates)
+ let distinct_length = current_path.len() + item_path.len() - 2 * prefix_length;
+
+ // cost of importing from another crate
+ let crate_boundary_cost = if current.krate() == item.krate() {
+ 0
+ } else if item.krate().is_builtin(db) {
+ 2
+ } else {
+ 4
+ };
+
+ distinct_length + crate_boundary_cost
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use hir::Semantics;
+ use ide_db::{
+ assists::AssistResolveStrategy,
+ base_db::{fixture::WithFixture, FileRange},
+ RootDatabase,
+ };
+
+ use crate::tests::{
+ check_assist, check_assist_not_applicable, check_assist_target, TEST_CONFIG,
+ };
+
+    /// Runs the `auto_import` assist on the `before` fixture and asserts that
+    /// the offered assist labels appear exactly in `order` (most relevant
+    /// suggestion first), verifying the relevance-scoring above.
+    fn check_auto_import_order(before: &str, order: &[&str]) {
+        let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
+        let frange = FileRange { file_id, range: range_or_offset.into() };
+
+        let sema = Semantics::new(&db);
+        let config = TEST_CONFIG;
+        let ctx = AssistContext::new(sema, &config, frange);
+        let mut acc = Assists::new(&ctx, AssistResolveStrategy::All);
+        auto_import(&mut acc, &ctx);
+        let assists = acc.finish();
+
+        let labels = assists.iter().map(|assist| assist.label.to_string()).collect::<Vec<_>>();
+
+        assert_eq!(labels, order);
+    }
+
+ #[test]
+ fn prefer_shorter_paths() {
+ let before = r"
+//- /main.rs crate:main deps:foo,bar
+HashMap$0::new();
+
+//- /lib.rs crate:foo
+pub mod collections { pub struct HashMap; }
+
+//- /lib.rs crate:bar
+pub mod collections { pub mod hash_map { pub struct HashMap; } }
+ ";
+
+ check_auto_import_order(
+ before,
+ &["Import `foo::collections::HashMap`", "Import `bar::collections::hash_map::HashMap`"],
+ )
+ }
+
+ #[test]
+ fn prefer_same_crate() {
+ let before = r"
+//- /main.rs crate:main deps:foo
+HashMap$0::new();
+
+mod collections {
+ pub mod hash_map {
+ pub struct HashMap;
+ }
+}
+
+//- /lib.rs crate:foo
+pub struct HashMap;
+ ";
+
+ check_auto_import_order(
+ before,
+ &["Import `collections::hash_map::HashMap`", "Import `foo::HashMap`"],
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_scope_inside_macro() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+mod bar {
+ pub struct Baz;
+}
+macro_rules! foo {
+ ($it:ident) => {
+ mod __ {
+ fn __(x: $it) {}
+ }
+ };
+}
+foo! {
+ Baz$0
+}
+",
+ );
+ }
+
+ #[test]
+ fn applicable_in_attributes() {
+ check_assist(
+ auto_import,
+ r"
+//- proc_macros: identity
+#[proc_macros::identity]
+mod foo {
+ mod bar {
+ const _: Baz$0 = ();
+ }
+}
+mod baz {
+ pub struct Baz;
+}
+",
+ r"
+#[proc_macros::identity]
+mod foo {
+ mod bar {
+ use crate::baz::Baz;
+
+ const _: Baz = ();
+ }
+}
+mod baz {
+ pub struct Baz;
+}
+",
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_an_import_partial() {
+ check_assist(
+ auto_import,
+ r"
+ mod std {
+ pub mod fmt {
+ pub struct Formatter;
+ }
+ }
+
+ use std::fmt;
+
+ $0Formatter
+ ",
+ r"
+ mod std {
+ pub mod fmt {
+ pub struct Formatter;
+ }
+ }
+
+ use std::fmt::{self, Formatter};
+
+ Formatter
+ ",
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_an_import() {
+ check_assist(
+ auto_import,
+ r"
+ $0PubStruct
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ r"
+ use PubMod::PubStruct;
+
+ PubStruct
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_an_import_in_macros() {
+ check_assist(
+ auto_import,
+ r"
+ macro_rules! foo {
+ ($i:ident) => { fn foo(a: $i) {} }
+ }
+ foo!(Pub$0Struct);
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ r"
+ use PubMod::PubStruct;
+
+ macro_rules! foo {
+ ($i:ident) => { fn foo(a: $i) {} }
+ }
+ foo!(PubStruct);
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_multiple_imports() {
+ check_assist(
+ auto_import,
+ r"
+ PubSt$0ruct
+
+ pub mod PubMod1 {
+ pub struct PubStruct;
+ }
+ pub mod PubMod2 {
+ pub struct PubStruct;
+ }
+ pub mod PubMod3 {
+ pub struct PubStruct;
+ }
+ ",
+ r"
+ use PubMod3::PubStruct;
+
+ PubStruct
+
+ pub mod PubMod1 {
+ pub struct PubStruct;
+ }
+ pub mod PubMod2 {
+ pub struct PubStruct;
+ }
+ pub mod PubMod3 {
+ pub struct PubStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_already_imported_types() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ use PubMod::PubStruct;
+
+ PubStruct$0
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_types_with_private_paths() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ PrivateStruct$0
+
+ pub mod PubMod {
+ struct PrivateStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_when_no_imports_found() {
+ check_assist_not_applicable(
+ auto_import,
+ "
+ PubStruct$0",
+ );
+ }
+
+ #[test]
+ fn function_import() {
+ check_assist(
+ auto_import,
+ r"
+ test_function$0
+
+ pub mod PubMod {
+ pub fn test_function() {};
+ }
+ ",
+ r"
+ use PubMod::test_function;
+
+ test_function
+
+ pub mod PubMod {
+ pub fn test_function() {};
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn macro_import() {
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:crate_with_macro
+#[macro_export]
+macro_rules! foo {
+ () => ()
+}
+
+//- /main.rs crate:main deps:crate_with_macro
+fn main() {
+ foo$0
+}
+",
+ r"use crate_with_macro::foo;
+
+fn main() {
+ foo
+}
+",
+ );
+ }
+
+ #[test]
+ fn auto_import_target() {
+ check_assist_target(
+ auto_import,
+ r"
+ struct AssistInfo {
+ group_label: Option<$0GroupLabel>,
+ }
+
+ mod m { pub struct GroupLabel; }
+ ",
+ "GroupLabel",
+ )
+ }
+
+ #[test]
+ fn not_applicable_when_path_start_is_imported() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ pub mod mod1 {
+ pub mod mod2 {
+ pub mod mod3 {
+ pub struct TestStruct;
+ }
+ }
+ }
+
+ use mod1::mod2;
+ fn main() {
+ mod2::mod3::TestStruct$0
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_function() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ pub mod test_mod {
+ pub fn test_function() {}
+ }
+
+ use test_mod::test_function;
+ fn main() {
+ test_function$0
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn associated_struct_function() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ pub fn test_function() {}
+ }
+ }
+
+ fn main() {
+ TestStruct::test_function$0
+ }
+ ",
+ r"
+ use test_mod::TestStruct;
+
+ mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ pub fn test_function() {}
+ }
+ }
+
+ fn main() {
+ TestStruct::test_function
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn associated_struct_const() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ TestStruct::TEST_CONST$0
+ }
+ ",
+ r"
+ use test_mod::TestStruct;
+
+ mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ TestStruct::TEST_CONST
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn associated_trait_function() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::test_function$0
+ }
+ ",
+ r"
+ use test_mod::TestTrait;
+
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::test_function
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_function() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub trait TestTrait2 {
+ fn test_function();
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ fn test_function() {}
+ }
+ impl TestTrait for TestEnum {
+ fn test_function() {}
+ }
+ }
+
+ use test_mod::TestTrait2;
+ fn main() {
+ test_mod::TestEnum::test_function$0;
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn associated_trait_const() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::TEST_CONST$0
+ }
+ ",
+ r"
+ use test_mod::TestTrait;
+
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::TEST_CONST
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_const() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub trait TestTrait2 {
+ const TEST_CONST: f64;
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ const TEST_CONST: f64 = 42.0;
+ }
+ impl TestTrait for TestEnum {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ use test_mod::TestTrait2;
+ fn main() {
+ test_mod::TestEnum::TEST_CONST$0;
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn trait_method() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+
+ fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ ",
+ r"
+ use test_mod::TestTrait;
+
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+
+ fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_method()
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn trait_method_cross_crate() {
+ check_assist(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait;
+
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_method()
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn assoc_fn_cross_crate() {
+ check_assist(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::test_func$0tion
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait;
+
+ fn main() {
+ dep::test_mod::TestStruct::test_function
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn assoc_const_cross_crate() {
+ check_assist(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::CONST$0
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const CONST: bool;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const CONST: bool = true;
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait;
+
+ fn main() {
+ dep::test_mod::TestStruct::CONST
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn assoc_fn_as_method_cross_crate() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_func$0tion()
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn private_trait_cross_crate() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_method() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub trait TestTrait2 {
+ fn test_method(&self);
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ fn test_method(&self) {}
+ }
+ impl TestTrait for TestEnum {
+ fn test_method(&self) {}
+ }
+ }
+
+ use test_mod::TestTrait2;
+ fn main() {
+ let one = test_mod::TestEnum::One;
+ one.test$0_method();
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn dep_import() {
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:dep
+pub struct Struct;
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Struct$0
+}
+",
+ r"use dep::Struct;
+
+fn main() {
+ Struct
+}
+",
+ );
+ }
+
+ #[test]
+ fn whole_segment() {
+ // Tests that only imports whose last segment matches the identifier get suggested.
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:dep
+pub mod fmt {
+ pub trait Display {}
+}
+
+pub fn panic_fmt() {}
+
+//- /main.rs crate:main deps:dep
+struct S;
+
+impl f$0mt::Display for S {}
+",
+ r"use dep::fmt;
+
+struct S;
+
+impl fmt::Display for S {}
+",
+ );
+ }
+
+ #[test]
+ fn macro_generated() {
+ // Tests that macro-generated items are suggested from external crates.
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:dep
+macro_rules! mac {
+ () => {
+ pub struct Cheese;
+ };
+}
+
+mac!();
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Cheese$0;
+}
+",
+ r"use dep::Cheese;
+
+fn main() {
+ Cheese;
+}
+",
+ );
+ }
+
+ #[test]
+ fn casing() {
+ // Tests that differently cased names don't interfere and we only suggest the matching one.
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:dep
+pub struct FMT;
+pub struct fmt;
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ FMT$0;
+}
+",
+ r"use dep::FMT;
+
+fn main() {
+ FMT;
+}
+",
+ );
+ }
+
+ #[test]
+ fn inner_items() {
+ check_assist(
+ auto_import,
+ r#"
+mod baz {
+ pub struct Foo {}
+}
+
+mod bar {
+ fn bar() {
+ Foo$0;
+ println!("Hallo");
+ }
+}
+"#,
+ r#"
+mod baz {
+ pub struct Foo {}
+}
+
+mod bar {
+ use crate::baz::Foo;
+
+ fn bar() {
+ Foo;
+ println!("Hallo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn uses_abs_path_with_extern_crate_clash() {
+ cov_mark::check!(ambiguous_crate_start);
+ check_assist(
+ auto_import,
+ r#"
+//- /main.rs crate:main deps:foo
+mod foo {}
+
+const _: () = {
+ Foo$0
+};
+//- /foo.rs crate:foo
+pub struct Foo
+"#,
+ r#"
+use ::foo::Foo;
+
+mod foo {}
+
+const _: () = {
+ Foo
+};
+"#,
+ );
+ }
+
+ #[test]
+ fn works_on_ident_patterns() {
+ check_assist(
+ auto_import,
+ r#"
+mod foo {
+ pub struct Foo {}
+}
+fn foo() {
+ let Foo$0;
+}
+"#,
+ r#"
+use foo::Foo;
+
+mod foo {
+ pub struct Foo {}
+}
+fn foo() {
+ let Foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_derives() {
+ check_assist(
+ auto_import,
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(Copy$0)]
+struct Foo;
+"#,
+ r#"
+use foo::Copy;
+
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(Copy)]
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_use_start() {
+ check_assist(
+ auto_import,
+ r#"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use foo$0::Foo;
+"#,
+ r#"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use bar::foo;
+use foo::Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_in_non_start_use() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use foo::Foo$0;
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
new file mode 100644
index 000000000..2b1d8f6f0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
@@ -0,0 +1,216 @@
+use syntax::{
+ ast::{self, HasName, HasVisibility},
+ AstNode,
+ SyntaxKind::{
+ CONST, ENUM, FN, MACRO_DEF, MODULE, STATIC, STRUCT, TRAIT, TYPE_ALIAS, USE, VISIBILITY,
+ },
+ T,
+};
+
+use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: change_visibility
+//
+// Adds or changes existing visibility specifier.
+//
+// ```
+// $0fn frobnicate() {}
+// ```
+// ->
+// ```
+// pub(crate) fn frobnicate() {}
+// ```
+pub(crate) fn change_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // Cursor on an existing visibility specifier: toggle it (pub <-> pub(crate)).
+    if let Some(vis) = ctx.find_node_at_offset::<ast::Visibility>() {
+        return change_vis(acc, vis);
+    }
+    // Otherwise offer to add a fresh `pub(crate)` to the item under the cursor.
+    add_vis(acc, ctx)
+}
+
+/// Offers "Change visibility to pub(crate)" when the cursor is on the
+/// defining keyword of an item, on a record-field name, or on a tuple field,
+/// and that target has no visibility specifier yet.
+fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // Case 1: cursor touches an item-defining keyword (`fn`, `struct`, ...).
+    let item_keyword = ctx.token_at_offset().find(|leaf| {
+        matches!(
+            leaf.kind(),
+            T![const]
+                | T![static]
+                | T![fn]
+                | T![mod]
+                | T![struct]
+                | T![enum]
+                | T![trait]
+                | T![type]
+                | T![use]
+                | T![macro]
+        )
+    });
+
+    // Compute where to insert the specifier and what range to highlight.
+    let (offset, target) = if let Some(keyword) = item_keyword {
+        let parent = keyword.parent()?;
+        let def_kws =
+            vec![CONST, STATIC, TYPE_ALIAS, FN, MODULE, STRUCT, ENUM, TRAIT, USE, MACRO_DEF];
+        // Parent is not a definition, can't add visibility
+        if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
+            return None;
+        }
+        // Already have visibility, do nothing
+        if parent.children().any(|child| child.kind() == VISIBILITY) {
+            return None;
+        }
+        (vis_offset(&parent), keyword.text_range())
+    } else if let Some(field_name) = ctx.find_node_at_offset::<ast::Name>() {
+        // Case 2: cursor on the name of a record (named) struct field.
+        let field = field_name.syntax().ancestors().find_map(ast::RecordField::cast)?;
+        // Reject names that merely occur *inside* a field (e.g. a binding in a
+        // const expression embedded in the field's type).
+        if field.name()? != field_name {
+            cov_mark::hit!(change_visibility_field_false_positive);
+            return None;
+        }
+        if field.visibility().is_some() {
+            return None;
+        }
+        (vis_offset(field.syntax()), field_name.syntax().text_range())
+    } else if let Some(field) = ctx.find_node_at_offset::<ast::TupleField>() {
+        // Case 3: cursor on a tuple-struct field.
+        if field.visibility().is_some() {
+            return None;
+        }
+        (vis_offset(field.syntax()), field.syntax().text_range())
+    } else {
+        return None;
+    };
+
+    acc.add(
+        AssistId("change_visibility", AssistKind::RefactorRewrite),
+        "Change visibility to pub(crate)",
+        target,
+        |edit| {
+            edit.insert(offset, "pub(crate) ");
+        },
+    )
+}
+
+/// Toggles an existing visibility specifier: `pub` becomes `pub(crate)` and
+/// vice versa. Any other specifier (e.g. `pub(super)`, `pub(in ...)`) is left
+/// untouched and the assist is not offered.
+///
+/// Fix: the label previously read "Change Visibility to pub(crate)" (capital
+/// "V"), inconsistent with every other label in this file; the two
+/// near-identical branches are also collapsed into one.
+fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> {
+    // Map the current visibility text to its replacement.
+    let new_vis = if vis.syntax().text() == "pub" {
+        "pub(crate)"
+    } else if vis.syntax().text() == "pub(crate)" {
+        "pub"
+    } else {
+        return None;
+    };
+    let target = vis.syntax().text_range();
+    acc.add(
+        AssistId("change_visibility", AssistKind::RefactorRewrite),
+        format!("Change visibility to {}", new_vis),
+        target,
+        |edit| {
+            edit.replace(target, new_vis);
+        },
+    )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn change_visibility_adds_pub_crate_to_items() {
+ check_assist(change_visibility, "$0fn foo() {}", "pub(crate) fn foo() {}");
+ check_assist(change_visibility, "f$0n foo() {}", "pub(crate) fn foo() {}");
+ check_assist(change_visibility, "$0struct Foo {}", "pub(crate) struct Foo {}");
+ check_assist(change_visibility, "$0mod foo {}", "pub(crate) mod foo {}");
+ check_assist(change_visibility, "$0trait Foo {}", "pub(crate) trait Foo {}");
+ check_assist(change_visibility, "m$0od {}", "pub(crate) mod {}");
+ check_assist(change_visibility, "unsafe f$0n foo() {}", "pub(crate) unsafe fn foo() {}");
+ check_assist(change_visibility, "$0macro foo() {}", "pub(crate) macro foo() {}");
+ check_assist(change_visibility, "$0use foo;", "pub(crate) use foo;");
+ }
+
+ #[test]
+ fn change_visibility_works_with_struct_fields() {
+ check_assist(
+ change_visibility,
+ r"struct S { $0field: u32 }",
+ r"struct S { pub(crate) field: u32 }",
+ );
+ check_assist(change_visibility, r"struct S ( $0u32 )", r"struct S ( pub(crate) u32 )");
+ }
+
+ #[test]
+ fn change_visibility_field_false_positive() {
+ cov_mark::check!(change_visibility_field_false_positive);
+ check_assist_not_applicable(
+ change_visibility,
+ r"struct S { field: [(); { let $0x = ();}] }",
+ )
+ }
+
+ #[test]
+ fn change_visibility_pub_to_pub_crate() {
+ check_assist(change_visibility, "$0pub fn foo() {}", "pub(crate) fn foo() {}")
+ }
+
+ #[test]
+ fn change_visibility_pub_crate_to_pub() {
+ check_assist(change_visibility, "$0pub(crate) fn foo() {}", "pub fn foo() {}")
+ }
+
+ #[test]
+ fn change_visibility_const() {
+ check_assist(change_visibility, "$0const FOO = 3u8;", "pub(crate) const FOO = 3u8;");
+ }
+
+ #[test]
+ fn change_visibility_static() {
+ check_assist(change_visibility, "$0static FOO = 3u8;", "pub(crate) static FOO = 3u8;");
+ }
+
+ #[test]
+ fn change_visibility_type_alias() {
+ check_assist(change_visibility, "$0type T = ();", "pub(crate) type T = ();");
+ }
+
+ #[test]
+ fn change_visibility_handles_comment_attrs() {
+ check_assist(
+ change_visibility,
+ r"
+ /// docs
+
+ // comments
+
+ #[derive(Debug)]
+ $0struct Foo;
+ ",
+ r"
+ /// docs
+
+ // comments
+
+ #[derive(Debug)]
+ pub(crate) struct Foo;
+ ",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_enum_variants() {
+ check_assist_not_applicable(
+ change_visibility,
+ r"mod foo { pub enum Foo {Foo1} }
+ fn main() { foo::Foo::Foo1$0 } ",
+ );
+ }
+
+ #[test]
+ fn change_visibility_target() {
+ check_assist_target(change_visibility, "$0fn foo() {}", "fn");
+ check_assist_target(change_visibility, "pub(crate)$0 fn foo() {}", "pub(crate)");
+ check_assist_target(change_visibility, "struct S { $0field: u32 }", "field");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs
new file mode 100644
index 000000000..db96ad330
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs
@@ -0,0 +1,575 @@
+use hir::{known, AsAssocItem, Semantics};
+use ide_db::{
+ famous_defs::FamousDefs,
+ syntax_helpers::node_ext::{
+ block_as_lone_tail, for_each_tail_expr, is_pattern_cond, preorder_expr,
+ },
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, edit::AstNodeEdit, make, HasArgList},
+ ted, AstNode, SyntaxNode,
+};
+
+use crate::{
+ utils::{invert_boolean_expression, unwrap_trivial_block},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: convert_if_to_bool_then
+//
+// Converts an if expression into a corresponding `bool::then` call.
+//
+// ```
+// # //- minicore: option
+// fn main() {
+// if$0 cond {
+// Some(val)
+// } else {
+// None
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// cond.then(|| val)
+// }
+// ```
+pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // FIXME applies to match as well
+    let expr = ctx.find_node_at_offset::<ast::IfExpr>()?;
+    // Only offer the assist while the cursor is on the `if` keyword itself.
+    if !expr.if_token()?.text_range().contains_inclusive(ctx.offset()) {
+        return None;
+    }
+
+    // `if let` conditions cannot become a `bool` receiver for `.then(..)`.
+    let cond = expr.condition().filter(|cond| !is_pattern_cond(cond.clone()))?;
+    let then = expr.then_branch()?;
+    let else_ = match expr.else_branch()? {
+        ast::ElseBranch::Block(b) => b,
+        ast::ElseBranch::IfExpr(_) => {
+            // `if .. else if ..` chains are not convertible.
+            cov_mark::hit!(convert_if_to_bool_then_chain);
+            return None;
+        }
+    };
+
+    let (none_variant, some_variant) = option_variants(&ctx.sema, expr.syntax())?;
+
+    // Exactly one branch must be a lone `None`; the other branch becomes the
+    // closure body. If the `then` branch is the `None` one, invert the cond.
+    let (invert_cond, closure_body) = match (
+        block_is_none_variant(&ctx.sema, &then, none_variant),
+        block_is_none_variant(&ctx.sema, &else_, none_variant),
+    ) {
+        (invert @ true, false) => (invert, ast::Expr::BlockExpr(else_)),
+        (invert @ false, true) => (invert, ast::Expr::BlockExpr(then)),
+        _ => return None,
+    };
+
+    // Bail if the body contains `return`/`?` or a tail that is not `Some(..)`.
+    if is_invalid_body(&ctx.sema, some_variant, &closure_body) {
+        cov_mark::hit!(convert_if_to_bool_then_pattern_invalid_body);
+        return None;
+    }
+
+    let target = expr.syntax().text_range();
+    acc.add(
+        AssistId("convert_if_to_bool_then", AssistKind::RefactorRewrite),
+        "Convert `if` expression to `bool::then` call",
+        target,
+        |builder| {
+            let closure_body = closure_body.clone_for_update();
+            // Rewrite all `Some(e)` in tail position to `e`
+            // (collect first: mutating during the traversal would invalidate it).
+            let mut replacements = Vec::new();
+            for_each_tail_expr(&closure_body, &mut |e| {
+                let e = match e {
+                    ast::Expr::BreakExpr(e) => e.expr(),
+                    e @ ast::Expr::CallExpr(_) => Some(e.clone()),
+                    _ => None,
+                };
+                if let Some(ast::Expr::CallExpr(call)) = e {
+                    if let Some(arg_list) = call.arg_list() {
+                        if let Some(arg) = arg_list.args().next() {
+                            replacements.push((call.syntax().clone(), arg.syntax().clone()));
+                        }
+                    }
+                }
+            });
+            replacements.into_iter().for_each(|(old, new)| ted::replace(old, new));
+            let closure_body = match closure_body {
+                ast::Expr::BlockExpr(block) => unwrap_trivial_block(block),
+                e => e,
+            };
+
+            // These expression kinds would not parse correctly as a method-call
+            // receiver, so the condition must be wrapped in parentheses first.
+            let parenthesize = matches!(
+                cond,
+                ast::Expr::BinExpr(_)
+                    | ast::Expr::BlockExpr(_)
+                    | ast::Expr::BoxExpr(_)
+                    | ast::Expr::BreakExpr(_)
+                    | ast::Expr::CastExpr(_)
+                    | ast::Expr::ClosureExpr(_)
+                    | ast::Expr::ContinueExpr(_)
+                    | ast::Expr::ForExpr(_)
+                    | ast::Expr::IfExpr(_)
+                    | ast::Expr::LoopExpr(_)
+                    | ast::Expr::MacroExpr(_)
+                    | ast::Expr::MatchExpr(_)
+                    | ast::Expr::PrefixExpr(_)
+                    | ast::Expr::RangeExpr(_)
+                    | ast::Expr::RefExpr(_)
+                    | ast::Expr::ReturnExpr(_)
+                    | ast::Expr::WhileExpr(_)
+                    | ast::Expr::YieldExpr(_)
+            );
+            let cond = if invert_cond { invert_boolean_expression(cond) } else { cond };
+            let cond = if parenthesize { make::expr_paren(cond) } else { cond };
+            let arg_list = make::arg_list(Some(make::expr_closure(None, closure_body)));
+            let mcall = make::expr_method_call(cond, make::name_ref("then"), arg_list);
+            builder.replace(target, mcall.to_string());
+        },
+    )
+}
+
+// Assist: convert_bool_then_to_if
+//
+// Converts a `bool::then` method call to an equivalent if expression.
+//
+// ```
+// # //- minicore: bool_impl
+// fn main() {
+// (0 == 0).then$0(|| val)
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// if 0 == 0 {
+// Some(val)
+// } else {
+// None
+// }
+// }
+// ```
+pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let name_ref = ctx.find_node_at_offset::<ast::NameRef>()?;
+    let mcall = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?;
+    let receiver = mcall.receiver()?;
+    // The call must have exactly one argument, and it must be a closure; its
+    // body becomes the `then` branch of the generated `if`.
+    let closure_body = mcall.arg_list()?.args().exactly_one().ok()?;
+    let closure_body = match closure_body {
+        ast::Expr::ClosureExpr(expr) => expr.body()?,
+        _ => return None,
+    };
+    // Verify this is `bool::then` that is being called.
+    let func = ctx.sema.resolve_method_call(&mcall)?;
+    if func.name(ctx.sema.db).to_string() != "then" {
+        return None;
+    }
+    let assoc = func.as_assoc_item(ctx.sema.db)?;
+    match assoc.container(ctx.sema.db) {
+        hir::AssocItemContainer::Impl(impl_) if impl_.self_ty(ctx.sema.db).is_bool() => {}
+        _ => return None,
+    }
+
+    let target = mcall.syntax().text_range();
+    acc.add(
+        AssistId("convert_bool_then_to_if", AssistKind::RefactorRewrite),
+        "Convert `bool::then` call to `if`",
+        target,
+        |builder| {
+            // Normalize the closure body into a block expression.
+            let closure_body = match closure_body {
+                ast::Expr::BlockExpr(block) => block,
+                e => make::block_expr(None, Some(e)),
+            };
+
+            let closure_body = closure_body.clone_for_update();
+            // Wrap all tails in `Some(...)`
+            // (collect first: mutating during the traversal would invalidate it).
+            let none_path = make::expr_path(make::ext::ident_path("None"));
+            let some_path = make::expr_path(make::ext::ident_path("Some"));
+            let mut replacements = Vec::new();
+            for_each_tail_expr(&ast::Expr::BlockExpr(closure_body.clone()), &mut |e| {
+                let e = match e {
+                    ast::Expr::BreakExpr(e) => e.expr(),
+                    ast::Expr::ReturnExpr(e) => e.expr(),
+                    _ => Some(e.clone()),
+                };
+                if let Some(expr) = e {
+                    replacements.push((
+                        expr.syntax().clone(),
+                        make::expr_call(some_path.clone(), make::arg_list(Some(expr)))
+                            .syntax()
+                            .clone_for_update(),
+                    ));
+                }
+            });
+            replacements.into_iter().for_each(|(old, new)| ted::replace(old, new));
+
+            // A parenthesized receiver like `(0 == 0)` does not need the
+            // parentheses as an `if` condition; unwrap one level.
+            let cond = match &receiver {
+                ast::Expr::ParenExpr(expr) => expr.expr().unwrap_or(receiver),
+                _ => receiver,
+            };
+            let if_expr = make::expr_if(
+                cond,
+                closure_body.reset_indent(),
+                Some(ast::ElseBranch::Block(make::block_expr(None, Some(none_path)))),
+            )
+            .indent(mcall.indent_level());
+
+            builder.replace(target, if_expr.to_string());
+        },
+    )
+}
+
+/// Looks up `core`'s `Option` in the scope of `expr` and returns its two
+/// variants ordered as `(None, Some)`. Returns `None` if `Option` cannot be
+/// resolved or does not have exactly two variants.
+fn option_variants(
+    sema: &Semantics<'_, RootDatabase>,
+    expr: &SyntaxNode,
+) -> Option<(hir::Variant, hir::Variant)> {
+    let fam = FamousDefs(sema, sema.scope(expr)?.krate());
+    let option_variants = fam.core_option_Option()?.variants(sema.db);
+    match &*option_variants {
+        // Normalize the pair so the `None` variant always comes first.
+        &[variant0, variant1] => Some(if variant0.name(sema.db) == known::None {
+            (variant0, variant1)
+        } else {
+            (variant1, variant0)
+        }),
+        _ => None,
+    }
+}
+
+/// Traverses the expression checking if it contains `return` or `?` expressions or if any tail is not a `Some(expr)` expression.
+/// If any of these conditions are met it is impossible to rewrite this as a `bool::then` call.
+/// Traverses the expression checking if it contains `return` or `?` expressions or if any tail is not a `Some(expr)` expression.
+/// If any of these conditions are met it is impossible to rewrite this as a `bool::then` call.
+fn is_invalid_body(
+    sema: &Semantics<'_, RootDatabase>,
+    some_variant: hir::Variant,
+    expr: &ast::Expr,
+) -> bool {
+    let mut invalid = false;
+    // Pass 1: any `?` or `return` anywhere in the body rules out the rewrite.
+    preorder_expr(expr, &mut |e| {
+        invalid |=
+            matches!(e, syntax::WalkEvent::Enter(ast::Expr::TryExpr(_) | ast::Expr::ReturnExpr(_)));
+        invalid
+    });
+    if !invalid {
+        // Pass 2: every tail expression must be a call of the `Some` variant.
+        for_each_tail_expr(expr, &mut |e| {
+            if invalid {
+                return;
+            }
+            let e = match e {
+                ast::Expr::BreakExpr(e) => e.expr(),
+                e @ ast::Expr::CallExpr(_) => Some(e.clone()),
+                _ => None,
+            };
+            if let Some(ast::Expr::CallExpr(call)) = e {
+                if let Some(ast::Expr::PathExpr(p)) = call.expr() {
+                    let res = p.path().and_then(|p| sema.resolve_path(&p));
+                    if let Some(hir::PathResolution::Def(hir::ModuleDef::Variant(v))) = res {
+                        // `invalid |= ..` evaluates to `()`, so this single
+                        // statement records the check AND exits the closure.
+                        return invalid |= v != some_variant;
+                    }
+                }
+            }
+            // Tail is not a plain enum-variant call at all: invalid.
+            invalid = true
+        });
+    }
+    invalid
+}
+
+/// Returns `true` iff `block` consists of nothing but a single tail path
+/// expression that resolves to `none_variant` (i.e. the block is a lone
+/// `None`).
+fn block_is_none_variant(
+    sema: &Semantics<'_, RootDatabase>,
+    block: &ast::BlockExpr,
+    none_variant: hir::Variant,
+) -> bool {
+    block_as_lone_tail(block).and_then(|e| match e {
+        ast::Expr::PathExpr(pat) => match sema.resolve_path(&pat.path()?)? {
+            hir::PathResolution::Def(hir::ModuleDef::Variant(v)) => Some(v),
+            _ => None,
+        },
+        _ => None,
+    }) == Some(none_variant)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn convert_if_to_bool_then_simple() {
+ check_assist(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ r"
+fn main() {
+ true.then(|| 15)
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_invert() {
+ check_assist(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ None
+ } else {
+ Some(15)
+ }
+}
+",
+ r"
+fn main() {
+ false.then(|| 15)
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_none_none() {
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ None
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_some_some() {
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ Some(15)
+ } else {
+ Some(15)
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_mixed() {
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ if true {
+ Some(15)
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_chain() {
+ cov_mark::check!(convert_if_to_bool_then_chain);
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ Some(15)
+ } else if true {
+ None
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_pattern_cond() {
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 let true = true {
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_pattern_invalid_body() {
+ cov_mark::check_count!(convert_if_to_bool_then_pattern_invalid_body, 2);
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn make_me_an_option() -> Option<i32> { None }
+fn main() {
+ if$0 true {
+ if true {
+ make_me_an_option()
+ } else {
+ Some(15)
+ }
+ } else {
+ None
+ }
+}
+",
+ );
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ if true {
+ return;
+ }
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_bool_then_to_if_inapplicable() {
+ check_assist_not_applicable(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ 0.t$0hen(|| 15);
+}
+",
+ );
+ check_assist_not_applicable(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(15);
+}
+",
+ );
+ check_assist_not_applicable(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(|| 15, 15);
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_bool_then_to_if_simple() {
+ check_assist(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(|| 15)
+}
+",
+ r"
+fn main() {
+ if true {
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ );
+ check_assist(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(|| {
+ 15
+ })
+}
+",
+ r"
+fn main() {
+ if true {
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_bool_then_to_if_tails() {
+ check_assist(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(|| {
+ loop {
+ if false {
+ break 0;
+ }
+ break 15;
+ }
+ })
+}
+",
+ r"
+fn main() {
+ if true {
+ loop {
+ if false {
+ break Some(0);
+ }
+ break Some(15);
+ }
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
new file mode 100644
index 000000000..f171dd81a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
@@ -0,0 +1,395 @@
+use itertools::Itertools;
+use syntax::{
+ ast::{self, edit::IndentLevel, Comment, CommentKind, CommentShape, Whitespace},
+ AstToken, Direction, SyntaxElement, TextRange,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: line_to_block
+//
+// Converts comments between block and single-line form.
+//
+// ```
+// // Multi-line$0
+// // comment
+// ```
+// ->
+// ```
+// /*
+// Multi-line
+// comment
+// */
+// ```
+pub(crate) fn convert_comment_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let comment = ctx.find_token_at_offset::<ast::Comment>()?;
+ // Only allow comments which are alone on their line
+ if let Some(prev) = comment.syntax().prev_token() {
+ if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
+ return None;
+ }
+ }
+
+ match comment.kind().shape {
+ ast::CommentShape::Block => block_to_line(acc, comment),
+ ast::CommentShape::Line => line_to_block(acc, comment),
+ }
+}
+
+fn block_to_line(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
+ let target = comment.syntax().text_range();
+
+ acc.add(
+ AssistId("block_to_line", AssistKind::RefactorRewrite),
+ "Replace block comment with line comments",
+ target,
+ |edit| {
+ let indentation = IndentLevel::from_token(comment.syntax());
+ let line_prefix = CommentKind { shape: CommentShape::Line, ..comment.kind() }.prefix();
+
+ let text = comment.text();
+ let text = &text[comment.prefix().len()..(text.len() - "*/".len())].trim();
+
+ let lines = text.lines().peekable();
+
+ let indent_spaces = indentation.to_string();
+ let output = lines
+ .map(|l| l.trim_start_matches(&indent_spaces))
+ .map(|l| {
+ // Don't introduce trailing whitespace
+ if l.is_empty() {
+ line_prefix.to_string()
+ } else {
+ format!("{} {}", line_prefix, l.trim_start_matches(&indent_spaces))
+ }
+ })
+ .join(&format!("\n{}", indent_spaces));
+
+ edit.replace(target, output)
+ },
+ )
+}
+
+fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
+ // Find all the comments we'll be collapsing into a block
+ let comments = relevant_line_comments(&comment);
+
+ // Establish the target of our edit based on the comments we found
+ let target = TextRange::new(
+ comments[0].syntax().text_range().start(),
+ comments.last().unwrap().syntax().text_range().end(),
+ );
+
+ acc.add(
+ AssistId("line_to_block", AssistKind::RefactorRewrite),
+ "Replace line comments with a single block comment",
+ target,
+ |edit| {
+ // We pick a single indentation level for the whole block comment based on the
+ // comment where the assist was invoked. This will be prepended to the
+ // contents of each line comment when they're put into the block comment.
+ let indentation = IndentLevel::from_token(comment.syntax());
+
+ let block_comment_body =
+ comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");
+
+ let block_prefix =
+ CommentKind { shape: CommentShape::Block, ..comment.kind() }.prefix();
+
+ let output = format!("{}\n{}\n{}*/", block_prefix, block_comment_body, indentation);
+
+ edit.replace(target, output)
+ },
+ )
+}
+
+/// The line -> block assist can be invoked from anywhere within a sequence of line comments.
+/// relevant_line_comments crawls backwards and forwards finding the complete sequence of comments that will
+/// be joined.
+fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
+ // The prefix identifies the kind of comment we're dealing with
+ let prefix = comment.prefix();
+ let same_prefix = |c: &ast::Comment| c.prefix() == prefix;
+
+ // These tokens are allowed to exist between comments
+ let skippable = |not: &SyntaxElement| {
+ not.clone()
+ .into_token()
+ .and_then(Whitespace::cast)
+ .map(|w| !w.spans_multiple_lines())
+ .unwrap_or(false)
+ };
+
+ // Find all preceding comments (in reverse order) that have the same prefix
+ let prev_comments = comment
+ .syntax()
+ .siblings_with_tokens(Direction::Prev)
+ .filter(|s| !skippable(s))
+ .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix))
+ .take_while(|opt_com| opt_com.is_some())
+ .flatten()
+ .skip(1); // skip the first element so we don't duplicate it in next_comments
+
+ let next_comments = comment
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter(|s| !skippable(s))
+ .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix))
+ .take_while(|opt_com| opt_com.is_some())
+ .flatten();
+
+ let mut comments: Vec<_> = prev_comments.collect();
+ comments.reverse();
+ comments.extend(next_comments);
+ comments
+}
+
+// Line comments usually begin with a single space character following the prefix as seen here:
+//^
+// But comments can also include indented text:
+// > Hello there
+//
+// We handle this by stripping *AT MOST* one space character from the start of the line
+// This has its own problems because it can cause alignment issues:
+//
+// /*
+// a ----> a
+//b ----> b
+// */
+//
+// But since such comments aren't idiomatic we're okay with this.
+fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
+ let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap();
+ let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix);
+
+ // Don't add the indentation if the line is empty
+ if contents.is_empty() {
+ contents.to_owned()
+ } else {
+ indentation.to_string() + contents
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn single_line_to_block() {
+ check_assist(
+ convert_comment_block,
+ r#"
+// line$0 comment
+fn main() {
+ foo();
+}
+"#,
+ r#"
+/*
+line comment
+*/
+fn main() {
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_line_to_block_indented() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ // line$0 comment
+ foo();
+}
+"#,
+ r#"
+fn main() {
+ /*
+ line comment
+ */
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiline_to_block() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ // above
+ // line$0 comment
+ //
+ // below
+ foo();
+}
+"#,
+ r#"
+fn main() {
+ /*
+ above
+ line comment
+
+ below
+ */
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn end_of_line_to_block() {
+ check_assist_not_applicable(
+ convert_comment_block,
+ r#"
+fn main() {
+ foo(); // end-of-line$0 comment
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_line_different_kinds() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ /// different prefix
+ // line$0 comment
+ // below
+ foo();
+}
+"#,
+ r#"
+fn main() {
+ /// different prefix
+ /*
+ line comment
+ below
+ */
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_line_separate_chunks() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ // different chunk
+
+ // line$0 comment
+ // below
+ foo();
+}
+"#,
+ r#"
+fn main() {
+ // different chunk
+
+ /*
+ line comment
+ below
+ */
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn doc_block_comment_to_lines() {
+ check_assist(
+ convert_comment_block,
+ r#"
+/**
+ hi$0 there
+*/
+"#,
+ r#"
+/// hi there
+"#,
+ );
+ }
+
+ #[test]
+ fn block_comment_to_lines() {
+ check_assist(
+ convert_comment_block,
+ r#"
+/*
+ hi$0 there
+*/
+"#,
+ r#"
+// hi there
+"#,
+ );
+ }
+
+ #[test]
+ fn inner_doc_block_to_lines() {
+ check_assist(
+ convert_comment_block,
+ r#"
+/*!
+ hi$0 there
+*/
+"#,
+ r#"
+//! hi there
+"#,
+ );
+ }
+
+ #[test]
+ fn block_to_lines_indent() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ /*!
+ hi$0 there
+
+ ```
+ code_sample
+ ```
+ */
+}
+"#,
+ r#"
+fn main() {
+ //! hi there
+ //!
+ //! ```
+ //! code_sample
+ //! ```
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn end_of_line_block_to_line() {
+ check_assist_not_applicable(
+ convert_comment_block,
+ r#"
+fn main() {
+ foo(); /* end-of-line$0 comment */
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs
new file mode 100644
index 000000000..9060696cd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs
@@ -0,0 +1,268 @@
+use syntax::{ast, ast::Radix, AstToken};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+
+// Assist: convert_integer_literal
+//
+// Converts the base of integer literals to other bases.
+//
+// ```
+// const _: i32 = 10$0;
+// ```
+// ->
+// ```
+// const _: i32 = 0b1010;
+// ```
+pub(crate) fn convert_integer_literal(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let literal = ctx.find_node_at_offset::<ast::Literal>()?;
+ let literal = match literal.kind() {
+ ast::LiteralKind::IntNumber(it) => it,
+ _ => return None,
+ };
+ let radix = literal.radix();
+ let value = literal.value()?;
+ let suffix = literal.suffix();
+
+ let range = literal.syntax().text_range();
+ let group_id = GroupLabel("Convert integer base".into());
+
+ for &target_radix in Radix::ALL {
+ if target_radix == radix {
+ continue;
+ }
+
+ let mut converted = match target_radix {
+ Radix::Binary => format!("0b{:b}", value),
+ Radix::Octal => format!("0o{:o}", value),
+ Radix::Decimal => value.to_string(),
+ Radix::Hexadecimal => format!("0x{:X}", value),
+ };
+
+ let label = format!("Convert {} to {}{}", literal, converted, suffix.unwrap_or_default());
+
+ // Appends the type suffix back into the new literal if it exists.
+ if let Some(suffix) = suffix {
+ converted.push_str(suffix);
+ }
+
+ acc.add_group(
+ &group_id,
+ AssistId("convert_integer_literal", AssistKind::RefactorInline),
+ label,
+ range,
+ |builder| builder.replace(range, converted),
+ );
+ }
+
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist_by_label, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn binary_target() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0b1010$0;", "0b1010");
+ }
+
+ #[test]
+ fn octal_target() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0o12$0;", "0o12");
+ }
+
+ #[test]
+ fn decimal_target() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 10$0;", "10");
+ }
+
+ #[test]
+ fn hexadecimal_target() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0xA$0;", "0xA");
+ }
+
+ #[test]
+ fn binary_target_with_underscores() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0b10_10$0;", "0b10_10");
+ }
+
+ #[test]
+ fn octal_target_with_underscores() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0o1_2$0;", "0o1_2");
+ }
+
+ #[test]
+ fn decimal_target_with_underscores() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 1_0$0;", "1_0");
+ }
+
+ #[test]
+ fn hexadecimal_target_with_underscores() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0x_A$0;", "0x_A");
+ }
+
+ #[test]
+ fn convert_decimal_integer() {
+ let before = "const _: i32 = 1000$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b1111101000;",
+ "Convert 1000 to 0b1111101000",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o1750;",
+ "Convert 1000 to 0o1750",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0x3E8;",
+ "Convert 1000 to 0x3E8",
+ );
+ }
+
+ #[test]
+ fn convert_hexadecimal_integer() {
+ let before = "const _: i32 = 0xFF$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b11111111;",
+ "Convert 0xFF to 0b11111111",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o377;",
+ "Convert 0xFF to 0o377",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 255;",
+ "Convert 0xFF to 255",
+ );
+ }
+
+ #[test]
+ fn convert_binary_integer() {
+ let before = "const _: i32 = 0b11111111$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o377;",
+ "Convert 0b11111111 to 0o377",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 255;",
+ "Convert 0b11111111 to 255",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0xFF;",
+ "Convert 0b11111111 to 0xFF",
+ );
+ }
+
+ #[test]
+ fn convert_octal_integer() {
+ let before = "const _: i32 = 0o377$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b11111111;",
+ "Convert 0o377 to 0b11111111",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 255;",
+ "Convert 0o377 to 255",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0xFF;",
+ "Convert 0o377 to 0xFF",
+ );
+ }
+
+ #[test]
+ fn convert_integer_with_underscores() {
+ let before = "const _: i32 = 1_00_0$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b1111101000;",
+ "Convert 1_00_0 to 0b1111101000",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o1750;",
+ "Convert 1_00_0 to 0o1750",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0x3E8;",
+ "Convert 1_00_0 to 0x3E8",
+ );
+ }
+
+ #[test]
+ fn convert_integer_with_suffix() {
+ let before = "const _: i32 = 1000i32$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b1111101000i32;",
+ "Convert 1000i32 to 0b1111101000i32",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o1750i32;",
+ "Convert 1000i32 to 0o1750i32",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0x3E8i32;",
+ "Convert 1000i32 to 0x3E8i32",
+ );
+ }
+
+ #[test]
+ fn convert_overflowing_literal() {
+ let before = "const _: i32 =
+ 111111111111111111111111111111111111111111111111111111111111111111111111$0;";
+ check_assist_not_applicable(convert_integer_literal, before);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
new file mode 100644
index 000000000..30f6dd41a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
@@ -0,0 +1,351 @@
+use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait};
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// FIXME: this should be a diagnostic
+
+// Assist: convert_into_to_from
+//
+// Converts an Into impl to an equivalent From impl.
+//
+// ```
+// # //- minicore: from
+// impl $0Into<Thing> for usize {
+// fn into(self) -> Thing {
+// Thing {
+// b: self.to_string(),
+// a: self
+// }
+// }
+// }
+// ```
+// ->
+// ```
+// impl From<usize> for Thing {
+// fn from(val: usize) -> Self {
+// Thing {
+// b: val.to_string(),
+// a: val
+// }
+// }
+// }
+// ```
+pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_ = ctx.find_node_at_offset::<ast::Impl>()?;
+ let src_type = impl_.self_ty()?;
+ let ast_trait = impl_.trait_()?;
+
+ let module = ctx.sema.scope(impl_.syntax())?.module();
+
+ let trait_ = resolve_target_trait(&ctx.sema, &impl_)?;
+ if trait_ != FamousDefs(&ctx.sema, module.krate()).core_convert_Into()? {
+ return None;
+ }
+
+ let src_type_path = {
+ let src_type_path = src_type.syntax().descendants().find_map(ast::Path::cast)?;
+ let src_type_def = match ctx.sema.resolve_path(&src_type_path) {
+ Some(hir::PathResolution::Def(module_def)) => module_def,
+ _ => return None,
+ };
+
+ mod_path_to_ast(&module.find_use_path(ctx.db(), src_type_def)?)
+ };
+
+ let dest_type = match &ast_trait {
+ ast::Type::PathType(path) => {
+ path.path()?.segment()?.generic_arg_list()?.generic_args().next()?
+ }
+ _ => return None,
+ };
+
+ let into_fn = impl_.assoc_item_list()?.assoc_items().find_map(|item| {
+ if let ast::AssocItem::Fn(f) = item {
+ if f.name()?.text() == "into" {
+ return Some(f);
+ }
+ };
+ None
+ })?;
+
+ let into_fn_name = into_fn.name()?;
+ let into_fn_params = into_fn.param_list()?;
+ let into_fn_return = into_fn.ret_type()?;
+
+ let selfs = into_fn
+ .body()?
+ .syntax()
+ .descendants()
+ .filter_map(ast::NameRef::cast)
+ .filter(|name| name.text() == "self" || name.text() == "Self");
+
+ acc.add(
+ AssistId("convert_into_to_from", AssistKind::RefactorRewrite),
+ "Convert Into to From",
+ impl_.syntax().text_range(),
+ |builder| {
+ builder.replace(src_type.syntax().text_range(), dest_type.to_string());
+ builder.replace(ast_trait.syntax().text_range(), format!("From<{}>", src_type));
+ builder.replace(into_fn_return.syntax().text_range(), "-> Self");
+ builder.replace(into_fn_params.syntax().text_range(), format!("(val: {})", src_type));
+ builder.replace(into_fn_name.syntax().text_range(), "from");
+
+ for s in selfs {
+ match s.text().as_ref() {
+ "self" => builder.replace(s.syntax().text_range(), "val"),
+ "Self" => builder.replace(s.syntax().text_range(), src_type_path.to_string()),
+ _ => {}
+ }
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn convert_into_to_from_converts_a_struct() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+struct Thing {
+ a: String,
+ b: usize
+}
+
+impl $0core::convert::Into<Thing> for usize {
+ fn into(self) -> Thing {
+ Thing {
+ b: self.to_string(),
+ a: self
+ }
+ }
+}
+"#,
+ r#"
+struct Thing {
+ a: String,
+ b: usize
+}
+
+impl From<usize> for Thing {
+ fn from(val: usize) -> Self {
+ Thing {
+ b: val.to_string(),
+ a: val
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_converts_enums() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+enum Thing {
+ Foo(String),
+ Bar(String)
+}
+
+impl $0core::convert::Into<String> for Thing {
+ fn into(self) -> String {
+ match self {
+ Self::Foo(s) => s,
+ Self::Bar(s) => s
+ }
+ }
+}
+"#,
+ r#"
+enum Thing {
+ Foo(String),
+ Bar(String)
+}
+
+impl From<Thing> for String {
+ fn from(val: Thing) -> Self {
+ match val {
+ Thing::Foo(s) => s,
+ Thing::Bar(s) => s
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_on_enum_with_lifetimes() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+enum Thing<'a> {
+ Foo(&'a str),
+ Bar(&'a str)
+}
+
+impl<'a> $0core::convert::Into<&'a str> for Thing<'a> {
+ fn into(self) -> &'a str {
+ match self {
+ Self::Foo(s) => s,
+ Self::Bar(s) => s
+ }
+ }
+}
+"#,
+ r#"
+enum Thing<'a> {
+ Foo(&'a str),
+ Bar(&'a str)
+}
+
+impl<'a> From<Thing<'a>> for &'a str {
+ fn from(val: Thing<'a>) -> Self {
+ match val {
+ Thing::Foo(s) => s,
+ Thing::Bar(s) => s
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_works_on_references() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+struct Thing(String);
+
+impl $0core::convert::Into<String> for &Thing {
+ fn into(self) -> Thing {
+ self.0.clone()
+ }
+}
+"#,
+ r#"
+struct Thing(String);
+
+impl From<&Thing> for String {
+ fn from(val: &Thing) -> Self {
+ val.0.clone()
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_works_on_qualified_structs() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+mod things {
+ pub struct Thing(String);
+ pub struct BetterThing(String);
+}
+
+impl $0core::convert::Into<things::BetterThing> for &things::Thing {
+ fn into(self) -> Thing {
+ things::BetterThing(self.0.clone())
+ }
+}
+"#,
+ r#"
+mod things {
+ pub struct Thing(String);
+ pub struct BetterThing(String);
+}
+
+impl From<&things::Thing> for things::BetterThing {
+ fn from(val: &things::Thing) -> Self {
+ things::BetterThing(val.0.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_works_on_qualified_enums() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+mod things {
+ pub enum Thing {
+ A(String)
+ }
+ pub struct BetterThing {
+ B(String)
+ }
+}
+
+impl $0core::convert::Into<things::BetterThing> for &things::Thing {
+ fn into(self) -> Thing {
+ match self {
+ Self::A(s) => things::BetterThing::B(s)
+ }
+ }
+}
+"#,
+ r#"
+mod things {
+ pub enum Thing {
+ A(String)
+ }
+ pub struct BetterThing {
+ B(String)
+ }
+}
+
+impl From<&things::Thing> for things::BetterThing {
+ fn from(val: &things::Thing) -> Self {
+ match val {
+ things::Thing::A(s) => things::BetterThing::B(s)
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_not_applicable_on_any_trait_named_into() {
+ check_assist_not_applicable(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+pub trait Into<T> {
+ pub fn into(self) -> T;
+}
+
+struct Thing {
+ a: String,
+}
+
+impl $0Into<Thing> for String {
+ fn into(self) -> Thing {
+ Thing {
+ a: self
+ }
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
new file mode 100644
index 000000000..2cf370c09
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
@@ -0,0 +1,556 @@
+use hir::known;
+use ide_db::famous_defs::FamousDefs;
+use stdx::format_to;
+use syntax::{
+ ast::{self, edit_in_place::Indent, make, HasArgList, HasLoopBody},
+ AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: convert_iter_for_each_to_for
+//
+// Converts an Iterator::for_each function into a for loop.
+//
+// ```
+// # //- minicore: iterators
+// # use core::iter;
+// fn main() {
+// let iter = iter::repeat((9, 2));
+// iter.for_each$0(|(x, y)| {
+// println!("x: {}, y: {}", x, y);
+// });
+// }
+// ```
+// ->
+// ```
+// # use core::iter;
+// fn main() {
+// let iter = iter::repeat((9, 2));
+// for (x, y) in iter {
+// println!("x: {}, y: {}", x, y);
+// }
+// }
+// ```
+pub(crate) fn convert_iter_for_each_to_for(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let method = ctx.find_node_at_offset::<ast::MethodCallExpr>()?;
+
+ let closure = match method.arg_list()?.args().next()? {
+ ast::Expr::ClosureExpr(expr) => expr,
+ _ => return None,
+ };
+
+ let (method, receiver) = validate_method_call_expr(ctx, method)?;
+
+ let param_list = closure.param_list()?;
+ let param = param_list.params().next()?.pat()?;
+ let body = closure.body()?;
+
+ let stmt = method.syntax().parent().and_then(ast::ExprStmt::cast);
+ let range = stmt.as_ref().map_or(method.syntax(), AstNode::syntax).text_range();
+
+ acc.add(
+ AssistId("convert_iter_for_each_to_for", AssistKind::RefactorRewrite),
+ "Replace this `Iterator::for_each` with a for loop",
+ range,
+ |builder| {
+ let indent =
+ stmt.as_ref().map_or_else(|| method.indent_level(), ast::ExprStmt::indent_level);
+
+ let block = match body {
+ ast::Expr::BlockExpr(block) => block,
+ _ => make::block_expr(Vec::new(), Some(body)),
+ }
+ .clone_for_update();
+ block.reindent_to(indent);
+
+ let expr_for_loop = make::expr_for_loop(param, receiver, block);
+ builder.replace(range, expr_for_loop.to_string())
+ },
+ )
+}
+
+// Assist: convert_for_loop_with_for_each
+//
+// Converts a for loop into a for_each loop on the Iterator.
+//
+// ```
+// fn main() {
+// let x = vec![1, 2, 3];
+// for$0 v in x {
+// let y = v * 2;
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let x = vec![1, 2, 3];
+// x.into_iter().for_each(|v| {
+// let y = v * 2;
+// });
+// }
+// ```
+pub(crate) fn convert_for_loop_with_for_each(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let for_loop = ctx.find_node_at_offset::<ast::ForExpr>()?;
+ let iterable = for_loop.iterable()?;
+ let pat = for_loop.pat()?;
+ let body = for_loop.loop_body()?;
+ if body.syntax().text_range().start() < ctx.offset() {
+ cov_mark::hit!(not_available_in_body);
+ return None;
+ }
+
+ acc.add(
+ AssistId("convert_for_loop_with_for_each", AssistKind::RefactorRewrite),
+ "Replace this for loop with `Iterator::for_each`",
+ for_loop.syntax().text_range(),
+ |builder| {
+ let mut buf = String::new();
+
+ if let Some((expr_behind_ref, method)) =
+ is_ref_and_impls_iter_method(&ctx.sema, &iterable)
+ {
+ // We have either "for x in &col" and col implements a method called iter
+ // or "for x in &mut col" and col implements a method called iter_mut
+ format_to!(buf, "{}.{}()", expr_behind_ref, method);
+ } else if let ast::Expr::RangeExpr(..) = iterable {
+ // range expressions need to be parenthesized for the syntax to be correct
+ format_to!(buf, "({})", iterable);
+ } else if impls_core_iter(&ctx.sema, &iterable) {
+ format_to!(buf, "{}", iterable);
+ } else if let ast::Expr::RefExpr(_) = iterable {
+ format_to!(buf, "({}).into_iter()", iterable);
+ } else {
+ format_to!(buf, "{}.into_iter()", iterable);
+ }
+
+ format_to!(buf, ".for_each(|{}| {});", pat, body);
+
+ builder.replace(for_loop.syntax().text_range(), buf)
+ },
+ )
+}
+
+/// If iterable is a reference where the expression behind the reference implements a method
+/// returning an Iterator called iter or iter_mut (depending on the type of reference) then return
+/// the expression behind the reference and the method name
+fn is_ref_and_impls_iter_method(
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+ iterable: &ast::Expr,
+) -> Option<(ast::Expr, hir::Name)> {
+ let ref_expr = match iterable {
+ ast::Expr::RefExpr(r) => r,
+ _ => return None,
+ };
+ let wanted_method = if ref_expr.mut_token().is_some() { known::iter_mut } else { known::iter };
+ let expr_behind_ref = ref_expr.expr()?;
+ let ty = sema.type_of_expr(&expr_behind_ref)?.adjusted();
+ let scope = sema.scope(iterable.syntax())?;
+ let krate = scope.krate();
+ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+
+ let has_wanted_method = ty
+ .iterate_method_candidates(
+ sema.db,
+ &scope,
+ &scope.visible_traits().0,
+ None,
+ Some(&wanted_method),
+ |func| {
+ if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
+ return Some(());
+ }
+ None
+ },
+ )
+ .is_some();
+ if !has_wanted_method {
+ return None;
+ }
+
+ Some((expr_behind_ref, wanted_method))
+}
+
+/// Whether iterable implements core::Iterator
+fn impls_core_iter(sema: &hir::Semantics<'_, ide_db::RootDatabase>, iterable: &ast::Expr) -> bool {
+ (|| {
+ let it_typ = sema.type_of_expr(iterable)?.adjusted();
+
+ let module = sema.scope(iterable.syntax())?.module();
+
+ let krate = module.krate();
+ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+ cov_mark::hit!(test_already_impls_iterator);
+ Some(it_typ.impls_trait(sema.db, iter_trait, &[]))
+ })()
+ .unwrap_or(false)
+}
+
+fn validate_method_call_expr(
+ ctx: &AssistContext<'_>,
+ expr: ast::MethodCallExpr,
+) -> Option<(ast::Expr, ast::Expr)> {
+ let name_ref = expr.name_ref()?;
+ if !name_ref.syntax().text_range().contains_range(ctx.selection_trimmed()) {
+ cov_mark::hit!(test_for_each_not_applicable_invalid_cursor_pos);
+ return None;
+ }
+ if name_ref.text() != "for_each" {
+ return None;
+ }
+
+ let sema = &ctx.sema;
+
+ let receiver = expr.receiver()?;
+ let expr = ast::Expr::MethodCallExpr(expr);
+
+ let it_type = sema.type_of_expr(&receiver)?.adjusted();
+ let module = sema.scope(receiver.syntax())?.module();
+ let krate = module.krate();
+
+ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+ it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_for_each_in_method_stmt() {
+ check_assist(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ let it = core::iter::repeat(92);
+ it.$0for_each(|(x, y)| {
+ println!("x: {}, y: {}", x, y);
+ });
+}
+"#,
+ r#"
+fn main() {
+ let it = core::iter::repeat(92);
+ for (x, y) in it {
+ println!("x: {}, y: {}", x, y);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_for_each_in_method() {
+ check_assist(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ let it = core::iter::repeat(92);
+ it.$0for_each(|(x, y)| {
+ println!("x: {}, y: {}", x, y);
+ })
+}
+"#,
+ r#"
+fn main() {
+ let it = core::iter::repeat(92);
+ for (x, y) in it {
+ println!("x: {}, y: {}", x, y);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_for_each_without_braces_stmt() {
+ check_assist(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ let it = core::iter::repeat(92);
+ it.$0for_each(|(x, y)| println!("x: {}, y: {}", x, y));
+}
+"#,
+ r#"
+fn main() {
+ let it = core::iter::repeat(92);
+ for (x, y) in it {
+ println!("x: {}, y: {}", x, y)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_for_each_not_applicable() {
+ check_assist_not_applicable(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ ().$0for_each(|x| println!("{}", x));
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_for_each_not_applicable_invalid_cursor_pos() {
+ cov_mark::check!(test_for_each_not_applicable_invalid_cursor_pos);
+ check_assist_not_applicable(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ core::iter::repeat(92).for_each(|(x, y)| $0println!("x: {}, y: {}", x, y));
+}"#,
+ )
+ }
+
+ #[test]
+ fn each_to_for_not_for() {
+ check_assist_not_applicable(
+ convert_for_loop_with_for_each,
+ r"
+let mut x = vec![1, 2, 3];
+x.iter_mut().$0for_each(|v| *v *= 2);
+ ",
+ )
+ }
+
+ #[test]
+ fn each_to_for_simple_for() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ for $0v in x {
+ v *= 2;
+ }
+}",
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ x.into_iter().for_each(|v| {
+ v *= 2;
+ });
+}",
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_in_range() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r#"
+//- minicore: range, iterators
+impl<T> core::iter::Iterator for core::ops::Range<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ for $0x in 0..92 {
+ print!("{}", x);
+ }
+}"#,
+ r#"
+impl<T> core::iter::Iterator for core::ops::Range<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ (0..92).for_each(|x| {
+ print!("{}", x);
+ });
+}"#,
+ )
+ }
+
+ #[test]
+ fn each_to_for_not_available_in_body() {
+ cov_mark::check!(not_available_in_body);
+ check_assist_not_applicable(
+ convert_for_loop_with_for_each,
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ for v in x {
+ $0v *= 2;
+ }
+}",
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_borrowed() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r#"
+//- minicore: iterators
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+ fn iter(&self) -> Repeat<i32> { repeat(92) }
+ fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+ let x = S;
+ for $0v in &x {
+ let a = v * 2;
+ }
+}
+"#,
+ r#"
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+ fn iter(&self) -> Repeat<i32> { repeat(92) }
+ fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+ let x = S;
+ x.iter().for_each(|v| {
+ let a = v * 2;
+ });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_borrowed_no_iter_method() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r"
+struct NoIterMethod;
+fn main() {
+ let x = NoIterMethod;
+ for $0v in &x {
+ let a = v * 2;
+ }
+}
+",
+ r"
+struct NoIterMethod;
+fn main() {
+ let x = NoIterMethod;
+ (&x).into_iter().for_each(|v| {
+ let a = v * 2;
+ });
+}
+",
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_borrowed_mut() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r#"
+//- minicore: iterators
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+ fn iter(&self) -> Repeat<i32> { repeat(92) }
+ fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+ let x = S;
+ for $0v in &mut x {
+ let a = v * 2;
+ }
+}
+"#,
+ r#"
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+ fn iter(&self) -> Repeat<i32> { repeat(92) }
+ fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+ let x = S;
+ x.iter_mut().for_each(|v| {
+ let a = v * 2;
+ });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_borrowed_mut_behind_var() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ let y = &mut x;
+ for $0v in y {
+ *v *= 2;
+ }
+}",
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ let y = &mut x;
+ y.into_iter().for_each(|v| {
+ *v *= 2;
+ });
+}",
+ )
+ }
+
+ #[test]
+ fn each_to_for_already_impls_iterator() {
+ cov_mark::check!(test_already_impls_iterator);
+ check_assist(
+ convert_for_loop_with_for_each,
+ r#"
+//- minicore: iterators
+fn main() {
+ for$0 a in core::iter::repeat(92).take(1) {
+ println!("{}", a);
+ }
+}
+"#,
+ r#"
+fn main() {
+ core::iter::repeat(92).take(1).for_each(|a| {
+ println!("{}", a);
+ });
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
new file mode 100644
index 000000000..00095de25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
@@ -0,0 +1,497 @@
+use hir::Semantics;
+use ide_db::RootDatabase;
+use syntax::ast::{edit::AstNodeEdit, AstNode, HasName, LetStmt, Name, Pat};
+use syntax::T;
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+/// Gets a list of binders in a pattern, and whether they are mut.
+fn binders_in_pat(
+ acc: &mut Vec<(Name, bool)>,
+ pat: &Pat,
+ sem: &Semantics<'_, RootDatabase>,
+) -> Option<()> {
+ use Pat::*;
+ match pat {
+ IdentPat(p) => {
+ let ident = p.name()?;
+ let ismut = p.ref_token().is_none() && p.mut_token().is_some();
+ // check for const reference
+ if sem.resolve_bind_pat_to_const(p).is_none() {
+ acc.push((ident, ismut));
+ }
+ if let Some(inner) = p.pat() {
+ binders_in_pat(acc, &inner, sem)?;
+ }
+ Some(())
+ }
+ BoxPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
+ RestPat(_) | LiteralPat(_) | PathPat(_) | WildcardPat(_) | ConstBlockPat(_) => Some(()),
+ OrPat(p) => {
+ for p in p.pats() {
+ binders_in_pat(acc, &p, sem)?;
+ }
+ Some(())
+ }
+ ParenPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
+ RangePat(p) => {
+ if let Some(st) = p.start() {
+ binders_in_pat(acc, &st, sem)?
+ }
+ if let Some(ed) = p.end() {
+ binders_in_pat(acc, &ed, sem)?
+ }
+ Some(())
+ }
+ RecordPat(p) => {
+ for f in p.record_pat_field_list()?.fields() {
+ let pat = f.pat()?;
+ binders_in_pat(acc, &pat, sem)?;
+ }
+ Some(())
+ }
+ RefPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
+ SlicePat(p) => {
+ for p in p.pats() {
+ binders_in_pat(acc, &p, sem)?;
+ }
+ Some(())
+ }
+ TuplePat(p) => {
+ for p in p.fields() {
+ binders_in_pat(acc, &p, sem)?;
+ }
+ Some(())
+ }
+ TupleStructPat(p) => {
+ for p in p.fields() {
+ binders_in_pat(acc, &p, sem)?;
+ }
+ Some(())
+ }
+ // don't support macro pat yet
+ MacroPat(_) => None,
+ }
+}
+
+fn binders_to_str(binders: &[(Name, bool)], addmut: bool) -> String {
+ let vars = binders
+ .iter()
+ .map(
+ |(ident, ismut)| {
+ if *ismut && addmut {
+ format!("mut {}", ident)
+ } else {
+ ident.to_string()
+ }
+ },
+ )
+ .collect::<Vec<_>>()
+ .join(", ");
+ if binders.is_empty() {
+ String::from("{}")
+ } else if binders.len() == 1 {
+ vars
+ } else {
+ format!("({})", vars)
+ }
+}
+
+// Assist: convert_let_else_to_match
+//
+// Converts let-else statement to let statement and match expression.
+//
+// ```
+// fn main() {
+// let Ok(mut x) = f() else$0 { return };
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let mut x = match f() {
+// Ok(x) => x,
+// _ => return,
+// };
+// }
+// ```
+pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // should focus on else token to trigger
+ let else_token = ctx.find_token_syntax_at_offset(T![else])?;
+ let let_stmt = LetStmt::cast(else_token.parent()?.parent()?)?;
+ let let_else_block = let_stmt.let_else()?.block_expr()?;
+ let let_init = let_stmt.initializer()?;
+ if let_stmt.ty().is_some() {
+ // don't support let with type annotation
+ return None;
+ }
+ let pat = let_stmt.pat()?;
+ let mut binders = Vec::new();
+ binders_in_pat(&mut binders, &pat, &ctx.sema)?;
+
+ let target = let_stmt.syntax().text_range();
+ acc.add(
+ AssistId("convert_let_else_to_match", AssistKind::RefactorRewrite),
+ "Convert let-else to let and match",
+ target,
+ |edit| {
+ let indent_level = let_stmt.indent_level().0 as usize;
+ let indent = " ".repeat(indent_level);
+ let indent1 = " ".repeat(indent_level + 1);
+
+ let binders_str = binders_to_str(&binders, false);
+ let binders_str_mut = binders_to_str(&binders, true);
+
+ let init_expr = let_init.syntax().text();
+ let mut pat_no_mut = pat.syntax().text().to_string();
+ // remove the mut from the pattern
+ for (b, ismut) in binders.iter() {
+ if *ismut {
+ pat_no_mut = pat_no_mut.replace(&format!("mut {b}"), &b.to_string());
+ }
+ }
+
+ let only_expr = let_else_block.statements().next().is_none();
+ let branch2 = match &let_else_block.tail_expr() {
+ Some(tail) if only_expr => format!("{},", tail.syntax().text()),
+ _ => let_else_block.syntax().text().to_string(),
+ };
+ let replace = if binders.is_empty() {
+ format!(
+ "match {init_expr} {{
+{indent1}{pat_no_mut} => {binders_str}
+{indent1}_ => {branch2}
+{indent}}}"
+ )
+ } else {
+ format!(
+ "let {binders_str_mut} = match {init_expr} {{
+{indent1}{pat_no_mut} => {binders_str},
+{indent1}_ => {branch2}
+{indent}}};"
+ )
+ };
+ edit.replace(target, replace);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn convert_let_else_to_match_no_type_let() {
+ check_assist_not_applicable(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let 1: u32 = v.iter().sum() else$0 { return };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_on_else() {
+ check_assist_not_applicable(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let Ok(x) = f() else {$0 return };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_no_macropat() {
+ check_assist_not_applicable(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let m!() = g() else$0 { return };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_target() {
+ check_assist_target(
+ convert_let_else_to_match,
+ r"
+fn main() {
+ let Ok(x) = f() else$0 { continue };
+}",
+ "let Ok(x) = f() else { continue };",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_basic() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+fn main() {
+ let Ok(x) = f() else$0 { continue };
+}",
+ r"
+fn main() {
+ let x = match f() {
+ Ok(x) => x,
+ _ => continue,
+ };
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_const_ref() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+enum Option<T> {
+ Some(T),
+ None,
+}
+use Option::*;
+fn main() {
+ let None = f() el$0se { continue };
+}",
+ r"
+enum Option<T> {
+ Some(T),
+ None,
+}
+use Option::*;
+fn main() {
+ match f() {
+ None => {}
+ _ => continue,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_const_ref_const() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+const NEG1: i32 = -1;
+fn main() {
+ let NEG1 = f() el$0se { continue };
+}",
+ r"
+const NEG1: i32 = -1;
+fn main() {
+ match f() {
+ NEG1 => {}
+ _ => continue,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_mut() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+fn main() {
+ let Ok(mut x) = f() el$0se { continue };
+}",
+ r"
+fn main() {
+ let mut x = match f() {
+ Ok(x) => x,
+ _ => continue,
+ };
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_multi_binders() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let ControlFlow::Break((x, "tag", y, ..)) = f() else$0 { g(); return };
+}"#,
+ r#"
+fn main() {
+ let (x, y) = match f() {
+ ControlFlow::Break((x, "tag", y, ..)) => (x, y),
+ _ => { g(); return }
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_slice() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let [one, 1001, other] = f() else$0 { break };
+}"#,
+ r#"
+fn main() {
+ let (one, other) = match f() {
+ [one, 1001, other] => (one, other),
+ _ => break,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_struct() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let [Struct { inner: Some(it) }, 1001, other] = f() else$0 { break };
+}"#,
+ r#"
+fn main() {
+ let (it, other) = match f() {
+ [Struct { inner: Some(it) }, 1001, other] => (it, other),
+ _ => break,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_struct_ident_pat() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let [Struct { inner }, 1001, other] = f() else$0 { break };
+}"#,
+ r#"
+fn main() {
+ let (inner, other) = match f() {
+ [Struct { inner }, 1001, other] => (inner, other),
+ _ => break,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_no_binder() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let (8 | 9) = f() else$0 { panic!() };
+}"#,
+ r#"
+fn main() {
+ match f() {
+ (8 | 9) => {}
+ _ => panic!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_range() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let 1.. = f() e$0lse { return };
+}"#,
+ r#"
+fn main() {
+ match f() {
+ 1.. => {}
+ _ => return,
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_refpat() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let Ok(&mut x) = f(&mut 0) else$0 { return };
+}"#,
+ r#"
+fn main() {
+ let x = match f(&mut 0) {
+ Ok(&mut x) => x,
+ _ => return,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_refmut() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let Ok(ref mut x) = f() else$0 { return };
+}"#,
+ r#"
+fn main() {
+ let x = match f() {
+ Ok(ref mut x) => x,
+ _ => return,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_atpat() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let out @ Ok(ins) = f() else$0 { return };
+}"#,
+ r#"
+fn main() {
+ let (out, ins) = match f() {
+ out @ Ok(ins) => (out, ins),
+ _ => return,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_complex_init() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let v = vec![1, 2, 3];
+ let &[mut x, y, ..] = &v.iter().collect::<Vec<_>>()[..] else$0 { return };
+}"#,
+ r#"
+fn main() {
+ let v = vec![1, 2, 3];
+ let (mut x, y) = match &v.iter().collect::<Vec<_>>()[..] {
+ &[x, y, ..] => (x, y),
+ _ => return,
+ };
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
new file mode 100644
index 000000000..cb75619ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -0,0 +1,574 @@
+use std::iter::once;
+
+use ide_db::syntax_helpers::node_ext::{is_pattern_cond, single_let};
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
+ },
+ ted, AstNode,
+ SyntaxKind::{FN, LOOP_EXPR, WHILE_EXPR, WHITESPACE},
+ T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::invert_boolean_expression,
+ AssistId, AssistKind,
+};
+
+// Assist: convert_to_guarded_return
+//
+// Replace a large conditional with a guarded return.
+//
+// ```
+// fn main() {
+// $0if cond {
+// foo();
+// bar();
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// if !cond {
+// return;
+// }
+// foo();
+// bar();
+// }
+// ```
+pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
+ if if_expr.else_branch().is_some() {
+ return None;
+ }
+
+ let cond = if_expr.condition()?;
+
+ // Check if there is an IfLet that we can handle.
+ let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) {
+ let let_ = single_let(cond)?;
+ match let_.pat() {
+ Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => {
+ let path = pat.path()?;
+ if path.qualifier().is_some() {
+ return None;
+ }
+
+ let bound_ident = pat.fields().next().unwrap();
+ if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) {
+ return None;
+ }
+
+ (Some((path, bound_ident)), let_.expr()?)
+ }
+ _ => return None, // Unsupported IfLet.
+ }
+ } else {
+ (None, cond)
+ };
+
+ let then_block = if_expr.then_branch()?;
+ let then_block = then_block.stmt_list()?;
+
+ let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
+
+ if parent_block.tail_expr()? != if_expr.clone().into() {
+ return None;
+ }
+
+ // FIXME: This relies on untyped syntax tree and casts to much. It should be
+ // rewritten to use strongly-typed APIs.
+
+ // check for early return and continue
+ let first_in_then_block = then_block.syntax().first_child()?;
+ if ast::ReturnExpr::can_cast(first_in_then_block.kind())
+ || ast::ContinueExpr::can_cast(first_in_then_block.kind())
+ || first_in_then_block
+ .children()
+ .any(|x| ast::ReturnExpr::can_cast(x.kind()) || ast::ContinueExpr::can_cast(x.kind()))
+ {
+ return None;
+ }
+
+ let parent_container = parent_block.syntax().parent()?;
+
+ let early_expression: ast::Expr = match parent_container.kind() {
+ WHILE_EXPR | LOOP_EXPR => make::expr_continue(None),
+ FN => make::expr_return(None),
+ _ => return None,
+ };
+
+ if then_block.syntax().first_child_or_token().map(|t| t.kind() == T!['{']).is_none() {
+ return None;
+ }
+
+ then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?;
+
+ let target = if_expr.syntax().text_range();
+ acc.add(
+ AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite),
+ "Convert to guarded return",
+ target,
+ |edit| {
+ let if_expr = edit.make_mut(if_expr);
+ let if_indent_level = IndentLevel::from_node(if_expr.syntax());
+ let replacement = match if_let_pat {
+ None => {
+ // If.
+ let new_expr = {
+ let then_branch =
+ make::block_expr(once(make::expr_stmt(early_expression).into()), None);
+ let cond = invert_boolean_expression(cond_expr);
+ make::expr_if(cond, then_branch, None).indent(if_indent_level)
+ };
+ new_expr.syntax().clone_for_update()
+ }
+ Some((path, bound_ident)) => {
+ // If-let.
+ let match_expr = {
+ let happy_arm = {
+ let pat = make::tuple_struct_pat(
+ path,
+ once(make::ext::simple_ident_pat(make::name("it")).into()),
+ );
+ let expr = {
+ let path = make::ext::ident_path("it");
+ make::expr_path(path)
+ };
+ make::match_arm(once(pat.into()), None, expr)
+ };
+
+ let sad_arm = make::match_arm(
+ // FIXME: would be cool to use `None` or `Err(_)` if appropriate
+ once(make::wildcard_pat().into()),
+ None,
+ early_expression,
+ );
+
+ make::expr_match(cond_expr, make::match_arm_list(vec![happy_arm, sad_arm]))
+ };
+
+ let let_stmt = make::let_stmt(bound_ident, None, Some(match_expr));
+ let let_stmt = let_stmt.indent(if_indent_level);
+ let_stmt.syntax().clone_for_update()
+ }
+ };
+
+ let then_block_items = then_block.dedent(IndentLevel(1)).clone_for_update();
+
+ let end_of_then = then_block_items.syntax().last_child_or_token().unwrap();
+ let end_of_then =
+ if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) {
+ end_of_then.prev_sibling_or_token().unwrap()
+ } else {
+ end_of_then
+ };
+
+ let then_statements = replacement
+ .children_with_tokens()
+ .chain(
+ then_block_items
+ .syntax()
+ .children_with_tokens()
+ .skip(1)
+ .take_while(|i| *i != end_of_then),
+ )
+ .collect();
+
+ ted::replace_with_many(if_expr.syntax(), then_statements)
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn convert_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ bar();
+ if$0 true {
+ foo();
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main() {
+ bar();
+ if false {
+ return;
+ }
+ foo();
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ if$0 let Some(n) = n {
+ foo(n);
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ let n = match n {
+ Some(it) => it,
+ _ => return,
+ };
+ foo(n);
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_if_let_result() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 let Ok(x) = Err(92) {
+ foo(x);
+ }
+}
+"#,
+ r#"
+fn main() {
+ let x = match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ };
+ foo(x);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_ok_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ if$0 let Some(n) = n {
+ foo(n);
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ let n = match n {
+ Some(it) => it,
+ _ => return,
+ };
+ foo(n);
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_mut_ok_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ if$0 let Some(mut n) = n {
+ foo(n);
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ let mut n = match n {
+ Some(it) => it,
+ _ => return,
+ };
+ foo(n);
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_ref_ok_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main(n: Option<&str>) {
+ bar();
+ if$0 let Some(ref n) = n {
+ foo(n);
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main(n: Option<&str>) {
+ bar();
+ let ref n = match n {
+ Some(it) => it,
+ _ => return,
+ };
+ foo(n);
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_inside_while() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ while true {
+ if$0 true {
+ foo();
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ while true {
+ if false {
+ continue;
+ }
+ foo();
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_inside_while() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ while true {
+ if$0 let Some(n) = n {
+ foo(n);
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ while true {
+ let n = match n {
+ Some(it) => it,
+ _ => continue,
+ };
+ foo(n);
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_inside_loop() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ loop {
+ if$0 true {
+ foo();
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if false {
+ continue;
+ }
+ foo();
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_inside_loop() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ loop {
+ if$0 let Some(n) = n {
+ foo(n);
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ let n = match n {
+ Some(it) => it,
+ _ => continue,
+ };
+ foo(n);
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_already_converted_if() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ return;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_already_converted_loop() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ loop {
+ if$0 true {
+ continue;
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_return() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ return
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_else_branch() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ foo();
+ } else {
+ bar()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_statements_aftert_if() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ foo();
+ }
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_statements_inside_if() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if false {
+ if$0 true {
+ foo();
+ }
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
new file mode 100644
index 000000000..4ab8e93a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -0,0 +1,840 @@
+use either::Either;
+use ide_db::defs::{Definition, NameRefClass};
+use syntax::{
+ ast::{self, AstNode, HasGenericParams, HasVisibility},
+ match_ast, SyntaxNode,
+};
+
+use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: convert_tuple_struct_to_named_struct
+//
+// Converts tuple struct to struct with named fields, and analogously for tuple enum variants.
+//
+// ```
+// struct Point$0(f32, f32);
+//
+// impl Point {
+// pub fn new(x: f32, y: f32) -> Self {
+// Point(x, y)
+// }
+//
+// pub fn x(&self) -> f32 {
+// self.0
+// }
+//
+// pub fn y(&self) -> f32 {
+// self.1
+// }
+// }
+// ```
+// ->
+// ```
+// struct Point { field1: f32, field2: f32 }
+//
+// impl Point {
+// pub fn new(x: f32, y: f32) -> Self {
+// Point { field1: x, field2: y }
+// }
+//
+// pub fn x(&self) -> f32 {
+// self.field1
+// }
+//
+// pub fn y(&self) -> f32 {
+// self.field2
+// }
+// }
+// ```
+pub(crate) fn convert_tuple_struct_to_named_struct(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let strukt = ctx
+ .find_node_at_offset::<ast::Struct>()
+ .map(Either::Left)
+ .or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
+ let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
+ let tuple_fields = match field_list {
+ ast::FieldList::TupleFieldList(it) => it,
+ ast::FieldList::RecordFieldList(_) => return None,
+ };
+ let strukt_def = match &strukt {
+ Either::Left(s) => Either::Left(ctx.sema.to_def(s)?),
+ Either::Right(v) => Either::Right(ctx.sema.to_def(v)?),
+ };
+ let target = strukt.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range();
+
+ acc.add(
+ AssistId("convert_tuple_struct_to_named_struct", AssistKind::RefactorRewrite),
+ "Convert to named struct",
+ target,
+ |edit| {
+ let names = generate_names(tuple_fields.fields());
+ edit_field_references(ctx, edit, tuple_fields.fields(), &names);
+ edit_struct_references(ctx, edit, strukt_def, &names);
+ edit_struct_def(ctx, edit, &strukt, tuple_fields, names);
+ },
+ )
+}
+
+fn edit_struct_def(
+ ctx: &AssistContext<'_>,
+ edit: &mut AssistBuilder,
+ strukt: &Either<ast::Struct, ast::Variant>,
+ tuple_fields: ast::TupleFieldList,
+ names: Vec<ast::Name>,
+) {
+ let record_fields = tuple_fields
+ .fields()
+ .zip(names)
+ .filter_map(|(f, name)| Some(ast::make::record_field(f.visibility(), name, f.ty()?)));
+ let record_fields = ast::make::record_field_list(record_fields);
+ let tuple_fields_text_range = tuple_fields.syntax().text_range();
+
+ edit.edit_file(ctx.file_id());
+
+ if let Either::Left(strukt) = strukt {
+ if let Some(w) = strukt.where_clause() {
+ edit.delete(w.syntax().text_range());
+ edit.insert(
+ tuple_fields_text_range.start(),
+ ast::make::tokens::single_newline().text(),
+ );
+ edit.insert(tuple_fields_text_range.start(), w.syntax().text());
+ edit.insert(tuple_fields_text_range.start(), ",");
+ edit.insert(
+ tuple_fields_text_range.start(),
+ ast::make::tokens::single_newline().text(),
+ );
+ } else {
+ edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text());
+ }
+ if let Some(t) = strukt.semicolon_token() {
+ edit.delete(t.text_range());
+ }
+ } else {
+ edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text());
+ }
+
+ edit.replace(tuple_fields_text_range, record_fields.to_string());
+}
+
+fn edit_struct_references(
+ ctx: &AssistContext<'_>,
+ edit: &mut AssistBuilder,
+ strukt: Either<hir::Struct, hir::Variant>,
+ names: &[ast::Name],
+) {
+ let strukt_def = match strukt {
+ Either::Left(s) => Definition::Adt(hir::Adt::Struct(s)),
+ Either::Right(v) => Definition::Variant(v),
+ };
+ let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
+
+ let edit_node = |edit: &mut AssistBuilder, node: SyntaxNode| -> Option<()> {
+ match_ast! {
+ match node {
+ ast::TupleStructPat(tuple_struct_pat) => {
+ edit.replace(
+ tuple_struct_pat.syntax().text_range(),
+ ast::make::record_pat_with_fields(
+ tuple_struct_pat.path()?,
+ ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map(
+ |(pat, name)| {
+ ast::make::record_pat_field(
+ ast::make::name_ref(&name.to_string()),
+ pat,
+ )
+ },
+ )),
+ )
+ .to_string(),
+ );
+ },
+ // for tuple struct creations like Foo(42)
+ ast::CallExpr(call_expr) => {
+ let path = call_expr.syntax().descendants().find_map(ast::PathExpr::cast).and_then(|expr| expr.path())?;
+
+ // this also includes method calls like Foo::new(42), we should skip them
+ if let Some(name_ref) = path.segment().and_then(|s| s.name_ref()) {
+ match NameRefClass::classify(&ctx.sema, &name_ref) {
+ Some(NameRefClass::Definition(Definition::SelfType(_))) => {},
+ Some(NameRefClass::Definition(def)) if def == strukt_def => {},
+ _ => return None,
+ };
+ }
+
+ let arg_list = call_expr.syntax().descendants().find_map(ast::ArgList::cast)?;
+
+ edit.replace(
+ call_expr.syntax().text_range(),
+ ast::make::record_expr(
+ path,
+ ast::make::record_expr_field_list(arg_list.args().zip(names).map(
+ |(expr, name)| {
+ ast::make::record_expr_field(
+ ast::make::name_ref(&name.to_string()),
+ Some(expr),
+ )
+ },
+ )),
+ )
+ .to_string(),
+ );
+ },
+ _ => return None,
+ }
+ }
+ Some(())
+ };
+
+ for (file_id, refs) in usages {
+ edit.edit_file(file_id);
+ for r in refs {
+ for node in r.name.syntax().ancestors() {
+ if edit_node(edit, node).is_some() {
+ break;
+ }
+ }
+ }
+ }
+}
+
+fn edit_field_references(
+ ctx: &AssistContext<'_>,
+ edit: &mut AssistBuilder,
+ fields: impl Iterator<Item = ast::TupleField>,
+ names: &[ast::Name],
+) {
+ for (field, name) in fields.zip(names) {
+ let field = match ctx.sema.to_def(&field) {
+ Some(it) => it,
+ None => continue,
+ };
+ let def = Definition::Field(field);
+ let usages = def.usages(&ctx.sema).all();
+ for (file_id, refs) in usages {
+ edit.edit_file(file_id);
+ for r in refs {
+ if let Some(name_ref) = r.name.as_name_ref() {
+ edit.replace(name_ref.syntax().text_range(), name.text());
+ }
+ }
+ }
+ }
+}
+
+fn generate_names(fields: impl Iterator<Item = ast::TupleField>) -> Vec<ast::Name> {
+ fields.enumerate().map(|(i, _)| ast::make::name(&format!("field{}", i + 1))).collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn not_applicable_other_than_tuple_struct() {
+ check_assist_not_applicable(
+ convert_tuple_struct_to_named_struct,
+ r#"struct Foo$0 { bar: u32 };"#,
+ );
+ check_assist_not_applicable(convert_tuple_struct_to_named_struct, r#"struct Foo$0;"#);
+ }
+
+ #[test]
+ fn convert_simple_struct() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner;
+struct A$0(Inner);
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A(inner)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ r#"
+struct Inner;
+struct A { field1: Inner }
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A { field1: inner }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.field1
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_referenced_via_self_kw() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner;
+struct A$0(Inner);
+
+impl A {
+ fn new(inner: Inner) -> Self {
+ Self(inner)
+ }
+
+ fn new_with_default() -> Self {
+ Self::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ r#"
+struct Inner;
+struct A { field1: Inner }
+
+impl A {
+ fn new(inner: Inner) -> Self {
+ Self { field1: inner }
+ }
+
+ fn new_with_default() -> Self {
+ Self::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.field1
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_destructured_struct() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner;
+struct A$0(Inner);
+
+impl A {
+ fn into_inner(self) -> Inner {
+ let A(first) = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> Inner {
+ let Self(first) = self;
+ first
+ }
+}"#,
+ r#"
+struct Inner;
+struct A { field1: Inner }
+
+impl A {
+ fn into_inner(self) -> Inner {
+ let A { field1: first } = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> Inner {
+ let Self { field1: first } = self;
+ first
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_visibility() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct A$0(pub u32, pub(crate) u64);
+
+impl A {
+ fn new() -> A {
+ A(42, 42)
+ }
+
+ fn into_first(self) -> u32 {
+ self.0
+ }
+
+ fn into_second(self) -> u64 {
+ self.1
+ }
+}"#,
+ r#"
+struct A { pub field1: u32, pub(crate) field2: u64 }
+
+impl A {
+ fn new() -> A {
+ A { field1: 42, field2: 42 }
+ }
+
+ fn into_first(self) -> u32 {
+ self.field1
+ }
+
+ fn into_second(self) -> u64 {
+ self.field2
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_wrapped_references() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner$0(u32);
+struct Outer(Inner);
+
+impl Outer {
+ fn new() -> Self {
+ Self(Inner(42))
+ }
+
+ fn into_inner(self) -> u32 {
+ (self.0).0
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer(Inner(x)) = self;
+ x
+ }
+}"#,
+ r#"
+struct Inner { field1: u32 }
+struct Outer(Inner);
+
+impl Outer {
+ fn new() -> Self {
+ Self(Inner { field1: 42 })
+ }
+
+ fn into_inner(self) -> u32 {
+ (self.0).field1
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer(Inner { field1: x }) = self;
+ x
+ }
+}"#,
+ );
+
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner(u32);
+struct Outer$0(Inner);
+
+impl Outer {
+ fn new() -> Self {
+ Self(Inner(42))
+ }
+
+ fn into_inner(self) -> u32 {
+ (self.0).0
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer(Inner(x)) = self;
+ x
+ }
+}"#,
+ r#"
+struct Inner(u32);
+struct Outer { field1: Inner }
+
+impl Outer {
+ fn new() -> Self {
+ Self { field1: Inner(42) }
+ }
+
+ fn into_inner(self) -> u32 {
+ (self.field1).0
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer { field1: Inner(x) } = self;
+ x
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_multi_file_references() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+//- /main.rs
+struct Inner;
+struct A$0(Inner);
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A(Inner);
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+struct A { field1: Inner }
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A { field1: Inner };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_where_clause() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Wrap$0<T>(T)
+where
+ T: Display;
+"#,
+ r#"
+struct Wrap<T>
+where
+ T: Display,
+{ field1: T }
+
+"#,
+ );
+ }
+ #[test]
+ fn not_applicable_other_than_tuple_variant() {
+ check_assist_not_applicable(
+ convert_tuple_struct_to_named_struct,
+ r#"enum Enum { Variant$0 { value: usize } };"#,
+ );
+ check_assist_not_applicable(
+ convert_tuple_struct_to_named_struct,
+ r#"enum Enum { Variant$0 }"#,
+ );
+ }
+
+ #[test]
+ fn convert_simple_variant() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum A {
+ $0Variant(usize),
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ A::Variant(value)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ A::Variant(value) => value,
+ }
+ }
+}"#,
+ r#"
+enum A {
+ Variant { field1: usize },
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ A::Variant { field1: value }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ A::Variant { field1: value } => value,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_referenced_via_self_kw() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum A {
+ $0Variant(usize),
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ Self::Variant(value)
+ }
+
+ fn new_with_default() -> A {
+ Self::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ Self::Variant(value) => value,
+ }
+ }
+}"#,
+ r#"
+enum A {
+ Variant { field1: usize },
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ Self::Variant { field1: value }
+ }
+
+ fn new_with_default() -> A {
+ Self::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ Self::Variant { field1: value } => value,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_destructured_variant() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum A {
+ $0Variant(usize),
+}
+
+impl A {
+ fn into_inner(self) -> usize {
+ let A::Variant(first) = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> usize {
+ let Self::Variant(first) = self;
+ first
+ }
+}"#,
+ r#"
+enum A {
+ Variant { field1: usize },
+}
+
+impl A {
+ fn into_inner(self) -> usize {
+ let A::Variant { field1: first } = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> usize {
+ let Self::Variant { field1: first } = self;
+ first
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_with_wrapped_references() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum Inner {
+ $0Variant(usize),
+}
+enum Outer {
+ Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant(42))
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant(x)) = self;
+ x
+ }
+}"#,
+ r#"
+enum Inner {
+ Variant { field1: usize },
+}
+enum Outer {
+ Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant { field1: 42 })
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant { field1: x }) = self;
+ x
+ }
+}"#,
+ );
+
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum Inner {
+ Variant(usize),
+}
+enum Outer {
+ $0Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant(42))
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant(x)) = self;
+ x
+ }
+}"#,
+ r#"
+enum Inner {
+ Variant(usize),
+}
+enum Outer {
+ Variant { field1: Inner },
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant { field1: Inner::Variant(42) }
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant { field1: Inner::Variant(x) } = self;
+ x
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_with_multi_file_references() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ $0Variant(Inner),
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A::Variant(Inner);
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ Variant { field1: Inner },
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A::Variant { field1: Inner };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_directly_used_variant() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ $0Variant(Inner),
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A::Variant, Inner};
+fn f() {
+ let a = Variant(Inner);
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ Variant { field1: Inner },
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A::Variant, Inner};
+fn f() {
+ let a = Variant { field1: Inner };
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs
new file mode 100644
index 000000000..c34b68411
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs
@@ -0,0 +1,188 @@
+use std::iter::once;
+
+use ide_db::syntax_helpers::node_ext::is_pattern_cond;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, HasLoopBody,
+ },
+ AstNode, T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::invert_boolean_expression,
+ AssistId, AssistKind,
+};
+
+// Assist: convert_while_to_loop
+//
+// Replace a while with a loop.
+//
+// ```
+// fn main() {
+// $0while cond {
+// foo();
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// loop {
+// if !cond {
+// break;
+// }
+// foo();
+// }
+// }
+// ```
+pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let while_kw = ctx.find_token_syntax_at_offset(T![while])?;
+ let while_expr = while_kw.parent().and_then(ast::WhileExpr::cast)?;
+ let while_body = while_expr.loop_body()?;
+ let while_cond = while_expr.condition()?;
+
+ let target = while_expr.syntax().text_range();
+ acc.add(
+ AssistId("convert_while_to_loop", AssistKind::RefactorRewrite),
+ "Convert while to loop",
+ target,
+ |edit| {
+ let while_indent_level = IndentLevel::from_node(while_expr.syntax());
+
+ let break_block =
+ make::block_expr(once(make::expr_stmt(make::expr_break(None, None)).into()), None)
+ .indent(while_indent_level);
+ let block_expr = if is_pattern_cond(while_cond.clone()) {
+ let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into()));
+ let stmts = once(make::expr_stmt(if_expr).into());
+ make::block_expr(stmts, None)
+ } else {
+ let if_cond = invert_boolean_expression(while_cond);
+ let if_expr = make::expr_if(if_cond, break_block, None);
+ let stmts = once(make::expr_stmt(if_expr).into()).chain(while_body.statements());
+ make::block_expr(stmts, while_body.tail_expr())
+ };
+
+ let replacement = make::expr_loop(block_expr.indent(while_indent_level));
+ edit.replace(target, replacement.syntax().text())
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn convert_inside_fn() {
+ check_assist(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while$0 cond {
+ foo();
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if !cond {
+ break;
+ }
+ foo();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_busy_wait() {
+ check_assist(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while$0 cond() {}
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if !cond() {
+ break;
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_trailing_expr() {
+ check_assist(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while$0 cond() {
+ bar()
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if !cond() {
+ break;
+ }
+ bar()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_while_let() {
+ check_assist(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while$0 let Some(_) = foo() {
+ bar();
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if let Some(_) = foo() {
+ bar();
+ } else {
+ break;
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_cursor_in_body() {
+ check_assist_not_applicable(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while cond {$0
+ bar();
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
new file mode 100644
index 000000000..c1f57532b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -0,0 +1,2147 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ defs::Definition,
+ search::{FileReference, SearchScope, UsageSearchResult},
+};
+use syntax::{
+ ast::{self, AstNode, FieldExpr, HasName, IdentPat, MethodCallExpr},
+ TextRange,
+};
+
+use crate::assist_context::{AssistBuilder, AssistContext, Assists};
+
+// Assist: destructure_tuple_binding
+//
+// Destructures a tuple binding in place.
+//
+// ```
+// fn main() {
+// let $0t = (1,2);
+// let v = t.0;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let ($0_0, _1) = (1,2);
+// let v = _0;
+// }
+// ```
+pub(crate) fn destructure_tuple_binding(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, false)
+}
+
+// And when `with_sub_pattern` is enabled (currently disabled):
+// Assist: destructure_tuple_binding_in_sub_pattern
+//
+// Destructures tuple items in sub-pattern (after `@`).
+//
+// ```
+// fn main() {
+// let $0t = (1,2);
+// let v = t.0;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let t @ ($0_0, _1) = (1,2);
+// let v = _0;
+// }
+// ```
+pub(crate) fn destructure_tuple_binding_impl(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ with_sub_pattern: bool,
+) -> Option<()> {
+ let ident_pat = ctx.find_node_at_offset::<ast::IdentPat>()?;
+ let data = collect_data(ident_pat, ctx)?;
+
+ if with_sub_pattern {
+ acc.add(
+ AssistId("destructure_tuple_binding_in_sub_pattern", AssistKind::RefactorRewrite),
+ "Destructure tuple in sub-pattern",
+ data.range,
+ |builder| {
+ edit_tuple_assignment(ctx, builder, &data, true);
+ edit_tuple_usages(&data, builder, ctx, true);
+ },
+ );
+ }
+
+ acc.add(
+ AssistId("destructure_tuple_binding", AssistKind::RefactorRewrite),
+ if with_sub_pattern { "Destructure tuple in place" } else { "Destructure tuple" },
+ data.range,
+ |builder| {
+ edit_tuple_assignment(ctx, builder, &data, false);
+ edit_tuple_usages(&data, builder, ctx, false);
+ },
+ );
+
+ Some(())
+}
+
+fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleData> {
+ if ident_pat.at_token().is_some() {
+ // Cannot destructure pattern with sub-pattern:
+ // Only IdentPat can have sub-pattern,
+ // but not TuplePat (`(a,b)`).
+ cov_mark::hit!(destructure_tuple_subpattern);
+ return None;
+ }
+
+ let ty = ctx.sema.type_of_pat(&ident_pat.clone().into())?.adjusted();
+ let ref_type = if ty.is_mutable_reference() {
+ Some(RefType::Mutable)
+ } else if ty.is_reference() {
+ Some(RefType::ReadOnly)
+ } else {
+ None
+ };
+ // might be reference
+ let ty = ty.strip_references();
+ // must be tuple
+ let field_types = ty.tuple_fields(ctx.db());
+ if field_types.is_empty() {
+ cov_mark::hit!(destructure_tuple_no_tuple);
+ return None;
+ }
+
+ let name = ident_pat.name()?.to_string();
+ let range = ident_pat.syntax().text_range();
+
+ let usages = ctx.sema.to_def(&ident_pat).map(|def| {
+ Definition::Local(def)
+ .usages(&ctx.sema)
+ .in_scope(SearchScope::single_file(ctx.file_id()))
+ .all()
+ });
+
+ let field_names = (0..field_types.len())
+ .map(|i| generate_name(ctx, i, &name, &ident_pat, &usages))
+ .collect::<Vec<_>>();
+
+ Some(TupleData { ident_pat, range, ref_type, field_names, usages })
+}
+
+fn generate_name(
+ _ctx: &AssistContext<'_>,
+ index: usize,
+ _tuple_name: &str,
+ _ident_pat: &IdentPat,
+ _usages: &Option<UsageSearchResult>,
+) -> String {
+ // FIXME: detect if name already used
+ format!("_{}", index)
+}
+
+enum RefType {
+ ReadOnly,
+ Mutable,
+}
+struct TupleData {
+ ident_pat: IdentPat,
+ // name: String,
+ range: TextRange,
+ ref_type: Option<RefType>,
+ field_names: Vec<String>,
+ // field_types: Vec<Type>,
+ usages: Option<UsageSearchResult>,
+}
+fn edit_tuple_assignment(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ data: &TupleData,
+ in_sub_pattern: bool,
+) {
+ let tuple_pat = {
+ let original = &data.ident_pat;
+ let is_ref = original.ref_token().is_some();
+ let is_mut = original.mut_token().is_some();
+ let fields = data.field_names.iter().map(|name| {
+ ast::Pat::from(ast::make::ident_pat(is_ref, is_mut, ast::make::name(name)))
+ });
+ ast::make::tuple_pat(fields)
+ };
+
+ let add_cursor = |text: &str| {
+ // place cursor on first tuple item
+ let first_tuple = &data.field_names[0];
+ text.replacen(first_tuple, &format!("$0{}", first_tuple), 1)
+ };
+
+ // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
+ if in_sub_pattern {
+ let text = format!(" @ {}", tuple_pat);
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = add_cursor(&text);
+ builder.insert_snippet(cap, data.range.end(), snip);
+ }
+ None => builder.insert(data.range.end(), text),
+ };
+ } else {
+ let text = tuple_pat.to_string();
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = add_cursor(&text);
+ builder.replace_snippet(cap, data.range, snip);
+ }
+ None => builder.replace(data.range, text),
+ };
+ }
+}
+
+fn edit_tuple_usages(
+ data: &TupleData,
+ builder: &mut AssistBuilder,
+ ctx: &AssistContext<'_>,
+ in_sub_pattern: bool,
+) {
+ if let Some(usages) = data.usages.as_ref() {
+ for (file_id, refs) in usages.iter() {
+ builder.edit_file(*file_id);
+
+ for r in refs {
+ edit_tuple_usage(ctx, builder, r, data, in_sub_pattern);
+ }
+ }
+ }
+}
+fn edit_tuple_usage(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ usage: &FileReference,
+ data: &TupleData,
+ in_sub_pattern: bool,
+) {
+ match detect_tuple_index(usage, data) {
+ Some(index) => edit_tuple_field_usage(ctx, builder, data, index),
+ None => {
+ if in_sub_pattern {
+ cov_mark::hit!(destructure_tuple_call_with_subpattern);
+ return;
+ }
+
+ // no index access -> make invalid -> requires handling by user
+ // -> put usage in block comment
+ //
+ // Note: For macro invocations this might result in still valid code:
+ // When a macro accepts the tuple as argument, as well as no arguments at all,
+ // commenting out the tuple still leaves the macro call working (see `tests::in_macro_call::empty_macro`).
+ // But this is an unlikely case. Usually the resulting macro call will become erroneous.
+ builder.insert(usage.range.start(), "/*");
+ builder.insert(usage.range.end(), "*/");
+ }
+ }
+}
+
+fn edit_tuple_field_usage(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ data: &TupleData,
+ index: TupleIndex,
+) {
+ let field_name = &data.field_names[index.index];
+
+ if data.ref_type.is_some() {
+ let ref_data = handle_ref_field_usage(ctx, &index.field_expr);
+ builder.replace(ref_data.range, ref_data.format(field_name));
+ } else {
+ builder.replace(index.range, field_name);
+ }
+}
+struct TupleIndex {
+ index: usize,
+ range: TextRange,
+ field_expr: FieldExpr,
+}
+fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIndex> {
+ // usage is IDENT
+ // IDENT
+ // NAME_REF
+ // PATH_SEGMENT
+ // PATH
+ // PATH_EXPR
+ // PAREN_EXPR*
+ // FIELD_EXPR
+
+ let node = usage
+ .name
+ .syntax()
+ .ancestors()
+ .skip_while(|s| !ast::PathExpr::can_cast(s.kind()))
+ .skip(1) // PATH_EXPR
+ .find(|s| !ast::ParenExpr::can_cast(s.kind()))?; // skip parentheses
+
+ if let Some(field_expr) = ast::FieldExpr::cast(node) {
+ let idx = field_expr.name_ref()?.as_tuple_field()?;
+ if idx < data.field_names.len() {
+ // special case: in macro call -> range of `field_expr` in applied macro, NOT range in actual file!
+ if field_expr.syntax().ancestors().any(|a| ast::MacroStmts::can_cast(a.kind())) {
+ cov_mark::hit!(destructure_tuple_macro_call);
+
+ // issue: cannot differentiate between tuple index passed into macro or tuple index as result of macro:
+ // ```rust
+ // macro_rules! m {
+ // ($t1:expr, $t2:expr) => { $t1; $t2.0 }
+ // }
+ // let t = (1,2);
+ // m!(t.0, t)
+ // ```
+ // -> 2 tuple index usages detected!
+ //
+ // -> only handle `t`
+ return None;
+ }
+
+ Some(TupleIndex { index: idx, range: field_expr.syntax().text_range(), field_expr })
+ } else {
+ // tuple index out of range
+ None
+ }
+ } else {
+ None
+ }
+}
+
+struct RefData {
+ range: TextRange,
+ needs_deref: bool,
+ needs_parentheses: bool,
+}
+impl RefData {
+ fn format(&self, field_name: &str) -> String {
+ match (self.needs_deref, self.needs_parentheses) {
+ (true, true) => format!("(*{})", field_name),
+ (true, false) => format!("*{}", field_name),
+ (false, true) => format!("({})", field_name),
+ (false, false) => field_name.to_string(),
+ }
+ }
+}
+fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> RefData {
+ let s = field_expr.syntax();
+ let mut ref_data =
+ RefData { range: s.text_range(), needs_deref: true, needs_parentheses: true };
+
+ let parent = match s.parent().map(ast::Expr::cast) {
+ Some(Some(parent)) => parent,
+ Some(None) => {
+ ref_data.needs_parentheses = false;
+ return ref_data;
+ }
+ None => return ref_data,
+ };
+
+ match parent {
+ ast::Expr::ParenExpr(it) => {
+ // already parens in place -> don't replace
+ ref_data.needs_parentheses = false;
+ // there might be a ref outside: `&(t.0)` -> can be removed
+ if let Some(it) = it.syntax().parent().and_then(ast::RefExpr::cast) {
+ ref_data.needs_deref = false;
+ ref_data.range = it.syntax().text_range();
+ }
+ }
+ ast::Expr::RefExpr(it) => {
+ // `&*` -> cancel each other out
+ ref_data.needs_deref = false;
+ ref_data.needs_parentheses = false;
+ // might be surrounded by parens -> can be removed too
+ match it.syntax().parent().and_then(ast::ParenExpr::cast) {
+ Some(parent) => ref_data.range = parent.syntax().text_range(),
+ None => ref_data.range = it.syntax().text_range(),
+ };
+ }
+ // higher precedence than deref `*`
+ // https://doc.rust-lang.org/reference/expressions.html#expression-precedence
+ // -> requires parentheses
+ ast::Expr::PathExpr(_it) => {}
+ ast::Expr::MethodCallExpr(it) => {
+ // `field_expr` is `self_param` (otherwise it would be in `ArgList`)
+
+ // test if there's already auto-ref in place (`value` -> `&value`)
+ // -> no method accepting `self`, but `&self` -> no need for deref
+ //
+ // other combinations (`&value` -> `value`, `&&value` -> `&value`, `&value` -> `&&value`) might or might not be able to auto-ref/deref,
+ // but there might be trait implementations that an added `&` might resolve to
+ // -> ONLY handle auto-ref from `value` to `&value`
+ fn is_auto_ref(ctx: &AssistContext<'_>, call_expr: &MethodCallExpr) -> bool {
+ fn impl_(ctx: &AssistContext<'_>, call_expr: &MethodCallExpr) -> Option<bool> {
+ let rec = call_expr.receiver()?;
+ let rec_ty = ctx.sema.type_of_expr(&rec)?.original();
+ // input must be actual value
+ if rec_ty.is_reference() {
+ return Some(false);
+ }
+
+ // doesn't resolve trait impl
+ let f = ctx.sema.resolve_method_call(call_expr)?;
+ let self_param = f.self_param(ctx.db())?;
+ // self must be ref
+ match self_param.access(ctx.db()) {
+ hir::Access::Shared | hir::Access::Exclusive => Some(true),
+ hir::Access::Owned => Some(false),
+ }
+ }
+ impl_(ctx, call_expr).unwrap_or(false)
+ }
+
+ if is_auto_ref(ctx, &it) {
+ ref_data.needs_deref = false;
+ ref_data.needs_parentheses = false;
+ }
+ }
+ ast::Expr::FieldExpr(_it) => {
+ // `t.0.my_field`
+ ref_data.needs_deref = false;
+ ref_data.needs_parentheses = false;
+ }
+ ast::Expr::IndexExpr(_it) => {
+ // `t.0[1]`
+ ref_data.needs_deref = false;
+ ref_data.needs_parentheses = false;
+ }
+ ast::Expr::TryExpr(_it) => {
+ // `t.0?`
+ // requires deref and parens: `(*_0)`
+ }
+ // lower precedence than deref `*` -> no parens
+ _ => {
+ ref_data.needs_parentheses = false;
+ }
+ };
+
+ ref_data
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ // Tests for direct tuple destructure:
+ // `let $0t = (1,2);` -> `let (_0, _1) = (1,2);`
+
+ fn assist(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, false)
+ }
+
+ #[test]
+ fn dont_trigger_on_unit() {
+ cov_mark::check!(destructure_tuple_no_tuple);
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+let $0v = ();
+}
+ "#,
+ )
+ }
+ #[test]
+ fn dont_trigger_on_number() {
+ cov_mark::check!(destructure_tuple_no_tuple);
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+let $0v = 32;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn destructure_3_tuple() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2,3);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1, _2) = (1,2,3);
+}
+ "#,
+ )
+ }
+ #[test]
+ fn destructure_2_tuple() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+}
+ "#,
+ )
+ }
+ #[test]
+ fn replace_indices() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2,3);
+ let v1 = tup.0;
+ let v2 = tup.1;
+ let v3 = tup.2;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1, _2) = (1,2,3);
+ let v1 = _0;
+ let v2 = _1;
+ let v3 = _2;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn replace_usage_in_parentheses() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2,3);
+ let a = (tup).1;
+ let b = ((tup)).1;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1, _2) = (1,2,3);
+ let a = _1;
+ let b = _1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn handle_function_call() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2);
+ let v = tup.into();
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = /*tup*/.into();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn handle_invalid_index() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2);
+ let v = tup.3;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = /*tup*/.3;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_replace_variable_with_same_name_as_tuple() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let tup = (1,2);
+ let v = tup.1;
+ let $0tup = (1,2,3);
+ let v = tup.1;
+ let tup = (1,2,3);
+ let v = tup.1;
+}
+ "#,
+ r#"
+fn main() {
+ let tup = (1,2);
+ let v = tup.1;
+ let ($0_0, _1, _2) = (1,2,3);
+ let v = _1;
+ let tup = (1,2,3);
+ let v = tup.1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_function_call_in_tuple_item() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0t = ("3.14", 0);
+ let pi: f32 = t.0.parse().unwrap_or(0.0);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = ("3.14", 0);
+ let pi: f32 = _0.parse().unwrap_or(0.0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_type() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0t: (usize, i32) = (1,2);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1): (usize, i32) = (1,2);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn destructure_reference() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let t = (1,2);
+ let $0t = &t;
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let t = (1,2);
+ let ($0_0, _1) = &t;
+ let v = *_0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn destructure_multiple_reference() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let t = (1,2);
+ let $0t = &&t;
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let t = (1,2);
+ let ($0_0, _1) = &&t;
+ let v = *_0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_reference() {
+ check_assist(
+ assist,
+ r#"
+fn foo(t: &(usize, usize)) -> usize {
+ match t {
+ &$0t => t.0
+ }
+}
+ "#,
+ r#"
+fn foo(t: &(usize, usize)) -> usize {
+ match t {
+ &($0_0, _1) => _0
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_ref() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let ref $0t = (1,2);
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let (ref $0_0, ref _1) = (1,2);
+ let v = *_0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_mut() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let mut $0t = (1,2);
+ t.0 = 42;
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let (mut $0_0, mut _1) = (1,2);
+ _0 = 42;
+ let v = _0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_ref_mut() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let ref mut $0t = (1,2);
+ t.0 = 42;
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let (ref mut $0_0, ref mut _1) = (1,2);
+ *_0 = 42;
+ let v = *_0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_for_non_tuple_reference() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+ let v = 42;
+ let $0v = &42;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_on_static_tuple() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+static $0TUP: (usize, usize) = (1,2);
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_on_wildcard() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+ let $0_ = (1,2);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_in_struct() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+struct S {
+ $0tup: (usize, usize),
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_in_struct_creation() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+struct S {
+ tup: (usize, usize),
+}
+fn main() {
+ let s = S {
+ $0tup: (1,2),
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_on_tuple_struct() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+struct S(usize, usize);
+fn main() {
+ let $0s = S(1,2);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_when_subpattern_exists() {
+ // sub-pattern is only allowed with IdentPat (name), not other patterns (like TuplePat)
+ cov_mark::check!(destructure_tuple_subpattern);
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn sum(t: (usize, usize)) -> usize {
+ match t {
+ $0t @ (1..=3,1..=3) => t.0 + t.1,
+ _ => 0,
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_subpattern() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let t1 @ (_, $0t2) = (1, (2,3));
+ let v = t1.0 + t2.0 + t2.1;
+}
+ "#,
+ r#"
+fn main() {
+ let t1 @ (_, ($0_0, _1)) = (1, (2,3));
+ let v = t1.0 + _0 + _1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_nested_tuple() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let ($0tup, v) = ((1,2),3);
+}
+ "#,
+ r#"
+fn main() {
+ let (($0_0, _1), v) = ((1,2),3);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_closure() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2,3);
+ let f = |v| v + tup.1;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1, _2) = (1,2,3);
+ let f = |v| v + _1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_closure_args() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let f = |$0t| t.0 + t.1;
+ let v = f((1,2));
+}
+ "#,
+ r#"
+fn main() {
+ let f = |($0_0, _1)| _0 + _1;
+ let v = f((1,2));
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_function_args() {
+ check_assist(
+ assist,
+ r#"
+fn f($0t: (usize, usize)) {
+ let v = t.0;
+}
+ "#,
+ r#"
+fn f(($0_0, _1): (usize, usize)) {
+ let v = _0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_if_let() {
+ check_assist(
+ assist,
+ r#"
+fn f(t: (usize, usize)) {
+ if let $0t = t {
+ let v = t.0;
+ }
+}
+ "#,
+ r#"
+fn f(t: (usize, usize)) {
+ if let ($0_0, _1) = t {
+ let v = _0;
+ }
+}
+ "#,
+ )
+ }
+ #[test]
+ fn in_if_let_option() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: option
+fn f(o: Option<(usize, usize)>) {
+ if let Some($0t) = o {
+ let v = t.0;
+ }
+}
+ "#,
+ r#"
+fn f(o: Option<(usize, usize)>) {
+ if let Some(($0_0, _1)) = o {
+ let v = _0;
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_match() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ match (1,2) {
+ $0t => t.1,
+ };
+}
+ "#,
+ r#"
+fn main() {
+ match (1,2) {
+ ($0_0, _1) => _1,
+ };
+}
+ "#,
+ )
+ }
+ #[test]
+ fn in_match_option() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: option
+fn main() {
+ match Some((1,2)) {
+ Some($0t) => t.1,
+ _ => 0,
+ };
+}
+ "#,
+ r#"
+fn main() {
+ match Some((1,2)) {
+ Some(($0_0, _1)) => _1,
+ _ => 0,
+ };
+}
+ "#,
+ )
+ }
+ #[test]
+ fn in_match_reference_option() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: option
+fn main() {
+ let t = (1,2);
+ match Some(&t) {
+ Some($0t) => t.1,
+ _ => 0,
+ };
+}
+ "#,
+ r#"
+fn main() {
+ let t = (1,2);
+ match Some(&t) {
+ Some(($0_0, _1)) => *_1,
+ _ => 0,
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_for() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: iterators
+fn main() {
+ for $0t in core::iter::repeat((1,2)) {
+ let v = t.1;
+ }
+}
+ "#,
+ r#"
+fn main() {
+ for ($0_0, _1) in core::iter::repeat((1,2)) {
+ let v = _1;
+ }
+}
+ "#,
+ )
+ }
+ #[test]
+ fn in_for_nested() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: iterators
+fn main() {
+ for (a, $0b) in core::iter::repeat((1,(2,3))) {
+ let v = b.1;
+ }
+}
+ "#,
+ r#"
+fn main() {
+ for (a, ($0_0, _1)) in core::iter::repeat((1,(2,3))) {
+ let v = _1;
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_on_tuple_usage() {
+ // Improvement: it might be reasonable to allow & implement this
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+ let t = (1,2);
+ let v = $0t.0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn replace_all() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0t = (1,2);
+ let v = t.1;
+ let s = (t.0 + t.1) / 2;
+ let f = |v| v + t.0;
+ let r = f(t.1);
+ let e = t == (9,0);
+ let m =
+ match t {
+ (_,2) if t.0 > 2 => 1,
+ _ => 0,
+ };
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = _1;
+ let s = (_0 + _1) / 2;
+ let f = |v| v + _0;
+ let r = f(_1);
+ let e = /*t*/ == (9,0);
+ let m =
+ match /*t*/ {
+ (_,2) if _0 > 2 => 1,
+ _ => 0,
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn non_trivial_tuple_assignment() {
+ check_assist(
+ assist,
+ r#"
+fn main {
+ let $0t =
+ if 1 > 2 {
+ (1,2)
+ } else {
+ (5,6)
+ };
+ let v1 = t.0;
+ let v2 =
+ if t.0 > t.1 {
+ t.0 - t.1
+ } else {
+ t.1 - t.0
+ };
+}
+ "#,
+ r#"
+fn main {
+ let ($0_0, _1) =
+ if 1 > 2 {
+ (1,2)
+ } else {
+ (5,6)
+ };
+ let v1 = _0;
+ let v2 =
+ if _0 > _1 {
+ _0 - _1
+ } else {
+ _1 - _0
+ };
+}
+ "#,
+ )
+ }
+
+ mod assist {
+ use super::*;
+ use crate::tests::check_assist_by_label;
+
+ fn assist(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, true)
+ }
+ fn in_place_assist(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, false)
+ }
+
+ pub(crate) fn check_in_place_assist(ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_assist_by_label(
+ in_place_assist,
+ ra_fixture_before,
+ ra_fixture_after,
+ // "Destructure tuple in place",
+ "Destructure tuple",
+ );
+ }
+
+ pub(crate) fn check_sub_pattern_assist(ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_assist_by_label(
+ assist,
+ ra_fixture_before,
+ ra_fixture_after,
+ "Destructure tuple in sub-pattern",
+ );
+ }
+
+ pub(crate) fn check_both_assists(
+ ra_fixture_before: &str,
+ ra_fixture_after_in_place: &str,
+ ra_fixture_after_in_sub_pattern: &str,
+ ) {
+ check_in_place_assist(ra_fixture_before, ra_fixture_after_in_place);
+ check_sub_pattern_assist(ra_fixture_before, ra_fixture_after_in_sub_pattern);
+ }
+ }
+
+ /// Tests for destructure of tuple in sub-pattern:
+ /// `let $0t = (1,2);` -> `let t @ (_0, _1) = (1,2);`
+ mod sub_pattern {
+ use super::assist::*;
+ use super::*;
+ use crate::tests::check_assist_by_label;
+
+ #[test]
+ fn destructure_in_sub_pattern() {
+ check_sub_pattern_assist(
+ r#"
+#![feature(bindings_after_at)]
+
+fn main() {
+ let $0t = (1,2);
+}
+ "#,
+ r#"
+#![feature(bindings_after_at)]
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn trigger_both_destructure_tuple_assists() {
+ fn assist(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, true)
+ }
+ let text = r#"
+fn main() {
+ let $0t = (1,2);
+}
+ "#;
+ check_assist_by_label(
+ assist,
+ text,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+}
+ "#,
+ "Destructure tuple in place",
+ );
+ check_assist_by_label(
+ assist,
+ text,
+ r#"
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+}
+ "#,
+ "Destructure tuple in sub-pattern",
+ );
+ }
+
+ #[test]
+ fn replace_indices() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let $0t = (1,2);
+ let v1 = t.0;
+ let v2 = t.1;
+}
+ "#,
+ r#"
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ let v1 = _0;
+ let v2 = _1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_function_call() {
+ cov_mark::check!(destructure_tuple_call_with_subpattern);
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let $0t = (1,2);
+ let v = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ let v = t.into();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_type() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let $0t: (usize, i32) = (1,2);
+ let v = t.1;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let t @ ($0_0, _1): (usize, i32) = (1,2);
+ let v = _1;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_function_args() {
+ check_sub_pattern_assist(
+ r#"
+fn f($0t: (usize, usize)) {
+ let v = t.0;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn f(t @ ($0_0, _1): (usize, usize)) {
+ let v = _0;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_ref() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let ref $0t = (1,2);
+ let v = t.1;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let ref t @ (ref $0_0, ref _1) = (1,2);
+ let v = *_1;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+ #[test]
+ fn with_mut() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let mut $0t = (1,2);
+ let v = t.1;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let mut t @ (mut $0_0, mut _1) = (1,2);
+ let v = _1;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+ #[test]
+ fn with_ref_mut() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let ref mut $0t = (1,2);
+ let v = t.1;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let ref mut t @ (ref mut $0_0, ref mut _1) = (1,2);
+ let v = *_1;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+ }
+
+ /// Tests for tuple usage in macro call:
+ /// `println!("{}", t.0)`
+ mod in_macro_call {
+ use super::assist::*;
+
+ #[test]
+ fn detect_macro_call() {
+ cov_mark::check!(destructure_tuple_macro_call);
+ check_in_place_assist(
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t.0);
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/.0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_usage() {
+ check_both_assists(
+ // leading `"foo"` to ensure `$e` doesn't start at position `0`
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t);
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/);
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!(t);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_function_usage() {
+ check_both_assists(
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t.into());
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/.into());
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!(t.into());
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_index_usage() {
+ check_both_assists(
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t.0);
+}
+ "#,
+ // FIXME: replace `t.0` with `_0` (cannot detect range of tuple index in macro call)
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/.0);
+}
+ "#,
+ // FIXME: replace `t.0` with `_0`
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!(t.0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_in_parentheses_index_usage() {
+ check_both_assists(
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!((t).0);
+}
+ "#,
+ // FIXME: replace `(t).0` with `_0`
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!((/*t*/).0);
+}
+ "#,
+ // FIXME: replace `(t).0` with `_0`
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!((t).0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn empty_macro() {
+ check_in_place_assist(
+ r#"
+macro_rules! m {
+ () => { "foo" };
+ ($e:expr) => { $e; "foo" };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t);
+}
+ "#,
+ // FIXME: macro allows no arg -> is valid. But assist should result in invalid code
+ r#"
+macro_rules! m {
+ () => { "foo" };
+ ($e:expr) => { $e; "foo" };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/);
+}
+ "#,
+ )
+ }
+
+        #[test]
+        // Tuple index appearing both inside the macro definition (`$t.0`) and in the
+        // macro call arguments (`t.0`): only call-site occurrences are candidates for
+        // rewriting, and even those currently only get commented out.
+        fn tuple_index_in_macro() {
+            check_both_assists(
+                r#"
+macro_rules! m {
+    ($t:expr, $i:expr) => { $t.0 + $i };
+}
+
+fn main() {
+    let $0t = (1,2);
+    m!(t, t.0);
+}
+                "#,
+                // FIXME: replace `t.0` in macro call (not IN macro) with `_0`
+                r#"
+macro_rules! m {
+    ($t:expr, $i:expr) => { $t.0 + $i };
+}
+
+fn main() {
+    let ($0_0, _1) = (1,2);
+    m!(/*t*/, /*t*/.0);
+}
+                "#,
+                // FIXME: replace `t.0` in macro call with `_0`
+                r#"
+macro_rules! m {
+    ($t:expr, $i:expr) => { $t.0 + $i };
+}
+
+fn main() {
+    let t @ ($0_0, _1) = (1,2);
+    m!(t, t.0);
+}
+                "#,
+            )
+        }
+ }
+
+    mod refs {
+        use super::assist::*;
+
+        // Tests for tuples bound behind `&`/`&mut`/`ref`: after destructuring, each
+        // field binding is a reference, so usages may need a deref (`*_0`), extra
+        // parentheses, or removal of a now-redundant `&`.
+
+        #[test]
+        fn no_ref() {
+            check_in_place_assist(
+                r#"
+fn main() {
+    let $0t = &(1,2);
+    let v: i32 = t.0;
+}
+                "#,
+                r#"
+fn main() {
+    let ($0_0, _1) = &(1,2);
+    let v: i32 = *_0;
+}
+                "#,
+            )
+        }
+        #[test]
+        fn no_ref_with_parens() {
+            check_in_place_assist(
+                r#"
+fn main() {
+    let $0t = &(1,2);
+    let v: i32 = (t.0);
+}
+                "#,
+                r#"
+fn main() {
+    let ($0_0, _1) = &(1,2);
+    let v: i32 = (*_0);
+}
+                "#,
+            )
+        }
+        #[test]
+        fn with_ref() {
+            check_in_place_assist(
+                r#"
+fn main() {
+    let $0t = &(1,2);
+    let v: &i32 = &t.0;
+}
+                "#,
+                r#"
+fn main() {
+    let ($0_0, _1) = &(1,2);
+    let v: &i32 = _0;
+}
+                "#,
+            )
+        }
+        #[test]
+        fn with_ref_in_parens_ref() {
+            check_in_place_assist(
+                r#"
+fn main() {
+    let $0t = &(1,2);
+    let v: &i32 = &(t.0);
+}
+                "#,
+                r#"
+fn main() {
+    let ($0_0, _1) = &(1,2);
+    let v: &i32 = _0;
+}
+                "#,
+            )
+        }
+        #[test]
+        fn with_ref_in_ref_parens() {
+            check_in_place_assist(
+                r#"
+fn main() {
+    let $0t = &(1,2);
+    let v: &i32 = (&t.0);
+}
+                "#,
+                r#"
+fn main() {
+    let ($0_0, _1) = &(1,2);
+    let v: &i32 = _0;
+}
+                "#,
+            )
+        }
+
+        #[test]
+        fn deref_and_parentheses() {
+            // Operator/Expressions with higher precedence than deref (`*`):
+            // https://doc.rust-lang.org/reference/expressions.html#expression-precedence
+            // * Path
+            // * Method call
+            // * Field expression
+            // * Function calls, array indexing
+            // * `?`
+            check_in_place_assist(
+                r#"
+//- minicore: option
+fn f1(v: i32) {}
+fn f2(v: &i32) {}
+trait T {
+    fn do_stuff(self) {}
+}
+impl T for i32 {
+    fn do_stuff(self) {}
+}
+impl T for &i32 {
+    fn do_stuff(self) {}
+}
+struct S4 {
+    value: i32,
+}
+
+fn foo() -> Option<()> {
+    let $0t = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5);
+    let v: i32 = t.0; // deref, no parens
+    let v: &i32 = &t.0; // no deref, no parens, remove `&`
+    f1(t.0); // deref, no parens
+    f2(&t.0); // `&*` -> cancel out -> no deref, no parens
+    // https://github.com/rust-lang/rust-analyzer/issues/1109#issuecomment-658868639
+    // let v: i32 = t.1.0; // no deref, no parens
+    let v: i32 = t.4.value; // no deref, no parens
+    t.0.do_stuff(); // deref, parens
+    let v: i32 = t.2?; // deref, parens
+    let v: i32 = t.3[0]; // no deref, no parens
+    (t.0).do_stuff(); // deref, no additional parens
+    let v: i32 = *t.5; // deref (-> 2), no parens
+
+    None
+}
+                "#,
+                r#"
+fn f1(v: i32) {}
+fn f2(v: &i32) {}
+trait T {
+    fn do_stuff(self) {}
+}
+impl T for i32 {
+    fn do_stuff(self) {}
+}
+impl T for &i32 {
+    fn do_stuff(self) {}
+}
+struct S4 {
+    value: i32,
+}
+
+fn foo() -> Option<()> {
+    let ($0_0, _1, _2, _3, _4, _5) = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5);
+    let v: i32 = *_0; // deref, no parens
+    let v: &i32 = _0; // no deref, no parens, remove `&`
+    f1(*_0); // deref, no parens
+    f2(_0); // `&*` -> cancel out -> no deref, no parens
+    // https://github.com/rust-lang/rust-analyzer/issues/1109#issuecomment-658868639
+    // let v: i32 = t.1.0; // no deref, no parens
+    let v: i32 = _4.value; // no deref, no parens
+    (*_0).do_stuff(); // deref, parens
+    let v: i32 = (*_2)?; // deref, parens
+    let v: i32 = _3[0]; // no deref, no parens
+    (*_0).do_stuff(); // deref, no additional parens
+    let v: i32 = **_5; // deref (-> 2), no parens
+
+    None
+}
+                "#,
+            )
+        }
+
+        // ---------
+        // auto-ref/deref
+
+        #[test]
+        fn self_auto_ref_doesnt_need_deref() {
+            check_in_place_assist(
+                r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+    fn f(&self) {}
+}
+
+fn main() {
+    let $0t = &(S,2);
+    let s = t.0.f();
+}
+                "#,
+                r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+    fn f(&self) {}
+}
+
+fn main() {
+    let ($0_0, _1) = &(S,2);
+    let s = _0.f();
+}
+                "#,
+            )
+        }
+
+        #[test]
+        fn self_owned_requires_deref() {
+            check_in_place_assist(
+                r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+    fn f(self) {}
+}
+
+fn main() {
+    let $0t = &(S,2);
+    let s = t.0.f();
+}
+                "#,
+                r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+    fn f(self) {}
+}
+
+fn main() {
+    let ($0_0, _1) = &(S,2);
+    let s = (*_0).f();
+}
+                "#,
+            )
+        }
+
+        #[test]
+        fn self_auto_ref_in_trait_call_doesnt_require_deref() {
+            check_in_place_assist(
+                r#"
+trait T {
+    fn f(self);
+}
+#[derive(Clone, Copy)]
+struct S;
+impl T for &S {
+    fn f(self) {}
+}
+
+fn main() {
+    let $0t = &(S,2);
+    let s = t.0.f();
+}
+                "#,
+                // FIXME: doesn't need deref * parens. But `ctx.sema.resolve_method_call` doesn't resolve trait implementations
+                r#"
+trait T {
+    fn f(self);
+}
+#[derive(Clone, Copy)]
+struct S;
+impl T for &S {
+    fn f(self) {}
+}
+
+fn main() {
+    let ($0_0, _1) = &(S,2);
+    let s = (*_0).f();
+}
+                "#,
+            )
+        }
+        #[test]
+        fn no_auto_deref_because_of_owned_and_ref_trait_impl() {
+            check_in_place_assist(
+                r#"
+trait T {
+    fn f(self);
+}
+#[derive(Clone, Copy)]
+struct S;
+impl T for S {
+    fn f(self) {}
+}
+impl T for &S {
+    fn f(self) {}
+}
+
+fn main() {
+    let $0t = &(S,2);
+    let s = t.0.f();
+}
+                "#,
+                r#"
+trait T {
+    fn f(self);
+}
+#[derive(Clone, Copy)]
+struct S;
+impl T for S {
+    fn f(self) {}
+}
+impl T for &S {
+    fn f(self) {}
+}
+
+fn main() {
+    let ($0_0, _1) = &(S,2);
+    let s = (*_0).f();
+}
+                "#,
+            )
+        }
+
+        #[test]
+        fn no_outer_parens_when_ref_deref() {
+            check_in_place_assist(
+                r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+    fn do_stuff(&self) -> i32 { 42 }
+}
+fn main() {
+    let $0t = &(S,&S);
+    let v = (&t.0).do_stuff();
+}
+                "#,
+                r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+    fn do_stuff(&self) -> i32 { 42 }
+}
+fn main() {
+    let ($0_0, _1) = &(S,&S);
+    let v = _0.do_stuff();
+}
+                "#,
+            )
+        }
+
+        #[test]
+        fn auto_ref_deref() {
+            check_in_place_assist(
+                r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+    fn do_stuff(&self) -> i32 { 42 }
+}
+fn main() {
+    let $0t = &(S,&S);
+    let v = (&t.0).do_stuff(); // no deref, remove parens
+    // `t.0` gets auto-refed -> no deref needed -> no parens
+    let v = t.0.do_stuff(); // no deref, no parens
+    let v = &t.0.do_stuff(); // `&` is for result -> no deref, no parens
+    // deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
+    let v = t.1.do_stuff(); // deref, parens
+}
+                "#,
+                r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+    fn do_stuff(&self) -> i32 { 42 }
+}
+fn main() {
+    let ($0_0, _1) = &(S,&S);
+    let v = _0.do_stuff(); // no deref, remove parens
+    // `t.0` gets auto-refed -> no deref needed -> no parens
+    let v = _0.do_stuff(); // no deref, no parens
+    let v = &_0.do_stuff(); // `&` is for result -> no deref, no parens
+    // deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
+    let v = (*_1).do_stuff(); // deref, parens
+}
+                "#,
+            )
+        }
+
+        #[test]
+        fn mutable() {
+            check_in_place_assist(
+                r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+fn f_mut(v: &mut i32) { *v = 42; }
+
+fn main() {
+    let $0t = &mut (1,2);
+    let v = t.0;
+    t.0 = 42;
+    f_owned(t.0);
+    f(&t.0);
+    f_mut(&mut t.0);
+}
+                "#,
+                r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+fn f_mut(v: &mut i32) { *v = 42; }
+
+fn main() {
+    let ($0_0, _1) = &mut (1,2);
+    let v = *_0;
+    *_0 = 42;
+    f_owned(*_0);
+    f(_0);
+    f_mut(_0);
+}
+                "#,
+            )
+        }
+
+        #[test]
+        fn with_ref_keyword() {
+            check_in_place_assist(
+                r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+
+fn main() {
+    let ref $0t = (1,2);
+    let v = t.0;
+    f_owned(t.0);
+    f(&t.0);
+}
+                "#,
+                r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+
+fn main() {
+    let (ref $0_0, ref _1) = (1,2);
+    let v = *_0;
+    f_owned(*_0);
+    f(_0);
+}
+                "#,
+            )
+        }
+        #[test]
+        fn with_ref_mut_keywords() {
+            check_in_place_assist(
+                r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+fn f_mut(v: &mut i32) { *v = 42; }
+
+fn main() {
+    let ref mut $0t = (1,2);
+    let v = t.0;
+    t.0 = 42;
+    f_owned(t.0);
+    f(&t.0);
+    f_mut(&mut t.0);
+}
+                "#,
+                r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+fn f_mut(v: &mut i32) { *v = 42; }
+
+fn main() {
+    let (ref mut $0_0, ref mut _1) = (1,2);
+    let v = *_0;
+    *_0 = 42;
+    f_owned(*_0);
+    f(_0);
+    f_mut(_0);
+}
+                "#,
+            )
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
new file mode 100644
index 000000000..87f5018fb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
@@ -0,0 +1,900 @@
+use either::Either;
+use hir::{AssocItem, HasVisibility, Module, ModuleDef, Name, PathResolution, ScopeDef};
+use ide_db::{
+ defs::{Definition, NameRefClass},
+ search::SearchScope,
+};
+use stdx::never;
+use syntax::{
+ ast::{self, make},
+ ted, AstNode, Direction, SyntaxNode, SyntaxToken, T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: expand_glob_import
+//
+// Expands glob imports.
+//
+// ```
+// mod foo {
+// pub struct Bar;
+// pub struct Baz;
+// }
+//
+// use foo::*$0;
+//
+// fn qux(bar: Bar, baz: Baz) {}
+// ```
+// ->
+// ```
+// mod foo {
+// pub struct Bar;
+// pub struct Baz;
+// }
+//
+// use foo::{Bar, Baz};
+//
+// fn qux(bar: Bar, baz: Baz) {}
+// ```
+pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // Applicable only with the cursor on the `*` token of a glob use-tree.
+    let star = ctx.find_token_syntax_at_offset(T![*])?;
+    let use_tree = star.parent().and_then(ast::UseTree::cast)?;
+    let (parent, mod_path) = find_parent_and_path(&star)?;
+    // The path before the `*` must resolve to a module; globs of enums etc. are not handled.
+    let target_module = match ctx.sema.resolve_path(&mod_path)? {
+        PathResolution::Def(ModuleDef::Module(it)) => it,
+        _ => return None,
+    };
+
+    let current_scope = ctx.sema.scope(&star.parent()?)?;
+    let current_module = current_scope.module();
+
+    // Candidate names the glob brings into scope, and defs already imported by sibling `use`s.
+    let refs_in_target = find_refs_in_mod(ctx, target_module, current_module)?;
+    let imported_defs = find_imported_defs(ctx, star)?;
+
+    let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
+    acc.add(
+        AssistId("expand_glob_import", AssistKind::RefactorRewrite),
+        "Expand glob import",
+        target.text_range(),
+        |builder| {
+            let use_tree = builder.make_mut(use_tree);
+
+            let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
+            let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
+                let path = make::ext::ident_path(&n.to_string());
+                make::use_tree(path, None, None, false)
+            }))
+            .clone_for_update();
+
+            match use_tree.star_token() {
+                Some(star) => {
+                    // Keep `{}` only when there is a leading path and the count is not exactly
+                    // one; a single name becomes `use path::Name;` with the braces stripped.
+                    let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1;
+                    if needs_braces {
+                        ted::replace(star, expanded.syntax())
+                    } else {
+                        let without_braces = expanded
+                            .syntax()
+                            .children_with_tokens()
+                            .filter(|child| !matches!(child.kind(), T!['{'] | T!['}']))
+                            .collect();
+                        ted::replace_with_many(star, without_braces)
+                    }
+                }
+                // The `*` was found above, so it must still be present when the edit runs.
+                None => never!(),
+            }
+        },
+    )
+}
+
+/// Walks up from the `*` token to the nearest enclosing use-tree list or use-tree,
+/// returning it together with the path that precedes the glob.
+fn find_parent_and_path(
+    star: &SyntaxToken,
+) -> Option<(Either<ast::UseTree, ast::UseTreeList>, ast::Path)> {
+    // Prefer the use-tree *list* (`use p::{..., *}`) over the bare use-tree (`use p::*`).
+    return star.parent_ancestors().find_map(|n| {
+        find_use_tree_list(n.clone())
+            .map(|(u, p)| (Either::Right(u), p))
+            .or_else(|| find_use_tree(n).map(|(u, p)| (Either::Left(u), p)))
+    });
+
+    fn find_use_tree_list(n: SyntaxNode) -> Option<(ast::UseTreeList, ast::Path)> {
+        let use_tree_list = ast::UseTreeList::cast(n)?;
+        let path = use_tree_list.parent_use_tree().path()?;
+        Some((use_tree_list, path))
+    }
+
+    fn find_use_tree(n: SyntaxNode) -> Option<(ast::UseTree, ast::Path)> {
+        let use_tree = ast::UseTree::cast(n)?;
+        let path = use_tree.path()?;
+        Some((use_tree, path))
+    }
+}
+
+/// Returns `true` if `def` has at least one usage in the current file.
+fn def_is_referenced_in(def: Definition, ctx: &AssistContext<'_>) -> bool {
+    let search_scope = SearchScope::single_file(ctx.file_id());
+    def.usages(&ctx.sema).in_scope(search_scope).at_least_one()
+}
+
+/// A single item exported by the glob's target module, paired with the name it is
+/// visible under (which may differ from the definition's own name, e.g. via `as`).
+#[derive(Debug, Clone)]
+struct Ref {
+    // could be alias
+    visible_name: Name,
+    def: Definition,
+}
+
+impl Ref {
+    /// Builds a `Ref` from a scope entry; only module-level definitions qualify
+    /// (locals, generic params etc. yield `None`).
+    fn from_scope_def(name: Name, scope_def: ScopeDef) -> Option<Self> {
+        match scope_def {
+            ScopeDef::ModuleDef(def) => {
+                Some(Ref { visible_name: name, def: Definition::from(def) })
+            }
+            _ => None,
+        }
+    }
+}
+
+/// Collection of candidate imports gathered from the glob's target module.
+#[derive(Debug, Clone)]
+struct Refs(Vec<Ref>);
+
+impl Refs {
+    /// Keeps only refs that are actually used in the current file. A trait counts
+    /// as used when any of its methods is referenced, even if the trait name itself
+    /// never appears (method-call syntax requires the trait to be in scope).
+    fn used_refs(&self, ctx: &AssistContext<'_>) -> Refs {
+        Refs(
+            self.0
+                .clone()
+                .into_iter()
+                .filter(|r| {
+                    if let Definition::Trait(tr) = r.def {
+                        if tr.items(ctx.db()).into_iter().any(|ai| {
+                            if let AssocItem::Function(f) = ai {
+                                def_is_referenced_in(Definition::Function(f), ctx)
+                            } else {
+                                false
+                            }
+                        }) {
+                            return true;
+                        }
+                    }
+
+                    def_is_referenced_in(r.def, ctx)
+                })
+                .collect(),
+        )
+    }
+
+    /// Drops refs whose definitions are already imported elsewhere (see `defs`).
+    fn filter_out_by_defs(&self, defs: Vec<Definition>) -> Refs {
+        Refs(self.0.clone().into_iter().filter(|r| !defs.contains(&r.def)).collect())
+    }
+}
+
+/// Collects all module-level items of `module` visible from `visible_from`.
+/// Returns `None` when the module itself is not reachable from there, making
+/// the assist inapplicable.
+fn find_refs_in_mod(ctx: &AssistContext<'_>, module: Module, visible_from: Module) -> Option<Refs> {
+    if !is_mod_visible_from(ctx, module, visible_from) {
+        return None;
+    }
+
+    let module_scope = module.scope(ctx.db(), Some(visible_from));
+    let refs = module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
+    Some(Refs(refs))
+}
+
+/// Recursively checks that `module` and every ancestor up to the crate root are
+/// visible from `from`; the crate root itself is always considered visible.
+fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool {
+    match module.parent(ctx.db()) {
+        Some(parent) => {
+            module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
+                && is_mod_visible_from(ctx, parent, from)
+        }
+        None => true,
+    }
+}
+
+// looks for name refs in parent use block's siblings
+//
+// mod bar {
+//     mod qux {
+//         struct Qux;
+//     }
+//
+//     pub use qux::Qux;
+// }
+//
+// ↓ ---------------
+// use foo::*$0;
+// use baz::Baz;
+// ↑ ---------------
+fn find_imported_defs(ctx: &AssistContext<'_>, star: SyntaxToken) -> Option<Vec<Definition>> {
+    // The `use` item containing the glob; its siblings are scanned for other imports.
+    let parent_use_item_syntax = star.parent_ancestors().find_map(|n| {
+        if ast::Use::can_cast(n.kind()) {
+            Some(n)
+        } else {
+            None
+        }
+    })?;
+
+    Some(
+        // Scan sibling `use` items both before and after the glob's `use`.
+        [Direction::Prev, Direction::Next]
+            .into_iter()
+            .flat_map(|dir| {
+                parent_use_item_syntax
+                    .siblings(dir.to_owned())
+                    .filter(|n| ast::Use::can_cast(n.kind()))
+            })
+            .flat_map(|n| n.descendants().filter_map(ast::NameRef::cast))
+            // Keep only definition kinds that can appear in a `use` (no locals, fields, ...).
+            .filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? {
+                NameRefClass::Definition(
+                    def @ (Definition::Macro(_)
+                    | Definition::Module(_)
+                    | Definition::Function(_)
+                    | Definition::Adt(_)
+                    | Definition::Variant(_)
+                    | Definition::Const(_)
+                    | Definition::Static(_)
+                    | Definition::Trait(_)
+                    | Definition::TypeAlias(_)),
+                ) => Some(def),
+                _ => None,
+            })
+            .collect(),
+    )
+}
+
+/// Final name list for the expansion: refs that are used in this file and are not
+/// already covered by an explicit import.
+fn find_names_to_import(
+    ctx: &AssistContext<'_>,
+    refs_in_target: Refs,
+    imported_defs: Vec<Definition>,
+) -> Vec<Name> {
+    let used_refs = refs_in_target.used_refs(ctx).filter_out_by_defs(imported_defs);
+    used_refs.0.iter().map(|r| r.visible_name.clone()).collect()
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    // Fixture-based tests: `$0` marks the cursor position; the second string is the
+    // expected file contents after applying the assist.
+
+    #[test]
+    fn expanding_glob_import() {
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub struct Bar;
+    pub struct Baz;
+    pub struct Qux;
+
+    pub fn f() {}
+}
+
+use foo::*$0;
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+}
+",
+            r"
+mod foo {
+    pub struct Bar;
+    pub struct Baz;
+    pub struct Qux;
+
+    pub fn f() {}
+}
+
+use foo::{Bar, Baz, f};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+}
+",
+        )
+    }
+
+    #[test]
+    fn expanding_glob_import_unused() {
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub struct Bar;
+    pub struct Baz;
+    pub struct Qux;
+
+    pub fn f() {}
+}
+
+use foo::*$0;
+
+fn qux() {}
+",
+            r"
+mod foo {
+    pub struct Bar;
+    pub struct Baz;
+    pub struct Qux;
+
+    pub fn f() {}
+}
+
+use foo::{};
+
+fn qux() {}
+",
+        )
+    }
+
+    #[test]
+    fn expanding_glob_import_with_existing_explicit_names() {
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub struct Bar;
+    pub struct Baz;
+    pub struct Qux;
+
+    pub fn f() {}
+}
+
+use foo::{*$0, f};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+}
+",
+            r"
+mod foo {
+    pub struct Bar;
+    pub struct Baz;
+    pub struct Qux;
+
+    pub fn f() {}
+}
+
+use foo::{Bar, Baz, f};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+}
+",
+        )
+    }
+
+    #[test]
+    fn expanding_glob_import_with_existing_uses_in_same_module() {
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub struct Bar;
+    pub struct Baz;
+    pub struct Qux;
+
+    pub fn f() {}
+}
+
+use foo::Bar;
+use foo::{*$0, f};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+}
+",
+            r"
+mod foo {
+    pub struct Bar;
+    pub struct Baz;
+    pub struct Qux;
+
+    pub fn f() {}
+}
+
+use foo::Bar;
+use foo::{Baz, f};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+}
+",
+        )
+    }
+
+    #[test]
+    fn expanding_nested_glob_import() {
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+    }
+}
+
+use foo::{bar::{*$0, f}, baz::*};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+}
+",
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+    }
+}
+
+use foo::{bar::{Bar, Baz, f}, baz::*};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+}
+",
+        );
+
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+    }
+}
+
+use foo::{bar::{Bar, Baz, f}, baz::*$0};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+}
+",
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+    }
+}
+
+use foo::{bar::{Bar, Baz, f}, baz::g};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+}
+",
+        );
+
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+
+        pub mod qux {
+            pub fn h() {}
+            pub fn m() {}
+
+            pub mod q {
+                pub fn j() {}
+            }
+        }
+    }
+}
+
+use foo::{
+    bar::{*, f},
+    baz::{g, qux::*$0}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+    h();
+    q::j();
+}
+",
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+
+        pub mod qux {
+            pub fn h() {}
+            pub fn m() {}
+
+            pub mod q {
+                pub fn j() {}
+            }
+        }
+    }
+}
+
+use foo::{
+    bar::{*, f},
+    baz::{g, qux::{h, q}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+    h();
+    q::j();
+}
+",
+        );
+
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+
+        pub mod qux {
+            pub fn h() {}
+            pub fn m() {}
+
+            pub mod q {
+                pub fn j() {}
+            }
+        }
+    }
+}
+
+use foo::{
+    bar::{*, f},
+    baz::{g, qux::{h, q::*$0}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+    h();
+    j();
+}
+",
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+
+        pub mod qux {
+            pub fn h() {}
+            pub fn m() {}
+
+            pub mod q {
+                pub fn j() {}
+            }
+        }
+    }
+}
+
+use foo::{
+    bar::{*, f},
+    baz::{g, qux::{h, q::j}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+    h();
+    j();
+}
+",
+        );
+
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+
+        pub mod qux {
+            pub fn h() {}
+            pub fn m() {}
+
+            pub mod q {
+                pub fn j() {}
+            }
+        }
+    }
+}
+
+use foo::{
+    bar::{*, f},
+    baz::{g, qux::{q::j, *$0}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+    h();
+    j();
+}
+",
+            r"
+mod foo {
+    pub mod bar {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+
+        pub fn f() {}
+    }
+
+    pub mod baz {
+        pub fn g() {}
+
+        pub mod qux {
+            pub fn h() {}
+            pub fn m() {}
+
+            pub mod q {
+                pub fn j() {}
+            }
+        }
+    }
+}
+
+use foo::{
+    bar::{*, f},
+    baz::{g, qux::{q::j, h}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+    f();
+    g();
+    h();
+    j();
+}
+",
+        );
+    }
+
+    #[test]
+    fn expanding_glob_import_with_macro_defs() {
+        check_assist(
+            expand_glob_import,
+            r#"
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! bar {
+    () => ()
+}
+
+pub fn baz() {}
+
+//- /main.rs crate:main deps:foo
+use foo::*$0;
+
+fn main() {
+    bar!();
+    baz();
+}
+"#,
+            r#"
+use foo::{bar, baz};
+
+fn main() {
+    bar!();
+    baz();
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn expanding_glob_import_with_trait_method_uses() {
+        check_assist(
+            expand_glob_import,
+            r"
+//- /lib.rs crate:foo
+pub trait Tr {
+    fn method(&self) {}
+}
+impl Tr for () {}
+
+//- /main.rs crate:main deps:foo
+use foo::*$0;
+
+fn main() {
+    ().method();
+}
+",
+            r"
+use foo::Tr;
+
+fn main() {
+    ().method();
+}
+",
+        );
+
+        check_assist(
+            expand_glob_import,
+            r"
+//- /lib.rs crate:foo
+pub trait Tr {
+    fn method(&self) {}
+}
+impl Tr for () {}
+
+pub trait Tr2 {
+    fn method2(&self) {}
+}
+impl Tr2 for () {}
+
+//- /main.rs crate:main deps:foo
+use foo::*$0;
+
+fn main() {
+    ().method();
+}
+",
+            r"
+use foo::Tr;
+
+fn main() {
+    ().method();
+}
+",
+        );
+    }
+
+    #[test]
+    fn expanding_is_not_applicable_if_target_module_is_not_accessible_from_current_scope() {
+        check_assist_not_applicable(
+            expand_glob_import,
+            r"
+mod foo {
+    mod bar {
+        pub struct Bar;
+    }
+}
+
+use foo::bar::*$0;
+
+fn baz(bar: Bar) {}
+",
+        );
+
+        check_assist_not_applicable(
+            expand_glob_import,
+            r"
+mod foo {
+    mod bar {
+        pub mod baz {
+            pub struct Baz;
+        }
+    }
+}
+
+use foo::bar::baz::*$0;
+
+fn qux(baz: Baz) {}
+",
+        );
+    }
+
+    #[test]
+    fn expanding_is_not_applicable_if_cursor_is_not_in_star_token() {
+        check_assist_not_applicable(
+            expand_glob_import,
+            r"
+    mod foo {
+        pub struct Bar;
+        pub struct Baz;
+        pub struct Qux;
+    }
+
+    use foo::Bar$0;
+
+    fn qux(bar: Bar, baz: Baz) {}
+    ",
+        )
+    }
+
+    #[test]
+    fn expanding_glob_import_single_nested_glob_only() {
+        check_assist(
+            expand_glob_import,
+            r"
+mod foo {
+    pub struct Bar;
+}
+
+use foo::{*$0};
+
+struct Baz {
+    bar: Bar
+}
+",
+            r"
+mod foo {
+    pub struct Bar;
+}
+
+use foo::{Bar};
+
+struct Baz {
+    bar: Bar
+}
+",
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
new file mode 100644
index 000000000..52a55ead3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -0,0 +1,5333 @@
+use std::iter;
+
+use ast::make;
+use either::Either;
+use hir::{
+ HasSource, HirDisplay, InFile, Local, ModuleDef, PathResolution, Semantics, TypeInfo, TypeParam,
+};
+use ide_db::{
+ defs::{Definition, NameRefClass},
+ famous_defs::FamousDefs,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+ search::{FileReference, ReferenceCategory, SearchScope},
+ syntax_helpers::node_ext::{preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr},
+ FxIndexSet, RootDatabase,
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ AstNode, HasGenericParams,
+ },
+ match_ast, ted, SyntaxElement,
+ SyntaxKind::{self, COMMENT},
+ SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists, TreeMutator},
+ utils::generate_impl_text,
+ AssistId,
+};
+
+// Assist: extract_function
+//
+// Extracts selected statements and comments into new function.
+//
+// ```
+// fn main() {
+// let n = 1;
+// $0let m = n + 2;
+// // calculate
+// let k = m + n;$0
+// let g = 3;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let n = 1;
+// fun_name(n);
+// let g = 3;
+// }
+//
+// fn $0fun_name(n: i32) {
+// let m = n + 2;
+// // calculate
+// let k = m + n;
+// }
+// ```
+pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // Require a non-empty selection that is not inside a comment.
+    let range = ctx.selection_trimmed();
+    if range.is_empty() {
+        return None;
+    }
+
+    let node = ctx.covering_element();
+    if node.kind() == COMMENT {
+        cov_mark::hit!(extract_function_in_comment_is_not_applicable);
+        return None;
+    }
+
+    let node = match node {
+        syntax::NodeOrToken::Node(n) => n,
+        syntax::NodeOrToken::Token(t) => t.parent()?,
+    };
+
+    // Figure out what is being extracted (whole expr vs. statement span) and
+    // analyze its enclosing container and the locals it touches.
+    let body = extraction_target(&node, range)?;
+    let container_info = body.analyze_container(&ctx.sema)?;
+
+    let (locals_used, self_param) = body.analyze(&ctx.sema);
+
+    // If the body uses `self`, the new fn becomes a method next to the current one.
+    let anchor = if self_param.is_some() { Anchor::Method } else { Anchor::Freestanding };
+    let insert_after = node_to_insert_after(&body, anchor)?;
+    let semantics_scope = ctx.sema.scope(&insert_after)?;
+    let module = semantics_scope.module();
+
+    let ret_ty = body.return_ty(ctx)?;
+    let control_flow = body.external_control_flow(ctx, &container_info)?;
+    let ret_values = body.ret_values(ctx, node.parent().as_ref().unwrap_or(&node));
+
+    let target_range = body.text_range();
+
+    let scope = ImportScope::find_insert_use_container(&node, &ctx.sema)?;
+
+    acc.add(
+        AssistId("extract_function", crate::AssistKind::RefactorExtract),
+        "Extract into function",
+        target_range,
+        move |builder| {
+            let outliving_locals: Vec<_> = ret_values.collect();
+            if stdx::never!(!outliving_locals.is_empty() && !ret_ty.is_unit()) {
+                // We should not have variables that outlive body if we have expression block
+                return;
+            }
+
+            let params =
+                body.extracted_function_params(ctx, &container_info, locals_used.iter().copied());
+
+            let extracted_from_trait_impl = body.extracted_from_trait_impl();
+
+            let name = make_function_name(&semantics_scope);
+
+            let fun = Function {
+                name,
+                self_param,
+                params,
+                control_flow,
+                ret_ty,
+                body,
+                outliving_locals,
+                mods: container_info,
+            };
+
+            let new_indent = IndentLevel::from_node(&insert_after);
+            let old_indent = fun.body.indent_level();
+
+            // Replace the selection with a call to the new function.
+            builder.replace(target_range, make_call(ctx, &fun, old_indent));
+
+            // When extracting out of a trait impl, the new fn goes into a fresh
+            // inherent impl block for the self type instead.
+            let fn_def = match fun.self_param_adt(ctx) {
+                Some(adt) if extracted_from_trait_impl => {
+                    let fn_def = format_function(ctx, module, &fun, old_indent, new_indent + 1);
+                    generate_impl_text(&adt, &fn_def).replace("{\n\n", "{")
+                }
+                _ => format_function(ctx, module, &fun, old_indent, new_indent),
+            };
+
+            // The generated fn may return `ControlFlow`; import it if so.
+            if fn_def.contains("ControlFlow") {
+                let scope = match scope {
+                    ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
+                    ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
+                    ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
+                };
+
+                let control_flow_enum =
+                    FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow();
+
+                if let Some(control_flow_enum) = control_flow_enum {
+                    let mod_path = module.find_use_path_prefixed(
+                        ctx.sema.db,
+                        ModuleDef::from(control_flow_enum),
+                        ctx.config.insert_use.prefix_kind,
+                    );
+
+                    if let Some(mod_path) = mod_path {
+                        insert_use(&scope, mod_path_to_ast(&mod_path), &ctx.config.insert_use);
+                    }
+                }
+            }
+
+            let insert_offset = insert_after.text_range().end();
+
+            // Use a snippet (placeholder on the fn name) when the client supports it.
+            match ctx.config.snippet_cap {
+                Some(cap) => builder.insert_snippet(cap, insert_offset, fn_def),
+                None => builder.insert(insert_offset, fn_def),
+            };
+        },
+    )
+}
+
+/// Picks a name for the extracted function: `fun_name`, or `fun_name1`,
+/// `fun_name2`, ... if that identifier is already taken in the target scope.
+fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef {
+    let mut names_in_scope = vec![];
+    semantics_scope.process_all_names(&mut |name, _| names_in_scope.push(name.to_string()));
+
+    let default_name = "fun_name";
+
+    let mut name = default_name.to_string();
+    let mut counter = 0;
+    while names_in_scope.contains(&name) {
+        counter += 1;
+        name = format!("{}{}", &default_name, counter)
+    }
+    make::name_ref(&name)
+}
+
+/// Try to guess what user wants to extract
+///
+/// We basically have two cases:
+/// * We want whole node, like `loop {}`, `2 + 2`, `{ let n = 1; }` exprs.
+///   Then we can use `ast::Expr`
+/// * We want a few statements for a block. E.g.
+///   ```rust,no_run
+///   fn foo() -> i32 {
+///     let m = 1;
+///     $0
+///     let n = 2;
+///     let k = 3;
+///     k + n
+///     $0
+///   }
+///   ```
+///
+fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option<FunctionBody> {
+    // A single statement: extract it as a span of its enclosing statement list.
+    if let Some(stmt) = ast::Stmt::cast(node.clone()) {
+        return match stmt {
+            ast::Stmt::Item(_) => None,
+            ast::Stmt::ExprStmt(_) | ast::Stmt::LetStmt(_) => Some(FunctionBody::from_range(
+                node.parent().and_then(ast::StmtList::cast)?,
+                node.text_range(),
+            )),
+        };
+    }
+
+    // Covering element returned the parent block of one or multiple statements that have been selected
+    if let Some(stmt_list) = ast::StmtList::cast(node.clone()) {
+        if let Some(block_expr) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) {
+            if block_expr.syntax().text_range() == selection_range {
+                return FunctionBody::from_expr(block_expr.into());
+            }
+        }
+
+        // Extract the full statements.
+        return Some(FunctionBody::from_range(stmt_list, selection_range));
+    }
+
+    let expr = ast::Expr::cast(node.clone())?;
+    // A node got selected fully
+    if node.text_range() == selection_range {
+        return FunctionBody::from_expr(expr);
+    }
+
+    // Partial selection inside an expression: widen to the nearest enclosing expr.
+    node.ancestors().find_map(ast::Expr::cast).and_then(FunctionBody::from_expr)
+}
+
+/// Everything needed to render the extracted function and its call site.
+#[derive(Debug)]
+struct Function {
+    name: ast::NameRef,
+    self_param: Option<ast::SelfParam>,
+    params: Vec<Param>,
+    control_flow: ControlFlow,
+    ret_ty: RetType,
+    body: FunctionBody,
+    outliving_locals: Vec<OutlivedLocal>,
+    mods: ContainerInfo,
+}
+
+/// A local that the extracted body reads/writes and must receive as a parameter.
+/// The three flags jointly determine the passing mode — see `Param::kind`.
+#[derive(Debug)]
+struct Param {
+    var: Local,
+    ty: hir::Type,
+    move_local: bool,
+    requires_mut: bool,
+    is_copy: bool,
+}
+
+/// How a `Param` is passed to the extracted function.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum ParamKind {
+    Value,
+    MutValue,
+    SharedRef,
+    MutRef,
+}
+
+/// Return type of the extracted function: nothing, one value, or a tuple of
+/// the locals that outlive the extracted body.
+#[derive(Debug, Eq, PartialEq)]
+enum FunType {
+    Unit,
+    Single(hir::Type),
+    Tuple(Vec<hir::Type>),
+}
+
+/// Where to put extracted function definition
+#[derive(Debug)]
+enum Anchor {
+    /// Extract free function and put right after current top-level function
+    Freestanding,
+    /// Extract method and put right after current function in the impl-block
+    Method,
+}
+
+// FIXME: ControlFlow and ContainerInfo both track some function modifiers, feels like these two should
+// probably be merged somehow.
+/// Control flow escaping the extracted body (`return`/`break`/`continue`/`?`),
+/// plus whether the body must be `async`/`unsafe`.
+#[derive(Debug)]
+struct ControlFlow {
+    kind: Option<FlowKind>,
+    is_async: bool,
+    is_unsafe: bool,
+}
+
+/// The thing whose expression we are extracting from. Can be a function, const, static, const arg, ...
+#[derive(Clone, Debug)]
+struct ContainerInfo {
+    is_const: bool,
+    // whether the extracted range is in tail-expression position of the container
+    is_in_tail: bool,
+    parent_loop: Option<SyntaxNode>,
+    /// The function's return type, const's type etc.
+    ret_type: Option<hir::Type>,
+    generic_param_lists: Vec<ast::GenericParamList>,
+    where_clauses: Vec<ast::WhereClause>,
+}
+
+/// Control flow that is exported from extracted function
+///
+/// E.g.:
+/// ```rust,no_run
+/// loop {
+///     $0
+///     if 42 == 42 {
+///         break;
+///     }
+///     $0
+/// }
+/// ```
+#[derive(Debug, Clone)]
+enum FlowKind {
+    /// Return with value (`return $expr;`)
+    Return(Option<ast::Expr>),
+    /// `?` operator propagation; see [`TryKind`] for the error shape.
+    Try {
+        kind: TryKind,
+    },
+    /// Break with label and value (`break 'label $expr;`)
+    Break(Option<ast::Lifetime>, Option<ast::Expr>),
+    /// Continue with label (`continue 'label;`)
+    Continue(Option<ast::Lifetime>),
+}
+
+/// Whether a `?` inside the extracted body propagates an `Option` or a `Result`.
+#[derive(Debug, Clone)]
+enum TryKind {
+    Option,
+    Result { ty: hir::Type },
+}
+
+/// Whether the extracted body is an expression (with a value) or statements.
+#[derive(Debug)]
+enum RetType {
+    Expr(hir::Type),
+    Stmt,
+}
+
+impl RetType {
+    /// Statement extractions, and unit-typed expressions, produce no value.
+    fn is_unit(&self) -> bool {
+        match self {
+            RetType::Expr(ty) => ty.is_unit(),
+            RetType::Stmt => true,
+        }
+    }
+}
+
+/// Semantically same as `ast::Expr`, but preserves identity when using only part of the Block
+/// This is the future function body, the part that is being extracted.
+#[derive(Debug)]
+enum FunctionBody {
+    Expr(ast::Expr),
+    /// A sub-range of statements inside a statement list.
+    Span { parent: ast::StmtList, text_range: TextRange },
+}
+
+/// A local defined in the extracted body but still used after it — it must be
+/// returned from the new function (mutably rebound if `mut_usage_outside_body`).
+#[derive(Debug)]
+struct OutlivedLocal {
+    local: Local,
+    mut_usage_outside_body: bool,
+}
+
+/// Container of local variable usages
+///
+/// Semantically the same as `UsageSearchResult`, but provides more convenient interface
+struct LocalUsages(ide_db::search::UsageSearchResult);
+
+impl LocalUsages {
+    /// Collects all usages of `var` restricted to the current file.
+    fn find_local_usages(ctx: &AssistContext<'_>, var: Local) -> Self {
+        Self(
+            Definition::Local(var)
+                .usages(&ctx.sema)
+                .in_scope(SearchScope::single_file(ctx.file_id()))
+                .all(),
+        )
+    }
+
+    /// Flattens the per-file grouping into a plain iterator over references.
+    fn iter(&self) -> impl Iterator<Item = &FileReference> + '_ {
+        self.0.iter().flat_map(|(_, rs)| rs)
+    }
+}
+
+impl Function {
+ fn return_type(&self, ctx: &AssistContext<'_>) -> FunType {
+ match &self.ret_ty {
+ RetType::Expr(ty) if ty.is_unit() => FunType::Unit,
+ RetType::Expr(ty) => FunType::Single(ty.clone()),
+ RetType::Stmt => match self.outliving_locals.as_slice() {
+ [] => FunType::Unit,
+ [var] => FunType::Single(var.local.ty(ctx.db())),
+ vars => {
+ let types = vars.iter().map(|v| v.local.ty(ctx.db())).collect();
+ FunType::Tuple(types)
+ }
+ },
+ }
+ }
+
+ fn self_param_adt(&self, ctx: &AssistContext<'_>) -> Option<ast::Adt> {
+ let self_param = self.self_param.as_ref()?;
+ let def = ctx.sema.to_def(self_param)?;
+ let adt = def.ty(ctx.db()).strip_references().as_adt()?;
+ let InFile { file_id: _, value } = adt.source(ctx.db())?;
+ Some(value)
+ }
+}
+
+impl ParamKind {
+ fn is_ref(&self) -> bool {
+ matches!(self, ParamKind::SharedRef | ParamKind::MutRef)
+ }
+}
+
+impl Param {
+ fn kind(&self) -> ParamKind {
+ match (self.move_local, self.requires_mut, self.is_copy) {
+ (false, true, _) => ParamKind::MutRef,
+ (false, false, false) => ParamKind::SharedRef,
+ (true, true, _) => ParamKind::MutValue,
+ (_, false, _) => ParamKind::Value,
+ }
+ }
+
+ fn to_arg(&self, ctx: &AssistContext<'_>) -> ast::Expr {
+ let var = path_expr_from_local(ctx, self.var);
+ match self.kind() {
+ ParamKind::Value | ParamKind::MutValue => var,
+ ParamKind::SharedRef => make::expr_ref(var, false),
+ ParamKind::MutRef => make::expr_ref(var, true),
+ }
+ }
+
+ fn to_param(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Param {
+ let var = self.var.name(ctx.db()).to_string();
+ let var_name = make::name(&var);
+ let pat = match self.kind() {
+ ParamKind::MutValue => make::ident_pat(false, true, var_name),
+ ParamKind::Value | ParamKind::SharedRef | ParamKind::MutRef => {
+ make::ext::simple_ident_pat(var_name)
+ }
+ };
+
+ let ty = make_ty(&self.ty, ctx, module);
+ let ty = match self.kind() {
+ ParamKind::Value | ParamKind::MutValue => ty,
+ ParamKind::SharedRef => make::ty_ref(ty, false),
+ ParamKind::MutRef => make::ty_ref(ty, true),
+ };
+
+ make::param(pat.into(), ty)
+ }
+}
+
impl TryKind {
    /// Guesses whether `ty` is an `Option` or a `Result`, to decide how `?` in
    /// the body should be reproduced in the new function.
    fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>) -> Option<TryKind> {
        if ty.is_unknown() {
            // We favour Result for `expr?`
            return Some(TryKind::Result { ty });
        }
        let adt = ty.as_adt()?;
        let name = adt.name(ctx.db());
        // FIXME: use lang items to determine if it is std type or user defined
        // E.g. if user happens to define type named `Option`, we would have false positive
        match name.to_string().as_str() {
            "Option" => Some(TryKind::Option),
            "Result" => Some(TryKind::Result { ty }),
            _ => None,
        }
    }
}
+
impl FlowKind {
    /// Builds the expression that re-raises this control flow at the call site,
    /// e.g. `return expr`, `break 'label expr` or `continue 'label`.
    fn make_result_handler(&self, expr: Option<ast::Expr>) -> ast::Expr {
        match self {
            FlowKind::Return(_) => make::expr_return(expr),
            FlowKind::Break(label, _) => make::expr_break(label.clone(), expr),
            FlowKind::Try { .. } => {
                // `?` flow is handled by re-applying `?`, never via a handler arm;
                // reaching this is a logic error upstream.
                stdx::never!("cannot have result handler with try");
                expr.unwrap_or_else(|| make::expr_return(None))
            }
            FlowKind::Continue(label) => {
                stdx::always!(expr.is_none(), "continue with value is not possible");
                make::expr_continue(label.clone())
            }
        }
    }

    /// Type of the value carried by `return`/`break`, if any.
    fn expr_ty(&self, ctx: &AssistContext<'_>) -> Option<hir::Type> {
        match self {
            FlowKind::Return(Some(expr)) | FlowKind::Break(_, Some(expr)) => {
                ctx.sema.type_of_expr(expr).map(TypeInfo::adjusted)
            }
            FlowKind::Try { .. } => {
                stdx::never!("try does not have defined expr_ty");
                None
            }
            _ => None,
        }
    }
}
+
+impl FunctionBody {
+ fn parent(&self) -> Option<SyntaxNode> {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().parent(),
+ FunctionBody::Span { parent, .. } => Some(parent.syntax().clone()),
+ }
+ }
+
+ fn node(&self) -> &SyntaxNode {
+ match self {
+ FunctionBody::Expr(e) => e.syntax(),
+ FunctionBody::Span { parent, .. } => parent.syntax(),
+ }
+ }
+
+ fn extracted_from_trait_impl(&self) -> bool {
+ match self.node().ancestors().find_map(ast::Impl::cast) {
+ Some(c) => return c.trait_().is_some(),
+ None => false,
+ }
+ }
+
+ fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().descendants(),
+ FunctionBody::Span { parent, .. } => parent.syntax().descendants(),
+ }
+ }
+
+ fn descendant_paths(&self) -> impl Iterator<Item = ast::Path> {
+ self.descendants().filter_map(|node| {
+ match_ast! {
+ match node {
+ ast::Path(it) => Some(it),
+ _ => None
+ }
+ }
+ })
+ }
+
+ fn from_expr(expr: ast::Expr) -> Option<Self> {
+ match expr {
+ ast::Expr::BreakExpr(it) => it.expr().map(Self::Expr),
+ ast::Expr::ReturnExpr(it) => it.expr().map(Self::Expr),
+ ast::Expr::BlockExpr(it) if !it.is_standalone() => None,
+ expr => Some(Self::Expr(expr)),
+ }
+ }
+
+ fn from_range(parent: ast::StmtList, selected: TextRange) -> FunctionBody {
+ let full_body = parent.syntax().children_with_tokens();
+
+ let mut text_range = full_body
+ .filter(|it| ast::Stmt::can_cast(it.kind()) || it.kind() == COMMENT)
+ .map(|element| element.text_range())
+ .filter(|&range| selected.intersect(range).filter(|it| !it.is_empty()).is_some())
+ .reduce(|acc, stmt| acc.cover(stmt));
+
+ if let Some(tail_range) = parent
+ .tail_expr()
+ .map(|it| it.syntax().text_range())
+ .filter(|&it| selected.intersect(it).is_some())
+ {
+ text_range = Some(match text_range {
+ Some(text_range) => text_range.cover(tail_range),
+ None => tail_range,
+ });
+ }
+ Self::Span { parent, text_range: text_range.unwrap_or(selected) }
+ }
+
+ fn indent_level(&self) -> IndentLevel {
+ match &self {
+ FunctionBody::Expr(expr) => IndentLevel::from_node(expr.syntax()),
+ FunctionBody::Span { parent, .. } => IndentLevel::from_node(parent.syntax()) + 1,
+ }
+ }
+
+ fn tail_expr(&self) -> Option<ast::Expr> {
+ match &self {
+ FunctionBody::Expr(expr) => Some(expr.clone()),
+ FunctionBody::Span { parent, text_range } => {
+ let tail_expr = parent.tail_expr()?;
+ text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr)
+ }
+ }
+ }
+
+ fn walk_expr(&self, cb: &mut dyn FnMut(ast::Expr)) {
+ match self {
+ FunctionBody::Expr(expr) => walk_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(),
+ ast::Stmt::Item(_) => None,
+ ast::Stmt::LetStmt(stmt) => stmt.initializer(),
+ })
+ .for_each(|expr| walk_expr(&expr, cb));
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ walk_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn preorder_expr(&self, cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool) {
+ match self {
+ FunctionBody::Expr(expr) => preorder_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(),
+ ast::Stmt::Item(_) => None,
+ ast::Stmt::LetStmt(stmt) => stmt.initializer(),
+ })
+ .for_each(|expr| preorder_expr(&expr, cb));
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ preorder_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn walk_pat(&self, cb: &mut dyn FnMut(ast::Pat)) {
+ match self {
+ FunctionBody::Expr(expr) => walk_patterns_in_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .for_each(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => {
+ if let Some(expr) = expr_stmt.expr() {
+ walk_patterns_in_expr(&expr, cb)
+ }
+ }
+ ast::Stmt::Item(_) => (),
+ ast::Stmt::LetStmt(stmt) => {
+ if let Some(pat) = stmt.pat() {
+ walk_pat(&pat, cb);
+ }
+ if let Some(expr) = stmt.initializer() {
+ walk_patterns_in_expr(&expr, cb);
+ }
+ }
+ });
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ walk_patterns_in_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn text_range(&self) -> TextRange {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().text_range(),
+ &FunctionBody::Span { text_range, .. } => text_range,
+ }
+ }
+
+ fn contains_range(&self, range: TextRange) -> bool {
+ self.text_range().contains_range(range)
+ }
+
+ fn precedes_range(&self, range: TextRange) -> bool {
+ self.text_range().end() <= range.start()
+ }
+
+ fn contains_node(&self, node: &SyntaxNode) -> bool {
+ self.contains_range(node.text_range())
+ }
+}
+
impl FunctionBody {
    /// Analyzes a function body, returning the used local variables that are referenced in it
    /// as well as the `self` parameter, if it is used.
    fn analyze(
        &self,
        sema: &Semantics<'_, RootDatabase>,
    ) -> (FxIndexSet<Local>, Option<ast::SelfParam>) {
        let mut self_param = None;
        let mut res = FxIndexSet::default();
        // Records a name reference if it resolves to a local binding or `self`.
        let mut cb = |name_ref: Option<_>| {
            let local_ref =
                match name_ref.and_then(|name_ref| NameRefClass::classify(sema, &name_ref)) {
                    Some(
                        NameRefClass::Definition(Definition::Local(local_ref))
                        | NameRefClass::FieldShorthand { local_ref, field_ref: _ },
                    ) => local_ref,
                    _ => return,
                };
            let InFile { file_id, value } = local_ref.source(sema.db);
            // locals defined inside macros are not relevant to us
            if !file_id.is_macro() {
                match value {
                    // `Right` is a `SelfParam` source, `Left` an `IdentPat`.
                    Either::Right(it) => {
                        self_param.replace(it);
                    }
                    Either::Left(_) => {
                        res.insert(local_ref);
                    }
                }
            }
        };
        self.walk_expr(&mut |expr| match expr {
            ast::Expr::PathExpr(path_expr) => {
                cb(path_expr.path().and_then(|it| it.as_single_name_ref()))
            }
            // Closures capture from the environment, so every name inside the
            // closure body is a candidate reference.
            ast::Expr::ClosureExpr(closure_expr) => {
                if let Some(body) = closure_expr.body() {
                    body.syntax().descendants().map(ast::NameRef::cast).for_each(|it| cb(it));
                }
            }
            // For macro calls, descend each identifier token into the expansion
            // to see what it resolves to.
            ast::Expr::MacroExpr(expr) => {
                if let Some(tt) = expr.macro_call().and_then(|call| call.token_tree()) {
                    tt.syntax()
                        .children_with_tokens()
                        .flat_map(SyntaxElement::into_token)
                        .filter(|it| it.kind() == SyntaxKind::IDENT)
                        .flat_map(|t| sema.descend_into_macros(t))
                        .for_each(|t| cb(t.parent().and_then(ast::NameRef::cast)));
                }
            }
            _ => (),
        });
        (res, self_param)
    }

    /// Walks up from the body to the thing it is extracted from (fn, closure,
    /// const/static/const-arg, const block, ...), collecting const-ness, the
    /// container's result type, the enclosing loop and applicable generics.
    fn analyze_container(&self, sema: &Semantics<'_, RootDatabase>) -> Option<ContainerInfo> {
        let mut ancestors = self.parent()?.ancestors();
        let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
        let mut parent_loop = None;
        // Remembers the first (innermost) loop whose body encloses the selection.
        let mut set_parent_loop = |loop_: &dyn ast::HasLoopBody| {
            if loop_
                .loop_body()
                .map_or(false, |it| it.syntax().text_range().contains_range(self.text_range()))
            {
                parent_loop.get_or_insert(loop_.syntax().clone());
            }
        };

        // Climb ancestors until one terminates the search; `continue` keeps climbing.
        let (is_const, expr, ty) = loop {
            let anc = ancestors.next()?;
            break match_ast! {
                match anc {
                    ast::ClosureExpr(closure) => (false, closure.body(), infer_expr_opt(closure.body())),
                    ast::BlockExpr(block_expr) => {
                        // Only const/try/labeled blocks act as containers; plain
                        // blocks are transparent.
                        let (constness, block) = match block_expr.modifier() {
                            Some(ast::BlockModifier::Const(_)) => (true, block_expr),
                            Some(ast::BlockModifier::Try(_)) => (false, block_expr),
                            Some(ast::BlockModifier::Label(label)) if label.lifetime().is_some() => (false, block_expr),
                            _ => continue,
                        };
                        let expr = Some(ast::Expr::BlockExpr(block));
                        (constness, expr.clone(), infer_expr_opt(expr))
                    },
                    ast::Fn(fn_) => {
                        let func = sema.to_def(&fn_)?;
                        let mut ret_ty = func.ret_type(sema.db);
                        // For async fns use the future's output type rather than
                        // the `impl Future` itself.
                        if func.is_async(sema.db) {
                            if let Some(async_ret) = func.async_ret_type(sema.db) {
                                ret_ty = async_ret;
                            }
                        }
                        (fn_.const_token().is_some(), fn_.body().map(ast::Expr::BlockExpr), Some(ret_ty))
                    },
                    ast::Static(statik) => {
                        (true, statik.body(), Some(sema.to_def(&statik)?.ty(sema.db)))
                    },
                    ast::ConstArg(ca) => {
                        (true, ca.expr(), infer_expr_opt(ca.expr()))
                    },
                    ast::Const(konst) => {
                        (true, konst.body(), Some(sema.to_def(&konst)?.ty(sema.db)))
                    },
                    ast::ConstParam(cp) => {
                        (true, cp.default_val(), Some(sema.to_def(&cp)?.ty(sema.db)))
                    },
                    ast::ConstBlockPat(cbp) => {
                        let expr = cbp.block_expr().map(ast::Expr::BlockExpr);
                        (true, expr.clone(), infer_expr_opt(expr))
                    },
                    // Extraction from enum variant discriminants or attributes is
                    // not supported.
                    ast::Variant(__) => return None,
                    ast::Meta(__) => return None,
                    ast::LoopExpr(it) => {
                        set_parent_loop(&it);
                        continue;
                    },
                    ast::ForExpr(it) => {
                        set_parent_loop(&it);
                        continue;
                    },
                    ast::WhileExpr(it) => {
                        set_parent_loop(&it);
                        continue;
                    },
                    _ => continue,
                }
            };
        };
        let container_tail = match expr? {
            ast::Expr::BlockExpr(block) => block.tail_expr(),
            expr => Some(expr),
        };
        // The body is "in tail position" if its tail expression is (part of)
        // the container's tail expression.
        let is_in_tail =
            container_tail.zip(self.tail_expr()).map_or(false, |(container_tail, body_tail)| {
                container_tail.syntax().text_range().contains_range(body_tail.syntax().text_range())
            });

        let parent = self.parent()?;
        let parents = generic_parents(&parent);
        let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect();
        let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect();

        Some(ContainerInfo {
            is_in_tail,
            is_const,
            parent_loop,
            ret_type: ty,
            generic_param_lists,
            where_clauses,
        })
    }

    /// Return type of the extracted body: its tail expression's type, or
    /// "statement" when there is no tail expression.
    fn return_ty(&self, ctx: &AssistContext<'_>) -> Option<RetType> {
        match self.tail_expr() {
            Some(expr) => ctx.sema.type_of_expr(&expr).map(TypeInfo::original).map(RetType::Expr),
            None => Some(RetType::Stmt),
        }
    }

    /// Local variables defined inside `body` that are accessed outside of it
    fn ret_values<'a>(
        &self,
        ctx: &'a AssistContext<'_>,
        parent: &SyntaxNode,
    ) -> impl Iterator<Item = OutlivedLocal> + 'a {
        let parent = parent.clone();
        let range = self.text_range();
        locals_defined_in_body(&ctx.sema, self)
            .into_iter()
            .filter_map(move |local| local_outlives_body(ctx, range, local, &parent))
    }

    /// Analyses the function body for external control flow.
    fn external_control_flow(
        &self,
        ctx: &AssistContext<'_>,
        container_info: &ContainerInfo,
    ) -> Option<ControlFlow> {
        let mut ret_expr = None;
        let mut try_expr = None;
        let mut break_expr = None;
        let mut continue_expr = None;
        let mut is_async = false;
        let mut _is_unsafe = false;

        // Depth counters: `break`/`continue` inside a loop that is itself part
        // of the body do not escape it, so they are only recorded at depth 0.
        let mut unsafe_depth = 0;
        let mut loop_depth = 0;

        self.preorder_expr(&mut |expr| {
            let expr = match expr {
                WalkEvent::Enter(e) => e,
                WalkEvent::Leave(expr) => {
                    match expr {
                        ast::Expr::LoopExpr(_)
                        | ast::Expr::ForExpr(_)
                        | ast::Expr::WhileExpr(_) => loop_depth -= 1,
                        ast::Expr::BlockExpr(block_expr) if block_expr.unsafe_token().is_some() => {
                            unsafe_depth -= 1
                        }
                        _ => (),
                    }
                    return false;
                }
            };
            match expr {
                ast::Expr::LoopExpr(_) | ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) => {
                    loop_depth += 1;
                }
                ast::Expr::BlockExpr(block_expr) if block_expr.unsafe_token().is_some() => {
                    unsafe_depth += 1
                }
                ast::Expr::ReturnExpr(it) => {
                    ret_expr = Some(it);
                }
                ast::Expr::TryExpr(it) => {
                    try_expr = Some(it);
                }
                ast::Expr::BreakExpr(it) if loop_depth == 0 => {
                    break_expr = Some(it);
                }
                ast::Expr::ContinueExpr(it) if loop_depth == 0 => {
                    continue_expr = Some(it);
                }
                ast::Expr::AwaitExpr(_) => is_async = true,
                // FIXME: Do unsafe analysis on expression, sem highlighting knows this so we should be able
                // to just lift that out of there
                // expr if unsafe_depth ==0 && expr.is_unsafe => is_unsafe = true,
                _ => {}
            }
            false
        });

        // Combinations of different escaping flow kinds cannot be represented
        // by a single handler, so the assist bails out on them.
        let kind = match (try_expr, ret_expr, break_expr, continue_expr) {
            (Some(_), _, None, None) => {
                let ret_ty = container_info.ret_type.clone()?;
                let kind = TryKind::of_ty(ret_ty, ctx)?;

                Some(FlowKind::Try { kind })
            }
            (Some(_), _, _, _) => {
                cov_mark::hit!(external_control_flow_try_and_bc);
                return None;
            }
            (None, Some(r), None, None) => Some(FlowKind::Return(r.expr())),
            (None, Some(_), _, _) => {
                cov_mark::hit!(external_control_flow_return_and_bc);
                return None;
            }
            (None, None, Some(_), Some(_)) => {
                cov_mark::hit!(external_control_flow_break_and_continue);
                return None;
            }
            (None, None, Some(b), None) => Some(FlowKind::Break(b.lifetime(), b.expr())),
            (None, None, None, Some(c)) => Some(FlowKind::Continue(c.lifetime())),
            (None, None, None, None) => None,
        };

        Some(ControlFlow { kind, is_async, is_unsafe: _is_unsafe })
    }

    /// find variables that should be extracted as params
    ///
    /// Computes additional info that affects param type and mutability
    fn extracted_function_params(
        &self,
        ctx: &AssistContext<'_>,
        container_info: &ContainerInfo,
        locals: impl Iterator<Item = Local>,
    ) -> Vec<Param> {
        locals
            .map(|local| (local, local.source(ctx.db())))
            // Locals defined inside the body become body-internal bindings, not params.
            .filter(|(_, src)| is_defined_outside_of_body(ctx, self, src))
            .filter_map(|(local, src)| match src.value {
                Either::Left(src) => Some((local, src)),
                Either::Right(_) => {
                    stdx::never!(false, "Local::is_self returned false, but source is SelfParam");
                    None
                }
            })
            .map(|(var, src)| {
                let usages = LocalUsages::find_local_usages(ctx, var);
                let ty = var.ty(ctx.db());

                let defined_outside_parent_loop = container_info
                    .parent_loop
                    .as_ref()
                    .map_or(true, |it| it.text_range().contains_range(src.syntax().text_range()));

                let is_copy = ty.is_copy(ctx.db());
                let has_usages = self.has_usages_after_body(&usages);
                let requires_mut =
                    !ty.is_mutable_reference() && has_exclusive_usages(ctx, &usages, self);
                // We can move the value into the function call if it's not used after the call,
                // if the var is not used but defined outside a loop we are extracting from we can't move it either
                // as the function will reuse it in the next iteration.
                let move_local = (!has_usages && defined_outside_parent_loop) || ty.is_reference();
                Param { var, ty, move_local, requires_mut, is_copy }
            })
            .collect()
    }

    /// Whether any usage in `usages` occurs after the body's end.
    fn has_usages_after_body(&self, usages: &LocalUsages) -> bool {
        usages.iter().any(|reference| self.precedes_range(reference.range))
    }
}
+
/// Items whose generic parameters and where clauses may apply to the extracted function.
enum GenericParent {
    Fn(ast::Fn),
    Impl(ast::Impl),
    Trait(ast::Trait),
}
+
+impl GenericParent {
+ fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+ match self {
+ GenericParent::Fn(fn_) => fn_.generic_param_list(),
+ GenericParent::Impl(impl_) => impl_.generic_param_list(),
+ GenericParent::Trait(trait_) => trait_.generic_param_list(),
+ }
+ }
+
+ fn where_clause(&self) -> Option<ast::WhereClause> {
+ match self {
+ GenericParent::Fn(fn_) => fn_.where_clause(),
+ GenericParent::Impl(impl_) => impl_.where_clause(),
+ GenericParent::Trait(trait_) => trait_.where_clause(),
+ }
+ }
+}
+
+/// Search `parent`'s ancestors for items with potentially applicable generic parameters
+fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> {
+ let mut list = Vec::new();
+ if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) {
+ match parent_item {
+ ast::Item::Fn(ref fn_) => {
+ if let Some(parent_parent) = parent_item
+ .syntax()
+ .parent()
+ .and_then(|it| it.parent())
+ .and_then(ast::Item::cast)
+ {
+ match parent_parent {
+ ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)),
+ ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)),
+ _ => (),
+ }
+ }
+ list.push(GenericParent::Fn(fn_.clone()));
+ }
+ _ => (),
+ }
+ }
+ list
+}
+
+/// checks if relevant var is used with `&mut` access inside body
+fn has_exclusive_usages(
+ ctx: &AssistContext<'_>,
+ usages: &LocalUsages,
+ body: &FunctionBody,
+) -> bool {
+ usages
+ .iter()
+ .filter(|reference| body.contains_range(reference.range))
+ .any(|reference| reference_is_exclusive(reference, body, ctx))
+}
+
+/// checks if this reference requires `&mut` access inside node
+fn reference_is_exclusive(
+ reference: &FileReference,
+ node: &dyn HasTokenAtOffset,
+ ctx: &AssistContext<'_>,
+) -> bool {
+ // we directly modify variable with set: `n = 0`, `n += 1`
+ if reference.category == Some(ReferenceCategory::Write) {
+ return true;
+ }
+
+ // we take `&mut` reference to variable: `&mut v`
+ let path = match path_element_of_reference(node, reference) {
+ Some(path) => path,
+ None => return false,
+ };
+
+ expr_require_exclusive_access(ctx, &path).unwrap_or(false)
+}
+
/// checks if this expr requires `&mut` access, recurses on field access
fn expr_require_exclusive_access(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<bool> {
    if let ast::Expr::MacroExpr(_) = expr {
        // FIXME: expand macro and check output for mutable usages of the variable?
        return None;
    }

    let parent = expr.syntax().parent()?;

    // Assignment: exclusive only when `expr` is the assignee (left-hand side).
    if let Some(bin_expr) = ast::BinExpr::cast(parent.clone()) {
        if matches!(bin_expr.op_kind()?, ast::BinaryOp::Assignment { .. }) {
            return Some(bin_expr.lhs()?.syntax() == expr.syntax());
        }
        return Some(false);
    }

    // `&mut expr` takes an exclusive reference.
    if let Some(ref_expr) = ast::RefExpr::cast(parent.clone()) {
        return Some(ref_expr.mut_token().is_some());
    }

    // Method call: exclusive iff the resolved method's `self` access is exclusive.
    if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
        let func = ctx.sema.resolve_method_call(&method_call)?;
        let self_param = func.self_param(ctx.db())?;
        let access = self_param.access(ctx.db());

        return Some(matches!(access, hir::Access::Exclusive));
    }

    // Field access: whatever is done to the field is done through the base.
    if let Some(field) = ast::FieldExpr::cast(parent) {
        return expr_require_exclusive_access(ctx, &field.into());
    }

    Some(false)
}
+
/// Abstraction over "something tokens can be looked up in by offset", letting the
/// same reference analysis run on a plain node or on a partial `FunctionBody` span.
trait HasTokenAtOffset {
    fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken>;
}
+
impl HasTokenAtOffset for SyntaxNode {
    fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
        // Qualified form: delegate explicitly to the inherent
        // `SyntaxNode::token_at_offset`, not this trait method.
        SyntaxNode::token_at_offset(self, offset)
    }
}
+
impl HasTokenAtOffset for FunctionBody {
    fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
        match self {
            FunctionBody::Expr(expr) => expr.syntax().token_at_offset(offset),
            FunctionBody::Span { parent, text_range } => {
                // Delegate to the whole statement list, then discard any token
                // that falls outside the selected span.
                match parent.syntax().token_at_offset(offset) {
                    TokenAtOffset::None => TokenAtOffset::None,
                    TokenAtOffset::Single(t) => {
                        if text_range.contains_range(t.text_range()) {
                            TokenAtOffset::Single(t)
                        } else {
                            TokenAtOffset::None
                        }
                    }
                    TokenAtOffset::Between(a, b) => {
                        // Keep whichever of the two adjacent tokens lies in range.
                        match (
                            text_range.contains_range(a.text_range()),
                            text_range.contains_range(b.text_range()),
                        ) {
                            (true, true) => TokenAtOffset::Between(a, b),
                            (true, false) => TokenAtOffset::Single(a),
                            (false, true) => TokenAtOffset::Single(b),
                            (false, false) => TokenAtOffset::None,
                        }
                    }
                }
            }
        }
    }
}
+
/// find relevant `ast::Expr` for reference
///
/// # Preconditions
///
/// `node` must cover `reference`, that is `node.text_range().contains_range(reference.range)`
fn path_element_of_reference(
    node: &dyn HasTokenAtOffset,
    reference: &FileReference,
) -> Option<ast::Expr> {
    // Right-biased: at a token boundary, the token starting at the offset is the usage.
    let token = node.token_at_offset(reference.range.start()).right_biased().or_else(|| {
        stdx::never!(false, "cannot find token at variable usage: {:?}", reference);
        None
    })?;
    let path = token.parent_ancestors().find_map(ast::Expr::cast).or_else(|| {
        stdx::never!(false, "cannot find path parent of variable usage: {:?}", token);
        None
    })?;
    // A usage inside a macro call surfaces as `MacroExpr` rather than `PathExpr`.
    stdx::always!(
        matches!(path, ast::Expr::PathExpr(_) | ast::Expr::MacroExpr(_)),
        "unexpected expression type for variable usage: {:?}",
        path
    );
    Some(path)
}
+
+/// list local variables defined inside `body`
+fn locals_defined_in_body(
+ sema: &Semantics<'_, RootDatabase>,
+ body: &FunctionBody,
+) -> FxIndexSet<Local> {
+ // FIXME: this doesn't work well with macros
+ // see https://github.com/rust-lang/rust-analyzer/pull/7535#discussion_r570048550
+ let mut res = FxIndexSet::default();
+ body.walk_pat(&mut |pat| {
+ if let ast::Pat::IdentPat(pat) = pat {
+ if let Some(local) = sema.to_def(&pat) {
+ res.insert(local);
+ }
+ }
+ });
+ res
+}
+
+/// Returns usage details if local variable is used after(outside of) body
+fn local_outlives_body(
+ ctx: &AssistContext<'_>,
+ body_range: TextRange,
+ local: Local,
+ parent: &SyntaxNode,
+) -> Option<OutlivedLocal> {
+ let usages = LocalUsages::find_local_usages(ctx, local);
+ let mut has_mut_usages = false;
+ let mut any_outlives = false;
+ for usage in usages.iter() {
+ if body_range.end() <= usage.range.start() {
+ has_mut_usages |= reference_is_exclusive(usage, parent, ctx);
+ any_outlives |= true;
+ if has_mut_usages {
+ break; // no need to check more elements we have all the info we wanted
+ }
+ }
+ }
+ if !any_outlives {
+ return None;
+ }
+ Some(OutlivedLocal { local, mut_usage_outside_body: has_mut_usages })
+}
+
+/// checks if the relevant local was defined before(outside of) body
+fn is_defined_outside_of_body(
+ ctx: &AssistContext<'_>,
+ body: &FunctionBody,
+ src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>,
+) -> bool {
+ src.file_id.original_file(ctx.db()) == ctx.file_id()
+ && !body.contains_node(either_syntax(&src.value))
+}
+
+fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode {
+ match value {
+ Either::Left(pat) => pat.syntax(),
+ Either::Right(it) => it.syntax(),
+ }
+}
+
/// find where to put extracted function definition
///
/// Function should be put right after returned node
fn node_to_insert_after(body: &FunctionBody, anchor: Anchor) -> Option<SyntaxNode> {
    let node = body.node();
    let mut ancestors = node.ancestors().peekable();
    let mut last_ancestor = None;
    while let Some(next_ancestor) = ancestors.next() {
        match next_ancestor.kind() {
            SyntaxKind::SOURCE_FILE => break,
            // A freestanding fn stops at a module's item list; otherwise keep climbing.
            SyntaxKind::ITEM_LIST if !matches!(anchor, Anchor::Freestanding) => continue,
            SyntaxKind::ITEM_LIST => {
                if ancestors.peek().map(SyntaxNode::kind) == Some(SyntaxKind::MODULE) {
                    break;
                }
            }
            // A method stops at an impl's assoc item list; trait impls are
            // skipped so the new fn is placed outside of them.
            SyntaxKind::ASSOC_ITEM_LIST if !matches!(anchor, Anchor::Method) => continue,
            SyntaxKind::ASSOC_ITEM_LIST if body.extracted_from_trait_impl() => continue,
            SyntaxKind::ASSOC_ITEM_LIST => {
                if ancestors.peek().map(SyntaxNode::kind) == Some(SyntaxKind::IMPL) {
                    break;
                }
            }
            _ => (),
        }
        // `last_ancestor` ends up as the child of the node we broke on.
        last_ancestor = Some(next_ancestor);
    }
    last_ancestor
}
+
/// Renders the call-site replacement text: optional `let` binding(s) for
/// outliving locals, the (possibly awaited/flow-wrapped) call itself, and a
/// trailing `,`/`;` where the surrounding syntax needs one.
fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> String {
    let ret_ty = fun.return_type(ctx);

    let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx)));
    let name = fun.name.clone();
    // Methods are called as `self.name(..)`, free functions as `name(..)`.
    let mut call_expr = if fun.self_param.is_some() {
        let self_arg = make::expr_path(make::ext::ident_path("self"));
        make::expr_method_call(self_arg, name, args)
    } else {
        let func = make::expr_path(make::path_unqualified(make::path_segment(name)));
        make::expr_call(func, args)
    };

    let handler = FlowHandler::from_ret_ty(fun, &ret_ty);

    if fun.control_flow.is_async {
        call_expr = make::expr_await(call_expr);
    }
    // Wrap the call so any exported control flow is replayed at the call site.
    let expr = handler.make_call_expr(call_expr).indent(indent);

    let mut_modifier = |var: &OutlivedLocal| if var.mut_usage_outside_body { "mut " } else { "" };

    let mut buf = String::new();
    // Bind outliving locals: nothing, a single `let`, or a tuple destructuring.
    match fun.outliving_locals.as_slice() {
        [] => {}
        [var] => {
            format_to!(buf, "let {}{} = ", mut_modifier(var), var.local.name(ctx.db()))
        }
        vars => {
            buf.push_str("let (");
            let bindings = vars.iter().format_with(", ", |local, f| {
                f(&format_args!("{}{}", mut_modifier(local), local.local.name(ctx.db())))
            });
            format_to!(buf, "{}", bindings);
            buf.push_str(") = ");
        }
    }

    format_to!(buf, "{}", expr);
    // A match arm gets a `,` if it lacks one; a unit-typed statement gets a `;`
    // unless the expression is block-like.
    let insert_comma = fun
        .body
        .parent()
        .and_then(ast::MatchArm::cast)
        .map_or(false, |it| it.comma_token().is_none());
    if insert_comma {
        buf.push(',');
    } else if fun.ret_ty.is_unit() && (!fun.outliving_locals.is_empty() || !expr.is_block_like()) {
        buf.push(';');
    }
    buf
}
+
/// How the call site unpacks the extracted function's result in order to
/// replay exported control flow.
enum FlowHandler {
    /// No exported control flow; use the call as-is.
    None,
    /// Body used `?`; re-apply `?` to the call.
    Try { kind: TryKind },
    /// Value-less flow, no return value: an `if let` that triggers `action`.
    If { action: FlowKind },
    /// Flow carrying a value, no return value: `if let Some(value)` triggers `action`.
    IfOption { action: FlowKind },
    /// Value-less flow plus a return value: `match` on `Option`.
    MatchOption { none: FlowKind },
    /// Flow carrying a value plus a return value: `match` on `Result`.
    MatchResult { err: FlowKind },
}
+
impl FlowHandler {
    /// Chooses the call-site unpacking strategy from the exported control flow
    /// and whether the extracted function also produces a value.
    fn from_ret_ty(fun: &Function, ret_ty: &FunType) -> FlowHandler {
        match &fun.control_flow.kind {
            None => FlowHandler::None,
            Some(flow_kind) => {
                let action = flow_kind.clone();
                if *ret_ty == FunType::Unit {
                    match flow_kind {
                        // Flow without payload: a simple conditional check suffices.
                        FlowKind::Return(None)
                        | FlowKind::Break(_, None)
                        | FlowKind::Continue(_) => FlowHandler::If { action },
                        // Flow with payload: `Option` distinguishes the two cases.
                        FlowKind::Return(_) | FlowKind::Break(_, _) => {
                            FlowHandler::IfOption { action }
                        }
                        FlowKind::Try { kind } => FlowHandler::Try { kind: kind.clone() },
                    }
                } else {
                    match flow_kind {
                        // Flow without payload alongside a return value: `Option`.
                        FlowKind::Return(None)
                        | FlowKind::Break(_, None)
                        | FlowKind::Continue(_) => FlowHandler::MatchOption { none: action },
                        // Flow with payload alongside a return value: `Result`.
                        FlowKind::Return(_) | FlowKind::Break(_, _) => {
                            FlowHandler::MatchResult { err: action }
                        }
                        FlowKind::Try { kind } => FlowHandler::Try { kind: kind.clone() },
                    }
                }
            }
        }
    }

    /// Wraps `call_expr` in the unpacking construct for this handler.
    fn make_call_expr(&self, call_expr: ast::Expr) -> ast::Expr {
        match self {
            FlowHandler::None => call_expr,
            FlowHandler::Try { kind: _ } => make::expr_try(call_expr),
            // `if let ControlFlow::Break(_) = call() { <action> }`
            FlowHandler::If { action } => {
                let action = action.make_result_handler(None);
                let stmt = make::expr_stmt(action);
                let block = make::block_expr(iter::once(stmt.into()), None);
                let controlflow_break_path = make::path_from_text("ControlFlow::Break");
                let condition = make::expr_let(
                    make::tuple_struct_pat(
                        controlflow_break_path,
                        iter::once(make::wildcard_pat().into()),
                    )
                    .into(),
                    call_expr,
                );
                make::expr_if(condition.into(), block, None)
            }
            // `if let Some(value) = call() { <action value> }`
            FlowHandler::IfOption { action } => {
                let path = make::ext::ident_path("Some");
                let value_pat = make::ext::simple_ident_pat(make::name("value"));
                let pattern = make::tuple_struct_pat(path, iter::once(value_pat.into()));
                let cond = make::expr_let(pattern.into(), call_expr);
                let value = make::expr_path(make::ext::ident_path("value"));
                let action_expr = action.make_result_handler(Some(value));
                let action_stmt = make::expr_stmt(action_expr);
                let then = make::block_expr(iter::once(action_stmt.into()), None);
                make::expr_if(cond.into(), then, None)
            }
            // `match call() { Some(value) => value, None => <action> }`
            FlowHandler::MatchOption { none } => {
                let some_name = "value";

                let some_arm = {
                    let path = make::ext::ident_path("Some");
                    let value_pat = make::ext::simple_ident_pat(make::name(some_name));
                    let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
                    let value = make::expr_path(make::ext::ident_path(some_name));
                    make::match_arm(iter::once(pat.into()), None, value)
                };
                let none_arm = {
                    let path = make::ext::ident_path("None");
                    let pat = make::path_pat(path);
                    make::match_arm(iter::once(pat), None, none.make_result_handler(None))
                };
                let arms = make::match_arm_list(vec![some_arm, none_arm]);
                make::expr_match(call_expr, arms)
            }
            // `match call() { Ok(value) => value, Err(value) => <action value> }`
            // — both arms deliberately bind the same name `value`.
            FlowHandler::MatchResult { err } => {
                let ok_name = "value";
                let err_name = "value";

                let ok_arm = {
                    let path = make::ext::ident_path("Ok");
                    let value_pat = make::ext::simple_ident_pat(make::name(ok_name));
                    let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
                    let value = make::expr_path(make::ext::ident_path(ok_name));
                    make::match_arm(iter::once(pat.into()), None, value)
                };
                let err_arm = {
                    let path = make::ext::ident_path("Err");
                    let value_pat = make::ext::simple_ident_pat(make::name(err_name));
                    let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
                    let value = make::expr_path(make::ext::ident_path(err_name));
                    make::match_arm(
                        iter::once(pat.into()),
                        None,
                        err.make_result_handler(Some(value)),
                    )
                };
                let arms = make::match_arm_list(vec![ok_arm, err_arm]);
                make::expr_match(call_expr, arms)
            }
        }
    }
}
+
+fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local) -> ast::Expr {
+ let name = var.name(ctx.db()).to_string();
+ make::expr_path(make::ext::ident_path(&name))
+}
+
/// Renders the extracted function's full definition as text: modifiers,
/// name, generics, parameter list, return type, where clause and body.
fn format_function(
    ctx: &AssistContext<'_>,
    module: hir::Module,
    fun: &Function,
    old_indent: IndentLevel,
    new_indent: IndentLevel,
) -> String {
    let mut fn_def = String::new();
    let params = fun.make_param_list(ctx, module);
    let ret_ty = fun.make_ret_ty(ctx, module);
    let body = make_body(ctx, old_indent, new_indent, fun);
    let const_kw = if fun.mods.is_const { "const " } else { "" };
    let async_kw = if fun.control_flow.is_async { "async " } else { "" };
    let unsafe_kw = if fun.control_flow.is_unsafe { "unsafe " } else { "" };
    let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun);
    // With snippet support, `$0` places the cursor on the fn name for renaming.
    match ctx.config.snippet_cap {
        Some(_) => format_to!(
            fn_def,
            "\n\n{}{}{}{}fn $0{}",
            new_indent,
            const_kw,
            async_kw,
            unsafe_kw,
            fun.name,
        ),
        None => format_to!(
            fn_def,
            "\n\n{}{}{}{}fn {}",
            new_indent,
            const_kw,
            async_kw,
            unsafe_kw,
            fun.name,
        ),
    }

    if let Some(generic_params) = generic_params {
        format_to!(fn_def, "{}", generic_params);
    }

    format_to!(fn_def, "{}", params);

    if let Some(ret_ty) = ret_ty {
        format_to!(fn_def, " {}", ret_ty);
    }

    if let Some(where_clause) = where_clause {
        format_to!(fn_def, " {}", where_clause);
    }

    format_to!(fn_def, " {}", body);

    fn_def
}
+
+/// Computes the generic parameter list and where-clause the extracted function
+/// needs, based on which of the enclosing items' type parameters are actually
+/// used inside the extracted body or its parameter types.
+fn make_generic_params_and_where_clause(
+    ctx: &AssistContext<'_>,
+    fun: &Function,
+) -> (Option<ast::GenericParamList>, Option<ast::WhereClause>) {
+    let used_type_params = fun.type_params(ctx);
+
+    let generic_param_list = make_generic_param_list(ctx, fun, &used_type_params);
+    let where_clause = make_where_clause(ctx, fun, &used_type_params);
+
+    (generic_param_list, where_clause)
+}
+
+/// Builds a generic parameter list for the extracted function by filtering the
+/// enclosing items' generic parameter lists down to the parameters that are
+/// actually required. Returns `None` when no parameter survives the filter,
+/// so no empty `<>` is emitted.
+fn make_generic_param_list(
+    ctx: &AssistContext<'_>,
+    fun: &Function,
+    used_type_params: &[TypeParam],
+) -> Option<ast::GenericParamList> {
+    let mut generic_params = fun
+        .mods
+        .generic_param_lists
+        .iter()
+        .flat_map(|parent_params| {
+            parent_params
+                .generic_params()
+                .filter(|param| param_is_required(ctx, param, used_type_params))
+        })
+        // Peekable lets us test for emptiness without collecting.
+        .peekable();
+
+    if generic_params.peek().is_some() {
+        Some(make::generic_param_list(generic_params))
+    } else {
+        None
+    }
+}
+
+/// Returns `true` if the given parent generic parameter must be copied onto
+/// the extracted function, i.e. it is a type parameter that resolves to one of
+/// the `used_type_params`. Const and lifetime parameters are never copied
+/// (only type parameter usage is tracked here).
+fn param_is_required(
+    ctx: &AssistContext<'_>,
+    param: &ast::GenericParam,
+    used_type_params: &[TypeParam],
+) -> bool {
+    match param {
+        ast::GenericParam::ConstParam(_) | ast::GenericParam::LifetimeParam(_) => false,
+        ast::GenericParam::TypeParam(type_param) => match &ctx.sema.to_def(type_param) {
+            Some(def) => used_type_params.contains(def),
+            _ => false,
+        },
+    }
+}
+
+/// Builds a where-clause for the extracted function by filtering the enclosing
+/// items' where-clauses down to predicates that constrain one of the used type
+/// parameters. Returns `None` when no predicate survives, so no empty
+/// `where` is emitted.
+fn make_where_clause(
+    ctx: &AssistContext<'_>,
+    fun: &Function,
+    used_type_params: &[TypeParam],
+) -> Option<ast::WhereClause> {
+    let mut predicates = fun
+        .mods
+        .where_clauses
+        .iter()
+        .flat_map(|parent_where_clause| {
+            parent_where_clause
+                .predicates()
+                .filter(|pred| pred_is_required(ctx, pred, used_type_params))
+        })
+        // Peekable lets us test for emptiness without collecting.
+        .peekable();
+
+    if predicates.peek().is_some() {
+        Some(make::where_clause(predicates))
+    } else {
+        None
+    }
+}
+
+/// Returns `true` if the where-predicate's subject type resolves to one of the
+/// type parameters used by the extracted function, meaning the predicate must
+/// be carried over onto the new function's where-clause.
+fn pred_is_required(
+    ctx: &AssistContext<'_>,
+    pred: &ast::WherePred,
+    used_type_params: &[TypeParam],
+) -> bool {
+    match resolved_type_param(ctx, pred) {
+        Some(it) => used_type_params.contains(&it),
+        None => false,
+    }
+}
+
+/// Resolves the subject type of a where-predicate to a `TypeParam`, if the
+/// type is a plain path (e.g. `T: Trait`) that names a type parameter.
+/// Returns `None` for non-path types or paths resolving to anything else.
+fn resolved_type_param(ctx: &AssistContext<'_>, pred: &ast::WherePred) -> Option<TypeParam> {
+    let path = match pred.ty()? {
+        ast::Type::PathType(path_type) => path_type.path(),
+        _ => None,
+    }?;
+
+    match ctx.sema.resolve_path(&path)? {
+        PathResolution::TypeParam(type_param) => Some(type_param),
+        _ => None,
+    }
+}
+
+impl Function {
+    /// Collect all the `TypeParam`s used in the `body` and `params`.
+    fn type_params(&self, ctx: &AssistContext<'_>) -> Vec<TypeParam> {
+        // Type parameters referenced by paths inside the extracted body...
+        let type_params_in_descendant_paths =
+            self.body.descendant_paths().filter_map(|it| match ctx.sema.resolve_path(&it) {
+                Some(PathResolution::TypeParam(type_param)) => Some(type_param),
+                _ => None,
+            });
+        // ...plus those appearing in the types of the captured parameters.
+        let type_params_in_params = self.params.iter().filter_map(|p| p.ty.as_type_param(ctx.db()));
+        type_params_in_descendant_paths.chain(type_params_in_params).collect()
+    }
+
+    /// Builds the new function's parameter list: the captured `self` parameter
+    /// (if any) followed by one parameter per captured local.
+    fn make_param_list(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::ParamList {
+        let self_param = self.self_param.clone();
+        let params = self.params.iter().map(|param| param.to_param(ctx, module));
+        make::param_list(self_param, params)
+    }
+
+    /// Computes the return type of the new function, wrapping the body's own
+    /// type according to the flow handler: `Option<_>` / `Result<_, _>` for
+    /// `?`-style and match-based flow, `ControlFlow<()>` for plain
+    /// break/continue, etc. Returns `None` (no `-> ...` at all) only for a
+    /// unit-typed body with no flow handling.
+    fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option<ast::RetType> {
+        let fun_ty = self.return_type(ctx);
+        // A selection in tail position never needs control-flow rewriting.
+        let handler = if self.mods.is_in_tail {
+            FlowHandler::None
+        } else {
+            FlowHandler::from_ret_ty(self, &fun_ty)
+        };
+        let ret_ty = match &handler {
+            FlowHandler::None => {
+                if matches!(fun_ty, FunType::Unit) {
+                    return None;
+                }
+                fun_ty.make_ty(ctx, module)
+            }
+            FlowHandler::Try { kind: TryKind::Option } => {
+                make::ext::ty_option(fun_ty.make_ty(ctx, module))
+            }
+            FlowHandler::Try { kind: TryKind::Result { ty: parent_ret_ty } } => {
+                // Reuse the parent function's error type (second type argument
+                // of its `Result`); fall back to `_` if it cannot be read.
+                let handler_ty = parent_ret_ty
+                    .type_arguments()
+                    .nth(1)
+                    .map(|ty| make_ty(&ty, ctx, module))
+                    .unwrap_or_else(make::ty_placeholder);
+                make::ext::ty_result(fun_ty.make_ty(ctx, module), handler_ty)
+            }
+            FlowHandler::If { .. } => make::ty("ControlFlow<()>"),
+            FlowHandler::IfOption { action } => {
+                let handler_ty = action
+                    .expr_ty(ctx)
+                    .map(|ty| make_ty(&ty, ctx, module))
+                    .unwrap_or_else(make::ty_placeholder);
+                make::ext::ty_option(handler_ty)
+            }
+            FlowHandler::MatchOption { .. } => make::ext::ty_option(fun_ty.make_ty(ctx, module)),
+            FlowHandler::MatchResult { err } => {
+                let handler_ty = err
+                    .expr_ty(ctx)
+                    .map(|ty| make_ty(&ty, ctx, module))
+                    .unwrap_or_else(make::ty_placeholder);
+                make::ext::ty_result(fun_ty.make_ty(ctx, module), handler_ty)
+            }
+        };
+        Some(make::ret_type(ret_ty))
+    }
+}
+
+impl FunType {
+    /// Renders this function type as AST: `()` for unit, the rendered type for
+    /// a single value, and a tuple type for multiple outliving locals. The 0-
+    /// and 1-element tuple cases are not expected to occur (`never!`) but are
+    /// degraded gracefully to unit / the single type.
+    fn make_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+        match self {
+            FunType::Unit => make::ty_unit(),
+            FunType::Single(ty) => make_ty(ty, ctx, module),
+            FunType::Tuple(types) => match types.as_slice() {
+                [] => {
+                    stdx::never!("tuple type with 0 elements");
+                    make::ty_unit()
+                }
+                [ty] => {
+                    stdx::never!("tuple type with 1 element");
+                    make_ty(ty, ctx, module)
+                }
+                types => {
+                    let types = types.iter().map(|ty| make_ty(ty, ctx, module));
+                    make::ty_tuple(types)
+                }
+            },
+        }
+    }
+}
+
+/// Builds the body block of the extracted function.
+///
+/// Rewrites the selected code (parameter usages and external control flow),
+/// dedents it from `old_indent` and re-indents for the new function,
+/// synthesizes a tail expression returning the outliving locals when needed,
+/// and finally wraps or appends the tail expression as dictated by the
+/// `FlowHandler` (e.g. wrapping in `Some(..)`/`Ok(..)`, or appending
+/// `ControlFlow::Continue(())` / `None`).
+fn make_body(
+    ctx: &AssistContext<'_>,
+    old_indent: IndentLevel,
+    new_indent: IndentLevel,
+    fun: &Function,
+) -> ast::BlockExpr {
+    let ret_ty = fun.return_type(ctx);
+    // A selection in tail position never needs control-flow rewriting.
+    let handler = if fun.mods.is_in_tail {
+        FlowHandler::None
+    } else {
+        FlowHandler::from_ret_ty(fun, &ret_ty)
+    };
+
+    let block = match &fun.body {
+        FunctionBody::Expr(expr) => {
+            let expr = rewrite_body_segment(ctx, &fun.params, &handler, expr.syntax());
+            let expr = ast::Expr::cast(expr).unwrap();
+            match expr {
+                ast::Expr::BlockExpr(block) => {
+                    // If the extracted expression is itself a block, there is no need to wrap it inside another block.
+                    let block = block.dedent(old_indent);
+                    // Recreate the block for formatting consistency with other extracted functions.
+                    make::block_expr(block.statements(), block.tail_expr())
+                }
+                _ => {
+                    let expr = expr.dedent(old_indent).indent(IndentLevel(1));
+
+                    make::block_expr(Vec::new(), Some(expr))
+                }
+            }
+        }
+        FunctionBody::Span { parent, text_range } => {
+            // Collect the selected children (nodes and tokens alike, so
+            // comments and whitespace survive), rewriting each node.
+            let mut elements: Vec<_> = parent
+                .syntax()
+                .children_with_tokens()
+                .filter(|it| text_range.contains_range(it.text_range()))
+                .map(|it| match &it {
+                    syntax::NodeOrToken::Node(n) => syntax::NodeOrToken::Node(
+                        rewrite_body_segment(ctx, &fun.params, &handler, n),
+                    ),
+                    _ => it,
+                })
+                .collect();
+
+            // If the selection ends in an expression, that becomes the tail.
+            let mut tail_expr = match &elements.last() {
+                Some(syntax::NodeOrToken::Node(node)) if ast::Expr::can_cast(node.kind()) => {
+                    ast::Expr::cast(node.clone())
+                }
+                _ => None,
+            };
+
+            match tail_expr {
+                Some(_) => {
+                    // The tail expression is rendered separately below, so
+                    // drop it from the statement list.
+                    elements.pop();
+                }
+                // No natural tail: synthesize one that hands the outliving
+                // locals back to the caller (single value or tuple).
+                None => match fun.outliving_locals.as_slice() {
+                    [] => {}
+                    [var] => {
+                        tail_expr = Some(path_expr_from_local(ctx, var.local));
+                    }
+                    vars => {
+                        let exprs = vars.iter().map(|var| path_expr_from_local(ctx, var.local));
+                        let expr = make::expr_tuple(exprs);
+                        tail_expr = Some(expr);
+                    }
+                },
+            };
+
+            // Re-indent every statement from the old nesting depth to one
+            // level inside the new function body.
+            let body_indent = IndentLevel(1);
+            let elements = elements
+                .into_iter()
+                .map(|node_or_token| match &node_or_token {
+                    syntax::NodeOrToken::Node(node) => match ast::Stmt::cast(node.clone()) {
+                        Some(stmt) => {
+                            let indented = stmt.dedent(old_indent).indent(body_indent);
+                            let ast_node = indented.syntax().clone_subtree();
+                            syntax::NodeOrToken::Node(ast_node)
+                        }
+                        _ => node_or_token,
+                    },
+                    _ => node_or_token,
+                })
+                .collect::<Vec<SyntaxElement>>();
+            let tail_expr = tail_expr.map(|expr| expr.dedent(old_indent).indent(body_indent));
+
+            make::hacky_block_expr_with_comments(elements, tail_expr)
+        }
+    };
+
+    // Adjust the tail expression to match the flow handler's return type.
+    let block = match &handler {
+        FlowHandler::None => block,
+        FlowHandler::Try { kind } => {
+            // `?`-style flow: success values must be wrapped in the success
+            // constructor; a missing tail means the success value is `()`.
+            let block = with_default_tail_expr(block, make::expr_unit());
+            map_tail_expr(block, |tail_expr| {
+                let constructor = match kind {
+                    TryKind::Option => "Some",
+                    TryKind::Result { .. } => "Ok",
+                };
+                let func = make::expr_path(make::ext::ident_path(constructor));
+                let args = make::arg_list(iter::once(tail_expr));
+                make::expr_call(func, args)
+            })
+        }
+        FlowHandler::If { .. } => {
+            // Normal completion reports `ControlFlow::Continue(())`.
+            let controlflow_continue = make::expr_call(
+                make::expr_path(make::path_from_text("ControlFlow::Continue")),
+                make::arg_list(iter::once(make::expr_unit())),
+            );
+            with_tail_expr(block, controlflow_continue)
+        }
+        FlowHandler::IfOption { .. } => {
+            // Normal completion reports `None` (the early-exit path returns `Some`).
+            let none = make::expr_path(make::ext::ident_path("None"));
+            with_tail_expr(block, none)
+        }
+        FlowHandler::MatchOption { .. } => map_tail_expr(block, |tail_expr| {
+            let some = make::expr_path(make::ext::ident_path("Some"));
+            let args = make::arg_list(iter::once(tail_expr));
+            make::expr_call(some, args)
+        }),
+        FlowHandler::MatchResult { .. } => map_tail_expr(block, |tail_expr| {
+            let ok = make::expr_path(make::ext::ident_path("Ok"));
+            let args = make::arg_list(iter::once(tail_expr));
+            make::expr_call(ok, args)
+        }),
+    };
+
+    block.indent(new_indent)
+}
+
+/// Applies `f` to the block's tail expression, rebuilding the block.
+/// A block without a tail expression is returned unchanged.
+fn map_tail_expr(block: ast::BlockExpr, f: impl FnOnce(ast::Expr) -> ast::Expr) -> ast::BlockExpr {
+    let tail_expr = match block.tail_expr() {
+        Some(tail_expr) => tail_expr,
+        None => return block,
+    };
+    make::block_expr(block.statements(), Some(f(tail_expr)))
+}
+
+/// Appends `tail_expr` as the block's tail expression only if the block does
+/// not already have one.
+fn with_default_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr {
+    match block.tail_expr() {
+        Some(_) => block,
+        None => make::block_expr(block.statements(), Some(tail_expr)),
+    }
+}
+
+/// Forces `tail_expr` to be the block's tail expression; any existing tail
+/// expression is demoted to an ordinary expression statement before it.
+fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr {
+    let stmt_tail = block.tail_expr().map(|expr| make::expr_stmt(expr).into());
+    let stmts = block.statements().chain(stmt_tail);
+    make::block_expr(stmts, Some(tail_expr))
+}
+
+/// Renders a `hir::Type` as source text in the context of `module`, falling
+/// back to the `_` placeholder when the type cannot be displayed.
+fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String {
+    ty.display_source_code(ctx.db(), module.into()).ok().unwrap_or_else(|| "_".to_string())
+}
+
+/// Converts a `hir::Type` into an `ast::Type` by rendering it to source text
+/// (see [`format_type`]) and re-parsing that text.
+fn make_ty(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+    let ty_str = format_type(ty, ctx, module);
+    make::ty(&ty_str)
+}
+
+/// Rewrites one segment of the extracted body: first adjusts usages of
+/// captured parameters (for added `&`/`&mut`), then rewrites external
+/// control-flow expressions in place according to `handler`.
+fn rewrite_body_segment(
+    ctx: &AssistContext<'_>,
+    params: &[Param],
+    handler: &FlowHandler,
+    syntax: &SyntaxNode,
+) -> SyntaxNode {
+    let syntax = fix_param_usages(ctx, params, syntax);
+    update_external_control_flow(handler, &syntax);
+    syntax
+}
+
+/// Change all usages to account for added `&`/`&mut` for some params.
+///
+/// For every parameter passed by reference, each usage inside the extracted
+/// body is adjusted on a mutable copy of the tree:
+/// * method calls and field accesses are left as-is (auto-ref/deref applies);
+/// * an explicit `&`/`&mut` matching the parameter's reference kind is
+///   removed (the value is already a reference);
+/// * any other usage gets an explicit `*` dereference.
+fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode {
+    let mut usages_for_param: Vec<(&Param, Vec<ast::Expr>)> = Vec::new();
+
+    let tm = TreeMutator::new(syntax);
+
+    for param in params {
+        // Only by-reference params need usage rewriting.
+        if !param.kind().is_ref() {
+            continue;
+        }
+
+        let usages = LocalUsages::find_local_usages(ctx, param.var);
+        let usages = usages
+            .iter()
+            .filter(|reference| syntax.text_range().contains_range(reference.range))
+            .filter_map(|reference| path_element_of_reference(syntax, reference))
+            // Map each usage into the mutable tree so it can be edited below.
+            .map(|expr| tm.make_mut(&expr));
+
+        usages_for_param.push((param, usages.collect()));
+    }
+
+    let res = tm.make_syntax_mut(syntax);
+
+    for (param, usages) in usages_for_param {
+        for usage in usages {
+            // Inspect the closest enclosing expression of the usage.
+            match usage.syntax().ancestors().skip(1).find_map(ast::Expr::cast) {
+                Some(ast::Expr::MethodCallExpr(_) | ast::Expr::FieldExpr(_)) => {
+                    // do nothing
+                }
+                Some(ast::Expr::RefExpr(node))
+                    if param.kind() == ParamKind::MutRef && node.mut_token().is_some() =>
+                {
+                    // `&mut n` where `n` is now `&mut i32`: drop the `&mut`.
+                    ted::replace(node.syntax(), node.expr().unwrap().syntax());
+                }
+                Some(ast::Expr::RefExpr(node))
+                    if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() =>
+                {
+                    // `&n` where `n` is now `&i32`: drop the `&`.
+                    ted::replace(node.syntax(), node.expr().unwrap().syntax());
+                }
+                Some(_) | None => {
+                    // Plain value usage: insert an explicit dereference.
+                    let p = &make::expr_prefix(T![*], usage.clone()).clone_for_update();
+                    ted::replace(usage.syntax(), p.syntax())
+                }
+            }
+        }
+    }
+
+    res
+}
+
+/// Rewrites `return`/`break`/`continue` expressions in the extracted body so
+/// the new function reports the control flow back to its caller via `handler`
+/// (see [`make_rewritten_flow`]).
+///
+/// While walking, the outermost nested loop and the outermost nested item
+/// scope (fn/const/static/impl/module) are tracked: `break`/`continue`
+/// belonging to a nested loop, and any flow inside a nested item, target that
+/// construct rather than the extracted function and must be left untouched.
+fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) {
+    let mut nested_loop = None;
+    let mut nested_scope = None;
+    for event in syntax.preorder() {
+        match event {
+            WalkEvent::Enter(e) => match e.kind() {
+                SyntaxKind::LOOP_EXPR | SyntaxKind::WHILE_EXPR | SyntaxKind::FOR_EXPR => {
+                    // Remember only the outermost nested loop.
+                    if nested_loop.is_none() {
+                        nested_loop = Some(e.clone());
+                    }
+                }
+                SyntaxKind::FN
+                | SyntaxKind::CONST
+                | SyntaxKind::STATIC
+                | SyntaxKind::IMPL
+                | SyntaxKind::MODULE => {
+                    // Remember only the outermost nested item scope.
+                    if nested_scope.is_none() {
+                        nested_scope = Some(e.clone());
+                    }
+                }
+                _ => {}
+            },
+            WalkEvent::Leave(e) => {
+                // Rewrite on Leave so children are processed bottom-up; skip
+                // everything inside a nested item scope entirely.
+                if nested_scope.is_none() {
+                    if let Some(expr) = ast::Expr::cast(e.clone()) {
+                        match expr {
+                            // NOTE(review): this arm's `nested_scope.is_none()`
+                            // guard is redundant — the enclosing `if` above
+                            // already established it.
+                            ast::Expr::ReturnExpr(return_expr) if nested_scope.is_none() => {
+                                let expr = return_expr.expr();
+                                if let Some(replacement) = make_rewritten_flow(handler, expr) {
+                                    ted::replace(return_expr.syntax(), replacement.syntax())
+                                }
+                            }
+                            ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => {
+                                let expr = break_expr.expr();
+                                if let Some(replacement) = make_rewritten_flow(handler, expr) {
+                                    ted::replace(break_expr.syntax(), replacement.syntax())
+                                }
+                            }
+                            ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => {
+                                if let Some(replacement) = make_rewritten_flow(handler, None) {
+                                    ted::replace(continue_expr.syntax(), replacement.syntax())
+                                }
+                            }
+                            _ => {
+                                // do nothing
+                            }
+                        }
+                    }
+                }
+
+                // Leaving the tracked loop/scope re-enables rewriting.
+                if nested_loop.as_ref() == Some(&e) {
+                    nested_loop = None;
+                }
+                if nested_scope.as_ref() == Some(&e) {
+                    nested_scope = None;
+                }
+            }
+        };
+    }
+}
+
+/// Builds the `return` expression that replaces an external-control-flow
+/// expression inside the extracted function, per the flow handler:
+/// `return ControlFlow::Break(())` for plain break-style flow,
+/// `return Some(value)` / `return None` / `return Err(value)` for the
+/// option/result handlers. Returns `None` when no rewriting is needed
+/// (`FlowHandler::None` and `Try`, where the original expression is kept).
+fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Option<ast::Expr> {
+    let value = match handler {
+        FlowHandler::None | FlowHandler::Try { .. } => return None,
+        FlowHandler::If { .. } => make::expr_call(
+            make::expr_path(make::path_from_text("ControlFlow::Break")),
+            make::arg_list(iter::once(make::expr_unit())),
+        ),
+        FlowHandler::IfOption { .. } => {
+            // `break`/`return` without a value carries `()` as the payload.
+            let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
+            let args = make::arg_list(iter::once(expr));
+            make::expr_call(make::expr_path(make::ext::ident_path("Some")), args)
+        }
+        FlowHandler::MatchOption { .. } => make::expr_path(make::ext::ident_path("None")),
+        FlowHandler::MatchResult { .. } => {
+            let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
+            let args = make::arg_list(iter::once(expr));
+            make::expr_call(make::expr_path(make::ext::ident_path("Err")), args)
+        }
+    };
+    // `clone_for_update` yields a mutable tree suitable for `ted::replace`.
+    Some(make::expr_return(Some(value)).clone_for_update())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn no_args_from_binary_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ foo($01 + 1$0);
+}
+"#,
+ r#"
+fn foo() {
+ foo(fun_name());
+}
+
+fn $0fun_name() -> i32 {
+ 1 + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_binary_expr_in_module() {
+ check_assist(
+ extract_function,
+ r#"
+mod bar {
+ fn foo() {
+ foo($01 + 1$0);
+ }
+}
+"#,
+ r#"
+mod bar {
+ fn foo() {
+ foo(fun_name());
+ }
+
+ fn $0fun_name() -> i32 {
+ 1 + 1
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_binary_expr_indented() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0{ 1 + 1 }$0;
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ 1 + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_stmt_with_last_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ let k = 1;
+ $0let m = 1;
+ m + 1$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let k = 1;
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ let m = 1;
+ m + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_stmt_unit() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let k = 3;
+ $0let m = 1;
+ let n = m + 1;$0
+ let g = 5;
+}
+"#,
+ r#"
+fn foo() {
+ let k = 3;
+ fun_name();
+ let g = 5;
+}
+
+fn $0fun_name() {
+ let m = 1;
+ let n = m + 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0if true { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ if true { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if_else() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0if true { 1 } else { 2 }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ if true { 1 } else { 2 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if_let_else() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0if let true = false { 1 } else { 2 }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ if let true = false { 1 } else { 2 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_match() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0match true {
+ true => 1,
+ false => 2,
+ }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ match true {
+ true => 1,
+ false => 2,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_while() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0while true { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ while true { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_for() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0for v in &[0, 1] { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ for v in &[0, 1] { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_loop_unit() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0loop {
+ let m = 1;
+ }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name()
+}
+
+fn $0fun_name() -> ! {
+ loop {
+ let m = 1;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_loop_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let v = $0loop {
+ let m = 1;
+ break m;
+ }$0;
+}
+"#,
+ r#"
+fn foo() {
+ let v = fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ loop {
+ let m = 1;
+ break m;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_match() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let v: i32 = $0match Some(1) {
+ Some(x) => x,
+ None => 0,
+ }$0;
+}
+"#,
+ r#"
+fn foo() {
+ let v: i32 = fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ match Some(1) {
+ Some(x) => x,
+ None => 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_partial_block_single_line() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut v = $0n * n;$0
+ v += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut v = fun_name(n);
+ v += 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let mut v = n * n;
+ v
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_partial_block() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let m = 2;
+ let n = 1;
+ let mut v = m $0* n;
+ let mut w = 3;$0
+ v += 1;
+ w += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let m = 2;
+ let n = 1;
+ let (mut v, mut w) = fun_name(m, n);
+ v += 1;
+ w += 1;
+}
+
+fn $0fun_name(m: i32, n: i32) -> (i32, i32) {
+ let mut v = m * n;
+ let mut w = 3;
+ (v, w)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn argument_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0n+2$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ n+2
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn argument_used_twice_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0n+n$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ n+n
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn two_arguments_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ let m = 3;
+ $0n+n*m$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ let m = 3;
+ fun_name(n, m)
+}
+
+fn $0fun_name(n: u32, m: u32) -> u32 {
+ n+n*m
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn argument_and_locals() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0let m = 1;
+ n + m$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ let m = 1;
+ n + m
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn in_comment_is_not_applicable() {
+ cov_mark::check!(extract_function_in_comment_is_not_applicable);
+ check_assist_not_applicable(extract_function, r"fn main() { 1 + /* $0comment$0 */ 1; }");
+ }
+
+ #[test]
+ fn part_of_expr_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $01$0 + 1;
+}
+"#,
+ r#"
+fn foo() {
+ fun_name() + 1;
+}
+
+fn $0fun_name() -> i32 {
+ 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0bar(1 + 1)$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ bar(1 + 1)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_from_nested() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => ($02 + 2$0, true)
+ _ => (0, false)
+ };
+}
+"#,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => (fun_name(), true)
+ _ => (0, false)
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_from_closure() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let lambda = |x: u32| $0x * 2$0;
+}
+"#,
+ r#"
+fn main() {
+ let lambda = |x: u32| fun_name(x);
+}
+
+fn $0fun_name(x: u32) -> u32 {
+ x * 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_return_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ $0return 2 + 2$0;
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ return fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_add_extra_whitespace() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+
+
+ $0return 2 + 2$0;
+}
+"#,
+ r#"
+fn foo() -> u32 {
+
+
+ return fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let result = loop {
+ $0break 2 + 2$0;
+ };
+}
+"#,
+ r#"
+fn main() {
+ let result = loop {
+ break fun_name();
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_cast() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let v = $00f32 as u32$0;
+}
+"#,
+ r#"
+fn main() {
+ let v = fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 0f32 as u32
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_not_applicable() {
+ check_assist_not_applicable(extract_function, r"fn foo() { $0return$0; } ");
+ }
+
+ #[test]
+ fn method_to_freestanding() {
+ check_assist(
+ extract_function,
+ r#"
+struct S;
+
+impl S {
+ fn foo(&self) -> i32 {
+ $01+1$0
+ }
+}
+"#,
+ r#"
+struct S;
+
+impl S {
+ fn foo(&self) -> i32 {
+ fun_name()
+ }
+}
+
+fn $0fun_name() -> i32 {
+ 1+1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_with_reference() {
+ check_assist(
+ extract_function,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&self) -> i32 {
+ $0self.f+self.f$0
+ }
+}
+"#,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&self) -> i32 {
+ self.fun_name()
+ }
+
+ fn $0fun_name(&self) -> i32 {
+ self.f+self.f
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_with_mut() {
+ check_assist(
+ extract_function,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&mut self) {
+ $0self.f += 1;$0
+ }
+}
+"#,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&mut self) {
+ self.fun_name();
+ }
+
+ fn $0fun_name(&mut self) {
+ self.f += 1;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn variable_defined_inside_and_used_after_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let k = n * n;$0
+ let m = k + 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let k = fun_name(n);
+ let m = k + 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let k = n * n;
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn variable_defined_inside_and_used_after_mutably_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let mut k = n * n;$0
+ k += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut k = fun_name(n);
+ k += 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let mut k = n * n;
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn two_variables_defined_inside_and_used_after_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let k = n * n;
+ let m = k + 2;$0
+ let h = k + m;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let (k, m) = fun_name(n);
+ let h = k + m;
+}
+
+fn $0fun_name(n: i32) -> (i32, i32) {
+ let k = n * n;
+ let m = k + 2;
+ (k, m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_variables_defined_inside_and_used_after_mutably_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let mut k = n * n;
+ let mut m = k + 2;
+ let mut o = m + 3;
+ o += 1;$0
+ k += o;
+ m = 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let (mut k, mut m, o) = fun_name(n);
+ k += o;
+ m = 1;
+}
+
+fn $0fun_name(n: i32) -> (i32, i32, i32) {
+ let mut k = n * n;
+ let mut m = k + 2;
+ let mut o = m + 3;
+ o += 1;
+ (k, m, o)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nontrivial_patterns_define_variables() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter(i32);
+fn foo() {
+ $0let Counter(n) = Counter(0);$0
+ let m = n;
+}
+"#,
+ r#"
+struct Counter(i32);
+fn foo() {
+ let n = fun_name();
+ let m = n;
+}
+
+fn $0fun_name() -> i32 {
+ let Counter(n) = Counter(0);
+ n
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_with_two_fields_pattern_define_variables() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter { n: i32, m: i32 };
+fn foo() {
+ $0let Counter { n, m: k } = Counter { n: 1, m: 2 };$0
+ let h = n + k;
+}
+"#,
+ r#"
+struct Counter { n: i32, m: i32 };
+fn foo() {
+ let (n, k) = fun_name();
+ let h = n + k;
+}
+
+fn $0fun_name() -> (i32, i32) {
+ let Counter { n, m: k } = Counter { n: 1, m: 2 };
+ (n, k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_var_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0n += 1;$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_field_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+struct C { n: i32 }
+fn foo() {
+ let mut c = C { n: 0 };
+ $0c.n += 1;$0
+ let m = c.n + 1;
+}
+"#,
+ r#"
+struct C { n: i32 }
+fn foo() {
+ let mut c = C { n: 0 };
+ fun_name(&mut c);
+ let m = c.n + 1;
+}
+
+fn $0fun_name(c: &mut C) {
+ c.n += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_nested_field_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+struct P { n: i32}
+struct C { p: P }
+fn foo() {
+ let mut c = C { p: P { n: 0 } };
+ let mut v = C { p: P { n: 0 } };
+ let u = C { p: P { n: 0 } };
+ $0c.p.n += u.p.n;
+ let r = &mut v.p.n;$0
+ let m = c.p.n + v.p.n + u.p.n;
+}
+"#,
+ r#"
+struct P { n: i32}
+struct C { p: P }
+fn foo() {
+ let mut c = C { p: P { n: 0 } };
+ let mut v = C { p: P { n: 0 } };
+ let u = C { p: P { n: 0 } };
+ fun_name(&mut c, &u, &mut v);
+ let m = c.p.n + v.p.n + u.p.n;
+}
+
+fn $0fun_name(c: &mut C, u: &C, v: &mut C) {
+ c.p.n += u.p.n;
+ let r = &mut v.p.n;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_many_usages_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n += n;
+ bar(n);
+ bar(n+1);
+ bar(n*n);
+ bar(&n);
+ n.inc();
+ let v = &mut n;
+ *v = v.succ();
+ n.succ();$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += *n;
+ bar(*n);
+ bar(*n+1);
+ bar(*n**n);
+ bar(&*n);
+ n.inc();
+ let v = n;
+ *v = v.succ();
+ n.succ();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_many_usages_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0{
+ n += n;
+ bar(n);
+ bar(n+1);
+ bar(n*n);
+ bar(&n);
+ n.inc();
+ let v = &mut n;
+ *v = v.succ();
+ n.succ();
+ }$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += *n;
+ bar(*n);
+ bar(*n+1);
+ bar(*n**n);
+ bar(&*n);
+ n.inc();
+ let v = n;
+ *v = v.succ();
+ n.succ();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_by_value() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0n += 1;$0
+}
+"#,
+ r"
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ n += 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn mut_param_because_of_mut_ref() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0let v = &mut n;
+ *v += 1;$0
+ let k = n;
+}
+"#,
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let k = n;
+}
+
+fn $0fun_name(n: &mut i32) {
+ let v = n;
+ *v += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_by_value_because_of_mut_ref() {
+ check_assist(
+ extract_function,
+ r"
+fn foo() {
+ let mut n = 1;
+ $0let v = &mut n;
+ *v += 1;$0
+}
+",
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ let v = &mut n;
+ *v += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_method_call() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn inc(&mut self);
+}
+impl I for i32 {
+ fn inc(&mut self) { *self += 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n.inc();$0
+}
+"#,
+ r#"
+trait I {
+ fn inc(&mut self);
+}
+impl I for i32 {
+ fn inc(&mut self) { *self += 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ n.inc();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shared_method_call() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn succ(&self);
+}
+impl I for i32 {
+ fn succ(&self) { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n.succ();$0
+}
+"#,
+ r"
+trait I {
+ fn succ(&self);
+}
+impl I for i32 {
+ fn succ(&self) { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(n: i32) {
+ n.succ();
+}
+",
+ );
+ }
+
+ #[test]
+ fn mut_method_call_with_other_receiver() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn inc(&mut self, n: i32);
+}
+impl I for i32 {
+ fn inc(&mut self, n: i32) { *self += n }
+}
+fn foo() {
+ let mut n = 1;
+ $0let mut m = 2;
+ m.inc(n);$0
+}
+"#,
+ r"
+trait I {
+ fn inc(&mut self, n: i32);
+}
+impl I for i32 {
+ fn inc(&mut self, n: i32) { *self += n }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(n: i32) {
+ let mut m = 2;
+ m.inc(n);
+}
+",
+ );
+ }
+
+ #[test]
+ fn non_copy_without_usages_after() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+}
+"#,
+ r"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(c);
+}
+
+fn $0fun_name(c: Counter) {
+ let n = c.0;
+}
+",
+ );
+ }
+
+ #[test]
+ fn non_copy_used_after() {
+ check_assist(
+ extract_function,
+ r"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+ let m = c.0;
+}
+",
+ r#"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(&c);
+ let m = c.0;
+}
+
+fn $0fun_name(c: &Counter) {
+ let n = c.0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn copy_used_after() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy
+fn foo() {
+ let n = 0;
+ $0let m = n;$0
+ let k = n;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 0;
+ fun_name(n);
+ let k = n;
+}
+
+fn $0fun_name(n: i32) {
+ let m = n;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn copy_custom_used_after() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy, derive
+#[derive(Clone, Copy)]
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+ let m = c.0;
+}
+"#,
+ r#"
+#[derive(Clone, Copy)]
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(c);
+ let m = c.0;
+}
+
+fn $0fun_name(c: Counter) {
+ let n = c.0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indented_stmts() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ if true {
+ loop {
+ $0let n = 1;
+ let m = 2;$0
+ }
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if true {
+ loop {
+ fun_name();
+ }
+ }
+}
+
+fn $0fun_name() {
+ let n = 1;
+ let m = 2;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indented_stmts_inside_mod() {
+ check_assist(
+ extract_function,
+ r#"
+mod bar {
+ fn foo() {
+ if true {
+ loop {
+ $0let n = 1;
+ let m = 2;$0
+ }
+ }
+ }
+}
+"#,
+ r#"
+mod bar {
+ fn foo() {
+ if true {
+ loop {
+ fun_name();
+ }
+ }
+ }
+
+ fn $0fun_name() {
+ let n = 1;
+ let m = 2;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;$0
+ let h = 1 + k;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let k = match fun_name(n) {
+ Some(value) => value,
+ None => break,
+ };
+ let h = 1 + k;
+ }
+}
+
+fn $0fun_name(n: i32) -> Option<i32> {
+ let m = n + 1;
+ return None;
+ let k = 2;
+ Some(k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_to_parent() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy, result
+fn foo() -> i64 {
+ let n = 1;
+ $0let m = n + 1;
+ return 1;
+ let k = 2;$0
+ (n + k) as i64
+}
+"#,
+ r#"
+fn foo() -> i64 {
+ let n = 1;
+ let k = match fun_name(n) {
+ Ok(value) => value,
+ Err(value) => return value,
+ };
+ (n + k) as i64
+}
+
+fn $0fun_name(n: i32) -> Result<i32, i64> {
+ let m = n + 1;
+ return Err(1);
+ let k = 2;
+ Ok(k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_and_continue() {
+ cov_mark::check!(external_control_flow_break_and_continue);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;
+ continue;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_and_break() {
+ cov_mark::check!(external_control_flow_return_and_bc);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;
+ return;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_with_if() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ loop {
+ let mut n = 1;
+ $0let m = n + 1;
+ break;
+ n += m;$0
+ let h = 1 + n;
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ loop {
+ let mut n = 1;
+ if let ControlFlow::Break(_) = fun_name(&mut n) {
+ break;
+ }
+ let h = 1 + n;
+ }
+}
+
+fn $0fun_name(n: &mut i32) -> ControlFlow<()> {
+ let m = *n + 1;
+ return ControlFlow::Break(());
+ *n += m;
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_nested() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ loop {
+ let mut n = 1;
+ $0let m = n + 1;
+ if m == 42 {
+ break;
+ }$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ loop {
+ let mut n = 1;
+ if let ControlFlow::Break(_) = fun_name(n) {
+ break;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name(n: i32) -> ControlFlow<()> {
+ let m = n + 1;
+ if m == 42 {
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_nested_labeled() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ 'bar: loop {
+ loop {
+ $0break 'bar;$0
+ }
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ 'bar: loop {
+ loop {
+ if let ControlFlow::Break(_) = fun_name() {
+ break 'bar;
+ }
+ }
+ }
+}
+
+fn $0fun_name() -> ControlFlow<()> {
+ return ControlFlow::Break(());
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn continue_loop_nested_labeled() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ 'bar: loop {
+ loop {
+ $0continue 'bar;$0
+ }
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ 'bar: loop {
+ loop {
+ if let ControlFlow::Break(_) = fun_name() {
+ continue 'bar;
+ }
+ }
+ }
+}
+
+fn $0fun_name() -> ControlFlow<()> {
+ return ControlFlow::Break(());
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_from_nested_loop() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;$0
+ let k = 1;
+ loop {
+ return;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Some(value) => value,
+ None => return,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ loop {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_from_nested_loop() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ loop {
+ break;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = fun_name();
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> i32 {
+ let k = 1;
+ loop {
+ break;
+ }
+ let m = k + 1;
+ m
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_from_nested_and_outer_loops() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ loop {
+ break;
+ }
+ if k == 42 {
+ break;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Some(value) => value,
+ None => break,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ loop {
+ break;
+ }
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_from_nested_fn() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ fn test() {
+ return;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = fun_name();
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> i32 {
+ let k = 1;
+ fn test() {
+ return;
+ }
+ let m = k + 1;
+ m
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ loop {
+ let n = 1;
+ if let Some(value) = fun_name() {
+ break value;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ if k == 42 {
+ return Some(3);
+ }
+ let m = k + 1;
+ None
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value_and_label() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ 'bar: loop {
+ let n = 1;
+ $0let k = 1;
+ if k == 42 {
+ break 'bar 4;
+ }
+ let m = k + 1;$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ 'bar: loop {
+ let n = 1;
+ if let Some(value) = fun_name() {
+ break 'bar value;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ if k == 42 {
+ return Some(4);
+ }
+ let m = k + 1;
+ None
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value_and_return() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i64 {
+ loop {
+ let n = 1;$0
+ let k = 1;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() -> i64 {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Ok(value) => value,
+ Err(value) => break value,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = 1;
+ if k == 42 {
+ return Err(3);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn bar() -> Option<i32> { None }
+fn foo() -> Option<()> {
+ let n = bar()?;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + m;
+ Some(())
+}
+"#,
+ r#"
+fn bar() -> Option<i32> { None }
+fn foo() -> Option<()> {
+ let n = bar()?;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Some(())
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = foo()?;
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option_unit() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ let n = 1;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + n;
+ Some(())
+}
+"#,
+ r#"
+fn foo() -> Option<()> {
+ let n = 1;
+ fun_name()?;
+ let h = 1 + n;
+ Some(())
+}
+
+fn $0fun_name() -> Option<()> {
+ let k = foo()?;
+ let m = k + 1;
+ Some(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_result() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Some(())
+}
+"#,
+ r#"
+fn foo() -> Option<()> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Some(())
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = foo()?;
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_result_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return Err(1);
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ if k == 42 {
+ return Err(1);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_and_break() {
+ cov_mark::check!(external_control_flow_try_and_bc);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ loop {
+ let n = Some(1);
+ $0let m = n? + 1;
+ break;
+ let k = 2;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+ Some(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_and_return_ok() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return Ok(1);
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ if k == 42 {
+ return Ok(1);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_usage_in_macro() {
+ check_assist(
+ extract_function,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+
+fn foo() {
+ let n = 1;
+ $0let k = n * m!(n);$0
+ let m = k + 1;
+}
+"#,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+
+fn foo() {
+ let n = 1;
+ let k = fun_name(n);
+ let m = k + 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let k = n * m!(n);
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+fn main() {
+ $0some_function().await;$0
+}
+
+async fn some_function() {
+
+}
+"#,
+ r#"
+fn main() {
+ fun_name().await;
+}
+
+async fn $0fun_name() {
+ some_function().await;
+}
+
+async fn some_function() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_and_result_not_producing_match_expr() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future, result
+async fn foo() -> Result<(), ()> {
+ $0async {}.await;
+ Err(())?$0
+}
+"#,
+ r#"
+async fn foo() -> Result<(), ()> {
+ fun_name().await?
+}
+
+async fn $0fun_name() -> Result<(), ()> {
+ async {}.await;
+ Err(())?
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_and_result_producing_match_expr() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+async fn foo() -> i32 {
+ loop {
+ let n = 1;$0
+ let k = async { 1 }.await;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+async fn foo() -> i32 {
+ loop {
+ let n = 1;
+ let m = match fun_name().await {
+ Ok(value) => value,
+ Err(value) => break value,
+ };
+ let h = 1 + m;
+ }
+}
+
+async fn $0fun_name() -> Result<i32, i32> {
+ let k = async { 1 }.await;
+ if k == 42 {
+ return Err(3);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_in_args() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+fn main() {
+ $0function_call("a", some_function().await);$0
+}
+
+async fn some_function() {
+
+}
+"#,
+ r#"
+fn main() {
+ fun_name().await;
+}
+
+async fn $0fun_name() {
+ function_call("a", some_function().await);
+}
+
+async fn some_function() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_extract_standalone_blocks() {
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn main() $0{}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_adds_comma_for_match_arm() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ match 6 {
+ 100 => $0{ 100 }$0
+ _ => 0,
+ };
+}
+"#,
+ r#"
+fn main() {
+ match 6 {
+ 100 => fun_name(),
+ _ => 0,
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 100
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ match 6 {
+ 100 => $0{ 100 }$0,
+ _ => 0,
+ };
+}
+"#,
+ r#"
+fn main() {
+ match 6 {
+ 100 => fun_name(),
+ _ => 0,
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 100
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_tear_comments_apart() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ /*$0*/
+ foo();
+ foo();
+ /*$0*/
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ /**/
+ foo();
+ foo();
+ /**/
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_tear_body_apart() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0foo();
+}$0
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_wrap_res_in_res() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ $0Result::<i32, i64>::Ok(0)?;
+ Ok(())$0
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ fun_name()?
+}
+
+fn $0fun_name() -> Result<(), i64> {
+ Result::<i32, i64>::Ok(0)?;
+ Ok(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_knows_const() {
+ check_assist(
+ extract_function,
+ r#"
+const fn foo() {
+ $0()$0
+}
+"#,
+ r#"
+const fn foo() {
+ fun_name();
+}
+
+const fn $0fun_name() {
+ ()
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+const FOO: () = {
+ $0()$0
+};
+"#,
+ r#"
+const FOO: () = {
+ fun_name();
+};
+
+const fn $0fun_name() {
+ ()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_move_outer_loop_vars() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut x = 5;
+ for _ in 0..10 {
+ $0x += 1;$0
+ }
+}
+"#,
+ r#"
+fn foo() {
+ let mut x = 5;
+ for _ in 0..10 {
+ fun_name(&mut x);
+ }
+}
+
+fn $0fun_name(x: &mut i32) {
+ *x += 1;
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ for _ in 0..10 {
+ let mut x = 5;
+ $0x += 1;$0
+ }
+}
+"#,
+ r#"
+fn foo() {
+ for _ in 0..10 {
+ let mut x = 5;
+ fun_name(x);
+ }
+}
+
+fn $0fun_name(mut x: i32) {
+ x += 1;
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let mut x = 5;
+ for _ in 0..10 {
+ $0x += 1;$0
+ }
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let mut x = 5;
+ for _ in 0..10 {
+ fun_name(&mut x);
+ }
+ }
+}
+
+fn $0fun_name(x: &mut i32) {
+ *x += 1;
+}
+"#,
+ );
+ }
+
+ // regression test for #9822
+ #[test]
+ fn extract_mut_ref_param_has_no_mut_binding_in_loop() {
+ check_assist(
+ extract_function,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self) {}
+}
+fn foo() {
+ let mut x = Foo;
+ while false {
+ let y = &mut x;
+ $0y.foo();$0
+ }
+ let z = x;
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self) {}
+}
+fn foo() {
+ let mut x = Foo;
+ while false {
+ let y = &mut x;
+ fun_name(y);
+ }
+ let z = x;
+}
+
+fn $0fun_name(y: &mut Foo) {
+ y.foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_macro_arg() {
+ check_assist(
+ extract_function,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+fn main() {
+ let bar = "bar";
+ $0m!(bar);$0
+}
+"#,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+fn main() {
+ let bar = "bar";
+ fun_name(bar);
+}
+
+fn $0fun_name(bar: &str) {
+ m!(bar);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unresolveable_types_default_to_placeholder() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let a = __unresolved;
+ let _ = $0{a}$0;
+}
+"#,
+ r#"
+fn foo() {
+ let a = __unresolved;
+ let _ = fun_name(a);
+}
+
+fn $0fun_name(a: _) -> _ {
+ a
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn reference_mutable_param_with_further_usages() {
+ check_assist(
+ extract_function,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ $0arg.field = 8;$0
+ // Simulating access after the extracted portion
+ arg.field = 16;
+}
+"#,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ fun_name(arg);
+ // Simulating access after the extracted portion
+ arg.field = 16;
+}
+
+fn $0fun_name(arg: &mut Foo) {
+ arg.field = 8;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn reference_mutable_param_without_further_usages() {
+ check_assist(
+ extract_function,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ $0arg.field = 8;$0
+}
+"#,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ fun_name(arg);
+}
+
+fn $0fun_name(arg: &mut Foo) {
+ arg.field = 8;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_at_start() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0// comment here!
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ // comment here!
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_in_between() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;$0
+ let a = 0;
+ // comment here!
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let a = 0;
+ // comment here!
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_at_end() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let x = 0;
+ // comment here!$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let x = 0;
+ // comment here!
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_indented() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let x = 0;
+ while(true) {
+ // comment here!
+ }$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let x = 0;
+ while(true) {
+ // comment here!
+ }
+}
+"#,
+ );
+ }
+
+ // FIXME: we do want to preserve whitespace
+ #[test]
+ fn extract_function_does_not_preserve_whitespace() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let a = 0;
+
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let a = 0;
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_long_form_comment() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0/* a comment */
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ /* a comment */
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn it_should_not_generate_duplicate_function_names() {
+ check_assist(
+ extract_function,
+ r#"
+fn fun_name() {
+ $0let x = 0;$0
+}
+"#,
+ r#"
+fn fun_name() {
+ fun_name1();
+}
+
+fn $0fun_name1() {
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn should_increment_suffix_until_it_finds_space() {
+ check_assist(
+ extract_function,
+ r#"
+fn fun_name1() {
+ let y = 0;
+}
+
+fn fun_name() {
+ $0let x = 0;$0
+}
+"#,
+ r#"
+fn fun_name1() {
+ let y = 0;
+}
+
+fn fun_name() {
+ fun_name2();
+}
+
+fn $0fun_name2() {
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_method_from_trait_impl() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct(i32);
+trait Trait {
+ fn bar(&self) -> i32;
+}
+
+impl Trait for Struct {
+ fn bar(&self) -> i32 {
+ $0self.0 + 2$0
+ }
+}
+"#,
+ r#"
+struct Struct(i32);
+trait Trait {
+ fn bar(&self) -> i32;
+}
+
+impl Trait for Struct {
+ fn bar(&self) -> i32 {
+ self.fun_name()
+ }
+}
+
+impl Struct {
+ fn $0fun_name(&self) -> i32 {
+ self.0 + 2
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closure_arguments() {
+ check_assist(
+ extract_function,
+ r#"
+fn parent(factor: i32) {
+ let v = &[1, 2, 3];
+
+ $0v.iter().map(|it| it * factor);$0
+}
+"#,
+ r#"
+fn parent(factor: i32) {
+ let v = &[1, 2, 3];
+
+ fun_name(v, factor);
+}
+
+fn $0fun_name(v: &[i32; 3], factor: i32) {
+ v.iter().map(|it| it * factor);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_generics() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug>(i: T) {
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug>(i: T) {
+ fun_name(i);
+}
+
+fn $0fun_name<T: Debug>(i: T) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_generics_from_body() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Default>() -> T {
+ $0T::default()$0
+}
+"#,
+ r#"
+fn func<T: Default>() -> T {
+ fun_name()
+}
+
+fn $0fun_name<T: Default>() -> T {
+ T::default()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filter_unused_generics() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug, U: Copy>(i: T, u: U) {
+ bar(u);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug, U: Copy>(i: T, u: U) {
+ bar(u);
+ fun_name(i);
+}
+
+fn $0fun_name<T: Debug>(i: T) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn empty_generic_param_list() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug>(t: T, i: u32) {
+ bar(t);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug>(t: T, i: u32) {
+ bar(t);
+ fun_name(i);
+}
+
+fn $0fun_name(i: u32) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_where_clause() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T>(i: T) where T: Debug {
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T>(i: T) where T: Debug {
+ fun_name(i);
+}
+
+fn $0fun_name<T>(i: T) where T: Debug {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filter_unused_where_clause() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T, U>(i: T, u: U) where T: Debug, U: Copy {
+ bar(u);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T, U>(i: T, u: U) where T: Debug, U: Copy {
+ bar(u);
+ fun_name(i);
+}
+
+fn $0fun_name<T>(i: T) where T: Debug {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_generics() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T: Into<i32>>(T);
+impl <T: Into<i32> + Copy> Struct<T> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T: Into<i32>>(T);
+impl <T: Into<i32> + Copy> Struct<T> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T: Into<i32> + Copy, V: Into<i32>>(t: T, v: V) -> i32 {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filters_unused_nested_generics() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T: Into<i32>, U: Debug>(T, U);
+impl <T: Into<i32> + Copy, U: Debug> Struct<T, U> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T: Into<i32>, U: Debug>(T, U);
+impl <T: Into<i32> + Copy, U: Debug> Struct<T, U> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T: Into<i32> + Copy, V: Into<i32>>(t: T, v: V) -> i32 {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_where_clauses() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T>(T) where T: Into<i32>;
+impl <T> Struct<T> where T: Into<i32> + Copy {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T>(T) where T: Into<i32>;
+impl <T> Struct<T> where T: Into<i32> + Copy {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T, V>(t: T, v: V) -> i32 where T: Into<i32> + Copy, V: Into<i32> {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filters_unused_nested_where_clauses() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T, U>(T, U) where T: Into<i32>, U: Debug;
+impl <T, U> Struct<T, U> where T: Into<i32> + Copy, U: Debug {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T, U>(T, U) where T: Into<i32>, U: Debug;
+impl <T, U> Struct<T, U> where T: Into<i32> + Copy, U: Debug {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T, V>(t: T, v: V) -> i32 where T: Into<i32> + Copy, V: Into<i32> {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
new file mode 100644
index 000000000..b3c4d306a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -0,0 +1,1770 @@
+use std::{
+ collections::{HashMap, HashSet},
+ iter,
+};
+
+use hir::{HasSource, ModuleSource};
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::FileId,
+ defs::{Definition, NameClass, NameRefClass},
+ search::{FileReference, SearchScope},
+};
+use stdx::format_to;
+use syntax::{
+ algo::find_node_at_range,
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, HasName, HasVisibility,
+ },
+ match_ast, ted, AstNode, SourceFile,
+ SyntaxKind::{self, WHITESPACE},
+ SyntaxNode, TextRange,
+};
+
+use crate::{AssistContext, Assists};
+
+use super::remove_unused_param::range_to_remove;
+
+// Assist: extract_module
+//
+// Extracts a selected region as separate module. All the references, visibility and imports are
+// resolved.
+//
+// ```
+// $0fn foo(name: i32) -> i32 {
+// name + 1
+// }$0
+//
+// fn bar(name: i32) -> i32 {
+// name + 2
+// }
+// ```
+// ->
+// ```
+// mod modname {
+// pub(crate) fn foo(name: i32) -> i32 {
+// name + 1
+// }
+// }
+//
+// fn bar(name: i32) -> i32 {
+// name + 2
+// }
+// ```
+pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if ctx.has_empty_selection() {
+ return None;
+ }
+
+ let node = ctx.covering_element();
+ let node = match node {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ //If the selection is inside an impl block, we need to place the new module outside the impl
+ //block, as impl blocks cannot contain modules
+
+ let mut impl_parent: Option<ast::Impl> = None;
+ let mut impl_child_count: usize = 0;
+ if let Some(parent_assoc_list) = node.parent() {
+ if let Some(parent_impl) = parent_assoc_list.parent() {
+ if let Some(impl_) = ast::Impl::cast(parent_impl) {
+ impl_child_count = parent_assoc_list.children().count();
+ impl_parent = Some(impl_);
+ }
+ }
+ }
+
+ let mut curr_parent_module: Option<ast::Module> = None;
+ if let Some(mod_syn_opt) = node.ancestors().find(|it| ast::Module::can_cast(it.kind())) {
+ curr_parent_module = ast::Module::cast(mod_syn_opt);
+ }
+
+ let mut module = extract_target(&node, ctx.selection_trimmed())?;
+ if module.body_items.is_empty() {
+ return None;
+ }
+
+ let old_item_indent = module.body_items[0].indent_level();
+
+ acc.add(
+ AssistId("extract_module", AssistKind::RefactorExtract),
+ "Extract Module",
+ module.text_range,
+ |builder| {
+ //This takes place in three steps:
+ //
+ //- Firstly, we will update the references (usages), e.g. converting a
+ // function call bar() to modname::bar(), and similarly for other items
+ //
+ //- Secondly, changing the visibility of each item inside the newly selected module,
+ // i.e. making a fn a() {} to pub(crate) fn a() {}
+ //
+ //- Thirdly, resolving all the imports; this includes removing paths from imports
+ // outside the module, shifting/cloning them inside new module, or shifting the imports, or making
+ // new import statements
+
+ //We gather item usages and record_fields together: record_fields are
+ //for change_visibility, and usages are for the first step mentioned above
+ let (usages_to_be_processed, record_fields) = module.get_usages_and_record_fields(ctx);
+
+ let import_paths_to_be_removed = module.resolve_imports(curr_parent_module, ctx);
+ module.change_visibility(record_fields);
+
+ let mut body_items: Vec<String> = Vec::new();
+ let mut items_to_be_processed: Vec<ast::Item> = module.body_items.clone();
+ let mut new_item_indent = old_item_indent + 1;
+
+ if impl_parent.is_some() {
+ new_item_indent = old_item_indent + 2;
+ } else {
+ items_to_be_processed = [module.use_items.clone(), items_to_be_processed].concat();
+ }
+
+ for item in items_to_be_processed {
+ let item = item.indent(IndentLevel(1));
+ let mut indented_item = String::new();
+ format_to!(indented_item, "{}{}", new_item_indent, item.to_string());
+ body_items.push(indented_item);
+ }
+
+ let mut body = body_items.join("\n\n");
+
+ if let Some(impl_) = &impl_parent {
+ let mut impl_body_def = String::new();
+
+ if let Some(self_ty) = impl_.self_ty() {
+ format_to!(
+ impl_body_def,
+ "{}impl {} {{\n{}\n{}}}",
+ old_item_indent + 1,
+ self_ty.to_string(),
+ body,
+ old_item_indent + 1
+ );
+
+ body = impl_body_def;
+
+ // Add the import for enum/struct corresponding to the given impl block
+ module.make_use_stmt_of_node_with_super(self_ty.syntax());
+ for item in module.use_items {
+ let mut indented_item = String::new();
+ format_to!(indented_item, "{}{}", old_item_indent + 1, item.to_string());
+ body = format!("{}\n\n{}", indented_item, body);
+ }
+ }
+ }
+
+ let mut module_def = String::new();
+
+ format_to!(module_def, "mod {} {{\n{}\n{}}}", module.name, body, old_item_indent);
+
+ let mut usages_to_be_updated_for_curr_file = vec![];
+ for usages_to_be_updated_for_file in usages_to_be_processed {
+ if usages_to_be_updated_for_file.0 == ctx.file_id() {
+ usages_to_be_updated_for_curr_file = usages_to_be_updated_for_file.1;
+ continue;
+ }
+ builder.edit_file(usages_to_be_updated_for_file.0);
+ for usage_to_be_processed in usages_to_be_updated_for_file.1 {
+ builder.replace(usage_to_be_processed.0, usage_to_be_processed.1)
+ }
+ }
+
+ builder.edit_file(ctx.file_id());
+ for usage_to_be_processed in usages_to_be_updated_for_curr_file {
+ builder.replace(usage_to_be_processed.0, usage_to_be_processed.1)
+ }
+
+ for import_path_text_range in import_paths_to_be_removed {
+ builder.delete(import_path_text_range);
+ }
+
+ if let Some(impl_) = impl_parent {
+ // Remove complete impl block if it has only one child (as such it will be empty
+ // after deleting that child)
+ let node_to_be_removed = if impl_child_count == 1 {
+ impl_.syntax()
+ } else {
+ //Remove selected node
+ &node
+ };
+
+ builder.delete(node_to_be_removed.text_range());
+ // Remove preceding indentation from node
+ if let Some(range) = indent_range_before_given_node(node_to_be_removed) {
+ builder.delete(range);
+ }
+
+ builder.insert(impl_.syntax().text_range().end(), format!("\n\n{}", module_def));
+ } else {
+ builder.replace(module.text_range, module_def)
+ }
+ },
+ )
+}
+
+#[derive(Debug)]
+struct Module {
+ text_range: TextRange, // the (trimmed) selection range the module is extracted from
+ name: &'static str, // placeholder name for the generated module ("modname")
+ /// All items except use items.
+ body_items: Vec<ast::Item>,
+ /// Use items are kept separately as they help when the selection is inside an impl block:
+ /// we can directly take these items and keep them outside the generated impl block, inside
+ /// the generated module.
+ use_items: Vec<ast::Item>,
+}
+
+fn extract_target(node: &SyntaxNode, selection_range: TextRange) -> Option<Module> { // gather selected items, partitioned into use items vs. body items
+ let selected_nodes = node
+ .children()
+ .filter(|node| selection_range.contains_range(node.text_range()))
+ .chain(iter::once(node.clone()));
+ let (use_items, body_items) = selected_nodes
+ .filter_map(ast::Item::cast)
+ .partition(|item| matches!(item, ast::Item::Use(..)));
+
+ Some(Module { text_range: selection_range, name: "modname", body_items, use_items })
+}
+
+impl Module {
+ fn get_usages_and_record_fields(
+ &self,
+ ctx: &AssistContext<'_>,
+ ) -> (HashMap<FileId, Vec<(TextRange, String)>>, Vec<SyntaxNode>) { // (usages grouped by file, ADT field nodes)
+ let mut adt_fields = Vec::new();
+ let mut refs: HashMap<FileId, Vec<(TextRange, String)>> = HashMap::new();
+
+ //Here impl is not included, as each item inside an impl is tied to the parent of the
+ //implementing block (a struct, enum, etc.). If the parent is in the selected module it
+ //will get updated by the ADT arm below; if it is not, no action is needed.
+ for item in &self.body_items {
+ match_ast! {
+ match (item.syntax()) {
+ ast::Adt(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::Adt(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+
+ //Enum fields are not allowed to explicitly specify pub; it is implied
+ match it {
+ ast::Adt::Struct(x) => {
+ if let Some(field_list) = x.field_list() {
+ match field_list {
+ ast::FieldList::RecordFieldList(record_field_list) => {
+ record_field_list.fields().for_each(|record_field| {
+ adt_fields.push(record_field.syntax().clone());
+ });
+ },
+ ast::FieldList::TupleFieldList(tuple_field_list) => {
+ tuple_field_list.fields().for_each(|tuple_field| {
+ adt_fields.push(tuple_field.syntax().clone());
+ });
+ },
+ }
+ }
+ },
+ ast::Adt::Union(x) => {
+ if let Some(record_field_list) = x.record_field_list() {
+ record_field_list.fields().for_each(|record_field| {
+ adt_fields.push(record_field.syntax().clone());
+ });
+ }
+ },
+ ast::Adt::Enum(_) => {},
+ }
+ }
+ },
+ ast::TypeAlias(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::TypeAlias(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+ }
+ },
+ ast::Const(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::Const(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+ }
+ },
+ ast::Static(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::Static(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+ }
+ },
+ ast::Fn(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::Function(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+ }
+ },
+ ast::Macro(it) => {
+ if let Some(nod) = ctx.sema.to_def(&it) {
+ self.expand_and_group_usages_file_wise(ctx, Definition::Macro(nod), &mut refs);
+ }
+ },
+ _ => (),
+ }
+ }
+ }
+
+ (refs, adt_fields)
+ }
+
+ fn expand_and_group_usages_file_wise(
+ &self,
+ ctx: &AssistContext<'_>,
+ node_def: Definition,
+ refs_in_files: &mut HashMap<FileId, Vec<(TextRange, String)>>,
+ ) { // collect rewritable usages of `node_def`, grouped by file, into `refs_in_files`
+ for (file_id, references) in node_def.usages(&ctx.sema).all() {
+ let source_file = ctx.sema.parse(file_id);
+ let usages_in_file = references
+ .into_iter()
+ .filter_map(|usage| self.get_usage_to_be_processed(&source_file, usage));
+ refs_in_files.entry(file_id).or_default().extend(usages_in_file);
+ }
+ }
+
+ fn get_usage_to_be_processed(
+ &self,
+ source_file: &SourceFile,
+ FileReference { range, name, .. }: FileReference,
+ ) -> Option<(TextRange, String)> { // Some((range, "modname::<name>")) for a usage outside the selection
+ let path: ast::Path = find_node_at_range(source_file.syntax(), range)?;
+
+ for desc in path.syntax().descendants() {
+ if desc.to_string() == name.syntax().to_string()
+ && !self.text_range.contains_range(desc.text_range())
+ {
+ if let Some(name_ref) = ast::NameRef::cast(desc) {
+ return Some((
+ name_ref.syntax().text_range(),
+ format!("{}::{}", self.name, name_ref),
+ ));
+ }
+ }
+ }
+
+ None
+ }
+
+ fn change_visibility(&mut self, record_fields: Vec<SyntaxNode>) { // give selected items (and the matching record fields) a visibility via add_change_vis
+ let (mut replacements, record_field_parents, impls) =
+ get_replacements_for_visibilty_change(&mut self.body_items, false);
+
+ let mut impl_items: Vec<ast::Item> = impls
+ .into_iter()
+ .flat_map(|impl_| impl_.syntax().descendants())
+ .filter_map(ast::Item::cast)
+ .collect();
+
+ let (mut impl_item_replacements, _, _) =
+ get_replacements_for_visibilty_change(&mut impl_items, true);
+
+ replacements.append(&mut impl_item_replacements);
+
+ for (_, field_owner) in record_field_parents {
+ for desc in field_owner.descendants().filter_map(ast::RecordField::cast) {
+ let is_record_field_present =
+ record_fields.clone().into_iter().any(|x| x.to_string() == desc.to_string());
+ if is_record_field_present {
+ replacements.push((desc.visibility(), desc.syntax().clone()));
+ }
+ }
+ }
+
+ for (vis, syntax) in replacements {
+ let item = syntax.children_with_tokens().find(|node_or_token| {
+ match node_or_token.kind() {
+ // We're skipping comments, doc comments, and attribute macros that may precede the keyword
+ // that the visibility should be placed before.
+ SyntaxKind::COMMENT | SyntaxKind::ATTR | SyntaxKind::WHITESPACE => false,
+ _ => true,
+ }
+ });
+
+ add_change_vis(vis, item);
+ }
+ }
+
+ fn resolve_imports(
+ &mut self,
+ curr_parent_module: Option<ast::Module>,
+ ctx: &AssistContext<'_>,
+ ) -> Vec<TextRange> { // ranges of import paths to delete from the original module
+ let mut import_paths_to_be_removed: Vec<TextRange> = vec![];
+ let mut node_set: HashSet<String> = HashSet::new();
+
+ for item in self.body_items.clone() {
+ for x in item.syntax().descendants() {
+ if let Some(name) = ast::Name::cast(x.clone()) {
+ if let Some(name_classify) = NameClass::classify(&ctx.sema, &name) {
+ //Necessary to avoid processing the same name twice
+ if !node_set.contains(&name.syntax().to_string()) {
+ node_set.insert(name.syntax().to_string());
+ let def_opt: Option<Definition> = match name_classify {
+ NameClass::Definition(def) => Some(def),
+ _ => None,
+ };
+
+ if let Some(def) = def_opt {
+ if let Some(import_path) = self
+ .process_names_and_namerefs_for_import_resolve(
+ def,
+ name.syntax(),
+ &curr_parent_module,
+ ctx,
+ )
+ {
+ check_intersection_and_push(
+ &mut import_paths_to_be_removed,
+ import_path,
+ );
+ }
+ }
+ }
+ }
+ }
+
+ if let Some(name_ref) = ast::NameRef::cast(x) {
+ if let Some(name_classify) = NameRefClass::classify(&ctx.sema, &name_ref) {
+ //Necessary to avoid processing the same name twice
+ if !node_set.contains(&name_ref.syntax().to_string()) {
+ node_set.insert(name_ref.syntax().to_string());
+ let def_opt: Option<Definition> = match name_classify {
+ NameRefClass::Definition(def) => Some(def),
+ _ => None,
+ };
+
+ if let Some(def) = def_opt {
+ if let Some(import_path) = self
+ .process_names_and_namerefs_for_import_resolve(
+ def,
+ name_ref.syntax(),
+ &curr_parent_module,
+ ctx,
+ )
+ {
+ check_intersection_and_push(
+ &mut import_paths_to_be_removed,
+ import_path,
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ import_paths_to_be_removed
+ }
+
+ fn process_names_and_namerefs_for_import_resolve(
+ &mut self,
+ def: Definition,
+ node_syntax: &SyntaxNode,
+ curr_parent_module: &Option<ast::Module>,
+ ctx: &AssistContext<'_>,
+ ) -> Option<TextRange> { // returns the outer import range to delete, if any
+ //We only need to search in the current file
+ let selection_range = ctx.selection_trimmed();
+ let curr_file_id = ctx.file_id();
+ let search_scope = SearchScope::single_file(curr_file_id);
+ let usage_res = def.usages(&ctx.sema).in_scope(search_scope).all();
+ let file = ctx.sema.parse(curr_file_id);
+
+ let mut exists_inside_sel = false;
+ let mut exists_outside_sel = false;
+ for (_, refs) in usage_res.iter() {
+ let mut non_use_nodes_itr = refs.iter().filter_map(|x| {
+ if find_node_at_range::<ast::Use>(file.syntax(), x.range).is_none() {
+ let path_opt = find_node_at_range::<ast::Path>(file.syntax(), x.range);
+ return path_opt;
+ }
+
+ None
+ });
+
+ if non_use_nodes_itr
+ .clone()
+ .any(|x| !selection_range.contains_range(x.syntax().text_range()))
+ {
+ exists_outside_sel = true;
+ }
+ if non_use_nodes_itr.any(|x| selection_range.contains_range(x.syntax().text_range())) {
+ exists_inside_sel = true;
+ }
+ }
+
+ let source_exists_outside_sel_in_same_mod = does_source_exists_outside_sel_in_same_mod(
+ def,
+ ctx,
+ curr_parent_module,
+ selection_range,
+ curr_file_id,
+ );
+
+ let use_stmt_opt: Option<ast::Use> = usage_res.into_iter().find_map(|(file_id, refs)| {
+ if file_id == curr_file_id {
+ refs.into_iter()
+ .rev()
+ .find_map(|fref| find_node_at_range(file.syntax(), fref.range))
+ } else {
+ None
+ }
+ });
+
+ let mut use_tree_str_opt: Option<Vec<ast::Path>> = None;
+ //Exists inside and outside selection
+ // - Use stmt for item is present -> get the use_tree_str and reconstruct the path in the
+ // new module
+ // - Use stmt for item is not present ->
+ //If it is not found, the definition is either ported inside the new module or it stays
+ //outside:
+ //- Def is inside: nothing to import
+ //- Def is outside: import it inside with super
+
+ //Exists inside selection but not outside -> check for the import of it in the original
+ //module, get the use_tree_str, reconstruct the use stmt in the new module
+
+ let mut import_path_to_be_removed: Option<TextRange> = None;
+ if exists_inside_sel && exists_outside_sel {
+ //Changes to be made only inside new module
+
+ //If use_stmt exists, find the use_tree_str, reconstruct it inside new module
+ //If not, insert a use stmt with super and the given nameref
+ if let Some((use_tree_str, _)) =
+ self.process_use_stmt_for_import_resolve(use_stmt_opt, node_syntax)
+ {
+ use_tree_str_opt = Some(use_tree_str);
+ } else if source_exists_outside_sel_in_same_mod {
+ //Considered only after use_stmt is not present
+ //source_exists_outside_sel_in_same_mod | exists_outside_sel(exists_inside_sel =
+ //true for all cases)
+ // false | false -> Do nothing
+ // false | true -> If source is in selection -> nothing to do, If source is outside
+ // mod -> use_stmt traversal
+ // true | false -> super import insertion
+ // true | true -> super import insertion
+ self.make_use_stmt_of_node_with_super(node_syntax);
+ }
+ } else if exists_inside_sel && !exists_outside_sel {
+ //Changes to be made inside new module, and remove import from outside
+
+ if let Some((mut use_tree_str, text_range_opt)) =
+ self.process_use_stmt_for_import_resolve(use_stmt_opt, node_syntax)
+ {
+ if let Some(text_range) = text_range_opt {
+ import_path_to_be_removed = Some(text_range);
+ }
+
+ if source_exists_outside_sel_in_same_mod {
+ if let Some(first_path_in_use_tree) = use_tree_str.last() {
+ let first_path_in_use_tree_str = first_path_in_use_tree.to_string();
+ if !first_path_in_use_tree_str.contains("super")
+ && !first_path_in_use_tree_str.contains("crate")
+ {
+ let super_path = make::ext::ident_path("super");
+ use_tree_str.push(super_path);
+ }
+ }
+ }
+
+ use_tree_str_opt = Some(use_tree_str);
+ } else if source_exists_outside_sel_in_same_mod {
+ self.make_use_stmt_of_node_with_super(node_syntax);
+ }
+ }
+
+ if let Some(use_tree_str) = use_tree_str_opt {
+ let mut use_tree_str = use_tree_str;
+ use_tree_str.reverse();
+
+ if !(!exists_outside_sel && exists_inside_sel && source_exists_outside_sel_in_same_mod)
+ {
+ if let Some(first_path_in_use_tree) = use_tree_str.first() {
+ let first_path_in_use_tree_str = first_path_in_use_tree.to_string();
+ if first_path_in_use_tree_str.contains("super") {
+ let super_path = make::ext::ident_path("super");
+ use_tree_str.insert(0, super_path)
+ }
+ }
+ }
+
+ let use_ =
+ make::use_(None, make::use_tree(make::join_paths(use_tree_str), None, None, false));
+ let item = ast::Item::from(use_);
+ self.use_items.insert(0, item);
+ }
+
+ import_path_to_be_removed
+ }
+
+ fn make_use_stmt_of_node_with_super(&mut self, node_syntax: &SyntaxNode) -> ast::Item { // prepends `use super::<node>;` to use_items and returns it
+ let super_path = make::ext::ident_path("super");
+ let node_path = make::ext::ident_path(&node_syntax.to_string());
+ let use_ = make::use_(
+ None,
+ make::use_tree(make::join_paths(vec![super_path, node_path]), None, None, false),
+ );
+
+ let item = ast::Item::from(use_);
+ self.use_items.insert(0, item.clone());
+ item
+ }
+
+ fn process_use_stmt_for_import_resolve(
+ &self,
+ use_stmt_opt: Option<ast::Use>,
+ node_syntax: &SyntaxNode,
+ ) -> Option<(Vec<ast::Path>, Option<TextRange>)> { // (use-tree paths for node, optional range to remove)
+ if let Some(use_stmt) = use_stmt_opt {
+ for desc in use_stmt.syntax().descendants() {
+ if let Some(path_seg) = ast::PathSegment::cast(desc) {
+ if path_seg.syntax().to_string() == node_syntax.to_string() {
+ let mut use_tree_str = vec![path_seg.parent_path()];
+ get_use_tree_paths_from_path(path_seg.parent_path(), &mut use_tree_str);
+ for ancs in path_seg.syntax().ancestors() {
+ //Here we are looking for a use_tree with the same string value as the
+ //node passed above, as the range_to_remove function looks for a comma
+ //and then includes it in the text range to remove it. But the comma
+ //only appears at the use_tree level
+ if let Some(use_tree) = ast::UseTree::cast(ancs) {
+ if use_tree.syntax().to_string() == node_syntax.to_string() {
+ return Some((
+ use_tree_str,
+ Some(range_to_remove(use_tree.syntax())),
+ ));
+ }
+ }
+ }
+
+ return Some((use_tree_str, None));
+ }
+ }
+ }
+ }
+
+ None
+ }
+}
+
+/// Record `import_path` as a range to delete, merging it into any
+/// already-recorded range that it intersects.
+///
+/// Text ranges received here for imports are extended to the
+/// next/previous comma, which can cause intersections among them;
+/// deleting intersecting ranges later can cause panics similar to
+/// the one reported in #11766. To mitigate that, we look for an
+/// intersection with every current member and, if one exists,
+/// combine both text ranges into one covering range.
+fn check_intersection_and_push(
+ import_paths_to_be_removed: &mut Vec<TextRange>,
+ import_path: TextRange,
+) {
+ // `position` over an empty vec simply yields `None`, so the original
+ // `len() > 0` guard was redundant (clippy: `len_zero`).
+ let intersecting = import_paths_to_be_removed
+ .iter()
+ .position(|it| it.intersect(import_path).is_some());
+ match intersecting {
+ Some(i) => {
+ import_paths_to_be_removed[i] = import_paths_to_be_removed[i].cover(import_path)
+ }
+ None => import_paths_to_be_removed.push(import_path),
+ }
+}
+
+fn does_source_exists_outside_sel_in_same_mod(
+ def: Definition,
+ ctx: &AssistContext<'_>,
+ curr_parent_module: &Option<ast::Module>,
+ selection_range: TextRange,
+ curr_file_id: FileId,
+) -> bool { // true when `def`'s source lives in the same module/file but outside the selection
+ let mut source_exists_outside_sel_in_same_mod = false;
+ match def {
+ Definition::Module(x) => { // modules compare against the selection's parent module (or file)
+ let source = x.definition_source(ctx.db());
+ let have_same_parent;
+ if let Some(ast_module) = &curr_parent_module {
+ if let Some(hir_module) = x.parent(ctx.db()) {
+ have_same_parent =
+ compare_hir_and_ast_module(ast_module, hir_module, ctx).is_some();
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ have_same_parent = source_file_id == curr_file_id;
+ }
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ have_same_parent = source_file_id == curr_file_id;
+ }
+
+ if have_same_parent {
+ match source.value {
+ ModuleSource::Module(module_) => {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(module_.syntax().text_range());
+ }
+ _ => {}
+ }
+ }
+ }
+ Definition::Function(x) => { // NOTE(review): the arms below are structurally identical; a generic helper would remove the duplication
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Adt(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Variant(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Const(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Static(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Trait(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::TypeAlias(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ _ => {}
+ }
+
+ source_exists_outside_sel_in_same_mod
+}
+
+fn get_replacements_for_visibilty_change(
+ items: &mut [ast::Item],
+ is_clone_for_updated: bool,
+) -> (
+ Vec<(Option<ast::Visibility>, SyntaxNode)>, // items whose visibility should change
+ Vec<(Option<ast::Visibility>, SyntaxNode)>, // struct/union parents of record fields
+ Vec<ast::Impl>, // impls, whose assoc items the caller handles separately
+) {
+ let mut replacements = Vec::new();
+ let mut record_field_parents = Vec::new();
+ let mut impls = Vec::new();
+
+ for item in items {
+ if !is_clone_for_updated {
+ *item = item.clone_for_update();
+ }
+ //Use stmts are ignored
+ match item {
+ ast::Item::Const(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Enum(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::ExternCrate(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Fn(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ //Associated item's visibility should not be changed
+ ast::Item::Impl(it) if it.for_token().is_none() => impls.push(it.clone()),
+ ast::Item::MacroDef(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Module(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Static(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Struct(it) => {
+ replacements.push((it.visibility(), it.syntax().clone()));
+ record_field_parents.push((it.visibility(), it.syntax().clone()));
+ }
+ ast::Item::Trait(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::TypeAlias(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Union(it) => {
+ replacements.push((it.visibility(), it.syntax().clone()));
+ record_field_parents.push((it.visibility(), it.syntax().clone()));
+ }
+ _ => (),
+ }
+ }
+
+ (replacements, record_field_parents, impls)
+}
+
+fn get_use_tree_paths_from_path(
+ path: ast::Path,
+ use_tree_str: &mut Vec<ast::Path>,
+) -> Option<&mut Vec<ast::Path>> { // recursively collects the paths of enclosing use-trees, innermost first
+ path.syntax().ancestors().filter(|x| x.to_string() != path.to_string()).find_map(|x| {
+ if let Some(use_tree) = ast::UseTree::cast(x) {
+ if let Some(upper_tree_path) = use_tree.path() {
+ if upper_tree_path.to_string() != path.to_string() {
+ use_tree_str.push(upper_tree_path.clone());
+ get_use_tree_paths_from_path(upper_tree_path, use_tree_str);
+ return Some(use_tree);
+ }
+ }
+ }
+ None
+ })?;
+
+ Some(use_tree_str)
+}
+
+fn add_change_vis(vis: Option<ast::Visibility>, node_or_token_opt: Option<syntax::SyntaxElement>) { // inserts `pub(crate)` before the element when no visibility exists yet
+ if vis.is_none() {
+ if let Some(node_or_token) = node_or_token_opt {
+ let pub_crate_vis = make::visibility_pub_crate().clone_for_update();
+ ted::insert(ted::Position::before(node_or_token), pub_crate_vis.syntax());
+ }
+ }
+}
+
+fn compare_hir_and_ast_module(
+ ast_module: &ast::Module,
+ hir_module: hir::Module,
+ ctx: &AssistContext<'_>,
+) -> Option<()> { // Some(()) iff both modules carry the same name; None otherwise (or if a name is missing)
+ let hir_mod_name = hir_module.name(ctx.db())?;
+ let ast_mod_name = ast_module.name()?;
+ if hir_mod_name.to_string() != ast_mod_name.to_string() {
+ return None;
+ }
+
+ Some(())
+}
+
+fn indent_range_before_given_node(node: &SyntaxNode) -> Option<TextRange> { // range of the nearest preceding whitespace sibling token, if any
+ node.siblings_with_tokens(syntax::Direction::Prev)
+ .find(|x| x.kind() == WHITESPACE)
+ .map(|x| x.text_range())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_not_applicable_without_selection() { // a bare cursor ($0) is not a selection
+ check_assist_not_applicable(
+ extract_module,
+ r"
+$0pub struct PublicStruct {
+ field: i32,
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module() { // items, visibility, references and imports are all rewritten
+ check_assist(
+ extract_module,
+ r"
+ mod thirdpartycrate {
+ pub mod nest {
+ pub struct SomeType;
+ pub struct SomeType2;
+ }
+ pub struct SomeType1;
+ }
+
+ mod bar {
+ use crate::thirdpartycrate::{nest::{SomeType, SomeType2}, SomeType1};
+
+ pub struct PublicStruct {
+ field: PrivateStruct,
+ field1: SomeType1,
+ }
+
+ impl PublicStruct {
+ pub fn new() -> Self {
+ Self { field: PrivateStruct::new(), field1: SomeType1 }
+ }
+ }
+
+ fn foo() {
+ let _s = PrivateStruct::new();
+ let _a = bar();
+ }
+
+$0struct PrivateStruct {
+ inner: SomeType,
+}
+
+pub struct PrivateStruct1 {
+ pub inner: i32,
+}
+
+impl PrivateStruct {
+ fn new() -> Self {
+ PrivateStruct { inner: SomeType }
+ }
+}
+
+fn bar() -> i32 {
+ 2
+}$0
+ }
+ ",
+ r"
+ mod thirdpartycrate {
+ pub mod nest {
+ pub struct SomeType;
+ pub struct SomeType2;
+ }
+ pub struct SomeType1;
+ }
+
+ mod bar {
+ use crate::thirdpartycrate::{nest::{SomeType2}, SomeType1};
+
+ pub struct PublicStruct {
+ field: modname::PrivateStruct,
+ field1: SomeType1,
+ }
+
+ impl PublicStruct {
+ pub fn new() -> Self {
+ Self { field: modname::PrivateStruct::new(), field1: SomeType1 }
+ }
+ }
+
+ fn foo() {
+ let _s = modname::PrivateStruct::new();
+ let _a = modname::bar();
+ }
+
+mod modname {
+ use crate::thirdpartycrate::nest::SomeType;
+
+ pub(crate) struct PrivateStruct {
+ pub(crate) inner: SomeType,
+ }
+
+ pub struct PrivateStruct1 {
+ pub inner: i32,
+ }
+
+ impl PrivateStruct {
+ pub(crate) fn new() -> Self {
+ PrivateStruct { inner: SomeType }
+ }
+ }
+
+ pub(crate) fn bar() -> i32 {
+ 2
+ }
+}
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn test_extract_module_for_function_only() { // extracted fn becomes pub(crate) inside the new module
+ check_assist(
+ extract_module,
+ r"
+$0fn foo(name: i32) -> i32 {
+ name + 1
+}$0
+
+ fn bar(name: i32) -> i32 {
+ name + 2
+ }
+ ",
+ r"
+mod modname {
+ pub(crate) fn foo(name: i32) -> i32 {
+ name + 1
+ }
+}
+
+ fn bar(name: i32) -> i32 {
+ name + 2
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_for_impl_having_corresponding_adt_in_selection() { // ADT and its impl move into the module together
+ check_assist(
+ extract_module,
+ r"
+ mod impl_play {
+$0struct A {}
+
+impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+}$0
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ r"
+ mod impl_play {
+mod modname {
+ pub(crate) struct A {}
+
+ impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+ }
+}
+
+ fn a() {
+ let _a = modname::A::new_a();
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_import_resolve_when_its_only_inside_selection() { // the moved import is removed from the outer use-tree
+ check_assist(
+ extract_module,
+ r"
+ mod foo {
+ pub struct PrivateStruct;
+ pub struct PrivateStruct1;
+ }
+
+ mod bar {
+ use super::foo::{PrivateStruct, PrivateStruct1};
+
+$0struct Strukt {
+ field: PrivateStruct,
+}$0
+
+ struct Strukt1 {
+ field: PrivateStruct1,
+ }
+ }
+ ",
+ r"
+ mod foo {
+ pub struct PrivateStruct;
+ pub struct PrivateStruct1;
+ }
+
+ mod bar {
+ use super::foo::{PrivateStruct1};
+
+mod modname {
+ use super::super::foo::PrivateStruct;
+
+ pub(crate) struct Strukt {
+ pub(crate) field: PrivateStruct,
+ }
+}
+
+ struct Strukt1 {
+ field: PrivateStruct1,
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_import_resolve_when_its_inside_and_outside_selection_and_source_not_in_same_mod() { // import is cloned into the module; the outer import stays
+ check_assist(
+ extract_module,
+ r"
+ mod foo {
+ pub struct PrivateStruct;
+ }
+
+ mod bar {
+ use super::foo::PrivateStruct;
+
+$0struct Strukt {
+ field: PrivateStruct,
+}$0
+
+ struct Strukt1 {
+ field: PrivateStruct,
+ }
+ }
+ ",
+ r"
+ mod foo {
+ pub struct PrivateStruct;
+ }
+
+ mod bar {
+ use super::foo::PrivateStruct;
+
+mod modname {
+ use super::super::foo::PrivateStruct;
+
+ pub(crate) struct Strukt {
+ pub(crate) field: PrivateStruct,
+ }
+}
+
+ struct Strukt1 {
+ field: PrivateStruct,
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_import_resolve_when_its_inside_and_outside_selection_and_source_is_in_same_mod() {
+ check_assist(
+ extract_module,
+ r"
+ mod bar {
+ pub struct PrivateStruct;
+
+$0struct Strukt {
+ field: PrivateStruct,
+}$0
+
+ struct Strukt1 {
+ field: PrivateStruct,
+ }
+ }
+ ",
+ r"
+ mod bar {
+ pub struct PrivateStruct;
+
+mod modname {
+ use super::PrivateStruct;
+
+ pub(crate) struct Strukt {
+ pub(crate) field: PrivateStruct,
+ }
+}
+
+ struct Strukt1 {
+ field: PrivateStruct,
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_for_correspoding_adt_of_impl_present_in_same_mod_but_not_in_selection() {
+ check_assist(
+ extract_module,
+ r"
+ mod impl_play {
+ struct A {}
+
+$0impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+}$0
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ r"
+ mod impl_play {
+ struct A {}
+
+mod modname {
+ use super::A;
+
+ impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+ }
+}
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_for_impl_not_having_corresponding_adt_in_selection_and_not_in_same_mod_but_with_super(
+ ) {
+ check_assist(
+ extract_module,
+ r"
+ mod foo {
+ pub struct A {}
+ }
+ mod impl_play {
+ use super::foo::A;
+
+$0impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+}$0
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ r"
+ mod foo {
+ pub struct A {}
+ }
+ mod impl_play {
+ use super::foo::A;
+
+mod modname {
+ use super::super::foo::A;
+
+ impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+ }
+}
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_import_resolve_for_trait_bounds_on_function() {
+ check_assist(
+ extract_module,
+ r"
+ mod impl_play2 {
+ trait JustATrait {}
+
+$0struct A {}
+
+fn foo<T: JustATrait>(arg: T) -> T {
+ arg
+}
+
+impl JustATrait for A {}
+
+fn bar() {
+ let a = A {};
+ foo(a);
+}$0
+ }
+ ",
+ r"
+ mod impl_play2 {
+ trait JustATrait {}
+
+mod modname {
+ use super::JustATrait;
+
+ pub(crate) struct A {}
+
+ pub(crate) fn foo<T: JustATrait>(arg: T) -> T {
+ arg
+ }
+
+ impl JustATrait for A {}
+
+ pub(crate) fn bar() {
+ let a = A {};
+ foo(a);
+ }
+}
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_for_module() {
+ check_assist(
+ extract_module,
+ r"
+ mod impl_play2 {
+$0mod impl_play {
+ pub struct A {}
+}$0
+ }
+ ",
+ r"
+ mod impl_play2 {
+mod modname {
+ pub(crate) mod impl_play {
+ pub struct A {}
+ }
+}
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_with_multiple_files() {
+ check_assist(
+ extract_module,
+ r"
+ //- /main.rs
+ mod foo;
+
+ use foo::PrivateStruct;
+
+ pub struct Strukt {
+ field: PrivateStruct,
+ }
+
+ fn main() {
+ $0struct Strukt1 {
+ field: Strukt,
+ }$0
+ }
+ //- /foo.rs
+ pub struct PrivateStruct;
+ ",
+ r"
+ mod foo;
+
+ use foo::PrivateStruct;
+
+ pub struct Strukt {
+ field: PrivateStruct,
+ }
+
+ fn main() {
+ mod modname {
+ use super::Strukt;
+
+ pub(crate) struct Strukt1 {
+ pub(crate) field: Strukt,
+ }
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_macro_rules() {
+ check_assist(
+ extract_module,
+ r"
+$0macro_rules! m {
+ () => {};
+}$0
+m! {}
+ ",
+ r"
+mod modname {
+ macro_rules! m {
+ () => {};
+ }
+}
+modname::m! {}
+ ",
+ );
+ }
+
+ #[test]
+ fn test_do_not_apply_visibility_modifier_to_trait_impl_items() {
+ check_assist(
+ extract_module,
+ r"
+ trait ATrait {
+ fn function();
+ }
+
+ struct A {}
+
+$0impl ATrait for A {
+ fn function() {}
+}$0
+ ",
+ r"
+ trait ATrait {
+ fn function();
+ }
+
+ struct A {}
+
+mod modname {
+ use super::A;
+
+ use super::ATrait;
+
+ impl ATrait for A {
+ fn function() {}
+ }
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn test_if_inside_impl_block_generate_module_outside() {
+ check_assist(
+ extract_module,
+ r"
+ struct A {}
+
+ impl A {
+$0fn foo() {}$0
+ fn bar() {}
+ }
+ ",
+ r"
+ struct A {}
+
+ impl A {
+ fn bar() {}
+ }
+
+mod modname {
+ use super::A;
+
+ impl A {
+ pub(crate) fn foo() {}
+ }
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn test_if_inside_impl_block_generate_module_outside_but_impl_block_having_one_child() {
+ check_assist(
+ extract_module,
+ r"
+ struct A {}
+ struct B {}
+
+ impl A {
+$0fn foo(x: B) {}$0
+ }
+ ",
+ r"
+ struct A {}
+ struct B {}
+
+mod modname {
+ use super::B;
+
+ use super::A;
+
+ impl A {
+ pub(crate) fn foo(x: B) {}
+ }
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn test_issue_11766() {
+ //https://github.com/rust-lang/rust-analyzer/issues/11766
+ check_assist(
+ extract_module,
+ r"
+ mod x {
+ pub struct Foo;
+ pub struct Bar;
+ }
+
+ use x::{Bar, Foo};
+
+ $0type A = (Foo, Bar);$0
+ ",
+ r"
+ mod x {
+ pub struct Foo;
+ pub struct Bar;
+ }
+
+ use x::{};
+
+ mod modname {
+ use super::x::Bar;
+
+ use super::x::Foo;
+
+ pub(crate) type A = (Foo, Bar);
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_issue_12790() {
+ check_assist(
+ extract_module,
+ r"
+ $0/// A documented function
+ fn documented_fn() {}
+
+ // A commented function with a #[] attribute macro
+ #[cfg(test)]
+ fn attribute_fn() {}
+
+ // A normally commented function
+ fn normal_fn() {}
+
+ /// A documented Struct
+ struct DocumentedStruct {
+ // Normal field
+ x: i32,
+
+ /// Documented field
+ y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ z: i32,
+ }
+
+ // A macroed Struct
+ #[cfg(test)]
+ struct MacroedStruct {
+ // Normal field
+ x: i32,
+
+ /// Documented field
+ y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ z: i32,
+ }
+
+ // A normal Struct
+ struct NormalStruct {
+ // Normal field
+ x: i32,
+
+ /// Documented field
+ y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ z: i32,
+ }
+
+ /// A documented type
+ type DocumentedType = i32;
+
+ // A macroed type
+ #[cfg(test)]
+ type MacroedType = i32;
+
+ /// A module to move
+ mod module {}
+
+ /// An impl to move
+ impl NormalStruct {
+ /// A method
+ fn new() {}
+ }
+
+ /// A documented trait
+ trait DocTrait {
+ /// Inner function
+ fn doc() {}
+ }
+
+ /// An enum
+ enum DocumentedEnum {
+ /// A variant
+ A,
+ /// Another variant
+ B { x: i32, y: i32 }
+ }
+
+ /// Documented const
+ const MY_CONST: i32 = 0;$0
+ ",
+ r"
+ mod modname {
+ /// A documented function
+ pub(crate) fn documented_fn() {}
+
+ // A commented function with a #[] attribute macro
+ #[cfg(test)]
+ pub(crate) fn attribute_fn() {}
+
+ // A normally commented function
+ pub(crate) fn normal_fn() {}
+
+ /// A documented Struct
+ pub(crate) struct DocumentedStruct {
+ // Normal field
+ pub(crate) x: i32,
+
+ /// Documented field
+ pub(crate) y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ pub(crate) z: i32,
+ }
+
+ // A macroed Struct
+ #[cfg(test)]
+ pub(crate) struct MacroedStruct {
+ // Normal field
+ pub(crate) x: i32,
+
+ /// Documented field
+ pub(crate) y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ pub(crate) z: i32,
+ }
+
+ // A normal Struct
+ pub(crate) struct NormalStruct {
+ // Normal field
+ pub(crate) x: i32,
+
+ /// Documented field
+ pub(crate) y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ pub(crate) z: i32,
+ }
+
+ /// A documented type
+ pub(crate) type DocumentedType = i32;
+
+ // A macroed type
+ #[cfg(test)]
+ pub(crate) type MacroedType = i32;
+
+ /// A module to move
+ pub(crate) mod module {}
+
+ /// An impl to move
+ impl NormalStruct {
+ /// A method
+ pub(crate) fn new() {}
+ }
+
+ /// A documented trait
+ pub(crate) trait DocTrait {
+ /// Inner function
+ fn doc() {}
+ }
+
+ /// An enum
+ pub(crate) enum DocumentedEnum {
+ /// A variant
+ A,
+ /// Another variant
+ B { x: i32, y: i32 }
+ }
+
+ /// Documented const
+ pub(crate) const MY_CONST: i32 = 0;
+ }
+ ",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
new file mode 100644
index 000000000..a93648f2d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -0,0 +1,1076 @@
+use std::iter;
+
+use either::Either;
+use hir::{Module, ModuleDef, Name, Variant};
+use ide_db::{
+ defs::Definition,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope, InsertUseConfig},
+ search::FileReference,
+ FxHashSet, RootDatabase,
+};
+use itertools::{Itertools, Position};
+use syntax::{
+ ast::{
+ self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams,
+ HasName, HasVisibility,
+ },
+ match_ast, ted, SyntaxElement,
+ SyntaxKind::*,
+ SyntaxNode, T,
+};
+
+use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: extract_struct_from_enum_variant
+//
+// Extracts a struct from enum variant.
+//
+// ```
+// enum A { $0One(u32, u32) }
+// ```
+// ->
+// ```
+// struct One(u32, u32);
+//
+// enum A { One(One) }
+// ```
+pub(crate) fn extract_struct_from_enum_variant(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let field_list = extract_field_list_if_applicable(&variant)?;
+
+ let variant_name = variant.name()?;
+ let variant_hir = ctx.sema.to_def(&variant)?;
+ if existing_definition(ctx.db(), &variant_name, &variant_hir) {
+ cov_mark::hit!(test_extract_enum_not_applicable_if_struct_exists);
+ return None;
+ }
+
+ let enum_ast = variant.parent_enum();
+ let enum_hir = ctx.sema.to_def(&enum_ast)?;
+ let target = variant.syntax().text_range();
+ acc.add(
+ AssistId("extract_struct_from_enum_variant", AssistKind::RefactorRewrite),
+ "Extract struct from enum variant",
+ target,
+ |builder| {
+ let variant_hir_name = variant_hir.name(ctx.db());
+ let enum_module_def = ModuleDef::from(enum_hir);
+ let usages = Definition::Variant(variant_hir).usages(&ctx.sema).all();
+
+ let mut visited_modules_set = FxHashSet::default();
+ let current_module = enum_hir.module(ctx.db());
+ visited_modules_set.insert(current_module);
+ // record file references of the file the def resides in, we only want to swap to the edited file in the builder once
+ let mut def_file_references = None;
+ for (file_id, references) in usages {
+ if file_id == ctx.file_id() {
+ def_file_references = Some(references);
+ continue;
+ }
+ builder.edit_file(file_id);
+ let processed = process_references(
+ ctx,
+ builder,
+ &mut visited_modules_set,
+ &enum_module_def,
+ &variant_hir_name,
+ references,
+ );
+ processed.into_iter().for_each(|(path, node, import)| {
+ apply_references(ctx.config.insert_use, path, node, import)
+ });
+ }
+ builder.edit_file(ctx.file_id());
+
+ let variant = builder.make_mut(variant.clone());
+ if let Some(references) = def_file_references {
+ let processed = process_references(
+ ctx,
+ builder,
+ &mut visited_modules_set,
+ &enum_module_def,
+ &variant_hir_name,
+ references,
+ );
+ processed.into_iter().for_each(|(path, node, import)| {
+ apply_references(ctx.config.insert_use, path, node, import)
+ });
+ }
+
+ let indent = enum_ast.indent_level();
+ let generic_params = enum_ast
+ .generic_param_list()
+ .and_then(|known_generics| extract_generic_params(&known_generics, &field_list));
+ let generics = generic_params.as_ref().map(|generics| generics.clone_for_update());
+ let def =
+ create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast);
+ def.reindent_to(indent);
+
+ let start_offset = &variant.parent_enum().syntax().clone();
+ ted::insert_all_raw(
+ ted::Position::before(start_offset),
+ vec![
+ def.syntax().clone().into(),
+ make::tokens::whitespace(&format!("\n\n{}", indent)).into(),
+ ],
+ );
+
+ update_variant(&variant, generic_params.map(|g| g.clone_for_update()));
+ },
+ )
+}
+
+fn extract_field_list_if_applicable(
+ variant: &ast::Variant,
+) -> Option<Either<ast::RecordFieldList, ast::TupleFieldList>> {
+ match variant.kind() {
+ ast::StructKind::Record(field_list) if field_list.fields().next().is_some() => {
+ Some(Either::Left(field_list))
+ }
+ ast::StructKind::Tuple(field_list) if field_list.fields().count() > 1 => {
+ Some(Either::Right(field_list))
+ }
+ _ => None,
+ }
+}
+
+fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Variant) -> bool {
+ variant
+ .parent_enum(db)
+ .module(db)
+ .scope(db, None)
+ .into_iter()
+ .filter(|(_, def)| match def {
+ // only check type-namespace
+ hir::ScopeDef::ModuleDef(def) => matches!(
+ def,
+ ModuleDef::Module(_)
+ | ModuleDef::Adt(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Trait(_)
+ | ModuleDef::TypeAlias(_)
+ | ModuleDef::BuiltinType(_)
+ ),
+ _ => false,
+ })
+ .any(|(name, _)| name.to_string() == variant_name.to_string())
+}
+
+fn extract_generic_params(
+ known_generics: &ast::GenericParamList,
+ field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
+) -> Option<ast::GenericParamList> {
+ let mut generics = known_generics.generic_params().map(|param| (param, false)).collect_vec();
+
+ let tagged_one = match field_list {
+ Either::Left(field_list) => field_list
+ .fields()
+ .filter_map(|f| f.ty())
+ .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
+ Either::Right(field_list) => field_list
+ .fields()
+ .filter_map(|f| f.ty())
+ .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
+ };
+
+ let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param));
+ tagged_one.then(|| make::generic_param_list(generics))
+}
+
+fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, bool)]) -> bool {
+ let mut tagged_one = false;
+
+ for token in ty.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token) {
+ for (param, tag) in generics.iter_mut().filter(|(_, tag)| !tag) {
+ match param {
+ ast::GenericParam::LifetimeParam(lt)
+ if matches!(token.kind(), T![lifetime_ident]) =>
+ {
+ if let Some(lt) = lt.lifetime() {
+ if lt.text().as_str() == token.text() {
+ *tag = true;
+ tagged_one = true;
+ break;
+ }
+ }
+ }
+ param if matches!(token.kind(), T![ident]) => {
+ if match param {
+ ast::GenericParam::ConstParam(konst) => konst
+ .name()
+ .map(|name| name.text().as_str() == token.text())
+ .unwrap_or_default(),
+ ast::GenericParam::TypeParam(ty) => ty
+ .name()
+ .map(|name| name.text().as_str() == token.text())
+ .unwrap_or_default(),
+ ast::GenericParam::LifetimeParam(lt) => lt
+ .lifetime()
+ .map(|lt| lt.text().as_str() == token.text())
+ .unwrap_or_default(),
+ } {
+ *tag = true;
+ tagged_one = true;
+ break;
+ }
+ }
+ _ => (),
+ }
+ }
+ }
+
+ tagged_one
+}
+
+fn create_struct_def(
+ variant_name: ast::Name,
+ variant: &ast::Variant,
+ field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
+ generics: Option<ast::GenericParamList>,
+ enum_: &ast::Enum,
+) -> ast::Struct {
+ let enum_vis = enum_.visibility();
+
+ let insert_vis = |node: &'_ SyntaxNode, vis: &'_ SyntaxNode| {
+ let vis = vis.clone_for_update();
+ ted::insert(ted::Position::before(node), vis);
+ };
+
+ // for fields without any existing visibility, use visibility of enum
+ let field_list: ast::FieldList = match field_list {
+ Either::Left(field_list) => {
+ let field_list = field_list.clone_for_update();
+
+ if let Some(vis) = &enum_vis {
+ field_list
+ .fields()
+ .filter(|field| field.visibility().is_none())
+ .filter_map(|field| field.name())
+ .for_each(|it| insert_vis(it.syntax(), vis.syntax()));
+ }
+
+ field_list.into()
+ }
+ Either::Right(field_list) => {
+ let field_list = field_list.clone_for_update();
+
+ if let Some(vis) = &enum_vis {
+ field_list
+ .fields()
+ .filter(|field| field.visibility().is_none())
+ .filter_map(|field| field.ty())
+ .for_each(|it| insert_vis(it.syntax(), vis.syntax()));
+ }
+
+ field_list.into()
+ }
+ };
+
+ field_list.reindent_to(IndentLevel::single());
+
+ let strukt = make::struct_(enum_vis, variant_name, generics, field_list).clone_for_update();
+
+ // FIXME: Consider making this an actual function somewhere (like in `AttrsOwnerEdit`) after some deliberation
+ let attrs_and_docs = |node: &SyntaxNode| {
+ let mut select_next_ws = false;
+ node.children_with_tokens().filter(move |child| {
+ let accept = match child.kind() {
+ ATTR | COMMENT => {
+ select_next_ws = true;
+ return true;
+ }
+ WHITESPACE if select_next_ws => true,
+ _ => false,
+ };
+ select_next_ws = false;
+
+ accept
+ })
+ };
+
+ // copy attributes & comments from variant
+ let variant_attrs = attrs_and_docs(variant.syntax())
+ .map(|tok| match tok.kind() {
+ WHITESPACE => make::tokens::single_newline().into(),
+ _ => tok,
+ })
+ .collect();
+ ted::insert_all(ted::Position::first_child_of(strukt.syntax()), variant_attrs);
+
+ // copy attributes from enum
+ ted::insert_all(
+ ted::Position::first_child_of(strukt.syntax()),
+ enum_.attrs().map(|it| it.syntax().clone_for_update().into()).collect(),
+ );
+ strukt
+}
+
+fn update_variant(variant: &ast::Variant, generics: Option<ast::GenericParamList>) -> Option<()> {
+ let name = variant.name()?;
+ let ty = generics
+ .filter(|generics| generics.generic_params().count() > 0)
+ .map(|generics| {
+ let mut generic_str = String::with_capacity(8);
+
+ for (p, more) in generics.generic_params().with_position().map(|p| match p {
+ Position::First(p) | Position::Middle(p) => (p, true),
+ Position::Last(p) | Position::Only(p) => (p, false),
+ }) {
+ match p {
+ ast::GenericParam::ConstParam(konst) => {
+ if let Some(name) = konst.name() {
+ generic_str.push_str(name.text().as_str());
+ }
+ }
+ ast::GenericParam::LifetimeParam(lt) => {
+ if let Some(lt) = lt.lifetime() {
+ generic_str.push_str(lt.text().as_str());
+ }
+ }
+ ast::GenericParam::TypeParam(ty) => {
+ if let Some(name) = ty.name() {
+ generic_str.push_str(name.text().as_str());
+ }
+ }
+ }
+ if more {
+ generic_str.push_str(", ");
+ }
+ }
+
+ make::ty(&format!("{}<{}>", &name.text(), &generic_str))
+ })
+ .unwrap_or_else(|| make::ty(&name.text()));
+
+ let tuple_field = make::tuple_field(None, ty);
+ let replacement = make::variant(
+ name,
+ Some(ast::FieldList::TupleFieldList(make::tuple_field_list(iter::once(tuple_field)))),
+ )
+ .clone_for_update();
+ ted::replace(variant.syntax(), replacement.syntax());
+ Some(())
+}
+
+fn apply_references(
+ insert_use_cfg: InsertUseConfig,
+ segment: ast::PathSegment,
+ node: SyntaxNode,
+ import: Option<(ImportScope, hir::ModPath)>,
+) {
+ if let Some((scope, path)) = import {
+ insert_use(&scope, mod_path_to_ast(&path), &insert_use_cfg);
+ }
+ // deep clone to prevent cycle
+ let path = make::path_from_segments(iter::once(segment.clone_subtree()), false);
+ ted::insert_raw(ted::Position::before(segment.syntax()), path.clone_for_update().syntax());
+ ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
+ ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
+}
+
+fn process_references(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ visited_modules: &mut FxHashSet<Module>,
+ enum_module_def: &ModuleDef,
+ variant_hir_name: &Name,
+ refs: Vec<FileReference>,
+) -> Vec<(ast::PathSegment, SyntaxNode, Option<(ImportScope, hir::ModPath)>)> {
+ // we have to recollect here eagerly as we are about to edit the tree we need to calculate the changes
+ // and corresponding nodes up front
+ refs.into_iter()
+ .flat_map(|reference| {
+ let (segment, scope_node, module) = reference_to_node(&ctx.sema, reference)?;
+ let segment = builder.make_mut(segment);
+ let scope_node = builder.make_syntax_mut(scope_node);
+ if !visited_modules.contains(&module) {
+ let mod_path = module.find_use_path_prefixed(
+ ctx.sema.db,
+ *enum_module_def,
+ ctx.config.insert_use.prefix_kind,
+ );
+ if let Some(mut mod_path) = mod_path {
+ mod_path.pop_segment();
+ mod_path.push_segment(variant_hir_name.clone());
+ let scope = ImportScope::find_insert_use_container(&scope_node, &ctx.sema)?;
+ visited_modules.insert(module);
+ return Some((segment, scope_node, Some((scope, mod_path))));
+ }
+ }
+ Some((segment, scope_node, None))
+ })
+ .collect()
+}
+
+fn reference_to_node(
+ sema: &hir::Semantics<'_, RootDatabase>,
+ reference: FileReference,
+) -> Option<(ast::PathSegment, SyntaxNode, hir::Module)> {
+ let segment =
+ reference.name.as_name_ref()?.syntax().parent().and_then(ast::PathSegment::cast)?;
+ let parent = segment.parent_path().syntax().parent()?;
+ let expr_or_pat = match_ast! {
+ match parent {
+ ast::PathExpr(_it) => parent.parent()?,
+ ast::RecordExpr(_it) => parent,
+ ast::TupleStructPat(_it) => parent,
+ ast::RecordPat(_it) => parent,
+ _ => return None,
+ }
+ };
+ let module = sema.scope(&expr_or_pat)?.module();
+ Some((segment, expr_or_pat, module))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_extract_struct_several_fields_tuple() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One(u32, u32) }",
+ r#"struct One(u32, u32);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_several_fields_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One { foo: u32, bar: u32 } }",
+ r#"struct One{ foo: u32, bar: u32 }
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_one_field_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One { foo: u32 } }",
+ r#"struct One{ foo: u32 }
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_carries_over_generics() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r"enum En<T> { Var { a: T$0 } }",
+ r#"struct Var<T>{ a: T }
+
+enum En<T> { Var(Var<T>) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_carries_over_attributes() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"#[derive(Debug)]
+#[derive(Clone)]
+enum Enum { Variant{ field: u32$0 } }"#,
+ r#"#[derive(Debug)]#[derive(Clone)] struct Variant{ field: u32 }
+
+#[derive(Debug)]
+#[derive(Clone)]
+enum Enum { Variant(Variant) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_indent_to_parent_enum() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum Enum {
+ Variant {
+ field: u32$0
+ }
+}"#,
+ r#"
+struct Variant{
+ field: u32
+}
+
+enum Enum {
+ Variant(Variant)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_indent_to_parent_enum_in_mod() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+mod indenting {
+ enum Enum {
+ Variant {
+ field: u32$0
+ }
+ }
+}"#,
+ r#"
+mod indenting {
+ struct Variant{
+ field: u32
+ }
+
+ enum Enum {
+ Variant(Variant)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_one_field_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A {
+ $0One {
+ // leading comment
+ /// doc comment
+ #[an_attr]
+ foo: u32
+ // trailing comment
+ }
+}"#,
+ r#"
+struct One{
+ // leading comment
+ /// doc comment
+ #[an_attr]
+ foo: u32
+ // trailing comment
+}
+
+enum A {
+ One(One)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_several_fields_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A {
+ $0One {
+ // comment
+ /// doc
+ #[attr]
+ foo: u32,
+ // comment
+ #[attr]
+ /// doc
+ bar: u32
+ }
+}"#,
+ r#"
+struct One{
+ // comment
+ /// doc
+ #[attr]
+ foo: u32,
+ // comment
+ #[attr]
+ /// doc
+ bar: u32
+}
+
+enum A {
+ One(One)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_several_fields_tuple() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One(/* comment */ #[attr] u32, /* another */ u32 /* tail */) }",
+ r#"
+struct One(/* comment */ #[attr] u32, /* another */ u32 /* tail */);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_on_variant_struct() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A {
+ /* comment */
+ // other
+ /// comment
+ #[attr]
+ $0One {
+ a: u32
+ }
+}"#,
+ r#"
+/* comment */
+// other
+/// comment
+#[attr]
+struct One{
+ a: u32
+}
+
+enum A {
+ One(One)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_on_variant_tuple() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A {
+ /* comment */
+ // other
+ /// comment
+ #[attr]
+ $0One(u32, u32)
+}"#,
+ r#"
+/* comment */
+// other
+/// comment
+#[attr]
+struct One(u32, u32);
+
+enum A {
+ One(One)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_existing_visibility_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One{ a: u32, pub(crate) b: u32, pub(super) c: u32, d: u32 } }",
+ r#"
+struct One{ a: u32, pub(crate) b: u32, pub(super) c: u32, d: u32 }
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_existing_visibility_tuple() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One(u32, pub(crate) u32, pub(super) u32, u32) }",
+ r#"
+struct One(u32, pub(crate) u32, pub(super) u32, u32);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_enum_variant_name_value_namespace() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"const One: () = ();
+enum A { $0One(u32, u32) }"#,
+ r#"const One: () = ();
+struct One(u32, u32);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_no_visibility() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One(u32, u32) }",
+ r#"
+struct One(u32, u32);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_pub_visibility() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "pub enum A { $0One(u32, u32) }",
+ r#"
+pub struct One(pub u32, pub u32);
+
+pub enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_pub_in_mod_visibility() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "pub(in something) enum A { $0One{ a: u32, b: u32 } }",
+ r#"
+pub(in something) struct One{ pub(in something) a: u32, pub(in something) b: u32 }
+
+pub(in something) enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_pub_crate_visibility() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "pub(crate) enum A { $0One{ a: u32, b: u32, c: u32 } }",
+ r#"
+pub(crate) struct One{ pub(crate) a: u32, pub(crate) b: u32, pub(crate) c: u32 }
+
+pub(crate) enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_with_complex_imports() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"mod my_mod {
+ fn another_fn() {
+ let m = my_other_mod::MyEnum::MyField(1, 1);
+ }
+
+ pub mod my_other_mod {
+ fn another_fn() {
+ let m = MyEnum::MyField(1, 1);
+ }
+
+ pub enum MyEnum {
+ $0MyField(u8, u8),
+ }
+ }
+}
+
+fn another_fn() {
+ let m = my_mod::my_other_mod::MyEnum::MyField(1, 1);
+}"#,
+ r#"use my_mod::my_other_mod::MyField;
+
+mod my_mod {
+ use self::my_other_mod::MyField;
+
+ fn another_fn() {
+ let m = my_other_mod::MyEnum::MyField(MyField(1, 1));
+ }
+
+ pub mod my_other_mod {
+ fn another_fn() {
+ let m = MyEnum::MyField(MyField(1, 1));
+ }
+
+ pub struct MyField(pub u8, pub u8);
+
+ pub enum MyEnum {
+ MyField(MyField),
+ }
+ }
+}
+
+fn another_fn() {
+ let m = my_mod::my_other_mod::MyEnum::MyField(MyField(1, 1));
+}"#,
+ );
+ }
+
+ #[test]
+ fn extract_record_fix_references() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum E {
+ $0V { i: i32, j: i32 }
+}
+
+fn f() {
+ let E::V { i, j } = E::V { i: 9, j: 2 };
+}
+"#,
+ r#"
+struct V{ i: i32, j: i32 }
+
+enum E {
+ V(V)
+}
+
+fn f() {
+ let E::V(V { i, j }) = E::V(V { i: 9, j: 2 });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_record_fix_references2() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum E {
+ $0V(i32, i32)
+}
+
+fn f() {
+ let E::V(i, j) = E::V(9, 2);
+}
+"#,
+ r#"
+struct V(i32, i32);
+
+enum E {
+ V(V)
+}
+
+fn f() {
+ let E::V(V(i, j)) = E::V(V(9, 2));
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_several_files() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+//- /main.rs
+enum E {
+ $0V(i32, i32)
+}
+mod foo;
+
+//- /foo.rs
+use crate::E;
+fn f() {
+ let e = E::V(9, 2);
+}
+"#,
+ r#"
+//- /main.rs
+struct V(i32, i32);
+
+enum E {
+ V(V)
+}
+mod foo;
+
+//- /foo.rs
+use crate::{E, V};
+fn f() {
+ let e = E::V(V(9, 2));
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_several_files_record() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+//- /main.rs
+enum E {
+ $0V { i: i32, j: i32 }
+}
+mod foo;
+
+//- /foo.rs
+use crate::E;
+fn f() {
+ let e = E::V { i: 9, j: 2 };
+}
+"#,
+ r#"
+//- /main.rs
+struct V{ i: i32, j: i32 }
+
+enum E {
+ V(V)
+}
+mod foo;
+
+//- /foo.rs
+use crate::{E, V};
+fn f() {
+ let e = E::V(V { i: 9, j: 2 });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_extract_struct_record_nested_call_exp() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A { $0One { a: u32, b: u32 } }
+
+struct B(A);
+
+fn foo() {
+ let _ = B(A::One { a: 1, b: 2 });
+}
+"#,
+ r#"
+struct One{ a: u32, b: u32 }
+
+enum A { One(One) }
+
+struct B(A);
+
+fn foo() {
+ let _ = B(A::One(One { a: 1, b: 2 }));
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_enum_not_applicable_for_element_with_no_fields() {
+ check_assist_not_applicable(extract_struct_from_enum_variant, r#"enum A { $0One }"#);
+ }
+
+ #[test]
+ fn test_extract_enum_not_applicable_if_struct_exists() {
+ cov_mark::check!(test_extract_enum_not_applicable_if_struct_exists);
+ check_assist_not_applicable(
+ extract_struct_from_enum_variant,
+ r#"
+struct One;
+enum A { $0One(u8, u32) }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_not_applicable_one_field() {
+ check_assist_not_applicable(extract_struct_from_enum_variant, r"enum A { $0One(u32) }");
+ }
+
+ #[test]
+ fn test_extract_not_applicable_no_field_tuple() {
+ check_assist_not_applicable(extract_struct_from_enum_variant, r"enum A { $0None() }");
+ }
+
+ #[test]
+ fn test_extract_not_applicable_no_field_named() {
+ check_assist_not_applicable(extract_struct_from_enum_variant, r"enum A { $0None {} }");
+ }
+
+ #[test]
+ fn test_extract_struct_only_copies_needed_generics() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum X<'a, 'b, 'x> {
+ $0A { a: &'a &'x mut () },
+ B { b: &'b () },
+ C { c: () },
+}
+"#,
+ r#"
+struct A<'a, 'x>{ a: &'a &'x mut () }
+
+enum X<'a, 'b, 'x> {
+ A(A<'a, 'x>),
+ B { b: &'b () },
+ C { c: () },
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_with_liftime_type_const() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum X<'b, T, V, const C: usize> {
+ $0A { a: T, b: X<'b>, c: [u8; C] },
+ D { d: V },
+}
+"#,
+ r#"
+struct A<'b, T, const C: usize>{ a: T, b: X<'b>, c: [u8; C] }
+
+enum X<'b, T, V, const C: usize> {
+ A(A<'b, T, C>),
+ D { d: V },
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_without_generics() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum X<'a, 'b> {
+ A { a: &'a () },
+ B { b: &'b () },
+ $0C { c: () },
+}
+"#,
+ r#"
+struct C{ c: () }
+
+enum X<'a, 'b> {
+ A { a: &'a () },
+ B { b: &'b () },
+ C(C),
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keeps_trait_bounds() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum En<T: TraitT, V: TraitV> {
+ $0A { a: T },
+ B { b: V },
+}
+"#,
+ r#"
+struct A<T: TraitT>{ a: T }
+
+enum En<T: TraitT, V: TraitV> {
+ A(A<T>),
+ B { b: V },
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
new file mode 100644
index 000000000..af584cdb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
@@ -0,0 +1,360 @@
+use either::Either;
+use ide_db::syntax_helpers::node_ext::walk_ty;
+use itertools::Itertools;
+use syntax::{
+ ast::{self, edit::IndentLevel, AstNode, HasGenericParams, HasName},
+ match_ast,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: extract_type_alias
+//
+// Extracts the selected type as a type alias.
+//
+// ```
+// struct S {
+// field: $0(u8, u8, u8)$0,
+// }
+// ```
+// ->
+// ```
+// type $0Type = (u8, u8, u8);
+//
+// struct S {
+// field: Type,
+// }
+// ```
+pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if ctx.has_empty_selection() {
+ return None;
+ }
+
+ let ty = ctx.find_node_at_range::<ast::Type>()?;
+ let item = ty.syntax().ancestors().find_map(ast::Item::cast)?;
+ let assoc_owner = item.syntax().ancestors().nth(2).and_then(|it| {
+ match_ast! {
+ match it {
+ ast::Trait(tr) => Some(Either::Left(tr)),
+ ast::Impl(impl_) => Some(Either::Right(impl_)),
+ _ => None,
+ }
+ }
+ });
+ let node = assoc_owner.as_ref().map_or_else(
+ || item.syntax(),
+ |impl_| impl_.as_ref().either(AstNode::syntax, AstNode::syntax),
+ );
+ let insert_pos = node.text_range().start();
+ let target = ty.syntax().text_range();
+
+ acc.add(
+ AssistId("extract_type_alias", AssistKind::RefactorExtract),
+ "Extract type as type alias",
+ target,
+ |builder| {
+ let mut known_generics = match item.generic_param_list() {
+ Some(it) => it.generic_params().collect(),
+ None => Vec::new(),
+ };
+ if let Some(it) = assoc_owner.as_ref().and_then(|it| match it {
+ Either::Left(it) => it.generic_param_list(),
+ Either::Right(it) => it.generic_param_list(),
+ }) {
+ known_generics.extend(it.generic_params());
+ }
+ let generics = collect_used_generics(&ty, &known_generics);
+
+ let replacement = if !generics.is_empty() {
+ format!(
+ "Type<{}>",
+ generics.iter().format_with(", ", |generic, f| {
+ match generic {
+ ast::GenericParam::ConstParam(cp) => f(&cp.name().unwrap()),
+ ast::GenericParam::LifetimeParam(lp) => f(&lp.lifetime().unwrap()),
+ ast::GenericParam::TypeParam(tp) => f(&tp.name().unwrap()),
+ }
+ })
+ )
+ } else {
+ String::from("Type")
+ };
+ builder.replace(target, replacement);
+
+ let indent = IndentLevel::from_node(node);
+ let generics = if !generics.is_empty() {
+ format!("<{}>", generics.iter().format(", "))
+ } else {
+ String::new()
+ };
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ builder.insert_snippet(
+ cap,
+ insert_pos,
+ format!("type $0Type{} = {};\n\n{}", generics, ty, indent),
+ );
+ }
+ None => {
+ builder.insert(
+ insert_pos,
+ format!("type Type{} = {};\n\n{}", generics, ty, indent),
+ );
+ }
+ }
+ },
+ )
+}
+
+fn collect_used_generics<'gp>(
+ ty: &ast::Type,
+ known_generics: &'gp [ast::GenericParam],
+) -> Vec<&'gp ast::GenericParam> {
+ // can't use a closure -> closure here cause lifetime inference fails for that
+ fn find_lifetime(text: &str) -> impl Fn(&&ast::GenericParam) -> bool + '_ {
+ move |gp: &&ast::GenericParam| match gp {
+ ast::GenericParam::LifetimeParam(lp) => {
+ lp.lifetime().map_or(false, |lt| lt.text() == text)
+ }
+ _ => false,
+ }
+ }
+
+ let mut generics = Vec::new();
+ walk_ty(ty, &mut |ty| match ty {
+ ast::Type::PathType(ty) => {
+ if let Some(path) = ty.path() {
+ if let Some(name_ref) = path.as_single_name_ref() {
+ if let Some(param) = known_generics.iter().find(|gp| {
+ match gp {
+ ast::GenericParam::ConstParam(cp) => cp.name(),
+ ast::GenericParam::TypeParam(tp) => tp.name(),
+ _ => None,
+ }
+ .map_or(false, |n| n.text() == name_ref.text())
+ }) {
+ generics.push(param);
+ }
+ }
+ generics.extend(
+ path.segments()
+ .filter_map(|seg| seg.generic_arg_list())
+ .flat_map(|it| it.generic_args())
+ .filter_map(|it| match it {
+ ast::GenericArg::LifetimeArg(lt) => {
+ let lt = lt.lifetime()?;
+ known_generics.iter().find(find_lifetime(&lt.text()))
+ }
+ _ => None,
+ }),
+ );
+ }
+ }
+ ast::Type::ImplTraitType(impl_ty) => {
+ if let Some(it) = impl_ty.type_bound_list() {
+ generics.extend(
+ it.bounds()
+ .filter_map(|it| it.lifetime())
+ .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ );
+ }
+ }
+ ast::Type::DynTraitType(dyn_ty) => {
+ if let Some(it) = dyn_ty.type_bound_list() {
+ generics.extend(
+ it.bounds()
+ .filter_map(|it| it.lifetime())
+ .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ );
+ }
+ }
+ ast::Type::RefType(ref_) => generics.extend(
+ ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ ),
+ _ => (),
+ });
+ // stable resort to lifetime, type, const
+ generics.sort_by_key(|gp| match gp {
+ ast::GenericParam::ConstParam(_) => 2,
+ ast::GenericParam::LifetimeParam(_) => 0,
+ ast::GenericParam::TypeParam(_) => 1,
+ });
+ generics
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_not_applicable_without_selection() {
+ check_assist_not_applicable(
+ extract_type_alias,
+ r"
+struct S {
+ field: $0(u8, u8, u8),
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn test_simple_types() {
+ check_assist(
+ extract_type_alias,
+ r"
+struct S {
+ field: $0u8$0,
+}
+ ",
+ r#"
+type $0Type = u8;
+
+struct S {
+ field: Type,
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_generic_type_arg() {
+ check_assist(
+ extract_type_alias,
+ r"
+fn generic<T>() {}
+
+fn f() {
+ generic::<$0()$0>();
+}
+ ",
+ r#"
+fn generic<T>() {}
+
+type $0Type = ();
+
+fn f() {
+ generic::<Type>();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_inner_type_arg() {
+ check_assist(
+ extract_type_alias,
+ r"
+struct Vec<T> {}
+struct S {
+ v: Vec<Vec<$0Vec<u8>$0>>,
+}
+ ",
+ r#"
+struct Vec<T> {}
+type $0Type = Vec<u8>;
+
+struct S {
+ v: Vec<Vec<Type>>,
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_extract_inner_type() {
+ check_assist(
+ extract_type_alias,
+ r"
+struct S {
+ field: ($0u8$0,),
+}
+ ",
+ r#"
+type $0Type = u8;
+
+struct S {
+ field: (Type,),
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn extract_from_impl_or_trait() {
+ // When invoked in an impl/trait, extracted type alias should be placed next to the
+ // impl/trait, not inside.
+ check_assist(
+ extract_type_alias,
+ r#"
+impl S {
+ fn f() -> $0(u8, u8)$0 {}
+}
+ "#,
+ r#"
+type $0Type = (u8, u8);
+
+impl S {
+ fn f() -> Type {}
+}
+ "#,
+ );
+ check_assist(
+ extract_type_alias,
+ r#"
+trait Tr {
+ fn f() -> $0(u8, u8)$0 {}
+}
+ "#,
+ r#"
+type $0Type = (u8, u8);
+
+trait Tr {
+ fn f() -> Type {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indentation() {
+ check_assist(
+ extract_type_alias,
+ r#"
+mod m {
+ fn f() -> $0u8$0 {}
+}
+ "#,
+ r#"
+mod m {
+ type $0Type = u8;
+
+ fn f() -> Type {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn generics() {
+ check_assist(
+ extract_type_alias,
+ r#"
+struct Struct<const C: usize>;
+impl<'outer, Outer, const OUTER: usize> () {
+ fn func<'inner, Inner, const INNER: usize>(_: $0&(Struct<INNER>, Struct<OUTER>, Outer, &'inner (), Inner, &'outer ())$0) {}
+}
+"#,
+ r#"
+struct Struct<const C: usize>;
+type $0Type<'inner, 'outer, Outer, Inner, const INNER: usize, const OUTER: usize> = &(Struct<INNER>, Struct<OUTER>, Outer, &'inner (), Inner, &'outer ());
+
+impl<'outer, Outer, const OUTER: usize> () {
+ fn func<'inner, Inner, const INNER: usize>(_: Type<'inner, 'outer, Outer, Inner, INNER, OUTER>) {}
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
new file mode 100644
index 000000000..3596b6f82
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
@@ -0,0 +1,1279 @@
+use stdx::format_to;
+use syntax::{
+ ast::{self, AstNode},
+ NodeOrToken,
+ SyntaxKind::{
+ BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, MATCH_GUARD,
+ PATH_EXPR, RETURN_EXPR,
+ },
+ SyntaxNode,
+};
+
+use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: extract_variable
+//
+// Extracts subexpression into a variable.
+//
+// ```
+// fn main() {
+// $0(1 + 2)$0 * 4;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let $0var_name = (1 + 2);
+// var_name * 4;
+// }
+// ```
+pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if ctx.has_empty_selection() {
+ return None;
+ }
+
+ let node = match ctx.covering_element() {
+ NodeOrToken::Node(it) => it,
+ NodeOrToken::Token(it) if it.kind() == COMMENT => {
+ cov_mark::hit!(extract_var_in_comment_is_not_applicable);
+ return None;
+ }
+ NodeOrToken::Token(it) => it.parent()?,
+ };
+ let node = node.ancestors().take_while(|anc| anc.text_range() == node.text_range()).last()?;
+ let to_extract = node
+ .descendants()
+ .take_while(|it| ctx.selection_trimmed().contains_range(it.text_range()))
+ .find_map(valid_target_expr)?;
+
+ if let Some(ty_info) = ctx.sema.type_of_expr(&to_extract) {
+ if ty_info.adjusted().is_unit() {
+ return None;
+ }
+ }
+
+ let reference_modifier = match get_receiver_type(ctx, &to_extract) {
+ Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ",
+ Some(receiver_type) if receiver_type.is_reference() => "&",
+ _ => "",
+ };
+
+ let parent_ref_expr = to_extract.syntax().parent().and_then(ast::RefExpr::cast);
+ let var_modifier = match parent_ref_expr {
+ Some(expr) if expr.mut_token().is_some() => "mut ",
+ _ => "",
+ };
+
+ let anchor = Anchor::from(&to_extract)?;
+ let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone();
+ let target = to_extract.syntax().text_range();
+ acc.add(
+ AssistId("extract_variable", AssistKind::RefactorExtract),
+ "Extract into variable",
+ target,
+ move |edit| {
+ let field_shorthand =
+ match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) {
+ Some(field) => field.name_ref(),
+ None => None,
+ };
+
+ let mut buf = String::new();
+
+ let var_name = match &field_shorthand {
+ Some(it) => it.to_string(),
+ None => suggest_name::for_variable(&to_extract, &ctx.sema),
+ };
+ let expr_range = match &field_shorthand {
+ Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()),
+ None => to_extract.syntax().text_range(),
+ };
+
+ match anchor {
+ Anchor::Before(_) | Anchor::Replace(_) => {
+ format_to!(buf, "let {}{} = {}", var_modifier, var_name, reference_modifier)
+ }
+ Anchor::WrapInBlock(_) => {
+ format_to!(buf, "{{ let {} = {}", var_name, reference_modifier)
+ }
+ };
+ format_to!(buf, "{}", to_extract.syntax());
+
+ if let Anchor::Replace(stmt) = anchor {
+ cov_mark::hit!(test_extract_var_expr_stmt);
+ if stmt.semicolon_token().is_none() {
+ buf.push(';');
+ }
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = buf.replace(
+ &format!("let {}{}", var_modifier, var_name),
+ &format!("let {}$0{}", var_modifier, var_name),
+ );
+ edit.replace_snippet(cap, expr_range, snip)
+ }
+ None => edit.replace(expr_range, buf),
+ }
+ return;
+ }
+
+ buf.push(';');
+
+ // We want to maintain the indent level,
+ // but we do not want to duplicate possible
+ // extra newlines in the indent block
+ let text = indent.text();
+ if text.starts_with('\n') {
+ buf.push('\n');
+ buf.push_str(text.trim_start_matches('\n'));
+ } else {
+ buf.push_str(text);
+ }
+
+ edit.replace(expr_range, var_name.clone());
+ let offset = anchor.syntax().text_range().start();
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = buf.replace(
+ &format!("let {}{}", var_modifier, var_name),
+ &format!("let {}$0{}", var_modifier, var_name),
+ );
+ edit.insert_snippet(cap, offset, snip)
+ }
+ None => edit.insert(offset, buf),
+ }
+
+ if let Anchor::WrapInBlock(_) = anchor {
+ edit.insert(anchor.syntax().text_range().end(), " }");
+ }
+ },
+ )
+}
+
+/// Check whether the node is a valid expression which can be extracted to a variable.
+/// In general that's true for any expression, but in some cases that would produce invalid code.
+fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
+ match node.kind() {
+ PATH_EXPR | LOOP_EXPR => None,
+ BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()),
+ RETURN_EXPR => ast::ReturnExpr::cast(node).and_then(|e| e.expr()),
+ BLOCK_EXPR => {
+ ast::BlockExpr::cast(node).filter(|it| it.is_standalone()).map(ast::Expr::from)
+ }
+ _ => ast::Expr::cast(node),
+ }
+}
+
+fn get_receiver_type(ctx: &AssistContext<'_>, expression: &ast::Expr) -> Option<hir::Type> {
+ let receiver = get_receiver(expression.clone())?;
+ Some(ctx.sema.type_of_expr(&receiver)?.original())
+}
+
+/// In the expression `a.b.c.x()`, find `a`
+fn get_receiver(expression: ast::Expr) -> Option<ast::Expr> {
+ match expression {
+ ast::Expr::FieldExpr(field) if field.expr().is_some() => {
+ let nested_expression = &field.expr()?;
+ get_receiver(nested_expression.to_owned())
+ }
+ _ => Some(expression),
+ }
+}
+
+#[derive(Debug)]
+enum Anchor {
+ Before(SyntaxNode),
+ Replace(ast::ExprStmt),
+ WrapInBlock(SyntaxNode),
+}
+
+impl Anchor {
+ fn from(to_extract: &ast::Expr) -> Option<Anchor> {
+ to_extract
+ .syntax()
+ .ancestors()
+ .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind()))
+ .find_map(|node| {
+ if ast::MacroCall::can_cast(node.kind()) {
+ return None;
+ }
+ if let Some(expr) =
+ node.parent().and_then(ast::StmtList::cast).and_then(|it| it.tail_expr())
+ {
+ if expr.syntax() == &node {
+ cov_mark::hit!(test_extract_var_last_expr);
+ return Some(Anchor::Before(node));
+ }
+ }
+
+ if let Some(parent) = node.parent() {
+ if parent.kind() == CLOSURE_EXPR {
+ cov_mark::hit!(test_extract_var_in_closure_no_block);
+ return Some(Anchor::WrapInBlock(node));
+ }
+ if parent.kind() == MATCH_ARM {
+ if node.kind() == MATCH_GUARD {
+ cov_mark::hit!(test_extract_var_in_match_guard);
+ } else {
+ cov_mark::hit!(test_extract_var_in_match_arm_no_block);
+ return Some(Anchor::WrapInBlock(node));
+ }
+ }
+ }
+
+ if let Some(stmt) = ast::Stmt::cast(node.clone()) {
+ if let ast::Stmt::ExprStmt(stmt) = stmt {
+ if stmt.expr().as_ref() == Some(to_extract) {
+ return Some(Anchor::Replace(stmt));
+ }
+ }
+ return Some(Anchor::Before(node));
+ }
+ None
+ })
+ }
+
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Anchor::Before(it) | Anchor::WrapInBlock(it) => it,
+ Anchor::Replace(stmt) => stmt.syntax(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_extract_var_simple() {
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() {
+ foo($01 + 1$0);
+}"#,
+ r#"
+fn foo() {
+ let $0var_name = 1 + 1;
+ foo(var_name);
+}"#,
+ );
+ }
+
+ #[test]
+ fn extract_var_in_comment_is_not_applicable() {
+ cov_mark::check!(extract_var_in_comment_is_not_applicable);
+ check_assist_not_applicable(extract_variable, "fn main() { 1 + /* $0comment$0 */ 1; }");
+ }
+
+ #[test]
+ fn test_extract_var_expr_stmt() {
+ cov_mark::check!(test_extract_var_expr_stmt);
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() {
+ $0 1 + 1$0;
+}"#,
+ r#"
+fn foo() {
+ let $0var_name = 1 + 1;
+}"#,
+ );
+ check_assist(
+ extract_variable,
+ r"
+fn foo() {
+ $0{ let x = 0; x }$0
+ something_else();
+}",
+ r"
+fn foo() {
+ let $0var_name = { let x = 0; x };
+ something_else();
+}",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_part_of_expr_stmt() {
+ check_assist(
+ extract_variable,
+ r"
+fn foo() {
+ $01$0 + 1;
+}",
+ r"
+fn foo() {
+ let $0var_name = 1;
+ var_name + 1;
+}",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_last_expr() {
+ cov_mark::check!(test_extract_var_last_expr);
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() {
+ bar($01 + 1$0)
+}
+"#,
+ r#"
+fn foo() {
+ let $0var_name = 1 + 1;
+ bar(var_name)
+}
+"#,
+ );
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() -> i32 {
+ $0bar(1 + 1)$0
+}
+
+fn bar(i: i32) -> i32 {
+ i
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let $0bar = bar(1 + 1);
+ bar
+}
+
+fn bar(i: i32) -> i32 {
+ i
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_extract_var_in_match_arm_no_block() {
+ cov_mark::check!(test_extract_var_in_match_arm_no_block);
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => ($02 + 2$0, true)
+ _ => (0, false)
+ };
+}
+"#,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => { let $0var_name = 2 + 2; (var_name, true) }
+ _ => (0, false)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_in_match_arm_with_block() {
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => {
+ let y = 1;
+ ($02 + y$0, true)
+ }
+ _ => (0, false)
+ };
+}
+"#,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => {
+ let y = 1;
+ let $0var_name = 2 + y;
+ (var_name, true)
+ }
+ _ => (0, false)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_in_match_guard() {
+ cov_mark::check!(test_extract_var_in_match_guard);
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ match () {
+ () if $010 > 0$0 => 1
+ _ => 2
+ };
+}
+"#,
+ r#"
+fn main() {
+ let $0var_name = 10 > 0;
+ match () {
+ () if var_name => 1
+ _ => 2
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_in_closure_no_block() {
+ cov_mark::check!(test_extract_var_in_closure_no_block);
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let lambda = |x: u32| $0x * 2$0;
+}
+"#,
+ r#"
+fn main() {
+ let lambda = |x: u32| { let $0var_name = x * 2; var_name };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_in_closure_with_block() {
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let lambda = |x: u32| { $0x * 2$0 };
+}
+"#,
+ r#"
+fn main() {
+ let lambda = |x: u32| { let $0var_name = x * 2; var_name };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_path_simple() {
+ check_assist(
+ extract_variable,
+ "
+fn main() {
+ let o = $0Some(true)$0;
+}
+",
+ "
+fn main() {
+ let $0var_name = Some(true);
+ let o = var_name;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_path_method() {
+ check_assist(
+ extract_variable,
+ "
+fn main() {
+ let v = $0bar.foo()$0;
+}
+",
+ "
+fn main() {
+ let $0foo = bar.foo();
+ let v = foo;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_return() {
+ check_assist(
+ extract_variable,
+ "
+fn foo() -> u32 {
+ $0return 2 + 2$0;
+}
+",
+ "
+fn foo() -> u32 {
+ let $0var_name = 2 + 2;
+ return var_name;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_does_not_add_extra_whitespace() {
+ check_assist(
+ extract_variable,
+ "
+fn foo() -> u32 {
+
+
+ $0return 2 + 2$0;
+}
+",
+ "
+fn foo() -> u32 {
+
+
+ let $0var_name = 2 + 2;
+ return var_name;
+}
+",
+ );
+
+ check_assist(
+ extract_variable,
+ "
+fn foo() -> u32 {
+
+ $0return 2 + 2$0;
+}
+",
+ "
+fn foo() -> u32 {
+
+ let $0var_name = 2 + 2;
+ return var_name;
+}
+",
+ );
+
+ check_assist(
+ extract_variable,
+ "
+fn foo() -> u32 {
+ let foo = 1;
+
+ // bar
+
+
+ $0return 2 + 2$0;
+}
+",
+ "
+fn foo() -> u32 {
+ let foo = 1;
+
+ // bar
+
+
+ let $0var_name = 2 + 2;
+ return var_name;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_break() {
+ check_assist(
+ extract_variable,
+ "
+fn main() {
+ let result = loop {
+ $0break 2 + 2$0;
+ };
+}
+",
+ "
+fn main() {
+ let result = loop {
+ let $0var_name = 2 + 2;
+ break var_name;
+ };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_for_cast() {
+ check_assist(
+ extract_variable,
+ "
+fn main() {
+ let v = $00f32 as u32$0;
+}
+",
+ "
+fn main() {
+ let $0var_name = 0f32 as u32;
+ let v = var_name;
+}
+",
+ );
+ }
+
+ #[test]
+ fn extract_var_field_shorthand() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S {
+ foo: i32
+}
+
+fn main() {
+ S { foo: $01 + 1$0 }
+}
+"#,
+ r#"
+struct S {
+ foo: i32
+}
+
+fn main() {
+ let $0foo = 1 + 1;
+ S { foo }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_type() {
+ check_assist(
+ extract_variable,
+ r#"
+struct Test(i32);
+
+fn foo() -> Test {
+ $0{ Test(10) }$0
+}
+"#,
+ r#"
+struct Test(i32);
+
+fn foo() -> Test {
+ let $0test = { Test(10) };
+ test
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+fn bar(test: u32, size: u32)
+
+fn foo() {
+ bar(1, $01+1$0);
+}
+"#,
+ r#"
+fn bar(test: u32, size: u32)
+
+fn foo() {
+ let $0size = 1+1;
+ bar(1, size);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_parameter_name_has_precedence_over_type() {
+ check_assist(
+ extract_variable,
+ r#"
+struct TextSize(u32);
+fn bar(test: u32, size: TextSize)
+
+fn foo() {
+ bar(1, $0{ TextSize(1+1) }$0);
+}
+"#,
+ r#"
+struct TextSize(u32);
+fn bar(test: u32, size: TextSize)
+
+fn foo() {
+ let $0size = { TextSize(1+1) };
+ bar(1, size);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_function() {
+ check_assist(
+ extract_variable,
+ r#"
+fn is_required(test: u32, size: u32) -> bool
+
+fn foo() -> bool {
+ $0is_required(1, 2)$0
+}
+"#,
+ r#"
+fn is_required(test: u32, size: u32) -> bool
+
+fn foo() -> bool {
+ let $0is_required = is_required(1, 2);
+ is_required
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_method() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32) -> u32 { n }
+}
+
+fn foo() -> u32 {
+ $0S.bar(1)$0
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32) -> u32 { n }
+}
+
+fn foo() -> u32 {
+ let $0bar = S.bar(1);
+ bar
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_method_param() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32, size: u32) { n }
+}
+
+fn foo() {
+ S.bar($01 + 1$0, 2)
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32, size: u32) { n }
+}
+
+fn foo() {
+ let $0n = 1 + 1;
+ S.bar(n, 2)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_ufcs_method_param() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32, size: u32) { n }
+}
+
+fn foo() {
+ S::bar(&S, $01 + 1$0, 2)
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32, size: u32) { n }
+}
+
+fn foo() {
+ let $0n = 1 + 1;
+ S::bar(&S, n, 2)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_parameter_name_has_precedence_over_function() {
+ check_assist(
+ extract_variable,
+ r#"
+fn bar(test: u32, size: u32)
+
+fn foo() {
+ bar(1, $0symbol_size(1, 2)$0);
+}
+"#,
+ r#"
+fn bar(test: u32, size: u32)
+
+fn foo() {
+ let $0size = symbol_size(1, 2);
+ bar(1, size);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_macro_call() {
+ check_assist(
+ extract_variable,
+ r"
+struct Vec;
+macro_rules! vec {
+ () => {Vec}
+}
+fn main() {
+ let _ = $0vec![]$0;
+}
+",
+ r"
+struct Vec;
+macro_rules! vec {
+ () => {Vec}
+}
+fn main() {
+ let $0vec = vec![];
+ let _ = vec;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_for_return_not_applicable() {
+ check_assist_not_applicable(extract_variable, "fn foo() { $0return$0; } ");
+ }
+
+ #[test]
+ fn test_extract_var_for_break_not_applicable() {
+ check_assist_not_applicable(extract_variable, "fn main() { loop { $0break$0; }; }");
+ }
+
+ #[test]
+ fn test_extract_var_unit_expr_not_applicable() {
+ check_assist_not_applicable(
+ extract_variable,
+ r#"
+fn foo() {
+ let mut i = 3;
+ $0if i >= 0 {
+ i += 1;
+ } else {
+ i -= 1;
+ }$0
+}"#,
+ );
+ }
+
+ // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic
+ #[test]
+ fn extract_var_target() {
+ check_assist_target(extract_variable, "fn foo() -> u32 { $0return 2 + 2$0; }", "2 + 2");
+
+ check_assist_target(
+ extract_variable,
+ "
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => ($02 + 2$0, true)
+ _ => (0, false)
+ };
+}
+",
+ "2 + 2",
+ );
+ }
+
+ #[test]
+ fn extract_var_no_block_body() {
+ check_assist_not_applicable(
+ extract_variable,
+ r"
+const X: usize = $0100$0;
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_mutable_reference_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(s: &mut S) {
+ $0s.vec$0.push(0);
+}"#,
+ r#"
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(s: &mut S) {
+ let $0vec = &mut s.vec;
+ vec.push(0);
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_mutable_reference_parameter_deep_nesting() {
+ check_assist(
+ extract_variable,
+ r#"
+struct Y {
+ field: X
+}
+struct X {
+ field: S
+}
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(f: &mut Y) {
+ $0f.field.field.vec$0.push(0);
+}"#,
+ r#"
+struct Y {
+ field: X
+}
+struct X {
+ field: S
+}
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(f: &mut Y) {
+ let $0vec = &mut f.field.field.vec;
+ vec.push(0);
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ $0s.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ let $0x = &s.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_parameter_deep_nesting() {
+ check_assist(
+ extract_variable,
+ r#"
+struct Z;
+impl Z {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct Y {
+ field: Z
+}
+
+struct X {
+ field: Y
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ $0s.sub.field.field$0.do_thing();
+}"#,
+ r#"
+struct Z;
+impl Z {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct Y {
+ field: Z
+}
+
+struct X {
+ field: Y
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ let $0z = &s.sub.field.field;
+ z.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_regular_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: S) {
+ $0s.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: S) {
+ let $0x = s.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_mutable_reference_local() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &mut S::new();
+ $0local.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &mut S::new();
+ let $0x = &mut local.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_local() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &S::new();
+ $0local.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &S::new();
+ let $0x = &local.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_for_mutable_borrow() {
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() {
+ let v = &mut $00$0;
+}"#,
+ r#"
+fn foo() {
+ let mut $0var_name = 0;
+ let v = &mut var_name;
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
new file mode 100644
index 000000000..b33846f54
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -0,0 +1,606 @@
+use hir::{db::HirDatabase, HasSource, HasVisibility, PathResolution};
+use ide_db::base_db::FileId;
+use syntax::{
+ ast::{self, HasVisibility as _},
+ AstNode, TextRange, TextSize,
+};
+
+use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
+
+// FIXME: this really should be a fix for diagnostic, rather than an assist.
+
+// Assist: fix_visibility
+//
+// Makes inaccessible item public.
+//
+// ```
+// mod m {
+// fn frobnicate() {}
+// }
+// fn main() {
+// m::frobnicate$0() {}
+// }
+// ```
+// ->
+// ```
+// mod m {
+// $0pub(crate) fn frobnicate() {}
+// }
+// fn main() {
+// m::frobnicate() {}
+// }
+// ```
+pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ add_vis_to_referenced_module_def(acc, ctx)
+ .or_else(|| add_vis_to_referenced_record_field(acc, ctx))
+}
+
+fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path: ast::Path = ctx.find_node_at_offset()?;
+ let path_res = ctx.sema.resolve_path(&path)?;
+ let def = match path_res {
+ PathResolution::Def(def) => def,
+ _ => return None,
+ };
+
+ let current_module = ctx.sema.scope(path.syntax())?.module();
+ let target_module = def.module(ctx.db())?;
+
+ if def.visibility(ctx.db()).is_visible_from(ctx.db(), current_module.into()) {
+ return None;
+ };
+
+ let (offset, current_visibility, target, target_file, target_name) =
+ target_data_for_def(ctx.db(), def)?;
+
+ let missing_visibility =
+ if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+
+ let assist_label = match target_name {
+ None => format!("Change visibility to {}", missing_visibility),
+ Some(name) => format!("Change visibility of {} to {}", name, missing_visibility),
+ };
+
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
+ builder.edit_file(target_file);
+ match ctx.config.snippet_cap {
+ Some(cap) => match current_visibility {
+ Some(current_visibility) => builder.replace_snippet(
+ cap,
+ current_visibility.syntax().text_range(),
+ format!("$0{}", missing_visibility),
+ ),
+ None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
+ },
+ None => match current_visibility {
+ Some(current_visibility) => {
+ builder.replace(current_visibility.syntax().text_range(), missing_visibility)
+ }
+ None => builder.insert(offset, format!("{} ", missing_visibility)),
+ },
+ }
+ })
+}
+
+fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let record_field: ast::RecordExprField = ctx.find_node_at_offset()?;
+ let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?;
+
+ let current_module = ctx.sema.scope(record_field.syntax())?.module();
+ let visibility = record_field_def.visibility(ctx.db());
+ if visibility.is_visible_from(ctx.db(), current_module.into()) {
+ return None;
+ }
+
+ let parent = record_field_def.parent_def(ctx.db());
+ let parent_name = parent.name(ctx.db());
+ let target_module = parent.module(ctx.db());
+
+ let in_file_source = record_field_def.source(ctx.db())?;
+ let (offset, current_visibility, target) = match in_file_source.value {
+ hir::FieldSource::Named(it) => {
+ let s = it.syntax();
+ (vis_offset(s), it.visibility(), s.text_range())
+ }
+ hir::FieldSource::Pos(it) => {
+ let s = it.syntax();
+ (vis_offset(s), it.visibility(), s.text_range())
+ }
+ };
+
+ let missing_visibility =
+ if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+ let target_file = in_file_source.file_id.original_file(ctx.db());
+
+ let target_name = record_field_def.name(ctx.db());
+ let assist_label =
+ format!("Change visibility of {}.{} to {}", parent_name, target_name, missing_visibility);
+
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
+ builder.edit_file(target_file);
+ match ctx.config.snippet_cap {
+ Some(cap) => match current_visibility {
+ Some(current_visibility) => builder.replace_snippet(
+ cap,
+ current_visibility.syntax().text_range(),
+ format!("$0{}", missing_visibility),
+ ),
+ None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
+ },
+ None => match current_visibility {
+ Some(current_visibility) => {
+ builder.replace(current_visibility.syntax().text_range(), missing_visibility)
+ }
+ None => builder.insert(offset, format!("{} ", missing_visibility)),
+ },
+ }
+ })
+}
+
+fn target_data_for_def(
+ db: &dyn HirDatabase,
+ def: hir::ModuleDef,
+) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId, Option<hir::Name>)> {
+ fn offset_target_and_file_id<S, Ast>(
+ db: &dyn HirDatabase,
+ x: S,
+ ) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId)>
+ where
+ S: HasSource<Ast = Ast>,
+ Ast: AstNode + ast::HasVisibility,
+ {
+ let source = x.source(db)?;
+ let in_file_syntax = source.syntax();
+ let file_id = in_file_syntax.file_id;
+ let syntax = in_file_syntax.value;
+ let current_visibility = source.value.visibility();
+ Some((
+ vis_offset(syntax),
+ current_visibility,
+ syntax.text_range(),
+ file_id.original_file(db.upcast()),
+ ))
+ }
+
+ let target_name;
+ let (offset, current_visibility, target, target_file) = match def {
+ hir::ModuleDef::Function(f) => {
+ target_name = Some(f.name(db));
+ offset_target_and_file_id(db, f)?
+ }
+ hir::ModuleDef::Adt(adt) => {
+ target_name = Some(adt.name(db));
+ match adt {
+ hir::Adt::Struct(s) => offset_target_and_file_id(db, s)?,
+ hir::Adt::Union(u) => offset_target_and_file_id(db, u)?,
+ hir::Adt::Enum(e) => offset_target_and_file_id(db, e)?,
+ }
+ }
+ hir::ModuleDef::Const(c) => {
+ target_name = c.name(db);
+ offset_target_and_file_id(db, c)?
+ }
+ hir::ModuleDef::Static(s) => {
+ target_name = Some(s.name(db));
+ offset_target_and_file_id(db, s)?
+ }
+ hir::ModuleDef::Trait(t) => {
+ target_name = Some(t.name(db));
+ offset_target_and_file_id(db, t)?
+ }
+ hir::ModuleDef::TypeAlias(t) => {
+ target_name = Some(t.name(db));
+ offset_target_and_file_id(db, t)?
+ }
+ hir::ModuleDef::Module(m) => {
+ target_name = m.name(db);
+ let in_file_source = m.declaration_source(db)?;
+ let file_id = in_file_source.file_id.original_file(db.upcast());
+ let syntax = in_file_source.value.syntax();
+ (vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id)
+ }
+ // FIXME
+ hir::ModuleDef::Macro(_) => return None,
+ // Enum variants can't be private, we can't modify builtin types
+ hir::ModuleDef::Variant(_) | hir::ModuleDef::BuiltinType(_) => return None,
+ };
+
+ Some((offset, current_visibility, target, target_file, target_name))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn fix_visibility_of_fn() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { fn foo() {} }
+ fn main() { foo::foo$0() } ",
+ r"mod foo { $0pub(crate) fn foo() {} }
+ fn main() { foo::foo() } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub fn foo() {} }
+ fn main() { foo::foo$0() } ",
+ )
+ }
+
+ #[test]
+ fn fix_visibility_of_adt_in_submodule() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { struct Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) struct Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"mod foo { enum Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) enum Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub enum Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"mod foo { union Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) union Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub union Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_adt_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::Foo$0 }
+
+//- /foo.rs
+struct Foo;
+",
+ r"$0pub(crate) struct Foo;
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_struct_field() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { pub struct Foo { bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ r"mod foo { pub struct Foo { $0pub(crate) bar: (), } }
+ fn main() { foo::Foo { bar: () }; } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { bar: () }
+",
+ r"pub struct Foo { $0pub(crate) bar: () }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_enum_variant_field() {
+ // Enum variants, as well as their fields, always get the enum's visibility. In fact, rustc
+ // rejects any visibility specifiers on them, so this assist should never fire on them.
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub enum Foo { Bar { bar: () } } }
+ fn main() { foo::Foo::Bar { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo::Bar { $0bar: () }; }
+//- /foo.rs
+pub enum Foo { Bar { bar: () } }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_union_field() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { pub union Foo { bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ r"mod foo { pub union Foo { $0pub(crate) bar: (), } }
+ fn main() { foo::Foo { bar: () }; } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub union Foo { bar: () }
+",
+ r"pub union Foo { $0pub(crate) bar: () }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub union Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub union Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_const() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { const FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ r"mod foo { $0pub(crate) const FOO: () = (); }
+ fn main() { foo::FOO } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub const FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_static() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { static FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ r"mod foo { $0pub(crate) static FOO: () = (); }
+ fn main() { foo::FOO } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub static FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_trait() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::$0Foo; } ",
+ r"mod foo { $0pub(crate) trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::Foo; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::Foo$0; } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_type_alias() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { type Foo = (); }
+ fn main() { let x: foo::Foo$0; } ",
+ r"mod foo { $0pub(crate) type Foo = (); }
+ fn main() { let x: foo::Foo; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub type Foo = (); }
+ fn main() { let x: foo::Foo$0; } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_module() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { mod bar { fn bar() {} } }
+ fn main() { foo::bar$0::bar(); } ",
+ r"mod foo { $0pub(crate) mod bar { fn bar() {} } }
+ fn main() { foo::bar::bar(); } ",
+ );
+
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0::baz(); }
+
+//- /foo.rs
+mod bar {
+ pub fn baz() {}
+}
+",
+ r"$0pub(crate) mod bar {
+ pub fn baz() {}
+}
+",
+ );
+
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub mod bar { pub fn bar() {} } }
+ fn main() { foo::bar$0::bar(); } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_inline_module_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0::baz(); }
+
+//- /foo.rs
+mod bar;
+//- /foo/bar.rs
+pub fn baz() {}
+",
+ r"$0pub(crate) mod bar;
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_module_declaration_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0>::baz(); }
+
+//- /foo.rs
+mod bar {
+ pub fn baz() {}
+}
+",
+ r"$0pub(crate) mod bar {
+ pub fn baz() {}
+}
+",
+ );
+ }
+
+ #[test]
+ fn adds_pub_when_target_is_in_another_crate() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+foo::Bar$0
+//- /lib.rs crate:foo
+struct Bar;
+",
+ r"$0pub struct Bar;
+",
+ )
+ }
+
+ #[test]
+ fn replaces_pub_crate_with_pub() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+foo::Bar$0
+//- /lib.rs crate:foo
+pub(crate) struct Bar;
+",
+ r"$0pub struct Bar;
+",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+fn main() {
+ foo::Foo { $0bar: () };
+}
+//- /lib.rs crate:foo
+pub struct Foo { pub(crate) bar: () }
+",
+ r"pub struct Foo { $0pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_reexport() {
+ // FIXME: broken test, this should fix visibility of the re-export
+ // rather than the struct.
+ check_assist(
+ fix_visibility,
+ r#"
+mod foo {
+ use bar::Baz;
+ mod bar { pub(super) struct Baz; }
+}
+foo::Baz$0
+"#,
+ r#"
+mod foo {
+ use bar::Baz;
+ mod bar { $0pub(crate) struct Baz; }
+}
+foo::Baz
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
new file mode 100644
index 000000000..2ea6f58fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -0,0 +1,139 @@
+use syntax::ast::{self, AstNode, BinExpr};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: flip_binexpr
+//
+// Flips operands of a binary expression.
+//
+// ```
+// fn main() {
+// let _ = 90 +$0 2;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let _ = 2 + 90;
+// }
+// ```
+pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let expr = ctx.find_node_at_offset::<BinExpr>()?;
+ let lhs = expr.lhs()?.syntax().clone();
+ let rhs = expr.rhs()?.syntax().clone();
+ let op_range = expr.op_token()?.text_range();
+ // The assist should be applied only if the cursor is on the operator
+ let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
+ let action: FlipAction = expr.op_kind()?.into();
+ // The assist should not be applied for certain operators
+ if let FlipAction::DontFlip = action {
+ return None;
+ }
+
+ acc.add(
+ AssistId("flip_binexpr", AssistKind::RefactorRewrite),
+ "Flip binary expression",
+ op_range,
+ |edit| {
+ if let FlipAction::FlipAndReplaceOp(new_op) = action {
+ edit.replace(op_range, new_op);
+ }
+ edit.replace(lhs.text_range(), rhs.text());
+ edit.replace(rhs.text_range(), lhs.text());
+ },
+ )
+}
+
+enum FlipAction {
+ // Flip the expression
+ Flip,
+ // Flip the expression and replace the operator with this string
+ FlipAndReplaceOp(&'static str),
+ // Do not flip the expression
+ DontFlip,
+}
+
+impl From<ast::BinaryOp> for FlipAction {
+ fn from(op_kind: ast::BinaryOp) -> Self {
+ match op_kind {
+ ast::BinaryOp::Assignment { .. } => FlipAction::DontFlip,
+ ast::BinaryOp::CmpOp(ast::CmpOp::Ord { ordering, strict }) => {
+ let rev_op = match (ordering, strict) {
+ (ast::Ordering::Less, true) => ">",
+ (ast::Ordering::Less, false) => ">=",
+ (ast::Ordering::Greater, true) => "<",
+ (ast::Ordering::Greater, false) => "<=",
+ };
+ FlipAction::FlipAndReplaceOp(rev_op)
+ }
+ _ => FlipAction::Flip,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn flip_binexpr_target_is_the_op() {
+ check_assist_target(flip_binexpr, "fn f() { let res = 1 ==$0 2; }", "==")
+ }
+
+ #[test]
+ fn flip_binexpr_not_applicable_for_assignment() {
+ check_assist_not_applicable(flip_binexpr, "fn f() { let mut _x = 1; _x +=$0 2 }")
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_eq() {
+ check_assist(flip_binexpr, "fn f() { let res = 1 ==$0 2; }", "fn f() { let res = 2 == 1; }")
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_gt() {
+ check_assist(flip_binexpr, "fn f() { let res = 1 >$0 2; }", "fn f() { let res = 2 < 1; }")
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_lteq() {
+ check_assist(flip_binexpr, "fn f() { let res = 1 <=$0 2; }", "fn f() { let res = 2 >= 1; }")
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_complex_expr() {
+ check_assist(
+ flip_binexpr,
+ "fn f() { let res = (1 + 1) ==$0 (2 + 2); }",
+ "fn f() { let res = (2 + 2) == (1 + 1); }",
+ )
+ }
+
+ #[test]
+ fn flip_binexpr_works_inside_match() {
+ check_assist(
+ flip_binexpr,
+ r#"
+ fn dyn_eq(&self, other: &dyn Diagnostic) -> bool {
+ match other.downcast_ref::<Self>() {
+ None => false,
+ Some(it) => it ==$0 self,
+ }
+ }
+ "#,
+ r#"
+ fn dyn_eq(&self, other: &dyn Diagnostic) -> bool {
+ match other.downcast_ref::<Self>() {
+ None => false,
+ Some(it) => self == it,
+ }
+ }
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs
new file mode 100644
index 000000000..f40f2713a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs
@@ -0,0 +1,92 @@
+use syntax::{algo::non_trivia_sibling, Direction, SyntaxKind, T};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: flip_comma
+//
+// Flips two comma-separated items.
+//
+// ```
+// fn main() {
+// ((1, 2),$0 (3, 4));
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// ((3, 4), (1, 2));
+// }
+// ```
+pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let comma = ctx.find_token_syntax_at_offset(T![,])?;
+ let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
+ let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;
+
+ // Don't apply a "flip" in case of a last comma
+ // that typically comes before punctuation
+ if next.kind().is_punct() {
+ return None;
+ }
+
+ // Don't apply a "flip" inside the macro call
+ // since macro input are just mere tokens
+ if comma.parent_ancestors().any(|it| it.kind() == SyntaxKind::MACRO_CALL) {
+ return None;
+ }
+
+ acc.add(
+ AssistId("flip_comma", AssistKind::RefactorRewrite),
+ "Flip comma",
+ comma.text_range(),
+ |edit| {
+ edit.replace(prev.text_range(), next.to_string());
+ edit.replace(next.text_range(), prev.to_string());
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn flip_comma_works_for_function_parameters() {
+ check_assist(
+ flip_comma,
+ r#"fn foo(x: i32,$0 y: Result<(), ()>) {}"#,
+ r#"fn foo(y: Result<(), ()>, x: i32) {}"#,
+ )
+ }
+
+ #[test]
+ fn flip_comma_target() {
+ check_assist_target(flip_comma, r#"fn foo(x: i32,$0 y: Result<(), ()>) {}"#, ",")
+ }
+
+ #[test]
+ fn flip_comma_before_punct() {
+ // See https://github.com/rust-lang/rust-analyzer/issues/1619
+ // "Flip comma" assist shouldn't be applicable to the last comma in enum or struct
+ // declaration body.
+ check_assist_not_applicable(flip_comma, "pub enum Test { A,$0 }");
+ check_assist_not_applicable(flip_comma, "pub struct Test { foo: usize,$0 }");
+ }
+
+ #[test]
+ fn flip_comma_works() {
+ check_assist(
+ flip_comma,
+ r#"fn main() {((1, 2),$0 (3, 4));}"#,
+ r#"fn main() {((3, 4), (1, 2));}"#,
+ )
+ }
+
+ #[test]
+ fn flip_comma_not_applicable_for_macro_input() {
+ // "Flip comma" assist shouldn't be applicable inside the macro call
+ // See https://github.com/rust-lang/rust-analyzer/issues/7693
+ check_assist_not_applicable(flip_comma, r#"bar!(a,$0 b)"#);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs
new file mode 100644
index 000000000..e3ae4970b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs
@@ -0,0 +1,121 @@
+use syntax::{
+ algo::non_trivia_sibling,
+ ast::{self, AstNode},
+ Direction, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: flip_trait_bound
+//
+// Flips two trait bounds.
+//
+// ```
+// fn foo<T: Clone +$0 Copy>() { }
+// ```
+// ->
+// ```
+// fn foo<T: Copy + Clone>() { }
+// ```
+pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // We want to replicate the behavior of `flip_binexpr` by only suggesting
+ // the assist when the cursor is on a `+`
+ let plus = ctx.find_token_syntax_at_offset(T![+])?;
+
+ // Make sure we're in a `TypeBoundList`
+ if ast::TypeBoundList::cast(plus.parent()?).is_none() {
+ return None;
+ }
+
+ let (before, after) = (
+ non_trivia_sibling(plus.clone().into(), Direction::Prev)?,
+ non_trivia_sibling(plus.clone().into(), Direction::Next)?,
+ );
+
+ let target = plus.text_range();
+ acc.add(
+ AssistId("flip_trait_bound", AssistKind::RefactorRewrite),
+ "Flip trait bounds",
+ target,
+ |edit| {
+ edit.replace(before.text_range(), after.to_string());
+ edit.replace(after.text_range(), before.to_string());
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn flip_trait_bound_assist_available() {
+ check_assist_target(flip_trait_bound, "struct S<T> where T: A $0+ B + C { }", "+")
+ }
+
+ #[test]
+ fn flip_trait_bound_not_applicable_for_single_trait_bound() {
+ check_assist_not_applicable(flip_trait_bound, "struct S<T> where T: $0A { }")
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_struct() {
+ check_assist(
+ flip_trait_bound,
+ "struct S<T> where T: A $0+ B { }",
+ "struct S<T> where T: B + A { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_trait_impl() {
+ check_assist(
+ flip_trait_bound,
+ "impl X for S<T> where T: A +$0 B { }",
+ "impl X for S<T> where T: B + A { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_fn() {
+ check_assist(flip_trait_bound, "fn f<T: A $0+ B>(t: T) { }", "fn f<T: B + A>(t: T) { }")
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_fn_where_clause() {
+ check_assist(
+ flip_trait_bound,
+ "fn f<T>(t: T) where T: A +$0 B { }",
+ "fn f<T>(t: T) where T: B + A { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_lifetime() {
+ check_assist(
+ flip_trait_bound,
+ "fn f<T>(t: T) where T: A $0+ 'static { }",
+ "fn f<T>(t: T) where T: 'static + A { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_complex_bounds() {
+ check_assist(
+ flip_trait_bound,
+ "struct S<T> where T: A<T> $0+ b_mod::B<T> + C<T> { }",
+ "struct S<T> where T: b_mod::B<T> + A<T> + C<T> { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_long_bounds() {
+ check_assist(
+ flip_trait_bound,
+ "struct S<T> where T: A + B + C + D + E + F +$0 G + H + I + J { }",
+ "struct S<T> where T: A + B + C + D + E + G + F + H + I + J { }",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
new file mode 100644
index 000000000..eaa6de73e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
@@ -0,0 +1,255 @@
+use crate::assist_context::{AssistContext, Assists};
+use hir::{HasVisibility, HirDisplay, Module};
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::{FileId, Upcast},
+ defs::{Definition, NameRefClass},
+};
+use syntax::{
+ ast::{self, edit::IndentLevel, NameRef},
+ AstNode, Direction, SyntaxKind, TextSize,
+};
+
+// Assist: generate_constant
+//
+// Generate a named constant.
+//
+// ```
+// struct S { i: usize }
+// impl S { pub fn new(n: usize) {} }
+// fn main() {
+// let v = S::new(CAPA$0CITY);
+// }
+// ```
+// ->
+// ```
+// struct S { i: usize }
+// impl S { pub fn new(n: usize) {} }
+// fn main() {
+// const CAPACITY: usize = $0;
+// let v = S::new(CAPACITY);
+// }
+// ```
+
+pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let constant_token = ctx.find_node_at_offset::<ast::NameRef>()?;
+ if constant_token.to_string().chars().any(|it| !(it.is_uppercase() || it == '_')) {
+ cov_mark::hit!(not_constant_name);
+ return None;
+ }
+ if NameRefClass::classify(&ctx.sema, &constant_token).is_some() {
+ cov_mark::hit!(already_defined);
+ return None;
+ }
+ let expr = constant_token.syntax().ancestors().find_map(ast::Expr::cast)?;
+ let statement = expr.syntax().ancestors().find_map(ast::Stmt::cast)?;
+ let ty = ctx.sema.type_of_expr(&expr)?;
+ let scope = ctx.sema.scope(statement.syntax())?;
+ let constant_module = scope.module();
+ let type_name = ty.original().display_source_code(ctx.db(), constant_module.into()).ok()?;
+ let target = statement.syntax().parent()?.text_range();
+ let path = constant_token.syntax().ancestors().find_map(ast::Path::cast)?;
+
+ let name_refs = path.segments().map(|s| s.name_ref());
+ let mut outer_exists = false;
+ let mut not_exist_name_ref = Vec::new();
+ let mut current_module = constant_module;
+ for name_ref in name_refs {
+ let name_ref_value = name_ref?;
+ let name_ref_class = NameRefClass::classify(&ctx.sema, &name_ref_value);
+ match name_ref_class {
+ Some(NameRefClass::Definition(Definition::Module(m))) => {
+ if !m.visibility(ctx.sema.db).is_visible_from(ctx.sema.db, constant_module.into()) {
+ return None;
+ }
+ outer_exists = true;
+ current_module = m;
+ }
+ Some(_) => {
+ return None;
+ }
+ None => {
+ not_exist_name_ref.push(name_ref_value);
+ }
+ }
+ }
+ let (offset, indent, file_id, post_string) =
+ target_data_for_generate_constant(ctx, current_module, constant_module).unwrap_or_else(
+ || {
+ let indent = IndentLevel::from_node(statement.syntax());
+ (statement.syntax().text_range().start(), indent, None, format!("\n{}", indent))
+ },
+ );
+
+ let text = get_text_for_generate_constant(not_exist_name_ref, indent, outer_exists, type_name)?;
+ acc.add(
+ AssistId("generate_constant", AssistKind::QuickFix),
+ "Generate constant",
+ target,
+ |builder| {
+ if let Some(file_id) = file_id {
+ builder.edit_file(file_id);
+ }
+ builder.insert(offset, format!("{}{}", text, post_string));
+ },
+ )
+}
+
+fn get_text_for_generate_constant(
+ mut not_exist_name_ref: Vec<NameRef>,
+ indent: IndentLevel,
+ outer_exists: bool,
+ type_name: String,
+) -> Option<String> {
+ let constant_token = not_exist_name_ref.pop()?;
+ let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
+ let mut text = format!("{}const {}: {} = $0;", vis, constant_token, type_name);
+ while let Some(name_ref) = not_exist_name_ref.pop() {
+ let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
+ text = text.replace("\n", "\n ");
+ text = format!("{}mod {} {{{}\n}}", vis, name_ref.to_string(), text);
+ }
+ Some(text.replace("\n", &format!("\n{}", indent)))
+}
+
+fn target_data_for_generate_constant(
+ ctx: &AssistContext<'_>,
+ current_module: Module,
+ constant_module: Module,
+) -> Option<(TextSize, IndentLevel, Option<FileId>, String)> {
+ if current_module == constant_module {
+ // insert in current file
+ return None;
+ }
+ let in_file_source = current_module.definition_source(ctx.sema.db);
+ let file_id = in_file_source.file_id.original_file(ctx.sema.db.upcast());
+ match in_file_source.value {
+ hir::ModuleSource::Module(module_node) => {
+ let indent = IndentLevel::from_node(module_node.syntax());
+ let l_curly_token = module_node.item_list()?.l_curly_token()?;
+ let offset = l_curly_token.text_range().end();
+
+ let siblings_has_newline = l_curly_token
+ .siblings_with_tokens(Direction::Next)
+ .find(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains("\n"))
+ .is_some();
+ let post_string =
+ if siblings_has_newline { format!("{}", indent) } else { format!("\n{}", indent) };
+ Some((offset, indent + 1, Some(file_id), post_string))
+ }
+ _ => Some((TextSize::from(0), 0.into(), Some(file_id), "\n".into())),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_trivial() {
+ check_assist(
+ generate_constant,
+ r#"struct S { i: usize }
+impl S {
+ pub fn new(n: usize) {}
+}
+fn main() {
+ let v = S::new(CAPA$0CITY);
+}"#,
+ r#"struct S { i: usize }
+impl S {
+ pub fn new(n: usize) {}
+}
+fn main() {
+ const CAPACITY: usize = $0;
+ let v = S::new(CAPACITY);
+}"#,
+ );
+ }
+ #[test]
+ fn test_wont_apply_when_defined() {
+ cov_mark::check!(already_defined);
+ check_assist_not_applicable(
+ generate_constant,
+ r#"struct S { i: usize }
+impl S {
+ pub fn new(n: usize) {}
+}
+fn main() {
+ const CAPACITY: usize = 10;
+ let v = S::new(CAPAC$0ITY);
+}"#,
+ );
+ }
+ #[test]
+ fn test_wont_apply_when_maybe_not_constant() {
+ cov_mark::check!(not_constant_name);
+ check_assist_not_applicable(
+ generate_constant,
+ r#"struct S { i: usize }
+impl S {
+ pub fn new(n: usize) {}
+}
+fn main() {
+ let v = S::new(capa$0city);
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_constant_with_path() {
+ check_assist(
+ generate_constant,
+ r#"mod foo {}
+fn bar() -> i32 {
+ foo::A_CON$0STANT
+}"#,
+ r#"mod foo {
+ pub const A_CONSTANT: i32 = $0;
+}
+fn bar() -> i32 {
+ foo::A_CONSTANT
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_constant_with_longer_path() {
+ check_assist(
+ generate_constant,
+ r#"mod foo {
+ pub mod goo {}
+}
+fn bar() -> i32 {
+ foo::goo::A_CON$0STANT
+}"#,
+ r#"mod foo {
+ pub mod goo {
+ pub const A_CONSTANT: i32 = $0;
+ }
+}
+fn bar() -> i32 {
+ foo::goo::A_CONSTANT
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_constant_with_not_exist_longer_path() {
+ check_assist(
+ generate_constant,
+ r#"fn bar() -> i32 {
+ foo::goo::A_CON$0STANT
+}"#,
+ r#"mod foo {
+ pub mod goo {
+ pub const A_CONSTANT: i32 = $0;
+ }
+}
+fn bar() -> i32 {
+ foo::goo::A_CONSTANT
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
new file mode 100644
index 000000000..5e9995a98
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
@@ -0,0 +1,179 @@
+use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: generate_default_from_enum_variant
+//
+// Adds a Default impl for an enum using a variant.
+//
+// ```
+// enum Version {
+// Undefined,
+// Minor$0,
+// Major,
+// }
+// ```
+// ->
+// ```
+// enum Version {
+// Undefined,
+// Minor,
+// Major,
+// }
+//
+// impl Default for Version {
+// fn default() -> Self {
+// Self::Minor
+// }
+// }
+// ```
+pub(crate) fn generate_default_from_enum_variant(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let variant_name = variant.name()?;
+ let enum_name = variant.parent_enum().name()?;
+ if !matches!(variant.kind(), ast::StructKind::Unit) {
+ cov_mark::hit!(test_gen_default_on_non_unit_variant_not_implemented);
+ return None;
+ }
+
+ if existing_default_impl(&ctx.sema, &variant).is_some() {
+ cov_mark::hit!(test_gen_default_impl_already_exists);
+ return None;
+ }
+
+ let target = variant.syntax().text_range();
+ acc.add(
+ AssistId("generate_default_from_enum_variant", AssistKind::Generate),
+ "Generate `Default` impl from this enum variant",
+ target,
+ |edit| {
+ let start_offset = variant.parent_enum().syntax().text_range().end();
+ let buf = format!(
+ r#"
+
+impl Default for {0} {{
+ fn default() -> Self {{
+ Self::{1}
+ }}
+}}"#,
+ enum_name, variant_name
+ );
+ edit.insert(start_offset, buf);
+ },
+ )
+}
+
+fn existing_default_impl(
+ sema: &'_ hir::Semantics<'_, RootDatabase>,
+ variant: &ast::Variant,
+) -> Option<()> {
+ let variant = sema.to_def(variant)?;
+ let enum_ = variant.parent_enum(sema.db);
+ let krate = enum_.module(sema.db).krate();
+
+ let default_trait = FamousDefs(sema, krate).core_default_Default()?;
+ let enum_type = enum_.ty(sema.db);
+
+ if enum_type.impls_trait(sema.db, default_trait, &[]) {
+ Some(())
+ } else {
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_default_from_variant() {
+ check_assist(
+ generate_default_from_enum_variant,
+ r#"
+//- minicore: default
+enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}
+"#,
+ r#"
+enum Variant {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Default for Variant {
+ fn default() -> Self {
+ Self::Minor
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_default_already_implemented() {
+ cov_mark::check!(test_gen_default_impl_already_exists);
+ check_assist_not_applicable(
+ generate_default_from_enum_variant,
+ r#"
+//- minicore: default
+enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}
+
+impl Default for Variant {
+ fn default() -> Self {
+ Self::Minor
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_no_element() {
+ cov_mark::check!(test_gen_default_on_non_unit_variant_not_implemented);
+ check_assist_not_applicable(
+ generate_default_from_enum_variant,
+ r#"
+//- minicore: default
+enum Variant {
+ Undefined,
+ Minor(u32)$0,
+ Major,
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_default_from_variant_with_one_variant() {
+ check_assist(
+ generate_default_from_enum_variant,
+ r#"
+//- minicore: default
+enum Variant { Undefi$0ned }
+"#,
+ r#"
+enum Variant { Undefined }
+
+impl Default for Variant {
+ fn default() -> Self {
+ Self::Undefined
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
new file mode 100644
index 000000000..cbd33de19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -0,0 +1,657 @@
+use ide_db::famous_defs::FamousDefs;
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl},
+ AstNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId,
+};
+
+// Assist: generate_default_from_new
+//
+// Generates default implementation from new method.
+//
+// ```
+// struct Example { _inner: () }
+//
+// impl Example {
+// pub fn n$0ew() -> Self {
+// Self { _inner: () }
+// }
+// }
+// ```
+// ->
+// ```
+// struct Example { _inner: () }
+//
+// impl Example {
+// pub fn new() -> Self {
+// Self { _inner: () }
+// }
+// }
+//
+// impl Default for Example {
+// fn default() -> Self {
+// Self::new()
+// }
+// }
+// ```
+pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let fn_node = ctx.find_node_at_offset::<ast::Fn>()?;
+ let fn_name = fn_node.name()?;
+
+ if fn_name.text() != "new" {
+ cov_mark::hit!(other_function_than_new);
+ return None;
+ }
+
+ if fn_node.param_list()?.params().next().is_some() {
+ cov_mark::hit!(new_function_with_parameters);
+ return None;
+ }
+
+ let impl_ = fn_node.syntax().ancestors().into_iter().find_map(ast::Impl::cast)?;
+ if is_default_implemented(ctx, &impl_) {
+ cov_mark::hit!(default_block_is_already_present);
+ cov_mark::hit!(struct_in_module_with_default);
+ return None;
+ }
+
+ let insert_location = impl_.syntax().text_range();
+
+ acc.add(
+ AssistId("generate_default_from_new", crate::AssistKind::Generate),
+ "Generate a Default impl from a new fn",
+ insert_location,
+ move |builder| {
+ let default_code = " fn default() -> Self {
+ Self::new()
+ }";
+ let code = generate_trait_impl_text_from_impl(&impl_, "Default", default_code);
+ builder.insert(insert_location.end(), code);
+ },
+ )
+}
+
+fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String {
+ let generic_params = impl_.generic_param_list();
+ let mut buf = String::with_capacity(code.len());
+ buf.push_str("\n\n");
+ buf.push_str("impl");
+
+ if let Some(generic_params) = &generic_params {
+ let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax()));
+ let toc_params = generic_params.type_or_const_params().map(|toc_param| match toc_param {
+ ast::TypeOrConstParam::Type(type_param) => {
+ let mut buf = String::new();
+ if let Some(it) = type_param.name() {
+ format_to!(buf, "{}", it.syntax());
+ }
+ if let Some(it) = type_param.colon_token() {
+ format_to!(buf, "{} ", it);
+ }
+ if let Some(it) = type_param.type_bound_list() {
+ format_to!(buf, "{}", it.syntax());
+ }
+ buf
+ }
+ ast::TypeOrConstParam::Const(const_param) => const_param.syntax().to_string(),
+ });
+ let generics = lifetimes.chain(toc_params).format(", ");
+ format_to!(buf, "<{}>", generics);
+ }
+
+ buf.push(' ');
+ buf.push_str(trait_text);
+ buf.push_str(" for ");
+ buf.push_str(&impl_.self_ty().unwrap().syntax().text().to_string());
+
+ match impl_.where_clause() {
+ Some(where_clause) => {
+ format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code);
+ }
+ None => {
+ format_to!(buf, " {{\n{}\n}}", code);
+ }
+ }
+
+ buf
+}
+
+fn is_default_implemented(ctx: &AssistContext<'_>, impl_: &Impl) -> bool {
+ let db = ctx.sema.db;
+ let impl_ = ctx.sema.to_def(impl_);
+ let impl_def = match impl_ {
+ Some(value) => value,
+ None => return false,
+ };
+
+ let ty = impl_def.self_ty(db);
+ let krate = impl_def.module(db).krate();
+ let default = FamousDefs(&ctx.sema, krate).core_default_Default();
+ let default_trait = match default {
+ Some(value) => value,
+ None => return false,
+ };
+
+ ty.impls_trait(db, default_trait, &[])
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn generate_default() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Example { _inner: () }
+
+impl Example {
+ pub fn ne$0w() -> Self {
+ Self { _inner: () }
+ }
+}
+
+fn main() {}
+"#,
+ r#"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+fn main() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_default2() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Test { value: u32 }
+
+impl Test {
+ pub fn ne$0w() -> Self {
+ Self { value: 0 }
+ }
+}
+"#,
+ r#"
+struct Test { value: u32 }
+
+impl Test {
+ pub fn new() -> Self {
+ Self { value: 0 }
+ }
+}
+
+impl Default for Test {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generic() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T> {
+ _bar: *mut T,
+}
+
+impl<T> Foo<T> {
+ pub fn ne$0w() -> Self {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+pub struct Foo<T> {
+ _bar: *mut T,
+}
+
+impl<T> Foo<T> {
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T> Default for Foo<T> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generics() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T, B> {
+ _tars: *mut T,
+ _bar: *mut B,
+}
+
+impl<T, B> Foo<T, B> {
+ pub fn ne$0w() -> Self {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+pub struct Foo<T, B> {
+ _tars: *mut T,
+ _bar: *mut B,
+}
+
+impl<T, B> Foo<T, B> {
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T, B> Default for Foo<T, B> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generic_and_bound() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T> {
+ t: T,
+}
+
+impl<T: From<i32>> Foo<T> {
+ pub fn ne$0w() -> Self {
+ Foo { t: 0.into() }
+ }
+}
+"#,
+ r#"
+pub struct Foo<T> {
+ t: T,
+}
+
+impl<T: From<i32>> Foo<T> {
+ pub fn new() -> Self {
+ Foo { t: 0.into() }
+ }
+}
+
+impl<T: From<i32>> Default for Foo<T> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generics_and_bounds() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T, B> {
+ _tars: T,
+ _bar: B,
+}
+
+impl<T: From<i32>, B: From<i64>> Foo<T, B> {
+ pub fn ne$0w() -> Self {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+pub struct Foo<T, B> {
+ _tars: T,
+ _bar: B,
+}
+
+impl<T: From<i32>, B: From<i64>> Foo<T, B> {
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T: From<i32>, B: From<i64>> Default for Foo<T, B> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generic_and_where() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T> {
+ t: T,
+}
+
+impl<T: From<i32>> Foo<T>
+where
+ Option<T>: Debug
+{
+ pub fn ne$0w() -> Self {
+ Foo { t: 0.into() }
+ }
+}
+"#,
+ r#"
+pub struct Foo<T> {
+ t: T,
+}
+
+impl<T: From<i32>> Foo<T>
+where
+ Option<T>: Debug
+{
+ pub fn new() -> Self {
+ Foo { t: 0.into() }
+ }
+}
+
+impl<T: From<i32>> Default for Foo<T>
+where
+ Option<T>: Debug
+{
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generics_and_wheres() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T, B> {
+ _tars: T,
+ _bar: B,
+}
+
+impl<T: From<i32>, B: From<i64>> Foo<T, B>
+where
+ Option<T>: Debug, Option<B>: Debug,
+{
+ pub fn ne$0w() -> Self {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+pub struct Foo<T, B> {
+ _tars: T,
+ _bar: B,
+}
+
+impl<T: From<i32>, B: From<i64>> Foo<T, B>
+where
+ Option<T>: Debug, Option<B>: Debug,
+{
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T: From<i32>, B: From<i64>> Default for Foo<T, B>
+where
+ Option<T>: Debug, Option<B>: Debug,
+{
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_parameters() {
+ cov_mark::check!(new_function_with_parameters);
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Example { _inner: () }
+
+impl Example {
+ pub fn $0new(value: ()) -> Self {
+ Self { _inner: value }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn other_function_than_new() {
+ cov_mark::check!(other_function_than_new);
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn a$0dd() -> Self {
+ Self { _inner: () }
+ }
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn default_block_is_already_present() {
+ cov_mark::check!(default_block_is_already_present);
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Example { _inner: () }
+
+impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn standalone_new_function() {
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+fn n$0ew() -> u32 {
+ 0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_struct_blocks() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Example { _inner: () }
+struct Test { value: u32 }
+
+impl Example {
+ pub fn new$0() -> Self {
+ Self { _inner: () }
+ }
+}
+"#,
+ r#"
+struct Example { _inner: () }
+struct Test { value: u32 }
+
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn when_struct_is_after_impl() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+impl Example {
+ pub fn $0new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+struct Example { _inner: () }
+"#,
+ r#"
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+struct Example { _inner: () }
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_in_module() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+mod test {
+ struct Example { _inner: () }
+
+ impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+ }
+}
+"#,
+ r#"
+mod test {
+ struct Example { _inner: () }
+
+ impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+ }
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_in_module_with_default() {
+ cov_mark::check!(struct_in_module_with_default);
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+mod test {
+ struct Example { _inner: () }
+
+ impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+ }
+
+ impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
new file mode 100644
index 000000000..85b193663
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -0,0 +1,334 @@
+use hir::{self, HasCrate, HasSource, HasVisibility};
+use syntax::ast::{self, make, AstNode, HasGenericParams, HasName, HasVisibility as _};
+
+use crate::{
+ utils::{convert_param_list_to_arg_list, find_struct_impl, render_snippet, Cursor},
+ AssistContext, AssistId, AssistKind, Assists, GroupLabel,
+};
+use syntax::ast::edit::AstNodeEdit;
+
+// Assist: generate_delegate_methods
+//
+// Generate delegate methods.
+//
+// ```
+// struct Age(u8);
+// impl Age {
+// fn age(&self) -> u8 {
+// self.0
+// }
+// }
+//
+// struct Person {
+// ag$0e: Age,
+// }
+// ```
+// ->
+// ```
+// struct Age(u8);
+// impl Age {
+// fn age(&self) -> u8 {
+// self.0
+// }
+// }
+//
+// struct Person {
+// age: Age,
+// }
+//
+// impl Person {
+// $0fn age(&self) -> u8 {
+// self.age.age()
+// }
+// }
+// ```
+pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let strukt_name = strukt.name()?;
+ let current_module = ctx.sema.scope(strukt.syntax())?.module();
+
+ let (field_name, field_ty, target) = match ctx.find_node_at_offset::<ast::RecordField>() {
+ Some(field) => {
+ let field_name = field.name()?;
+ let field_ty = field.ty()?;
+ (format!("{}", field_name), field_ty, field.syntax().text_range())
+ }
+ None => {
+ let field = ctx.find_node_at_offset::<ast::TupleField>()?;
+ let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
+ let field_list_index = field_list.fields().position(|it| it == field)?;
+ let field_ty = field.ty()?;
+ (format!("{}", field_list_index), field_ty, field.syntax().text_range())
+ }
+ };
+
+ let sema_field_ty = ctx.sema.resolve_type(&field_ty)?;
+ let krate = sema_field_ty.krate(ctx.db());
+ let mut methods = vec![];
+ sema_field_ty.iterate_assoc_items(ctx.db(), krate, |item| {
+ if let hir::AssocItem::Function(f) = item {
+ if f.self_param(ctx.db()).is_some() && f.is_visible_from(ctx.db(), current_module) {
+ methods.push(f)
+ }
+ }
+ Option::<()>::None
+ });
+
+ for method in methods {
+ let adt = ast::Adt::Struct(strukt.clone());
+ let name = method.name(ctx.db()).to_string();
+ let impl_def = find_struct_impl(ctx, &adt, &name).flatten();
+ acc.add_group(
+ &GroupLabel("Generate delegate methods…".to_owned()),
+ AssistId("generate_delegate_methods", AssistKind::Generate),
+ format!("Generate delegate for `{}.{}()`", field_name, method.name(ctx.db())),
+ target,
+ |builder| {
+ // Create the function
+ let method_source = match method.source(ctx.db()) {
+ Some(source) => source.value,
+ None => return,
+ };
+ let method_name = method.name(ctx.db());
+ let vis = method_source.visibility();
+ let name = make::name(&method.name(ctx.db()).to_string());
+ let params =
+ method_source.param_list().unwrap_or_else(|| make::param_list(None, []));
+ let type_params = method_source.generic_param_list();
+ let arg_list = match method_source.param_list() {
+ Some(list) => convert_param_list_to_arg_list(list),
+ None => make::arg_list([]),
+ };
+ let tail_expr = make::expr_method_call(
+ make::ext::field_from_idents(["self", &field_name]).unwrap(), // This unwrap is ok because we have at least 1 arg in the list
+ make::name_ref(&method_name.to_string()),
+ arg_list,
+ );
+ let body = make::block_expr([], Some(tail_expr));
+ let ret_type = method_source.ret_type();
+ let is_async = method_source.async_token().is_some();
+ let f = make::fn_(vis, name, type_params, params, body, ret_type, is_async)
+ .indent(ast::edit::IndentLevel(1))
+ .clone_for_update();
+
+ let cursor = Cursor::Before(f.syntax());
+
+ // Create or update an impl block, attach the function to it,
+ // then insert into our code.
+ match impl_def {
+ Some(impl_def) => {
+ // Remember where in our source our `impl` block lives.
+ let impl_def = impl_def.clone_for_update();
+ let old_range = impl_def.syntax().text_range();
+
+ // Attach the function to the impl block
+ let assoc_items = impl_def.get_or_create_assoc_item_list();
+ assoc_items.add_item(f.clone().into());
+
+ // Update the impl block.
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snippet = render_snippet(cap, impl_def.syntax(), cursor);
+ builder.replace_snippet(cap, old_range, snippet);
+ }
+ None => {
+ builder.replace(old_range, impl_def.syntax().to_string());
+ }
+ }
+ }
+ None => {
+ // Attach the function to the impl block
+ let name = &strukt_name.to_string();
+ let params = strukt.generic_param_list();
+ let ty_params = params.clone();
+ let impl_def = make::impl_(make::ext::ident_path(name), params, ty_params)
+ .clone_for_update();
+ let assoc_items = impl_def.get_or_create_assoc_item_list();
+ assoc_items.add_item(f.clone().into());
+
+ // Insert the impl block.
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let offset = strukt.syntax().text_range().end();
+ let snippet = render_snippet(cap, impl_def.syntax(), cursor);
+ let snippet = format!("\n\n{}", snippet);
+ builder.insert_snippet(cap, offset, snippet);
+ }
+ None => {
+ let offset = strukt.syntax().text_range().end();
+ let snippet = format!("\n\n{}", impl_def.syntax());
+ builder.insert(offset, snippet);
+ }
+ }
+ }
+ }
+ },
+ )?;
+ }
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_delegate_create_impl_block() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ ag$0e: Age,
+}"#,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ age: Age,
+}
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_delegate_update_impl_block() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ ag$0e: Age,
+}
+
+impl Person {}"#,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ age: Age,
+}
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_delegate_tuple_struct() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person(A$0ge);"#,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person(Age);
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.0.age()
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_delegate_enable_all_attributes() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Age<T>(T);
+impl<T> Age<T> {
+ pub(crate) async fn age<J, 'a>(&'a mut self, ty: T, arg: J) -> T {
+ self.0
+ }
+}
+
+struct Person<T> {
+ ag$0e: Age<T>,
+}"#,
+ r#"
+struct Age<T>(T);
+impl<T> Age<T> {
+ pub(crate) async fn age<J, 'a>(&'a mut self, ty: T, arg: J) -> T {
+ self.0
+ }
+}
+
+struct Person<T> {
+ age: Age<T>,
+}
+
+impl<T> Person<T> {
+ $0pub(crate) async fn age<J, 'a>(&'a mut self, ty: T, arg: J) -> T {
+ self.age.age(ty, arg)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_delegate_visibility() {
+ check_assist_not_applicable(
+ generate_delegate_methods,
+ r#"
+mod m {
+ pub struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+}
+
+struct Person {
+ ag$0e: m::Age,
+}"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
new file mode 100644
index 000000000..b9637ee8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
@@ -0,0 +1,343 @@
+use std::fmt::Display;
+
+use hir::{ModPath, ModuleDef};
+use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SyntaxNode,
+};
+
+use crate::{
+ assist_context::{AssistBuilder, AssistContext, Assists},
+ utils::generate_trait_impl_text,
+ AssistId, AssistKind,
+};
+
+// Assist: generate_deref
+//
+// Generate `Deref` impl using the given struct field.
+//
+// ```
+// # //- minicore: deref, deref_mut
+// struct A;
+// struct B {
+// $0a: A
+// }
+// ```
+// ->
+// ```
+// struct A;
+// struct B {
+// a: A
+// }
+//
+// impl core::ops::Deref for B {
+// type Target = A;
+//
+// fn deref(&self) -> &Self::Target {
+// &self.a
+// }
+// }
+// ```
+pub(crate) fn generate_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ generate_record_deref(acc, ctx).or_else(|| generate_tuple_deref(acc, ctx))
+}
+
+fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+
+ let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) {
+ None => DerefType::Deref,
+ Some(DerefType::Deref) => DerefType::DerefMut,
+ Some(DerefType::DerefMut) => {
+ cov_mark::hit!(test_add_record_deref_impl_already_exists);
+ return None;
+ }
+ };
+
+ let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
+ let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
+ let trait_path = module.find_use_path(ctx.db(), ModuleDef::Trait(trait_))?;
+
+ let field_type = field.ty()?;
+ let field_name = field.name()?;
+ let target = field.syntax().text_range();
+ acc.add(
+ AssistId("generate_deref", AssistKind::Generate),
+ format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field_name),
+ target,
+ |edit| {
+ generate_edit(
+ edit,
+ strukt,
+ field_type.syntax(),
+ field_name.syntax(),
+ deref_type_to_generate,
+ trait_path,
+ )
+ },
+ )
+}
+
+fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::TupleField>()?;
+ let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
+ let field_list_index =
+ field_list.syntax().children().into_iter().position(|s| &s == field.syntax())?;
+
+ let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) {
+ None => DerefType::Deref,
+ Some(DerefType::Deref) => DerefType::DerefMut,
+ Some(DerefType::DerefMut) => {
+ cov_mark::hit!(test_add_field_deref_impl_already_exists);
+ return None;
+ }
+ };
+
+ let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
+ let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
+ let trait_path = module.find_use_path(ctx.db(), ModuleDef::Trait(trait_))?;
+
+ let field_type = field.ty()?;
+ let target = field.syntax().text_range();
+ acc.add(
+ AssistId("generate_deref", AssistKind::Generate),
+ format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field.syntax()),
+ target,
+ |edit| {
+ generate_edit(
+ edit,
+ strukt,
+ field_type.syntax(),
+ field_list_index,
+ deref_type_to_generate,
+ trait_path,
+ )
+ },
+ )
+}
+
+fn generate_edit(
+ edit: &mut AssistBuilder,
+ strukt: ast::Struct,
+ field_type_syntax: &SyntaxNode,
+ field_name: impl Display,
+ deref_type: DerefType,
+ trait_path: ModPath,
+) {
+ let start_offset = strukt.syntax().text_range().end();
+ let impl_code = match deref_type {
+ DerefType::Deref => format!(
+ r#" type Target = {0};
+
+ fn deref(&self) -> &Self::Target {{
+ &self.{1}
+ }}"#,
+ field_type_syntax, field_name
+ ),
+ DerefType::DerefMut => format!(
+ r#" fn deref_mut(&mut self) -> &mut Self::Target {{
+ &mut self.{}
+ }}"#,
+ field_name
+ ),
+ };
+ let strukt_adt = ast::Adt::Struct(strukt);
+ let deref_impl = generate_trait_impl_text(&strukt_adt, &trait_path.to_string(), &impl_code);
+ edit.insert(start_offset, deref_impl);
+}
+
+fn existing_deref_impl(
+ sema: &hir::Semantics<'_, RootDatabase>,
+ strukt: &ast::Struct,
+) -> Option<DerefType> {
+ let strukt = sema.to_def(strukt)?;
+ let krate = strukt.module(sema.db).krate();
+
+ let deref_trait = FamousDefs(sema, krate).core_ops_Deref()?;
+ let deref_mut_trait = FamousDefs(sema, krate).core_ops_DerefMut()?;
+ let strukt_type = strukt.ty(sema.db);
+
+ if strukt_type.impls_trait(sema.db, deref_trait, &[]) {
+ if strukt_type.impls_trait(sema.db, deref_mut_trait, &[]) {
+ Some(DerefType::DerefMut)
+ } else {
+ Some(DerefType::Deref)
+ }
+ } else {
+ None
+ }
+}
+
+#[derive(Debug)]
+enum DerefType {
+ Deref,
+ DerefMut,
+}
+
+impl DerefType {
+ fn to_trait(
+ &self,
+ sema: &hir::Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ ) -> Option<hir::Trait> {
+ match self {
+ DerefType::Deref => FamousDefs(sema, krate).core_ops_Deref(),
+ DerefType::DerefMut => FamousDefs(sema, krate).core_ops_DerefMut(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_record_deref() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+struct A { }
+struct B { $0a: A }"#,
+ r#"
+struct A { }
+struct B { a: A }
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.a
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_record_deref_short_path() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A { }
+struct B { $0a: A }"#,
+ r#"
+use core::ops::Deref;
+struct A { }
+struct B { a: A }
+
+impl Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.a
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_field_deref_idx_0() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+struct A { }
+struct B($0A);"#,
+ r#"
+struct A { }
+struct B(A);
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}"#,
+ );
+ }
+ #[test]
+ fn test_generate_field_deref_idx_1() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+struct A { }
+struct B(u8, $0A);"#,
+ r#"
+struct A { }
+struct B(u8, A);
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.1
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generates_derefmut_when_deref_present() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref, deref_mut
+struct B { $0a: u8 }
+
+impl core::ops::Deref for B {}
+"#,
+ r#"
+struct B { a: u8 }
+
+impl core::ops::DerefMut for B {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.a
+ }
+}
+
+impl core::ops::Deref for B {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_record_deref_not_applicable_if_already_impl() {
+ cov_mark::check!(test_add_record_deref_impl_already_exists);
+ check_assist_not_applicable(
+ generate_deref,
+ r#"
+//- minicore: deref, deref_mut
+struct A { }
+struct B { $0a: A }
+
+impl core::ops::Deref for B {}
+impl core::ops::DerefMut for B {}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_generate_field_deref_not_applicable_if_already_impl() {
+ cov_mark::check!(test_add_field_deref_impl_already_exists);
+ check_assist_not_applicable(
+ generate_deref,
+ r#"
+//- minicore: deref, deref_mut
+struct A { }
+struct B($0A)
+
+impl core::ops::Deref for B {}
+impl core::ops::DerefMut for B {}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
new file mode 100644
index 000000000..339245b94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
@@ -0,0 +1,132 @@
+use syntax::{
+ ast::{self, AstNode, HasAttrs},
+ SyntaxKind::{COMMENT, WHITESPACE},
+ TextSize,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: generate_derive
+//
+// Adds a new `#[derive()]` clause to a struct or enum.
+//
+// ```
+// struct Point {
+// x: u32,
+// y: u32,$0
+// }
+// ```
+// ->
+// ```
+// #[derive($0)]
+// struct Point {
+// x: u32,
+// y: u32,
+// }
+// ```
+pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let cap = ctx.config.snippet_cap?;
+ let nominal = ctx.find_node_at_offset::<ast::Adt>()?;
+ let node_start = derive_insertion_offset(&nominal)?;
+ let target = nominal.syntax().text_range();
+ acc.add(
+ AssistId("generate_derive", AssistKind::Generate),
+ "Add `#[derive]`",
+ target,
+ |builder| {
+ let derive_attr = nominal
+ .attrs()
+ .filter_map(|x| x.as_simple_call())
+ .filter(|(name, _arg)| name == "derive")
+ .map(|(_name, arg)| arg)
+ .next();
+ match derive_attr {
+ None => {
+ builder.insert_snippet(cap, node_start, "#[derive($0)]\n");
+ }
+ Some(tt) => {
+ // Just move the cursor.
+ builder.insert_snippet(
+ cap,
+ tt.syntax().text_range().end() - TextSize::of(')'),
+ "$0",
+ )
+ }
+ };
+ },
+ )
+}
+
+// Insert `derive` after doc comments.
+fn derive_insertion_offset(nominal: &ast::Adt) -> Option<TextSize> {
+ let non_ws_child = nominal
+ .syntax()
+ .children_with_tokens()
+ .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
+ Some(non_ws_child.text_range().start())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn add_derive_new() {
+ check_assist(
+ generate_derive,
+ "struct Foo { a: i32, $0}",
+ "#[derive($0)]\nstruct Foo { a: i32, }",
+ );
+ check_assist(
+ generate_derive,
+ "struct Foo { $0 a: i32, }",
+ "#[derive($0)]\nstruct Foo { a: i32, }",
+ );
+ }
+
+ #[test]
+ fn add_derive_existing() {
+ check_assist(
+ generate_derive,
+ "#[derive(Clone)]\nstruct Foo { a: i32$0, }",
+ "#[derive(Clone$0)]\nstruct Foo { a: i32, }",
+ );
+ }
+
+ #[test]
+ fn add_derive_new_with_doc_comment() {
+ check_assist(
+ generate_derive,
+ "
+/// `Foo` is a pretty important struct.
+/// It does stuff.
+struct Foo { a: i32$0, }
+ ",
+ "
+/// `Foo` is a pretty important struct.
+/// It does stuff.
+#[derive($0)]
+struct Foo { a: i32, }
+ ",
+ );
+ }
+
+ #[test]
+ fn add_derive_target() {
+ check_assist_target(
+ generate_derive,
+ "
+struct SomeThingIrrelevant;
+/// `Foo` is a pretty important struct.
+/// It does stuff.
+struct Foo { a: i32$0, }
+struct EvenMoreIrrelevant;
+ ",
+ "/// `Foo` is a pretty important struct.
+/// It does stuff.
+struct Foo { a: i32, }",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
new file mode 100644
index 000000000..c91141f8e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
@@ -0,0 +1,1328 @@
+use hir::{AsAssocItem, HasVisibility, ModuleDef, Visibility};
+use ide_db::assists::{AssistId, AssistKind};
+use itertools::Itertools;
+use stdx::{format_to, to_lower_snake_case};
+use syntax::{
+ algo::skip_whitespace_token,
+ ast::{self, edit::IndentLevel, HasDocComments, HasName},
+ match_ast, AstNode, AstToken,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: generate_documentation_template
+//
+// Adds a documentation template above a function definition / declaration.
+//
+// ```
+// pub struct S;
+// impl S {
+// pub unsafe fn set_len$0(&mut self, len: usize) -> Result<(), std::io::Error> {
+// /* ... */
+// }
+// }
+// ```
+// ->
+// ```
+// pub struct S;
+// impl S {
+// /// Sets the length of this [`S`].
+// ///
+// /// # Errors
+// ///
+// /// This function will return an error if .
+// ///
+// /// # Safety
+// ///
+// /// .
+// pub unsafe fn set_len(&mut self, len: usize) -> Result<(), std::io::Error> {
+// /* ... */
+// }
+// }
+// ```
+pub(crate) fn generate_documentation_template(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
+    // Only trigger when the cursor is on a function's name.
+    let name = ctx.find_node_at_offset::<ast::Name>()?;
+    let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
+    // Not applicable inside trait impls or when the fn already has doc comments.
+    if is_in_trait_impl(&ast_func, ctx) || ast_func.doc_comments().next().is_some() {
+        return None;
+    }
+
+    let parent_syntax = ast_func.syntax();
+    let text_range = parent_syntax.text_range();
+    let indent_level = IndentLevel::from_node(parent_syntax);
+
+    acc.add(
+        AssistId("generate_documentation_template", AssistKind::Generate),
+        "Generate a documentation template",
+        text_range,
+        |builder| {
+            // Introduction / short function description before the sections
+            let mut doc_lines = vec![introduction_builder(&ast_func, ctx).unwrap_or(".".into())];
+            // Then come the sections
+            for section_builder in [panics_builder, errors_builder, safety_builder] {
+                if let Some(mut lines) = section_builder(&ast_func) {
+                    // Each section is separated from the previous text by a blank doc line.
+                    doc_lines.push("".into());
+                    doc_lines.append(&mut lines);
+                }
+            }
+            builder.insert(text_range.start(), documentation_from_lines(doc_lines, indent_level));
+        },
+    )
+}
+
+// Assist: generate_doc_example
+//
+// Generates a rustdoc example when editing an item's documentation.
+//
+// ```
+// /// Adds two numbers.$0
+// pub fn add(a: i32, b: i32) -> i32 { a + b }
+// ```
+// ->
+// ```
+// /// Adds two numbers.
+// ///
+// /// # Examples
+// ///
+// /// ```
+// /// use test::add;
+// ///
+// /// assert_eq!(add(a, b), );
+// /// ```
+// pub fn add(a: i32, b: i32) -> i32 { a + b }
+// ```
+pub(crate) fn generate_doc_example(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // Only trigger while the cursor is inside a comment token.
+    let tok: ast::Comment = ctx.find_token_at_offset()?;
+    let node = tok.syntax().parent()?;
+    // The example is inserted after the item's *last* doc comment.
+    let last_doc_token =
+        ast::AnyHasDocComments::cast(node.clone())?.doc_comments().last()?.syntax().clone();
+    let next_token = skip_whitespace_token(last_doc_token.next_token()?, syntax::Direction::Next)?;
+
+    // Only functions are supported here; other item kinds bail out.
+    let example = match_ast! {
+        match node {
+            ast::Fn(it) => make_example_for_fn(&it, ctx)?,
+            _ => return None,
+        }
+    };
+
+    // Wrap the generated example in a `# Examples` section with a fenced code block.
+    let mut lines = string_vec_from(&["", "# Examples", "", "```"]);
+    lines.extend(example.lines().map(String::from));
+    lines.push("```".into());
+    let indent_level = IndentLevel::from_node(&node);
+
+    acc.add(
+        AssistId("generate_doc_example", AssistKind::Generate),
+        "Generate a documentation example",
+        node.text_range(),
+        |builder| {
+            builder.insert(
+                next_token.text_range().start(),
+                documentation_from_lines(lines, indent_level),
+            );
+        },
+    )
+}
+
+/// Builds the body of a doctest example for `ast_func`: a `use` line, setup
+/// `let` bindings for the receiver and `&mut` arguments, the call itself, and
+/// `assert_eq!` checks on the result and on mutated values.
+///
+/// Returns `None` for private functions and for trait definitions.
+fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
+    if !is_public(ast_func, ctx)? {
+        // Doctests for private items can't actually name the item, so they're pretty useless.
+        return None;
+    }
+
+    if is_in_trait_def(ast_func, ctx) {
+        // This is not yet implemented.
+        return None;
+    }
+
+    let mut example = String::new();
+
+    let is_unsafe = ast_func.unsafe_token().is_some();
+    let param_list = ast_func.param_list()?;
+    let ref_mut_params = ref_mut_params(&param_list);
+    let self_name = self_name(ast_func);
+
+    format_to!(example, "use {};\n\n", build_path(ast_func, ctx)?);
+    // Declare the receiver binding when the function takes `self` by reference.
+    if let Some(self_name) = &self_name {
+        if let Some(is_mut) = is_ref_mut_self(ast_func) {
+            // NOTE: `is_mut == true` comparison replaced by a direct bool test
+            // (clippy::bool_comparison).
+            let mtbl = if is_mut { " mut" } else { "" };
+            format_to!(example, "let{} {} = ;\n", mtbl, self_name);
+        }
+    }
+    // Every `&mut` argument needs a mutable local to borrow from.
+    for param_name in &ref_mut_params {
+        format_to!(example, "let mut {} = ;\n", param_name);
+    }
+    // Call the function, check result
+    let function_call = function_call(ast_func, &param_list, self_name.as_deref(), is_unsafe)?;
+    if returns_a_value(ast_func, ctx) {
+        // With three or more parameters the call gets long, so bind it first.
+        if count_parameters(&param_list) < 3 {
+            format_to!(example, "assert_eq!({}, );\n", function_call);
+        } else {
+            format_to!(example, "let result = {};\n", function_call);
+            example.push_str("assert_eq!(result, );\n");
+        }
+    } else {
+        format_to!(example, "{};\n", function_call);
+    }
+    // Check the mutated values
+    if is_ref_mut_self(ast_func) == Some(true) {
+        format_to!(example, "assert_eq!({}, );", self_name?);
+    }
+    for param_name in &ref_mut_params {
+        format_to!(example, "assert_eq!({}, );", param_name);
+    }
+    Some(example)
+}
+
+// Builds the one-line introduction sentence for methods inside an `impl`
+// block whose name follows a well-known convention: `new`, a getter, or a
+// `set_…` setter. Returns `None` when no convention matches.
+fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
+    let hir_func = ctx.sema.to_def(ast_func)?;
+    let container = hir_func.as_assoc_item(ctx.db())?.container(ctx.db());
+    if let hir::AssocItemContainer::Impl(imp) = container {
+        let ret_ty = hir_func.ret_type(ctx.db());
+        let self_ty = imp.self_ty(ctx.db());
+        let name = ast_func.name()?.to_string();
+        // Lifetimes are stripped so the `[`Ty`]` intra-doc link resolves.
+        let linkable_self_ty = self_type_without_lifetimes(ast_func);
+        let linkable_self_ty = linkable_self_ty.as_deref();
+
+        // `fn new(...) -> Self` gets "Creates a new [`Ty`].".
+        let intro_for_new = || {
+            let is_new = name == "new";
+            if is_new && ret_ty == self_ty {
+                Some(format!("Creates a new [`{}`].", linkable_self_ty?))
+            } else {
+                None
+            }
+        };
+
+        // A getter takes only `&self`/`&mut self` (no owned receiver, no other
+        // parameters) and is not a conversion-style method.
+        let intro_for_getter = || match (
+            hir_func.self_param(ctx.sema.db),
+            &*hir_func.params_without_self(ctx.sema.db),
+        ) {
+            (Some(self_param), []) if self_param.access(ctx.sema.db) != hir::Access::Owned => {
+                // `as_…`, `to_…` and `get` are conversions/lookups, not getters.
+                if name.starts_with("as_") || name.starts_with("to_") || name == "get" {
+                    return None;
+                }
+                let mut what = name.trim_end_matches("_mut").replace('_', " ");
+                if what == "len" {
+                    what = "length".into()
+                }
+                // Wording reflects whether a (mutable) reference is returned.
+                let reference = if ret_ty.is_mutable_reference() {
+                    " a mutable reference to"
+                } else if ret_ty.is_reference() {
+                    " a reference to"
+                } else {
+                    ""
+                };
+                Some(format!("Returns{reference} the {what} of this [`{}`].", linkable_self_ty?))
+            }
+            _ => None,
+        };
+
+        // `set_…` methods get "Sets the … of this [`Ty`].".
+        let intro_for_setter = || {
+            if !name.starts_with("set_") {
+                return None;
+            }
+
+            let mut what = name.trim_start_matches("set_").replace('_', " ");
+            if what == "len" {
+                what = "length".into()
+            };
+            Some(format!("Sets the {what} of this [`{}`].", linkable_self_ty?))
+        };
+
+        if let Some(intro) = intro_for_new() {
+            return Some(intro);
+        }
+        if let Some(intro) = intro_for_getter() {
+            return Some(intro);
+        }
+        if let Some(intro) = intro_for_setter() {
+            return Some(intro);
+        }
+    }
+    None
+}
+
+/// Builds an optional `# Panics` section
+fn panics_builder(ast_func: &ast::Fn) -> Option<Vec<String>> {
+    // Bodiless functions (`can_panic` -> `None`) and bodies with no panicking
+    // construct both yield no section.
+    let body_may_panic = can_panic(ast_func)?;
+    body_may_panic.then(|| string_vec_from(&["# Panics", "", "Panics if ."]))
+}
+
+/// Builds an optional `# Errors` section
+fn errors_builder(ast_func: &ast::Fn) -> Option<Vec<String>> {
+    // Textual heuristic: any return type whose source mentions `Result`.
+    let returns_result = return_type(ast_func)?.to_string().contains("Result");
+    returns_result
+        .then(|| string_vec_from(&["# Errors", "", "This function will return an error if ."]))
+}
+
+/// Builds an optional `# Safety` section
+fn safety_builder(ast_func: &ast::Fn) -> Option<Vec<String>> {
+    // Only `unsafe fn`s get a safety section.
+    ast_func.unsafe_token().is_some().then(|| string_vec_from(&["# Safety", "", "."]))
+}
+
+/// Checks if the function is public / exported
+fn is_public(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<bool> {
+    // `None` when no semantic definition can be resolved for the fn.
+    let hir_func = ctx.sema.to_def(ast_func)?;
+    // `pub` on the function itself is not enough: every enclosing module must
+    // also be public for the item to be nameable from a doctest.
+    Some(
+        hir_func.visibility(ctx.db()) == Visibility::Public
+            && all_parent_mods_public(&hir_func, ctx),
+    )
+}
+
+/// Checks that all parent modules of the function are public / exported
+fn all_parent_mods_public(hir_func: &hir::Function, ctx: &AssistContext<'_>) -> bool {
+    // Walk up the module tree starting from the function's own module.
+    let mut module = hir_func.module(ctx.db());
+    loop {
+        if let Some(parent) = module.parent(ctx.db()) {
+            match ModuleDef::from(module).visibility(ctx.db()) {
+                Visibility::Public => module = parent,
+                // One non-public module on the path is enough to fail.
+                _ => break false,
+            }
+        } else {
+            // Reached the crate root without hitting a private module.
+            break true;
+        }
+    }
+}
+
+/// Returns the name of the current crate
+fn crate_name(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
+    // Resolved via the semantic scope of the fn's syntax node; `None` when the
+    // crate has no display name.
+    let krate = ctx.sema.scope(ast_func.syntax())?.krate();
+    Some(krate.display_name(ctx.db())?.to_string())
+}
+
+/// `None` if function without a body; some bool to guess if function can panic
+fn can_panic(ast_func: &ast::Fn) -> Option<bool> {
+    // Purely textual heuristic on the body's source: this is a guess for a
+    // doc template, not a semantic analysis.
+    let body = ast_func.body()?.to_string();
+    let can_panic = body.contains("panic!(")
+        // FIXME it would be better to not match `debug_assert*!` macro invocations
+        || body.contains("assert!(")
+        || body.contains(".unwrap()")
+        || body.contains(".expect(");
+    Some(can_panic)
+}
+
+/// Helper function to get the name that should be given to `self` arguments
+fn self_name(ast_func: &ast::Fn) -> Option<String> {
+    // E.g. a method on `MyStruct` gets the example binding name `my_struct`.
+    self_partial_type(ast_func).map(|name| to_lower_snake_case(&name))
+}
+
+/// Helper function to get the name of the type of `self`
+fn self_type(ast_func: &ast::Fn) -> Option<ast::Type> {
+    // The self type of the innermost enclosing `impl` block, if any.
+    ast_func.syntax().ancestors().find_map(ast::Impl::cast).and_then(|i| i.self_ty())
+}
+
+/// Output the real name of `Self` like `MyType<T>`, without the lifetimes.
+fn self_type_without_lifetimes(ast_func: &ast::Fn) -> Option<String> {
+    // Only plain path types (e.g. `Foo<...>`) are supported here.
+    let path_segment = match self_type(ast_func)? {
+        ast::Type::PathType(path_type) => path_type.path()?.segment()?,
+        _ => return None,
+    };
+    let mut name = path_segment.name_ref()?.to_string();
+    // Keep only type arguments; lifetime (and other) generic args are dropped.
+    let generics = path_segment.generic_arg_list().into_iter().flat_map(|list| {
+        list.generic_args()
+            .filter(|generic| matches!(generic, ast::GenericArg::TypeArg(_)))
+            .map(|generic| generic.to_string())
+    });
+    let generics: String = generics.format(", ").to_string();
+    // Re-attach the angle brackets only when at least one type arg remains.
+    if !generics.is_empty() {
+        name.push('<');
+        name.push_str(&generics);
+        name.push('>');
+    }
+    Some(name)
+}
+
+/// Helper function to get the name of the type of `self` without generic arguments
+fn self_partial_type(ast_func: &ast::Fn) -> Option<String> {
+    let mut self_type = self_type(ast_func)?.to_string();
+    // Cut at the first `<` or space so only the bare type name remains.
+    if let Some(idx) = self_type.find(|c| ['<', ' '].contains(&c)) {
+        self_type.truncate(idx);
+    }
+    Some(self_type)
+}
+
+/// Helper function to determine if the function is in a trait implementation
+fn is_in_trait_impl(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> bool {
+    // Resolved semantically: the fn must be an associated item whose container
+    // is an `impl Trait for Type` block.
+    ctx.sema
+        .to_def(ast_func)
+        .and_then(|hir_func| hir_func.as_assoc_item(ctx.db()))
+        .and_then(|assoc_item| assoc_item.containing_trait_impl(ctx.db()))
+        .is_some()
+}
+
+/// Helper function to determine if the function definition is in a trait definition
+fn is_in_trait_def(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> bool {
+    // Like `is_in_trait_impl`, but checks for a containing `trait` block.
+    ctx.sema
+        .to_def(ast_func)
+        .and_then(|hir_func| hir_func.as_assoc_item(ctx.db()))
+        .and_then(|assoc_item| assoc_item.containing_trait(ctx.db()))
+        .is_some()
+}
+
+/// Returns `None` if no `self` at all, `Some(true)` if there is `&mut self` else `Some(false)`
+fn is_ref_mut_self(ast_func: &ast::Fn) -> Option<bool> {
+    let self_param = ast_func.param_list()?.self_param()?;
+    // `true` only for `&mut self`: both the `&` and the `mut` token must be
+    // present (e.g. by-value `mut self` yields `false`).
+    Some(self_param.mut_token().is_some() && self_param.amp_token().is_some())
+}
+
+/// Helper function to determine if a parameter is `&mut`
+fn is_a_ref_mut_param(param: &ast::Param) -> bool {
+    // True only for reference types carrying a `mut` token, i.e. `&mut T`.
+    matches!(param.ty(), Some(ast::Type::RefType(ty)) if ty.mut_token().is_some())
+}
+
+/// Helper function to build the list of `&mut` parameters
+fn ref_mut_params(param_list: &ast::ParamList) -> Vec<String> {
+    // Collects the pattern text (usually the binding name) of every `&mut T`
+    // parameter; the example emits a `let mut x = ;` line for each of them.
+    param_list
+        .params()
+        .filter_map(|param| match is_a_ref_mut_param(&param) {
+            // Maybe better filter the param name (to do this maybe extract a function from
+            // `arguments_from_params`?) in case of a `mut a: &mut T`. Anyway managing most (not
+            // all) cases might be enough, the goal is just to produce a template.
+            true => Some(param.pat()?.to_string()),
+            false => None,
+        })
+        .collect()
+}
+
+/// Helper function to build the comma-separated list of arguments of the function
+fn arguments_from_params(param_list: &ast::ParamList) -> String {
+    let args_iter = param_list.params().map(|param| match param.pat() {
+        // To avoid `mut` in the function call (which would be a nonsense), `Pat` should not be
+        // written as is so its variants must be managed independently. Other variants (for
+        // instance `TuplePat`) could be managed later.
+        Some(ast::Pat::IdentPat(ident_pat)) => match ident_pat.name() {
+            // `&mut T` parameters are passed as `&mut name` at the call site.
+            Some(name) => match is_a_ref_mut_param(&param) {
+                true => format!("&mut {}", name),
+                false => name.to_string(),
+            },
+            // Unnameable patterns become a `_` placeholder.
+            None => "_".to_string(),
+        },
+        _ => "_".to_string(),
+    });
+    args_iter.format(", ").to_string()
+}
+
+/// Helper function to build a function call. `None` if expected `self_name` was not provided
+fn function_call(
+    ast_func: &ast::Fn,
+    param_list: &ast::ParamList,
+    self_name: Option<&str>,
+    is_unsafe: bool,
+) -> Option<String> {
+    let name = ast_func.name()?;
+    let arguments = arguments_from_params(param_list);
+    // Method call on the receiver, associated-function call on the impl type,
+    // or a free-function call — tried in that order.
+    let function_call = if param_list.self_param().is_some() {
+        format!("{}.{}({})", self_name?, name, arguments)
+    } else if let Some(implementation) = self_partial_type(ast_func) {
+        format!("{}::{}({})", implementation, name, arguments)
+    } else {
+        format!("{}({})", name, arguments)
+    };
+    // Unsafe functions must be called inside an `unsafe` block in the doctest.
+    match is_unsafe {
+        true => Some(format!("unsafe {{ {} }}", function_call)),
+        false => Some(function_call),
+    }
+}
+
+/// Helper function to count the parameters including `self`
+fn count_parameters(param_list: &ast::ParamList) -> usize {
+    // `self` is not part of `params()`, so account for it separately.
+    param_list.params().count() + usize::from(param_list.self_param().is_some())
+}
+
+/// Helper function to transform lines of documentation into a Rust code documentation
+fn documentation_from_lines(doc_lines: Vec<String>, indent_level: IndentLevel) -> String {
+    // Each logical line becomes a `///` doc-comment line (empty lines stay a
+    // bare `///`). The item's indentation is appended after every newline so
+    // the text inserted *before* the item keeps everything aligned.
+    let mut result = String::new();
+    for doc_line in doc_lines {
+        result.push_str("///");
+        if !doc_line.is_empty() {
+            result.push(' ');
+            result.push_str(&doc_line);
+        }
+        result.push('\n');
+        result.push_str(&indent_level.to_string());
+    }
+    result
+}
+
+/// Helper function to transform an array of borrowed strings to an owned `Vec<String>`
+fn string_vec_from(string_array: &[&str]) -> Vec<String> {
+    // Simple convenience wrapper used by the section builders above.
+    string_array.iter().map(|&s| s.to_owned()).collect()
+}
+
+/// Helper function to build the import path of the documented item, e.g.
+/// `my_crate::my_module::MyStruct`, used for the example's `use` line
+fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
+    let crate_name = crate_name(ast_func, ctx)?;
+    // Import the impl'd type when inside an impl block, otherwise the function
+    // itself; `*` is a last-resort fallback for unnamed functions.
+    let leaf = self_partial_type(ast_func)
+        .or_else(|| ast_func.name().map(|n| n.to_string()))
+        .unwrap_or_else(|| "*".into());
+    let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into();
+    // `canonical_path` is `None` for items directly in the crate root.
+    match module_def.canonical_path(ctx.db()) {
+        Some(path) => Some(format!("{}::{}::{}", crate_name, path, leaf)),
+        None => Some(format!("{}::{}", crate_name, leaf)),
+    }
+}
+
+/// Helper function to get the return type of a function
+fn return_type(ast_func: &ast::Fn) -> Option<ast::Type> {
+    // `None` when there is no `->` clause (implicit unit return).
+    ast_func.ret_type()?.ty()
+}
+
+/// Helper function to determine if the function returns some data
+fn returns_a_value(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> bool {
+    // `()` and `!` returns produce nothing worth asserting on in an example.
+    ctx.sema
+        .to_def(ast_func)
+        .map(|hir_func| hir_func.ret_type(ctx.db()))
+        .map(|ret_ty| !ret_ty.is_unit() && !ret_ty.is_never())
+        .unwrap_or(false)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+    // The assist anchors on a function *name* (a definition); a call
+    // expression must not offer it.
+    #[test]
+    fn not_applicable_on_function_calls() {
+        check_assist_not_applicable(
+            generate_documentation_template,
+            r#"
+fn hello_world() {}
+fn calls_hello_world() {
+    hello_world$0();
+}
+"#,
+        )
+    }
+
+ #[test]
+ fn not_applicable_in_trait_impl() {
+ check_assist_not_applicable(
+ generate_documentation_template,
+ r#"
+trait MyTrait {}
+struct MyStruct;
+impl MyTrait for MyStruct {
+ fn hello_world$0();
+}
+"#,
+ )
+ }
+
+    // Existing doc comments must not be duplicated or overwritten.
+    #[test]
+    fn not_applicable_if_function_already_documented() {
+        check_assist_not_applicable(
+            generate_documentation_template,
+            r#"
+/// Some documentation here
+pub fn $0documented_function() {}
+"#,
+        );
+    }
+
+    // A plain function with no sections gets just the `.` placeholder intro.
+    #[test]
+    fn supports_noop_function() {
+        check_assist(
+            generate_documentation_template,
+            r#"
+pub fn no$0op() {}
+"#,
+            r#"
+/// .
+pub fn noop() {}
+"#,
+        );
+    }
+
+ #[test]
+ fn is_applicable_if_function_is_private() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+fn priv$0ate() {}
+"#,
+ r#"
+/// .
+fn private() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_doc_example_for_private_fn() {
+ check_assist_not_applicable(
+ generate_doc_example,
+ r#"
+///$0
+fn private() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_a_parameter() {
+ check_assist(
+ generate_doc_example,
+ r#"
+/// $0.
+pub fn noop_with_param(_a: i32) {}
+"#,
+ r#"
+/// .
+///
+/// # Examples
+///
+/// ```
+/// use test::noop_with_param;
+///
+/// noop_with_param(_a);
+/// ```
+pub fn noop_with_param(_a: i32) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn detects_unsafe_function() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub unsafe fn no$0op_unsafe() {}
+"#,
+ r#"
+/// .
+///
+/// # Safety
+///
+/// .
+pub unsafe fn noop_unsafe() {}
+"#,
+ );
+ check_assist(
+ generate_doc_example,
+ r#"
+/// .
+///
+/// # Safety$0
+///
+/// .
+pub unsafe fn noop_unsafe() {}
+"#,
+ r#"
+/// .
+///
+/// # Safety
+///
+/// .
+///
+/// # Examples
+///
+/// ```
+/// use test::noop_unsafe;
+///
+/// unsafe { noop_unsafe() };
+/// ```
+pub unsafe fn noop_unsafe() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_panic_macro_can_panic() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn panic$0s_if(a: bool) {
+ if a {
+ panic!();
+ }
+}
+"#,
+ r#"
+/// .
+///
+/// # Panics
+///
+/// Panics if .
+pub fn panics_if(a: bool) {
+ if a {
+ panic!();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_assert_macro_can_panic() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn $0panics_if_not(a: bool) {
+ assert!(a == true);
+}
+"#,
+ r#"
+/// .
+///
+/// # Panics
+///
+/// Panics if .
+pub fn panics_if_not(a: bool) {
+ assert!(a == true);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_unwrap_can_panic() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn $0panics_if_none(a: Option<()>) {
+ a.unwrap();
+}
+"#,
+ r#"
+/// .
+///
+/// # Panics
+///
+/// Panics if .
+pub fn panics_if_none(a: Option<()>) {
+ a.unwrap();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_expect_can_panic() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn $0panics_if_none2(a: Option<()>) {
+ a.expect("Bouh!");
+}
+"#,
+ r#"
+/// .
+///
+/// # Panics
+///
+/// Panics if .
+pub fn panics_if_none2(a: Option<()>) {
+ a.expect("Bouh!");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn checks_output_in_example() {
+ check_assist(
+ generate_doc_example,
+ r#"
+///$0
+pub fn returns_a_value$0() -> i32 {
+ 0
+}
+"#,
+ r#"
+///
+///
+/// # Examples
+///
+/// ```
+/// use test::returns_a_value;
+///
+/// assert_eq!(returns_a_value(), );
+/// ```
+pub fn returns_a_value() -> i32 {
+ 0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn detects_result_output() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn returns_a_result$0() -> Result<i32, std::io::Error> {
+ Ok(0)
+}
+"#,
+ r#"
+/// .
+///
+/// # Errors
+///
+/// This function will return an error if .
+pub fn returns_a_result() -> Result<i32, std::io::Error> {
+ Ok(0)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn checks_ref_mut_in_example() {
+ check_assist(
+ generate_doc_example,
+ r#"
+///$0
+pub fn modifies_a_value$0(a: &mut i32) {
+ *a = 0;
+}
+"#,
+ r#"
+///
+///
+/// # Examples
+///
+/// ```
+/// use test::modifies_a_value;
+///
+/// let mut a = ;
+/// modifies_a_value(&mut a);
+/// assert_eq!(a, );
+/// ```
+pub fn modifies_a_value(a: &mut i32) {
+ *a = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn stores_result_if_at_least_3_params() {
+ check_assist(
+ generate_doc_example,
+ r#"
+///$0
+pub fn sum3$0(a: i32, b: i32, c: i32) -> i32 {
+ a + b + c
+}
+"#,
+ r#"
+///
+///
+/// # Examples
+///
+/// ```
+/// use test::sum3;
+///
+/// let result = sum3(a, b, c);
+/// assert_eq!(result, );
+/// ```
+pub fn sum3(a: i32, b: i32, c: i32) -> i32 {
+ a + b + c
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_fn_in_mods() {
+ check_assist(
+ generate_doc_example,
+ r#"
+pub mod a {
+ pub mod b {
+ ///$0
+ pub fn noop() {}
+ }
+}
+"#,
+ r#"
+pub mod a {
+ pub mod b {
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use test::a::b::noop;
+ ///
+ /// noop();
+ /// ```
+ pub fn noop() {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_fn_in_impl() {
+ check_assist(
+ generate_doc_example,
+ r#"
+pub struct MyStruct;
+impl MyStruct {
+ ///$0
+ pub fn noop() {}
+}
+"#,
+ r#"
+pub struct MyStruct;
+impl MyStruct {
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use test::MyStruct;
+ ///
+ /// MyStruct::noop();
+ /// ```
+ pub fn noop() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_unsafe_fn_in_trait() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub trait MyTrait {
+ unsafe fn unsafe_funct$0ion_trait();
+}
+"#,
+ r#"
+pub trait MyTrait {
+ /// .
+ ///
+ /// # Safety
+ ///
+ /// .
+ unsafe fn unsafe_function_trait();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_fn_in_trait_with_default_panicking() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub trait MyTrait {
+ fn function_trait_with_$0default_panicking() {
+ panic!()
+ }
+}
+"#,
+ r#"
+pub trait MyTrait {
+ /// .
+ ///
+ /// # Panics
+ ///
+ /// Panics if .
+ fn function_trait_with_default_panicking() {
+ panic!()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_fn_in_trait_returning_result() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub trait MyTrait {
+ fn function_tr$0ait_returning_result() -> Result<(), std::io::Error>;
+}
+"#,
+ r#"
+pub trait MyTrait {
+ /// .
+ ///
+ /// # Errors
+ ///
+ /// This function will return an error if .
+ fn function_trait_returning_result() -> Result<(), std::io::Error>;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn detects_new() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct String(u8);
+impl String {
+ pub fn new$0(x: u8) -> String {
+ String(x)
+ }
+}
+"#,
+ r#"
+pub struct String(u8);
+impl String {
+ /// Creates a new [`String`].
+ pub fn new(x: u8) -> String {
+ String(x)
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<T> {
+ pub x: T,
+}
+impl<T> MyGenericStruct<T> {
+ pub fn new$0(x: T) -> MyGenericStruct<T> {
+ MyGenericStruct { x }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<T> {
+ pub x: T,
+}
+impl<T> MyGenericStruct<T> {
+ /// Creates a new [`MyGenericStruct<T>`].
+ pub fn new(x: T) -> MyGenericStruct<T> {
+ MyGenericStruct { x }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn removes_one_lifetime_from_description() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, T> {
+ pub x: &'a T,
+}
+impl<'a, T> MyGenericStruct<'a, T> {
+ pub fn new$0(x: &'a T) -> Self {
+ MyGenericStruct { x }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, T> {
+ pub x: &'a T,
+}
+impl<'a, T> MyGenericStruct<'a, T> {
+ /// Creates a new [`MyGenericStruct<T>`].
+ pub fn new(x: &'a T) -> Self {
+ MyGenericStruct { x }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn removes_all_lifetimes_from_description() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, 'b, T> {
+ pub x: &'a T,
+ pub y: &'b T,
+}
+impl<'a, 'b, T> MyGenericStruct<'a, 'b, T> {
+ pub fn new$0(x: &'a T, y: &'b T) -> Self {
+ MyGenericStruct { x, y }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, 'b, T> {
+ pub x: &'a T,
+ pub y: &'b T,
+}
+impl<'a, 'b, T> MyGenericStruct<'a, 'b, T> {
+ /// Creates a new [`MyGenericStruct<T>`].
+ pub fn new(x: &'a T, y: &'b T) -> Self {
+ MyGenericStruct { x, y }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn removes_all_lifetimes_and_brackets_from_description() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, 'b> {
+ pub x: &'a usize,
+ pub y: &'b usize,
+}
+impl<'a, 'b> MyGenericStruct<'a, 'b> {
+ pub fn new$0(x: &'a usize, y: &'b usize) -> Self {
+ MyGenericStruct { x, y }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, 'b> {
+ pub x: &'a usize,
+ pub y: &'b usize,
+}
+impl<'a, 'b> MyGenericStruct<'a, 'b> {
+ /// Creates a new [`MyGenericStruct`].
+ pub fn new(x: &'a usize, y: &'b usize) -> Self {
+ MyGenericStruct { x, y }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn detects_new_with_self() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct2<T> {
+ pub x: T,
+}
+impl<T> MyGenericStruct2<T> {
+ pub fn new$0(x: T) -> Self {
+ MyGenericStruct2 { x }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct2<T> {
+ pub x: T,
+}
+impl<T> MyGenericStruct2<T> {
+ /// Creates a new [`MyGenericStruct2<T>`].
+ pub fn new(x: T) -> Self {
+ MyGenericStruct2 { x }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_method_call() {
+ check_assist(
+ generate_doc_example,
+ r#"
+impl<T> MyGenericStruct<T> {
+ ///$0
+ pub fn consume(self) {}
+}
+"#,
+ r#"
+impl<T> MyGenericStruct<T> {
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use test::MyGenericStruct;
+ ///
+ /// let my_generic_struct = ;
+ /// my_generic_struct.consume();
+ /// ```
+ pub fn consume(self) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn checks_modified_self_param() {
+ check_assist(
+ generate_doc_example,
+ r#"
+impl<T> MyGenericStruct<T> {
+ ///$0
+ pub fn modify(&mut self, new_value: T) {
+ self.x = new_value;
+ }
+}
+"#,
+ r#"
+impl<T> MyGenericStruct<T> {
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use test::MyGenericStruct;
+ ///
+ /// let mut my_generic_struct = ;
+ /// my_generic_struct.modify(new_value);
+ /// assert_eq!(my_generic_struct, );
+ /// ```
+ pub fn modify(&mut self, new_value: T) {
+ self.x = new_value;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generates_intro_for_getters() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn speed$0(&self) -> f32 { 0.0 }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Returns the speed of this [`S`].
+ pub fn speed(&self) -> f32 { 0.0 }
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn data$0(&self) -> &[u8] { &[] }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Returns a reference to the data of this [`S`].
+ pub fn data(&self) -> &[u8] { &[] }
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn data$0(&mut self) -> &mut [u8] { &mut [] }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Returns a mutable reference to the data of this [`S`].
+ pub fn data(&mut self) -> &mut [u8] { &mut [] }
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn data_mut$0(&mut self) -> &mut [u8] { &mut [] }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Returns a mutable reference to the data of this [`S`].
+ pub fn data_mut(&mut self) -> &mut [u8] { &mut [] }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_getter_intro_for_prefixed_methods() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn as_bytes$0(&self) -> &[u8] { &[] }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// .
+ pub fn as_bytes(&self) -> &[u8] { &[] }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generates_intro_for_setters() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn set_data$0(&mut self, data: Vec<u8>) {}
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Sets the data of this [`S`].
+ pub fn set_data(&mut self, data: Vec<u8>) {}
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn set_domain_name$0(&mut self, name: String) {}
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Sets the domain name of this [`S`].
+ pub fn set_domain_name(&mut self, name: String) {}
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
new file mode 100644
index 000000000..52d27d8a7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
@@ -0,0 +1,316 @@
+use ide_db::assists::GroupLabel;
+use stdx::to_lower_snake_case;
+use syntax::ast::HasVisibility;
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{
+ utils::{add_method_to_adt, find_struct_impl},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_enum_is_method
+//
+// Generate an `is_` method for this enum variant.
+//
+// ```
+// enum Version {
+// Undefined,
+// Minor$0,
+// Major,
+// }
+// ```
+// ->
+// ```
+// enum Version {
+// Undefined,
+// Minor,
+// Major,
+// }
+//
+// impl Version {
+// /// Returns `true` if the version is [`Minor`].
+// ///
+// /// [`Minor`]: Version::Minor
+// #[must_use]
+// fn is_minor(&self) -> bool {
+// matches!(self, Self::Minor)
+// }
+// }
+// ```
+pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let variant_name = variant.name()?;
+ let parent_enum = ast::Adt::Enum(variant.parent_enum());
+ let pattern_suffix = match variant.kind() {
+ ast::StructKind::Record(_) => " { .. }",
+ ast::StructKind::Tuple(_) => "(..)",
+ ast::StructKind::Unit => "",
+ };
+
+ let enum_name = parent_enum.name()?;
+ let enum_lowercase_name = to_lower_snake_case(&enum_name.to_string()).replace('_', " ");
+ let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
+
+ // Return early if we've found an existing new fn
+ let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
+
+ let target = variant.syntax().text_range();
+ acc.add_group(
+ &GroupLabel("Generate an `is_`,`as_`, or `try_into_` for this enum variant".to_owned()),
+ AssistId("generate_enum_is_method", AssistKind::Generate),
+ "Generate an `is_` method for this enum variant",
+ target,
+ |builder| {
+ let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let method = format!(
+ " /// Returns `true` if the {} is [`{variant}`].
+ ///
+ /// [`{variant}`]: {}::{variant}
+ #[must_use]
+ {}fn {}(&self) -> bool {{
+ matches!(self, Self::{variant}{})
+ }}",
+ enum_lowercase_name,
+ enum_name,
+ vis,
+ fn_name,
+ pattern_suffix,
+ variant = variant_name
+ );
+
+ add_method_to_adt(builder, &parent_enum, impl_def, &method);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_enum_is_from_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}"#,
+ r#"enum Variant {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_already_implemented() {
+ check_assist_not_applicable(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}
+
+impl Variant {
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_from_tuple_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor(u32)$0,
+ Major,
+}"#,
+ r#"enum Variant {
+ Undefined,
+ Minor(u32),
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor(..))
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_from_record_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor { foo: i32 }$0,
+ Major,
+}"#,
+ r#"enum Variant {
+ Undefined,
+ Minor { foo: i32 },
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor { .. })
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_from_variant_with_one_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"enum Variant { Undefi$0ned }"#,
+ r#"
+enum Variant { Undefined }
+
+impl Variant {
+ /// Returns `true` if the variant is [`Undefined`].
+ ///
+ /// [`Undefined`]: Variant::Undefined
+ #[must_use]
+ fn is_undefined(&self) -> bool {
+ matches!(self, Self::Undefined)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_from_variant_with_visibility_marker() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+pub(crate) enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}"#,
+ r#"pub(crate) enum Variant {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ pub(crate) fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generate_enum_is_from_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor,
+ Major$0,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}"#,
+ r#"enum Variant {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+
+ /// Returns `true` if the variant is [`Major`].
+ ///
+ /// [`Major`]: Variant::Major
+ #[must_use]
+ fn is_major(&self) -> bool {
+ matches!(self, Self::Major)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_variant_names() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum GeneratorState {
+ Yielded,
+ Complete$0,
+ Major,
+}"#,
+ r#"enum GeneratorState {
+ Yielded,
+ Complete,
+ Major,
+}
+
+impl GeneratorState {
+ /// Returns `true` if the generator state is [`Complete`].
+ ///
+ /// [`Complete`]: GeneratorState::Complete
+ #[must_use]
+ fn is_complete(&self) -> bool {
+ matches!(self, Self::Complete)
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
new file mode 100644
index 000000000..b19aa0f65
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
@@ -0,0 +1,342 @@
+use ide_db::assists::GroupLabel;
+use itertools::Itertools;
+use stdx::to_lower_snake_case;
+use syntax::ast::HasVisibility;
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{
+ utils::{add_method_to_adt, find_struct_impl},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_enum_try_into_method
+//
+// Generate a `try_into_` method for this enum variant.
+//
+// ```
+// enum Value {
+// Number(i32),
+// Text(String)$0,
+// }
+// ```
+// ->
+// ```
+// enum Value {
+// Number(i32),
+// Text(String),
+// }
+//
+// impl Value {
+// fn try_into_text(self) -> Result<String, Self> {
+// if let Self::Text(v) = self {
+// Ok(v)
+// } else {
+// Err(self)
+// }
+// }
+// }
+// ```
+pub(crate) fn generate_enum_try_into_method(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ generate_enum_projection_method(
+ acc,
+ ctx,
+ "generate_enum_try_into_method",
+ "Generate a `try_into_` method for this enum variant",
+ ProjectionProps {
+ fn_name_prefix: "try_into",
+ self_param: "self",
+ return_prefix: "Result<",
+ return_suffix: ", Self>",
+ happy_case: "Ok",
+ sad_case: "Err(self)",
+ },
+ )
+}
+
+// Assist: generate_enum_as_method
+//
+// Generate an `as_` method for this enum variant.
+//
+// ```
+// enum Value {
+// Number(i32),
+// Text(String)$0,
+// }
+// ```
+// ->
+// ```
+// enum Value {
+// Number(i32),
+// Text(String),
+// }
+//
+// impl Value {
+// fn as_text(&self) -> Option<&String> {
+// if let Self::Text(v) = self {
+// Some(v)
+// } else {
+// None
+// }
+// }
+// }
+// ```
+pub(crate) fn generate_enum_as_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ generate_enum_projection_method(
+ acc,
+ ctx,
+ "generate_enum_as_method",
+ "Generate an `as_` method for this enum variant",
+ ProjectionProps {
+ fn_name_prefix: "as",
+ self_param: "&self",
+ return_prefix: "Option<&",
+ return_suffix: ">",
+ happy_case: "Some",
+ sad_case: "None",
+ },
+ )
+}
+
+struct ProjectionProps {
+ fn_name_prefix: &'static str,
+ self_param: &'static str,
+ return_prefix: &'static str,
+ return_suffix: &'static str,
+ happy_case: &'static str,
+ sad_case: &'static str,
+}
+
+fn generate_enum_projection_method(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ assist_id: &'static str,
+ assist_description: &str,
+ props: ProjectionProps,
+) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let variant_name = variant.name()?;
+ let parent_enum = ast::Adt::Enum(variant.parent_enum());
+
+ let (pattern_suffix, field_type, bound_name) = match variant.kind() {
+ ast::StructKind::Record(record) => {
+ let (field,) = record.fields().collect_tuple()?;
+ let name = field.name()?.to_string();
+ let ty = field.ty()?;
+ let pattern_suffix = format!(" {{ {} }}", name);
+ (pattern_suffix, ty, name)
+ }
+ ast::StructKind::Tuple(tuple) => {
+ let (field,) = tuple.fields().collect_tuple()?;
+ let ty = field.ty()?;
+ ("(v)".to_owned(), ty, "v".to_owned())
+ }
+ ast::StructKind::Unit => return None,
+ };
+
+ let fn_name =
+ format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text()));
+
+ // Return early if we've found an existing new fn
+ let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
+
+ let target = variant.syntax().text_range();
+ acc.add_group(
+ &GroupLabel("Generate an `is_`,`as_`, or `try_into_` for this enum variant".to_owned()),
+ AssistId(assist_id, AssistKind::Generate),
+ assist_description,
+ target,
+ |builder| {
+ let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let method = format!(
+ " {0}fn {1}({2}) -> {3}{4}{5} {{
+ if let Self::{6}{7} = self {{
+ {8}({9})
+ }} else {{
+ {10}
+ }}
+ }}",
+ vis,
+ fn_name,
+ props.self_param,
+ props.return_prefix,
+ field_type.syntax(),
+ props.return_suffix,
+ variant_name,
+ pattern_suffix,
+ props.happy_case,
+ bound_name,
+ props.sad_case,
+ );
+
+ add_method_to_adt(builder, &parent_enum, impl_def, &method);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_enum_try_into_tuple_variant() {
+ check_assist(
+ generate_enum_try_into_method,
+ r#"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}"#,
+ r#"enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text(v) = self {
+ Ok(v)
+ } else {
+ Err(self)
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_already_implemented() {
+ check_assist_not_applicable(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text(String)$0,
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text(v) = self {
+ Ok(v)
+ } else {
+ Err(self)
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_unit_variant() {
+ check_assist_not_applicable(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text(String),
+ Unit$0,
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_record_with_multiple_fields() {
+ check_assist_not_applicable(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text(String),
+ Both { first: i32, second: String }$0,
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_tuple_with_multiple_fields() {
+ check_assist_not_applicable(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text(String, String)$0,
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_record_variant() {
+ check_assist(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text { text: String }$0,
+}"#,
+ r#"enum Value {
+ Number(i32),
+ Text { text: String },
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text { text } = self {
+ Ok(text)
+ } else {
+ Err(self)
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_as_tuple_variant() {
+ check_assist(
+ generate_enum_as_method,
+ r#"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}"#,
+ r#"enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn as_text(&self) -> Option<&String> {
+ if let Self::Text(v) = self {
+ Some(v)
+ } else {
+ None
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_as_record_variant() {
+ check_assist(
+ generate_enum_as_method,
+ r#"enum Value {
+ Number(i32),
+ Text { text: String }$0,
+}"#,
+ r#"enum Value {
+ Number(i32),
+ Text { text: String },
+}
+
+impl Value {
+ fn as_text(&self) -> Option<&String> {
+ if let Self::Text { text } = self {
+ Some(text)
+ } else {
+ None
+ }
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
new file mode 100644
index 000000000..4461fbd5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -0,0 +1,227 @@
+use hir::{HasSource, InFile};
+use ide_db::assists::{AssistId, AssistKind};
+use syntax::{
+ ast::{self, edit::IndentLevel},
+ AstNode, TextSize,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: generate_enum_variant
+//
+// Adds a variant to an enum.
+//
+// ```
+// enum Countries {
+// Ghana,
+// }
+//
+// fn main() {
+// let country = Countries::Lesotho$0;
+// }
+// ```
+// ->
+// ```
+// enum Countries {
+// Ghana,
+// Lesotho,
+// }
+//
+// fn main() {
+// let country = Countries::Lesotho;
+// }
+// ```
+pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
+ let path = path_expr.path()?;
+
+ if ctx.sema.resolve_path(&path).is_some() {
+ // No need to generate anything if the path resolves
+ return None;
+ }
+
+ let name_ref = path.segment()?.name_ref()?;
+ if name_ref.text().starts_with(char::is_lowercase) {
+ // Don't suggest generating variant if the name starts with a lowercase letter
+ return None;
+ }
+
+ if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e)))) =
+ ctx.sema.resolve_path(&path.qualifier()?)
+ {
+ let target = path.syntax().text_range();
+ return add_variant_to_accumulator(acc, ctx, target, e, &name_ref);
+ }
+
+ None
+}
+
+fn add_variant_to_accumulator(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ target: syntax::TextRange,
+ adt: hir::Enum,
+ name_ref: &ast::NameRef,
+) -> Option<()> {
+ let db = ctx.db();
+ let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
+ let enum_indent = IndentLevel::from_node(&enum_node.syntax());
+
+ let variant_list = enum_node.variant_list()?;
+ let offset = variant_list.syntax().text_range().end() - TextSize::of('}');
+ let empty_enum = variant_list.variants().next().is_none();
+
+ acc.add(
+ AssistId("generate_enum_variant", AssistKind::Generate),
+ "Generate variant",
+ target,
+ |builder| {
+ builder.edit_file(file_id.original_file(db));
+ let text = format!(
+ "{maybe_newline}{indent_1}{name},\n{enum_indent}",
+ maybe_newline = if empty_enum { "\n" } else { "" },
+ indent_1 = IndentLevel(1),
+ name = name_ref,
+ enum_indent = enum_indent
+ );
+ builder.insert(offset, text)
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn generate_basic_enum_variant_in_empty_enum() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0
+}
+",
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Bar
+}
+",
+ )
+ }
+
+ #[test]
+ fn generate_basic_enum_variant_in_non_empty_enum() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Baz$0
+}
+",
+ r"
+enum Foo {
+ Bar,
+ Baz,
+}
+fn main() {
+ Foo::Baz
+}
+",
+ )
+ }
+
+ #[test]
+ fn generate_basic_enum_variant_in_different_file() {
+ check_assist(
+ generate_enum_variant,
+ r"
+//- /main.rs
+mod foo;
+use foo::Foo;
+
+fn main() {
+ Foo::Baz$0
+}
+
+//- /foo.rs
+enum Foo {
+ Bar,
+}
+",
+ r"
+enum Foo {
+ Bar,
+ Baz,
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_existing_variant() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Bar$0
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_lowercase() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::new$0
+}
+",
+ )
+ }
+
+ #[test]
+ fn indentation_level_is_correct() {
+ check_assist(
+ generate_enum_variant,
+ r"
+mod m {
+ enum Foo {
+ Bar,
+ }
+}
+fn main() {
+ m::Foo::Baz$0
+}
+",
+ r"
+mod m {
+ enum Foo {
+ Bar,
+ Baz,
+ }
+}
+fn main() {
+ m::Foo::Baz
+}
+",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
new file mode 100644
index 000000000..507ea012b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
@@ -0,0 +1,310 @@
+use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{utils::generate_trait_impl_text, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: generate_from_impl_for_enum
+//
+// Adds a From impl for this enum variant with one tuple field.
+//
+// ```
+// enum A { $0One(u32) }
+// ```
+// ->
+// ```
+// enum A { One(u32) }
+//
+// impl From<u32> for A {
+// fn from(v: u32) -> Self {
+// Self::One(v)
+// }
+// }
+// ```
+pub(crate) fn generate_from_impl_for_enum(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let variant_name = variant.name()?;
+ let enum_ = ast::Adt::Enum(variant.parent_enum());
+ let (field_name, field_type) = match variant.kind() {
+ ast::StructKind::Tuple(field_list) => {
+ if field_list.fields().count() != 1 {
+ return None;
+ }
+ (None, field_list.fields().next()?.ty()?)
+ }
+ ast::StructKind::Record(field_list) => {
+ if field_list.fields().count() != 1 {
+ return None;
+ }
+ let field = field_list.fields().next()?;
+ (Some(field.name()?), field.ty()?)
+ }
+ ast::StructKind::Unit => return None,
+ };
+
+ if existing_from_impl(&ctx.sema, &variant).is_some() {
+ cov_mark::hit!(test_add_from_impl_already_exists);
+ return None;
+ }
+
+ let target = variant.syntax().text_range();
+ acc.add(
+ AssistId("generate_from_impl_for_enum", AssistKind::Generate),
+ "Generate `From` impl for this enum variant",
+ target,
+ |edit| {
+ let start_offset = variant.parent_enum().syntax().text_range().end();
+ let from_trait = format!("From<{}>", field_type.syntax());
+ let impl_code = if let Some(name) = field_name {
+ format!(
+ r#" fn from({0}: {1}) -> Self {{
+ Self::{2} {{ {0} }}
+ }}"#,
+ name.text(),
+ field_type.syntax(),
+ variant_name,
+ )
+ } else {
+ format!(
+ r#" fn from(v: {}) -> Self {{
+ Self::{}(v)
+ }}"#,
+ field_type.syntax(),
+ variant_name,
+ )
+ };
+ let from_impl = generate_trait_impl_text(&enum_, &from_trait, &impl_code);
+ edit.insert(start_offset, from_impl);
+ },
+ )
+}
+
+fn existing_from_impl(
+ sema: &'_ hir::Semantics<'_, RootDatabase>,
+ variant: &ast::Variant,
+) -> Option<()> {
+ let variant = sema.to_def(variant)?;
+ let enum_ = variant.parent_enum(sema.db);
+ let krate = enum_.module(sema.db).krate();
+
+ let from_trait = FamousDefs(sema, krate).core_convert_From()?;
+
+ let enum_type = enum_.ty(sema.db);
+
+ let wrapped_type = variant.fields(sema.db).get(0)?.ty(sema.db);
+
+ if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) {
+ Some(())
+ } else {
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_from_impl_for_enum() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(u32) }
+"#,
+ r#"
+enum A { One(u32) }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_from_impl_for_enum_complicated_path() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(foo::bar::baz::Boo) }
+"#,
+ r#"
+enum A { One(foo::bar::baz::Boo) }
+
+impl From<foo::bar::baz::Boo> for A {
+ fn from(v: foo::bar::baz::Boo) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_no_element() {
+ check_assist_not_applicable(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_more_than_one_element_in_tuple() {
+ check_assist_not_applicable(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(u32, String) }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_struct_variant() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One { x: u32 } }
+"#,
+ r#"
+enum A { One { x: u32 } }
+
+impl From<u32> for A {
+ fn from(x: u32) -> Self {
+ Self::One { x }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_already_exists() {
+ cov_mark::check!(test_add_from_impl_already_exists);
+ check_assist_not_applicable(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(u32), }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_different_variant_impl_exists() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(u32), Two(String), }
+
+impl From<String> for A {
+ fn from(v: String) -> Self {
+ A::Two(v)
+ }
+}
+
+pub trait From<T> {
+ fn from(T) -> Self;
+}
+"#,
+ r#"
+enum A { One(u32), Two(String), }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+
+impl From<String> for A {
+ fn from(v: String) -> Self {
+ A::Two(v)
+ }
+}
+
+pub trait From<T> {
+ fn from(T) -> Self;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_static_str() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(&'static str) }
+"#,
+ r#"
+enum A { One(&'static str) }
+
+impl From<&'static str> for A {
+ fn from(v: &'static str) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_generic_enum() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum Generic<T, U: Clone> { $0One(T), Two(U) }
+"#,
+ r#"
+enum Generic<T, U: Clone> { One(T), Two(U) }
+
+impl<T, U: Clone> From<T> for Generic<T, U> {
+ fn from(v: T) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_with_lifetime() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum Generic<'a> { $0One(&'a i32) }
+"#,
+ r#"
+enum Generic<'a> { One(&'a i32) }
+
+impl<'a> From<&'a i32> for Generic<'a> {
+ fn from(v: &'a i32) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
new file mode 100644
index 000000000..d564a0540
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -0,0 +1,1787 @@
+use hir::{HasSource, HirDisplay, Module, Semantics, TypeInfo};
+use ide_db::{
+ base_db::FileId,
+ defs::{Definition, NameRefClass},
+ famous_defs::FamousDefs,
+ FxHashMap, FxHashSet, RootDatabase, SnippetCap,
+};
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, AstNode, CallExpr, HasArgList, HasModuleItem,
+ },
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ utils::convert_reference_type,
+ utils::{find_struct_impl, render_snippet, Cursor},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_function
+//
+// Adds a stub function with a signature matching the function under the cursor.
+//
+// ```
+// struct Baz;
+// fn baz() -> Baz { Baz }
+// fn foo() {
+// bar$0("", baz());
+// }
+//
+// ```
+// ->
+// ```
+// struct Baz;
+// fn baz() -> Baz { Baz }
+// fn foo() {
+// bar("", baz());
+// }
+//
+// fn bar(arg: &str, baz: Baz) ${0:-> _} {
+// todo!()
+// }
+//
+// ```
+pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ gen_fn(acc, ctx).or_else(|| gen_method(acc, ctx))
+}
+
+fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
+ let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?;
+ let path = path_expr.path()?;
+ let name_ref = path.segment()?.name_ref()?;
+ if ctx.sema.resolve_path(&path).is_some() {
+ // The function call already resolves, no need to add a function
+ return None;
+ }
+
+ let fn_name = &*name_ref.text();
+ let target_module;
+ let mut adt_name = None;
+
+ let (target, file, insert_offset) = match path.qualifier() {
+ Some(qualifier) => match ctx.sema.resolve_path(&qualifier) {
+ Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => {
+ target_module = Some(module);
+ get_fn_target(ctx, &target_module, call.clone())?
+ }
+ Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => {
+ if let hir::Adt::Enum(_) = adt {
+ // Don't suggest generating function if the name starts with an uppercase letter
+ if name_ref.text().starts_with(char::is_uppercase) {
+ return None;
+ }
+ }
+
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let module = adt.module(ctx.sema.db);
+ target_module = if current_module == module { None } else { Some(module) };
+ if current_module.krate() != module.krate() {
+ return None;
+ }
+ let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
+ let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?;
+ adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
+ (target, file, insert_offset)
+ }
+ _ => {
+ return None;
+ }
+ },
+ _ => {
+ target_module = None;
+ get_fn_target(ctx, &target_module, call.clone())?
+ }
+ };
+ let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
+ let text_range = call.syntax().text_range();
+ let label = format!("Generate {} function", function_builder.fn_name);
+ add_func_to_accumulator(
+ acc,
+ ctx,
+ text_range,
+ function_builder,
+ insert_offset,
+ file,
+ adt_name,
+ label,
+ )
+}
+
+fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+ if ctx.sema.resolve_method_call(&call).is_some() {
+ return None;
+ }
+
+ let fn_name = call.name_ref()?;
+ let adt = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references().as_adt()?;
+
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let target_module = adt.module(ctx.sema.db);
+
+ if current_module.krate() != target_module.krate() {
+ return None;
+ }
+ let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
+ let (target, insert_offset) = get_method_target(ctx, &target_module, &impl_)?;
+ let function_builder =
+ FunctionBuilder::from_method_call(ctx, &call, &fn_name, target_module, target)?;
+ let text_range = call.syntax().text_range();
+ let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
+ let label = format!("Generate {} method", function_builder.fn_name);
+ add_func_to_accumulator(
+ acc,
+ ctx,
+ text_range,
+ function_builder,
+ insert_offset,
+ file,
+ adt_name,
+ label,
+ )
+}
+
+fn add_func_to_accumulator(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ text_range: TextRange,
+ function_builder: FunctionBuilder,
+ insert_offset: TextSize,
+ file: FileId,
+ adt_name: Option<hir::Name>,
+ label: String,
+) -> Option<()> {
+ acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| {
+ let function_template = function_builder.render();
+ let mut func = function_template.to_string(ctx.config.snippet_cap);
+ if let Some(name) = adt_name {
+ func = format!("\nimpl {} {{\n{}\n}}", name, func);
+ }
+ builder.edit_file(file);
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, insert_offset, func),
+ None => builder.insert(insert_offset, func),
+ }
+ })
+}
+
+fn get_adt_source(
+ ctx: &AssistContext<'_>,
+ adt: &hir::Adt,
+ fn_name: &str,
+) -> Option<(Option<ast::Impl>, FileId)> {
+ let range = adt.source(ctx.sema.db)?.syntax().original_file_range(ctx.sema.db);
+ let file = ctx.sema.parse(range.file_id);
+ let adt_source =
+ ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
+ find_struct_impl(ctx, &adt_source, fn_name).map(|impl_| (impl_, range.file_id))
+}
+
+struct FunctionTemplate {
+ leading_ws: String,
+ fn_def: ast::Fn,
+ ret_type: Option<ast::RetType>,
+ should_focus_return_type: bool,
+ trailing_ws: String,
+ tail_expr: ast::Expr,
+}
+
+impl FunctionTemplate {
+ fn to_string(&self, cap: Option<SnippetCap>) -> String {
+ let f = match cap {
+ Some(cap) => {
+ let cursor = if self.should_focus_return_type {
+ // Focus the return type if there is one
+ match self.ret_type {
+ Some(ref ret_type) => ret_type.syntax(),
+ None => self.tail_expr.syntax(),
+ }
+ } else {
+ self.tail_expr.syntax()
+ };
+ render_snippet(cap, self.fn_def.syntax(), Cursor::Replace(cursor))
+ }
+ None => self.fn_def.to_string(),
+ };
+
+ format!("{}{}{}", self.leading_ws, f, self.trailing_ws)
+ }
+}
+
+struct FunctionBuilder {
+ target: GeneratedFunctionTarget,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ params: ast::ParamList,
+ ret_type: Option<ast::RetType>,
+ should_focus_return_type: bool,
+ needs_pub: bool,
+ is_async: bool,
+}
+
impl FunctionBuilder {
    /// Prepares a generated function that matches `call`.
    /// The function is generated in `target_module` or next to `call`
    fn from_call(
        ctx: &AssistContext<'_>,
        call: &ast::CallExpr,
        fn_name: &str,
        target_module: Option<hir::Module>,
        target: GeneratedFunctionTarget,
    ) -> Option<Self> {
        // An explicit target module means the function is generated in a module
        // other than the call site's, so it needs `pub(crate)` visibility.
        let needs_pub = target_module.is_some();
        let target_module =
            target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
        let fn_name = make::name(fn_name);
        let (type_params, params) =
            fn_args(ctx, target_module, ast::CallableExpr::Call(call.clone()))?;

        // If the call is directly awaited (`call().await`), generate an `async fn`.
        let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
        let is_async = await_expr.is_some();

        let (ret_type, should_focus_return_type) =
            make_return_type(ctx, &ast::Expr::CallExpr(call.clone()), target_module);

        Some(Self {
            target,
            fn_name,
            type_params,
            params,
            ret_type,
            should_focus_return_type,
            needs_pub,
            is_async,
        })
    }

    /// Prepares a generated method matching the method call `call`, to be
    /// inserted into an impl block of the receiver's type.
    fn from_method_call(
        ctx: &AssistContext<'_>,
        call: &ast::MethodCallExpr,
        name: &ast::NameRef,
        target_module: Module,
        target: GeneratedFunctionTarget,
    ) -> Option<Self> {
        // `pub(crate)` is only needed when the call site's module is not nested
        // inside the target module (the method crosses a module boundary).
        let needs_pub =
            !module_is_descendant(&ctx.sema.scope(call.syntax())?.module(), &target_module, ctx);
        let fn_name = make::name(&name.text());
        let (type_params, params) =
            fn_args(ctx, target_module, ast::CallableExpr::MethodCall(call.clone()))?;

        // If the call is directly awaited (`recv.call().await`), generate an `async fn`.
        let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
        let is_async = await_expr.is_some();

        let (ret_type, should_focus_return_type) =
            make_return_type(ctx, &ast::Expr::MethodCallExpr(call.clone()), target_module);

        Some(Self {
            target,
            fn_name,
            type_params,
            params,
            ret_type,
            should_focus_return_type,
            needs_pub,
            is_async,
        })
    }

    /// Renders the builder into a [`FunctionTemplate`]: the final `fn` AST plus
    /// the whitespace required on either side of it at the insertion point.
    fn render(self) -> FunctionTemplate {
        let placeholder_expr = make::ext::expr_todo();
        let fn_body = make::block_expr(vec![], Some(placeholder_expr));
        let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None };
        let mut fn_def = make::fn_(
            visibility,
            self.fn_name,
            self.type_params,
            self.params,
            fn_body,
            self.ret_type,
            self.is_async,
        );
        let leading_ws;
        let trailing_ws;

        match self.target {
            GeneratedFunctionTarget::BehindItem(it) => {
                // Appending after an existing item: separate with a blank line
                // and match that item's indentation.
                let indent = IndentLevel::from_node(&it);
                leading_ws = format!("\n\n{}", indent);
                fn_def = fn_def.indent(indent);
                trailing_ws = String::new();
            }
            GeneratedFunctionTarget::InEmptyItemList(it) => {
                // Inserting into an empty `{}` list: indent one level deeper than
                // the list node, and re-emit the closing brace's indentation.
                let indent = IndentLevel::from_node(&it);
                leading_ws = format!("\n{}", indent + 1);
                fn_def = fn_def.indent(indent + 1);
                trailing_ws = format!("\n{}", indent);
            }
        };

        FunctionTemplate {
            leading_ws,
            ret_type: fn_def.ret_type(),
            // PANIC: we guarantee we always create a function body with a tail expr
            tail_expr: fn_def.body().unwrap().tail_expr().unwrap(),
            should_focus_return_type: self.should_focus_return_type,
            fn_def,
            trailing_ws,
        }
    }
}
+
+/// Makes an optional return type along with whether the return type should be focused by the cursor.
+/// If we cannot infer what the return type should be, we create a placeholder type.
+///
+/// The rule for whether we focus a return type or not (and thus focus the function body),
+/// is rather simple:
+/// * If we could *not* infer what the return type should be, focus it (so the user can fill-in
+/// the correct return type).
+/// * If we could infer the return type, don't focus it (and thus focus the function body) so the
+/// user can change the `todo!` function body.
+fn make_return_type(
+ ctx: &AssistContext<'_>,
+ call: &ast::Expr,
+ target_module: Module,
+) -> (Option<ast::RetType>, bool) {
+ let (ret_ty, should_focus_return_type) = {
+ match ctx.sema.type_of_expr(call).map(TypeInfo::original) {
+ Some(ty) if ty.is_unknown() => (Some(make::ty_placeholder()), true),
+ None => (Some(make::ty_placeholder()), true),
+ Some(ty) if ty.is_unit() => (None, false),
+ Some(ty) => {
+ let rendered = ty.display_source_code(ctx.db(), target_module.into());
+ match rendered {
+ Ok(rendered) => (Some(make::ty(&rendered)), false),
+ Err(_) => (Some(make::ty_placeholder()), true),
+ }
+ }
+ }
+ };
+ let ret_type = ret_ty.map(make::ret_type);
+ (ret_type, should_focus_return_type)
+}
+
+fn get_fn_target(
+ ctx: &AssistContext<'_>,
+ target_module: &Option<Module>,
+ call: CallExpr,
+) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> {
+ let mut file = ctx.file_id();
+ let target = match target_module {
+ Some(target_module) => {
+ let module_source = target_module.definition_source(ctx.db());
+ let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?;
+ file = in_file;
+ target
+ }
+ None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
+ };
+ Some((target.clone(), file, get_insert_offset(&target)))
+}
+
+fn get_method_target(
+ ctx: &AssistContext<'_>,
+ target_module: &Module,
+ impl_: &Option<ast::Impl>,
+) -> Option<(GeneratedFunctionTarget, TextSize)> {
+ let target = match impl_ {
+ Some(impl_) => next_space_for_fn_in_impl(impl_)?,
+ None => {
+ next_space_for_fn_in_module(ctx.sema.db, &target_module.definition_source(ctx.sema.db))?
+ .1
+ }
+ };
+ Some((target.clone(), get_insert_offset(&target)))
+}
+
+fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
+ match &target {
+ GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
+ GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
+ }
+}
+
/// Where the newly generated function will be inserted.
#[derive(Clone)]
enum GeneratedFunctionTarget {
    /// Insert after this existing item, as a sibling.
    BehindItem(SyntaxNode),
    /// Insert inside this empty item list (e.g. `mod m {}` / `impl T {}`),
    /// right after its opening `{`.
    InEmptyItemList(SyntaxNode),
}
+
+impl GeneratedFunctionTarget {
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GeneratedFunctionTarget::BehindItem(it) => it,
+ GeneratedFunctionTarget::InEmptyItemList(it) => it,
+ }
+ }
+}
+
+/// Computes the type variables and arguments required for the generated function
+fn fn_args(
+ ctx: &AssistContext<'_>,
+ target_module: hir::Module,
+ call: ast::CallableExpr,
+) -> Option<(Option<ast::GenericParamList>, ast::ParamList)> {
+ let mut arg_names = Vec::new();
+ let mut arg_types = Vec::new();
+ for arg in call.arg_list()?.args() {
+ arg_names.push(fn_arg_name(&ctx.sema, &arg));
+ arg_types.push(fn_arg_type(ctx, target_module, &arg));
+ }
+ deduplicate_arg_names(&mut arg_names);
+ let params = arg_names.into_iter().zip(arg_types).map(|(name, ty)| {
+ make::param(make::ext::simple_ident_pat(make::name(&name)).into(), make::ty(&ty))
+ });
+
+ Some((
+ None,
+ make::param_list(
+ match call {
+ ast::CallableExpr::Call(_) => None,
+ ast::CallableExpr::MethodCall(_) => Some(make::self_param()),
+ },
+ params,
+ ),
+ ))
+}
+
+/// Makes duplicate argument names unique by appending incrementing numbers.
+///
+/// ```
+/// let mut names: Vec<String> =
+/// vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()];
+/// deduplicate_arg_names(&mut names);
+/// let expected: Vec<String> =
+/// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()];
+/// assert_eq!(names, expected);
+/// ```
+fn deduplicate_arg_names(arg_names: &mut Vec<String>) {
+ let mut arg_name_counts = FxHashMap::default();
+ for name in arg_names.iter() {
+ *arg_name_counts.entry(name).or_insert(0) += 1;
+ }
+ let duplicate_arg_names: FxHashSet<String> = arg_name_counts
+ .into_iter()
+ .filter(|(_, count)| *count >= 2)
+ .map(|(name, _)| name.clone())
+ .collect();
+
+ let mut counter_per_name = FxHashMap::default();
+ for arg_name in arg_names.iter_mut() {
+ if duplicate_arg_names.contains(arg_name) {
+ let counter = counter_per_name.entry(arg_name.clone()).or_insert(1);
+ arg_name.push('_');
+ arg_name.push_str(&counter.to_string());
+ *counter += 1;
+ }
+ }
+}
+
+fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> String {
+ let name = (|| match arg_expr {
+ ast::Expr::CastExpr(cast_expr) => Some(fn_arg_name(sema, &cast_expr.expr()?)),
+ expr => {
+ let name_ref = expr
+ .syntax()
+ .descendants()
+ .filter_map(ast::NameRef::cast)
+ .filter(|name| name.ident_token().is_some())
+ .last()?;
+ if let Some(NameRefClass::Definition(Definition::Const(_) | Definition::Static(_))) =
+ NameRefClass::classify(sema, &name_ref)
+ {
+ return Some(name_ref.to_string().to_lowercase());
+ };
+ Some(to_lower_snake_case(&name_ref.to_string()))
+ }
+ })();
+ match name {
+ Some(mut name) if name.starts_with(|c: char| c.is_ascii_digit()) => {
+ name.insert_str(0, "arg");
+ name
+ }
+ Some(name) => name,
+ None => "arg".to_string(),
+ }
+}
+
+fn fn_arg_type(ctx: &AssistContext<'_>, target_module: hir::Module, fn_arg: &ast::Expr) -> String {
+ fn maybe_displayed_type(
+ ctx: &AssistContext<'_>,
+ target_module: hir::Module,
+ fn_arg: &ast::Expr,
+ ) -> Option<String> {
+ let ty = ctx.sema.type_of_expr(fn_arg)?.adjusted();
+ if ty.is_unknown() {
+ return None;
+ }
+
+ if ty.is_reference() || ty.is_mutable_reference() {
+ let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
+ convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
+ .map(|conversion| conversion.convert_type(ctx.db()))
+ .or_else(|| ty.display_source_code(ctx.db(), target_module.into()).ok())
+ } else {
+ ty.display_source_code(ctx.db(), target_module.into()).ok()
+ }
+ }
+
+ maybe_displayed_type(ctx, target_module, fn_arg).unwrap_or_else(|| String::from("_"))
+}
+
+/// Returns the position inside the current mod or file
+/// directly after the current block
+/// We want to write the generated function directly after
+/// fns, impls or macro calls, but inside mods
+fn next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option<GeneratedFunctionTarget> {
+ let mut ancestors = expr.syntax().ancestors().peekable();
+ let mut last_ancestor: Option<SyntaxNode> = None;
+ while let Some(next_ancestor) = ancestors.next() {
+ match next_ancestor.kind() {
+ SyntaxKind::SOURCE_FILE => {
+ break;
+ }
+ SyntaxKind::ITEM_LIST => {
+ if ancestors.peek().map(|a| a.kind()) == Some(SyntaxKind::MODULE) {
+ break;
+ }
+ }
+ _ => {}
+ }
+ last_ancestor = Some(next_ancestor);
+ }
+ last_ancestor.map(GeneratedFunctionTarget::BehindItem)
+}
+
+fn next_space_for_fn_in_module(
+ db: &dyn hir::db::AstDatabase,
+ module_source: &hir::InFile<hir::ModuleSource>,
+) -> Option<(FileId, GeneratedFunctionTarget)> {
+ let file = module_source.file_id.original_file(db);
+ let assist_item = match &module_source.value {
+ hir::ModuleSource::SourceFile(it) => match it.items().last() {
+ Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
+ None => GeneratedFunctionTarget::BehindItem(it.syntax().clone()),
+ },
+ hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) {
+ Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
+ None => GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone()),
+ },
+ hir::ModuleSource::BlockExpr(it) => {
+ if let Some(last_item) =
+ it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last()
+ {
+ GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
+ } else {
+ GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone())
+ }
+ }
+ };
+ Some((file, assist_item))
+}
+
+fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarget> {
+ if let Some(last_item) = impl_.assoc_item_list().and_then(|it| it.assoc_items().last()) {
+ Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()))
+ } else {
+ Some(GeneratedFunctionTarget::InEmptyItemList(impl_.assoc_item_list()?.syntax().clone()))
+ }
+}
+
+fn module_is_descendant(module: &hir::Module, ans: &hir::Module, ctx: &AssistContext<'_>) -> bool {
+ if module == ans {
+ return true;
+ }
+ for c in ans.children(ctx.sema.db) {
+ if module_is_descendant(module, &c, ctx) {
+ return true;
+ }
+ }
+ false
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
    #[test]
    fn add_function_with_no_args() {
        // A zero-argument free function is generated after the enclosing item;
        // `${0:-> _}` is the snippet that focuses the unknown return type.
        check_assist(
            generate_function,
            r"
fn foo() {
    bar$0();
}
",
            r"
fn foo() {
    bar();
}

fn bar() ${0:-> _} {
    todo!()
}
",
        )
    }
+
+ #[test]
+ fn add_function_from_method() {
+ // This ensures that the function is correctly generated
+ // in the next outer mod or file
+ check_assist(
+ generate_function,
+ r"
+impl Foo {
+ fn foo() {
+ bar$0();
+ }
+}
+",
+ r"
+impl Foo {
+ fn foo() {
+ bar();
+ }
+}
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_directly_after_current_block() {
+ // The new fn should not be created at the end of the file or module
+ check_assist(
+ generate_function,
+ r"
+fn foo1() {
+ bar$0();
+}
+
+fn foo2() {}
+",
+ r"
+fn foo1() {
+ bar();
+}
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+
+fn foo2() {}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_no_args_in_same_module() {
+ check_assist(
+ generate_function,
+ r"
+mod baz {
+ fn foo() {
+ bar$0();
+ }
+}
+",
+ r"
+mod baz {
+ fn foo() {
+ bar();
+ }
+
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_upper_camel_case_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct BazBaz;
+fn foo() {
+ bar$0(BazBaz);
+}
+",
+ r"
+struct BazBaz;
+fn foo() {
+ bar(BazBaz);
+}
+
+fn bar(baz_baz: BazBaz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_upper_camel_case_arg_as_cast() {
+ check_assist(
+ generate_function,
+ r"
+struct BazBaz;
+fn foo() {
+ bar$0(&BazBaz as *const BazBaz);
+}
+",
+ r"
+struct BazBaz;
+fn foo() {
+ bar(&BazBaz as *const BazBaz);
+}
+
+fn bar(baz_baz: *const BazBaz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_function_call_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+fn foo() {
+ bar$0(baz());
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+fn foo() {
+ bar(baz());
+}
+
+fn bar(baz: Baz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_method_call_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+impl Baz {
+ fn foo(&self) -> Baz {
+ ba$0r(self.baz())
+ }
+ fn baz(&self) -> Baz {
+ Baz
+ }
+}
+",
+ r"
+struct Baz;
+impl Baz {
+ fn foo(&self) -> Baz {
+ bar(self.baz())
+ }
+ fn baz(&self) -> Baz {
+ Baz
+ }
+}
+
+fn bar(baz: Baz) -> Baz {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_string_literal_arg() {
+ check_assist(
+ generate_function,
+ r#"
+fn foo() {
+ $0bar("bar")
+}
+"#,
+ r#"
+fn foo() {
+ bar("bar")
+}
+
+fn bar(arg: &str) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_with_char_literal_arg() {
+ check_assist(
+ generate_function,
+ r#"
+fn foo() {
+ $0bar('x')
+}
+"#,
+ r#"
+fn foo() {
+ bar('x')
+}
+
+fn bar(arg: char) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_with_int_literal_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(42)
+}
+",
+ r"
+fn foo() {
+ bar(42)
+}
+
+fn bar(arg: i32) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_cast_int_literal_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(42 as u8)
+}
+",
+ r"
+fn foo() {
+ bar(42 as u8)
+}
+
+fn bar(arg: u8) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn name_of_cast_variable_is_used() {
+ // Ensures that the name of the cast type isn't used
+ // in the generated function signature.
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let x = 42;
+ bar$0(x as u8)
+}
+",
+ r"
+fn foo() {
+ let x = 42;
+ bar(x as u8)
+}
+
+fn bar(x: u8) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_variable_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let worble = ();
+ $0bar(worble)
+}
+",
+ r"
+fn foo() {
+ let worble = ();
+ bar(worble)
+}
+
+fn bar(worble: ()) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_impl_trait_arg() {
+ check_assist(
+ generate_function,
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo() -> impl Foo {
+ todo!()
+}
+fn baz() {
+ $0bar(foo())
+}
+"#,
+ r#"
+trait Foo {}
+fn foo() -> impl Foo {
+ todo!()
+}
+fn baz() {
+ bar(foo())
+}
+
+fn bar(foo: impl Foo) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn borrowed_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+
+fn foo() {
+ bar$0(&baz())
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+
+fn foo() {
+ bar(&baz())
+}
+
+fn bar(baz: &Baz) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_qualified_path_arg() {
+ check_assist(
+ generate_function,
+ r"
+mod Baz {
+ pub struct Bof;
+ pub fn baz() -> Bof { Bof }
+}
+fn foo() {
+ $0bar(Baz::baz())
+}
+",
+ r"
+mod Baz {
+ pub struct Bof;
+ pub fn baz() -> Bof { Bof }
+}
+fn foo() {
+ bar(Baz::baz())
+}
+
+fn bar(baz: Baz::Bof) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_generic_arg() {
+ // FIXME: This is wrong, generated `bar` should include generic parameter.
+ check_assist(
+ generate_function,
+ r"
+fn foo<T>(t: T) {
+ $0bar(t)
+}
+",
+ r"
+fn foo<T>(t: T) {
+ bar(t)
+}
+
+fn bar(t: T) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_fn_arg() {
+ // FIXME: The argument in `bar` is wrong.
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+impl Baz {
+ fn new() -> Self { Baz }
+}
+fn foo() {
+ $0bar(Baz::new);
+}
+",
+ r"
+struct Baz;
+impl Baz {
+ fn new() -> Self { Baz }
+}
+fn foo() {
+ bar(Baz::new);
+}
+
+fn bar(new: fn) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_closure_arg() {
+ // FIXME: The argument in `bar` is wrong.
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let closure = |x: i64| x - 1;
+ $0bar(closure)
+}
+",
+ r"
+fn foo() {
+ let closure = |x: i64| x - 1;
+ bar(closure)
+}
+
+fn bar(closure: _) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn unresolveable_types_default_to_placeholder() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(baz)
+}
+",
+ r"
+fn foo() {
+ bar(baz)
+}
+
+fn bar(baz: _) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn arg_names_dont_overlap() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ $0bar(baz(), baz())
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar(baz(), baz())
+}
+
+fn bar(baz_1: Baz, baz_2: Baz) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn arg_name_counters_start_at_1_per_name() {
+ check_assist(
+ generate_function,
+ r#"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ $0bar(baz(), baz(), "foo", "bar")
+}
+"#,
+ r#"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar(baz(), baz(), "foo", "bar")
+}
+
+fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_in_module() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {}
+
+fn foo() {
+ bar::my_fn$0()
+}
+",
+ r"
+mod bar {
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+}
+
+fn foo() {
+ bar::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn qualified_path_uses_correct_scope() {
+ check_assist(
+ generate_function,
+ r#"
+mod foo {
+ pub struct Foo;
+}
+fn bar() {
+ use foo::Foo;
+ let foo = Foo;
+ baz$0(foo)
+}
+"#,
+ r#"
+mod foo {
+ pub struct Foo;
+}
+fn bar() {
+ use foo::Foo;
+ let foo = Foo;
+ baz(foo)
+}
+
+fn baz(foo: foo::Foo) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_in_module_containing_other_items() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {
+ fn something_else() {}
+}
+
+fn foo() {
+ bar::my_fn$0()
+}
+",
+ r"
+mod bar {
+ fn something_else() {}
+
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+}
+
+fn foo() {
+ bar::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_in_nested_module() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {
+ mod baz {}
+}
+
+fn foo() {
+ bar::baz::my_fn$0()
+}
+",
+ r"
+mod bar {
+ mod baz {
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+ }
+}
+
+fn foo() {
+ bar::baz::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_in_another_file() {
+ check_assist(
+ generate_function,
+ r"
+//- /main.rs
+mod foo;
+
+fn main() {
+ foo::bar$0()
+}
+//- /foo.rs
+",
+ r"
+
+
+pub(crate) fn bar() {
+ ${0:todo!()}
+}",
+ )
+ }
+
+ #[test]
+ fn add_function_with_return_type() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ let x: u32 = foo$0();
+}
+",
+ r"
+fn main() {
+ let x: u32 = foo();
+}
+
+fn foo() -> u32 {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_not_applicable_if_function_already_exists() {
+ check_assist_not_applicable(
+ generate_function,
+ r"
+fn foo() {
+ bar$0();
+}
+
+fn bar() {}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_not_applicable_if_unresolved_variable_in_call_is_selected() {
+ check_assist_not_applicable(
+ // bar is resolved, but baz isn't.
+ // The assist is only active if the cursor is on an unresolved path,
+ // but the assist should only be offered if the path is a function call.
+ generate_function,
+ r#"
+fn foo() {
+ bar(b$0az);
+}
+
+fn bar(baz: ()) {}
+"#,
+ )
+ }
+
+ #[test]
+ fn create_method_with_no_args() {
+ check_assist(
+ generate_function,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {
+ self.bar()$0;
+ }
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {
+ self.bar();
+ }
+
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+"#,
+ )
+ }
+
    #[test]
    fn create_function_with_async() {
        // A parent `AwaitExpr` around the call makes the generated fn `async`.
        // NOTE(review): the fixture's `bar(42).await()` is a call of the awaited
        // value; `bar(42)`'s direct parent is still the `.await` — confirm this
        // fixture shape is intentional.
        check_assist(
            generate_function,
            r"
fn foo() {
    $0bar(42).await();
}
",
            r"
fn foo() {
    bar(42).await();
}

async fn bar(arg: i32) ${0:-> _} {
    todo!()
}
",
        )
    }
+
+ #[test]
+ fn create_method() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S.bar$0();}
+",
+ r"
+struct S;
+fn foo() {S.bar();}
+impl S {
+
+
+fn bar(&self) ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+
+ #[test]
+ fn create_method_within_an_impl() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S.bar$0();}
+impl S {}
+
+",
+ r"
+struct S;
+fn foo() {S.bar();}
+impl S {
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_method_from_different_module() {
+ check_assist(
+ generate_function,
+ r"
+mod s {
+ pub struct S;
+}
+fn foo() {s::S.bar$0();}
+",
+ r"
+mod s {
+ pub struct S;
+impl S {
+
+
+ pub(crate) fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+}
+fn foo() {s::S.bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_method_from_descendant_module() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+mod s {
+ fn foo() {
+ super::S.bar$0();
+ }
+}
+
+",
+ r"
+struct S;
+mod s {
+ fn foo() {
+ super::S.bar();
+ }
+}
+impl S {
+
+
+fn bar(&self) ${0:-> _} {
+ todo!()
+}
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_method_with_cursor_anywhere_on_call_expresion() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {$0S.bar();}
+",
+ r"
+struct S;
+fn foo() {S.bar();}
+impl S {
+
+
+fn bar(&self) ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S::bar$0();}
+",
+ r"
+struct S;
+fn foo() {S::bar();}
+impl S {
+
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_within_an_impl() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S::bar$0();}
+impl S {}
+
+",
+ r"
+struct S;
+fn foo() {S::bar();}
+impl S {
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_from_different_module() {
+ check_assist(
+ generate_function,
+ r"
+mod s {
+ pub struct S;
+}
+fn foo() {s::S::bar$0();}
+",
+ r"
+mod s {
+ pub struct S;
+impl S {
+
+
+ pub(crate) fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+}
+fn foo() {s::S::bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_with_cursor_anywhere_on_call_expresion() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {$0S::bar();}
+",
+ r"
+struct S;
+fn foo() {S::bar();}
+impl S {
+
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+
+ #[test]
+ fn no_panic_on_invalid_global_path() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ ::foo$0();
+}
+",
+ r"
+fn main() {
+ ::foo();
+}
+
+fn foo() ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn handle_tuple_indexing() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ let a = ((),);
+ foo$0(a.0);
+}
+",
+ r"
+fn main() {
+ let a = ((),);
+ foo(a.0);
+}
+
+fn foo(a: ()) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_const_arg() {
+ check_assist(
+ generate_function,
+ r"
+const VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+const VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_static_arg() {
+ check_assist(
+ generate_function,
+ r"
+static VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+static VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_static_mut_arg() {
+ check_assist(
+ generate_function,
+ r"
+static mut VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+static mut VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_enum_variant() {
+ check_assist_not_applicable(
+ generate_function,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0(true)
+}
+",
+ );
+ }
+
+ #[test]
+ fn applicable_for_enum_method() {
+ check_assist(
+ generate_function,
+ r"
+enum Foo {}
+fn main() {
+ Foo::new$0();
+}
+",
+ r"
+enum Foo {}
+fn main() {
+ Foo::new();
+}
+impl Foo {
+
+
+fn new() ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
new file mode 100644
index 000000000..76fcef0ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
@@ -0,0 +1,492 @@
+use ide_db::famous_defs::FamousDefs;
+use stdx::{format_to, to_lower_snake_case};
+use syntax::ast::{self, AstNode, HasName, HasVisibility};
+
+use crate::{
+ utils::{convert_reference_type, find_impl_block_end, find_struct_impl, generate_impl_text},
+ AssistContext, AssistId, AssistKind, Assists, GroupLabel,
+};
+
+// Assist: generate_getter
+//
+// Generate a getter method.
+//
+// ```
+// # //- minicore: as_ref
+// # pub struct String;
+// # impl AsRef<str> for String {
+// # fn as_ref(&self) -> &str {
+// # ""
+// # }
+// # }
+// #
+// struct Person {
+// nam$0e: String,
+// }
+// ```
+// ->
+// ```
+// # pub struct String;
+// # impl AsRef<str> for String {
+// # fn as_ref(&self) -> &str {
+// # ""
+// # }
+// # }
+// #
+// struct Person {
+// name: String,
+// }
+//
+// impl Person {
+// fn $0name(&self) -> &str {
+// self.name.as_ref()
+// }
+// }
+// ```
pub(crate) fn generate_getter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    // Delegates to the shared implementation with `mutable = false`.
    generate_getter_impl(acc, ctx, false)
}
+
+// Assist: generate_getter_mut
+//
+// Generate a mut getter method.
+//
+// ```
+// struct Person {
+// nam$0e: String,
+// }
+// ```
+// ->
+// ```
+// struct Person {
+// name: String,
+// }
+//
+// impl Person {
+// fn $0name_mut(&mut self) -> &mut String {
+// &mut self.name
+// }
+// }
+// ```
pub(crate) fn generate_getter_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    // Delegates to the shared implementation with `mutable = true`.
    generate_getter_impl(acc, ctx, true)
}
+
/// Shared implementation behind [`generate_getter`] and [`generate_getter_mut`].
///
/// `mutable` selects between generating `fn field(&self) -> &T`-style getters
/// and `fn field_mut(&mut self) -> &mut T` getters.
pub(crate) fn generate_getter_impl(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
    mutable: bool,
) -> Option<()> {
    let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
    let field = ctx.find_node_at_offset::<ast::RecordField>()?;

    let field_name = field.name()?;
    let field_ty = field.ty()?;

    // Return early if we've found an existing fn
    let mut fn_name = to_lower_snake_case(&field_name.to_string());
    if mutable {
        format_to!(fn_name, "_mut");
    }
    let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;

    let (id, label) = if mutable {
        ("generate_getter_mut", "Generate a mut getter method")
    } else {
        ("generate_getter", "Generate a getter method")
    };
    let target = field.syntax().text_range();
    acc.add_group(
        &GroupLabel("Generate getter/setter".to_owned()),
        AssistId(id, AssistKind::Generate),
        label,
        target,
        |builder| {
            let mut buf = String::with_capacity(512);

            // When appending to an existing impl block, separate the new method
            // from the previous item with a blank line.
            if impl_def.is_some() {
                buf.push('\n');
            }

            let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
            let (ty, body) = if mutable {
                (format!("&mut {}", field_ty), format!("&mut self.{}", field_name))
            } else {
                // For the immutable getter, prefer a nicer borrowed return type
                // when one exists for the field type (e.g. `&str` for `String`,
                // `Option<&T>` for `Option<T>`); otherwise return `&FieldTy`.
                (|| {
                    let krate = ctx.sema.scope(field_ty.syntax())?.krate();
                    let famous_defs = &FamousDefs(&ctx.sema, krate);
                    ctx.sema
                        .resolve_type(&field_ty)
                        .and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs))
                        .map(|conversion| {
                            cov_mark::hit!(convert_reference_type);
                            (
                                conversion.convert_type(ctx.db()),
                                conversion.getter(field_name.to_string()),
                            )
                        })
                })()
                .unwrap_or_else(|| (format!("&{}", field_ty), format!("&self.{}", field_name)))
            };

            format_to!(
                buf,
                "    {}fn {}(&{}self) -> {} {{
        {}
    }}",
                vis,
                fn_name,
                mutable.then(|| "mut ").unwrap_or_default(),
                ty,
                body,
            );

            // Insert into the existing impl block if there is one; otherwise
            // wrap the method in a brand-new `impl` placed after the struct.
            let start_offset = impl_def
                .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
                .unwrap_or_else(|| {
                    buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
                    strukt.syntax().text_range().end()
                });

            match ctx.config.snippet_cap {
                Some(cap) => {
                    // Put the cursor (`$0`) on the generated method's name.
                    builder.insert_snippet(cap, start_offset, buf.replacen("fn ", "fn $0", 1))
                }
                None => builder.insert(start_offset, buf),
            }
        },
    )
}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_getter_from_field() {
+ check_assist(
+ generate_getter,
+ r#"
+struct Context {
+ dat$0a: Data,
+}
+"#,
+ r#"
+struct Context {
+ data: Data,
+}
+
+impl Context {
+ fn $0data(&self) -> &Data {
+ &self.data
+ }
+}
+"#,
+ );
+
+ check_assist(
+ generate_getter_mut,
+ r#"
+struct Context {
+ dat$0a: Data,
+}
+"#,
+ r#"
+struct Context {
+ data: Data,
+}
+
+impl Context {
+ fn $0data_mut(&mut self) -> &mut Data {
+ &mut self.data
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_getter_already_implemented() {
+ check_assist_not_applicable(
+ generate_getter,
+ r#"
+struct Context {
+ dat$0a: Data,
+}
+
+impl Context {
+ fn data(&self) -> &Data {
+ &self.data
+ }
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ generate_getter_mut,
+ r#"
+struct Context {
+ dat$0a: Data,
+}
+
+impl Context {
+ fn data_mut(&mut self) -> &mut Data {
+ &mut self.data
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_getter_from_field_with_visibility_marker() {
+ check_assist(
+ generate_getter,
+ r#"
+pub(crate) struct Context {
+ dat$0a: Data,
+}
+"#,
+ r#"
+pub(crate) struct Context {
+ data: Data,
+}
+
+impl Context {
+ pub(crate) fn $0data(&self) -> &Data {
+ &self.data
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generate_getter() {
+ check_assist(
+ generate_getter,
+ r#"
+struct Context {
+ data: Data,
+ cou$0nt: usize,
+}
+
+impl Context {
+ fn data(&self) -> &Data {
+ &self.data
+ }
+}
+"#,
+ r#"
+struct Context {
+ data: Data,
+ count: usize,
+}
+
+impl Context {
+ fn data(&self) -> &Data {
+ &self.data
+ }
+
+ fn $0count(&self) -> &usize {
+ &self.count
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_not_a_special_case() {
+ cov_mark::check_count!(convert_reference_type, 0);
+ // Fake string which doesn't implement AsRef<str>
+ check_assist(
+ generate_getter,
+ r#"
+pub struct String;
+
+struct S { foo: $0String }
+"#,
+ r#"
+pub struct String;
+
+struct S { foo: String }
+
+impl S {
+ fn $0foo(&self) -> &String {
+ &self.foo
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_convert_reference_type() {
+ cov_mark::check_count!(convert_reference_type, 6);
+
+ // Copy
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: copy
+struct S { foo: $0bool }
+"#,
+ r#"
+struct S { foo: bool }
+
+impl S {
+ fn $0foo(&self) -> bool {
+ self.foo
+ }
+}
+"#,
+ );
+
+ // AsRef<str>
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: as_ref
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct S { foo: $0String }
+"#,
+ r#"
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct S { foo: String }
+
+impl S {
+ fn $0foo(&self) -> &str {
+ self.foo.as_ref()
+ }
+}
+"#,
+ );
+
+ // AsRef<T>
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: as_ref
+struct Sweets;
+
+pub struct Box<T>(T);
+impl<T> AsRef<T> for Box<T> {
+ fn as_ref(&self) -> &T {
+ &self.0
+ }
+}
+
+struct S { foo: $0Box<Sweets> }
+"#,
+ r#"
+struct Sweets;
+
+pub struct Box<T>(T);
+impl<T> AsRef<T> for Box<T> {
+ fn as_ref(&self) -> &T {
+ &self.0
+ }
+}
+
+struct S { foo: Box<Sweets> }
+
+impl S {
+ fn $0foo(&self) -> &Sweets {
+ self.foo.as_ref()
+ }
+}
+"#,
+ );
+
+ // AsRef<[T]>
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: as_ref
+pub struct Vec<T>;
+impl<T> AsRef<[T]> for Vec<T> {
+ fn as_ref(&self) -> &[T] {
+ &[]
+ }
+}
+
+struct S { foo: $0Vec<()> }
+"#,
+ r#"
+pub struct Vec<T>;
+impl<T> AsRef<[T]> for Vec<T> {
+ fn as_ref(&self) -> &[T] {
+ &[]
+ }
+}
+
+struct S { foo: Vec<()> }
+
+impl S {
+ fn $0foo(&self) -> &[()] {
+ self.foo.as_ref()
+ }
+}
+"#,
+ );
+
+ // Option
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: option
+struct Failure;
+
+struct S { foo: $0Option<Failure> }
+"#,
+ r#"
+struct Failure;
+
+struct S { foo: Option<Failure> }
+
+impl S {
+ fn $0foo(&self) -> Option<&Failure> {
+ self.foo.as_ref()
+ }
+}
+"#,
+ );
+
+ // Result
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: result
+struct Context {
+ dat$0a: Result<bool, i32>,
+}
+"#,
+ r#"
+struct Context {
+ data: Result<bool, i32>,
+}
+
+impl Context {
+ fn $0data(&self) -> Result<&bool, &i32> {
+ self.data.as_ref()
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
new file mode 100644
index 000000000..68287a20b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
@@ -0,0 +1,177 @@
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{utils::generate_impl_text, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: generate_impl
+//
+// Adds a new inherent impl for a type.
+//
+// ```
+// struct Ctx<T: Clone> {
+// data: T,$0
+// }
+// ```
+// ->
+// ```
+// struct Ctx<T: Clone> {
+// data: T,
+// }
+//
+// impl<T: Clone> Ctx<T> {
+// $0
+// }
+// ```
+pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let nominal = ctx.find_node_at_offset::<ast::Adt>()?;
+ let name = nominal.name()?;
+ let target = nominal.syntax().text_range();
+
+ acc.add(
+ AssistId("generate_impl", AssistKind::Generate),
+ format!("Generate impl for `{}`", name),
+ target,
+ |edit| {
+ let start_offset = nominal.syntax().text_range().end();
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snippet = generate_impl_text(&nominal, " $0");
+ edit.insert_snippet(cap, start_offset, snippet);
+ }
+ None => {
+ let snippet = generate_impl_text(&nominal, "");
+ edit.insert(start_offset, snippet);
+ }
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_add_impl() {
+ check_assist(
+ generate_impl,
+ "struct Foo {$0}\n",
+ "struct Foo {}\n\nimpl Foo {\n $0\n}\n",
+ );
+ check_assist(
+ generate_impl,
+ "struct Foo<T: Clone> {$0}",
+ "struct Foo<T: Clone> {}\n\nimpl<T: Clone> Foo<T> {\n $0\n}",
+ );
+ check_assist(
+ generate_impl,
+ "struct Foo<'a, T: Foo<'a>> {$0}",
+ "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}",
+ );
+ check_assist(
+ generate_impl,
+ r#"
+ struct MyOwnArray<T, const S: usize> {}$0"#,
+ r#"
+ struct MyOwnArray<T, const S: usize> {}
+
+ impl<T, const S: usize> MyOwnArray<T, S> {
+ $0
+ }"#,
+ );
+ check_assist(
+ generate_impl,
+ r#"
+ #[cfg(feature = "foo")]
+ struct Foo<'a, T: Foo<'a>> {$0}"#,
+ r#"
+ #[cfg(feature = "foo")]
+ struct Foo<'a, T: Foo<'a>> {}
+
+ #[cfg(feature = "foo")]
+ impl<'a, T: Foo<'a>> Foo<'a, T> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
+ r#"
+ #[cfg(not(feature = "foo"))]
+ struct Foo<'a, T: Foo<'a>> {$0}"#,
+ r#"
+ #[cfg(not(feature = "foo"))]
+ struct Foo<'a, T: Foo<'a>> {}
+
+ #[cfg(not(feature = "foo"))]
+ impl<'a, T: Foo<'a>> Foo<'a, T> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
+ r#"
+ struct Defaulted<T = i32> {}$0"#,
+ r#"
+ struct Defaulted<T = i32> {}
+
+ impl<T> Defaulted<T> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
+ r#"
+ struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}$0"#,
+ r#"
+ struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}
+
+ impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
+ r#"pub trait Trait {}
+struct Struct<T>$0
+where
+ T: Trait,
+{
+ inner: T,
+}"#,
+ r#"pub trait Trait {}
+struct Struct<T>
+where
+ T: Trait,
+{
+ inner: T,
+}
+
+impl<T> Struct<T>
+where
+ T: Trait,
+{
+ $0
+}"#,
+ );
+ }
+
+ #[test]
+ fn add_impl_target() {
+ check_assist_target(
+ generate_impl,
+ "
+struct SomeThingIrrelevant;
+/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>> {$0}
+struct EvenMoreIrrelevant;
+",
+ "/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>> {}",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
new file mode 100644
index 000000000..9ce525ca3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
@@ -0,0 +1,295 @@
+use hir::{known, HasSource, Name};
+use syntax::{
+ ast::{self, HasName},
+ AstNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: generate_is_empty_from_len
+//
+// Generates is_empty implementation from the len method.
+//
+// ```
+// struct MyStruct { data: Vec<String> }
+//
+// impl MyStruct {
+// #[must_use]
+// p$0ub fn len(&self) -> usize {
+// self.data.len()
+// }
+// }
+// ```
+// ->
+// ```
+// struct MyStruct { data: Vec<String> }
+//
+// impl MyStruct {
+// #[must_use]
+// pub fn len(&self) -> usize {
+// self.data.len()
+// }
+//
+// #[must_use]
+// pub fn is_empty(&self) -> bool {
+// self.len() == 0
+// }
+// }
+// ```
+pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let fn_node = ctx.find_node_at_offset::<ast::Fn>()?;
+ let fn_name = fn_node.name()?;
+
+ if fn_name.text() != "len" {
+ cov_mark::hit!(len_function_not_present);
+ return None;
+ }
+
+ if fn_node.param_list()?.params().next().is_some() {
+ cov_mark::hit!(len_function_with_parameters);
+ return None;
+ }
+
+ let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
+ let len_fn = get_impl_method(ctx, &impl_, &known::len)?;
+ if !len_fn.ret_type(ctx.sema.db).is_usize() {
+ cov_mark::hit!(len_fn_different_return_type);
+ return None;
+ }
+
+ if get_impl_method(ctx, &impl_, &known::is_empty).is_some() {
+ cov_mark::hit!(is_empty_already_implemented);
+ return None;
+ }
+
+ let node = len_fn.source(ctx.sema.db)?;
+ let range = node.syntax().value.text_range();
+
+ acc.add(
+ AssistId("generate_is_empty_from_len", AssistKind::Generate),
+ "Generate a is_empty impl from a len function",
+ range,
+ |builder| {
+ let code = r#"
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }"#
+ .to_string();
+ builder.insert(range.end(), code)
+ },
+ )
+}
+
+fn get_impl_method(
+ ctx: &AssistContext<'_>,
+ impl_: &ast::Impl,
+ fn_name: &Name,
+) -> Option<hir::Function> {
+ let db = ctx.sema.db;
+ let impl_def: hir::Impl = ctx.sema.to_def(impl_)?;
+
+ let scope = ctx.sema.scope(impl_.syntax())?;
+ let ty = impl_def.self_ty(db);
+ ty.iterate_method_candidates(
+ db,
+ &scope,
+ &scope.visible_traits().0,
+ None,
+ Some(fn_name),
+ |func| Some(func),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn len_function_not_present() {
+ cov_mark::check!(len_function_not_present);
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ p$0ub fn test(&self) -> usize {
+ self.data.len()
+ }
+ }
+"#,
+ );
+ }
+
+ #[test]
+ fn len_function_with_parameters() {
+ cov_mark::check!(len_function_with_parameters);
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self, _i: bool) -> usize {
+ self.data.len()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn is_empty_already_implemented() {
+ cov_mark::check!(is_empty_already_implemented);
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn len_fn_different_return_type() {
+ cov_mark::check!(len_fn_different_return_type);
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> u32 {
+ self.data.len()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_is_empty() {
+ check_assist(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+}
+"#,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_functions_in_impl() {
+ check_assist(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn new() -> Self {
+ Self { data: 0 }
+ }
+
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ pub fn work(&self) -> Option<usize> {
+
+ }
+}
+"#,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn new() -> Self {
+ Self { data: 0 }
+ }
+
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ pub fn work(&self) -> Option<usize> {
+
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_impls() {
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+}
+
+impl MyStruct {
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
new file mode 100644
index 000000000..6c93875e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
@@ -0,0 +1,495 @@
+use ide_db::{
+ imports::import_assets::item_for_path_search, use_trivial_contructor::use_trivial_constructor,
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::ast::{self, AstNode, HasName, HasVisibility, StructKind};
+
+use crate::{
+ utils::{find_impl_block_start, find_struct_impl, generate_impl_text},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_new
+//
+// Adds a `fn new` for a type.
+//
+// ```
+// struct Ctx<T: Clone> {
+// data: T,$0
+// }
+// ```
+// ->
+// ```
+// struct Ctx<T: Clone> {
+// data: T,
+// }
+//
+// impl<T: Clone> Ctx<T> {
+// fn $0new(data: T) -> Self { Self { data } }
+// }
+// ```
+pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+
+ // We want to only apply this to non-union structs with named fields
+ let field_list = match strukt.kind() {
+ StructKind::Record(named) => named,
+ _ => return None,
+ };
+
+ // Return early if we've found an existing new fn
+ let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
+
+ let current_module = ctx.sema.scope(strukt.syntax())?.module();
+
+ let target = strukt.syntax().text_range();
+ acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {
+ let mut buf = String::with_capacity(512);
+
+ if impl_def.is_some() {
+ buf.push('\n');
+ }
+
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+
+ let trivial_constructors = field_list
+ .fields()
+ .map(|f| {
+ let ty = ctx.sema.resolve_type(&f.ty()?)?;
+
+ let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
+
+ let type_path = current_module
+ .find_use_path(ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?)?;
+
+ let expr = use_trivial_constructor(
+ &ctx.sema.db,
+ ide_db::helpers::mod_path_to_ast(&type_path),
+ &ty,
+ )?;
+
+ Some(format!("{}: {}", f.name()?.syntax(), expr))
+ })
+ .collect::<Vec<_>>();
+
+ let params = field_list
+ .fields()
+ .enumerate()
+ .filter_map(|(i, f)| {
+ if trivial_constructors[i].is_none() {
+ Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax()))
+ } else {
+ None
+ }
+ })
+ .format(", ");
+
+ let fields = field_list
+ .fields()
+ .enumerate()
+ .filter_map(|(i, f)| {
+ let contructor = trivial_constructors[i].clone();
+ if contructor.is_some() {
+ contructor
+ } else {
+ Some(f.name()?.to_string())
+ }
+ })
+ .format(", ");
+
+ format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields);
+
+ let start_offset = impl_def
+ .and_then(|impl_def| find_impl_block_start(impl_def, &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
+ strukt.syntax().text_range().end()
+ });
+
+ match ctx.config.snippet_cap {
+ None => builder.insert(start_offset, buf),
+ Some(cap) => {
+ buf = buf.replace("fn new", "fn $0new");
+ builder.insert_snippet(cap, start_offset, buf);
+ }
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_new_with_zst_fields() {
+ check_assist(
+ generate_new,
+ r#"
+struct Empty;
+
+struct Foo { empty: Empty $0}
+"#,
+ r#"
+struct Empty;
+
+struct Foo { empty: Empty }
+
+impl Foo {
+ fn $0new() -> Self { Self { empty: Empty } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Empty;
+
+struct Foo { baz: String, empty: Empty $0}
+"#,
+ r#"
+struct Empty;
+
+struct Foo { baz: String, empty: Empty }
+
+impl Foo {
+ fn $0new(baz: String) -> Self { Self { baz, empty: Empty } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+enum Empty { Bar }
+
+struct Foo { empty: Empty $0}
+"#,
+ r#"
+enum Empty { Bar }
+
+struct Foo { empty: Empty }
+
+impl Foo {
+ fn $0new() -> Self { Self { empty: Empty::Bar } }
+}
+"#,
+ );
+
+ // make sure the assist only works on unit variants
+ check_assist(
+ generate_new,
+ r#"
+struct Empty {}
+
+struct Foo { empty: Empty $0}
+"#,
+ r#"
+struct Empty {}
+
+struct Foo { empty: Empty }
+
+impl Foo {
+ fn $0new(empty: Empty) -> Self { Self { empty } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+enum Empty { Bar {} }
+
+struct Foo { empty: Empty $0}
+"#,
+ r#"
+enum Empty { Bar {} }
+
+struct Foo { empty: Empty }
+
+impl Foo {
+ fn $0new(empty: Empty) -> Self { Self { empty } }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_new() {
+ check_assist(
+ generate_new,
+ r#"
+struct Foo {$0}
+"#,
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo<T: Clone> {$0}
+"#,
+ r#"
+struct Foo<T: Clone> {}
+
+impl<T: Clone> Foo<T> {
+ fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo<'a, T: Foo<'a>> {$0}
+"#,
+ r#"
+struct Foo<'a, T: Foo<'a>> {}
+
+impl<'a, T: Foo<'a>> Foo<'a, T> {
+ fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo { baz: String $0}
+"#,
+ r#"
+struct Foo { baz: String }
+
+impl Foo {
+ fn $0new(baz: String) -> Self { Self { baz } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo { baz: String, qux: Vec<i32> $0}
+"#,
+ r#"
+struct Foo { baz: String, qux: Vec<i32> }
+
+impl Foo {
+ fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn check_that_visibility_modifiers_dont_get_brought_in() {
+ check_assist(
+ generate_new,
+ r#"
+struct Foo { pub baz: String, pub qux: Vec<i32> $0}
+"#,
+ r#"
+struct Foo { pub baz: String, pub qux: Vec<i32> }
+
+impl Foo {
+ fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn check_it_reuses_existing_impls() {
+ check_assist(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {}
+"#,
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {
+ fn qux(&self) {}
+}
+"#,
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn $0new() -> Self { Self { } }
+
+ fn qux(&self) {}
+}
+"#,
+ );
+
+ check_assist(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {
+ fn qux(&self) {}
+ fn baz() -> i32 {
+ 5
+ }
+}
+"#,
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn $0new() -> Self { Self { } }
+
+ fn qux(&self) {}
+ fn baz() -> i32 {
+ 5
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn check_visibility_of_new_fn_based_on_struct() {
+ check_assist(
+ generate_new,
+ r#"
+pub struct Foo {$0}
+"#,
+ r#"
+pub struct Foo {}
+
+impl Foo {
+ pub fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+pub(crate) struct Foo {$0}
+"#,
+ r#"
+pub(crate) struct Foo {}
+
+impl Foo {
+ pub(crate) fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_new_not_applicable_if_fn_exists() {
+ check_assist_not_applicable(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {
+ fn new() -> Self {
+ Self
+ }
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {
+ fn New() -> Self {
+ Self
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_new_target() {
+ check_assist_target(
+ generate_new,
+ r#"
+struct SomeThingIrrelevant;
+/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>> {$0}
+struct EvenMoreIrrelevant;
+"#,
+ "/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>> {}",
+ );
+ }
+
+ #[test]
+ fn test_unrelated_new() {
+ check_assist(
+ generate_new,
+ r#"
+pub struct AstId<N: AstNode> {
+ file_id: HirFileId,
+ file_ast_id: FileAstId<N>,
+}
+
+impl<N: AstNode> AstId<N> {
+ pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
+ AstId { file_id, file_ast_id }
+ }
+}
+
+pub struct Source<T> {
+ pub file_id: HirFileId,$0
+ pub ast: T,
+}
+
+impl<T> Source<T> {
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+ Source { file_id: self.file_id, ast: f(self.ast) }
+ }
+}
+"#,
+ r#"
+pub struct AstId<N: AstNode> {
+ file_id: HirFileId,
+ file_ast_id: FileAstId<N>,
+}
+
+impl<N: AstNode> AstId<N> {
+ pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
+ AstId { file_id, file_ast_id }
+ }
+}
+
+pub struct Source<T> {
+ pub file_id: HirFileId,
+ pub ast: T,
+}
+
+impl<T> Source<T> {
+ pub fn $0new(file_id: HirFileId, ast: T) -> Self { Self { file_id, ast } }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+ Source { file_id: self.file_id, ast: f(self.ast) }
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
new file mode 100644
index 000000000..2a7ad6ce3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
@@ -0,0 +1,184 @@
+use stdx::{format_to, to_lower_snake_case};
+use syntax::ast::{self, AstNode, HasName, HasVisibility};
+
+use crate::{
+ utils::{find_impl_block_end, find_struct_impl, generate_impl_text},
+ AssistContext, AssistId, AssistKind, Assists, GroupLabel,
+};
+
+// Assist: generate_setter
+//
+// Generate a setter method.
+//
+// ```
+// struct Person {
+// nam$0e: String,
+// }
+// ```
+// ->
+// ```
+// struct Person {
+// name: String,
+// }
+//
+// impl Person {
+// fn set_name(&mut self, name: String) {
+// self.name = name;
+// }
+// }
+// ```
+pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+
+ let field_name = field.name()?;
+ let field_ty = field.ty()?;
+
+ // Return early if we've found an existing fn
+ let fn_name = to_lower_snake_case(&field_name.to_string());
+ let impl_def = find_struct_impl(
+ ctx,
+ &ast::Adt::Struct(strukt.clone()),
+ format!("set_{}", fn_name).as_str(),
+ )?;
+
+ let target = field.syntax().text_range();
+ acc.add_group(
+ &GroupLabel("Generate getter/setter".to_owned()),
+ AssistId("generate_setter", AssistKind::Generate),
+ "Generate a setter method",
+ target,
+ |builder| {
+ let mut buf = String::with_capacity(512);
+
+ if impl_def.is_some() {
+ buf.push('\n');
+ }
+
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+ format_to!(
+ buf,
+ " {}fn set_{}(&mut self, {}: {}) {{
+ self.{} = {};
+ }}",
+ vis,
+ fn_name,
+ fn_name,
+ field_ty,
+ fn_name,
+ fn_name,
+ );
+
+ let start_offset = impl_def
+ .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
+ strukt.syntax().text_range().end()
+ });
+
+ builder.insert(start_offset, buf);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ fn check_not_applicable(ra_fixture: &str) {
+ check_assist_not_applicable(generate_setter, ra_fixture)
+ }
+
+ #[test]
+ fn test_generate_setter_from_field() {
+ check_assist(
+ generate_setter,
+ r#"
+struct Person<T: Clone> {
+ dat$0a: T,
+}"#,
+ r#"
+struct Person<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Person<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_setter_already_implemented() {
+ check_not_applicable(
+ r#"
+struct Person<T: Clone> {
+ dat$0a: T,
+}
+
+impl<T: Clone> Person<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_setter_from_field_with_visibility_marker() {
+ check_assist(
+ generate_setter,
+ r#"
+pub(crate) struct Person<T: Clone> {
+ dat$0a: T,
+}"#,
+ r#"
+pub(crate) struct Person<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Person<T> {
+ pub(crate) fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generate_setter() {
+ check_assist(
+ generate_setter,
+ r#"
+struct Context<T: Clone> {
+ data: T,
+ cou$0nt: usize,
+}
+
+impl<T: Clone> Context<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ r#"
+struct Context<T: Clone> {
+ data: T,
+ count: usize,
+}
+
+impl<T: Clone> Context<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+
+ fn set_count(&mut self, count: usize) {
+ self.count = count;
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
new file mode 100644
index 000000000..80d3b9255
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -0,0 +1,1194 @@
+use ast::make;
+use either::Either;
+use hir::{db::HirDatabase, PathResolution, Semantics, TypeInfo};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ defs::Definition,
+ imports::insert_use::remove_path_if_in_use_stmt,
+ path_transform::PathTransform,
+ search::{FileReference, SearchScope},
+ syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
+ RootDatabase,
+};
+use itertools::{izip, Itertools};
+use syntax::{
+ ast::{self, edit_in_place::Indent, HasArgList, PathExpr},
+ ted, AstNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: inline_into_callers
+//
+// Inline a function or method body into all of its callers where possible, creating a `let` statement per parameter
+// unless the parameter can be inlined. The parameter will be inlined either if it the supplied argument is a simple local
+// or if the parameter is only accessed inside the function body once.
+// If all calls can be inlined the function will be removed.
+//
+// ```
+// fn print(_: &str) {}
+// fn foo$0(word: &str) {
+// if !word.is_empty() {
+// print(word);
+// }
+// }
+// fn bar() {
+// foo("안녕하세요");
+// foo("여러분");
+// }
+// ```
+// ->
+// ```
+// fn print(_: &str) {}
+//
+// fn bar() {
+// {
+// let word = "안녕하세요";
+// if !word.is_empty() {
+// print(word);
+// }
+// };
+// {
+// let word = "여러분";
+// if !word.is_empty() {
+// print(word);
+// }
+// };
+// }
+// ```
+pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let def_file = ctx.file_id();
+ let name = ctx.find_node_at_offset::<ast::Name>()?;
+ let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
+ let func_body = ast_func.body()?;
+ let param_list = ast_func.param_list()?;
+
+ let function = ctx.sema.to_def(&ast_func)?;
+
+ let params = get_fn_params(ctx.sema.db, function, &param_list)?;
+
+ let usages = Definition::Function(function).usages(&ctx.sema);
+ if !usages.at_least_one() {
+ return None;
+ }
+
+ let is_recursive_fn = usages
+ .clone()
+ .in_scope(SearchScope::file_range(FileRange {
+ file_id: def_file,
+ range: func_body.syntax().text_range(),
+ }))
+ .at_least_one();
+ if is_recursive_fn {
+ cov_mark::hit!(inline_into_callers_recursive);
+ return None;
+ }
+
+ acc.add(
+ AssistId("inline_into_callers", AssistKind::RefactorInline),
+ "Inline into all callers",
+ name.syntax().text_range(),
+ |builder| {
+ let mut usages = usages.all();
+ let current_file_usage = usages.references.remove(&def_file);
+
+ let mut remove_def = true;
+ let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
+ builder.edit_file(file_id);
+ let count = refs.len();
+ // The collects are required as we are otherwise iterating while mutating 🙅‍♀️🙅‍♂️
+ let (name_refs, name_refs_use): (Vec<_>, Vec<_>) = refs
+ .into_iter()
+ .filter_map(|file_ref| match file_ref.name {
+ ast::NameLike::NameRef(name_ref) => Some(name_ref),
+ _ => None,
+ })
+ .partition_map(|name_ref| {
+ match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
+ Some(use_tree) => Either::Right(builder.make_mut(use_tree)),
+ None => Either::Left(name_ref),
+ }
+ });
+ let call_infos: Vec<_> = name_refs
+ .into_iter()
+ .filter_map(CallInfo::from_name_ref)
+ .map(|call_info| {
+ let mut_node = builder.make_syntax_mut(call_info.node.syntax().clone());
+ (call_info, mut_node)
+ })
+ .collect();
+ let replaced = call_infos
+ .into_iter()
+ .map(|(call_info, mut_node)| {
+ let replacement =
+ inline(&ctx.sema, def_file, function, &func_body, &params, &call_info);
+ ted::replace(mut_node, replacement.syntax());
+ })
+ .count();
+ if replaced + name_refs_use.len() == count {
+ // we replaced all usages in this file, so we can remove the imports
+ name_refs_use.into_iter().for_each(|use_tree| {
+ if let Some(path) = use_tree.path() {
+ remove_path_if_in_use_stmt(&path);
+ }
+ })
+ } else {
+ remove_def = false;
+ }
+ };
+ for (file_id, refs) in usages.into_iter() {
+ inline_refs_for_file(file_id, refs);
+ }
+ match current_file_usage {
+ Some(refs) => inline_refs_for_file(def_file, refs),
+ None => builder.edit_file(def_file),
+ }
+ if remove_def {
+ builder.delete(ast_func.syntax().text_range());
+ }
+ },
+ )
+}
+
+// Assist: inline_call
+//
+// Inlines a function or method body creating a `let` statement per parameter unless the parameter
+// can be inlined. The parameter will be inlined either if it the supplied argument is a simple local
+// or if the parameter is only accessed inside the function body once.
+//
+// ```
+// # //- minicore: option
+// fn foo(name: Option<&str>) {
+// let name = name.unwrap$0();
+// }
+// ```
+// ->
+// ```
+// fn foo(name: Option<&str>) {
+// let name = match name {
+// Some(val) => val,
+// None => panic!("called `Option::unwrap()` on a `None` value"),
+// };
+// }
+// ```
+pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
+ let call_info = CallInfo::from_name_ref(name_ref.clone())?;
+ let (function, label) = match &call_info.node {
+ ast::CallableExpr::Call(call) => {
+ let path = match call.expr()? {
+ ast::Expr::PathExpr(path) => path.path(),
+ _ => None,
+ }?;
+ let function = match ctx.sema.resolve_path(&path)? {
+ PathResolution::Def(hir::ModuleDef::Function(f)) => f,
+ _ => return None,
+ };
+ (function, format!("Inline `{}`", path))
+ }
+ ast::CallableExpr::MethodCall(call) => {
+ (ctx.sema.resolve_method_call(call)?, format!("Inline `{}`", name_ref))
+ }
+ };
+
+ let fn_source = ctx.sema.source(function)?;
+ let fn_body = fn_source.value.body()?;
+ let param_list = fn_source.value.param_list()?;
+
+ let FileRange { file_id, range } = fn_source.syntax().original_file_range(ctx.sema.db);
+ if file_id == ctx.file_id() && range.contains(ctx.offset()) {
+ cov_mark::hit!(inline_call_recursive);
+ return None;
+ }
+ let params = get_fn_params(ctx.sema.db, function, &param_list)?;
+
+ if call_info.arguments.len() != params.len() {
+ // Can't inline the function because they've passed the wrong number of
+ // arguments to this function
+ cov_mark::hit!(inline_call_incorrect_number_of_arguments);
+ return None;
+ }
+
+ let syntax = call_info.node.syntax().clone();
+ acc.add(
+ AssistId("inline_call", AssistKind::RefactorInline),
+ label,
+ syntax.text_range(),
+ |builder| {
+ let replacement = inline(&ctx.sema, file_id, function, &fn_body, &params, &call_info);
+
+ builder.replace_ast(
+ match call_info.node {
+ ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it),
+ ast::CallableExpr::MethodCall(it) => ast::Expr::MethodCallExpr(it),
+ },
+ replacement,
+ );
+ },
+ )
+}
+
+struct CallInfo {
+ node: ast::CallableExpr,
+ arguments: Vec<ast::Expr>,
+ generic_arg_list: Option<ast::GenericArgList>,
+}
+
+impl CallInfo {
+ fn from_name_ref(name_ref: ast::NameRef) -> Option<CallInfo> {
+ let parent = name_ref.syntax().parent()?;
+ if let Some(call) = ast::MethodCallExpr::cast(parent.clone()) {
+ let receiver = call.receiver()?;
+ let mut arguments = vec![receiver];
+ arguments.extend(call.arg_list()?.args());
+ Some(CallInfo {
+ generic_arg_list: call.generic_arg_list(),
+ node: ast::CallableExpr::MethodCall(call),
+ arguments,
+ })
+ } else if let Some(segment) = ast::PathSegment::cast(parent) {
+ let path = segment.syntax().parent().and_then(ast::Path::cast)?;
+ let path = path.syntax().parent().and_then(ast::PathExpr::cast)?;
+ let call = path.syntax().parent().and_then(ast::CallExpr::cast)?;
+
+ Some(CallInfo {
+ arguments: call.arg_list()?.args().collect(),
+ node: ast::CallableExpr::Call(call),
+ generic_arg_list: segment.generic_arg_list(),
+ })
+ } else {
+ None
+ }
+ }
+}
+
+fn get_fn_params(
+ db: &dyn HirDatabase,
+ function: hir::Function,
+ param_list: &ast::ParamList,
+) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param)>> {
+ let mut assoc_fn_params = function.assoc_fn_params(db).into_iter();
+
+ let mut params = Vec::new();
+ if let Some(self_param) = param_list.self_param() {
+ // FIXME this should depend on the receiver as well as the self_param
+ params.push((
+ make::ident_pat(
+ self_param.amp_token().is_some(),
+ self_param.mut_token().is_some(),
+ make::name("this"),
+ )
+ .into(),
+ None,
+ assoc_fn_params.next()?,
+ ));
+ }
+ for param in param_list.params() {
+ params.push((param.pat()?, param.ty(), assoc_fn_params.next()?));
+ }
+
+ Some(params)
+}
+
+/// Builds the expression that replaces the call at `node` with the body of
+/// `function`: parameters are either inlined directly at their usages or bound
+/// with prepended `let` statements, `self` is rewritten to a `this` binding,
+/// and the call's generic arguments are substituted into the body.
+///
+/// Operates on a mutable copy of `fn_body`; the caller splices the returned
+/// expression into the tree.
+fn inline(
+    sema: &Semantics<'_, RootDatabase>,
+    function_def_file_id: FileId,
+    function: hir::Function,
+    fn_body: &ast::BlockExpr,
+    params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
+    CallInfo { node, arguments, generic_arg_list }: &CallInfo,
+) -> ast::Expr {
+    // Bodies that come from a macro expansion have lost their whitespace, so
+    // reconstruct it first; fall back to the body as-is if that cast fails.
+    let body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
+        cov_mark::hit!(inline_call_defined_in_macro);
+        if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) {
+            body
+        } else {
+            fn_body.clone_for_update()
+        }
+    } else {
+        fn_body.clone_for_update()
+    };
+    // All references to a local of the function, restricted to the file the
+    // function is defined in (which is where its body lives).
+    let usages_for_locals = |local| {
+        Definition::Local(local)
+            .usages(sema)
+            .all()
+            .references
+            .remove(&function_def_file_id)
+            .unwrap_or_default()
+            .into_iter()
+    };
+    // For each parameter, the `PathExpr`s in the body that refer to it. An
+    // empty list means the parameter can't be tracked (non-simple pattern or
+    // a usage that isn't a plain path), which forces the `let` fallback below.
+    let param_use_nodes: Vec<Vec<_>> = params
+        .iter()
+        .map(|(pat, _, param)| {
+            if !matches!(pat, ast::Pat::IdentPat(pat) if pat.is_simple_ident()) {
+                return Vec::new();
+            }
+            // FIXME: we need to fetch all locals declared in the parameter here
+            // not only the local if it is a simple binding
+            match param.as_local(sema.db) {
+                Some(l) => usages_for_locals(l)
+                    .map(|FileReference { name, range, .. }| match name {
+                        ast::NameLike::NameRef(_) => body
+                            .syntax()
+                            .covering_element(range)
+                            .ancestors()
+                            .nth(3)
+                            .and_then(ast::PathExpr::cast),
+                        _ => None,
+                    })
+                    .collect::<Option<Vec<_>>>()
+                    .unwrap_or_default(),
+                None => Vec::new(),
+            }
+        })
+        .collect();
+    // Rewrite every `self` in the body to refer to the `this` binding that the
+    // caller introduced for the receiver (params[0]).
+    if function.self_param(sema.db).is_some() {
+        let this = || make::name_ref("this").syntax().clone_for_update();
+        if let Some(self_local) = params[0].2.as_local(sema.db) {
+            usages_for_locals(self_local)
+                .flat_map(|FileReference { name, range, .. }| match name {
+                    ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
+                    _ => None,
+                })
+                .for_each(|it| {
+                    ted::replace(it, &this());
+                })
+        }
+    }
+    // Inline parameter expressions or generate `let` statements depending on whether inlining works or not.
+    for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() {
+        let inline_direct = |usage, replacement: &ast::Expr| {
+            if let Some(field) = path_expr_as_record_field(usage) {
+                cov_mark::hit!(inline_call_inline_direct_field);
+                field.replace_expr(replacement.clone_for_update());
+            } else {
+                ted::replace(usage.syntax(), &replacement.syntax().clone_for_update());
+            }
+        };
+        // izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors
+        let usages: &[ast::PathExpr] = &*usages;
+        let expr: &ast::Expr = expr;
+        match usages {
+            // inline single use closure arguments
+            [usage]
+                if matches!(expr, ast::Expr::ClosureExpr(_))
+                    && usage.syntax().parent().and_then(ast::Expr::cast).is_some() =>
+            {
+                cov_mark::hit!(inline_call_inline_closure);
+                let expr = make::expr_paren(expr.clone());
+                inline_direct(usage, &expr);
+            }
+            // inline single use literals
+            [usage] if matches!(expr, ast::Expr::Literal(_)) => {
+                cov_mark::hit!(inline_call_inline_literal);
+                inline_direct(usage, expr);
+            }
+            // inline direct local arguments
+            [_, ..] if expr_as_name_ref(expr).is_some() => {
+                cov_mark::hit!(inline_call_inline_locals);
+                usages.iter().for_each(|usage| inline_direct(usage, expr));
+            }
+            // can't inline, emit a let statement
+            _ => {
+                // Spell out the parameter type only when the argument needed an
+                // adjustment (coercion) at the call site.
+                let ty =
+                    sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone());
+                if let Some(stmt_list) = body.stmt_list() {
+                    stmt_list.push_front(
+                        make::let_stmt(pat.clone(), ty, Some(expr.clone()))
+                            .clone_for_update()
+                            .into(),
+                    )
+                }
+            }
+        }
+    }
+    // Substitute the call's explicit generic arguments into the inlined body.
+    if let Some(generic_arg_list) = generic_arg_list.clone() {
+        if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax()))
+        {
+            PathTransform::function_call(target, source, function, generic_arg_list)
+                .apply(body.syntax());
+        }
+    }
+
+    // Align the inlined block with the indentation of the call site.
+    let original_indentation = match node {
+        ast::CallableExpr::Call(it) => it.indent_level(),
+        ast::CallableExpr::MethodCall(it) => it.indent_level(),
+    };
+    body.reindent_to(original_indentation);
+
+    // A body that is nothing but a tail expression is inlined without the
+    // surrounding block. Otherwise keep the block, parenthesizing it when the
+    // call is the left-hand side of a binary expression so the result still
+    // parses as an expression rather than a block statement.
+    match body.tail_expr() {
+        Some(expr) if body.statements().next().is_none() => expr,
+        _ => match node
+            .syntax()
+            .parent()
+            .and_then(ast::BinExpr::cast)
+            .and_then(|bin_expr| bin_expr.lhs())
+        {
+            Some(lhs) if lhs.syntax() == node.syntax() => {
+                make::expr_paren(ast::Expr::BlockExpr(body)).clone_for_update()
+            }
+            _ => ast::Expr::BlockExpr(body),
+        },
+    }
+}
+
+/// If `usage` is a bare single-segment path, returns the record-expression
+/// field it initializes via the field shorthand (if any).
+fn path_expr_as_record_field(usage: &PathExpr) -> Option<ast::RecordExprField> {
+    usage
+        .path()
+        .and_then(|path| path.as_single_name_ref())
+        .and_then(|name_ref| ast::RecordExprField::for_name_ref(&name_ref))
+}
+
+#[cfg(test)]
+mod tests {
+    // Fixture tests for `inline_call` / `inline_into_callers`; `$0` marks the
+    // cursor position, `cov_mark::check!` pins the code path being exercised.
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    #[test]
+    fn no_args_or_return_value_gets_inlined_without_block() {
+        check_assist(
+            inline_call,
+            r#"
+fn foo() { println!("Hello, World!"); }
+fn main() {
+    fo$0o();
+}
+"#,
+            r#"
+fn foo() { println!("Hello, World!"); }
+fn main() {
+    { println!("Hello, World!"); };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn not_applicable_when_incorrect_number_of_parameters_are_provided() {
+        cov_mark::check!(inline_call_incorrect_number_of_arguments);
+        check_assist_not_applicable(
+            inline_call,
+            r#"
+fn add(a: u32, b: u32) -> u32 { a + b }
+fn main() { let x = add$0(42); }
+"#,
+        );
+    }
+
+    #[test]
+    fn args_with_side_effects() {
+        check_assist(
+            inline_call,
+            r#"
+fn foo(name: String) {
+    println!("Hello, {}!", name);
+}
+fn main() {
+    foo$0(String::from("Michael"));
+}
+"#,
+            r#"
+fn foo(name: String) {
+    println!("Hello, {}!", name);
+}
+fn main() {
+    {
+        let name = String::from("Michael");
+        println!("Hello, {}!", name);
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn function_with_multiple_statements() {
+        check_assist(
+            inline_call,
+            r#"
+fn foo(a: u32, b: u32) -> u32 {
+    let x = a + b;
+    let y = x - b;
+    x * y
+}
+
+fn main() {
+    let x = foo$0(1, 2);
+}
+"#,
+            r#"
+fn foo(a: u32, b: u32) -> u32 {
+    let x = a + b;
+    let y = x - b;
+    x * y
+}
+
+fn main() {
+    let x = {
+        let b = 2;
+        let x = 1 + b;
+        let y = x - b;
+        x * y
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn function_with_self_param() {
+        check_assist(
+            inline_call,
+            r#"
+struct Foo(u32);
+
+impl Foo {
+    fn add(self, a: u32) -> Self {
+        Foo(self.0 + a)
+    }
+}
+
+fn main() {
+    let x = Foo::add$0(Foo(3), 2);
+}
+"#,
+            r#"
+struct Foo(u32);
+
+impl Foo {
+    fn add(self, a: u32) -> Self {
+        Foo(self.0 + a)
+    }
+}
+
+fn main() {
+    let x = {
+        let this = Foo(3);
+        Foo(this.0 + 2)
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn method_by_val() {
+        check_assist(
+            inline_call,
+            r#"
+struct Foo(u32);
+
+impl Foo {
+    fn add(self, a: u32) -> Self {
+        Foo(self.0 + a)
+    }
+}
+
+fn main() {
+    let x = Foo(3).add$0(2);
+}
+"#,
+            r#"
+struct Foo(u32);
+
+impl Foo {
+    fn add(self, a: u32) -> Self {
+        Foo(self.0 + a)
+    }
+}
+
+fn main() {
+    let x = {
+        let this = Foo(3);
+        Foo(this.0 + 2)
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn method_by_ref() {
+        check_assist(
+            inline_call,
+            r#"
+struct Foo(u32);
+
+impl Foo {
+    fn add(&self, a: u32) -> Self {
+        Foo(self.0 + a)
+    }
+}
+
+fn main() {
+    let x = Foo(3).add$0(2);
+}
+"#,
+            r#"
+struct Foo(u32);
+
+impl Foo {
+    fn add(&self, a: u32) -> Self {
+        Foo(self.0 + a)
+    }
+}
+
+fn main() {
+    let x = {
+        let ref this = Foo(3);
+        Foo(this.0 + 2)
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn method_by_ref_mut() {
+        check_assist(
+            inline_call,
+            r#"
+struct Foo(u32);
+
+impl Foo {
+    fn clear(&mut self) {
+        self.0 = 0;
+    }
+}
+
+fn main() {
+    let mut foo = Foo(3);
+    foo.clear$0();
+}
+"#,
+            r#"
+struct Foo(u32);
+
+impl Foo {
+    fn clear(&mut self) {
+        self.0 = 0;
+    }
+}
+
+fn main() {
+    let mut foo = Foo(3);
+    {
+        let ref mut this = foo;
+        this.0 = 0;
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn function_multi_use_expr_in_param() {
+        check_assist(
+            inline_call,
+            r#"
+fn square(x: u32) -> u32 {
+    x * x
+}
+fn main() {
+    let x = 51;
+    let y = square$0(10 + x);
+}
+"#,
+            r#"
+fn square(x: u32) -> u32 {
+    x * x
+}
+fn main() {
+    let x = 51;
+    let y = {
+        let x = 10 + x;
+        x * x
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn function_use_local_in_param() {
+        cov_mark::check!(inline_call_inline_locals);
+        check_assist(
+            inline_call,
+            r#"
+fn square(x: u32) -> u32 {
+    x * x
+}
+fn main() {
+    let local = 51;
+    let y = square$0(local);
+}
+"#,
+            r#"
+fn square(x: u32) -> u32 {
+    x * x
+}
+fn main() {
+    let local = 51;
+    let y = local * local;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn method_in_impl() {
+        check_assist(
+            inline_call,
+            r#"
+struct Foo;
+impl Foo {
+    fn foo(&self) {
+        self;
+        self;
+    }
+    fn bar(&self) {
+        self.foo$0();
+    }
+}
+"#,
+            r#"
+struct Foo;
+impl Foo {
+    fn foo(&self) {
+        self;
+        self;
+    }
+    fn bar(&self) {
+        {
+            let ref this = self;
+            this;
+            this;
+        };
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn wraps_closure_in_paren() {
+        cov_mark::check!(inline_call_inline_closure);
+        check_assist(
+            inline_call,
+            r#"
+fn foo(x: fn()) {
+    x();
+}
+
+fn main() {
+    foo$0(|| {})
+}
+"#,
+            r#"
+fn foo(x: fn()) {
+    x();
+}
+
+fn main() {
+    {
+        (|| {})();
+    }
+}
+"#,
+        );
+        check_assist(
+            inline_call,
+            r#"
+fn foo(x: fn()) {
+    x();
+}
+
+fn main() {
+    foo$0(main)
+}
+"#,
+            r#"
+fn foo(x: fn()) {
+    x();
+}
+
+fn main() {
+    {
+        main();
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_single_literal_expr() {
+        cov_mark::check!(inline_call_inline_literal);
+        check_assist(
+            inline_call,
+            r#"
+fn foo(x: u32) -> u32{
+    x
+}
+
+fn main() {
+    foo$0(222);
+}
+"#,
+            r#"
+fn foo(x: u32) -> u32{
+    x
+}
+
+fn main() {
+    222;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_emits_type_for_coercion() {
+        check_assist(
+            inline_call,
+            r#"
+fn foo(x: *const u32) -> u32 {
+    x as u32
+}
+
+fn main() {
+    foo$0(&222);
+}
+"#,
+            r#"
+fn foo(x: *const u32) -> u32 {
+    x as u32
+}
+
+fn main() {
+    {
+        let x: *const u32 = &222;
+        x as u32
+    };
+}
+"#,
+        );
+    }
+
+    // FIXME: const generics aren't being substituted, this is blocked on better support for them
+    #[test]
+    fn inline_substitutes_generics() {
+        check_assist(
+            inline_call,
+            r#"
+fn foo<T, const N: usize>() {
+    bar::<T, N>()
+}
+
+fn bar<U, const M: usize>() {}
+
+fn main() {
+    foo$0::<usize, {0}>();
+}
+"#,
+            r#"
+fn foo<T, const N: usize>() {
+    bar::<T, N>()
+}
+
+fn bar<U, const M: usize>() {}
+
+fn main() {
+    bar::<usize, N>();
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_callers() {
+        check_assist(
+            inline_into_callers,
+            r#"
+fn do_the_math$0(b: u32) -> u32 {
+    let foo = 10;
+    foo * b + foo
+}
+fn foo() {
+    do_the_math(0);
+    let bar = 10;
+    do_the_math(bar);
+}
+"#,
+            r#"
+
+fn foo() {
+    {
+        let foo = 10;
+        foo * 0 + foo
+    };
+    let bar = 10;
+    {
+        let foo = 10;
+        foo * bar + foo
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_callers_across_files() {
+        check_assist(
+            inline_into_callers,
+            r#"
+//- /lib.rs
+mod foo;
+fn do_the_math$0(b: u32) -> u32 {
+    let foo = 10;
+    foo * b + foo
+}
+//- /foo.rs
+use super::do_the_math;
+fn foo() {
+    do_the_math(0);
+    let bar = 10;
+    do_the_math(bar);
+}
+"#,
+            r#"
+//- /lib.rs
+mod foo;
+
+//- /foo.rs
+fn foo() {
+    {
+        let foo = 10;
+        foo * 0 + foo
+    };
+    let bar = 10;
+    {
+        let foo = 10;
+        foo * bar + foo
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_callers_across_files_with_def_file() {
+        check_assist(
+            inline_into_callers,
+            r#"
+//- /lib.rs
+mod foo;
+fn do_the_math$0(b: u32) -> u32 {
+    let foo = 10;
+    foo * b + foo
+}
+fn bar(a: u32, b: u32) -> u32 {
+    do_the_math(0);
+}
+//- /foo.rs
+use super::do_the_math;
+fn foo() {
+    do_the_math(0);
+}
+"#,
+            r#"
+//- /lib.rs
+mod foo;
+
+fn bar(a: u32, b: u32) -> u32 {
+    {
+        let foo = 10;
+        foo * 0 + foo
+    };
+}
+//- /foo.rs
+fn foo() {
+    {
+        let foo = 10;
+        foo * 0 + foo
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_callers_recursive() {
+        cov_mark::check!(inline_into_callers_recursive);
+        check_assist_not_applicable(
+            inline_into_callers,
+            r#"
+fn foo$0() {
+    foo();
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_call_recursive() {
+        cov_mark::check!(inline_call_recursive);
+        check_assist_not_applicable(
+            inline_call,
+            r#"
+fn foo() {
+    foo$0();
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_call_field_shorthand() {
+        cov_mark::check!(inline_call_inline_direct_field);
+        check_assist(
+            inline_call,
+            r#"
+struct Foo {
+    field: u32,
+    field1: u32,
+    field2: u32,
+    field3: u32,
+}
+fn foo(field: u32, field1: u32, val2: u32, val3: u32) -> Foo {
+    Foo {
+        field,
+        field1,
+        field2: val2,
+        field3: val3,
+    }
+}
+fn main() {
+    let bar = 0;
+    let baz = 0;
+    foo$0(bar, 0, baz, 0);
+}
+"#,
+            r#"
+struct Foo {
+    field: u32,
+    field1: u32,
+    field2: u32,
+    field3: u32,
+}
+fn foo(field: u32, field1: u32, val2: u32, val3: u32) -> Foo {
+    Foo {
+        field,
+        field1,
+        field2: val2,
+        field3: val3,
+    }
+}
+fn main() {
+    let bar = 0;
+    let baz = 0;
+    Foo {
+        field: bar,
+        field1: 0,
+        field2: baz,
+        field3: 0,
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn inline_callers_wrapped_in_parentheses() {
+        check_assist(
+            inline_into_callers,
+            r#"
+fn foo$0() -> u32 {
+    let x = 0;
+    x
+}
+fn bar() -> u32 {
+    foo() + foo()
+}
+"#,
+            r#"
+
+fn bar() -> u32 {
+    ({
+        let x = 0;
+        x
+    }) + {
+        let x = 0;
+        x
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn inline_call_wrapped_in_parentheses() {
+        check_assist(
+            inline_call,
+            r#"
+fn foo() -> u32 {
+    let x = 0;
+    x
+}
+fn bar() -> u32 {
+    foo$0() + foo()
+}
+"#,
+            r#"
+fn foo() -> u32 {
+    let x = 0;
+    x
+}
+fn bar() -> u32 {
+    ({
+        let x = 0;
+        x
+    }) + foo()
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn inline_call_defined_in_macro() {
+        cov_mark::check!(inline_call_defined_in_macro);
+        check_assist(
+            inline_call,
+            r#"
+macro_rules! define_foo {
+    () => { fn foo() -> u32 {
+        let x = 0;
+        x
+    } };
+}
+define_foo!();
+fn bar() -> u32 {
+    foo$0()
+}
+"#,
+            r#"
+macro_rules! define_foo {
+    () => { fn foo() -> u32 {
+        let x = 0;
+        x
+    } };
+}
+define_foo!();
+fn bar() -> u32 {
+    {
+        let x = 0;
+        x
+    }
+}
+"#,
+        )
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
new file mode 100644
index 000000000..7259d6781
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
@@ -0,0 +1,954 @@
+use either::Either;
+use hir::{PathResolution, Semantics};
+use ide_db::{
+ base_db::FileId,
+ defs::Definition,
+ search::{FileReference, UsageSearchResult},
+ RootDatabase,
+};
+use syntax::{
+ ast::{self, AstNode, AstToken, HasName},
+ SyntaxElement, TextRange,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: inline_local_variable
+//
+// Inlines a local variable.
+//
+// ```
+// fn main() {
+// let x$0 = 1 + 2;
+// x * 4;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// (1 + 2) * 4;
+// }
+// ```
+pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let file_id = ctx.file_id();
+    let range = ctx.selection_trimmed();
+    // The assist can be invoked either from a usage of the local or from the
+    // `let` statement that introduces it; both paths produce the same plan.
+    let InlineData { let_stmt, delete_let, references, target } =
+        if let Some(path_expr) = ctx.find_node_at_offset::<ast::PathExpr>() {
+            inline_usage(&ctx.sema, path_expr, range, file_id)
+        } else if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
+            inline_let(&ctx.sema, let_stmt, range, file_id)
+        } else {
+            None
+        }?;
+    let initializer_expr = let_stmt.initializer()?;
+
+    // When the whole `let` statement goes away, also swallow the trailing
+    // whitespace so no empty line is left behind.
+    let delete_range = delete_let.then(|| {
+        if let Some(whitespace) = let_stmt
+            .syntax()
+            .next_sibling_or_token()
+            .and_then(SyntaxElement::into_token)
+            .and_then(ast::Whitespace::cast)
+        {
+            TextRange::new(
+                let_stmt.syntax().text_range().start(),
+                whitespace.syntax().text_range().end(),
+            )
+        } else {
+            let_stmt.syntax().text_range()
+        }
+    });
+
+    // For every usage, decide whether the inlined initializer must be wrapped
+    // in parentheses to keep operator precedence intact. A `None` from the
+    // closure (usage inside a macro expansion) aborts the whole assist.
+    let wrap_in_parens = references
+        .into_iter()
+        .filter_map(|FileReference { range, name, .. }| match name {
+            ast::NameLike::NameRef(name) => Some((range, name)),
+            _ => None,
+        })
+        .map(|(range, name_ref)| {
+            if range != name_ref.syntax().text_range() {
+                // Do not rename inside macros
+                // FIXME: This feels like a bad heuristic for macros
+                return None;
+            }
+            let usage_node =
+                name_ref.syntax().ancestors().find(|it| ast::PathExpr::can_cast(it.kind()));
+            let usage_parent_option =
+                usage_node.and_then(|it| it.parent()).and_then(ast::Expr::cast);
+            let usage_parent = match usage_parent_option {
+                Some(u) => u,
+                None => return Some((range, name_ref, false)),
+            };
+            // Initializers that are atomic or already bracketed never need
+            // extra parentheses, regardless of where they are inserted.
+            let initializer = matches!(
+                initializer_expr,
+                ast::Expr::CallExpr(_)
+                    | ast::Expr::IndexExpr(_)
+                    | ast::Expr::MethodCallExpr(_)
+                    | ast::Expr::FieldExpr(_)
+                    | ast::Expr::TryExpr(_)
+                    | ast::Expr::Literal(_)
+                    | ast::Expr::TupleExpr(_)
+                    | ast::Expr::ArrayExpr(_)
+                    | ast::Expr::ParenExpr(_)
+                    | ast::Expr::PathExpr(_)
+                    | ast::Expr::BlockExpr(_),
+            );
+            // Parent expressions that bracket (or delimit) their operand
+            // themselves, so no parentheses are needed either.
+            let parent = matches!(
+                usage_parent,
+                ast::Expr::CallExpr(_)
+                    | ast::Expr::TupleExpr(_)
+                    | ast::Expr::ArrayExpr(_)
+                    | ast::Expr::ParenExpr(_)
+                    | ast::Expr::ForExpr(_)
+                    | ast::Expr::WhileExpr(_)
+                    | ast::Expr::BreakExpr(_)
+                    | ast::Expr::ReturnExpr(_)
+                    | ast::Expr::MatchExpr(_)
+                    | ast::Expr::BlockExpr(_)
+            );
+            Some((range, name_ref, !(initializer || parent)))
+        })
+        .collect::<Option<Vec<_>>>()?;
+
+    let init_str = initializer_expr.syntax().text().to_string();
+    let init_in_paren = format!("({})", &init_str);
+
+    let target = match target {
+        ast::NameOrNameRef::Name(it) => it.syntax().text_range(),
+        ast::NameOrNameRef::NameRef(it) => it.syntax().text_range(),
+    };
+
+    acc.add(
+        AssistId("inline_local_variable", AssistKind::RefactorInline),
+        "Inline variable",
+        target,
+        move |builder| {
+            if let Some(range) = delete_range {
+                builder.delete(range);
+            }
+            for (range, name, should_wrap) in wrap_in_parens {
+                let replacement = if should_wrap { &init_in_paren } else { &init_str };
+                if ast::RecordExprField::for_field_name(&name).is_some() {
+                    cov_mark::hit!(inline_field_shorthand);
+                    // Field shorthand: `S { foo }` becomes `S { foo: <init> }`.
+                    builder.insert(range.end(), format!(": {}", replacement));
+                } else {
+                    builder.replace(range, replacement.clone())
+                }
+            }
+        },
+    )
+}
+
+/// The computed plan for inlining one local variable, shared by the two entry
+/// points (`inline_let` and `inline_usage`).
+struct InlineData {
+    /// The `let` statement defining the local being inlined.
+    let_stmt: ast::LetStmt,
+    /// Whether `let_stmt` should be deleted (all remaining usages get inlined).
+    delete_let: bool,
+    /// The syntax element the assist is reported on.
+    target: ast::NameOrNameRef,
+    /// Usages in the current file that will be replaced by the initializer.
+    references: Vec<FileReference>,
+}
+
+/// Computes the inline plan when the cursor sits on the `let` binding itself.
+///
+/// Bails out for non-ident patterns, `mut` bindings, selections reaching
+/// outside the binding pattern, and locals with no usage in `file_id`.
+fn inline_let(
+    sema: &Semantics<'_, RootDatabase>,
+    let_stmt: ast::LetStmt,
+    range: TextRange,
+    file_id: FileId,
+) -> Option<InlineData> {
+    let bind_pat = if let ast::Pat::IdentPat(pat) = let_stmt.pat()? {
+        pat
+    } else {
+        return None;
+    };
+
+    if bind_pat.mut_token().is_some() {
+        cov_mark::hit!(test_not_inline_mut_variable);
+        return None;
+    }
+    if !bind_pat.syntax().text_range().contains_range(range) {
+        cov_mark::hit!(not_applicable_outside_of_bind_pat);
+        return None;
+    }
+
+    let local = sema.to_def(&bind_pat)?;
+    let UsageSearchResult { mut references } = Definition::Local(local).usages(sema).all();
+    let references = match references.remove(&file_id) {
+        Some(references) => references,
+        None => {
+            cov_mark::hit!(test_not_applicable_if_variable_unused);
+            return None;
+        }
+    };
+
+    // Inlining from the binding replaces every usage, so the `let` always goes.
+    let target = ast::NameOrNameRef::Name(bind_pat.name()?);
+    Some(InlineData { let_stmt, delete_let: true, target, references })
+}
+
+/// Computes the inline plan when the cursor sits on a usage of the local.
+///
+/// Only the usage under the cursor is replaced; the defining `let` statement
+/// is removed only when that usage is the local's sole reference.
+fn inline_usage(
+    sema: &Semantics<'_, RootDatabase>,
+    path_expr: ast::PathExpr,
+    range: TextRange,
+    file_id: FileId,
+) -> Option<InlineData> {
+    let path = path_expr.path()?;
+    let name = path.as_single_name_ref()?;
+    if !name.syntax().text_range().contains_range(range) {
+        cov_mark::hit!(test_not_inline_selection_too_broad);
+        return None;
+    }
+
+    let local = if let PathResolution::Local(local) = sema.resolve_path(&path)? {
+        local
+    } else {
+        return None;
+    };
+    if local.is_mut(sema.db) {
+        cov_mark::hit!(test_not_inline_mut_variable_use);
+        return None;
+    }
+
+    // FIXME: Handle multiple local definitions
+    let bind_pat = if let Either::Left(ident) = local.source(sema.db).value {
+        ident
+    } else {
+        return None;
+    };
+
+    let let_stmt = ast::LetStmt::cast(bind_pat.syntax().parent()?)?;
+
+    let UsageSearchResult { mut references } = Definition::Local(local).usages(sema).all();
+    let mut references = references.remove(&file_id)?;
+    // The `let` can only be removed when this is the local's single reference;
+    // decide that before narrowing `references` down to the cursor's usage.
+    let delete_let = references.len() == 1;
+    references.retain(|fref| fref.name.as_name_ref() == Some(&name));
+
+    Some(InlineData { let_stmt, delete_let, target: ast::NameOrNameRef::NameRef(name), references })
+}
+
+#[cfg(test)]
+mod tests {
+    // Fixture tests for `inline_local_variable`; `$0` marks the cursor
+    // position, `cov_mark::check!` pins the code path being exercised.
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    #[test]
+    fn test_inline_let_bind_literal_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn bar(a: usize) {}
+fn foo() {
+    let a$0 = 1;
+    a + 1;
+    if a > 10 {
+    }
+
+    while a > 10 {
+
+    }
+    let b = a * 10;
+    bar(a);
+}",
+            r"
+fn bar(a: usize) {}
+fn foo() {
+    1 + 1;
+    if 1 > 10 {
+    }
+
+    while 1 > 10 {
+
+    }
+    let b = 1 * 10;
+    bar(1);
+}",
+        );
+    }
+
+    #[test]
+    fn test_inline_let_bind_bin_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn bar(a: usize) {}
+fn foo() {
+    let a$0 = 1 + 1;
+    a + 1;
+    if a > 10 {
+    }
+
+    while a > 10 {
+
+    }
+    let b = a * 10;
+    bar(a);
+}",
+            r"
+fn bar(a: usize) {}
+fn foo() {
+    (1 + 1) + 1;
+    if (1 + 1) > 10 {
+    }
+
+    while (1 + 1) > 10 {
+
+    }
+    let b = (1 + 1) * 10;
+    bar(1 + 1);
+}",
+        );
+    }
+
+    #[test]
+    fn test_inline_let_bind_function_call_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn bar(a: usize) {}
+fn foo() {
+    let a$0 = bar(1);
+    a + 1;
+    if a > 10 {
+    }
+
+    while a > 10 {
+
+    }
+    let b = a * 10;
+    bar(a);
+}",
+            r"
+fn bar(a: usize) {}
+fn foo() {
+    bar(1) + 1;
+    if bar(1) > 10 {
+    }
+
+    while bar(1) > 10 {
+
+    }
+    let b = bar(1) * 10;
+    bar(bar(1));
+}",
+        );
+    }
+
+    #[test]
+    fn test_inline_let_bind_cast_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn bar(a: usize): usize { a }
+fn foo() {
+    let a$0 = bar(1) as u64;
+    a + 1;
+    if a > 10 {
+    }
+
+    while a > 10 {
+
+    }
+    let b = a * 10;
+    bar(a);
+}",
+            r"
+fn bar(a: usize): usize { a }
+fn foo() {
+    (bar(1) as u64) + 1;
+    if (bar(1) as u64) > 10 {
+    }
+
+    while (bar(1) as u64) > 10 {
+
+    }
+    let b = (bar(1) as u64) * 10;
+    bar(bar(1) as u64);
+}",
+        );
+    }
+
+    #[test]
+    fn test_inline_let_bind_block_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = { 10 + 1 };
+    a + 1;
+    if a > 10 {
+    }
+
+    while a > 10 {
+
+    }
+    let b = a * 10;
+    bar(a);
+}",
+            r"
+fn foo() {
+    { 10 + 1 } + 1;
+    if { 10 + 1 } > 10 {
+    }
+
+    while { 10 + 1 } > 10 {
+
+    }
+    let b = { 10 + 1 } * 10;
+    bar({ 10 + 1 });
+}",
+        );
+    }
+
+    #[test]
+    fn test_inline_let_bind_paren_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = ( 10 + 1 );
+    a + 1;
+    if a > 10 {
+    }
+
+    while a > 10 {
+
+    }
+    let b = a * 10;
+    bar(a);
+}",
+            r"
+fn foo() {
+    ( 10 + 1 ) + 1;
+    if ( 10 + 1 ) > 10 {
+    }
+
+    while ( 10 + 1 ) > 10 {
+
+    }
+    let b = ( 10 + 1 ) * 10;
+    bar(( 10 + 1 ));
+}",
+        );
+    }
+
+    #[test]
+    fn test_not_inline_mut_variable() {
+        cov_mark::check!(test_not_inline_mut_variable);
+        check_assist_not_applicable(
+            inline_local_variable,
+            r"
+fn foo() {
+    let mut a$0 = 1 + 1;
+    a + 1;
+}",
+        );
+    }
+
+    #[test]
+    fn test_not_inline_mut_variable_use() {
+        cov_mark::check!(test_not_inline_mut_variable_use);
+        check_assist_not_applicable(
+            inline_local_variable,
+            r"
+fn foo() {
+    let mut a = 1 + 1;
+    a$0 + 1;
+}",
+        );
+    }
+
+    #[test]
+    fn test_call_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = bar(10 + 1);
+    let b = a * 10;
+    let c = a as usize;
+}",
+            r"
+fn foo() {
+    let b = bar(10 + 1) * 10;
+    let c = bar(10 + 1) as usize;
+}",
+        );
+    }
+
+    #[test]
+    fn test_index_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let x = vec![1, 2, 3];
+    let a$0 = x[0];
+    let b = a * 10;
+    let c = a as usize;
+}",
+            r"
+fn foo() {
+    let x = vec![1, 2, 3];
+    let b = x[0] * 10;
+    let c = x[0] as usize;
+}",
+        );
+    }
+
+    #[test]
+    fn test_method_call_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let bar = vec![1];
+    let a$0 = bar.len();
+    let b = a * 10;
+    let c = a as usize;
+}",
+            r"
+fn foo() {
+    let bar = vec![1];
+    let b = bar.len() * 10;
+    let c = bar.len() as usize;
+}",
+        );
+    }
+
+    #[test]
+    fn test_field_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+struct Bar {
+    foo: usize
+}
+
+fn foo() {
+    let bar = Bar { foo: 1 };
+    let a$0 = bar.foo;
+    let b = a * 10;
+    let c = a as usize;
+}",
+            r"
+struct Bar {
+    foo: usize
+}
+
+fn foo() {
+    let bar = Bar { foo: 1 };
+    let b = bar.foo * 10;
+    let c = bar.foo as usize;
+}",
+        );
+    }
+
+    #[test]
+    fn test_try_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() -> Option<usize> {
+    let bar = Some(1);
+    let a$0 = bar?;
+    let b = a * 10;
+    let c = a as usize;
+    None
+}",
+            r"
+fn foo() -> Option<usize> {
+    let bar = Some(1);
+    let b = bar? * 10;
+    let c = bar? as usize;
+    None
+}",
+        );
+    }
+
+    #[test]
+    fn test_ref_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let bar = 10;
+    let a$0 = &bar;
+    let b = a * 10;
+}",
+            r"
+fn foo() {
+    let bar = 10;
+    let b = (&bar) * 10;
+}",
+        );
+    }
+
+    #[test]
+    fn test_tuple_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = (10, 20);
+    let b = a[0];
+}",
+            r"
+fn foo() {
+    let b = (10, 20)[0];
+}",
+        );
+    }
+
+    #[test]
+    fn test_array_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = [1, 2, 3];
+    let b = a.len();
+}",
+            r"
+fn foo() {
+    let b = [1, 2, 3].len();
+}",
+        );
+    }
+
+    #[test]
+    fn test_paren() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = (10 + 20);
+    let b = a * 10;
+    let c = a as usize;
+}",
+            r"
+fn foo() {
+    let b = (10 + 20) * 10;
+    let c = (10 + 20) as usize;
+}",
+        );
+    }
+
+    #[test]
+    fn test_path_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let d = 10;
+    let a$0 = d;
+    let b = a * 10;
+    let c = a as usize;
+}",
+            r"
+fn foo() {
+    let d = 10;
+    let b = d * 10;
+    let c = d as usize;
+}",
+        );
+    }
+
+    #[test]
+    fn test_block_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = { 10 };
+    let b = a * 10;
+    let c = a as usize;
+}",
+            r"
+fn foo() {
+    let b = { 10 } * 10;
+    let c = { 10 } as usize;
+}",
+        );
+    }
+
+    #[test]
+    fn test_used_in_different_expr1() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = 10 + 20;
+    let b = a * 10;
+    let c = (a, 20);
+    let d = [a, 10];
+    let e = (a);
+}",
+            r"
+fn foo() {
+    let b = (10 + 20) * 10;
+    let c = (10 + 20, 20);
+    let d = [10 + 20, 10];
+    let e = (10 + 20);
+}",
+        );
+    }
+
+    #[test]
+    fn test_used_in_for_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = vec![10, 20];
+    for i in a {}
+}",
+            r"
+fn foo() {
+    for i in vec![10, 20] {}
+}",
+        );
+    }
+
+    #[test]
+    fn test_used_in_while_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = 1 > 0;
+    while a {}
+}",
+            r"
+fn foo() {
+    while 1 > 0 {}
+}",
+        );
+    }
+
+    #[test]
+    fn test_used_in_break_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = 1 + 1;
+    loop {
+        break a;
+    }
+}",
+            r"
+fn foo() {
+    loop {
+        break 1 + 1;
+    }
+}",
+        );
+    }
+
+    #[test]
+    fn test_used_in_return_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = 1 > 0;
+    return a;
+}",
+            r"
+fn foo() {
+    return 1 > 0;
+}",
+        );
+    }
+
+    #[test]
+    fn test_used_in_match_expr() {
+        check_assist(
+            inline_local_variable,
+            r"
+fn foo() {
+    let a$0 = 1 > 0;
+    match a {}
+}",
+            r"
+fn foo() {
+    match 1 > 0 {}
+}",
+        );
+    }
+
+    #[test]
+    fn inline_field_shorthand() {
+        cov_mark::check!(inline_field_shorthand);
+        check_assist(
+            inline_local_variable,
+            r"
+struct S { foo: i32}
+fn main() {
+    let $0foo = 92;
+    S { foo }
+}
+",
+            r"
+struct S { foo: i32}
+fn main() {
+    S { foo: 92 }
+}
+",
+        );
+    }
+
+    #[test]
+    fn test_not_applicable_if_variable_unused() {
+        cov_mark::check!(test_not_applicable_if_variable_unused);
+        check_assist_not_applicable(
+            inline_local_variable,
+            r"
+fn foo() {
+    let $0a = 0;
+}
+            ",
+        )
+    }
+
+    #[test]
+    fn not_applicable_outside_of_bind_pat() {
+        cov_mark::check!(not_applicable_outside_of_bind_pat);
+        check_assist_not_applicable(
+            inline_local_variable,
+            r"
+fn main() {
+    let x = $01 + 2;
+    x * 4;
+}
+",
+        )
+    }
+
+    #[test]
+    fn works_on_local_usage() {
+        check_assist(
+            inline_local_variable,
+            r#"
+fn f() {
+    let xyz = 0;
+    xyz$0;
+}
+"#,
+            r#"
+fn f() {
+    0;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn does_not_remove_let_when_multiple_usages() {
+        check_assist(
+            inline_local_variable,
+            r#"
+fn f() {
+    let xyz = 0;
+    xyz$0;
+    xyz;
+}
+"#,
+            r#"
+fn f() {
+    let xyz = 0;
+    0;
+    xyz;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn not_applicable_with_non_ident_pattern() {
+        check_assist_not_applicable(
+            inline_local_variable,
+            r#"
+fn main() {
+    let (x, y) = (0, 1);
+    x$0;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn not_applicable_on_local_usage_in_macro() {
+        check_assist_not_applicable(
+            inline_local_variable,
+            r#"
+macro_rules! m {
+    ($i:ident) => { $i }
+}
+fn f() {
+    let xyz = 0;
+    m!(xyz$0); // replacing it would break the macro
+}
+"#,
+        );
+        check_assist_not_applicable(
+            inline_local_variable,
+            r#"
+macro_rules! m {
+    ($i:ident) => { $i }
+}
+fn f() {
+    let xyz$0 = 0;
+    m!(xyz); // replacing it would break the macro
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_not_inline_selection_too_broad() {
+        cov_mark::check!(test_not_inline_selection_too_broad);
+        check_assist_not_applicable(
+            inline_local_variable,
+            r#"
+fn f() {
+    let foo = 0;
+    let bar = 0;
+    $0foo + bar$0;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_inline_ref_in_let() {
+        check_assist(
+            inline_local_variable,
+            r#"
+fn f() {
+    let x = {
+        let y = 0;
+        y$0
+    };
+}
+"#,
+            r#"
+fn f() {
+    let x = {
+        0
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_inline_let_unit_struct() {
+        check_assist_not_applicable(
+            inline_local_variable,
+            r#"
+struct S;
+fn f() {
+    let S$0 = S;
+    S;
+}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
new file mode 100644
index 000000000..054663a06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
@@ -0,0 +1,838 @@
+// Some ideas for future improvements:
+// - Support replacing aliases which are used in expressions, e.g. `A::new()`.
+// - "inline_alias_to_users" assist #10881.
+// - Remove unused aliases if there are no longer any users, see inline_call.rs.
+
+use hir::{HasSource, PathResolution};
+use itertools::Itertools;
+use std::collections::HashMap;
+use syntax::{
+ ast::{self, make, HasGenericParams, HasName},
+ ted, AstNode, NodeOrToken, SyntaxNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: inline_type_alias
+//
+// Replace a type alias with its concrete type.
+//
+// ```
+// type A<T = u32> = Vec<T>;
+//
+// fn main() {
+// let a: $0A;
+// }
+// ```
+// ->
+// ```
+// type A<T = u32> = Vec<T>;
+//
+// fn main() {
+// let a: Vec<u32>;
+// }
+// ```
+pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ enum Replacement {
+ Generic { lifetime_map: LifetimeMap, const_and_type_map: ConstAndTypeMap },
+ Plain,
+ }
+
+ let alias_instance = ctx.find_node_at_offset::<ast::PathType>()?;
+ let concrete_type;
+ let replacement;
+ match alias_instance.path()?.as_single_name_ref() {
+ Some(nameref) if nameref.Self_token().is_some() => {
+ match ctx.sema.resolve_path(&alias_instance.path()?)? {
+ PathResolution::SelfType(imp) => {
+ concrete_type = imp.source(ctx.db())?.value.self_ty()?;
+ }
+ // FIXME: should also work in ADT definitions
+ _ => return None,
+ }
+
+ replacement = Replacement::Plain;
+ }
+ _ => {
+ let alias = get_type_alias(&ctx, &alias_instance)?;
+ concrete_type = alias.ty()?;
+
+ replacement = if let Some(alias_generics) = alias.generic_param_list() {
+ if alias_generics.generic_params().next().is_none() {
+ cov_mark::hit!(no_generics_params);
+ return None;
+ }
+
+ let instance_args =
+ alias_instance.syntax().descendants().find_map(ast::GenericArgList::cast);
+
+ Replacement::Generic {
+ lifetime_map: LifetimeMap::new(&instance_args, &alias_generics)?,
+ const_and_type_map: ConstAndTypeMap::new(&instance_args, &alias_generics)?,
+ }
+ } else {
+ Replacement::Plain
+ };
+ }
+ }
+
+ let target = alias_instance.syntax().text_range();
+
+ acc.add(
+ AssistId("inline_type_alias", AssistKind::RefactorInline),
+ "Inline type alias",
+ target,
+ |builder| {
+ let replacement_text = match replacement {
+ Replacement::Generic { lifetime_map, const_and_type_map } => {
+ create_replacement(&lifetime_map, &const_and_type_map, &concrete_type)
+ }
+ Replacement::Plain => concrete_type.to_string(),
+ };
+
+ builder.replace(target, replacement_text);
+ },
+ )
+}
+
+struct LifetimeMap(HashMap<String, ast::Lifetime>);
+
+impl LifetimeMap {
+ fn new(
+ instance_args: &Option<ast::GenericArgList>,
+ alias_generics: &ast::GenericParamList,
+ ) -> Option<Self> {
+ let mut inner = HashMap::new();
+
+ let wildcard_lifetime = make::lifetime("'_");
+ let lifetimes = alias_generics
+ .lifetime_params()
+ .filter_map(|lp| lp.lifetime())
+ .map(|l| l.to_string())
+ .collect_vec();
+
+ for lifetime in &lifetimes {
+ inner.insert(lifetime.to_string(), wildcard_lifetime.clone());
+ }
+
+ if let Some(instance_generic_args_list) = &instance_args {
+ for (index, lifetime) in instance_generic_args_list
+ .lifetime_args()
+ .filter_map(|arg| arg.lifetime())
+ .enumerate()
+ {
+ let key = match lifetimes.get(index) {
+ Some(key) => key,
+ None => {
+ cov_mark::hit!(too_many_lifetimes);
+ return None;
+ }
+ };
+
+ inner.insert(key.clone(), lifetime);
+ }
+ }
+
+ Some(Self(inner))
+ }
+}
+
+struct ConstAndTypeMap(HashMap<String, SyntaxNode>);
+
+impl ConstAndTypeMap {
+ fn new(
+ instance_args: &Option<ast::GenericArgList>,
+ alias_generics: &ast::GenericParamList,
+ ) -> Option<Self> {
+ let mut inner = HashMap::new();
+ let instance_generics = generic_args_to_const_and_type_generics(instance_args);
+ let alias_generics = generic_param_list_to_const_and_type_generics(&alias_generics);
+
+ if instance_generics.len() > alias_generics.len() {
+ cov_mark::hit!(too_many_generic_args);
+ return None;
+ }
+
+ // Any declaration generics that don't have a default value must have one
+ // provided by the instance.
+ for (i, declaration_generic) in alias_generics.iter().enumerate() {
+ let key = declaration_generic.replacement_key()?;
+
+ if let Some(instance_generic) = instance_generics.get(i) {
+ inner.insert(key, instance_generic.replacement_value()?);
+ } else if let Some(value) = declaration_generic.replacement_value() {
+ inner.insert(key, value);
+ } else {
+ cov_mark::hit!(missing_replacement_param);
+ return None;
+ }
+ }
+
+ Some(Self(inner))
+ }
+}
+
+/// This doesn't attempt to ensure specified generics are compatible with those
+/// required by the type alias, other than lifetimes which must either all be
+/// specified or all omitted. It will replace TypeArgs with ConstArgs and vice
+/// versa if they're in the wrong position. It supports partially specified
+/// generics.
+///
+/// 1. Map the provided instance's generic args to the type alias's generic
+/// params:
+///
+/// ```
+/// type A<'a, const N: usize, T = u64> = &'a [T; N];
+/// ^ alias generic params
+/// let a: A<100>;
+/// ^ instance generic args
+/// ```
+///
+/// generic['a] = '_ due to omission
+/// generic[N] = 100 due to the instance arg
+/// generic[T] = u64 due to the default param
+///
+/// 2. Copy the concrete type and substitute in each found mapping:
+///
+/// &'_ [u64; 100]
+///
+/// 3. Remove wildcard lifetimes entirely:
+///
+/// &[u64; 100]
+fn create_replacement(
+ lifetime_map: &LifetimeMap,
+ const_and_type_map: &ConstAndTypeMap,
+ concrete_type: &ast::Type,
+) -> String {
+ let updated_concrete_type = concrete_type.clone_for_update();
+ let mut replacements = Vec::new();
+ let mut removals = Vec::new();
+
+ for syntax in updated_concrete_type.syntax().descendants() {
+ let syntax_string = syntax.to_string();
+ let syntax_str = syntax_string.as_str();
+
+ if let Some(old_lifetime) = ast::Lifetime::cast(syntax.clone()) {
+ if let Some(new_lifetime) = lifetime_map.0.get(&old_lifetime.to_string()) {
+ if new_lifetime.text() == "'_" {
+ removals.push(NodeOrToken::Node(syntax.clone()));
+
+ if let Some(ws) = syntax.next_sibling_or_token() {
+ removals.push(ws.clone());
+ }
+
+ continue;
+ }
+
+ replacements.push((syntax.clone(), new_lifetime.syntax().clone_for_update()));
+ }
+ } else if let Some(replacement_syntax) = const_and_type_map.0.get(syntax_str) {
+ let new_string = replacement_syntax.to_string();
+ let new = if new_string == "_" {
+ make::wildcard_pat().syntax().clone_for_update()
+ } else {
+ replacement_syntax.clone_for_update()
+ };
+
+ replacements.push((syntax.clone(), new));
+ }
+ }
+
+ for (old, new) in replacements {
+ ted::replace(old, new);
+ }
+
+ for syntax in removals {
+ ted::remove(syntax);
+ }
+
+ updated_concrete_type.to_string()
+}
+
+fn get_type_alias(ctx: &AssistContext<'_>, path: &ast::PathType) -> Option<ast::TypeAlias> {
+ let resolved_path = ctx.sema.resolve_path(&path.path()?)?;
+
+ // We need the generics in the correct order to be able to map any provided
+ // instance generics to declaration generics. The `hir::TypeAlias` doesn't
+ // keep the order, so we must get the `ast::TypeAlias` from the hir
+ // definition.
+ if let PathResolution::Def(hir::ModuleDef::TypeAlias(ta)) = resolved_path {
+ Some(ctx.sema.source(ta)?.value)
+ } else {
+ None
+ }
+}
+
+enum ConstOrTypeGeneric {
+ ConstArg(ast::ConstArg),
+ TypeArg(ast::TypeArg),
+ ConstParam(ast::ConstParam),
+ TypeParam(ast::TypeParam),
+}
+
+impl ConstOrTypeGeneric {
+ fn replacement_key(&self) -> Option<String> {
+ // Only params are used as replacement keys.
+ match self {
+ ConstOrTypeGeneric::ConstParam(cp) => Some(cp.name()?.to_string()),
+ ConstOrTypeGeneric::TypeParam(tp) => Some(tp.name()?.to_string()),
+ _ => None,
+ }
+ }
+
+ fn replacement_value(&self) -> Option<SyntaxNode> {
+ Some(match self {
+ ConstOrTypeGeneric::ConstArg(ca) => ca.expr()?.syntax().clone(),
+ ConstOrTypeGeneric::TypeArg(ta) => ta.syntax().clone(),
+ ConstOrTypeGeneric::ConstParam(cp) => cp.default_val()?.syntax().clone(),
+ ConstOrTypeGeneric::TypeParam(tp) => tp.default_type()?.syntax().clone(),
+ })
+ }
+}
+
+fn generic_param_list_to_const_and_type_generics(
+ generics: &ast::GenericParamList,
+) -> Vec<ConstOrTypeGeneric> {
+ let mut others = Vec::new();
+
+ for param in generics.generic_params() {
+ match param {
+ ast::GenericParam::LifetimeParam(_) => {}
+ ast::GenericParam::ConstParam(cp) => {
+ others.push(ConstOrTypeGeneric::ConstParam(cp));
+ }
+ ast::GenericParam::TypeParam(tp) => others.push(ConstOrTypeGeneric::TypeParam(tp)),
+ }
+ }
+
+ others
+}
+
+fn generic_args_to_const_and_type_generics(
+ generics: &Option<ast::GenericArgList>,
+) -> Vec<ConstOrTypeGeneric> {
+ let mut others = Vec::new();
+
+ // It's fine for there to be no instance generics because the declaration
+ // might have default values or they might be inferred.
+ if let Some(generics) = generics {
+ for arg in generics.generic_args() {
+ match arg {
+ ast::GenericArg::TypeArg(ta) => {
+ others.push(ConstOrTypeGeneric::TypeArg(ta));
+ }
+ ast::GenericArg::ConstArg(ca) => {
+ others.push(ConstOrTypeGeneric::ConstArg(ca));
+ }
+ _ => {}
+ }
+ }
+ }
+
+ others
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn empty_generic_params() {
+ cov_mark::check!(no_generics_params);
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A<> = T;
+fn main() {
+ let a: $0A<u32>;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn too_many_generic_args() {
+ cov_mark::check!(too_many_generic_args);
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A<T> = T;
+fn main() {
+ let a: $0A<u32, u64>;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn too_many_lifetimes() {
+ cov_mark::check!(too_many_lifetimes);
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A<'a> = &'a &'b u32;
+fn f<'a>() {
+ let a: $0A<'a, 'b> = 0;
+}
+"#,
+ );
+ }
+
+ // This must be supported in order to support "inline_alias_to_users" or
+ // whatever it will be called.
+ #[test]
+ fn alias_as_expression_ignored() {
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A = Vec<u32>;
+fn main() {
+ let a: A = $0A::new();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn primitive_arg() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<T> = T;
+fn main() {
+ let a: $0A<u32> = 0;
+}
+"#,
+ r#"
+type A<T> = T;
+fn main() {
+ let a: u32 = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_generic_replacements() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = Vec<u32>;
+fn main() {
+ let a: $0A;
+}
+"#,
+ r#"
+type A = Vec<u32>;
+fn main() {
+ let a: Vec<u32>;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_expression() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<const N: usize = { 1 }> = [u32; N];
+fn main() {
+ let a: $0A;
+}
+"#,
+ r#"
+type A<const N: usize = { 1 }> = [u32; N];
+fn main() {
+ let a: [u32; { 1 }];
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_default_value() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<const N: usize = 1> = [u32; N];
+fn main() {
+ let a: $0A;
+}
+"#,
+ r#"
+type A<const N: usize = 1> = [u32; N];
+fn main() {
+ let a: [u32; 1];
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn all_param_types() {
+ check_assist(
+ inline_type_alias,
+ r#"
+struct Struct<const C: usize>;
+type A<'inner1, 'outer1, Outer1, const INNER1: usize, Inner1: Clone, const OUTER1: usize> = (Struct<INNER1>, Struct<OUTER1>, Outer1, &'inner1 (), Inner1, &'outer1 ());
+fn foo<'inner2, 'outer2, Outer2, const INNER2: usize, Inner2, const OUTER2: usize>() {
+ let a: $0A<'inner2, 'outer2, Outer2, INNER2, Inner2, OUTER2>;
+}
+"#,
+ r#"
+struct Struct<const C: usize>;
+type A<'inner1, 'outer1, Outer1, const INNER1: usize, Inner1: Clone, const OUTER1: usize> = (Struct<INNER1>, Struct<OUTER1>, Outer1, &'inner1 (), Inner1, &'outer1 ());
+fn foo<'inner2, 'outer2, Outer2, const INNER2: usize, Inner2, const OUTER2: usize>() {
+ let a: (Struct<INNER2>, Struct<OUTER2>, Outer2, &'inner2 (), Inner2, &'outer2 ());
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn omitted_lifetimes() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<'l, 'r> = &'l &'r u32;
+fn main() {
+ let a: $0A;
+}
+"#,
+ r#"
+type A<'l, 'r> = &'l &'r u32;
+fn main() {
+ let a: &&u32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn omitted_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<'r, 'l, T = u32> = &'l std::collections::HashMap<&'r str, T>;
+fn main() {
+ let a: $0A<'_, '_>;
+}
+"#,
+ r#"
+type A<'r, 'l, T = u32> = &'l std::collections::HashMap<&'r str, T>;
+fn main() {
+ let a: &std::collections::HashMap<&str, u32>;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn omitted_everything() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<'r, 'l, T = u32> = &'l std::collections::HashMap<&'r str, T>;
+fn main() {
+ let v = std::collections::HashMap<&str, u32>;
+ let a: $0A = &v;
+}
+"#,
+ r#"
+type A<'r, 'l, T = u32> = &'l std::collections::HashMap<&'r str, T>;
+fn main() {
+ let v = std::collections::HashMap<&str, u32>;
+ let a: &std::collections::HashMap<&str, u32> = &v;
+}
+"#,
+ );
+ }
+
+// This doesn't actually cause the GenericArgList to contain an AssocTypeArg.
+ #[test]
+ fn arg_associated_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+trait Tra { type Assoc; fn a(); }
+struct Str {}
+impl Tra for Str {
+ type Assoc = u32;
+ fn a() {
+ type A<T> = Vec<T>;
+ let a: $0A<Self::Assoc>;
+ }
+}
+"#,
+ r#"
+trait Tra { type Assoc; fn a(); }
+struct Str {}
+impl Tra for Str {
+ type Assoc = u32;
+ fn a() {
+ type A<T> = Vec<T>;
+ let a: Vec<Self::Assoc>;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_default_associated_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+trait Tra { type Assoc; fn a() }
+struct Str {}
+impl Tra for Str {
+ type Assoc = u32;
+ fn a() {
+ type A<T = Self::Assoc> = Vec<T>;
+ let a: $0A;
+ }
+}
+"#,
+ r#"
+trait Tra { type Assoc; fn a() }
+struct Str {}
+impl Tra for Str {
+ type Assoc = u32;
+ fn a() {
+ type A<T = Self::Assoc> = Vec<T>;
+ let a: Vec<Self::Assoc>;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_pointer() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = fn(u32);
+fn foo(a: u32) {}
+fn main() {
+ let a: $0A = foo;
+}
+"#,
+ r#"
+type A = fn(u32);
+fn foo(a: u32) {}
+fn main() {
+ let a: fn(u32) = foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closure() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = Box<dyn FnOnce(u32) -> u32>;
+fn main() {
+ let a: $0A = Box::new(|_| 0);
+}
+"#,
+ r#"
+type A = Box<dyn FnOnce(u32) -> u32>;
+fn main() {
+ let a: Box<dyn FnOnce(u32) -> u32> = Box::new(|_| 0);
+}
+"#,
+ );
+ }
+
+ // Type aliases can't be used in traits, but someone might use the assist to
+ // fix the error.
+ #[test]
+ fn bounds() {
+ check_assist(
+ inline_type_alias,
+ r#"type A = std::io::Write; fn f<T>() where T: $0A {}"#,
+ r#"type A = std::io::Write; fn f<T>() where T: std::io::Write {}"#,
+ );
+ }
+
+ #[test]
+ fn function_parameter() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = std::io::Write;
+fn f(a: impl $0A) {}
+"#,
+ r#"
+type A = std::io::Write;
+fn f(a: impl std::io::Write) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn arg_expression() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<const N: usize> = [u32; N];
+fn main() {
+ let a: $0A<{ 1 + 1 }>;
+}
+"#,
+ r#"
+type A<const N: usize> = [u32; N];
+fn main() {
+ let a: [u32; { 1 + 1 }];
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn alias_instance_generic_path() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<const N: usize> = [u32; N];
+fn main() {
+ let a: $0A<u32::MAX>;
+}
+"#,
+ r#"
+type A<const N: usize> = [u32; N];
+fn main() {
+ let a: [u32; u32::MAX];
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generic_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = String;
+fn f(a: Vec<$0A>) {}
+"#,
+ r#"
+type A = String;
+fn f(a: Vec<String>) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_replacement_param() {
+ cov_mark::check!(missing_replacement_param);
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A<U> = Vec<T>;
+fn main() {
+ let a: $0A;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn full_path_type_is_replaced() {
+ check_assist(
+ inline_type_alias,
+ r#"
+mod foo {
+ pub type A = String;
+}
+fn main() {
+ let a: foo::$0A;
+}
+"#,
+ r#"
+mod foo {
+ pub type A = String;
+}
+fn main() {
+ let a: String;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_self_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+struct Strukt;
+
+impl Strukt {
+ fn new() -> Self$0 {}
+}
+"#,
+ r#"
+struct Strukt;
+
+impl Strukt {
+ fn new() -> Strukt {}
+}
+"#,
+ );
+ check_assist(
+ inline_type_alias,
+ r#"
+struct Strukt<'a, T, const C: usize>(&'a [T; C]);
+
+impl<T, const C: usize> Strukt<'_, T, C> {
+ fn new() -> Self$0 {}
+}
+"#,
+ r#"
+struct Strukt<'a, T, const C: usize>(&'a [T; C]);
+
+impl<T, const C: usize> Strukt<'_, T, C> {
+ fn new() -> Strukt<'_, T, C> {}
+}
+"#,
+ );
+ check_assist(
+ inline_type_alias,
+ r#"
+struct Strukt<'a, T, const C: usize>(&'a [T; C]);
+
+trait Tr<'b, T> {}
+
+impl<T, const C: usize> Tr<'static, u8> for Strukt<'_, T, C> {
+ fn new() -> Self$0 {}
+}
+"#,
+ r#"
+struct Strukt<'a, T, const C: usize>(&'a [T; C]);
+
+trait Tr<'b, T> {}
+
+impl<T, const C: usize> Tr<'static, u8> for Strukt<'_, T, C> {
+ fn new() -> Strukt<'_, T, C> {}
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+trait Tr {
+ fn new() -> Self$0;
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs
new file mode 100644
index 000000000..062c816ae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs
@@ -0,0 +1,144 @@
+use syntax::{
+ ast::{self, edit_in_place::GenericParamsOwnerEdit, make, AstNode},
+ ted,
+};
+
+use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: introduce_named_generic
+//
+// Replaces `impl Trait` function argument with the named generic.
+//
+// ```
+// fn foo(bar: $0impl Bar) {}
+// ```
+// ->
+// ```
+// fn foo<B: Bar>(bar: B) {}
+// ```
+pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_trait_type = ctx.find_node_at_offset::<ast::ImplTraitType>()?;
+ let param = impl_trait_type.syntax().parent().and_then(ast::Param::cast)?;
+ let fn_ = param.syntax().ancestors().find_map(ast::Fn::cast)?;
+
+ let type_bound_list = impl_trait_type.type_bound_list()?;
+
+ let target = fn_.syntax().text_range();
+ acc.add(
+ AssistId("introduce_named_generic", AssistKind::RefactorRewrite),
+ "Replace impl trait with generic",
+ target,
+ |edit| {
+ let impl_trait_type = edit.make_mut(impl_trait_type);
+ let fn_ = edit.make_mut(fn_);
+
+ let type_param_name = suggest_name::for_generic_parameter(&impl_trait_type);
+
+ let type_param = make::type_param(make::name(&type_param_name), Some(type_bound_list))
+ .clone_for_update();
+ let new_ty = make::ty(&type_param_name).clone_for_update();
+
+ ted::replace(impl_trait_type.syntax(), new_ty.syntax());
+ fn_.get_or_create_generic_param_list().add_generic_param(type_param.into())
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::check_assist;
+
+ #[test]
+ fn introduce_named_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<G>(bar: $0impl Bar) {}"#,
+ r#"fn foo<G, B: Bar>(bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_without_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo(bar: $0impl Bar) {}"#,
+ r#"fn foo<B: Bar>(bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_two_impl_trait_with_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<G>(foo: impl Foo, bar: $0impl Bar) {}"#,
+ r#"fn foo<G, B: Bar>(foo: impl Foo, bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_empty_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<>(bar: $0impl Bar) {}"#,
+ r#"fn foo<B: Bar>(bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_empty_multiline_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"
+fn foo<
+>(bar: $0impl Bar) {}
+"#,
+ r#"
+fn foo<B: Bar
+>(bar: B) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_exist_generic_letter() {
+ // FIXME: This is wrong, we should pick a different name if the one we
+ // want is already bound.
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<B>(bar: $0impl Bar) {}"#,
+ r#"fn foo<B, B: Bar>(bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_multiline_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"
+fn foo<
+ G: Foo,
+ F,
+ H,
+>(bar: $0impl Bar) {}
+"#,
+ r#"
+fn foo<
+ G: Foo,
+ F,
+ H, B: Bar,
+>(bar: B) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_multiple() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo(bar: $0impl Foo + Bar) {}"#,
+ r#"fn foo<F: Foo + Bar>(bar: F) {}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
new file mode 100644
index 000000000..ce91dd237
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
@@ -0,0 +1,338 @@
+use ide_db::FxHashSet;
+use syntax::{
+ ast::{self, edit_in_place::GenericParamsOwnerEdit, make, HasGenericParams},
+ ted::{self, Position},
+ AstNode, TextRange,
+};
+
+use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+
+static ASSIST_NAME: &str = "introduce_named_lifetime";
+static ASSIST_LABEL: &str = "Introduce named lifetime";
+
+// Assist: introduce_named_lifetime
+//
+// Change an anonymous lifetime to a named lifetime.
+//
+// ```
+// impl Cursor<'_$0> {
+// fn node(self) -> &SyntaxNode {
+// match self {
+// Cursor::Replace(node) | Cursor::Before(node) => node,
+// }
+// }
+// }
+// ```
+// ->
+// ```
+// impl<'a> Cursor<'a> {
+// fn node(self) -> &SyntaxNode {
+// match self {
+// Cursor::Replace(node) | Cursor::Before(node) => node,
+// }
+// }
+// }
+// ```
+pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // FIXME: How can we handle renaming any one of multiple anonymous lifetimes?
+ // FIXME: should also add support for the case fun(f: &Foo) -> &$0Foo
+ let lifetime =
+ ctx.find_node_at_offset::<ast::Lifetime>().filter(|lifetime| lifetime.text() == "'_")?;
+ let lifetime_loc = lifetime.lifetime_ident_token()?.text_range();
+
+ if let Some(fn_def) = lifetime.syntax().ancestors().find_map(ast::Fn::cast) {
+ generate_fn_def_assist(acc, fn_def, lifetime_loc, lifetime)
+ } else if let Some(impl_def) = lifetime.syntax().ancestors().find_map(ast::Impl::cast) {
+ generate_impl_def_assist(acc, impl_def, lifetime_loc, lifetime)
+ } else {
+ None
+ }
+}
+
+/// Generate the assist for the fn def case
+fn generate_fn_def_assist(
+ acc: &mut Assists,
+ fn_def: ast::Fn,
+ lifetime_loc: TextRange,
+ lifetime: ast::Lifetime,
+) -> Option<()> {
+ let param_list: ast::ParamList = fn_def.param_list()?;
+ let new_lifetime_param = generate_unique_lifetime_param_name(fn_def.generic_param_list())?;
+ let self_param =
+ // use the self if it's a reference and has no explicit lifetime
+ param_list.self_param().filter(|p| p.lifetime().is_none() && p.amp_token().is_some());
+ // compute the location which implicitly has the same lifetime as the anonymous lifetime
+ let loc_needing_lifetime = if let Some(self_param) = self_param {
+ // if we have a self reference, use that
+ Some(NeedsLifetime::SelfParam(self_param))
+ } else {
+        // otherwise, if there's a single reference parameter without a named lifetime, use that
+ let fn_params_without_lifetime: Vec<_> = param_list
+ .params()
+ .filter_map(|param| match param.ty() {
+ Some(ast::Type::RefType(ascribed_type)) if ascribed_type.lifetime().is_none() => {
+ Some(NeedsLifetime::RefType(ascribed_type))
+ }
+ _ => None,
+ })
+ .collect();
+ match fn_params_without_lifetime.len() {
+ 1 => Some(fn_params_without_lifetime.into_iter().next()?),
+ 0 => None,
+            // multiple unnamed is invalid. assist is not applicable
+ _ => return None,
+ }
+ };
+ acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
+ let fn_def = builder.make_mut(fn_def);
+ let lifetime = builder.make_mut(lifetime);
+ let loc_needing_lifetime =
+ loc_needing_lifetime.and_then(|it| it.make_mut(builder).to_position());
+
+ fn_def.get_or_create_generic_param_list().add_generic_param(
+ make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(),
+ );
+ ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax());
+ if let Some(position) = loc_needing_lifetime {
+ ted::insert(position, new_lifetime_param.clone_for_update().syntax());
+ }
+ })
+}
+
+/// Generate the assist for the impl def case
+fn generate_impl_def_assist(
+ acc: &mut Assists,
+ impl_def: ast::Impl,
+ lifetime_loc: TextRange,
+ lifetime: ast::Lifetime,
+) -> Option<()> {
+ let new_lifetime_param = generate_unique_lifetime_param_name(impl_def.generic_param_list())?;
+ acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
+ let impl_def = builder.make_mut(impl_def);
+ let lifetime = builder.make_mut(lifetime);
+
+ impl_def.get_or_create_generic_param_list().add_generic_param(
+ make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(),
+ );
+ ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax());
+ })
+}
+
+/// Given a type parameter list, generate a unique lifetime parameter name
+/// which is not in the list
+fn generate_unique_lifetime_param_name(
+ existing_type_param_list: Option<ast::GenericParamList>,
+) -> Option<ast::Lifetime> {
+ match existing_type_param_list {
+ Some(type_params) => {
+ let used_lifetime_params: FxHashSet<_> =
+ type_params.lifetime_params().map(|p| p.syntax().text().to_string()).collect();
+ ('a'..='z').map(|it| format!("'{}", it)).find(|it| !used_lifetime_params.contains(it))
+ }
+ None => Some("'a".to_string()),
+ }
+ .map(|it| make::lifetime(&it))
+}
+
+enum NeedsLifetime {
+ SelfParam(ast::SelfParam),
+ RefType(ast::RefType),
+}
+
+impl NeedsLifetime {
+ fn make_mut(self, builder: &mut AssistBuilder) -> Self {
+ match self {
+ Self::SelfParam(it) => Self::SelfParam(builder.make_mut(it)),
+ Self::RefType(it) => Self::RefType(builder.make_mut(it)),
+ }
+ }
+
+ fn to_position(self) -> Option<Position> {
+ match self {
+ Self::SelfParam(it) => Some(Position::after(it.amp_token()?)),
+ Self::RefType(it) => Some(Position::after(it.amp_token()?)),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_example_case() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl Cursor<'_$0> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+ }"#,
+ r#"impl<'a> Cursor<'a> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+ }"#,
+ );
+ }
+
+ #[test]
+ fn test_example_case_simplified() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl Cursor<'_$0> {"#,
+ r#"impl<'a> Cursor<'a> {"#,
+ );
+ }
+
+ #[test]
+ fn test_example_case_cursor_after_tick() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl Cursor<'$0_> {"#,
+ r#"impl<'a> Cursor<'a> {"#,
+ );
+ }
+
+ #[test]
+ fn test_impl_with_other_type_param() {
+ check_assist(
+ introduce_named_lifetime,
+ "impl<I> fmt::Display for SepByBuilder<'_$0, I>
+ where
+ I: Iterator,
+ I::Item: fmt::Display,
+ {",
+ "impl<I, 'a> fmt::Display for SepByBuilder<'a, I>
+ where
+ I: Iterator,
+ I::Item: fmt::Display,
+ {",
+ )
+ }
+
+ #[test]
+ fn test_example_case_cursor_before_tick() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl Cursor<$0'_> {"#,
+ r#"impl<'a> Cursor<'a> {"#,
+ );
+ }
+
+ #[test]
+ fn test_not_applicable_cursor_position() {
+ check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<'_>$0 {"#);
+ check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor$0<'_> {"#);
+ }
+
+ #[test]
+ fn test_not_applicable_lifetime_already_name() {
+ check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<'a$0> {"#);
+ check_assist_not_applicable(introduce_named_lifetime, r#"fn my_fun<'a>() -> X<'a$0>"#);
+ }
+
+ #[test]
+ fn test_with_type_parameter() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl<T> Cursor<T, '_$0>"#,
+ r#"impl<T, 'a> Cursor<T, 'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_with_existing_lifetime_name_conflict() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl<'a, 'b> Cursor<'a, 'b, '_$0>"#,
+ r#"impl<'a, 'b, 'c> Cursor<'a, 'b, 'c>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_return_value_anon_lifetime_param() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun() -> X<'_$0>"#,
+ r#"fn my_fun<'a>() -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_return_value_anon_reference_lifetime() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun() -> &'_$0 X"#,
+ r#"fn my_fun<'a>() -> &'a X"#,
+ );
+ }
+
+ #[test]
+ fn test_function_param_anon_lifetime() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun(x: X<'_$0>)"#,
+ r#"fn my_fun<'a>(x: X<'a>)"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_params() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun(f: &Foo) -> X<'_$0>"#,
+ r#"fn my_fun<'a>(f: &'a Foo) -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_params_in_presence_of_other_lifetime() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun<'other>(f: &Foo, b: &'other Bar) -> X<'_$0>"#,
+ r#"fn my_fun<'other, 'a>(f: &'a Foo, b: &'other Bar) -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_not_applicable_without_self_and_multiple_unnamed_param_lifetimes() {
+ // this is not permitted under lifetime elision rules
+ check_assist_not_applicable(
+ introduce_named_lifetime,
+ r#"fn my_fun(f: &Foo, b: &Bar) -> X<'_$0>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_self_ref_param() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun<'other>(&self, f: &Foo, b: &'other Bar) -> X<'_$0>"#,
+ r#"fn my_fun<'other, 'a>(&'a self, f: &Foo, b: &'other Bar) -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_param_with_non_ref_self() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun<'other>(self, f: &Foo, b: &'other Bar) -> X<'_$0>"#,
+ r#"fn my_fun<'other, 'a>(self, f: &'a Foo, b: &'other Bar) -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_self_ref_mut() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn foo(&mut self) -> &'_$0 ()"#,
+ r#"fn foo<'a>(&'a mut self) -> &'a ()"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs
new file mode 100644
index 000000000..547158e29
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs
@@ -0,0 +1,144 @@
+use ide_db::syntax_helpers::node_ext::is_pattern_cond;
+use syntax::{
+ ast::{self, AstNode},
+ T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::invert_boolean_expression,
+ AssistId, AssistKind,
+};
+
+// Assist: invert_if
+//
+// This transforms if expressions of the form `if !x {A} else {B}` into `if x {B} else {A}`
+// This also works with `!=`. This assist can only be applied with the cursor on `if`.
+//
+// ```
+// fn main() {
+// if$0 !y { A } else { B }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// if y { B } else { A }
+// }
+// ```
+pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let if_keyword = ctx.find_token_syntax_at_offset(T![if])?;
+ let expr = ast::IfExpr::cast(if_keyword.parent()?)?;
+ let if_range = if_keyword.text_range();
+ let cursor_in_range = if_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
+
+ let cond = expr.condition()?;
+ // This assist should not apply for if-let.
+ if is_pattern_cond(cond.clone()) {
+ return None;
+ }
+
+ let then_node = expr.then_branch()?.syntax().clone();
+ let else_block = match expr.else_branch()? {
+ ast::ElseBranch::Block(it) => it,
+ ast::ElseBranch::IfExpr(_) => return None,
+ };
+
+ acc.add(AssistId("invert_if", AssistKind::RefactorRewrite), "Invert if", if_range, |edit| {
+ let flip_cond = invert_boolean_expression(cond.clone());
+ edit.replace_ast(cond, flip_cond);
+
+ let else_node = else_block.syntax();
+ let else_range = else_node.text_range();
+ let then_range = then_node.text_range();
+
+ edit.replace(else_range, then_node.text());
+ edit.replace(then_range, else_node.text());
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn invert_if_composite_condition() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f x == 3 || x == 4 || x == 5 { 1 } else { 3 * 2 } }",
+ "fn f() { if !(x == 3 || x == 4 || x == 5) { 3 * 2 } else { 1 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_remove_not_parentheses() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f !(x == 3 || x == 4 || x == 5) { 3 * 2 } else { 1 } }",
+ "fn f() { if x == 3 || x == 4 || x == 5 { 1 } else { 3 * 2 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_remove_inequality() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f x != 3 { 1 } else { 3 + 2 } }",
+ "fn f() { if x == 3 { 3 + 2 } else { 1 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_remove_not() {
+ check_assist(
+ invert_if,
+ "fn f() { $0if !cond { 3 * 2 } else { 1 } }",
+ "fn f() { if cond { 1 } else { 3 * 2 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_general_case() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f cond { 3 * 2 } else { 1 } }",
+ "fn f() { if !cond { 1 } else { 3 * 2 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_doesnt_apply_with_cursor_not_on_if() {
+ check_assist_not_applicable(invert_if, "fn f() { if !$0cond { 3 * 2 } else { 1 } }")
+ }
+
+ #[test]
+ fn invert_if_doesnt_apply_with_if_let() {
+ check_assist_not_applicable(
+ invert_if,
+ "fn f() { i$0f let Some(_) = Some(1) { 1 } else { 0 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_option_case() {
+ check_assist(
+ invert_if,
+ "fn f() { if$0 doc_style.is_some() { Class::DocComment } else { Class::Comment } }",
+ "fn f() { if doc_style.is_none() { Class::Comment } else { Class::DocComment } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_result_case() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f doc_style.is_err() { Class::Err } else { Class::Ok } }",
+ "fn f() { if doc_style.is_ok() { Class::Ok } else { Class::Err } }",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
new file mode 100644
index 000000000..7e102ceba
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
@@ -0,0 +1,570 @@
+use either::Either;
+use ide_db::imports::merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior};
+use syntax::{algo::neighbor, ast, match_ast, ted, AstNode, SyntaxElement, SyntaxNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::next_prev,
+ AssistId, AssistKind,
+};
+
+use Edit::*;
+
+// Assist: merge_imports
+//
+// Merges two imports with a common prefix.
+//
+// ```
+// use std::$0fmt::Formatter;
+// use std::io;
+// ```
+// ->
+// ```
+// use std::{fmt::Formatter, io};
+// ```
+pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (target, edits) = if ctx.has_empty_selection() {
+ // Merge a neighbor
+ let tree: ast::UseTree = ctx.find_node_at_offset()?;
+ let target = tree.syntax().text_range();
+
+ let edits = if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) {
+ let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter();
+ use_item.try_merge_from(&mut neighbor)
+ } else {
+ let mut neighbor = next_prev().find_map(|dir| neighbor(&tree, dir)).into_iter();
+ tree.try_merge_from(&mut neighbor)
+ };
+ (target, edits?)
+ } else {
+ // Merge selected
+ let selection_range = ctx.selection_trimmed();
+ let parent_node = match ctx.covering_element() {
+ SyntaxElement::Node(n) => n,
+ SyntaxElement::Token(t) => t.parent()?,
+ };
+ let mut selected_nodes =
+ parent_node.children().filter(|it| selection_range.contains_range(it.text_range()));
+
+ let first_selected = selected_nodes.next()?;
+ let edits = match_ast! {
+ match first_selected {
+ ast::Use(use_item) => {
+ use_item.try_merge_from(&mut selected_nodes.filter_map(ast::Use::cast))
+ },
+ ast::UseTree(use_tree) => {
+ use_tree.try_merge_from(&mut selected_nodes.filter_map(ast::UseTree::cast))
+ },
+ _ => return None,
+ }
+ };
+ (selection_range, edits?)
+ };
+
+ acc.add(
+ AssistId("merge_imports", AssistKind::RefactorRewrite),
+ "Merge imports",
+ target,
+ |builder| {
+ let edits_mut: Vec<Edit> = edits
+ .into_iter()
+ .map(|it| match it {
+ Remove(Either::Left(it)) => Remove(Either::Left(builder.make_mut(it))),
+ Remove(Either::Right(it)) => Remove(Either::Right(builder.make_mut(it))),
+ Replace(old, new) => Replace(builder.make_syntax_mut(old), new),
+ })
+ .collect();
+ for edit in edits_mut {
+ match edit {
+ Remove(it) => it.as_ref().either(ast::Use::remove, ast::UseTree::remove),
+ Replace(old, new) => ted::replace(old, new),
+ }
+ }
+ },
+ )
+}
+
+trait Merge: AstNode + Clone {
+ fn try_merge_from(self, items: &mut dyn Iterator<Item = Self>) -> Option<Vec<Edit>> {
+ let mut edits = Vec::new();
+ let mut merged = self.clone();
+ while let Some(item) = items.next() {
+ merged = merged.try_merge(&item)?;
+ edits.push(Edit::Remove(item.into_either()));
+ }
+ if !edits.is_empty() {
+ edits.push(Edit::replace(self, merged));
+ Some(edits)
+ } else {
+ None
+ }
+ }
+ fn try_merge(&self, other: &Self) -> Option<Self>;
+ fn into_either(self) -> Either<ast::Use, ast::UseTree>;
+}
+
+impl Merge for ast::Use {
+ fn try_merge(&self, other: &Self) -> Option<Self> {
+ try_merge_imports(self, other, MergeBehavior::Crate)
+ }
+ fn into_either(self) -> Either<ast::Use, ast::UseTree> {
+ Either::Left(self)
+ }
+}
+
+impl Merge for ast::UseTree {
+ fn try_merge(&self, other: &Self) -> Option<Self> {
+ try_merge_trees(self, other, MergeBehavior::Crate)
+ }
+ fn into_either(self) -> Either<ast::Use, ast::UseTree> {
+ Either::Right(self)
+ }
+}
+
+enum Edit {
+ Remove(Either<ast::Use, ast::UseTree>),
+ Replace(SyntaxNode, SyntaxNode),
+}
+
+impl Edit {
+ fn replace(old: impl AstNode, new: impl AstNode) -> Self {
+ Edit::Replace(old.syntax().clone(), new.syntax().clone())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_merge_equal() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt$0::{Display, Debug};
+use std::fmt::{Display, Debug};
+",
+ r"
+use std::fmt::{Display, Debug};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_first() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt$0::Debug;
+use std::fmt::Display;
+",
+ r"
+use std::fmt::{Debug, Display};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_second() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt::Debug;
+use std::fmt$0::Display;
+",
+ r"
+use std::fmt::{Display, Debug};
+",
+ );
+ }
+
+ #[test]
+ fn merge_self1() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt$0;
+use std::fmt::Display;
+",
+ r"
+use std::fmt::{self, Display};
+",
+ );
+ }
+
+ #[test]
+ fn merge_self2() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt, $0fmt::Display};
+",
+ r"
+use std::{fmt::{Display, self}};
+",
+ );
+ }
+
+ #[test]
+ fn skip_pub1() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+pub use std::fmt$0::Debug;
+use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn skip_pub_last() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+use std::fmt$0::Debug;
+pub use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn skip_pub_crate_pub() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+pub(crate) use std::fmt$0::Debug;
+pub use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn skip_pub_pub_crate() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+pub use std::fmt$0::Debug;
+pub(crate) use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn merge_pub() {
+ check_assist(
+ merge_imports,
+ r"
+pub use std::fmt$0::Debug;
+pub use std::fmt::Display;
+",
+ r"
+pub use std::fmt::{Debug, Display};
+",
+ )
+ }
+
+ #[test]
+ fn merge_pub_crate() {
+ check_assist(
+ merge_imports,
+ r"
+pub(crate) use std::fmt$0::Debug;
+pub(crate) use std::fmt::Display;
+",
+ r"
+pub(crate) use std::fmt::{Debug, Display};
+",
+ )
+ }
+
+ #[test]
+ fn merge_pub_in_path_crate() {
+ check_assist(
+ merge_imports,
+ r"
+pub(in this::path) use std::fmt$0::Debug;
+pub(in this::path) use std::fmt::Display;
+",
+ r"
+pub(in this::path) use std::fmt::{Debug, Display};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_nested() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt$0::Debug, fmt::Display};
+",
+ r"
+use std::{fmt::{Debug, Display}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_nested2() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt::Debug, fmt$0::Display};
+",
+ r"
+use std::{fmt::{Display, Debug}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_with_nested_self_item() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::{Write, Display}};
+use std::{fmt::{self, Debug}};
+",
+ r"
+use std::{fmt::{Write, Display, self, Debug}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_with_nested_self_item2() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::{self, Debug}};
+use std::{fmt::{Write, Display}};
+",
+ r"
+use std::{fmt::{self, Debug, Write, Display}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_self_with_nested_self_item() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt$0::{self, Debug}, fmt::{Write, Display}};
+",
+ r"
+use std::{fmt::{self, Debug, Write, Display}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_nested_self_and_empty() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::$0{bar::{self}};
+use foo::{bar};
+",
+ r"
+use foo::{bar::{self}};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_nested_empty_and_self() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::$0{bar};
+use foo::{bar::{self}};
+",
+ r"
+use foo::{bar::{self}};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_nested_list_self_and_glob() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::*};
+use std::{fmt::{self, Display}};
+",
+ r"
+use std::{fmt::{*, self, Display}};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_single_wildcard_diff_prefixes() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::cell::*;
+use std::str;
+",
+ r"
+use std::{cell::*, str};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_both_wildcard_diff_prefixes() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::cell::*;
+use std::str::*;
+",
+ r"
+use std::{cell::*, str::*};
+",
+ )
+ }
+
+ #[test]
+ fn removes_just_enough_whitespace() {
+ check_assist(
+ merge_imports,
+ r"
+use foo$0::bar;
+use foo::baz;
+
+/// Doc comment
+",
+ r"
+use foo::{bar, baz};
+
+/// Doc comment
+",
+ );
+ }
+
+ #[test]
+ fn works_with_trailing_comma() {
+ check_assist(
+ merge_imports,
+ r"
+use {
+ foo$0::bar,
+ foo::baz,
+};
+",
+ r"
+use {
+ foo::{bar, baz},
+};
+",
+ );
+ check_assist(
+ merge_imports,
+ r"
+use {
+ foo::baz,
+ foo$0::bar,
+};
+",
+ r"
+use {
+ foo::{bar, baz},
+};
+",
+ );
+ }
+
+ #[test]
+ fn test_double_comma() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::bar::baz;
+use foo::$0{
+ FooBar,
+};
+",
+ r"
+use foo::{
+ FooBar, bar::baz,
+};
+",
+ )
+ }
+
+ #[test]
+ fn test_empty_use() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+use std::$0
+fn main() {}",
+ );
+ }
+
+ #[test]
+ fn split_glob() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::$0*;
+use foo::bar::Baz;
+",
+ r"
+use foo::{*, bar::Baz};
+",
+ );
+ }
+
+ #[test]
+ fn merge_selection_uses() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt::Error;
+$0use std::fmt::Display;
+use std::fmt::Debug;
+use std::fmt::Write;
+$0use std::fmt::Result;
+",
+ r"
+use std::fmt::Error;
+use std::fmt::{Display, Debug, Write};
+use std::fmt::Result;
+",
+ );
+ }
+
+ #[test]
+ fn merge_selection_use_trees() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{
+ fmt::Error,
+ $0fmt::Display,
+ fmt::Debug,
+ fmt::Write,$0
+ fmt::Result,
+};",
+ r"
+use std::{
+ fmt::Error,
+ fmt::{Display, Debug, Write},
+ fmt::Result,
+};",
+ );
+ // FIXME: Remove redundant braces. See also unnecessary-braces diagnostic.
+ check_assist(
+ merge_imports,
+ r"use std::$0{fmt::Display, fmt::Debug}$0;",
+ r"use std::{fmt::{Display, Debug}};",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs
new file mode 100644
index 000000000..c24015b1c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs
@@ -0,0 +1,822 @@
+use hir::TypeInfo;
+use std::{collections::HashMap, iter::successors};
+use syntax::{
+ algo::neighbor,
+ ast::{self, AstNode, HasName},
+ Direction,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists, TextRange};
+
+// Assist: merge_match_arms
+//
+// Merges the current match arm with the following if their bodies are identical.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// $0Action::Move(..) => foo(),
+// Action::Stop => foo(),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move(..) | Action::Stop => foo(),
+// }
+// }
+// ```
+pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let current_arm = ctx.find_node_at_offset::<ast::MatchArm>()?;
+ // Don't try to handle arms with guards for now - can add support for this later
+ if current_arm.guard().is_some() {
+ return None;
+ }
+ let current_expr = current_arm.expr()?;
+ let current_text_range = current_arm.syntax().text_range();
+ let current_arm_types = get_arm_types(ctx, &current_arm);
+
+ // We check if the following match arms match this one. We could, but don't,
+ // compare to the previous match arm as well.
+ let arms_to_merge = successors(Some(current_arm), |it| neighbor(it, Direction::Next))
+ .take_while(|arm| match arm.expr() {
+ Some(expr) if arm.guard().is_none() => {
+ let same_text = expr.syntax().text() == current_expr.syntax().text();
+ if !same_text {
+ return false;
+ }
+
+ are_same_types(&current_arm_types, arm, ctx)
+ }
+ _ => false,
+ })
+ .collect::<Vec<_>>();
+
+ if arms_to_merge.len() <= 1 {
+ return None;
+ }
+
+ acc.add(
+ AssistId("merge_match_arms", AssistKind::RefactorRewrite),
+ "Merge match arms",
+ current_text_range,
+ |edit| {
+ let pats = if arms_to_merge.iter().any(contains_placeholder) {
+ "_".into()
+ } else {
+ arms_to_merge
+ .iter()
+ .filter_map(ast::MatchArm::pat)
+ .map(|x| x.syntax().to_string())
+ .collect::<Vec<String>>()
+ .join(" | ")
+ };
+
+ let arm = format!("{} => {},", pats, current_expr.syntax().text());
+
+ if let [first, .., last] = &*arms_to_merge {
+ let start = first.syntax().text_range().start();
+ let end = last.syntax().text_range().end();
+
+ edit.replace(TextRange::new(start, end), arm);
+ }
+ },
+ )
+}
+
+fn contains_placeholder(a: &ast::MatchArm) -> bool {
+ matches!(a.pat(), Some(ast::Pat::WildcardPat(..)))
+}
+
+fn are_same_types(
+ current_arm_types: &HashMap<String, Option<TypeInfo>>,
+ arm: &ast::MatchArm,
+ ctx: &AssistContext<'_>,
+) -> bool {
+ let arm_types = get_arm_types(ctx, arm);
+ for (other_arm_type_name, other_arm_type) in arm_types {
+ match (current_arm_types.get(&other_arm_type_name), other_arm_type) {
+ (Some(Some(current_arm_type)), Some(other_arm_type))
+ if other_arm_type.original == current_arm_type.original => {}
+ _ => return false,
+ }
+ }
+
+ true
+}
+
+fn get_arm_types(
+ context: &AssistContext<'_>,
+ arm: &ast::MatchArm,
+) -> HashMap<String, Option<TypeInfo>> {
+ let mut mapping: HashMap<String, Option<TypeInfo>> = HashMap::new();
+
+ fn recurse(
+ map: &mut HashMap<String, Option<TypeInfo>>,
+ ctx: &AssistContext<'_>,
+ pat: &Option<ast::Pat>,
+ ) {
+ if let Some(local_pat) = pat {
+ match pat {
+ Some(ast::Pat::TupleStructPat(tuple)) => {
+ for field in tuple.fields() {
+ recurse(map, ctx, &Some(field));
+ }
+ }
+ Some(ast::Pat::TuplePat(tuple)) => {
+ for field in tuple.fields() {
+ recurse(map, ctx, &Some(field));
+ }
+ }
+ Some(ast::Pat::RecordPat(record)) => {
+ if let Some(field_list) = record.record_pat_field_list() {
+ for field in field_list.fields() {
+ recurse(map, ctx, &field.pat());
+ }
+ }
+ }
+ Some(ast::Pat::ParenPat(parentheses)) => {
+ recurse(map, ctx, &parentheses.pat());
+ }
+ Some(ast::Pat::SlicePat(slice)) => {
+ for slice_pat in slice.pats() {
+ recurse(map, ctx, &Some(slice_pat));
+ }
+ }
+ Some(ast::Pat::IdentPat(ident_pat)) => {
+ if let Some(name) = ident_pat.name() {
+ let pat_type = ctx.sema.type_of_pat(local_pat);
+ map.insert(name.text().to_string(), pat_type);
+ }
+ }
+ _ => (),
+ }
+ }
+ }
+
+ recurse(&mut mapping, context, &arm.pat());
+ mapping
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn merge_match_arms_single_patterns() {
+ check_assist(
+ merge_match_arms,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A => { 1i32$0 }
+ X::B => { 1i32 }
+ X::C => { 2i32 }
+ }
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A | X::B => { 1i32 },
+ X::C => { 2i32 }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_multiple_patterns() {
+ check_assist(
+ merge_match_arms,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A | X::B => {$0 1i32 },
+ X::C | X::D => { 1i32 },
+ X::E => { 2i32 },
+ }
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A | X::B | X::C | X::D => { 1i32 },
+ X::E => { 2i32 },
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_placeholder_pattern() {
+ check_assist(
+ merge_match_arms,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A => { 1i32 },
+ X::B => { 2i$032 },
+ _ => { 2i32 }
+ }
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A => { 1i32 },
+ _ => { 2i32 },
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merges_all_subsequent_arms() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum X { A, B, C, D, E }
+
+fn main() {
+ match X::A {
+ X::A$0 => 92,
+ X::B => 92,
+ X::C => 92,
+ X::D => 62,
+ _ => panic!(),
+ }
+}
+"#,
+ r#"
+enum X { A, B, C, D, E }
+
+fn main() {
+ match X::A {
+ X::A | X::B | X::C => 92,
+ X::D => 62,
+ _ => panic!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_rejects_guards() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+#[derive(Debug)]
+enum X {
+ A(i32),
+ B,
+ C
+}
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A(a) if a > 5 => { $01i32 },
+ X::B => { 1i32 },
+ X::C => { 2i32 }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_different_type() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<f64, f32>::Ok(0f64) {
+ Ok(x) => $0x.classify(),
+ Err(x) => x.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_different_type_multiple_fields() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64), (f32, f32)>::Ok((0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_multiple_fields() {
+ check_assist(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64), (f64, f64)>::Ok((0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ r#"
+fn func() {
+ match Result::<(f64, f64), (f64, f64)>::Ok((0f64, 0f64)) {
+ Ok(x) | Err(x) => x.1.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_subsequent_arm_with_different_type_in_other() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f32),
+ OptionC(f64)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) => $0x.classify(),
+ MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f32),
+ OptionC(f64)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) | MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_skip_arm_with_different_type_in_between() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f64),
+ OptionC(f32)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) => $0x.classify(),
+ MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_different_number_of_fields() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64, f64), (f64, f64)>::Ok((0f64, 0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_same_destructuring_different_types() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+struct Point {
+ x: i32,
+ y: i32,
+}
+
+fn func() {
+ let p = Point { x: 0, y: 7 };
+
+ match p {
+ Point { x, y: 0 } => $0"",
+ Point { x: 0, y } => "",
+ Point { x, y } => "",
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_range() {
+ check_assist(
+ merge_match_arms,
+ r#"
+fn func() {
+ let x = 'c';
+
+ match x {
+ 'a'..='j' => $0"",
+ 'c'..='z' => "",
+ _ => "other",
+ };
+}
+"#,
+ r#"
+fn func() {
+ let x = 'c';
+
+ match x {
+ 'a'..='j' | 'c'..='z' => "",
+ _ => "other",
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_enum_without_field() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ NoField,
+ AField(u8)
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::NoField => $0"",
+ MyEnum::AField(x) => ""
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_different_types() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Write(String),
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Write(text) => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_same_types() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { x: i32, y: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Crawl { x, y } => "",
+ };
+}
+ "#,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { x: i32, y: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } | MyEnum::Crawl { x, y } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_same_types_different_name() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { a: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Crawl { a, b } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_nested_pattern_different_names() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(r, g, b)) => $0"",
+ Message::ChangeColor(Color::Hsv(h, s, v)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_nested_pattern_same_names() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(a, b, c)) => $0"",
+ Message::ChangeColor(Color::Hsv(a, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(a, b, c)) | Message::ChangeColor(Color::Hsv(a, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_with_ignore() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, a: i32 },
+ Crawl { x: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, .. } => $0"",
+ MyEnum::Crawl { x, .. } => "",
+ };
+}
+ "#,
+ r#"
+enum MyEnum {
+ Move { x: i32, a: i32 },
+ Crawl { x: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, .. } | MyEnum::Crawl { x, .. } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_nested_with_conflicting_identifier() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Move { x: i32, y: i32 },
+ ChangeColor(u8, Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(x, Color::Rgb(y, b, c)) => $0"",
+ Message::ChangeColor(y, Color::Hsv(x, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_tuple() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func() {
+ match (0, "boo") {
+ (x, y) => $0"",
+ (y, x) => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_parentheses() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func(x: i32) {
+ let variable = 2;
+ match x {
+ 1 => $0"",
+ ((((variable)))) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_refpat() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func() {
+ let name = Some(String::from(""));
+ let n = String::from("");
+ match name {
+ Some(ref n) => $0"",
+ Some(n) => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_slice() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [0x7f, b'E', b'L', b'F', ..] => $0"",
+ [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_slice_identical() {
+ check_assist(
+ merge_match_arms,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [space, 5u8] => $0"",
+ [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [space, 5u8] | [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
new file mode 100644
index 000000000..176a3bf58
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
@@ -0,0 +1,122 @@
+use syntax::{
+ ast::{self, edit_in_place::GenericParamsOwnerEdit, make, AstNode, HasName, HasTypeBounds},
+ match_ast,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: move_bounds_to_where_clause
+//
+// Moves inline type bounds to a where clause.
+//
+// ```
+// fn apply<T, U, $0F: FnOnce(T) -> U>(f: F, x: T) -> U {
+// f(x)
+// }
+// ```
+// ->
+// ```
+// fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
+// f(x)
+// }
+// ```
+pub(crate) fn move_bounds_to_where_clause(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let type_param_list = ctx.find_node_at_offset::<ast::GenericParamList>()?;
+
+ let mut type_params = type_param_list.type_or_const_params();
+ if type_params.all(|p| match p {
+ ast::TypeOrConstParam::Type(t) => t.type_bound_list().is_none(),
+ ast::TypeOrConstParam::Const(_) => true,
+ }) {
+ return None;
+ }
+
+ let parent = type_param_list.syntax().parent()?;
+
+ let target = type_param_list.syntax().text_range();
+ acc.add(
+ AssistId("move_bounds_to_where_clause", AssistKind::RefactorRewrite),
+ "Move to where clause",
+ target,
+ |edit| {
+ let type_param_list = edit.make_mut(type_param_list);
+ let parent = edit.make_syntax_mut(parent);
+
+ let where_clause: ast::WhereClause = match_ast! {
+ match parent {
+ ast::Fn(it) => it.get_or_create_where_clause(),
+ ast::Trait(it) => it.get_or_create_where_clause(),
+ ast::Impl(it) => it.get_or_create_where_clause(),
+ ast::Enum(it) => it.get_or_create_where_clause(),
+ ast::Struct(it) => it.get_or_create_where_clause(),
+ _ => return,
+ }
+ };
+
+ for toc_param in type_param_list.type_or_const_params() {
+ let type_param = match toc_param {
+ ast::TypeOrConstParam::Type(x) => x,
+ ast::TypeOrConstParam::Const(_) => continue,
+ };
+ if let Some(tbl) = type_param.type_bound_list() {
+ if let Some(predicate) = build_predicate(type_param) {
+ where_clause.add_predicate(predicate)
+ }
+ tbl.remove()
+ }
+ }
+ },
+ )
+}
+
+fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> {
+ let path = make::ext::ident_path(&param.name()?.syntax().to_string());
+ let predicate = make::where_pred(path, param.type_bound_list()?.bounds());
+ Some(predicate.clone_for_update())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::check_assist;
+
+ #[test]
+ fn move_bounds_to_where_clause_fn() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"fn foo<T: u32, $0F: FnOnce(T) -> T>() {}"#,
+ r#"fn foo<T, F>() where T: u32, F: FnOnce(T) -> T {}"#,
+ );
+ }
+
+ #[test]
+ fn move_bounds_to_where_clause_impl() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"impl<U: u32, $0T> A<U, T> {}"#,
+ r#"impl<U, T> A<U, T> where U: u32 {}"#,
+ );
+ }
+
+ #[test]
+ fn move_bounds_to_where_clause_struct() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"struct A<$0T: Iterator<Item = u32>> {}"#,
+ r#"struct A<T> where T: Iterator<Item = u32> {}"#,
+ );
+ }
+
+ #[test]
+ fn move_bounds_to_where_clause_tuple_struct() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"struct Pair<$0T: u32>(T, T);"#,
+ r#"struct Pair<T>(T, T) where T: u32;"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs
new file mode 100644
index 000000000..a6c85a2b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs
@@ -0,0 +1,130 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::AnchoredPathBuf,
+};
+use syntax::{ast, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::trimmed_text_range,
+};
+
+// Assist: move_from_mod_rs
+//
+// Moves xxx/mod.rs to xxx.rs.
+//
+// ```
+// //- /main.rs
+// mod a;
+// //- /a/mod.rs
+// $0fn t() {}$0
+// ```
+// ->
+// ```
+// fn t() {}
+// ```
+pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
+ let module = ctx.sema.to_module_def(ctx.file_id())?;
+    // Enable this assist if the user selects all "meaningful" content in the source file
+ let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
+ let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
+ if !module.is_mod_rs(ctx.db()) {
+ cov_mark::hit!(not_mod_rs);
+ return None;
+ }
+ if trimmed_selected_range != trimmed_file_range {
+ cov_mark::hit!(not_all_selected);
+ return None;
+ }
+
+ let target = source_file.syntax().text_range();
+ let module_name = module.name(ctx.db())?.to_string();
+ let path = format!("../{}.rs", module_name);
+ let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
+ acc.add(
+ AssistId("move_from_mod_rs", AssistKind::Refactor),
+ format!("Convert {}/mod.rs to {}.rs", module_name, module_name),
+ target,
+ |builder| {
+ builder.move_file(ctx.file_id(), dst);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn trivial() {
+ check_assist(
+ move_from_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn t() {}
+$0"#,
+ r#"
+//- /a.rs
+fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn must_select_all_file() {
+ cov_mark::check!(not_all_selected);
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+fn t() {}$0
+"#,
+ );
+ cov_mark::check!(not_all_selected);
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn$0 t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn cannot_move_not_mod_rs() {
+ cov_mark::check!(not_mod_rs);
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"//- /main.rs
+mod a;
+//- /a.rs
+$0fn t() {}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn cannot_downgrade_main_and_lib_rs() {
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"//- /main.rs
+$0fn t() {}$0
+"#,
+ );
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"//- /lib.rs
+$0fn t() {}$0
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
new file mode 100644
index 000000000..b8f1b36de
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
@@ -0,0 +1,997 @@
+use syntax::{
+ ast::{edit::AstNodeEdit, make, AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat},
+ SyntaxKind::WHITESPACE,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: move_guard_to_arm_body
+//
+// Moves match guard into match arm body.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } $0if distance > 10 => foo(),
+// _ => (),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } => if distance > 10 {
+// foo()
+// },
+// _ => (),
+// }
+// }
+// ```
+pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let match_arm = ctx.find_node_at_offset::<MatchArm>()?;
+ let guard = match_arm.guard()?;
+ if ctx.offset() > guard.syntax().text_range().end() {
+ cov_mark::hit!(move_guard_unapplicable_in_arm_body);
+ return None;
+ }
+ let space_before_guard = guard.syntax().prev_sibling_or_token();
+
+ let guard_condition = guard.condition()?;
+ let arm_expr = match_arm.expr()?;
+ let if_expr =
+ make::expr_if(guard_condition, make::block_expr(None, Some(arm_expr.clone())), None)
+ .indent(arm_expr.indent_level());
+
+ let target = guard.syntax().text_range();
+ acc.add(
+ AssistId("move_guard_to_arm_body", AssistKind::RefactorRewrite),
+ "Move guard to arm body",
+ target,
+ |edit| {
+ match space_before_guard {
+ Some(element) if element.kind() == WHITESPACE => {
+ edit.delete(element.text_range());
+ }
+ _ => (),
+ };
+
+ edit.delete(guard.syntax().text_range());
+ edit.replace_ast(arm_expr, if_expr);
+ },
+ )
+}
+
+// Assist: move_arm_cond_to_match_guard
+//
+// Moves if expression from match arm body into a guard.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } => $0if distance > 10 { foo() },
+// _ => (),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } if distance > 10 => foo(),
+// _ => (),
+// }
+// }
+// ```
+pub(crate) fn move_arm_cond_to_match_guard(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?;
+ let match_pat = match_arm.pat()?;
+ let arm_body = match_arm.expr()?;
+
+ let mut replace_node = None;
+ let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone()).or_else(|| {
+ let block_expr = BlockExpr::cast(arm_body.syntax().clone())?;
+ if let Expr::IfExpr(e) = block_expr.tail_expr()? {
+ replace_node = Some(block_expr.syntax().clone());
+ Some(e)
+ } else {
+ None
+ }
+ })?;
+ if ctx.offset() > if_expr.then_branch()?.syntax().text_range().start() {
+ return None;
+ }
+
+ let replace_node = replace_node.unwrap_or_else(|| if_expr.syntax().clone());
+ let needs_dedent = replace_node != *if_expr.syntax();
+ let (conds_blocks, tail) = parse_if_chain(if_expr)?;
+
+ acc.add(
+ AssistId("move_arm_cond_to_match_guard", AssistKind::RefactorRewrite),
+ "Move condition to match guard",
+ replace_node.text_range(),
+ |edit| {
+ edit.delete(match_arm.syntax().text_range());
+ // Dedent if if_expr is in a BlockExpr
+ let dedent = if needs_dedent {
+ cov_mark::hit!(move_guard_ifelse_in_block);
+ 1
+ } else {
+ cov_mark::hit!(move_guard_ifelse_else_block);
+ 0
+ };
+ let then_arm_end = match_arm.syntax().text_range().end();
+ let indent_level = match_arm.indent_level();
+ let spaces = " ".repeat(indent_level.0 as _);
+
+ let mut first = true;
+ for (cond, block) in conds_blocks {
+ if !first {
+ edit.insert(then_arm_end, format!("\n{}", spaces));
+ } else {
+ first = false;
+ }
+ let guard = format!("{} if {} => ", match_pat, cond.syntax().text());
+ edit.insert(then_arm_end, guard);
+ let only_expr = block.statements().next().is_none();
+ match &block.tail_expr() {
+ Some(then_expr) if only_expr => {
+ edit.insert(then_arm_end, then_expr.syntax().text());
+ edit.insert(then_arm_end, ",");
+ }
+ _ => {
+ let to_insert = block.dedent(dedent.into()).syntax().text();
+ edit.insert(then_arm_end, to_insert)
+ }
+ }
+ }
+ if let Some(e) = tail {
+ cov_mark::hit!(move_guard_ifelse_else_tail);
+ let guard = format!("\n{}{} => ", spaces, match_pat);
+ edit.insert(then_arm_end, guard);
+ let only_expr = e.statements().next().is_none();
+ match &e.tail_expr() {
+ Some(expr) if only_expr => {
+ cov_mark::hit!(move_guard_ifelse_expr_only);
+ edit.insert(then_arm_end, expr.syntax().text());
+ edit.insert(then_arm_end, ",");
+ }
+ _ => {
+ let to_insert = e.dedent(dedent.into()).syntax().text();
+ edit.insert(then_arm_end, to_insert)
+ }
+ }
+ } else {
+                // There's no else branch. Add a pattern without a guard, unless the following match
+ // arm is `_ => ...`
+ cov_mark::hit!(move_guard_ifelse_notail);
+ match match_arm.syntax().next_sibling().and_then(MatchArm::cast) {
+ Some(next_arm)
+ if matches!(next_arm.pat(), Some(Pat::WildcardPat(_)))
+ && next_arm.guard().is_none() =>
+ {
+ cov_mark::hit!(move_guard_ifelse_has_wildcard);
+ }
+ _ => edit.insert(then_arm_end, format!("\n{}{} => {{}}", spaces, match_pat)),
+ }
+ }
+ },
+ )
+}
+
+// Parses an if-else-if chain to get the conditions and the then branches until we encounter an else
+// branch or the end.
+fn parse_if_chain(if_expr: IfExpr) -> Option<(Vec<(Expr, BlockExpr)>, Option<BlockExpr>)> {
+ let mut conds_blocks = Vec::new();
+ let mut curr_if = if_expr;
+ let tail = loop {
+ let cond = curr_if.condition()?;
+ conds_blocks.push((cond, curr_if.then_branch()?));
+ match curr_if.else_branch() {
+ Some(ElseBranch::IfExpr(e)) => {
+ curr_if = e;
+ }
+ Some(ElseBranch::Block(b)) => {
+ break Some(b);
+ }
+ None => break None,
+ }
+ };
+ Some((conds_blocks, tail))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn move_guard_to_arm_body_range() {
+ cov_mark::check!(move_guard_unapplicable_in_arm_body);
+ check_assist_not_applicable(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => $0false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+ #[test]
+ fn move_guard_to_arm_body_target() {
+ check_assist_target(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x $0if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ r#"if x > 10"#,
+ );
+ }
+
+ #[test]
+ fn move_guard_to_arm_body_works() {
+ check_assist(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x $0if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > 10 {
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_let_guard_to_arm_body_works() {
+ check_assist(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x $0if (let 1 = x) => false,
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x => if (let 1 = x) {
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_guard_to_arm_body_works_complex_match() {
+ check_assist(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ $0x @ 4 | x @ 5 if x > 5 => true,
+ _ => false
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x @ 4 | x @ 5 => if x > 5 {
+ true
+ },
+ _ => false
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > 10$0 { false },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_works() {
+ cov_mark::check!(move_guard_ifelse_has_wildcard);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ $0if x > 10 {
+ false
+ }
+ },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_no_wildcard_works() {
+ cov_mark::check_count!(move_guard_ifelse_has_wildcard, 0);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ $0if x > 10 {
+ false
+ }
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_wildcard_guard_works() {
+ cov_mark::check_count!(move_guard_ifelse_has_wildcard, 0);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ $0if x > 10 {
+ false
+ }
+ }
+ _ if x > 10 => true,
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => {}
+ _ if x > 10 => true,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_add_comma_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ $0if x > 10 {
+ false
+ }
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_if_let_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if let 62 = x $0&& true { false },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if let 62 = x && true => false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_if_empty_body_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x $0> 10 { },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => { }
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_if_multiline_body_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if$0 x > 10 {
+ 92;
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => {
+ 92;
+ false
+ }
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_if_multiline_body_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ if x > $010 {
+ 92;
+ false
+ }
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => {
+ 92;
+ false
+ }
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > $010 {
+ false
+ } else {
+ true
+ }
+ _ => true,
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => true,
+ _ => true,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_block_works() {
+ cov_mark::check!(move_guard_ifelse_expr_only);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ if x $0> 10 {
+ false
+ } else {
+ true
+ }
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => true,
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_else_if_empty_body_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > $010 { } else { },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => { }
+ x => { }
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_multiline_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if$0 x > 10 {
+ 92;
+ false
+ } else {
+ true
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => {
+ 92;
+ false
+ }
+ x => true,
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_multiline_else_works() {
+ cov_mark::check!(move_guard_ifelse_else_block);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x $0> 10 {
+ false
+ } else {
+ 42;
+ true
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => {
+ 42;
+ true
+ }
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_multiline_else_block_works() {
+ cov_mark::check!(move_guard_ifelse_in_block);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ if x > $010 {
+ false
+ } else {
+ 42;
+ true
+ }
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => {
+ 42;
+ true
+ }
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_last_arm_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x => {
+ if x > $010 {
+ false
+ } else {
+ 92;
+ true
+ }
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x if x > 10 => false,
+ x => {
+ 92;
+ true
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_comma_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x => if x > $010 {
+ false
+ } else {
+ 92;
+ true
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x if x > 10 => false,
+ x => {
+ 92;
+ true
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x => if x $0> 10 {
+ false
+ } else if x > 5 {
+ true
+ } else if x > 4 {
+ false
+ } else {
+ true
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x if x > 10 => false,
+ x if x > 5 => true,
+ x if x > 4 => false,
+ x => true,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_in_block() {
+ cov_mark::check!(move_guard_ifelse_in_block);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x => {
+ if x > $010 {
+ false
+ } else if x > 5 {
+ true
+ } else if x > 4 {
+ false
+ } else {
+ true
+ }
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x if x > 10 => false,
+ x if x > 5 => true,
+ x if x > 4 => false,
+ x => true,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_chain() {
+ cov_mark::check!(move_guard_ifelse_else_tail);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x => if x $0> 10 {
+ 1
+ } else if x > 5 {
+ 2
+ } else if x > 3 {
+ 42;
+ 3
+ } else {
+ 4
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x if x > 10 => 1,
+ x if x > 5 => 2,
+ x if x > 3 => {
+ 42;
+ 3
+ }
+ x => 4,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_iflet() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x => if x $0> 10 {
+ 1
+ } else if x > 5 {
+ 2
+ } else if let 4 = 4 {
+ 42;
+ 3
+ } else {
+ 4
+ },
+ }
+}"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x if x > 10 => 1,
+ x if x > 5 => 2,
+ x if let 4 = 4 => {
+ 42;
+ 3
+ }
+ x => 4,
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_notail() {
+ cov_mark::check!(move_guard_ifelse_notail);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x => if x > $010 {
+ 1
+ } else if x > 5 {
+ 2
+ } else if x > 4 {
+ 42;
+ 3
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x if x > 10 => 1,
+ x if x > 5 => 2,
+ x if x > 4 => {
+ 42;
+ 3
+ }
+ x => {}
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
new file mode 100644
index 000000000..7468318a5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
@@ -0,0 +1,337 @@
+use std::iter;
+
+use ast::edit::IndentLevel;
+use ide_db::base_db::AnchoredPathBuf;
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ ast::{self, edit::AstNodeEdit, HasName},
+ AstNode, SmolStr, TextRange,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: move_module_to_file
+//
+// Moves inline module's contents to a separate file.
+//
+// ```
+// mod $0foo {
+// fn t() {}
+// }
+// ```
+// ->
+// ```
+// mod foo;
+// ```
+pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let module_ast = ctx.find_node_at_offset::<ast::Module>()?;
+ let module_items = module_ast.item_list()?;
+
+ let l_curly_offset = module_items.syntax().text_range().start();
+ if l_curly_offset <= ctx.offset() {
+ cov_mark::hit!(available_before_curly);
+ return None;
+ }
+ let target = TextRange::new(module_ast.syntax().text_range().start(), l_curly_offset);
+
+ let module_name = module_ast.name()?;
+
+    // get to the outermost module syntax so we can grab the module of the file we are in
+ let outermost_mod_decl =
+ iter::successors(Some(module_ast.clone()), |module| module.parent()).last()?;
+ let module_def = ctx.sema.to_def(&outermost_mod_decl)?;
+ let parent_module = module_def.parent(ctx.db())?;
+
+ acc.add(
+ AssistId("move_module_to_file", AssistKind::RefactorExtract),
+ "Extract module to file",
+ target,
+ |builder| {
+ let path = {
+ let mut buf = String::from("./");
+ match parent_module.name(ctx.db()) {
+ Some(name) if !parent_module.is_mod_rs(ctx.db()) => {
+ format_to!(buf, "{}/", name)
+ }
+ _ => (),
+ }
+ let segments = iter::successors(Some(module_ast.clone()), |module| module.parent())
+ .filter_map(|it| it.name())
+ .map(|name| SmolStr::from(name.text().trim_start_matches("r#")))
+ .collect::<Vec<_>>();
+
+ format_to!(buf, "{}", segments.into_iter().rev().format("/"));
+
+            // We need to special-case a mod named `r#mod` and place the file in a
+            // subdirectory, as a file named "mod.rs" would otherwise belong to its parent module.
+ if module_name.text() == "r#mod" {
+ format_to!(buf, "/mod.rs");
+ } else {
+ format_to!(buf, ".rs");
+ }
+ buf
+ };
+ let contents = {
+ let items = module_items.dedent(IndentLevel(1)).to_string();
+ let mut items =
+ items.trim_start_matches('{').trim_end_matches('}').trim().to_string();
+ if !items.is_empty() {
+ items.push('\n');
+ }
+ items
+ };
+
+ let buf = format!("mod {};", module_name);
+
+ let replacement_start = match module_ast.mod_token() {
+ Some(mod_token) => mod_token.text_range(),
+ None => module_ast.syntax().text_range(),
+ }
+ .start();
+
+ builder.replace(
+ TextRange::new(replacement_start, module_ast.syntax().text_range().end()),
+ buf,
+ );
+
+ let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
+ builder.create_file(dst, contents);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn extract_from_root() {
+ check_assist(
+ move_module_to_file,
+ r#"
+mod $0tests {
+ #[test] fn t() {}
+}
+"#,
+ r#"
+//- /main.rs
+mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_from_submodule() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod submod;
+//- /submod.rs
+$0mod inner {
+ fn f() {}
+}
+fn g() {}
+"#,
+ r#"
+//- /submod.rs
+mod inner;
+fn g() {}
+//- /submod/inner.rs
+fn f() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_from_mod_rs() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod submodule;
+//- /submodule/mod.rs
+mod inner$0 {
+ fn f() {}
+}
+fn g() {}
+"#,
+ r#"
+//- /submodule/mod.rs
+mod inner;
+fn g() {}
+//- /submodule/inner.rs
+fn f() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_public() {
+ check_assist(
+ move_module_to_file,
+ r#"
+pub mod $0tests {
+ #[test] fn t() {}
+}
+"#,
+ r#"
+//- /main.rs
+pub mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_public_crate() {
+ check_assist(
+ move_module_to_file,
+ r#"
+pub(crate) mod $0tests {
+ #[test] fn t() {}
+}
+"#,
+ r#"
+//- /main.rs
+pub(crate) mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn available_before_curly() {
+ cov_mark::check!(available_before_curly);
+ check_assist_not_applicable(move_module_to_file, r#"mod m { $0 }"#);
+ }
+
+ #[test]
+ fn keep_outer_comments_and_attributes() {
+ check_assist(
+ move_module_to_file,
+ r#"
+/// doc comment
+#[attribute]
+mod $0tests {
+ #[test] fn t() {}
+}
+"#,
+ r#"
+//- /main.rs
+/// doc comment
+#[attribute]
+mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_nested() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+mod bar {
+ mod baz {
+ mod qux$0 {}
+ }
+}
+"#,
+ r#"
+//- /foo.rs
+mod bar {
+ mod baz {
+ mod qux;
+ }
+}
+//- /foo/bar/baz/qux.rs
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_mod_with_raw_ident() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod $0r#static {}
+"#,
+ r#"
+//- /main.rs
+mod r#static;
+//- /static.rs
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_r_mod() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod $0r#mod {}
+"#,
+ r#"
+//- /main.rs
+mod r#mod;
+//- /mod/mod.rs
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_r_mod_from_mod_rs() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod foo;
+//- /foo/mod.rs
+mod $0r#mod {}
+"#,
+ r#"
+//- /foo/mod.rs
+mod r#mod;
+//- /foo/mod/mod.rs
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_nested_r_mod() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod r#mod {
+ mod foo {
+ mod $0r#mod {}
+ }
+}
+"#,
+ r#"
+//- /main.rs
+mod r#mod {
+ mod foo {
+ mod r#mod;
+ }
+}
+//- /mod/foo/mod/mod.rs
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs
new file mode 100644
index 000000000..a909ce8b2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs
@@ -0,0 +1,151 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::AnchoredPathBuf,
+};
+use syntax::{ast, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::trimmed_text_range,
+};
+
+// Assist: move_to_mod_rs
+//
+// Moves xxx.rs to xxx/mod.rs.
+//
+// ```
+// //- /main.rs
+// mod a;
+// //- /a.rs
+// $0fn t() {}$0
+// ```
+// ->
+// ```
+// fn t() {}
+// ```
+pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
+ let module = ctx.sema.to_module_def(ctx.file_id())?;
+    // Enable this assist if the user selects all "meaningful" content in the source file
+ let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
+ let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
+ if module.is_mod_rs(ctx.db()) {
+ cov_mark::hit!(already_mod_rs);
+ return None;
+ }
+ if trimmed_selected_range != trimmed_file_range {
+ cov_mark::hit!(not_all_selected);
+ return None;
+ }
+
+ let target = source_file.syntax().text_range();
+ let module_name = module.name(ctx.db())?.to_string();
+ let path = format!("./{}/mod.rs", module_name);
+ let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
+ acc.add(
+ AssistId("move_to_mod_rs", AssistKind::Refactor),
+ format!("Convert {}.rs to {}/mod.rs", module_name, module_name),
+ target,
+ |builder| {
+ builder.move_file(ctx.file_id(), dst);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn trivial() {
+ check_assist(
+ move_to_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a.rs
+$0fn t() {}
+$0"#,
+ r#"
+//- /a/mod.rs
+fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn must_select_all_file() {
+ cov_mark::check!(not_all_selected);
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a.rs
+fn t() {}$0
+"#,
+ );
+ cov_mark::check!(not_all_selected);
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a.rs
+$0fn$0 t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn cannot_promote_mod_rs() {
+ cov_mark::check!(already_mod_rs);
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn t() {}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn cannot_promote_main_and_lib_rs() {
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"//- /main.rs
+$0fn t() {}$0
+"#,
+ );
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"//- /lib.rs
+$0fn t() {}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_mod() {
+ // note: /a/b.rs remains untouched
+ check_assist(
+ move_to_mod_rs,
+ r#"//- /main.rs
+mod a;
+//- /a.rs
+$0mod b;
+fn t() {}$0
+//- /a/b.rs
+fn t1() {}
+"#,
+ r#"
+//- /a/mod.rs
+mod b;
+fn t() {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs
new file mode 100644
index 000000000..424db7437
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs
@@ -0,0 +1,183 @@
+use syntax::{ast, ast::Radix, AstToken};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+
+const MIN_NUMBER_OF_DIGITS_TO_FORMAT: usize = 5;
+
+// Assist: reformat_number_literal
+//
+// Adds or removes separators from integer literal.
+//
+// ```
+// const _: i32 = 1012345$0;
+// ```
+// ->
+// ```
+// const _: i32 = 1_012_345;
+// ```
+pub(crate) fn reformat_number_literal(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let literal = ctx.find_node_at_offset::<ast::Literal>()?;
+ let literal = match literal.kind() {
+ ast::LiteralKind::IntNumber(it) => it,
+ _ => return None,
+ };
+
+ let text = literal.text();
+ if text.contains('_') {
+ return remove_separators(acc, literal);
+ }
+
+ let (prefix, value, suffix) = literal.split_into_parts();
+ if value.len() < MIN_NUMBER_OF_DIGITS_TO_FORMAT {
+ return None;
+ }
+
+ let radix = literal.radix();
+ let mut converted = prefix.to_string();
+ converted.push_str(&add_group_separators(value, group_size(radix)));
+ converted.push_str(suffix);
+
+ let group_id = GroupLabel("Reformat number literal".into());
+ let label = format!("Convert {} to {}", literal, converted);
+ let range = literal.syntax().text_range();
+ acc.add_group(
+ &group_id,
+ AssistId("reformat_number_literal", AssistKind::RefactorInline),
+ label,
+ range,
+ |builder| builder.replace(range, converted),
+ )
+}
+
+fn remove_separators(acc: &mut Assists, literal: ast::IntNumber) -> Option<()> {
+ let group_id = GroupLabel("Reformat number literal".into());
+ let range = literal.syntax().text_range();
+ acc.add_group(
+ &group_id,
+ AssistId("reformat_number_literal", AssistKind::RefactorInline),
+ "Remove digit separators",
+ range,
+ |builder| builder.replace(range, literal.text().replace('_', "")),
+ )
+}
+
+const fn group_size(r: Radix) -> usize {
+ match r {
+ Radix::Binary => 4,
+ Radix::Octal => 3,
+ Radix::Decimal => 3,
+ Radix::Hexadecimal => 4,
+ }
+}
+
+fn add_group_separators(s: &str, group_size: usize) -> String {
+ let mut chars = Vec::new();
+ for (i, ch) in s.chars().filter(|&ch| ch != '_').rev().enumerate() {
+ if i > 0 && i % group_size == 0 {
+ chars.push('_');
+ }
+ chars.push(ch);
+ }
+
+ chars.into_iter().rev().collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist_by_label, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn group_separators() {
+ let cases = vec![
+ ("", 4, ""),
+ ("1", 4, "1"),
+ ("12", 4, "12"),
+ ("123", 4, "123"),
+ ("1234", 4, "1234"),
+ ("12345", 4, "1_2345"),
+ ("123456", 4, "12_3456"),
+ ("1234567", 4, "123_4567"),
+ ("12345678", 4, "1234_5678"),
+ ("123456789", 4, "1_2345_6789"),
+ ("1234567890", 4, "12_3456_7890"),
+ ("1_2_3_4_5_6_7_8_9_0_", 4, "12_3456_7890"),
+ ("1234567890", 3, "1_234_567_890"),
+ ("1234567890", 2, "12_34_56_78_90"),
+ ("1234567890", 1, "1_2_3_4_5_6_7_8_9_0"),
+ ];
+
+ for case in cases {
+ let (input, group_size, expected) = case;
+ assert_eq!(add_group_separators(input, group_size), expected)
+ }
+ }
+
+ #[test]
+ fn good_targets() {
+ let cases = vec![
+ ("const _: i32 = 0b11111$0", "0b11111"),
+ ("const _: i32 = 0o77777$0;", "0o77777"),
+ ("const _: i32 = 10000$0;", "10000"),
+ ("const _: i32 = 0xFFFFF$0;", "0xFFFFF"),
+ ("const _: i32 = 10000i32$0;", "10000i32"),
+ ("const _: i32 = 0b_10_0i32$0;", "0b_10_0i32"),
+ ];
+
+ for case in cases {
+ check_assist_target(reformat_number_literal, case.0, case.1);
+ }
+ }
+
+ #[test]
+ fn bad_targets() {
+ let cases = vec![
+ "const _: i32 = 0b111$0",
+ "const _: i32 = 0b1111$0",
+ "const _: i32 = 0o77$0;",
+ "const _: i32 = 0o777$0;",
+ "const _: i32 = 10$0;",
+ "const _: i32 = 999$0;",
+ "const _: i32 = 0xFF$0;",
+ "const _: i32 = 0xFFFF$0;",
+ ];
+
+ for case in cases {
+ check_assist_not_applicable(reformat_number_literal, case);
+ }
+ }
+
+ #[test]
+ fn labels() {
+ let cases = vec![
+ ("const _: i32 = 10000$0", "const _: i32 = 10_000", "Convert 10000 to 10_000"),
+ (
+ "const _: i32 = 0xFF0000$0;",
+ "const _: i32 = 0xFF_0000;",
+ "Convert 0xFF0000 to 0xFF_0000",
+ ),
+ (
+ "const _: i32 = 0b11111111$0;",
+ "const _: i32 = 0b1111_1111;",
+ "Convert 0b11111111 to 0b1111_1111",
+ ),
+ (
+ "const _: i32 = 0o377211$0;",
+ "const _: i32 = 0o377_211;",
+ "Convert 0o377211 to 0o377_211",
+ ),
+ (
+ "const _: i32 = 10000i32$0;",
+ "const _: i32 = 10_000i32;",
+ "Convert 10000i32 to 10_000i32",
+ ),
+ ("const _: i32 = 1_0_0_0_i32$0;", "const _: i32 = 1000i32;", "Remove digit separators"),
+ ];
+
+ for case in cases {
+ let (before, after, label) = case;
+ check_assist_by_label(reformat_number_literal, before, after, label);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
new file mode 100644
index 000000000..cbbea6c1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -0,0 +1,221 @@
+use hir::{HirDisplay, ModuleDef, PathResolution, Semantics};
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ defs::Definition,
+ syntax_helpers::node_ext::preorder_expr,
+ RootDatabase,
+};
+use stdx::to_upper_snake_case;
+use syntax::{
+ ast::{self, make, HasName},
+ AstNode, WalkEvent,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::{render_snippet, Cursor},
+};
+
+// Assist: promote_local_to_const
+//
+// Promotes a local variable to a const item changing its name to a `SCREAMING_SNAKE_CASE` variant
+// if the local uses no non-const expressions.
+//
+// ```
+// fn main() {
+// let foo$0 = true;
+//
+// if foo {
+// println!("It's true");
+// } else {
+// println!("It's false");
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// const $0FOO: bool = true;
+//
+// if FOO {
+// println!("It's true");
+// } else {
+// println!("It's false");
+// }
+// }
+// ```
+pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let pat = ctx.find_node_at_offset::<ast::IdentPat>()?;
+    let name = pat.name()?;
+    // `ref`/`mut`/sub-pattern bindings cannot become a `const` item.
+    if !pat.is_simple_ident() {
+        cov_mark::hit!(promote_local_non_simple_ident);
+        return None;
+    }
+    let let_stmt = pat.syntax().parent().and_then(ast::LetStmt::cast)?;
+
+    let module = ctx.sema.scope(pat.syntax())?.module();
+    let local = ctx.sema.to_def(&pat)?;
+    let ty = ctx.sema.type_of_pat(&pat.into())?.original;
+
+    // A const item needs a written type annotation: bail if inference failed
+    // or the type is a closure (closures have no nameable type).
+    if ty.contains_unknown() || ty.is_closure() {
+        // NOTE(review): marker name contains a typo ("lcoal"); it is kept
+        // because it pairs with the identically-spelled `cov_mark::check!`
+        // in the tests below.
+        cov_mark::hit!(promote_lcoal_not_applicable_if_ty_not_inferred);
+        return None;
+    }
+    let ty = ty.display_source_code(ctx.db(), module.into()).ok()?;
+
+    // Only promote when the initializer could appear in a const context.
+    let initializer = let_stmt.initializer()?;
+    if !is_body_const(&ctx.sema, &initializer) {
+        cov_mark::hit!(promote_local_non_const);
+        return None;
+    }
+    let target = let_stmt.syntax().text_range();
+    acc.add(
+        AssistId("promote_local_to_const", AssistKind::Refactor),
+        "Promote local to constant",
+        target,
+        |builder| {
+            // Rename every same-file usage of the local to the new
+            // SCREAMING_SNAKE_CASE constant name.
+            let name = to_upper_snake_case(&name.to_string());
+            let usages = Definition::Local(local).usages(&ctx.sema).all();
+            if let Some(usages) = usages.references.get(&ctx.file_id()) {
+                for usage in usages {
+                    builder.replace(usage.range, &name);
+                }
+            }
+
+            // Build `const NAME: Ty = initializer;` and replace the whole
+            // let statement, placing the editor cursor before the name when
+            // the client supports snippets.
+            let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer);
+            match ctx.config.snippet_cap.zip(item.name()) {
+                Some((cap, name)) => builder.replace_snippet(
+                    cap,
+                    target,
+                    render_snippet(cap, item.syntax(), Cursor::Before(name.syntax())),
+                ),
+                None => builder.replace(target, item.to_string()),
+            }
+        },
+    )
+}
+
+// Best-effort check that `expr` only contains operations allowed in a const
+// context: every resolvable call must be to a `const fn`, and constructs that
+// are forbidden in consts (`box`, `for`, `return`, `?`, `yield`, `.await`)
+// immediately disqualify the body. Calls that fail to resolve are
+// optimistically treated as const.
+fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bool {
+    let mut is_const = true;
+    preorder_expr(expr, &mut |ev| {
+        let expr = match ev {
+            // Already proven non-const: returning `true` presumably tells the
+            // walker to skip the subtree (see `preorder_expr`) — no need to
+            // descend further.
+            WalkEvent::Enter(_) if !is_const => return true,
+            WalkEvent::Enter(expr) => expr,
+            WalkEvent::Leave(_) => return false,
+        };
+        match expr {
+            ast::Expr::CallExpr(call) => {
+                // A path call is const only if it resolves to a `const fn`.
+                if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() {
+                    if let Some(PathResolution::Def(ModuleDef::Function(func))) =
+                        path_expr.path().and_then(|path| sema.resolve_path(&path))
+                    {
+                        is_const &= func.is_const(sema.db);
+                    }
+                }
+            }
+            ast::Expr::MethodCallExpr(call) => {
+                // Unresolvable method calls default to "const" (true).
+                is_const &=
+                    sema.resolve_method_call(&call).map(|it| it.is_const(sema.db)).unwrap_or(true)
+            }
+            // Constructs never allowed in a const initializer.
+            ast::Expr::BoxExpr(_)
+            | ast::Expr::ForExpr(_)
+            | ast::Expr::ReturnExpr(_)
+            | ast::Expr::TryExpr(_)
+            | ast::Expr::YieldExpr(_)
+            | ast::Expr::AwaitExpr(_) => is_const = false,
+            _ => (),
+        }
+        !is_const
+    });
+    is_const
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    // Happy path: a plain `let` with a literal initializer is promoted and
+    // its usage is renamed to the SCREAMING_SNAKE_CASE constant.
+    #[test]
+    fn simple() {
+        check_assist(
+            promote_local_to_const,
+            r"
+fn foo() {
+    let x$0 = 0;
+    let y = x;
+}
+",
+            r"
+fn foo() {
+    const $0X: i32 = 0;
+    let y = X;
+}
+",
+        );
+    }
+
+    // A call to a non-const method in the initializer disqualifies promotion.
+    #[test]
+    fn not_applicable_non_const_meth_call() {
+        cov_mark::check!(promote_local_non_const);
+        check_assist_not_applicable(
+            promote_local_to_const,
+            r"
+struct Foo;
+impl Foo {
+    fn foo(self) {}
+}
+fn foo() {
+    let x$0 = Foo.foo();
+}
+",
+        );
+    }
+
+    // A call to a non-const free function likewise disqualifies promotion.
+    #[test]
+    fn not_applicable_non_const_call() {
+        check_assist_not_applicable(
+            promote_local_to_const,
+            r"
+fn bar(self) {}
+fn foo() {
+    let x$0 = bar();
+}
+",
+        );
+    }
+
+    // An unresolvable initializer leaves the type unknown, so no type
+    // annotation could be written for the const item.
+    #[test]
+    fn not_applicable_unknown_ty() {
+        cov_mark::check!(promote_lcoal_not_applicable_if_ty_not_inferred);
+        check_assist_not_applicable(
+            promote_local_to_const,
+            r"
+fn foo() {
+    let x$0 = bar();
+}
+",
+        );
+    }
+
+    // `ref` and `mut` bindings are not simple identifiers and are rejected.
+    #[test]
+    fn not_applicable_non_simple_ident() {
+        cov_mark::check!(promote_local_non_simple_ident);
+        check_assist_not_applicable(
+            promote_local_to_const,
+            r"
+fn foo() {
+    let ref x$0 = ();
+}
+",
+        );
+        check_assist_not_applicable(
+            promote_local_to_const,
+            r"
+fn foo() {
+    let mut x$0 = ();
+}
+",
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
new file mode 100644
index 000000000..4cfe6c99b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
@@ -0,0 +1,507 @@
+use syntax::{
+ ast::{self, make},
+ ted, AstNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: pull_assignment_up
+//
+// Extracts variable assignment to outside an if or match statement.
+//
+// ```
+// fn main() {
+// let mut foo = 6;
+//
+// if true {
+// $0foo = 5;
+// } else {
+// foo = 4;
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let mut foo = 6;
+//
+// foo = if true {
+// 5
+// } else {
+// 4
+// };
+// }
+// ```
+pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let assign_expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
+
+    // Only plain `lhs = rhs` applies; compound assignments (`+=` etc.) and
+    // other binary operators are rejected.
+    let op_kind = assign_expr.op_kind()?;
+    if op_kind != (ast::BinaryOp::Assignment { op: None }) {
+        cov_mark::hit!(test_cant_pull_non_assignments);
+        return None;
+    }
+
+    let mut collector = AssignmentsCollector {
+        sema: &ctx.sema,
+        common_lhs: assign_expr.lhs()?,
+        assignments: Vec::new(),
+    };
+
+    // Every branch of the enclosing `if` / every arm of the enclosing
+    // `match` must end in an assignment to the same lhs; the collector
+    // fails (None) otherwise.
+    let tgt: ast::Expr = if let Some(if_expr) = ctx.find_node_at_offset::<ast::IfExpr>() {
+        collector.collect_if(&if_expr)?;
+        if_expr.into()
+    } else if let Some(match_expr) = ctx.find_node_at_offset::<ast::MatchExpr>() {
+        collector.collect_match(&match_expr)?;
+        match_expr.into()
+    } else {
+        return None;
+    };
+
+    // If the `if`/`match` is itself already used as a value (rhs of an
+    // assignment or of a `let`), pulling the assignment out would change
+    // semantics — bail.
+    if let Some(parent) = tgt.syntax().parent() {
+        if matches!(parent.kind(), syntax::SyntaxKind::BIN_EXPR | syntax::SyntaxKind::LET_STMT) {
+            return None;
+        }
+    }
+
+    acc.add(
+        AssistId("pull_assignment_up", AssistKind::RefactorExtract),
+        "Pull assignment up",
+        tgt.syntax().text_range(),
+        move |edit| {
+            // Clone the collected nodes into the mutable tree being edited.
+            let assignments: Vec<_> = collector
+                .assignments
+                .into_iter()
+                .map(|(stmt, rhs)| (edit.make_mut(stmt), rhs.clone_for_update()))
+                .collect();
+
+            let tgt = edit.make_mut(tgt);
+
+            // Replace each `lhs = rhs;` — including its wrapping ExprStmt so
+            // the trailing `;` is removed too — with just `rhs`.
+            for (stmt, rhs) in assignments {
+                let mut stmt = stmt.syntax().clone();
+                if let Some(parent) = stmt.parent() {
+                    if ast::ExprStmt::cast(parent.clone()).is_some() {
+                        stmt = parent.clone();
+                    }
+                }
+                ted::replace(stmt, rhs.syntax());
+            }
+            // Finally wrap the whole `if`/`match` as `lhs = <tgt>;`.
+            let assign_expr = make::expr_assignment(collector.common_lhs, tgt.clone());
+            let assign_stmt = make::expr_stmt(assign_expr);
+
+            ted::replace(tgt.syntax(), assign_stmt.syntax().clone_for_update());
+        },
+    )
+}
+
+// Walks the branches of an `if`/`match` and records every trailing
+// assignment whose lhs is equivalent to `common_lhs`.
+struct AssignmentsCollector<'a> {
+    sema: &'a hir::Semantics<'a, ide_db::RootDatabase>,
+    // The lhs of the assignment under the cursor; every branch must assign
+    // to an equivalent expression.
+    common_lhs: ast::Expr,
+    // Collected `(assignment, rhs)` pairs, one per branch/arm.
+    assignments: Vec<(ast::BinExpr, ast::Expr)>,
+}
+
+impl<'a> AssignmentsCollector<'a> {
+    // Every match arm must end in a matching assignment: either a block
+    // whose last expression assigns, or a bare assignment expression.
+    fn collect_match(&mut self, match_expr: &ast::MatchExpr) -> Option<()> {
+        for arm in match_expr.match_arm_list()?.arms() {
+            match arm.expr()? {
+                ast::Expr::BlockExpr(block) => self.collect_block(&block)?,
+                ast::Expr::BinExpr(expr) => self.collect_expr(&expr)?,
+                _ => return None,
+            }
+        }
+
+        Some(())
+    }
+    // The then-branch and every else-branch (recursing through `else if`
+    // chains) must end in a matching assignment; a missing else branch
+    // fails via the `?`.
+    fn collect_if(&mut self, if_expr: &ast::IfExpr) -> Option<()> {
+        let then_branch = if_expr.then_branch()?;
+        self.collect_block(&then_branch)?;
+
+        match if_expr.else_branch()? {
+            ast::ElseBranch::Block(block) => self.collect_block(&block),
+            ast::ElseBranch::IfExpr(expr) => {
+                cov_mark::hit!(test_pull_assignment_up_chained_if);
+                self.collect_if(&expr)
+            }
+        }
+    }
+    // A block qualifies when its final expression — the tail expression, or
+    // the expression of its last statement — is a matching assignment.
+    fn collect_block(&mut self, block: &ast::BlockExpr) -> Option<()> {
+        let last_expr = block.tail_expr().or_else(|| match block.statements().last()? {
+            ast::Stmt::ExprStmt(stmt) => stmt.expr(),
+            ast::Stmt::Item(_) | ast::Stmt::LetStmt(_) => None,
+        })?;
+
+        if let ast::Expr::BinExpr(expr) = last_expr {
+            return self.collect_expr(&expr);
+        }
+
+        None
+    }
+
+    // Records the assignment when it is a plain `=` whose lhs is equivalent
+    // to the common lhs; fails otherwise.
+    fn collect_expr(&mut self, expr: &ast::BinExpr) -> Option<()> {
+        if expr.op_kind()? == (ast::BinaryOp::Assignment { op: None })
+            && is_equivalent(self.sema, &expr.lhs()?, &self.common_lhs)
+        {
+            self.assignments.push((expr.clone(), expr.rhs()?));
+            return Some(());
+        }
+        None
+    }
+}
+
+// Returns true when the two lhs expressions refer to the same place:
+// field accesses must resolve to the same field, paths to the same
+// definition, and `*expr` derefs are compared recursively on their operands.
+// Anything else is conservatively considered not equivalent.
+fn is_equivalent(
+    sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+    expr0: &ast::Expr,
+    expr1: &ast::Expr,
+) -> bool {
+    match (expr0, expr1) {
+        (ast::Expr::FieldExpr(field_expr0), ast::Expr::FieldExpr(field_expr1)) => {
+            cov_mark::hit!(test_pull_assignment_up_field_assignment);
+            sema.resolve_field(field_expr0) == sema.resolve_field(field_expr1)
+        }
+        (ast::Expr::PathExpr(path0), ast::Expr::PathExpr(path1)) => {
+            let path0 = path0.path();
+            let path1 = path1.path();
+            if let (Some(path0), Some(path1)) = (path0, path1) {
+                sema.resolve_path(&path0) == sema.resolve_path(&path1)
+            } else {
+                false
+            }
+        }
+        // Only deref-prefix expressions are compared; other unary ops fall
+        // through to the catch-all `false`.
+        (ast::Expr::PrefixExpr(prefix0), ast::Expr::PrefixExpr(prefix1))
+            if prefix0.op_kind() == Some(ast::UnaryOp::Deref)
+                && prefix1.op_kind() == Some(ast::UnaryOp::Deref) =>
+        {
+            cov_mark::hit!(test_pull_assignment_up_deref);
+            if let (Some(prefix0), Some(prefix1)) = (prefix0.expr(), prefix1.expr()) {
+                is_equivalent(sema, &prefix0, &prefix1)
+            } else {
+                false
+            }
+        }
+        _ => false,
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_pull_assignment_up_if() {
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ $0a = 2;
+ } else {
+ a = 3;
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = if true {
+ 2
+ } else {
+ 3
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_pull_assignment_up_match() {
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ match 1 {
+ 1 => {
+ $0a = 2;
+ },
+ 2 => {
+ a = 3;
+ },
+ 3 => {
+ a = 4;
+ }
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = match 1 {
+ 1 => {
+ 2
+ },
+ 2 => {
+ 3
+ },
+ 3 => {
+ 4
+ }
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_pull_assignment_up_assignment_expressions() {
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ match 1 {
+ 1 => { $0a = 2; },
+ 2 => a = 3,
+ 3 => {
+ a = 4
+ }
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = match 1 {
+ 1 => { 2 },
+ 2 => 3,
+ 3 => {
+ 4
+ }
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_pull_assignment_up_not_last_not_applicable() {
+ check_assist_not_applicable(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ $0a = 2;
+ b = a;
+ } else {
+ a = 3;
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_pull_assignment_up_chained_if() {
+ cov_mark::check!(test_pull_assignment_up_chained_if);
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ $0a = 2;
+ } else if false {
+ a = 3;
+ } else {
+ a = 4;
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = if true {
+ 2
+ } else if false {
+ 3
+ } else {
+ 4
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_pull_assignment_up_retains_stmts() {
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ let b = 2;
+ $0a = 2;
+ } else {
+ let b = 3;
+ a = 3;
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = if true {
+ let b = 2;
+ 2
+ } else {
+ let b = 3;
+ 3
+ };
+}"#,
+ )
+ }
+
+ #[test]
+ fn pull_assignment_up_let_stmt_not_applicable() {
+ check_assist_not_applicable(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ let b = if true {
+ $0a = 2
+ } else {
+ a = 3
+ };
+}"#,
+ )
+ }
+
+    // An empty `else` branch has no trailing assignment, so the assist must
+    // not be offered. (Fix: test name misspelled "assigment".)
+    #[test]
+    fn pull_assignment_up_if_missing_assignment_not_applicable() {
+        check_assist_not_applicable(
+            pull_assignment_up,
+            r#"
+fn foo() {
+    let mut a = 1;
+
+    if true {
+        $0a = 2;
+    } else {}
+}"#,
+        )
+    }
+
+    // A match arm without a trailing assignment disqualifies the assist.
+    // (Fix: test name misspelled "assigment".)
+    #[test]
+    fn pull_assignment_up_match_missing_assignment_not_applicable() {
+        check_assist_not_applicable(
+            pull_assignment_up,
+            r#"
+fn foo() {
+    let mut a = 1;
+
+    match 1 {
+        1 => {
+            $0a = 2;
+        },
+        2 => {
+            a = 3;
+        },
+        3 => {},
+    }
+}"#,
+        )
+    }
+
+ #[test]
+ fn test_pull_assignment_up_field_assignment() {
+ cov_mark::check!(test_pull_assignment_up_field_assignment);
+ check_assist(
+ pull_assignment_up,
+ r#"
+struct A(usize);
+
+fn foo() {
+ let mut a = A(1);
+
+ if true {
+ $0a.0 = 2;
+ } else {
+ a.0 = 3;
+ }
+}"#,
+ r#"
+struct A(usize);
+
+fn foo() {
+ let mut a = A(1);
+
+ a.0 = if true {
+ 2
+ } else {
+ 3
+ };
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_pull_assignment_up_deref() {
+ cov_mark::check!(test_pull_assignment_up_deref);
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+ let b = &mut a;
+
+ if true {
+ $0*b = 2;
+ } else {
+ *b = 3;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+ let b = &mut a;
+
+ *b = if true {
+ 2
+ } else {
+ 3
+ };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_cant_pull_non_assignments() {
+ cov_mark::check!(test_cant_pull_non_assignments);
+ check_assist_not_applicable(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+ let b = &mut a;
+
+ if true {
+ $0*b + 2;
+ } else {
+ *b + 3;
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
new file mode 100644
index 000000000..121f8b4a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
@@ -0,0 +1,548 @@
+use hir::{db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef};
+use ide_db::assists::{AssistId, AssistKind};
+use syntax::{ast, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ handlers::qualify_path::QualifyCandidate,
+};
+
+// Assist: qualify_method_call
+//
+// Replaces the method call with a qualified function call.
+//
+// ```
+// struct Foo;
+// impl Foo {
+// fn foo(&self) {}
+// }
+// fn main() {
+// let foo = Foo;
+// foo.fo$0o();
+// }
+// ```
+// ->
+// ```
+// struct Foo;
+// impl Foo {
+// fn foo(&self) {}
+// }
+// fn main() {
+// let foo = Foo;
+// Foo::foo(&foo);
+// }
+// ```
+pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let name: ast::NameRef = ctx.find_node_at_offset()?;
+    // The cursor must be on the method name of a method-call expression.
+    let call = name.syntax().parent().and_then(ast::MethodCallExpr::cast)?;
+
+    let ident = name.ident_token()?;
+
+    let range = call.syntax().text_range();
+    let resolved_call = ctx.sema.resolve_method_call(&call)?;
+
+    // Find a path to the method's container — its trait, or the impl's self
+    // type — that is usable from the current module.
+    let current_module = ctx.sema.scope(call.syntax())?.module();
+    let target_module_def = ModuleDef::from(resolved_call);
+    let item_in_ns = ItemInNs::from(target_module_def);
+    let receiver_path = current_module
+        .find_use_path(ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?)?;
+
+    // Reuse qualify_path's machinery: it rewrites `recv.foo(args)` into
+    // `Path::foo(recv, args)` with the right `&`/`&mut` on the receiver.
+    let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);
+
+    acc.add(
+        AssistId("qualify_method_call", AssistKind::RefactorInline),
+        format!("Qualify `{}` method call", ident.text()),
+        range,
+        |builder| {
+            qualify_candidate.qualify(
+                |replace_with: String| builder.replace(range, replace_with),
+                &receiver_path,
+                item_in_ns,
+            )
+        },
+    );
+    Some(())
+}
+
+// Maps an item to the item a use-path should be searched for: an associated
+// item of a trait maps to the trait; an associated item of an inherent impl
+// maps to the self type's ADT (failing if the self type is not an ADT);
+// non-associated items and macros map to themselves.
+fn item_for_path_search(db: &dyn HirDatabase, item: ItemInNs) -> Option<ItemInNs> {
+    Some(match item {
+        ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) {
+            Some(assoc_item) => match assoc_item.container(db) {
+                AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+                AssocItemContainer::Impl(impl_) => match impl_.trait_(db) {
+                    // Inherent impl: qualify with the self type's ADT path.
+                    None => ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)),
+                    // Trait impl: qualify with the trait path.
+                    Some(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+                },
+            },
+            None => item,
+        },
+        ItemInNs::Macros(_) => item,
+    })
+}
+
+// Resolves `item` to an associated item, when it is a module def that is one.
+fn item_as_assoc(db: &dyn HirDatabase, item: ItemInNs) -> Option<AssocItem> {
+    let module_def = item.as_module_def()?;
+    module_def.as_assoc_item(db)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn struct_method() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o()
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo(&foo)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_multi_params() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o(9, 9u)
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo(&foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_consume() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o(9, 9u)
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo(foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_exclusive() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o(9, 9u)
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo(&mut foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_cross_crate() {
+ check_assist(
+ qualify_method_call,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let foo = dep::test_mod::Foo {};
+ foo.fo$0o(9, 9u)
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub struct Foo;
+ impl Foo {
+ pub fn foo(&mut self, p1: i32, p2: u32) {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ let foo = dep::test_mod::Foo {};
+ dep::test_mod::Foo::foo(&mut foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_generic() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo<T>(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o::<()>()
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo<T>(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo::<()>(&foo)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ TestTrait::test_method(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_multi_params() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od(12, 32u)
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ TestTrait::test_method(&test_struct, 12, 32u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_consume() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od(12, 32u)
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ TestTrait::test_method(test_struct, 12, 32u)
+}
+"#,
+ );
+ }
+
+    // An `&mut self` trait method must be qualified with `&mut receiver`.
+    // Fix: the impl in the fixture declared `fn test_method(...);` with a
+    // `;` instead of a `{}` body — invalid Rust inside an `impl` and
+    // inconsistent with every sibling test fixture.
+    #[test]
+    fn trait_method_exclusive() {
+        check_assist(
+            qualify_method_call,
+            r#"
+mod test_mod {
+    pub trait TestTrait {
+        fn test_method(&mut self, p1: i32, p2: u32);
+    }
+    pub struct TestStruct {}
+    impl TestTrait for TestStruct {
+        fn test_method(&mut self, p1: i32, p2: u32) {}
+    }
+}
+
+use test_mod::*;
+
+fn main() {
+    let test_struct = test_mod::TestStruct {};
+    test_struct.test_meth$0od(12, 32u)
+}
+"#,
+            r#"
+mod test_mod {
+    pub trait TestTrait {
+        fn test_method(&mut self, p1: i32, p2: u32);
+    }
+    pub struct TestStruct {}
+    impl TestTrait for TestStruct {
+        fn test_method(&mut self, p1: i32, p2: u32) {}
+    }
+}
+
+use test_mod::*;
+
+fn main() {
+    let test_struct = test_mod::TestStruct {};
+    TestTrait::test_method(&mut test_struct, 12, 32u)
+}
+"#,
+        );
+    }
+
+ #[test]
+ fn trait_method_cross_crate() {
+ check_assist(
+ qualify_method_call,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let foo = dep::test_mod::Foo {};
+ foo.fo$0o(9, 9u)
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub struct Foo;
+ impl Foo {
+ pub fn foo(&mut self, p1: i32, p2: u32) {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ let foo = dep::test_mod::Foo {};
+ dep::test_mod::Foo::foo(&mut foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_generic() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method<T>(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method<T>(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = TestStruct {};
+ test_struct.test_meth$0od::<()>()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method<T>(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method<T>(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = TestStruct {};
+ TestTrait::test_method::<()>(&test_struct)
+}
+"#,
+ );
+ }
+
+    // Cursor on the receiver (not the method name) must not offer the
+    // assist. (Fix: test name misspelled "stuct".)
+    #[test]
+    fn struct_method_over_struct_instance() {
+        check_assist_not_applicable(
+            qualify_method_call,
+            r#"
+struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+fn main() {
+    let foo = Foo {};
+    f$0oo.foo()
+}
+"#,
+        );
+    }
+
+    // Cursor on the receiver (not the method name) must not offer the
+    // assist for trait methods either. (Fix: test name misspelled "stuct".)
+    #[test]
+    fn trait_method_over_struct_instance() {
+        check_assist_not_applicable(
+            qualify_method_call,
+            r#"
+mod test_mod {
+    pub trait TestTrait {
+        fn test_method(&self);
+    }
+    pub struct TestStruct {}
+    impl TestTrait for TestStruct {
+        fn test_method(&self) {}
+    }
+}
+
+use test_mod::*;
+
+fn main() {
+    let test_struct = test_mod::TestStruct {};
+    tes$0t_struct.test_method()
+}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
new file mode 100644
index 000000000..0c2e9da38
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
@@ -0,0 +1,1297 @@
+use std::iter;
+
+use hir::AsAssocItem;
+use ide_db::RootDatabase;
+use ide_db::{
+ helpers::mod_path_to_ast,
+ imports::import_assets::{ImportCandidate, LocatedImport},
+};
+use syntax::{
+ ast,
+ ast::{make, HasArgList},
+ AstNode, NodeOrToken,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ handlers::auto_import::find_importable_node,
+ AssistId, AssistKind, GroupLabel,
+};
+
+// Assist: qualify_path
+//
+// If the name is unresolved, provides all possible qualified paths for it.
+//
+// ```
+// fn main() {
+// let map = HashMap$0::new();
+// }
+// # pub mod std { pub mod collections { pub struct HashMap { } } }
+// ```
+// ->
+// ```
+// fn main() {
+// let map = std::collections::HashMap::new();
+// }
+// # pub mod std { pub mod collections { pub struct HashMap { } } }
+// ```
+pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
+    // Search relative paths only: qualifying rewrites the path in place
+    // rather than inserting a `use`.
+    let mut proposed_imports = import_assets.search_for_relative_paths(&ctx.sema);
+    if proposed_imports.is_empty() {
+        return None;
+    }
+
+    let range = match &syntax_under_caret {
+        NodeOrToken::Node(node) => ctx.sema.original_range(node).range,
+        NodeOrToken::Token(token) => token.text_range(),
+    };
+    // Classify what sits under the caret so the replacement string can be
+    // assembled correctly by `QualifyCandidate::qualify`.
+    let candidate = import_assets.import_candidate();
+    let qualify_candidate = match syntax_under_caret {
+        NodeOrToken::Node(syntax_under_caret) => match candidate {
+            // `qual::Name` where the qualifier start is unresolved.
+            ImportCandidate::Path(candidate) if candidate.qualifier.is_some() => {
+                cov_mark::hit!(qualify_path_qualifier_start);
+                let path = ast::Path::cast(syntax_under_caret)?;
+                let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?);
+                QualifyCandidate::QualifierStart(segment, prev_segment.generic_arg_list())
+            }
+            // A bare unresolved name.
+            ImportCandidate::Path(_) => {
+                cov_mark::hit!(qualify_path_unqualified_name);
+                let path = ast::Path::cast(syntax_under_caret)?;
+                let generics = path.segment()?.generic_arg_list();
+                QualifyCandidate::UnqualifiedName(generics)
+            }
+            // `Type::assoc` needing `<Type as Trait>::assoc`.
+            ImportCandidate::TraitAssocItem(_) => {
+                cov_mark::hit!(qualify_path_trait_assoc_item);
+                let path = ast::Path::cast(syntax_under_caret)?;
+                let (qualifier, segment) = (path.qualifier()?, path.segment()?);
+                QualifyCandidate::TraitAssocItem(qualifier, segment)
+            }
+            // A method call whose trait is not in scope.
+            ImportCandidate::TraitMethod(_) => {
+                cov_mark::hit!(qualify_path_trait_method);
+                let mcall_expr = ast::MethodCallExpr::cast(syntax_under_caret)?;
+                QualifyCandidate::TraitMethod(ctx.sema.db, mcall_expr)
+            }
+        },
+        // derive attribute path
+        NodeOrToken::Token(_) => QualifyCandidate::UnqualifiedName(None),
+    };
+
+    // we aren't interested in different namespaces
+    proposed_imports.dedup_by(|a, b| a.import_path == b.import_path);
+
+    // One assist per distinct qualifying path, grouped under a shared label.
+    let group_label = group_label(candidate);
+    for import in proposed_imports {
+        acc.add_group(
+            &group_label,
+            AssistId("qualify_path", AssistKind::QuickFix),
+            label(candidate, &import),
+            range,
+            |builder| {
+                qualify_candidate.qualify(
+                    |replace_with: String| builder.replace(range, replace_with),
+                    &import.import_path,
+                    import.item_to_import,
+                )
+            },
+        );
+    }
+    Some(())
+}
+// The shape of the thing being qualified; determines how `qualify` assembles
+// the replacement string.
+pub(crate) enum QualifyCandidate<'db> {
+    // `qual::Name` — only the qualifier start is replaced.
+    QualifierStart(ast::PathSegment, Option<ast::GenericArgList>),
+    // A bare name (optionally with generic args) that gets fully qualified.
+    UnqualifiedName(Option<ast::GenericArgList>),
+    // `Type::assoc` rewritten as `<Type as Trait>::assoc`.
+    TraitAssocItem(ast::Path, ast::PathSegment),
+    // `recv.method(..)` rewritten as `Trait::method(recv, ..)`.
+    TraitMethod(&'db RootDatabase, ast::MethodCallExpr),
+    // `recv.method(..)` rewritten as `Type::method(recv, ..)`; used by the
+    // qualify_method_call assist.
+    ImplMethod(&'db RootDatabase, ast::MethodCallExpr, hir::Function),
+}
+
+impl QualifyCandidate<'_> {
+    // Builds the qualified replacement text and hands it to `replacer`.
+    // `import` is the path to qualify with; `item` is the imported item
+    // (used to look the trait up for trait-method candidates).
+    pub(crate) fn qualify(
+        &self,
+        mut replacer: impl FnMut(String),
+        import: &hir::ModPath,
+        item: hir::ItemInNs,
+    ) {
+        let import = mod_path_to_ast(import);
+        match self {
+            QualifyCandidate::QualifierStart(segment, generics) => {
+                let generics = generics.as_ref().map_or_else(String::new, ToString::to_string);
+                replacer(format!("{}{}::{}", import, generics, segment));
+            }
+            QualifyCandidate::UnqualifiedName(generics) => {
+                let generics = generics.as_ref().map_or_else(String::new, ToString::to_string);
+                replacer(format!("{}{}", import, generics));
+            }
+            QualifyCandidate::TraitAssocItem(qualifier, segment) => {
+                replacer(format!("<{} as {}>::{}", qualifier, import, segment));
+            }
+            QualifyCandidate::TraitMethod(db, mcall_expr) => {
+                Self::qualify_trait_method(db, mcall_expr, replacer, import, item);
+            }
+            QualifyCandidate::ImplMethod(db, mcall_expr, hir_fn) => {
+                Self::qualify_fn_call(db, mcall_expr, replacer, import, hir_fn);
+            }
+        }
+    }
+
+    // Rewrites `recv.method(args)` as `import::method(recv, args)`, taking
+    // the receiver by value, `&` or `&mut` according to the method's
+    // `self` parameter. Does nothing for associated functions without a
+    // `self` parameter.
+    fn qualify_fn_call(
+        db: &RootDatabase,
+        mcall_expr: &ast::MethodCallExpr,
+        mut replacer: impl FnMut(String),
+        import: ast::Path,
+        hir_fn: &hir::Function,
+    ) -> Option<()> {
+        let receiver = mcall_expr.receiver()?;
+        let method_name = mcall_expr.name_ref()?;
+        let generics =
+            mcall_expr.generic_arg_list().as_ref().map_or_else(String::new, ToString::to_string);
+        let arg_list = mcall_expr.arg_list().map(|arg_list| arg_list.args());
+
+        if let Some(self_access) = hir_fn.self_param(db).map(|sp| sp.access(db)) {
+            // The receiver becomes the first argument, borrowed to match
+            // the `self` parameter's mode.
+            let receiver = match self_access {
+                hir::Access::Shared => make::expr_ref(receiver, false),
+                hir::Access::Exclusive => make::expr_ref(receiver, true),
+                hir::Access::Owned => receiver,
+            };
+            replacer(format!(
+                "{}::{}{}{}",
+                import,
+                method_name,
+                generics,
+                match arg_list {
+                    Some(args) => make::arg_list(iter::once(receiver).chain(args)),
+                    None => make::arg_list(iter::once(receiver)),
+                }
+            ));
+        }
+        Some(())
+    }
+
+    // Trait-method variant: resolve the trait from `item`, find the named
+    // method on it, then delegate to `qualify_fn_call`.
+    fn qualify_trait_method(
+        db: &RootDatabase,
+        mcall_expr: &ast::MethodCallExpr,
+        replacer: impl FnMut(String),
+        import: ast::Path,
+        item: hir::ItemInNs,
+    ) -> Option<()> {
+        let trait_method_name = mcall_expr.name_ref()?;
+        let trait_ = item_as_trait(db, item)?;
+        let method = find_trait_method(db, trait_, &trait_method_name)?;
+        Self::qualify_fn_call(db, mcall_expr, replacer, import, &method)
+    }
+}
+
+// Looks up the trait's associated item with the given name and returns it
+// only when that item is a function.
+fn find_trait_method(
+    db: &RootDatabase,
+    trait_: hir::Trait,
+    trait_method_name: &ast::NameRef,
+) -> Option<hir::Function> {
+    let named_item = trait_.items(db).into_iter().find(|item: &hir::AssocItem| {
+        item.name(db)
+            .map(|name| name.to_string() == trait_method_name.to_string())
+            .unwrap_or(false)
+    })?;
+    match named_item {
+        hir::AssocItem::Function(method) => Some(method),
+        _ => None,
+    }
+}
+
+// Resolves `item` to a trait: either the trait itself, or — for an
+// associated item — the trait containing it.
+fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option<hir::Trait> {
+    match item.as_module_def()? {
+        hir::ModuleDef::Trait(trait_) => Some(trait_),
+        other => other.as_assoc_item(db)?.containing_trait(db),
+    }
+}
+
+// Builds the shared assist-group label (`Qualify <name>`) from the
+// candidate's unresolved name.
+fn group_label(candidate: &ImportCandidate) -> GroupLabel {
+    let name = match candidate {
+        ImportCandidate::Path(it) => it.name.text(),
+        ImportCandidate::TraitAssocItem(it) | ImportCandidate::TraitMethod(it) => {
+            it.assoc_item_name.text()
+        }
+    };
+    GroupLabel(format!("Qualify {}", name))
+}
+
+// Builds the per-import assist label: a bare unqualified name is qualified
+// "as" the full path, everything else is qualified "with" the path prefix.
+fn label(candidate: &ImportCandidate, import: &LocatedImport) -> String {
+    let is_bare_name = matches!(candidate, ImportCandidate::Path(c) if c.qualifier.is_none());
+    if is_bare_name {
+        format!("Qualify as `{}`", import.import_path)
+    } else {
+        format!("Qualify with `{}`", import.import_path)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn applicable_when_found_an_import_partial() {
+ cov_mark::check!(qualify_path_unqualified_name);
+ check_assist(
+ qualify_path,
+ r#"
+mod std {
+ pub mod fmt {
+ pub struct Formatter;
+ }
+}
+
+use std::fmt;
+
+$0Formatter
+"#,
+ r#"
+mod std {
+ pub mod fmt {
+ pub struct Formatter;
+ }
+}
+
+use std::fmt;
+
+fmt::Formatter
+"#,
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_an_import() {
+ check_assist(
+ qualify_path,
+ r#"
+$0PubStruct
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ r#"
+PubMod::PubStruct
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn applicable_in_macros() {
+ check_assist(
+ qualify_path,
+ r#"
+macro_rules! foo {
+ ($i:ident) => { fn foo(a: $i) {} }
+}
+foo!(Pub$0Struct);
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ r#"
+macro_rules! foo {
+ ($i:ident) => { fn foo(a: $i) {} }
+}
+foo!(PubMod::PubStruct);
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_multiple_imports() {
+ check_assist(
+ qualify_path,
+ r#"
+PubSt$0ruct
+
+pub mod PubMod1 {
+ pub struct PubStruct;
+}
+pub mod PubMod2 {
+ pub struct PubStruct;
+}
+pub mod PubMod3 {
+ pub struct PubStruct;
+}
+"#,
+ r#"
+PubMod3::PubStruct
+
+pub mod PubMod1 {
+ pub struct PubStruct;
+}
+pub mod PubMod2 {
+ pub struct PubStruct;
+}
+pub mod PubMod3 {
+ pub struct PubStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_already_imported_types() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+use PubMod::PubStruct;
+
+PubStruct$0
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_types_with_private_paths() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+PrivateStruct$0
+
+pub mod PubMod {
+ struct PrivateStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_when_no_imports_found() {
+ check_assist_not_applicable(qualify_path, r#"PubStruct$0"#);
+ }
+
+ #[test]
+ fn qualify_function() {
+ check_assist(
+ qualify_path,
+ r#"
+test_function$0
+
+pub mod PubMod {
+ pub fn test_function() {};
+}
+"#,
+ r#"
+PubMod::test_function
+
+pub mod PubMod {
+ pub fn test_function() {};
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn qualify_macro() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /lib.rs crate:crate_with_macro
+#[macro_export]
+macro_rules! foo {
+ () => ()
+}
+
+//- /main.rs crate:main deps:crate_with_macro
+fn main() {
+ foo$0
+}
+"#,
+ r#"
+fn main() {
+ crate_with_macro::foo
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn qualify_path_target() {
+ check_assist_target(
+ qualify_path,
+ r#"
+struct AssistInfo {
+ group_label: Option<$0GroupLabel>,
+}
+
+mod m { pub struct GroupLabel; }
+"#,
+ "GroupLabel",
+ )
+ }
+
+ #[test]
+ fn not_applicable_when_path_start_is_imported() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+pub mod mod1 {
+ pub mod mod2 {
+ pub mod mod3 {
+ pub struct TestStruct;
+ }
+ }
+}
+
+use mod1::mod2;
+fn main() {
+ mod2::mod3::TestStruct$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_function() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+pub mod test_mod {
+ pub fn test_function() {}
+}
+
+use test_mod::test_function;
+fn main() {
+ test_function$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_struct_function() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ pub fn test_function() {}
+ }
+}
+
+fn main() {
+ TestStruct::test_function$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ pub fn test_function() {}
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::test_function
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_struct_const() {
+ cov_mark::check!(qualify_path_qualifier_start);
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ TestStruct::TEST_CONST$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::TEST_CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_struct_const_unqualified() {
+ // FIXME: non-trait assoc items completion is unsupported yet, see FIXME in the import_assets.rs for more details
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ TEST_CONST$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_trait_function() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::test_function$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+}
+
+fn main() {
+ <test_mod::TestStruct as test_mod::TestTrait>::test_function
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_function() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub trait TestTrait2 {
+ fn test_function();
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ fn test_function() {}
+ }
+ impl TestTrait for TestEnum {
+ fn test_function() {}
+ }
+}
+
+use test_mod::TestTrait2;
+fn main() {
+ test_mod::TestEnum::test_function$0;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn associated_trait_const() {
+ cov_mark::check!(qualify_path_trait_assoc_item);
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::TEST_CONST$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ <test_mod::TestStruct as test_mod::TestTrait>::TEST_CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_const() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub trait TestTrait2 {
+ const TEST_CONST: f64;
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ const TEST_CONST: f64 = 42.0;
+ }
+ impl TestTrait for TestEnum {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+use test_mod::TestTrait2;
+fn main() {
+ test_mod::TestEnum::TEST_CONST$0;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn trait_method() {
+ cov_mark::check!(qualify_path_trait_method);
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_mod::TestTrait::test_method(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_multi_params() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self, test: i32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self, test: i32) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od(42)
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self, test: i32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self, test: i32) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_mod::TestTrait::test_method(&test_struct, 42)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_consume() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_mod::TestTrait::test_method(test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_cross_crate() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ dep::test_mod::TestTrait::test_method(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn assoc_fn_cross_crate() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::test_func$0tion
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ <dep::test_mod::TestStruct as dep::test_mod::TestTrait>::test_function
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn assoc_const_cross_crate() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::CONST$0
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ const CONST: bool;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const CONST: bool = true;
+ }
+}
+"#,
+ r#"
+fn main() {
+ <dep::test_mod::TestStruct as dep::test_mod::TestTrait>::CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn assoc_fn_as_method_cross_crate() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_func$0tion()
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn private_trait_cross_crate() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_method() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub trait TestTrait2 {
+ fn test_method(&self);
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ fn test_method(&self) {}
+ }
+ impl TestTrait for TestEnum {
+ fn test_method(&self) {}
+ }
+}
+
+use test_mod::TestTrait2;
+fn main() {
+ let one = test_mod::TestEnum::One;
+ one.test$0_method();
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn dep_import() {
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+pub struct Struct;
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Struct$0
+}
+",
+ r"
+fn main() {
+ dep::Struct
+}
+",
+ );
+ }
+
+ #[test]
+ fn whole_segment() {
+ // Tests that only imports whose last segment matches the identifier get suggested.
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+pub mod fmt {
+ pub trait Display {}
+}
+
+pub fn panic_fmt() {}
+
+//- /main.rs crate:main deps:dep
+struct S;
+
+impl f$0mt::Display for S {}
+",
+ r"
+struct S;
+
+impl dep::fmt::Display for S {}
+",
+ );
+ }
+
+ #[test]
+ fn macro_generated() {
+ // Tests that macro-generated items are suggested from external crates.
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+macro_rules! mac {
+ () => {
+ pub struct Cheese;
+ };
+}
+
+mac!();
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Cheese$0;
+}
+",
+ r"
+fn main() {
+ dep::Cheese;
+}
+",
+ );
+ }
+
+ #[test]
+ fn casing() {
+ // Tests that differently cased names don't interfere and we only suggest the matching one.
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+pub struct FMT;
+pub struct fmt;
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ FMT$0;
+}
+",
+ r"
+fn main() {
+ dep::FMT;
+}
+",
+ );
+ }
+
+ #[test]
+ fn keep_generic_annotations() {
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+pub mod generic { pub struct Thing<'a, T>(&'a T); }
+
+//- /main.rs crate:main deps:dep
+fn foo() -> Thin$0g<'static, ()> {}
+
+fn main() {}
+",
+ r"
+fn foo() -> dep::generic::Thing<'static, ()> {}
+
+fn main() {}
+",
+ );
+ }
+
+ #[test]
+ fn keep_generic_annotations_leading_colon() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /lib.rs crate:dep
+pub mod generic { pub struct Thing<'a, T>(&'a T); }
+
+//- /main.rs crate:main deps:dep
+fn foo() -> Thin$0g::<'static, ()> {}
+
+fn main() {}
+"#,
+ r"
+fn foo() -> dep::generic::Thing::<'static, ()> {}
+
+fn main() {}
+",
+ );
+ }
+
+ #[test]
+ fn associated_struct_const_generic() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub struct TestStruct<T> {}
+ impl<T> TestStruct<T> {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ TestStruct::<()>::TEST_CONST$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub struct TestStruct<T> {}
+ impl<T> TestStruct<T> {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::<()>::TEST_CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_trait_const_generic() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct<T> {}
+ impl<T> TestTrait for TestStruct<T> {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::<()>::TEST_CONST$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct<T> {}
+ impl<T> TestTrait for TestStruct<T> {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ <test_mod::TestStruct::<()> as test_mod::TestTrait>::TEST_CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_generic() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method<T>(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method<T>(&self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od::<()>()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method<T>(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method<T>(&self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_mod::TestTrait::test_method::<()>(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_derives() {
+ check_assist(
+ qualify_path,
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(Copy$0)]
+struct Foo;
+"#,
+ r#"
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo::Copy)]
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_use_start() {
+ check_assist(
+ qualify_path,
+ r#"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use foo$0::Foo;
+"#,
+ r#"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use bar::foo::Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_in_non_start_use() {
+ check_assist_not_applicable(
+ qualify_path,
+ r"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use foo::Foo$0;
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
new file mode 100644
index 000000000..dbe8cb7bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
@@ -0,0 +1,509 @@
+use std::borrow::Cow;
+
+use syntax::{ast, ast::IsString, AstToken, TextRange, TextSize};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: make_raw_string
+//
+// Adds `r#` to a plain string literal.
+//
+// ```
+// fn main() {
+// "Hello,$0 World!";
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// r#"Hello, World!"#;
+// }
+// ```
+// Entry point for the `make_raw_string` assist: rewrites the plain string
+// literal under the cursor as a raw string literal.
+pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let token = ctx.find_token_at_offset::<ast::String>()?;
+    // Already a raw string -- nothing to convert.
+    if token.is_raw() {
+        return None;
+    }
+    // `value()` yields the *unescaped* contents; `None` (e.g. for invalid
+    // literals) makes the assist inapplicable.
+    let value = token.value()?;
+    let target = token.syntax().text_range();
+    acc.add(
+        AssistId("make_raw_string", AssistKind::RefactorRewrite),
+        "Rewrite as raw string",
+        target,
+        |edit| {
+            // Always use at least one `#` (yielding `r#"..."#`), more if the
+            // contents require them -- see `required_hashes`.
+            let hashes = "#".repeat(required_hashes(&value).max(1));
+            if matches!(value, Cow::Borrowed(_)) {
+                // A borrowed Cow means unescaping changed nothing, so the body
+                // can stay in place and only the delimiters are added.
+                // Avoid replacing the whole string to better position the cursor.
+                edit.insert(token.syntax().text_range().start(), format!("r{}", hashes));
+                edit.insert(token.syntax().text_range().end(), hashes);
+            } else {
+                // Escapes were resolved: splice the unescaped text into a
+                // freshly built raw literal.
+                edit.replace(
+                    token.syntax().text_range(),
+                    format!("r{}\"{}\"{}", hashes, value, hashes),
+                );
+            }
+        },
+    )
+}
+
+// Assist: make_usual_string
+//
+// Turns a raw string into a plain string.
+//
+// ```
+// fn main() {
+// r#"Hello,$0 "World!""#;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// "Hello, \"World!\"";
+// }
+// ```
+// Entry point for the `make_usual_string` assist: rewrites the raw string
+// literal under the cursor as a plain string literal, escaping as needed.
+pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let token = ctx.find_token_at_offset::<ast::String>()?;
+    if !token.is_raw() {
+        return None;
+    }
+    // `None` for invalid/unterminated literals -> assist not applicable.
+    let value = token.value()?;
+    let target = token.syntax().text_range();
+    acc.add(
+        AssistId("make_usual_string", AssistKind::RefactorRewrite),
+        "Rewrite as regular string",
+        target,
+        |edit| {
+            // parse inside string to escape `"`
+            let escaped = value.escape_default().to_string();
+            if let Some(offsets) = token.quote_offsets() {
+                // If escaping left the contents unchanged, rewrite only the
+                // delimiters (`r#"` / `"#` -> `"`) to keep the edit minimal.
+                // `offsets.contents` is rebased from file coordinates to
+                // token-local coordinates before slicing `token.text()`.
+                if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped {
+                    edit.replace(offsets.quotes.0, "\"");
+                    edit.replace(offsets.quotes.1, "\"");
+                    return;
+                }
+            }
+
+            // Otherwise replace the whole literal with the escaped form.
+            edit.replace(token.syntax().text_range(), format!("\"{}\"", escaped));
+        },
+    )
+}
+
+// Assist: add_hash
+//
+// Adds a hash to a raw string literal.
+//
+// ```
+// fn main() {
+// r#"Hello,$0 World!"#;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// r##"Hello, World!"##;
+// }
+// ```
+// Entry point for the `add_hash` assist: wraps a raw string literal in one
+// additional `#` pair.
+pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let token = ctx.find_token_at_offset::<ast::String>()?;
+    if !token.is_raw() {
+        // Only raw strings can carry hash delimiters.
+        return None;
+    }
+    let text_range = token.syntax().text_range();
+    acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", text_range, |edit| {
+        // One `#` directly after the `r` prefix, and its partner at the end.
+        edit.insert(text_range.start() + TextSize::of('r'), "#");
+        edit.insert(text_range.end(), "#");
+    })
+}
+
+// Assist: remove_hash
+//
+// Removes a hash from a raw string literal.
+//
+// ```
+// fn main() {
+// r#"Hello,$0 World!"#;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// r"Hello, World!";
+// }
+// ```
+// Entry point for the `remove_hash` assist: strips one `#` pair from a raw
+// string literal, provided the remaining hashes still delimit the contents
+// unambiguously.
+pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let token = ctx.find_token_at_offset::<ast::String>()?;
+    if !token.is_raw() {
+        return None;
+    }
+
+    let text = token.text();
+    // Bail out unless the literal actually has a hash pair to remove, i.e. it
+    // is of the `r#"..."#` shape. (The previous condition,
+    // `!starts_with("r#") && ends_with('#')`, could never be true for a valid
+    // raw-string token, so hash-less `r"..."` literals fell through to the
+    // `required_hashes` check below instead of being rejected here.)
+    if !(text.starts_with("r#") && text.ends_with('#')) {
+        return None;
+    }
+
+    // Number of leading hashes between `r` and the opening quote.
+    let existing_hashes = text.chars().skip(1).take_while(|&it| it == '#').count();
+
+    let text_range = token.syntax().text_range();
+    // Contents between the quotes, rebased to token-local coordinates.
+    let internal_text = &text[token.text_range_between_quotes()? - text_range.start()];
+
+    // Removing a hash is only sound if the contents don't need all of them.
+    if existing_hashes == required_hashes(internal_text) {
+        cov_mark::hit!(cant_remove_required_hash);
+        return None;
+    }
+
+    acc.add(AssistId("remove_hash", AssistKind::RefactorRewrite), "Remove #", text_range, |edit| {
+        // Delete the hash right after `r` and the matching trailing hash.
+        edit.delete(TextRange::at(text_range.start() + TextSize::of('r'), TextSize::of('#')));
+        edit.delete(TextRange::new(text_range.end() - TextSize::of('#'), text_range.end()));
+    })
+}
+
+/// Returns the minimal number of `#`s required to delimit `s` as a raw string:
+/// one more than the longest run of hashes immediately following any `"` in
+/// `s`, or zero when `s` contains no quote at all.
+fn required_hashes(s: &str) -> usize {
+    s.match_indices('"')
+        .map(|(idx, _)| {
+            let after_quote = &s[idx + 1..];
+            after_quote.chars().take_while(|&c| c == '#').count() + 1
+        })
+        .max()
+        .unwrap_or(0)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_required_hashes() {
+ assert_eq!(0, required_hashes("abc"));
+ assert_eq!(0, required_hashes("###"));
+ assert_eq!(1, required_hashes("\""));
+ assert_eq!(2, required_hashes("\"#abc"));
+ assert_eq!(0, required_hashes("#abc"));
+ assert_eq!(3, required_hashes("#ab\"##c"));
+ assert_eq!(5, required_hashes("#ab\"##\"####c"));
+ }
+
+ #[test]
+ fn make_raw_string_target() {
+ check_assist_target(
+ make_raw_string,
+ r#"
+ fn f() {
+ let s = $0"random\nstring";
+ }
+ "#,
+ r#""random\nstring""#,
+ );
+ }
+
+ #[test]
+ fn make_raw_string_works() {
+ check_assist(
+ make_raw_string,
+ r#"
+fn f() {
+ let s = $0"random\nstring";
+}
+"#,
+ r##"
+fn f() {
+ let s = r#"random
+string"#;
+}
+"##,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_works_inside_macros() {
+ check_assist(
+ make_raw_string,
+ r#"
+ fn f() {
+ format!($0"x = {}", 92)
+ }
+ "#,
+ r##"
+ fn f() {
+ format!(r#"x = {}"#, 92)
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_hashes_inside_works() {
+ check_assist(
+ make_raw_string,
+ r###"
+fn f() {
+ let s = $0"#random##\nstring";
+}
+"###,
+ r####"
+fn f() {
+ let s = r#"#random##
+string"#;
+}
+"####,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_closing_hashes_inside_works() {
+ check_assist(
+ make_raw_string,
+ r###"
+fn f() {
+ let s = $0"#random\"##\nstring";
+}
+"###,
+ r####"
+fn f() {
+ let s = r###"#random"##
+string"###;
+}
+"####,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_nothing_to_unescape_works() {
+ check_assist(
+ make_raw_string,
+ r#"
+ fn f() {
+ let s = $0"random string";
+ }
+ "#,
+ r##"
+ fn f() {
+ let s = r#"random string"#;
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_not_works_on_partial_string() {
+ check_assist_not_applicable(
+ make_raw_string,
+ r#"
+ fn f() {
+ let s = "foo$0
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn make_usual_string_not_works_on_partial_string() {
+ check_assist_not_applicable(
+ make_usual_string,
+ r#"
+ fn main() {
+ let s = r#"bar$0
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_hash_target() {
+ check_assist_target(
+ add_hash,
+ r#"
+ fn f() {
+ let s = $0r"random string";
+ }
+ "#,
+ r#"r"random string""#,
+ );
+ }
+
+ #[test]
+ fn add_hash_works() {
+ check_assist(
+ add_hash,
+ r#"
+ fn f() {
+ let s = $0r"random string";
+ }
+ "#,
+ r##"
+ fn f() {
+ let s = r#"random string"#;
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn add_more_hash_works() {
+ check_assist(
+ add_hash,
+ r##"
+ fn f() {
+ let s = $0r#"random"string"#;
+ }
+ "##,
+ r###"
+ fn f() {
+ let s = r##"random"string"##;
+ }
+ "###,
+ )
+ }
+
+ #[test]
+ fn add_hash_not_works() {
+ check_assist_not_applicable(
+ add_hash,
+ r#"
+ fn f() {
+ let s = $0"random string";
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn remove_hash_target() {
+ check_assist_target(
+ remove_hash,
+ r##"
+ fn f() {
+ let s = $0r#"random string"#;
+ }
+ "##,
+ r##"r#"random string"#"##,
+ );
+ }
+
+ #[test]
+ fn remove_hash_works() {
+ check_assist(
+ remove_hash,
+ r##"fn f() { let s = $0r#"random string"#; }"##,
+ r#"fn f() { let s = r"random string"; }"#,
+ )
+ }
+
+ #[test]
+ fn cant_remove_required_hash() {
+ cov_mark::check!(cant_remove_required_hash);
+ check_assist_not_applicable(
+ remove_hash,
+ r##"
+ fn f() {
+ let s = $0r#"random"str"ing"#;
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn remove_more_hash_works() {
+ check_assist(
+ remove_hash,
+ r###"
+ fn f() {
+ let s = $0r##"random string"##;
+ }
+ "###,
+ r##"
+ fn f() {
+ let s = r#"random string"#;
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn remove_hash_doesnt_work() {
+ check_assist_not_applicable(remove_hash, r#"fn f() { let s = $0"random string"; }"#);
+ }
+
+ #[test]
+ fn remove_hash_no_hash_doesnt_work() {
+ check_assist_not_applicable(remove_hash, r#"fn f() { let s = $0r"random string"; }"#);
+ }
+
+ #[test]
+ fn make_usual_string_target() {
+ check_assist_target(
+ make_usual_string,
+ r##"
+ fn f() {
+ let s = $0r#"random string"#;
+ }
+ "##,
+ r##"r#"random string"#"##,
+ );
+ }
+
+ #[test]
+ fn make_usual_string_works() {
+ check_assist(
+ make_usual_string,
+ r##"
+ fn f() {
+ let s = $0r#"random string"#;
+ }
+ "##,
+ r#"
+ fn f() {
+ let s = "random string";
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn make_usual_string_with_quote_works() {
+ check_assist(
+ make_usual_string,
+ r##"
+ fn f() {
+ let s = $0r#"random"str"ing"#;
+ }
+ "##,
+ r#"
+ fn f() {
+ let s = "random\"str\"ing";
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn make_usual_string_more_hash_works() {
+ check_assist(
+ make_usual_string,
+ r###"
+ fn f() {
+ let s = $0r##"random string"##;
+ }
+ "###,
+ r##"
+ fn f() {
+ let s = "random string";
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn make_usual_string_not_works() {
+ check_assist_not_applicable(
+ make_usual_string,
+ r#"
+ fn f() {
+ let s = $0"random string";
+ }
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
new file mode 100644
index 000000000..afaa7c933
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -0,0 +1,241 @@
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ match_ast, NodeOrToken, SyntaxElement, TextSize, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: remove_dbg
+//
+// Removes `dbg!()` macro call.
+//
+// ```
+// fn main() {
+// $0dbg!(92);
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// 92;
+// }
+// ```
+// Entry point for the `remove_dbg` assist. Replaces a `dbg!(...)` invocation
+// with its argument(s), adding parentheses where dropping the macro delimiters
+// would change how the surrounding expression parses.
+pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?;
+    let tt = macro_call.token_tree()?;
+    let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?);
+    // Only applies to a macro literally named `dbg` invoked with `!`.
+    if macro_call.path()?.segment()?.name_ref()?.text() != "dbg"
+        || macro_call.excl_token().is_none()
+    {
+        return None;
+    }
+
+    // Tokens between the delimiters: skip the left delimiter, stop at the right.
+    let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim);
+    // Split the token stream on commas and re-parse each group as an
+    // expression; if any group fails to parse, the assist is inapplicable.
+    let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
+    let input_expressions = input_expressions
+        .into_iter()
+        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+        .collect::<Option<Vec<ast::Expr>>>()?;
+
+    let macro_expr = ast::MacroExpr::cast(macro_call.syntax().parent()?)?;
+    let parent = macro_expr.syntax().parent()?;
+    let (range, text) = match &*input_expressions {
+        // dbg!()
+        [] => {
+            match_ast! {
+                match parent {
+                    // Directly in a block's statement list: remove the whole
+                    // expression, together with the whitespace preceding it.
+                    ast::StmtList(__) => {
+                        let range = macro_expr.syntax().text_range();
+                        let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) {
+                            Some(start) => range.cover_offset(start),
+                            None => range,
+                        };
+                        (range, String::new())
+                    },
+                    // An expression statement: remove the statement (including
+                    // its `;`) plus the leading whitespace.
+                    ast::ExprStmt(it) => {
+                        let range = it.syntax().text_range();
+                        let range = match whitespace_start(it.syntax().prev_sibling_or_token()) {
+                            Some(start) => range.cover_offset(start),
+                            None => range,
+                        };
+                        (range, String::new())
+                    },
+                    // Used as a value elsewhere: `dbg!()` evaluates to `()`.
+                    _ => (macro_call.syntax().text_range(), "()".to_owned())
+                }
+            }
+        }
+        // dbg!(expr0)
+        [expr] => {
+            // Decide whether the bare expression needs parentheses so that the
+            // parent expression keeps its meaning once the macro is gone.
+            let wrap = match ast::Expr::cast(parent) {
+                Some(parent) => match (expr, parent) {
+                    // `x as T as U` parses the same without parentheses.
+                    (ast::Expr::CastExpr(_), ast::Expr::CastExpr(_)) => false,
+                    // Prefix-style expressions inside postfix/cast contexts.
+                    (
+                        ast::Expr::BoxExpr(_) | ast::Expr::PrefixExpr(_) | ast::Expr::RefExpr(_),
+                        ast::Expr::AwaitExpr(_)
+                        | ast::Expr::CallExpr(_)
+                        | ast::Expr::CastExpr(_)
+                        | ast::Expr::FieldExpr(_)
+                        | ast::Expr::IndexExpr(_)
+                        | ast::Expr::MethodCallExpr(_)
+                        | ast::Expr::RangeExpr(_)
+                        | ast::Expr::TryExpr(_),
+                    ) => true,
+                    // Binary/cast/range expressions inside operator or postfix
+                    // contexts.
+                    (
+                        ast::Expr::BinExpr(_) | ast::Expr::CastExpr(_) | ast::Expr::RangeExpr(_),
+                        ast::Expr::AwaitExpr(_)
+                        | ast::Expr::BinExpr(_)
+                        | ast::Expr::CallExpr(_)
+                        | ast::Expr::CastExpr(_)
+                        | ast::Expr::FieldExpr(_)
+                        | ast::Expr::IndexExpr(_)
+                        | ast::Expr::MethodCallExpr(_)
+                        | ast::Expr::PrefixExpr(_)
+                        | ast::Expr::RangeExpr(_)
+                        | ast::Expr::RefExpr(_)
+                        | ast::Expr::TryExpr(_),
+                    ) => true,
+                    _ => false,
+                },
+                None => false,
+            };
+            (
+                macro_call.syntax().text_range(),
+                if wrap { format!("({})", expr) } else { expr.to_string() },
+            )
+        }
+        // dbg!(expr0, expr1, ...) -> the tuple `(expr0, expr1, ...)`
+        exprs => (macro_call.syntax().text_range(), format!("({})", exprs.iter().format(", "))),
+    };
+
+    acc.add(AssistId("remove_dbg", AssistKind::Refactor), "Remove dbg!()", range, |builder| {
+        builder.replace(range, text);
+    })
+}
+
+/// If `it` is a whitespace token, returns the offset where that whitespace
+/// begins; otherwise `None`.
+fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> {
+    let token = it?.into_token()?;
+    let ws = ast::Whitespace::cast(token)?;
+    Some(ws.syntax().text_range().start())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ fn check(ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_assist(
+ remove_dbg,
+ &format!("fn main() {{\n{}\n}}", ra_fixture_before),
+ &format!("fn main() {{\n{}\n}}", ra_fixture_after),
+ );
+ }
+
+ #[test]
+ fn test_remove_dbg() {
+ check("$0dbg!(1 + 1)", "1 + 1");
+ check("dbg!$0(1 + 1)", "1 + 1");
+ check("dbg!(1 $0+ 1)", "1 + 1");
+ check("dbg![$01 + 1]", "1 + 1");
+ check("dbg!{$01 + 1}", "1 + 1");
+ }
+
+ #[test]
+ fn test_remove_dbg_not_applicable() {
+ check_assist_not_applicable(remove_dbg, "fn main() {$0vec![1, 2, 3]}");
+ check_assist_not_applicable(remove_dbg, "fn main() {$0dbg(5, 6, 7)}");
+ check_assist_not_applicable(remove_dbg, "fn main() {$0dbg!(5, 6, 7}");
+ }
+
+ #[test]
+ fn test_remove_dbg_keep_semicolon_in_let() {
+ // https://github.com/rust-lang/rust-analyzer/issues/5129#issuecomment-651399779
+ check(
+ r#"let res = $0dbg!(1 * 20); // needless comment"#,
+ r#"let res = 1 * 20; // needless comment"#,
+ );
+ check(r#"let res = $0dbg!(); // needless comment"#, r#"let res = (); // needless comment"#);
+ check(
+ r#"let res = $0dbg!(1, 2); // needless comment"#,
+ r#"let res = (1, 2); // needless comment"#,
+ );
+ }
+
+ #[test]
+ fn test_remove_dbg_cast_cast() {
+ check(r#"let res = $0dbg!(x as u32) as u32;"#, r#"let res = x as u32 as u32;"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_prefix() {
+ check(r#"let res = $0dbg!(&result).foo();"#, r#"let res = (&result).foo();"#);
+ check(r#"let res = &$0dbg!(&result);"#, r#"let res = &&result;"#);
+ check(r#"let res = $0dbg!(!result) && true;"#, r#"let res = !result && true;"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_post_expr() {
+ check(r#"let res = $0dbg!(fut.await).foo();"#, r#"let res = fut.await.foo();"#);
+ check(r#"let res = $0dbg!(result?).foo();"#, r#"let res = result?.foo();"#);
+ check(r#"let res = $0dbg!(foo as u32).foo();"#, r#"let res = (foo as u32).foo();"#);
+ check(r#"let res = $0dbg!(array[3]).foo();"#, r#"let res = array[3].foo();"#);
+ check(r#"let res = $0dbg!(tuple.3).foo();"#, r#"let res = tuple.3.foo();"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_range_expr() {
+ check(r#"let res = $0dbg!(foo..bar).foo();"#, r#"let res = (foo..bar).foo();"#);
+ check(r#"let res = $0dbg!(foo..=bar).foo();"#, r#"let res = (foo..=bar).foo();"#);
+ }
+
+ #[test]
+ fn test_remove_empty_dbg() {
+ check_assist(remove_dbg, r#"fn foo() { $0dbg!(); }"#, r#"fn foo() { }"#);
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ $0dbg!();
+}
+"#,
+ r#"
+fn foo() {
+}
+"#,
+ );
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ let test = $0dbg!();
+}"#,
+ r#"
+fn foo() {
+ let test = ();
+}"#,
+ );
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ let t = {
+ println!("Hello, world");
+ $0dbg!()
+ };
+}"#,
+ r#"
+fn foo() {
+ let t = {
+ println!("Hello, world");
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_remove_multi_dbg() {
+ check(r#"$0dbg!(0, 1)"#, r#"(0, 1)"#);
+ check(r#"$0dbg!(0, (1, 2))"#, r#"(0, (1, 2))"#);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs
new file mode 100644
index 000000000..0b299e834
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs
@@ -0,0 +1,37 @@
+use syntax::{SyntaxKind, TextRange, T};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: remove_mut
+//
+// Removes the `mut` keyword.
+//
+// ```
+// impl Walrus {
+// fn feed(&mut$0 self, amount: u32) {}
+// }
+// ```
+// ->
+// ```
+// impl Walrus {
+// fn feed(&self, amount: u32) {}
+// }
+// ```
+// Entry point for the `remove_mut` assist: deletes the `mut` keyword under the
+// cursor together with the single whitespace token that follows it, if any.
+pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let mut_token = ctx.find_token_syntax_at_offset(T![mut])?;
+    let delete_from = mut_token.text_range().start();
+    // Extend the deletion over trailing whitespace so no double space remains.
+    let delete_to = mut_token
+        .next_token()
+        .filter(|tok| tok.kind() == SyntaxKind::WHITESPACE)
+        .map_or(mut_token.text_range().end(), |ws| ws.text_range().end());
+
+    acc.add(
+        AssistId("remove_mut", AssistKind::Refactor),
+        "Remove `mut` keyword",
+        mut_token.text_range(),
+        |builder| {
+            builder.delete(TextRange::new(delete_from, delete_to));
+        },
+    )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
new file mode 100644
index 000000000..59ea94ea1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
@@ -0,0 +1,409 @@
+use ide_db::{base_db::FileId, defs::Definition, search::FileReference};
+use syntax::{
+ algo::find_node_at_range,
+ ast::{self, HasArgList},
+ AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, T,
+};
+
+use SyntaxKind::WHITESPACE;
+
+use crate::{
+ assist_context::AssistBuilder, utils::next_prev, AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: remove_unused_param
+//
+// Removes unused function parameter.
+//
+// ```
+// fn frobnicate(x: i32$0) {}
+//
+// fn main() {
+// frobnicate(92);
+// }
+// ```
+// ->
+// ```
+// fn frobnicate() {}
+//
+// fn main() {
+// frobnicate();
+// }
+// ```
+pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let param: ast::Param = ctx.find_node_at_offset()?;
+ let ident_pat = match param.pat()? {
+ ast::Pat::IdentPat(it) => it,
+ _ => return None,
+ };
+ let func = param.syntax().ancestors().find_map(ast::Fn::cast)?;
+ let is_self_present =
+ param.syntax().parent()?.children().find_map(ast::SelfParam::cast).is_some();
+
+ // check if fn is in impl Trait for ..
+ if func
+ .syntax()
+ .parent() // AssocItemList
+ .and_then(|x| x.parent())
+ .and_then(ast::Impl::cast)
+ .map_or(false, |imp| imp.trait_().is_some())
+ {
+ cov_mark::hit!(trait_impl);
+ return None;
+ }
+
+ let mut param_position = func.param_list()?.params().position(|it| it == param)?;
+ // param_list() does not take the self param into consideration, hence this additional check
+ // is required. For associated functions, param_position is incremented here. For inherent
+    // calls we revert the increment below, in process_usage, as those calls will not have an
+ // explicit self parameter.
+ if is_self_present {
+ param_position += 1;
+ }
+ let fn_def = {
+ let func = ctx.sema.to_def(&func)?;
+ Definition::Function(func)
+ };
+
+ let param_def = {
+ let local = ctx.sema.to_def(&ident_pat)?;
+ Definition::Local(local)
+ };
+ if param_def.usages(&ctx.sema).at_least_one() {
+ cov_mark::hit!(keep_used);
+ return None;
+ }
+ acc.add(
+ AssistId("remove_unused_param", AssistKind::Refactor),
+ "Remove unused parameter",
+ param.syntax().text_range(),
+ |builder| {
+ builder.delete(range_to_remove(param.syntax()));
+ for (file_id, references) in fn_def.usages(&ctx.sema).all() {
+ process_usages(ctx, builder, file_id, references, param_position, is_self_present);
+ }
+ },
+ )
+}
+
+fn process_usages(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ file_id: FileId,
+ references: Vec<FileReference>,
+ arg_to_remove: usize,
+ is_self_present: bool,
+) {
+ let source_file = ctx.sema.parse(file_id);
+ builder.edit_file(file_id);
+ let possible_ranges = references
+ .into_iter()
+ .filter_map(|usage| process_usage(&source_file, usage, arg_to_remove, is_self_present));
+
+ let mut ranges_to_delete: Vec<TextRange> = vec![];
+ for range in possible_ranges {
+ if !ranges_to_delete.iter().any(|it| it.contains_range(range)) {
+ ranges_to_delete.push(range)
+ }
+ }
+
+ for range in ranges_to_delete {
+ builder.delete(range)
+ }
+}
+
+fn process_usage(
+ source_file: &SourceFile,
+ FileReference { range, .. }: FileReference,
+ mut arg_to_remove: usize,
+ is_self_present: bool,
+) -> Option<TextRange> {
+ let call_expr_opt: Option<ast::CallExpr> = find_node_at_range(source_file.syntax(), range);
+ if let Some(call_expr) = call_expr_opt {
+ let call_expr_range = call_expr.expr()?.syntax().text_range();
+ if !call_expr_range.contains_range(range) {
+ return None;
+ }
+
+ let arg = call_expr.arg_list()?.args().nth(arg_to_remove)?;
+ return Some(range_to_remove(arg.syntax()));
+ }
+
+ let method_call_expr_opt: Option<ast::MethodCallExpr> =
+ find_node_at_range(source_file.syntax(), range);
+ if let Some(method_call_expr) = method_call_expr_opt {
+ let method_call_expr_range = method_call_expr.name_ref()?.syntax().text_range();
+ if !method_call_expr_range.contains_range(range) {
+ return None;
+ }
+
+ if is_self_present {
+ arg_to_remove -= 1;
+ }
+
+ let arg = method_call_expr.arg_list()?.args().nth(arg_to_remove)?;
+ return Some(range_to_remove(arg.syntax()));
+ }
+
+ None
+}
+
+pub(crate) fn range_to_remove(node: &SyntaxNode) -> TextRange {
+ let up_to_comma = next_prev().find_map(|dir| {
+ node.siblings_with_tokens(dir)
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T![,])
+ .map(|it| (dir, it))
+ });
+ if let Some((dir, token)) = up_to_comma {
+ if node.next_sibling().is_some() {
+ let up_to_space = token
+ .siblings_with_tokens(dir)
+ .skip(1)
+ .take_while(|it| it.kind() == WHITESPACE)
+ .last()
+ .and_then(|it| it.into_token());
+ return node
+ .text_range()
+ .cover(up_to_space.map_or(token.text_range(), |it| it.text_range()));
+ }
+ node.text_range().cover(token.text_range())
+ } else {
+ node.text_range()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn remove_unused() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn a() { foo(9, 2) }
+fn foo(x: i32, $0y: i32) { x; }
+fn b() { foo(9, 2,) }
+"#,
+ r#"
+fn a() { foo(9) }
+fn foo(x: i32) { x; }
+fn b() { foo(9, ) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_first_param() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn foo($0x: i32, y: i32) { y; }
+fn a() { foo(1, 2) }
+fn b() { foo(1, 2,) }
+"#,
+ r#"
+fn foo(y: i32) { y; }
+fn a() { foo(2) }
+fn b() { foo(2,) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_single_param() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn foo($0x: i32) { 0; }
+fn a() { foo(1) }
+fn b() { foo(1, ) }
+"#,
+ r#"
+fn foo() { 0; }
+fn a() { foo() }
+fn b() { foo( ) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_surrounded_by_parms() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn foo(x: i32, $0y: i32, z: i32) { x; }
+fn a() { foo(1, 2, 3) }
+fn b() { foo(1, 2, 3,) }
+"#,
+ r#"
+fn foo(x: i32, z: i32) { x; }
+fn a() { foo(1, 3) }
+fn b() { foo(1, 3,) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_qualified_call() {
+ check_assist(
+ remove_unused_param,
+ r#"
+mod bar { pub fn foo(x: i32, $0y: i32) { x; } }
+fn b() { bar::foo(9, 2) }
+"#,
+ r#"
+mod bar { pub fn foo(x: i32) { x; } }
+fn b() { bar::foo(9) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_turbofished_func() {
+ check_assist(
+ remove_unused_param,
+ r#"
+pub fn foo<T>(x: T, $0y: i32) { x; }
+fn b() { foo::<i32>(9, 2) }
+"#,
+ r#"
+pub fn foo<T>(x: T) { x; }
+fn b() { foo::<i32>(9) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_generic_unused_param_func() {
+ check_assist(
+ remove_unused_param,
+ r#"
+pub fn foo<T>(x: i32, $0y: T) { x; }
+fn b() { foo::<i32>(9, 2) }
+fn b2() { foo(9, 2) }
+"#,
+ r#"
+pub fn foo<T>(x: i32) { x; }
+fn b() { foo::<i32>(9) }
+fn b2() { foo(9) }
+"#,
+ );
+ }
+
+ #[test]
+ fn keep_used() {
+ cov_mark::check!(keep_used);
+ check_assist_not_applicable(
+ remove_unused_param,
+ r#"
+fn foo(x: i32, $0y: i32) { y; }
+fn main() { foo(9, 2) }
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_impl() {
+ cov_mark::check!(trait_impl);
+ check_assist_not_applicable(
+ remove_unused_param,
+ r#"
+trait Trait {
+ fn foo(x: i32);
+}
+impl Trait for () {
+ fn foo($0x: i32) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_across_files() {
+ check_assist(
+ remove_unused_param,
+ r#"
+//- /main.rs
+fn foo(x: i32, $0y: i32) { x; }
+
+mod foo;
+
+//- /foo.rs
+use super::foo;
+
+fn bar() {
+ let _ = foo(1, 2);
+}
+"#,
+ r#"
+//- /main.rs
+fn foo(x: i32) { x; }
+
+mod foo;
+
+//- /foo.rs
+use super::foo;
+
+fn bar() {
+ let _ = foo(1);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_remove_method_param() {
+ check_assist(
+ remove_unused_param,
+ r#"
+struct S;
+impl S { fn f(&self, $0_unused: i32) {} }
+fn main() {
+ S.f(92);
+ S.f();
+ S.f(93, 92);
+ S::f(&S, 92);
+}
+"#,
+ r#"
+struct S;
+impl S { fn f(&self) {} }
+fn main() {
+ S.f();
+ S.f();
+ S.f(92);
+ S::f(&S);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn nested_call() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn foo(x: i32, $0y: i32) -> i32 {
+ x
+}
+
+fn bar() {
+ foo(1, foo(2, 3));
+}
+"#,
+ r#"
+fn foo(x: i32) -> i32 {
+ x
+}
+
+fn bar() {
+ foo(1);
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
new file mode 100644
index 000000000..a899c7a64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
@@ -0,0 +1,212 @@
+use either::Either;
+use ide_db::FxHashMap;
+use itertools::Itertools;
+use syntax::{ast, ted, AstNode};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: reorder_fields
+//
+// Reorder the fields of record literals and record patterns in the same order as in
+// the definition.
+//
+// ```
+// struct Foo {foo: i32, bar: i32};
+// const test: Foo = $0Foo {bar: 0, foo: 1}
+// ```
+// ->
+// ```
+// struct Foo {foo: i32, bar: i32};
+// const test: Foo = Foo {foo: 1, bar: 0}
+// ```
+pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let record = ctx
+ .find_node_at_offset::<ast::RecordExpr>()
+ .map(Either::Left)
+ .or_else(|| ctx.find_node_at_offset::<ast::RecordPat>().map(Either::Right))?;
+
+ let path = record.as_ref().either(|it| it.path(), |it| it.path())?;
+ let ranks = compute_fields_ranks(&path, ctx)?;
+ let get_rank_of_field =
+ |of: Option<_>| *ranks.get(&of.unwrap_or_default()).unwrap_or(&usize::MAX);
+
+ let field_list = match &record {
+ Either::Left(it) => Either::Left(it.record_expr_field_list()?),
+ Either::Right(it) => Either::Right(it.record_pat_field_list()?),
+ };
+ let fields = match field_list {
+ Either::Left(it) => Either::Left((
+ it.fields()
+ .sorted_unstable_by_key(|field| {
+ get_rank_of_field(field.field_name().map(|it| it.to_string()))
+ })
+ .collect::<Vec<_>>(),
+ it,
+ )),
+ Either::Right(it) => Either::Right((
+ it.fields()
+ .sorted_unstable_by_key(|field| {
+ get_rank_of_field(field.field_name().map(|it| it.to_string()))
+ })
+ .collect::<Vec<_>>(),
+ it,
+ )),
+ };
+
+ let is_sorted = fields.as_ref().either(
+ |(sorted, field_list)| field_list.fields().zip(sorted).all(|(a, b)| a == *b),
+ |(sorted, field_list)| field_list.fields().zip(sorted).all(|(a, b)| a == *b),
+ );
+ if is_sorted {
+ cov_mark::hit!(reorder_sorted_fields);
+ return None;
+ }
+ let target = record.as_ref().either(AstNode::syntax, AstNode::syntax).text_range();
+ acc.add(
+ AssistId("reorder_fields", AssistKind::RefactorRewrite),
+ "Reorder record fields",
+ target,
+ |builder| match fields {
+ Either::Left((sorted, field_list)) => {
+ replace(builder.make_mut(field_list).fields(), sorted)
+ }
+ Either::Right((sorted, field_list)) => {
+ replace(builder.make_mut(field_list).fields(), sorted)
+ }
+ },
+ )
+}
+
+fn replace<T: AstNode + PartialEq>(
+ fields: impl Iterator<Item = T>,
+ sorted_fields: impl IntoIterator<Item = T>,
+) {
+ fields.zip(sorted_fields).for_each(|(field, sorted_field)| {
+ ted::replace(field.syntax(), sorted_field.syntax().clone_for_update())
+ });
+}
+
+fn compute_fields_ranks(
+ path: &ast::Path,
+ ctx: &AssistContext<'_>,
+) -> Option<FxHashMap<String, usize>> {
+ let strukt = match ctx.sema.resolve_path(path) {
+ Some(hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Struct(it)))) => it,
+ _ => return None,
+ };
+
+ let res = strukt
+ .fields(ctx.db())
+ .into_iter()
+ .enumerate()
+ .map(|(idx, field)| (field.name(ctx.db()).to_string(), idx))
+ .collect();
+
+ Some(res)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn reorder_sorted_fields() {
+ cov_mark::check!(reorder_sorted_fields);
+ check_assist_not_applicable(
+ reorder_fields,
+ r#"
+struct Foo { foo: i32, bar: i32 }
+const test: Foo = $0Foo { foo: 0, bar: 0 };
+"#,
+ )
+ }
+
+ #[test]
+ fn trivial_empty_fields() {
+ check_assist_not_applicable(
+ reorder_fields,
+ r#"
+struct Foo {}
+const test: Foo = $0Foo {};
+"#,
+ )
+ }
+
+ #[test]
+ fn reorder_struct_fields() {
+ check_assist(
+ reorder_fields,
+ r#"
+struct Foo { foo: i32, bar: i32 }
+const test: Foo = $0Foo { bar: 0, foo: 1 };
+"#,
+ r#"
+struct Foo { foo: i32, bar: i32 }
+const test: Foo = Foo { foo: 1, bar: 0 };
+"#,
+ )
+ }
+ #[test]
+ fn reorder_struct_pattern() {
+ check_assist(
+ reorder_fields,
+ r#"
+struct Foo { foo: i64, bar: i64, baz: i64 }
+
+fn f(f: Foo) -> {
+ match f {
+ $0Foo { baz: 0, ref mut bar, .. } => (),
+ _ => ()
+ }
+}
+"#,
+ r#"
+struct Foo { foo: i64, bar: i64, baz: i64 }
+
+fn f(f: Foo) -> {
+ match f {
+ Foo { ref mut bar, baz: 0, .. } => (),
+ _ => ()
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn reorder_with_extra_field() {
+ check_assist(
+ reorder_fields,
+ r#"
+struct Foo { foo: String, bar: String }
+
+impl Foo {
+ fn new() -> Foo {
+ let foo = String::new();
+ $0Foo {
+ bar: foo.clone(),
+ extra: "Extra field",
+ foo,
+ }
+ }
+}
+"#,
+ r#"
+struct Foo { foo: String, bar: String }
+
+impl Foo {
+ fn new() -> Foo {
+ let foo = String::new();
+ Foo {
+ foo,
+ bar: foo.clone(),
+ extra: "Extra field",
+ }
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs
new file mode 100644
index 000000000..208c3e109
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs
@@ -0,0 +1,284 @@
+use hir::{PathResolution, Semantics};
+use ide_db::{FxHashMap, RootDatabase};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, HasName},
+ ted, AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: reorder_impl_items
+//
+// Reorder the items of an `impl Trait`. The items will be ordered
+// in the same order as in the trait definition.
+//
+// ```
+// trait Foo {
+// type A;
+// const B: u8;
+// fn c();
+// }
+//
+// struct Bar;
+// $0impl Foo for Bar {
+// const B: u8 = 17;
+// fn c() {}
+// type A = String;
+// }
+// ```
+// ->
+// ```
+// trait Foo {
+// type A;
+// const B: u8;
+// fn c();
+// }
+//
+// struct Bar;
+// impl Foo for Bar {
+// type A = String;
+// const B: u8 = 17;
+// fn c() {}
+// }
+// ```
+pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_ast = ctx.find_node_at_offset::<ast::Impl>()?;
+ let items = impl_ast.assoc_item_list()?;
+ let assoc_items = items.assoc_items().collect::<Vec<_>>();
+
+ let path = impl_ast
+ .trait_()
+ .and_then(|t| match t {
+ ast::Type::PathType(path) => Some(path),
+ _ => None,
+ })?
+ .path()?;
+
+ let ranks = compute_item_ranks(&path, ctx)?;
+ let sorted: Vec<_> = assoc_items
+ .iter()
+ .cloned()
+ .sorted_by_key(|i| {
+ let name = match i {
+ ast::AssocItem::Const(c) => c.name(),
+ ast::AssocItem::Fn(f) => f.name(),
+ ast::AssocItem::TypeAlias(t) => t.name(),
+ ast::AssocItem::MacroCall(_) => None,
+ };
+
+ name.and_then(|n| ranks.get(&n.to_string()).copied()).unwrap_or(usize::max_value())
+ })
+ .collect();
+
+ // Don't edit already sorted methods:
+ if assoc_items == sorted {
+ cov_mark::hit!(not_applicable_if_sorted);
+ return None;
+ }
+
+ let target = items.syntax().text_range();
+ acc.add(
+ AssistId("reorder_impl_items", AssistKind::RefactorRewrite),
+ "Sort items by trait definition",
+ target,
+ |builder| {
+ let assoc_items =
+ assoc_items.into_iter().map(|item| builder.make_mut(item)).collect::<Vec<_>>();
+ assoc_items
+ .into_iter()
+ .zip(sorted)
+ .for_each(|(old, new)| ted::replace(old.syntax(), new.clone_for_update().syntax()));
+ },
+ )
+}
+
+fn compute_item_ranks(
+ path: &ast::Path,
+ ctx: &AssistContext<'_>,
+) -> Option<FxHashMap<String, usize>> {
+ let td = trait_definition(path, &ctx.sema)?;
+
+ Some(
+ td.items(ctx.db())
+ .iter()
+ .flat_map(|i| i.name(ctx.db()))
+ .enumerate()
+ .map(|(idx, name)| (name.to_string(), idx))
+ .collect(),
+ )
+}
+
+fn trait_definition(path: &ast::Path, sema: &Semantics<'_, RootDatabase>) -> Option<hir::Trait> {
+ match sema.resolve_path(path)? {
+ PathResolution::Def(hir::ModuleDef::Trait(trait_)) => Some(trait_),
+ _ => None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn not_applicable_if_sorted() {
+ cov_mark::check!(not_applicable_if_sorted);
+ check_assist_not_applicable(
+ reorder_impl_items,
+ r#"
+trait Bar {
+ type T;
+ const C: ();
+ fn a() {}
+ fn z() {}
+ fn b() {}
+}
+struct Foo;
+$0impl Bar for Foo {
+ type T = ();
+ const C: () = ();
+ fn a() {}
+ fn z() {}
+ fn b() {}
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn reorder_impl_trait_functions() {
+ check_assist(
+ reorder_impl_items,
+ r#"
+trait Bar {
+ fn a() {}
+ fn c() {}
+ fn b() {}
+ fn d() {}
+}
+
+struct Foo;
+$0impl Bar for Foo {
+ fn d() {}
+ fn b() {}
+ fn c() {}
+ fn a() {}
+}
+"#,
+ r#"
+trait Bar {
+ fn a() {}
+ fn c() {}
+ fn b() {}
+ fn d() {}
+}
+
+struct Foo;
+impl Bar for Foo {
+ fn a() {}
+ fn c() {}
+ fn b() {}
+ fn d() {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_empty() {
+ check_assist_not_applicable(
+ reorder_impl_items,
+ r#"
+trait Bar {};
+struct Foo;
+$0impl Bar for Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn reorder_impl_trait_items() {
+ check_assist(
+ reorder_impl_items,
+ r#"
+trait Bar {
+ fn a() {}
+ type T0;
+ fn c() {}
+ const C1: ();
+ fn b() {}
+ type T1;
+ fn d() {}
+ const C0: ();
+}
+
+struct Foo;
+$0impl Bar for Foo {
+ type T1 = ();
+ fn d() {}
+ fn b() {}
+ fn c() {}
+ const C1: () = ();
+ fn a() {}
+ type T0 = ();
+ const C0: () = ();
+}
+ "#,
+ r#"
+trait Bar {
+ fn a() {}
+ type T0;
+ fn c() {}
+ const C1: ();
+ fn b() {}
+ type T1;
+ fn d() {}
+ const C0: ();
+}
+
+struct Foo;
+impl Bar for Foo {
+ fn a() {}
+ type T0 = ();
+ fn c() {}
+ const C1: () = ();
+ fn b() {}
+ type T1 = ();
+ fn d() {}
+ const C0: () = ();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn reorder_impl_trait_items_uneven_ident_lengths() {
+ check_assist(
+ reorder_impl_items,
+ r#"
+trait Bar {
+ type Foo;
+ type Fooo;
+}
+
+struct Foo;
+impl Bar for Foo {
+ type Fooo = ();
+ type Foo = ();$0
+}"#,
+ r#"
+trait Bar {
+ type Foo;
+ type Fooo;
+}
+
+struct Foo;
+impl Bar for Foo {
+ type Foo = ();
+ type Fooo = ();
+}"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
new file mode 100644
index 000000000..bd50208da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -0,0 +1,1250 @@
+use hir::{InFile, ModuleDef};
+use ide_db::{
+ helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator,
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, HasName},
+ SyntaxKind::WHITESPACE,
+};
+
+use crate::{
+ assist_context::{AssistBuilder, AssistContext, Assists},
+ utils::{
+ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body,
+ generate_trait_impl_text, render_snippet, Cursor, DefaultMethods,
+ },
+ AssistId, AssistKind,
+};
+
+// Assist: replace_derive_with_manual_impl
+//
+// Converts a `derive` impl into a manual one.
+//
+// ```
+// # //- minicore: derive
+// # trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+// #[derive(Deb$0ug, Display)]
+// struct S;
+// ```
+// ->
+// ```
+// # trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+// #[derive(Display)]
+// struct S;
+//
+// impl Debug for S {
+// $0fn fmt(&self, f: &mut Formatter) -> Result<()> {
+// f.debug_struct("S").finish()
+// }
+// }
+// ```
+pub(crate) fn replace_derive_with_manual_impl(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let attr = ctx.find_node_at_offset_with_descend::<ast::Attr>()?;
+ let path = attr.path()?;
+ let hir_file = ctx.sema.hir_file_for(attr.syntax());
+ if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) {
+ return None;
+ }
+
+ let InFile { file_id, value } = hir_file.call_node(ctx.db())?;
+ if file_id.is_macro() {
+ // FIXME: make this work in macro files
+ return None;
+ }
+ // collect the derive paths from the #[derive] expansion
+ let current_derives = ctx
+ .sema
+ .parse_or_expand(hir_file)?
+ .descendants()
+ .filter_map(ast::Attr::cast)
+ .filter_map(|attr| attr.path())
+ .collect::<Vec<_>>();
+
+ let adt = value.parent().and_then(ast::Adt::cast)?;
+ let attr = ast::Attr::cast(value)?;
+ let args = attr.token_tree()?;
+
+ let current_module = ctx.sema.scope(adt.syntax())?.module();
+ let current_crate = current_module.krate();
+
+ let found_traits = items_locator::items_with_name(
+ &ctx.sema,
+ current_crate,
+ NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
+ items_locator::AssocItemSearch::Exclude,
+ Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| match item.as_module_def()? {
+ ModuleDef::Trait(trait_) => Some(trait_),
+ _ => None,
+ })
+ .flat_map(|trait_| {
+ current_module
+ .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_))
+ .as_ref()
+ .map(mod_path_to_ast)
+ .zip(Some(trait_))
+ });
+
+ let mut no_traits_found = true;
+ for (replace_trait_path, trait_) in found_traits.inspect(|_| no_traits_found = false) {
+ add_assist(
+ acc,
+ ctx,
+ &attr,
+ &current_derives,
+ &args,
+ &path,
+ &replace_trait_path,
+ Some(trait_),
+ &adt,
+ )?;
+ }
+ if no_traits_found {
+ add_assist(acc, ctx, &attr, &current_derives, &args, &path, &path, None, &adt)?;
+ }
+ Some(())
+}
+
+fn add_assist(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ attr: &ast::Attr,
+ old_derives: &[ast::Path],
+ old_tree: &ast::TokenTree,
+ old_trait_path: &ast::Path,
+ replace_trait_path: &ast::Path,
+ trait_: Option<hir::Trait>,
+ adt: &ast::Adt,
+) -> Option<()> {
+ let target = attr.syntax().text_range();
+ let annotated_name = adt.name()?;
+ let label = format!("Convert to manual `impl {} for {}`", replace_trait_path, annotated_name);
+
+ acc.add(
+ AssistId("replace_derive_with_manual_impl", AssistKind::Refactor),
+ label,
+ target,
+ |builder| {
+ let insert_pos = adt.syntax().text_range().end();
+ let impl_def_with_items =
+ impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path);
+ update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
+ let trait_path = replace_trait_path.to_string();
+ match (ctx.config.snippet_cap, impl_def_with_items) {
+ (None, _) => {
+ builder.insert(insert_pos, generate_trait_impl_text(adt, &trait_path, ""))
+ }
+ (Some(cap), None) => builder.insert_snippet(
+ cap,
+ insert_pos,
+ generate_trait_impl_text(adt, &trait_path, " $0"),
+ ),
+ (Some(cap), Some((impl_def, first_assoc_item))) => {
+ let mut cursor = Cursor::Before(first_assoc_item.syntax());
+ let placeholder;
+ if let ast::AssocItem::Fn(ref func) = first_assoc_item {
+ if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
+ {
+ if m.syntax().text() == "todo!()" {
+ placeholder = m;
+ cursor = Cursor::Replace(placeholder.syntax());
+ }
+ }
+ }
+
+ builder.insert_snippet(
+ cap,
+ insert_pos,
+ format!("\n\n{}", render_snippet(cap, impl_def.syntax(), cursor)),
+ )
+ }
+ };
+ },
+ )
+}
+
+fn impl_def_from_trait(
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+ adt: &ast::Adt,
+ annotated_name: &ast::Name,
+ trait_: Option<hir::Trait>,
+ trait_path: &ast::Path,
+) -> Option<(ast::Impl, ast::AssocItem)> {
+ let trait_ = trait_?;
+ let target_scope = sema.scope(annotated_name.syntax())?;
+ let trait_items = filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No);
+ if trait_items.is_empty() {
+ return None;
+ }
+ let impl_def = {
+ use syntax::ast::Impl;
+ let text = generate_trait_impl_text(adt, trait_path.to_string().as_str(), "");
+ let parse = syntax::SourceFile::parse(&text);
+ let node = match parse.tree().syntax().descendants().find_map(Impl::cast) {
+ Some(it) => it,
+ None => {
+ panic!(
+ "Failed to make ast node `{}` from text {}",
+ std::any::type_name::<Impl>(),
+ text
+ )
+ }
+ };
+ let node = node.clone_subtree();
+ assert_eq!(node.syntax().text_range().start(), 0.into());
+ node
+ };
+
+ let trait_items = trait_items
+ .into_iter()
+ .map(|it| {
+ if sema.hir_file_for(it.syntax()).is_macro() {
+ if let Some(it) = ast::AssocItem::cast(insert_ws_into(it.syntax().clone())) {
+ return it;
+ }
+ }
+ it.clone_for_update()
+ })
+ .collect();
+ let (impl_def, first_assoc_item) =
+ add_trait_assoc_items_to_impl(sema, trait_items, trait_, impl_def, target_scope);
+
+ // Generate a default `impl` function body for the derived trait.
+ if let ast::AssocItem::Fn(ref func) = first_assoc_item {
+ let _ = gen_trait_fn_body(func, trait_path, adt);
+ };
+
+ Some((impl_def, first_assoc_item))
+}
+
+fn update_attribute(
+ builder: &mut AssistBuilder,
+ old_derives: &[ast::Path],
+ old_tree: &ast::TokenTree,
+ old_trait_path: &ast::Path,
+ attr: &ast::Attr,
+) {
+ let new_derives = old_derives
+ .iter()
+ .filter(|t| t.to_string() != old_trait_path.to_string())
+ .collect::<Vec<_>>();
+ let has_more_derives = !new_derives.is_empty();
+
+ if has_more_derives {
+ let new_derives = format!("({})", new_derives.iter().format(", "));
+ builder.replace(old_tree.syntax().text_range(), new_derives);
+ } else {
+ let attr_range = attr.syntax().text_range();
+ builder.delete(attr_range);
+
+ if let Some(line_break_range) = attr
+ .syntax()
+ .next_sibling_or_token()
+ .filter(|t| t.kind() == WHITESPACE)
+ .map(|t| t.text_range())
+ {
+ builder.delete(line_break_range);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_custom_impl_debug_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo {
+ bar: String,
+}
+"#,
+ r#"
+struct Foo {
+ bar: String,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").field("bar", &self.bar).finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo(String, usize);
+"#,
+ r#"struct Foo(String, usize);
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_tuple("Foo").field(&self.0).field(&self.1).finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar => write!(f, "Bar"),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_debug_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar(usize, usize),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(usize, usize),
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar(arg0, arg1) => f.debug_tuple("Bar").field(arg0).field(arg1).finish(),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar {
+ baz: usize,
+ qux: usize,
+ },
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ baz: usize,
+ qux: usize,
+ },
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar { baz, qux } => f.debug_struct("Bar").field("baz", baz).field("qux", qux).finish(),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo {
+ foo: usize,
+}
+"#,
+ r#"
+struct Foo {
+ foo: usize,
+}
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self { foo: Default::default() }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo(usize);
+"#,
+ r#"
+struct Foo(usize);
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self(Default::default())
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ self.bin.hash(state);
+ self.bar.hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ self.0.hash(state);
+ self.1.hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ core::mem::discriminant(self).hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { bin: self.bin.clone(), bar: self.bar.clone() }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self(self.0.clone(), self.1.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar => Self::Bar,
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar(String),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(String),
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar(arg0) => Self::Bar(arg0.clone()),
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar { bin } => Self::Bar { bin: bin.clone() },
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo {
+ bin: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+}
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ self.bin.partial_cmp(&other.bin)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_record_struct_multi_field() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+ baz: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+ baz: usize,
+}
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ match self.bin.partial_cmp(&other.bin) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ match self.bar.partial_cmp(&other.bar) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ self.baz.partial_cmp(&other.baz)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo(usize, usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize, usize);
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ match self.0.partial_cmp(&other.0) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ match self.1.partial_cmp(&other.1) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ self.2.partial_cmp(&other.2)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ self.bin == other.bin && self.bar == other.bar
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0 && self.1 == other.1
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ core::mem::discriminant(self) == core::mem::discriminant(other)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar(String),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(String),
+ Baz,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Self::Bar(l0), Self::Bar(r0)) => l0 == r0,
+ _ => core::mem::discriminant(self) == core::mem::discriminant(other),
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz {
+ qux: String,
+ fez: String,
+ },
+ Qux {},
+ Bin,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz {
+ qux: String,
+ fez: String,
+ },
+ Qux {},
+ Bin,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Self::Bar { bin: l_bin }, Self::Bar { bin: r_bin }) => l_bin == r_bin,
+ (Self::Baz { qux: l_qux, fez: l_fez }, Self::Baz { qux: r_qux, fez: r_fez }) => l_qux == r_qux && l_fez == r_fez,
+ _ => core::mem::discriminant(self) == core::mem::discriminant(other),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_all() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+mod foo {
+ pub trait Bar {
+ type Qux;
+ const Baz: usize = 42;
+ const Fez: usize;
+ fn foo();
+ fn bar() {}
+ }
+}
+
+#[derive($0Bar)]
+struct Foo {
+ bar: String,
+}
+"#,
+ r#"
+mod foo {
+ pub trait Bar {
+ type Qux;
+ const Baz: usize = 42;
+ const Fez: usize;
+ fn foo();
+ fn bar() {}
+ }
+}
+
+struct Foo {
+ bar: String,
+}
+
+impl foo::Bar for Foo {
+ $0type Qux;
+
+ const Baz: usize = 42;
+
+ const Fez: usize;
+
+ fn foo() {
+ todo!()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_for_unique_input_unknown() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Debu$0g)]
+struct Foo {
+ bar: String,
+}
+ "#,
+ r#"
+struct Foo {
+ bar: String,
+}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_for_with_visibility_modifier() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Debug$0)]
+pub struct Foo {
+ bar: String,
+}
+ "#,
+ r#"
+pub struct Foo {
+ bar: String,
+}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_when_multiple_inputs() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Display, Debug$0, Serialize)]
+struct Foo {}
+ "#,
+ r#"
+#[derive(Display, Serialize)]
+struct Foo {}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_default_generic_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo<T, U> {
+ foo: T,
+ bar: U,
+}
+"#,
+ r#"
+struct Foo<T, U> {
+ foo: T,
+ bar: U,
+}
+
+impl<T, U> Default for Foo<T, U> {
+ $0fn default() -> Self {
+ Self { foo: Default::default(), bar: Default::default() }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_generic_tuple_struct_with_bounds() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo<T: Clone>(T, usize);
+"#,
+ r#"
+struct Foo<T: Clone>(T, usize);
+
+impl<T: Clone> Clone for Foo<T> {
+ $0fn clone(&self) -> Self {
+ Self(self.0.clone(), self.1.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_derive_macro_without_input() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive($0)]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_if_cursor_on_param() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+#[derive$0(Debug)]
+struct Foo {}
+ "#,
+ );
+
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+#[derive(Debug)$0]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_if_not_derive() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[allow(non_camel_$0case_types)]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn works_at_start_of_file() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+$0#[derive(Debug)]
+struct S;
+ "#,
+ );
+ }
+
+ #[test]
+ fn add_custom_impl_keep_path() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(std::fmt::Debug, Clo$0ne)]
+pub struct Foo;
+"#,
+ r#"
+#[derive(std::fmt::Debug)]
+pub struct Foo;
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_replace_path() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(core::fmt::Deb$0ug, Clone)]
+pub struct Foo;
+"#,
+ r#"
+#[derive(Clone)]
+pub struct Foo;
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").finish()
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
new file mode 100644
index 000000000..484c27387
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -0,0 +1,999 @@
+use std::iter::{self, successors};
+
+use either::Either;
+use ide_db::{
+ defs::NameClass,
+ syntax_helpers::node_ext::{is_pattern_cond, single_let},
+ ty_filter::TryEnum,
+ RootDatabase,
+};
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, HasName,
+ },
+ AstNode, TextRange,
+};
+
+use crate::{
+ utils::{does_nested_pattern, does_pat_match_variant, unwrap_trivial_block},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: replace_if_let_with_match
+//
+// Replaces a `if let` expression with a `match` expression.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// $0if let Action::Move { distance } = action {
+// foo(distance)
+// } else {
+// bar()
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } => foo(distance),
+// _ => bar(),
+// }
+// }
+// ```
+pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
+ // The assist is only offered while the cursor/selection sits between the `if`
+ // keyword and the opening of the then-branch (i.e. over `if let pat = expr`).
+ let available_range = TextRange::new(
+ if_expr.syntax().text_range().start(),
+ if_expr.then_branch()?.syntax().text_range().start(),
+ );
+ let cursor_in_range = available_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
+ // Walk the whole `if / else if / else if …` chain; the closure yields each
+ // `else if` expression and stashes the trailing plain `else { … }` block
+ // (if any) into `else_block` as a side effect.
+ let mut else_block = None;
+ let if_exprs = successors(Some(if_expr.clone()), |expr| match expr.else_branch()? {
+ ast::ElseBranch::IfExpr(expr) => Some(expr),
+ ast::ElseBranch::Block(block) => {
+ else_block = Some(block);
+ None
+ }
+ });
+ // The match scrutinee: for `if let pat = expr` take the `let`'s RHS,
+ // otherwise use the plain condition expression itself.
+ let scrutinee_to_be_expr = if_expr.condition()?;
+ let scrutinee_to_be_expr = match single_let(scrutinee_to_be_expr.clone()) {
+ Some(cond) => cond.expr()?,
+ None => scrutinee_to_be_expr,
+ };
+
+ // Collect each branch as either a pattern (from `if let`) or a plain boolean
+ // condition, which later becomes a `_ if cond =>` guard arm.
+ let mut pat_seen = false;
+ let mut cond_bodies = Vec::new();
+ for if_expr in if_exprs {
+ let cond = if_expr.condition()?;
+ let cond = match single_let(cond.clone()) {
+ Some(let_) => {
+ let pat = let_.pat()?;
+ let expr = let_.expr()?;
+ // FIXME: If one `let` is wrapped in parentheses and the second is not,
+ // we'll exit here.
+ // Comparison is purely textual — semantically equal but textually
+ // different scrutinees also bail out here.
+ if scrutinee_to_be_expr.syntax().text() != expr.syntax().text() {
+ // Only if all condition expressions are equal we can merge them into a match
+ return None;
+ }
+ pat_seen = true;
+ Either::Left(pat)
+ }
+ // Multiple `let`, unsupported.
+ None if is_pattern_cond(cond.clone()) => return None,
+ None => Either::Right(cond),
+ };
+ let body = if_expr.then_branch()?;
+ cond_bodies.push((cond, body));
+ }
+
+ if !pat_seen {
+ // Don't offer turning an if (chain) without patterns into a match
+ return None;
+ }
+
+ acc.add(
+ AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite),
+ "Replace if let with match",
+ available_range,
+ move |edit| {
+ let match_expr = {
+ // The trailing `else` (or a synthesized `_ => ()`) becomes the last arm.
+ let else_arm = make_else_arm(ctx, else_block, &cond_bodies);
+ // Pattern branches become `pat => body`; boolean branches become
+ // `_ if cond => body` guard arms.
+ let make_match_arm = |(pat, body): (_, ast::BlockExpr)| {
+ let body = body.reset_indent().indent(IndentLevel(1));
+ match pat {
+ Either::Left(pat) => {
+ make::match_arm(iter::once(pat), None, unwrap_trivial_block(body))
+ }
+ Either::Right(expr) => make::match_arm(
+ iter::once(make::wildcard_pat().into()),
+ Some(expr),
+ unwrap_trivial_block(body),
+ ),
+ }
+ };
+ let arms = cond_bodies.into_iter().map(make_match_arm).chain(iter::once(else_arm));
+ let match_expr = make::expr_match(scrutinee_to_be_expr, make::match_arm_list(arms));
+ match_expr.indent(IndentLevel::from_node(if_expr.syntax()))
+ };
+
+ let has_preceding_if_expr =
+ if_expr.syntax().parent().map_or(false, |it| ast::IfExpr::can_cast(it.kind()));
+ let expr = if has_preceding_if_expr {
+ // make sure we replace the `else if let ...` with a block so we don't end up with `else expr`
+ make::block_expr(None, Some(match_expr)).into()
+ } else {
+ match_expr
+ };
+ edit.replace_ast::<ast::Expr>(if_expr.into(), expr);
+ },
+ )
+}
+
+/// Builds the final arm of the generated `match`.
+///
+/// With an explicit `else` block: normally `_ => <else body>`, but when the chain
+/// is a single `if let` over an `Option`/`Result`-like type (`TryEnum`), a more
+/// precise pattern (the "sad" variant, e.g. `None`/`Err(_)`) is preferred so the
+/// match stays exhaustive without a wildcard. Without an `else`, emits `_ => ()`.
+fn make_else_arm(
+ ctx: &AssistContext<'_>,
+ else_block: Option<ast::BlockExpr>,
+ conditionals: &[(Either<ast::Pat, ast::Expr>, ast::BlockExpr)],
+) -> ast::MatchArm {
+ if let Some(else_block) = else_block {
+ // Special-case only applies to a single pattern branch; resolve its type
+ // and check whether it is a known try-enum (Option/Result).
+ let pattern = if let [(Either::Left(pat), _)] = conditionals {
+ ctx.sema
+ .type_of_pat(pat)
+ .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
+ .zip(Some(pat))
+ } else {
+ None
+ };
+ let pattern = match pattern {
+ Some((it, pat)) => {
+ // If the branch already matched the sad variant, the else arm gets the
+ // happy wildcard (e.g. `Some(_)`); nested patterns fall back to `_`.
+ if does_pat_match_variant(pat, &it.sad_pattern()) {
+ it.happy_pattern_wildcard()
+ } else if does_nested_pattern(pat) {
+ make::wildcard_pat().into()
+ } else {
+ it.sad_pattern()
+ }
+ }
+ None => make::wildcard_pat().into(),
+ };
+ make::match_arm(iter::once(pattern), None, unwrap_trivial_block(else_block))
+ } else {
+ make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit())
+ }
+}
+
+// Assist: replace_match_with_if_let
+//
+// Replaces a binary `match` with a wildcard pattern and no guards with an `if let` expression.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// $0match action {
+// Action::Move { distance } => foo(distance),
+// _ => bar(),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// if let Action::Move { distance } = action {
+// foo(distance)
+// } else {
+// bar()
+// }
+// }
+// ```
+pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let match_expr: ast::MatchExpr = ctx.find_node_at_offset()?;
+
+ // Only binary matches without guards are convertible to `if let … else`.
+ let mut arms = match_expr.match_arm_list()?.arms();
+ let (first_arm, second_arm) = (arms.next()?, arms.next()?);
+ if arms.next().is_some() || first_arm.guard().is_some() || second_arm.guard().is_some() {
+ return None;
+ }
+
+ // Decide which arm becomes the `if let` pattern/then-body and which becomes
+ // the `else` body (see `pick_pattern_and_expr_order` for the heuristics).
+ let (if_let_pat, then_expr, else_expr) = pick_pattern_and_expr_order(
+ &ctx.sema,
+ first_arm.pat()?,
+ second_arm.pat()?,
+ first_arm.expr()?,
+ second_arm.expr()?,
+ )?;
+ let scrutinee = match_expr.expr()?;
+
+ let target = match_expr.syntax().text_range();
+ acc.add(
+ AssistId("replace_match_with_if_let", AssistKind::RefactorRewrite),
+ "Replace match with if let",
+ target,
+ move |edit| {
+ fn make_block_expr(expr: ast::Expr) -> ast::BlockExpr {
+ // Blocks with modifiers (unsafe, async, etc.) are parsed as BlockExpr, but are
+ // formatted without enclosing braces. If we encounter such block exprs,
+ // wrap them in another BlockExpr.
+ match expr {
+ ast::Expr::BlockExpr(block) if block.modifier().is_none() => block,
+ expr => make::block_expr(iter::empty(), Some(expr)),
+ }
+ }
+
+ let condition = make::expr_let(if_let_pat, scrutinee);
+ let then_block = make_block_expr(then_expr.reset_indent());
+ // An empty else body (`()` or `{}`) is dropped entirely instead of
+ // emitting an empty `else { }` block.
+ let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) };
+ let if_let_expr = make::expr_if(
+ condition.into(),
+ then_block,
+ else_expr.map(make_block_expr).map(ast::ElseBranch::Block),
+ )
+ .indent(IndentLevel::from_node(match_expr.syntax()));
+
+ edit.replace_ast::<ast::Expr>(match_expr.into(), if_let_expr);
+ },
+ )
+}
+
+/// Pick the pattern for the if let condition and return the expressions for the `then` body and `else` body in that order.
+///
+/// Returns `None` when the rewrite would be lossy or ambiguous: a wildcard as the
+/// first arm, or both arms binding names (neither can safely become the `else`).
+/// Heuristics, in priority order: prefer the non-wildcard arm, prefer the arm
+/// with a non-empty body, prefer the arm that binds names, and for try-enums
+/// avoid putting the "sad" pattern (e.g. `Err(_)`/`None`) into the `if let`.
+/// NOTE(review): the arm order below is load-bearing — the `_ if is_sad_pat`
+/// guard must stay between `(false, true)` and `(false, false)`.
+fn pick_pattern_and_expr_order(
+ sema: &hir::Semantics<'_, RootDatabase>,
+ pat: ast::Pat,
+ pat2: ast::Pat,
+ expr: ast::Expr,
+ expr2: ast::Expr,
+) -> Option<(ast::Pat, ast::Expr, ast::Expr)> {
+ let res = match (pat, pat2) {
+ (ast::Pat::WildcardPat(_), _) => return None,
+ (pat, ast::Pat::WildcardPat(_)) => (pat, expr, expr2),
+ (pat, _) if is_empty_expr(&expr2) => (pat, expr, expr2),
+ (_, pat) if is_empty_expr(&expr) => (pat, expr2, expr),
+ (pat, pat2) => match (binds_name(sema, &pat), binds_name(sema, &pat2)) {
+ (true, true) => return None,
+ (true, false) => (pat, expr, expr2),
+ (false, true) => (pat2, expr2, expr),
+ _ if is_sad_pat(sema, &pat) => (pat2, expr2, expr),
+ (false, false) => (pat, expr, expr2),
+ },
+ };
+ Some(res)
+}
+
+/// Whether `expr` is "empty": the unit expression `()`, an empty tuple, a block
+/// with no statements and no tail expression, or a block with no statement list.
+fn is_empty_expr(expr: &ast::Expr) -> bool {
+ match expr {
+ ast::Expr::BlockExpr(expr) => match expr.stmt_list() {
+ Some(it) => it.statements().next().is_none() && it.tail_expr().is_none(),
+ None => true,
+ },
+ ast::Expr::TupleExpr(expr) => expr.fields().next().is_none(),
+ _ => false,
+ }
+}
+
+/// Recursively checks whether `pat` introduces any name bindings.
+///
+/// An `IdentPat` counts as a binding unless name resolution classifies it as a
+/// reference to a constant (matching a const, not binding a new name). Macro
+/// patterns are conservatively assumed to bind. Composite patterns delegate to
+/// their sub-patterns; anything else (literals, paths, wildcards, …) does not bind.
+fn binds_name(sema: &hir::Semantics<'_, RootDatabase>, pat: &ast::Pat) -> bool {
+ let binds_name_v = |pat| binds_name(sema, &pat);
+ match pat {
+ ast::Pat::IdentPat(pat) => !matches!(
+ pat.name().and_then(|name| NameClass::classify(sema, &name)),
+ Some(NameClass::ConstReference(_))
+ ),
+ ast::Pat::MacroPat(_) => true,
+ ast::Pat::OrPat(pat) => pat.pats().any(binds_name_v),
+ ast::Pat::SlicePat(pat) => pat.pats().any(binds_name_v),
+ ast::Pat::TuplePat(it) => it.fields().any(binds_name_v),
+ ast::Pat::TupleStructPat(it) => it.fields().any(binds_name_v),
+ ast::Pat::RecordPat(it) => it
+ .record_pat_field_list()
+ .map_or(false, |rpfl| rpfl.fields().flat_map(|rpf| rpf.pat()).any(binds_name_v)),
+ ast::Pat::RefPat(pat) => pat.pat().map_or(false, binds_name_v),
+ ast::Pat::BoxPat(pat) => pat.pat().map_or(false, binds_name_v),
+ ast::Pat::ParenPat(pat) => pat.pat().map_or(false, binds_name_v),
+ _ => false,
+ }
+}
+
+/// Whether `pat` matches the "sad" variant (`Err(_)` / `None`) of a known
+/// try-enum (`Option`/`Result`); `false` if the type can't be resolved or is
+/// not a try-enum.
+fn is_sad_pat(sema: &hir::Semantics<'_, RootDatabase>, pat: &ast::Pat) -> bool {
+ sema.type_of_pat(pat)
+ .and_then(|ty| TryEnum::from_ty(sema, &ty.adjusted()))
+ .map_or(false, |it| does_pat_match_variant(pat, &it.sad_pattern()))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn test_if_let_with_match_unapplicable_for_simple_ifs() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+fn main() {
+ if $0true {} else if false {} else {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_no_else() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn foo(&self) {
+ if $0let VariantData::Struct(..) = *self {
+ self.foo();
+ }
+ }
+}
+"#,
+ r#"
+impl VariantData {
+ pub fn foo(&self) {
+ match *self {
+ VariantData::Struct(..) => {
+ self.foo();
+ }
+ _ => (),
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_available_range_left() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn foo(&self) {
+ $0 if let VariantData::Struct(..) = *self {
+ self.foo();
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_available_range_right() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn foo(&self) {
+ if let VariantData::Struct(..) = *self {$0
+ self.foo();
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_let_chain() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+fn main() {
+ if $0let true = true && let Some(1) = None {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_basic() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if $0let VariantData::Struct(..) = *self {
+ true
+ } else if let VariantData::Tuple(..) = *self {
+ false
+ } else if cond() {
+ true
+ } else {
+ bar(
+ 123
+ )
+ }
+ }
+}
+"#,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ match *self {
+ VariantData::Struct(..) => true,
+ VariantData::Tuple(..) => false,
+ _ if cond() => true,
+ _ => {
+ bar(
+ 123
+ )
+ }
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_on_tail_if_let() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if let VariantData::Struct(..) = *self {
+ true
+ } else if let$0 VariantData::Tuple(..) = *self {
+ false
+ } else {
+ false
+ }
+ }
+}
+"#,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if let VariantData::Struct(..) = *self {
+ true
+ } else {
+ match *self {
+ VariantData::Tuple(..) => false,
+ _ => false,
+ }
+}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn special_case_option() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: option
+fn foo(x: Option<i32>) {
+ $0if let Some(x) = x {
+ println!("{}", x)
+ } else {
+ println!("none")
+ }
+}
+"#,
+ r#"
+fn foo(x: Option<i32>) {
+ match x {
+ Some(x) => println!("{}", x),
+ None => println!("none"),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn special_case_inverted_option() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: option
+fn foo(x: Option<i32>) {
+ $0if let None = x {
+ println!("none")
+ } else {
+ println!("some")
+ }
+}
+"#,
+ r#"
+fn foo(x: Option<i32>) {
+ match x {
+ None => println!("none"),
+ Some(_) => println!("some"),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn special_case_result() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: result
+fn foo(x: Result<i32, ()>) {
+ $0if let Ok(x) = x {
+ println!("{}", x)
+ } else {
+ println!("none")
+ }
+}
+"#,
+ r#"
+fn foo(x: Result<i32, ()>) {
+ match x {
+ Ok(x) => println!("{}", x),
+ Err(_) => println!("none"),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn special_case_inverted_result() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: result
+fn foo(x: Result<i32, ()>) {
+ $0if let Err(x) = x {
+ println!("{}", x)
+ } else {
+ println!("ok")
+ }
+}
+"#,
+ r#"
+fn foo(x: Result<i32, ()>) {
+ match x {
+ Err(x) => println!("{}", x),
+ Ok(_) => println!("ok"),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_indent() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+fn main() {
+ if true {
+ $0if let Ok(rel_path) = path.strip_prefix(root_path) {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ } else {
+ None
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ match path.strip_prefix(root_path) {
+ Ok(rel_path) => {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ }
+ _ => None,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn nested_type() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: result
+fn foo(x: Result<i32, ()>) {
+ let bar: Result<_, ()> = Ok(Some(1));
+ $0if let Ok(Some(_)) = bar {
+ ()
+ } else {
+ ()
+ }
+}
+"#,
+ r#"
+fn foo(x: Result<i32, ()>) {
+ let bar: Result<_, ()> = Ok(Some(1));
+ match bar {
+ Ok(Some(_)) => (),
+ _ => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_replace_match_with_if_let_unwraps_simple_expressions() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ $0match *self {
+ VariantData::Struct(..) => true,
+ _ => false,
+ }
+ }
+} "#,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if let VariantData::Struct(..) = *self {
+ true
+ } else {
+ false
+ }
+ }
+} "#,
+ )
+ }
+
+ #[test]
+ fn test_replace_match_with_if_let_doesnt_unwrap_multiline_expressions() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ $0match a {
+ VariantData::Struct(..) => {
+ bar(
+ 123
+ )
+ }
+ _ => false,
+ }
+} "#,
+ r#"
+fn foo() {
+ if let VariantData::Struct(..) = a {
+ bar(
+ 123
+ )
+ } else {
+ false
+ }
+} "#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_target() {
+ check_assist_target(
+ replace_match_with_if_let,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ $0match *self {
+ VariantData::Struct(..) => true,
+ _ => false,
+ }
+ }
+} "#,
+ r#"match *self {
+ VariantData::Struct(..) => true,
+ _ => false,
+ }"#,
+ );
+ }
+
+ #[test]
+ fn special_case_option_match_to_if_let() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+//- minicore: option
+fn foo(x: Option<i32>) {
+ $0match x {
+ Some(x) => println!("{}", x),
+ None => println!("none"),
+ }
+}
+"#,
+ r#"
+fn foo(x: Option<i32>) {
+ if let Some(x) = x {
+ println!("{}", x)
+ } else {
+ println!("none")
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn special_case_result_match_to_if_let() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+//- minicore: result
+fn foo(x: Result<i32, ()>) {
+ $0match x {
+ Ok(x) => println!("{}", x),
+ Err(_) => println!("none"),
+ }
+}
+"#,
+ r#"
+fn foo(x: Result<i32, ()>) {
+ if let Ok(x) = x {
+ println!("{}", x)
+ } else {
+ println!("none")
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_indent_match_to_if_let() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ if true {
+ $0match path.strip_prefix(root_path) {
+ Ok(rel_path) => {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ }
+ _ => None,
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ if let Ok(rel_path) = path.strip_prefix(root_path) {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ } else {
+ None
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_empty_wildcard_expr() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ $0match path.strip_prefix(root_path) {
+ Ok(rel_path) => println!("{}", rel_path),
+ _ => (),
+ }
+}
+"#,
+ r#"
+fn main() {
+ if let Ok(rel_path) = path.strip_prefix(root_path) {
+ println!("{}", rel_path)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_number_body() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ $0match Ok(()) {
+ Ok(()) => {},
+ Err(_) => 0,
+ }
+}
+"#,
+ r#"
+fn main() {
+ if let Err(_) = Ok(()) {
+ 0
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_exhaustive() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn print_source(def_source: ModuleSource) {
+ match def_so$0urce {
+ ModuleSource::SourceFile(..) => { println!("source file"); }
+ ModuleSource::Module(..) => { println!("module"); }
+ }
+}
+"#,
+ r#"
+fn print_source(def_source: ModuleSource) {
+ if let ModuleSource::SourceFile(..) = def_source { println!("source file"); } else { println!("module"); }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_prefer_name_bind() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Foo(0) {
+ Foo(_) => (),
+ Bar(bar) => println!("bar {}", bar),
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if let Bar(bar) = Foo(0) {
+ println!("bar {}", bar)
+ }
+}
+"#,
+ );
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Foo(0) {
+ Bar(bar) => println!("bar {}", bar),
+ Foo(_) => (),
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if let Bar(bar) = Foo(0) {
+ println!("bar {}", bar)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_match_with_if_let_prefer_nonempty_body() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Ok(0) {
+ Ok(value) => {},
+ Err(err) => eprintln!("{}", err),
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if let Err(err) = Ok(0) {
+ eprintln!("{}", err)
+ }
+}
+"#,
+ );
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Ok(0) {
+ Err(err) => eprintln!("{}", err),
+ Ok(value) => {},
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if let Err(err) = Ok(0) {
+ eprintln!("{}", err)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_match_with_if_let_rejects_double_name_bindings() {
+ check_assist_not_applicable(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Foo(0) {
+ Foo(foo) => println!("bar {}", foo),
+ Bar(bar) => println!("bar {}", bar),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_replace_match_with_if_let_keeps_unsafe_block() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ $0match *self {
+ VariantData::Struct(..) => true,
+ _ => unsafe { unreachable_unchecked() },
+ }
+ }
+} "#,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if let VariantData::Struct(..) = *self {
+ true
+ } else {
+ unsafe { unreachable_unchecked() }
+ }
+ }
+} "#,
+ )
+ }
+
+ #[test]
+ fn test_replace_match_with_if_let_forces_else() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ match$0 0 {
+ 0 => (),
+ _ => code(),
+ }
+}
+"#,
+ r#"
+fn main() {
+ if let 0 = 0 {
+ ()
+ } else {
+ code()
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
new file mode 100644
index 000000000..c2be4593b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
@@ -0,0 +1,100 @@
+use std::iter::once;
+
+use ide_db::ty_filter::TryEnum;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
+ },
+ AstNode, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: replace_let_with_if_let
+//
+// Replaces `let` with an `if let`.
+//
+// ```
+// # enum Option<T> { Some(T), None }
+//
+// fn main(action: Action) {
+// $0let x = compute();
+// }
+//
+// fn compute() -> Option<i32> { None }
+// ```
+// ->
+// ```
+// # enum Option<T> { Some(T), None }
+//
+// fn main(action: Action) {
+// if let Some(x) = compute() {
+// }
+// }
+//
+// fn compute() -> Option<i32> { None }
+// ```
+pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // Trigger only when the cursor is directly on the `let` keyword of a let-statement
+ // that has both a pattern and an initializer.
+ let let_kw = ctx.find_token_syntax_at_offset(T![let])?;
+ let let_stmt = let_kw.parent().and_then(ast::LetStmt::cast)?;
+ let init = let_stmt.initializer()?;
+ let original_pat = let_stmt.pat()?;
+
+ let target = let_kw.text_range();
+ acc.add(
+ AssistId("replace_let_with_if_let", AssistKind::RefactorRewrite),
+ "Replace let with if let",
+ target,
+ |edit| {
+ // For Option/Result-like initializers, wrap the original pattern in the
+ // happy variant (e.g. `x` -> `Some(x)`); otherwise reuse the pattern as-is,
+ // producing an irrefutable `if let pat = init`.
+ let ty = ctx.sema.type_of_expr(&init);
+ let happy_variant = ty
+ .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
+ .map(|it| it.happy_case());
+ let pat = match happy_variant {
+ None => original_pat,
+ Some(var_name) => {
+ make::tuple_struct_pat(make::ext::ident_path(var_name), once(original_pat))
+ .into()
+ }
+ };
+
+ // The new `if let` gets an empty body, indented to match the original statement.
+ let block =
+ make::ext::empty_block_expr().indent(IndentLevel::from_node(let_stmt.syntax()));
+ let if_ = make::expr_if(make::expr_let(pat, init).into(), block, None);
+ let stmt = make::expr_stmt(if_);
+
+ edit.replace_ast(ast::Stmt::from(let_stmt), ast::Stmt::from(stmt));
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_assist;
+
+ use super::*;
+
+ #[test]
+ fn replace_let_unknown_enum() {
+ check_assist(
+ replace_let_with_if_let,
+ r"
+enum E<T> { X(T), Y(T) }
+
+fn main() {
+ $0let x = E::X(92);
+}
+ ",
+ r"
+enum E<T> { X(T), Y(T) }
+
+fn main() {
+ if let x = E::X(92) {
+ }
+}
+ ",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
new file mode 100644
index 000000000..2419fa11c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -0,0 +1,438 @@
+use hir::AsAssocItem;
+use ide_db::{
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+};
+use syntax::{
+ ast::{self, make},
+ match_ast, ted, AstNode, SyntaxNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: replace_qualified_name_with_use
+//
+// Adds a use statement for a given fully-qualified name.
+//
+// ```
+// # mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+// fn process(map: std::collections::$0HashMap<String, String>) {}
+// ```
+// ->
+// ```
+// use std::collections::HashMap;
+//
+// # mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+// fn process(map: HashMap<String, String>) {}
+// ```
pub(crate) fn replace_qualified_name_with_use(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
) -> Option<()> {
    let path: ast::Path = ctx.find_node_at_offset()?;
    // We don't want to mess with use statements
    if path.syntax().ancestors().find_map(ast::UseTree::cast).is_some() {
        cov_mark::hit!(not_applicable_in_use);
        return None;
    }

    // A path without a qualifier has nothing to import.
    if path.qualifier().is_none() {
        cov_mark::hit!(dont_import_trivial_paths);
        return None;
    }

    // only offer replacement for non assoc items
    match ctx.sema.resolve_path(&path)? {
        hir::PathResolution::Def(def) if def.as_assoc_item(ctx.sema.db).is_none() => (),
        _ => return None,
    }
    // then search for an import for the first path segment of what we want to replace
    // that way it is less likely that we import the item from a different location due re-exports
    let module = match ctx.sema.resolve_path(&path.first_qualifier_or_self())? {
        hir::PathResolution::Def(module @ hir::ModuleDef::Module(_)) => module,
        _ => return None,
    };

    // Paths anchored by `crate`/`super`/`self` need no computed import for their
    // first qualifier; only plain name-relative paths do.
    let starts_with_name_ref = !matches!(
        path.first_segment().and_then(|it| it.kind()),
        Some(
            ast::PathSegmentKind::CrateKw
                | ast::PathSegmentKind::SuperKw
                | ast::PathSegmentKind::SelfKw
        )
    );
    let path_to_qualifier = starts_with_name_ref
        .then(|| {
            ctx.sema.scope(path.syntax())?.module().find_use_path_prefixed(
                ctx.sema.db,
                module,
                ctx.config.insert_use.prefix_kind,
            )
        })
        .flatten();

    let scope = ImportScope::find_insert_use_container(path.syntax(), &ctx.sema)?;
    let target = path.syntax().text_range();
    acc.add(
        AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite),
        "Replace qualified path with use",
        target,
        |builder| {
            // Now that we've brought the name into scope, re-qualify all paths that could be
            // affected (that is, all paths inside the node we added the `use` to).
            let scope = match scope {
                ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
                ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
                ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
            };
            shorten_paths(scope.as_syntax_node(), &path);
            // The inserted `use` must not carry the turbofish/generic arguments
            // of the original path expression.
            let path = drop_generic_args(&path);
            // stick the found import in front of the to be replaced path
            let path = match path_to_qualifier.and_then(|it| mod_path_to_ast(&it).qualifier()) {
                Some(qualifier) => make::path_concat(qualifier, path),
                None => path,
            };
            insert_use(&scope, path, &ctx.config.insert_use);
        },
    )
}
+
+fn drop_generic_args(path: &ast::Path) -> ast::Path {
+ let path = path.clone_for_update();
+ if let Some(segment) = path.segment() {
+ if let Some(generic_args) = segment.generic_arg_list() {
+ ted::remove(generic_args.syntax());
+ }
+ }
+ path
+}
+
/// Mutates `node` to shorten `path` in all descendants of `node`.
fn shorten_paths(node: &SyntaxNode, path: &ast::Path) {
    for child in node.children() {
        match_ast! {
            match child {
                // Don't modify `use` items, as this can break the `use` item when injecting a new
                // import into the use tree.
                ast::Use(_) => continue,
                // Don't descend into submodules, they don't have the same `use` items in scope.
                // FIXME: This isn't true due to `super::*` imports?
                ast::Module(_) => continue,
                // If this path could not be shortened (it didn't match `path`),
                // its qualifier may still contain matching sub-paths — recurse.
                ast::Path(p) => if maybe_replace_path(p.clone(), path.clone()).is_none() {
                    shorten_paths(p.syntax(), path);
                },
                // Any other node: keep walking the tree.
                _ => shorten_paths(&child, path),
            }
        }
    }
}
+
+fn maybe_replace_path(path: ast::Path, target: ast::Path) -> Option<()> {
+ if !path_eq_no_generics(path.clone(), target) {
+ return None;
+ }
+
+ // Shorten `path`, leaving only its last segment.
+ if let Some(parent) = path.qualifier() {
+ ted::remove(parent.syntax());
+ }
+ if let Some(double_colon) = path.coloncolon_token() {
+ ted::remove(&double_colon);
+ }
+
+ Some(())
+}
+
+fn path_eq_no_generics(lhs: ast::Path, rhs: ast::Path) -> bool {
+ let mut lhs_curr = lhs;
+ let mut rhs_curr = rhs;
+ loop {
+ match lhs_curr.segment().zip(rhs_curr.segment()) {
+ Some((lhs, rhs))
+ if lhs.coloncolon_token().is_some() == rhs.coloncolon_token().is_some()
+ && lhs
+ .name_ref()
+ .zip(rhs.name_ref())
+ .map_or(false, |(lhs, rhs)| lhs.text() == rhs.text()) => {}
+ _ => return false,
+ }
+
+ match (lhs_curr.qualifier(), rhs_curr.qualifier()) {
+ (Some(lhs), Some(rhs)) => {
+ lhs_curr = lhs;
+ rhs_curr = rhs;
+ }
+ (None, None) => return true,
+ _ => return false,
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::*;

    // Fixtures use `$0` to mark the cursor; local `mod std`/`mod foo` stand in
    // for external crates so the tests stay self-contained.
    #[test]
    fn test_replace_already_imported() {
        check_assist(
            replace_qualified_name_with_use,
            r"
mod std { pub mod fs { pub struct Path; } }
use std::fs;

fn main() {
    std::f$0s::Path
}",
            r"
mod std { pub mod fs { pub struct Path; } }
use std::fs;

fn main() {
    fs::Path
}",
        )
    }

    #[test]
    fn test_replace_add_use_no_anchor() {
        check_assist(
            replace_qualified_name_with_use,
            r"
mod std { pub mod fs { pub struct Path; } }
std::fs::Path$0
    ",
            r"
use std::fs::Path;

mod std { pub mod fs { pub struct Path; } }
Path
    ",
        );
    }

    #[test]
    fn test_replace_add_use_no_anchor_middle_segment() {
        check_assist(
            replace_qualified_name_with_use,
            r"
mod std { pub mod fs { pub struct Path; } }
std::fs$0::Path
    ",
            r"
use std::fs;

mod std { pub mod fs { pub struct Path; } }
fs::Path
    ",
        );
    }

    #[test]
    fn dont_import_trivial_paths() {
        cov_mark::check!(dont_import_trivial_paths);
        check_assist_not_applicable(replace_qualified_name_with_use, r"impl foo$0 for () {}");
    }

    #[test]
    fn test_replace_not_applicable_in_use() {
        cov_mark::check!(not_applicable_in_use);
        check_assist_not_applicable(replace_qualified_name_with_use, r"use std::fmt$0;");
    }

    #[test]
    fn replaces_all_affected_paths() {
        check_assist(
            replace_qualified_name_with_use,
            r"
mod std { pub mod fmt { pub trait Debug {} } }
fn main() {
    std::fmt::Debug$0;
    let x: std::fmt::Debug = std::fmt::Debug;
}
    ",
            r"
use std::fmt::Debug;

mod std { pub mod fmt { pub trait Debug {} } }
fn main() {
    Debug;
    let x: Debug = Debug;
}
    ",
        );
    }

    #[test]
    fn does_not_replace_in_submodules() {
        check_assist(
            replace_qualified_name_with_use,
            r"
mod std { pub mod fmt { pub trait Debug {} } }
fn main() {
    std::fmt::Debug$0;
}

mod sub {
    fn f() {
        std::fmt::Debug;
    }
}
    ",
            r"
use std::fmt::Debug;

mod std { pub mod fmt { pub trait Debug {} } }
fn main() {
    Debug;
}

mod sub {
    fn f() {
        std::fmt::Debug;
    }
}
    ",
        );
    }

    #[test]
    fn does_not_replace_in_use() {
        check_assist(
            replace_qualified_name_with_use,
            r"
mod std { pub mod fmt { pub trait Display {} } }
use std::fmt::Display;

fn main() {
    std::fmt$0;
}
    ",
            r"
mod std { pub mod fmt { pub trait Display {} } }
use std::fmt::{Display, self};

fn main() {
    fmt;
}
    ",
        );
    }

    #[test]
    fn does_not_replace_assoc_item_path() {
        check_assist_not_applicable(
            replace_qualified_name_with_use,
            r"
pub struct Foo;
impl Foo {
    pub fn foo() {}
}

fn main() {
    Foo::foo$0();
}
",
        );
    }

    // Even though `bar` re-exports the item, the inserted import keeps the
    // qualifier the code already used (`foo::Foo`).
    #[test]
    fn replace_reuses_path_qualifier() {
        check_assist(
            replace_qualified_name_with_use,
            r"
pub mod foo {
    pub struct Foo;
}

mod bar {
    pub use super::foo::Foo as Bar;
}

fn main() {
    foo::Foo$0;
}
",
            r"
use foo::Foo;

mod bar {
    pub use super::foo::Foo as Bar;
}

fn main() {
    Foo;
}
",
        );
    }

    #[test]
    fn replace_does_not_always_try_to_replace_by_full_item_path() {
        check_assist(
            replace_qualified_name_with_use,
            r"
use std::mem;

mod std {
    pub mod mem {
        pub fn drop<T>(_: T) {}
    }
}

fn main() {
    mem::drop$0(0);
}
",
            r"
use std::mem::{self, drop};

mod std {
    pub mod mem {
        pub fn drop<T>(_: T) {}
    }
}

fn main() {
    drop(0);
}
",
        );
    }

    #[test]
    fn replace_should_drop_generic_args_in_use() {
        check_assist(
            replace_qualified_name_with_use,
            r"
mod std {
    pub mod mem {
        pub fn drop<T>(_: T) {}
    }
}

fn main() {
    std::mem::drop::<usize>$0(0);
}
",
            r"
use std::mem::drop;

mod std {
    pub mod mem {
        pub fn drop<T>(_: T) {}
    }
}

fn main() {
    drop::<usize>(0);
}
",
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs
new file mode 100644
index 000000000..decb5fb62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs
@@ -0,0 +1,307 @@
+use syntax::{
+ ast,
+ ast::IsString,
+ AstToken,
+ SyntaxKind::{CHAR, STRING},
+ TextRange, TextSize,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: replace_string_with_char
+//
+// Replace string literal with char literal.
+//
+// ```
+// fn main() {
+// find("{$0");
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// find('{');
+// }
+// ```
+pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let token = ctx.find_token_syntax_at_offset(STRING).and_then(ast::String::cast)?;
+ let value = token.value()?;
+ let target = token.syntax().text_range();
+
+ if value.chars().take(2).count() != 1 {
+ return None;
+ }
+ let quote_offets = token.quote_offsets()?;
+
+ acc.add(
+ AssistId("replace_string_with_char", AssistKind::RefactorRewrite),
+ "Replace string with char",
+ target,
+ |edit| {
+ let (left, right) = quote_offets.quotes;
+ edit.replace(left, '\'');
+ edit.replace(right, '\'');
+ if value == "'" {
+ edit.insert(left.end(), '\\');
+ }
+ },
+ )
+}
+
+// Assist: replace_char_with_string
+//
+// Replace a char literal with a string literal.
+//
+// ```
+// fn main() {
+// find('{$0');
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// find("{");
+// }
+// ```
+pub(crate) fn replace_char_with_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let token = ctx.find_token_syntax_at_offset(CHAR)?;
+ let target = token.text_range();
+
+ acc.add(
+ AssistId("replace_char_with_string", AssistKind::RefactorRewrite),
+ "Replace char with string",
+ target,
+ |edit| {
+ if token.text() == "'\"'" {
+ edit.replace(token.text_range(), r#""\"""#);
+ } else {
+ let len = TextSize::of('\'');
+ edit.replace(TextRange::at(target.start(), len), '"');
+ edit.replace(TextRange::at(target.end() - len, len), '"');
+ }
+ },
+ )
+}
+
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::*;

    // `$0` marks the cursor position inside each fixture.
    #[test]
    fn replace_string_with_char_assist() {
        check_assist(
            replace_string_with_char,
            r#"
fn f() {
    let s = "$0c";
}
"#,
            r##"
fn f() {
    let s = 'c';
}
"##,
        )
    }

    #[test]
    fn replace_string_with_char_assist_with_multi_byte_char() {
        check_assist(
            replace_string_with_char,
            r#"
fn f() {
    let s = "$0😀";
}
"#,
            r##"
fn f() {
    let s = '😀';
}
"##,
        )
    }

    #[test]
    fn replace_string_with_char_multiple_chars() {
        check_assist_not_applicable(
            replace_string_with_char,
            r#"
fn f() {
    let s = "$0test";
}
"#,
        )
    }

    #[test]
    fn replace_string_with_char_works_inside_macros() {
        check_assist(
            replace_string_with_char,
            r#"
fn f() {
    format!($0"x", 92)
}
"#,
            r##"
fn f() {
    format!('x', 92)
}
"##,
        )
    }

    #[test]
    fn replace_string_with_char_newline() {
        check_assist(
            replace_string_with_char,
            r#"
fn f() {
    find($0"\n");
}
"#,
            r##"
fn f() {
    find('\n');
}
"##,
        )
    }

    #[test]
    fn replace_string_with_char_unicode_escape() {
        check_assist(
            replace_string_with_char,
            r#"
fn f() {
    find($0"\u{7FFF}");
}
"#,
            r##"
fn f() {
    find('\u{7FFF}');
}
"##,
        )
    }

    #[test]
    fn replace_raw_string_with_char() {
        check_assist(
            replace_string_with_char,
            r##"
fn f() {
    $0r#"X"#
}
"##,
            r##"
fn f() {
    'X'
}
"##,
        )
    }

    #[test]
    fn replace_char_with_string_assist() {
        check_assist(
            replace_char_with_string,
            r"
fn f() {
    let s = '$0c';
}
",
            r#"
fn f() {
    let s = "c";
}
"#,
        )
    }

    #[test]
    fn replace_char_with_string_assist_with_multi_byte_char() {
        check_assist(
            replace_char_with_string,
            r"
fn f() {
    let s = '$0😀';
}
",
            r#"
fn f() {
    let s = "😀";
}
"#,
        )
    }

    #[test]
    fn replace_char_with_string_newline() {
        check_assist(
            replace_char_with_string,
            r"
fn f() {
    find($0'\n');
}
",
            r#"
fn f() {
    find("\n");
}
"#,
        )
    }

    #[test]
    fn replace_char_with_string_unicode_escape() {
        check_assist(
            replace_char_with_string,
            r"
fn f() {
    find($0'\u{7FFF}');
}
",
            r#"
fn f() {
    find("\u{7FFF}");
}
"#,
        )
    }

    // The double-quote character must be escaped once it becomes a string literal.
    #[test]
    fn replace_char_with_string_quote() {
        check_assist(
            replace_char_with_string,
            r#"
fn f() {
    find($0'"');
}
"#,
            r#"
fn f() {
    find("\"");
}
"#,
        )
    }

    // The single-quote character must be escaped once it becomes a char literal.
    #[test]
    fn replace_string_with_char_quote() {
        check_assist(
            replace_string_with_char,
            r#"
fn f() {
    find($0"'");
}
"#,
            r#"
fn f() {
    find('\'');
}
"#,
        )
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
new file mode 100644
index 000000000..38fccb338
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
@@ -0,0 +1,150 @@
+use std::iter;
+
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ ty_filter::TryEnum,
+};
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
+ },
+ AstNode, T,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: replace_try_expr_with_match
+//
+// Replaces a `try` expression with a `match` expression.
+//
+// ```
+// # //- minicore:option
+// fn handle() {
+// let pat = Some(true)$0?;
+// }
+// ```
+// ->
+// ```
+// fn handle() {
+// let pat = match Some(true) {
+// Some(it) => it,
+// None => return None,
+// };
+// }
+// ```
pub(crate) fn replace_try_expr_with_match(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
) -> Option<()> {
    // Trigger on the `?` token; its parent must be a try-expression.
    let qm_kw = ctx.find_token_syntax_at_offset(T![?])?;
    let qm_kw_parent = qm_kw.parent().and_then(ast::TryExpr::cast)?;

    let expr = qm_kw_parent.expr()?;
    let expr_type_info = ctx.sema.type_of_expr(&expr)?;

    // Only offered when the tried expression is a known "try" enum (Option/Result).
    let try_enum = TryEnum::from_ty(&ctx.sema, &expr_type_info.original)?;

    let target = qm_kw_parent.syntax().text_range();
    acc.add(
        AssistId("replace_try_expr_with_match", AssistKind::RefactorRewrite),
        "Replace try expression with match",
        target,
        |edit| {
            // Pattern for the unhappy arm: `None` or `Err(err)`.
            let sad_pat = match try_enum {
                TryEnum::Option => make::path_pat(make::ext::ident_path("None")),
                TryEnum::Result => make::tuple_struct_pat(
                    make::ext::ident_path("Err"),
                    iter::once(make::path_pat(make::ext::ident_path("err"))),
                )
                .into(),
            };
            // Body for the unhappy arm: an early return mirroring what `?` does.
            let sad_expr = match try_enum {
                TryEnum::Option => {
                    make::expr_return(Some(make::expr_path(make::ext::ident_path("None"))))
                }
                TryEnum::Result => make::expr_return(Some(make::expr_call(
                    make::expr_path(make::ext::ident_path("Err")),
                    make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))),
                ))),
            };

            // Happy arm binds the payload as `it` and yields it.
            let happy_arm = make::match_arm(
                iter::once(
                    try_enum.happy_pattern(make::ident_pat(false, false, make::name("it")).into()),
                ),
                None,
                make::expr_path(make::ext::ident_path("it")),
            );
            let sad_arm = make::match_arm(iter::once(sad_pat), None, sad_expr);

            let match_arm_list = make::match_arm_list([happy_arm, sad_arm]);

            // Indent the generated `match` to the level of the original try expression.
            let expr_match = make::expr_match(expr, match_arm_list)
                .indent(IndentLevel::from_node(qm_kw_parent.syntax()));
            edit.replace_ast::<ast::Expr>(qm_kw_parent.into(), expr_match);
        },
    )
}
+
#[cfg(test)]
mod tests {
    use super::*;

    use crate::tests::{check_assist, check_assist_not_applicable};

    // No `?` at the cursor: nothing to replace.
    #[test]
    fn test_replace_try_expr_with_match_not_applicable() {
        check_assist_not_applicable(
            replace_try_expr_with_match,
            r#"
    fn test() {
        let pat: u32 = 25$0;
    }
    "#,
        );
    }

    #[test]
    fn test_replace_try_expr_with_match_option() {
        check_assist(
            replace_try_expr_with_match,
            r#"
//- minicore:option
fn test() {
    let pat = Some(true)$0?;
}
    "#,
            r#"
fn test() {
    let pat = match Some(true) {
        Some(it) => it,
        None => return None,
    };
}
    "#,
        );
    }

    #[test]
    fn test_replace_try_expr_with_match_result() {
        check_assist(
            replace_try_expr_with_match,
            r#"
//- minicore:result
fn test() {
    let pat = Ok(true)$0?;
}
    "#,
            r#"
fn test() {
    let pat = match Ok(true) {
        Ok(it) => it,
        Err(err) => return Err(err),
    };
}
    "#,
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
new file mode 100644
index 000000000..6112e0945
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
@@ -0,0 +1,243 @@
+use syntax::{
+ ast::{Expr, GenericArg},
+ ast::{LetStmt, Type::InferType},
+ AstNode, TextRange,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: replace_turbofish_with_explicit_type
+//
+// Converts `::<_>` to an explicit type assignment.
+//
+// ```
+// fn make<T>() -> T { ) }
+// fn main() {
+// let a = make$0::<i32>();
+// }
+// ```
+// ->
+// ```
+// fn make<T>() -> T { ) }
+// fn main() {
+// let a: i32 = make();
+// }
+// ```
+pub(crate) fn replace_turbofish_with_explicit_type(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let let_stmt = ctx.find_node_at_offset::<LetStmt>()?;
+
+ let initializer = let_stmt.initializer()?;
+
+ let generic_args = match &initializer {
+ Expr::MethodCallExpr(ce) => ce.generic_arg_list()?,
+ Expr::CallExpr(ce) => {
+ if let Expr::PathExpr(pe) = ce.expr()? {
+ pe.path()?.segment()?.generic_arg_list()?
+ } else {
+ cov_mark::hit!(not_applicable_if_non_path_function_call);
+ return None;
+ }
+ }
+ _ => {
+ cov_mark::hit!(not_applicable_if_non_function_call_initializer);
+ return None;
+ }
+ };
+
+ // Find range of ::<_>
+ let colon2 = generic_args.coloncolon_token()?;
+ let r_angle = generic_args.r_angle_token()?;
+ let turbofish_range = TextRange::new(colon2.text_range().start(), r_angle.text_range().end());
+
+ let turbofish_args: Vec<GenericArg> = generic_args.generic_args().into_iter().collect();
+
+ // Find type of ::<_>
+ if turbofish_args.len() != 1 {
+ cov_mark::hit!(not_applicable_if_not_single_arg);
+ return None;
+ }
+
+ // An improvement would be to check that this is correctly part of the return value of the
+ // function call, or sub in the actual return type.
+ let turbofish_type = &turbofish_args[0];
+
+ let initializer_start = initializer.syntax().text_range().start();
+ if ctx.offset() > turbofish_range.end() || ctx.offset() < initializer_start {
+ cov_mark::hit!(not_applicable_outside_turbofish);
+ return None;
+ }
+
+ if let None = let_stmt.colon_token() {
+ // If there's no colon in a let statement, then there is no explicit type.
+ // let x = fn::<...>();
+ let ident_range = let_stmt.pat()?.syntax().text_range();
+
+ return acc.add(
+ AssistId("replace_turbofish_with_explicit_type", AssistKind::RefactorRewrite),
+ "Replace turbofish with explicit type",
+ TextRange::new(initializer_start, turbofish_range.end()),
+ |builder| {
+ builder.insert(ident_range.end(), format!(": {}", turbofish_type));
+ builder.delete(turbofish_range);
+ },
+ );
+ } else if let Some(InferType(t)) = let_stmt.ty() {
+ // If there's a type inferrence underscore, we can offer to replace it with the type in
+ // the turbofish.
+ // let x: _ = fn::<...>();
+ let underscore_range = t.syntax().text_range();
+
+ return acc.add(
+ AssistId("replace_turbofish_with_explicit_type", AssistKind::RefactorRewrite),
+ "Replace `_` with turbofish type",
+ turbofish_range,
+ |builder| {
+ builder.replace(underscore_range, turbofish_type.to_string());
+ builder.delete(turbofish_range);
+ },
+ );
+ }
+
+ None
+}
+
#[cfg(test)]
mod tests {
    use super::*;

    use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};

    #[test]
    fn replaces_turbofish_for_vec_string() {
        check_assist(
            replace_turbofish_with_explicit_type,
            r#"
fn make<T>() -> T {}
fn main() {
    let a = make$0::<Vec<String>>();
}
"#,
            r#"
fn make<T>() -> T {}
fn main() {
    let a: Vec<String> = make();
}
"#,
        );
    }

    #[test]
    fn replaces_method_calls() {
        // foo.make() is a method call which uses a different expr in the let initializer
        check_assist(
            replace_turbofish_with_explicit_type,
            r#"
fn make<T>() -> T {}
fn main() {
    let a = foo.make$0::<Vec<String>>();
}
"#,
            r#"
fn make<T>() -> T {}
fn main() {
    let a: Vec<String> = foo.make();
}
"#,
        );
    }

    #[test]
    fn replace_turbofish_target() {
        check_assist_target(
            replace_turbofish_with_explicit_type,
            r#"
fn make<T>() -> T {}
fn main() {
    let a = $0make::<Vec<String>>();
}
"#,
            r#"make::<Vec<String>>"#,
        );
    }

    #[test]
    fn not_applicable_outside_turbofish() {
        cov_mark::check!(not_applicable_outside_turbofish);
        check_assist_not_applicable(
            replace_turbofish_with_explicit_type,
            r#"
fn make<T>() -> T {}
fn main() {
    let $0a = make::<Vec<String>>();
}
"#,
        );
    }

    // An explicit `_` placeholder type gets substituted by the turbofish type.
    #[test]
    fn replace_inferred_type_placeholder() {
        check_assist(
            replace_turbofish_with_explicit_type,
            r#"
fn make<T>() -> T {}
fn main() {
    let a: _ = make$0::<Vec<String>>();
}
"#,
            r#"
fn make<T>() -> T {}
fn main() {
    let a: Vec<String> = make();
}
"#,
        );
    }

    #[test]
    fn not_applicable_constant_initializer() {
        cov_mark::check!(not_applicable_if_non_function_call_initializer);
        check_assist_not_applicable(
            replace_turbofish_with_explicit_type,
            r#"
fn make<T>() -> T {}
fn main() {
    let a = "foo"$0;
}
"#,
        );
    }

    #[test]
    fn not_applicable_non_path_function_call() {
        cov_mark::check!(not_applicable_if_non_path_function_call);
        check_assist_not_applicable(
            replace_turbofish_with_explicit_type,
            r#"
fn make<T>() -> T {}
fn main() {
    $0let a = (|| {})();
}
"#,
        );
    }

    #[test]
    fn non_applicable_multiple_generic_args() {
        cov_mark::check!(not_applicable_if_not_single_arg);
        check_assist_not_applicable(
            replace_turbofish_with_explicit_type,
            r#"
fn make<T>() -> T {}
fn main() {
    let a = make$0::<Vec<String>, i32>();
}
"#,
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs
new file mode 100644
index 000000000..a93704b39
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs
@@ -0,0 +1,588 @@
+use std::cmp::Ordering;
+
+use itertools::Itertools;
+
+use syntax::{
+ ast::{self, HasName},
+ ted, AstNode, TextRange,
+};
+
+use crate::{utils::get_methods, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: sort_items
+//
+// Sorts item members alphabetically: fields, enum variants and methods.
+//
+// ```
+// struct $0Foo$0 { second: u32, first: String }
+// ```
+// ->
+// ```
+// struct Foo { first: String, second: u32 }
+// ```
+// ---
+// ```
+// trait $0Bar$0 {
+// fn second(&self) -> u32;
+// fn first(&self) -> String;
+// }
+// ```
+// ->
+// ```
+// trait Bar {
+// fn first(&self) -> String;
+// fn second(&self) -> u32;
+// }
+// ```
+// ---
+// ```
+// struct Baz;
+// impl $0Baz$0 {
+// fn second(&self) -> u32;
+// fn first(&self) -> String;
+// }
+// ```
+// ->
+// ```
+// struct Baz;
+// impl Baz {
+// fn first(&self) -> String;
+// fn second(&self) -> u32;
+// }
+// ```
+// ---
+// There is a difference between sorting enum variants:
+//
+// ```
+// enum $0Animal$0 {
+// Dog(String, f64),
+// Cat { weight: f64, name: String },
+// }
+// ```
+// ->
+// ```
+// enum Animal {
+// Cat { weight: f64, name: String },
+// Dog(String, f64),
+// }
+// ```
+// and sorting a single enum struct variant:
+//
+// ```
+// enum Animal {
+// Dog(String, f64),
+// Cat $0{ weight: f64, name: String }$0,
+// }
+// ```
+// ->
+// ```
+// enum Animal {
+// Dog(String, f64),
+// Cat { name: String, weight: f64 },
+// }
+// ```
pub(crate) fn sort_items(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    // Only offered on an explicit selection; a bare cursor is deliberately ignored.
    if ctx.has_empty_selection() {
        cov_mark::hit!(not_applicable_if_no_selection);
        return None;
    }

    // Dispatch on the most specific sortable item found around the selection;
    // the order of these checks is significant (see the inline note below).
    if let Some(trait_ast) = ctx.find_node_at_offset::<ast::Trait>() {
        add_sort_methods_assist(acc, trait_ast.assoc_item_list()?)
    } else if let Some(impl_ast) = ctx.find_node_at_offset::<ast::Impl>() {
        add_sort_methods_assist(acc, impl_ast.assoc_item_list()?)
    } else if let Some(struct_ast) = ctx.find_node_at_offset::<ast::Struct>() {
        add_sort_field_list_assist(acc, struct_ast.field_list())
    } else if let Some(union_ast) = ctx.find_node_at_offset::<ast::Union>() {
        add_sort_fields_assist(acc, union_ast.record_field_list()?)
    } else if let Some(variant_ast) = ctx.find_node_at_offset::<ast::Variant>() {
        add_sort_field_list_assist(acc, variant_ast.field_list())
    } else if let Some(enum_struct_variant_ast) = ctx.find_node_at_offset::<ast::RecordFieldList>()
    {
        // should be above enum and below struct
        add_sort_fields_assist(acc, enum_struct_variant_ast)
    } else if let Some(enum_ast) = ctx.find_node_at_offset::<ast::Enum>() {
        add_sort_variants_assist(acc, enum_ast.variant_list()?)
    } else {
        None
    }
}
+
/// Local helper trait to register a "sort" rewrite on [`Assists`] without
/// repeating the AssistId/label/target boilerplate at every call site.
trait AddRewrite {
    /// Adds an assist labelled `label` that replaces each node in `old` with
    /// the node at the same position in `new` (the lists are zipped pairwise).
    fn add_rewrite<T: AstNode>(
        &mut self,
        label: &str,
        old: Vec<T>,
        new: Vec<T>,
        target: TextRange,
    ) -> Option<()>;
}
+
+impl AddRewrite for Assists {
+ fn add_rewrite<T: AstNode>(
+ &mut self,
+ label: &str,
+ old: Vec<T>,
+ new: Vec<T>,
+ target: TextRange,
+ ) -> Option<()> {
+ self.add(AssistId("sort_items", AssistKind::RefactorRewrite), label, target, |builder| {
+ let mutable: Vec<T> = old.into_iter().map(|it| builder.make_mut(it)).collect();
+ mutable
+ .into_iter()
+ .zip(new)
+ .for_each(|(old, new)| ted::replace(old.syntax(), new.clone_for_update().syntax()));
+ })
+ }
+}
+
+fn add_sort_field_list_assist(acc: &mut Assists, field_list: Option<ast::FieldList>) -> Option<()> {
+ match field_list {
+ Some(ast::FieldList::RecordFieldList(it)) => add_sort_fields_assist(acc, it),
+ _ => {
+ cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
+ None
+ }
+ }
+}
+
+fn add_sort_methods_assist(acc: &mut Assists, item_list: ast::AssocItemList) -> Option<()> {
+ let methods = get_methods(&item_list);
+ let sorted = sort_by_name(&methods);
+
+ if methods == sorted {
+ cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
+ return None;
+ }
+
+ acc.add_rewrite("Sort methods alphabetically", methods, sorted, item_list.syntax().text_range())
+}
+
+fn add_sort_fields_assist(
+ acc: &mut Assists,
+ record_field_list: ast::RecordFieldList,
+) -> Option<()> {
+ let fields: Vec<_> = record_field_list.fields().collect();
+ let sorted = sort_by_name(&fields);
+
+ if fields == sorted {
+ cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
+ return None;
+ }
+
+ acc.add_rewrite(
+ "Sort fields alphabetically",
+ fields,
+ sorted,
+ record_field_list.syntax().text_range(),
+ )
+}
+
+fn add_sort_variants_assist(acc: &mut Assists, variant_list: ast::VariantList) -> Option<()> {
+ let variants: Vec<_> = variant_list.variants().collect();
+ let sorted = sort_by_name(&variants);
+
+ if variants == sorted {
+ cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
+ return None;
+ }
+
+ acc.add_rewrite(
+ "Sort variants alphabetically",
+ variants,
+ sorted,
+ variant_list.syntax().text_range(),
+ )
+}
+
+fn sort_by_name<T: HasName + Clone>(initial: &[T]) -> Vec<T> {
+ initial
+ .iter()
+ .cloned()
+ .sorted_by(|a, b| match (a.name(), b.name()) {
+ (Some(a), Some(b)) => Ord::cmp(&a.to_string(), &b.to_string()),
+
+ // unexpected, but just in case
+ (None, None) => Ordering::Equal,
+ (None, Some(_)) => Ordering::Less,
+ (Some(_), None) => Ordering::Greater,
+ })
+ .collect()
+}
+
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::*;

    // `$0…$0` marks the selection; the assist requires a non-empty selection.
    #[test]
    fn not_applicable_if_no_selection() {
        cov_mark::check!(not_applicable_if_no_selection);

        check_assist_not_applicable(
            sort_items,
            r#"
t$0rait Bar {
    fn b();
    fn a();
}
    "#,
        )
    }

    #[test]
    fn not_applicable_if_trait_empty() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
t$0rait Bar$0 {
}
    "#,
        )
    }

    #[test]
    fn not_applicable_if_impl_empty() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
struct Bar;
$0impl Bar$0 {
}
    "#,
        )
    }

    #[test]
    fn not_applicable_if_struct_empty() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
$0struct Bar$0 ;
    "#,
        )
    }

    #[test]
    fn not_applicable_if_struct_empty2() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
$0struct Bar$0 { };
    "#,
        )
    }

    #[test]
    fn not_applicable_if_enum_empty() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
$0enum ZeroVariants$0 {};
    "#,
        )
    }

    #[test]
    fn not_applicable_if_trait_sorted() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
t$0rait Bar$0 {
    fn a() {}
    fn b() {}
    fn c() {}
}
    "#,
        )
    }

    #[test]
    fn not_applicable_if_impl_sorted() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
struct Bar;
$0impl Bar$0 {
    fn a() {}
    fn b() {}
    fn c() {}
}
    "#,
        )
    }

    #[test]
    fn not_applicable_if_struct_sorted() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
$0struct Bar$0 {
    a: u32,
    b: u8,
    c: u64,
}
    "#,
        )
    }

    #[test]
    fn not_applicable_if_union_sorted() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
$0union Bar$0 {
    a: u32,
    b: u8,
    c: u64,
}
    "#,
        )
    }

    #[test]
    fn not_applicable_if_enum_sorted() {
        cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);

        check_assist_not_applicable(
            sort_items,
            r#"
$0enum Bar$0 {
    a,
    b,
    c,
}
    "#,
        )
    }

    // Comments and doc-comments travel with the item they precede.
    #[test]
    fn sort_trait() {
        check_assist(
            sort_items,
            r#"
$0trait Bar$0 {
    fn a() {

    }

    // comment for c
    fn c() {}
    fn z() {}
    fn b() {}
}
    "#,
            r#"
trait Bar {
    fn a() {

    }

    fn b() {}
    // comment for c
    fn c() {}
    fn z() {}
}
    "#,
        )
    }

    #[test]
    fn sort_impl() {
        check_assist(
            sort_items,
            r#"
struct Bar;
$0impl Bar$0 {
    fn c() {}
    fn a() {}
    /// long
    /// doc
    /// comment
    fn z() {}
    fn d() {}
}
    "#,
            r#"
struct Bar;
impl Bar {
    fn a() {}
    fn c() {}
    fn d() {}
    /// long
    /// doc
    /// comment
    fn z() {}
}
    "#,
        )
    }

    #[test]
    fn sort_struct() {
        check_assist(
            sort_items,
            r#"
$0struct Bar$0 {
    b: u8,
    a: u32,
    c: u64,
}
    "#,
            r#"
struct Bar {
    a: u32,
    b: u8,
    c: u64,
}
    "#,
        )
    }

    #[test]
    fn sort_generic_struct_with_lifetime() {
        check_assist(
            sort_items,
            r#"
$0struct Bar<'a,$0 T> {
    d: &'a str,
    b: u8,
    a: T,
    c: u64,
}
    "#,
            r#"
struct Bar<'a, T> {
    a: T,
    b: u8,
    c: u64,
    d: &'a str,
}
    "#,
        )
    }

    // Sorting is lexicographic on the name text, so `a` < `aaa` < `b`.
    #[test]
    fn sort_struct_fields_diff_len() {
        check_assist(
            sort_items,
            r#"
$0struct Bar $0{
    aaa: u8,
    a: usize,
    b: u8,
}
    "#,
            r#"
struct Bar {
    a: usize,
    aaa: u8,
    b: u8,
}
    "#,
        )
    }

    #[test]
    fn sort_union() {
        check_assist(
            sort_items,
            r#"
$0union Bar$0 {
    b: u8,
    a: u32,
    c: u64,
}
    "#,
            r#"
union Bar {
    a: u32,
    b: u8,
    c: u64,
}
    "#,
        )
    }

    #[test]
    fn sort_enum() {
        check_assist(
            sort_items,
            r#"
$0enum Bar $0{
    d{ first: u32, second: usize},
    b = 14,
    a,
    c(u32, usize),
}
    "#,
            r#"
enum Bar {
    a,
    b = 14,
    c(u32, usize),
    d{ first: u32, second: usize},
}
    "#,
        )
    }

    // Selecting a single record variant's field list sorts only those fields,
    // leaving the surrounding variant order untouched.
    #[test]
    fn sort_struct_enum_variant_fields() {
        check_assist(
            sort_items,
            r#"
enum Bar {
    d$0{ second: usize, first: u32 }$0,
    b = 14,
    a,
    c(u32, usize),
}
    "#,
            r#"
enum Bar {
    d{ first: u32, second: usize },
    b = 14,
    a,
    c(u32, usize),
}
    "#,
        )
    }

    #[test]
    fn sort_struct_enum_variant() {
        check_assist(
            sort_items,
            r#"
enum Bar {
    $0d$0{ second: usize, first: u32 },
}
    "#,
            r#"
enum Bar {
    d{ first: u32, second: usize },
}
    "#,
        )
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs
new file mode 100644
index 000000000..775ededec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs
@@ -0,0 +1,82 @@
+use syntax::{ast, AstNode, T};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: split_import
+//
+// Wraps the tail of import into braces.
+//
+// ```
+// use std::$0collections::HashMap;
+// ```
+// ->
+// ```
+// use std::{collections::HashMap};
+// ```
+pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let colon_colon = ctx.find_token_syntax_at_offset(T![::])?;
+ let path = ast::Path::cast(colon_colon.parent()?)?.qualifier()?;
+
+ let use_tree = path.top_path().syntax().ancestors().find_map(ast::UseTree::cast)?;
+
+ let has_errors = use_tree
+ .syntax()
+ .descendants_with_tokens()
+ .any(|it| it.kind() == syntax::SyntaxKind::ERROR);
+ let last_segment = use_tree.path().and_then(|it| it.segment());
+ if has_errors || last_segment.is_none() {
+ return None;
+ }
+
+ let target = colon_colon.text_range();
+ acc.add(AssistId("split_import", AssistKind::RefactorRewrite), "Split import", target, |edit| {
+ let use_tree = edit.make_mut(use_tree.clone());
+ let path = edit.make_mut(path);
+ use_tree.split_prefix(&path);
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_split_import() {
+ check_assist(
+ split_import,
+ "use crate::$0db::RootDatabase;",
+ "use crate::{db::RootDatabase};",
+ )
+ }
+
+ #[test]
+ fn split_import_works_with_trees() {
+ check_assist(
+ split_import,
+ "use crate:$0:db::{RootDatabase, FileSymbol}",
+ "use crate::{db::{RootDatabase, FileSymbol}}",
+ )
+ }
+
+ #[test]
+ fn split_import_target() {
+ check_assist_target(split_import, "use crate::$0db::{RootDatabase, FileSymbol}", "::");
+ }
+
+ #[test]
+ fn issue4044() {
+ check_assist_not_applicable(split_import, "use crate::$0:::self;")
+ }
+
+ #[test]
+ fn test_empty_use() {
+ check_assist_not_applicable(
+ split_import,
+ r"
+use std::$0
+fn main() {}",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
new file mode 100644
index 000000000..b7d57f02b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
@@ -0,0 +1,98 @@
+use syntax::{
+ ast::{self, HasAttrs},
+ AstNode, AstToken,
+};
+
+use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: toggle_ignore
+//
+// Adds `#[ignore]` attribute to the test.
+//
+// ```
+// $0#[test]
+// fn arithmetics {
+// assert_eq!(2 + 2, 5);
+// }
+// ```
+// ->
+// ```
+// #[test]
+// #[ignore]
+// fn arithmetics {
+// assert_eq!(2 + 2, 5);
+// }
+// ```
+pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let attr: ast::Attr = ctx.find_node_at_offset()?;
+ let func = attr.syntax().parent().and_then(ast::Fn::cast)?;
+ let attr = test_related_attribute(&func)?;
+
+ match has_ignore_attribute(&func) {
+ None => acc.add(
+ AssistId("toggle_ignore", AssistKind::None),
+ "Ignore this test",
+ attr.syntax().text_range(),
+ |builder| builder.insert(attr.syntax().text_range().end(), "\n#[ignore]"),
+ ),
+ Some(ignore_attr) => acc.add(
+ AssistId("toggle_ignore", AssistKind::None),
+ "Re-enable this test",
+ ignore_attr.syntax().text_range(),
+ |builder| {
+ builder.delete(ignore_attr.syntax().text_range());
+ let whitespace = ignore_attr
+ .syntax()
+ .next_sibling_or_token()
+ .and_then(|x| x.into_token())
+ .and_then(ast::Whitespace::cast);
+ if let Some(whitespace) = whitespace {
+ builder.delete(whitespace.syntax().text_range());
+ }
+ },
+ ),
+ }
+}
+
+fn has_ignore_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
+ fn_def.attrs().find(|attr| attr.path().map(|it| it.syntax().text() == "ignore") == Some(true))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_assist;
+
+ use super::*;
+
+ #[test]
+ fn test_base_case() {
+ check_assist(
+ toggle_ignore,
+ r#"
+ #[test$0]
+ fn test() {}
+ "#,
+ r#"
+ #[test]
+ #[ignore]
+ fn test() {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_unignore() {
+ check_assist(
+ toggle_ignore,
+ r#"
+ #[test$0]
+ #[ignore]
+ fn test() {}
+ "#,
+ r#"
+ #[test]
+ fn test() {}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
new file mode 100644
index 000000000..3ce028e93
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
@@ -0,0 +1,237 @@
+use syntax::{
+ ast::{self, make, HasVisibility},
+ ted::{self, Position},
+ AstNode, SyntaxKind,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: unmerge_use
+//
+// Extracts single use item from use list.
+//
+// ```
+// use std::fmt::{Debug, Display$0};
+// ```
+// ->
+// ```
+// use std::fmt::{Debug};
+// use std::fmt::Display;
+// ```
+pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let tree: ast::UseTree = ctx.find_node_at_offset::<ast::UseTree>()?.clone_for_update();
+
+ let tree_list = tree.syntax().parent().and_then(ast::UseTreeList::cast)?;
+ if tree_list.use_trees().count() < 2 {
+ cov_mark::hit!(skip_single_use_item);
+ return None;
+ }
+
+ let use_: ast::Use = tree_list.syntax().ancestors().find_map(ast::Use::cast)?;
+ let path = resolve_full_path(&tree)?;
+
+ let old_parent_range = use_.syntax().parent()?.text_range();
+ let new_parent = use_.syntax().parent()?;
+
+ let target = tree.syntax().text_range();
+ acc.add(
+ AssistId("unmerge_use", AssistKind::RefactorRewrite),
+ "Unmerge use",
+ target,
+ |builder| {
+ let new_use = make::use_(
+ use_.visibility(),
+ make::use_tree(
+ path,
+ tree.use_tree_list(),
+ tree.rename(),
+ tree.star_token().is_some(),
+ ),
+ )
+ .clone_for_update();
+
+ tree.remove();
+ ted::insert(Position::after(use_.syntax()), new_use.syntax());
+
+ builder.replace(old_parent_range, new_parent.to_string());
+ },
+ )
+}
+
+fn resolve_full_path(tree: &ast::UseTree) -> Option<ast::Path> {
+ let paths = tree
+ .syntax()
+ .ancestors()
+ .take_while(|n| n.kind() != SyntaxKind::USE)
+ .filter_map(ast::UseTree::cast)
+ .filter_map(|t| t.path());
+
+ let final_path = paths.reduce(|prev, next| make::path_concat(next, prev))?;
+ if final_path.segment().map_or(false, |it| it.self_token().is_some()) {
+ final_path.qualifier()
+ } else {
+ Some(final_path)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn skip_single_use_item() {
+ cov_mark::check!(skip_single_use_item);
+ check_assist_not_applicable(
+ unmerge_use,
+ r"
+use std::fmt::Debug$0;
+",
+ );
+ check_assist_not_applicable(
+ unmerge_use,
+ r"
+use std::fmt::{Debug$0};
+",
+ );
+ check_assist_not_applicable(
+ unmerge_use,
+ r"
+use std::fmt::Debug as Dbg$0;
+",
+ );
+ }
+
+ #[test]
+ fn skip_single_glob_import() {
+ check_assist_not_applicable(
+ unmerge_use,
+ r"
+use std::fmt::*$0;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_use_item() {
+ check_assist(
+ unmerge_use,
+ r"
+use std::fmt::{Debug, Display$0};
+",
+ r"
+use std::fmt::{Debug};
+use std::fmt::Display;
+",
+ );
+
+ check_assist(
+ unmerge_use,
+ r"
+use std::fmt::{Debug, format$0, Display};
+",
+ r"
+use std::fmt::{Debug, Display};
+use std::fmt::format;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_glob_import() {
+ check_assist(
+ unmerge_use,
+ r"
+use std::fmt::{*$0, Display};
+",
+ r"
+use std::fmt::{Display};
+use std::fmt::*;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_renamed_use_item() {
+ check_assist(
+ unmerge_use,
+ r"
+use std::fmt::{Debug, Display as Disp$0};
+",
+ r"
+use std::fmt::{Debug};
+use std::fmt::Display as Disp;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_indented_use_item() {
+ check_assist(
+ unmerge_use,
+ r"
+mod format {
+ use std::fmt::{Debug, Display$0 as Disp, format};
+}
+",
+ r"
+mod format {
+ use std::fmt::{Debug, format};
+ use std::fmt::Display as Disp;
+}
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_nested_use_item() {
+ check_assist(
+ unmerge_use,
+ r"
+use foo::bar::{baz::{qux$0, foobar}, barbaz};
+",
+ r"
+use foo::bar::{baz::{foobar}, barbaz};
+use foo::bar::baz::qux;
+",
+ );
+ check_assist(
+ unmerge_use,
+ r"
+use foo::bar::{baz$0::{qux, foobar}, barbaz};
+",
+ r"
+use foo::bar::{barbaz};
+use foo::bar::baz::{qux, foobar};
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_use_item_with_visibility() {
+ check_assist(
+ unmerge_use,
+ r"
+pub use std::fmt::{Debug, Display$0};
+",
+ r"
+pub use std::fmt::{Debug};
+pub use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_use_item_on_self() {
+ check_assist(
+ unmerge_use,
+ r"use std::process::{Command, self$0};",
+ r"use std::process::{Command};
+use std::process;",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
new file mode 100644
index 000000000..d5cd2d551
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
@@ -0,0 +1,257 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::FileId,
+ defs::Definition,
+ search::FileReference,
+ syntax_helpers::node_ext::full_path_of_name_ref,
+};
+use syntax::{
+ ast::{self, NameLike, NameRef},
+ AstNode, SyntaxKind, TextRange,
+};
+
+use crate::{AssistContext, Assists};
+
+// Assist: unnecessary_async
+//
+// Removes the `async` mark from functions which have no `.await` in their body.
+// Looks for calls to the functions and removes the `.await` on the call site.
+//
+// ```
+// pub async f$0n foo() {}
+// pub async fn bar() { foo().await }
+// ```
+// ->
+// ```
+// pub fn foo() {}
+// pub async fn bar() { foo() }
+// ```
+pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let function: ast::Fn = ctx.find_node_at_offset()?;
+
+ // Do nothing if the cursor is not on the prototype. This is so that the check does not pollute
+ // when the user asks us for assists when in the middle of the function body.
+ // We consider the prototype to be anything that is before the body of the function.
+ let cursor_position = ctx.offset();
+ if cursor_position >= function.body()?.syntax().text_range().start() {
+ return None;
+ }
+ // Do nothing if the function isn't async.
+ if let None = function.async_token() {
+ return None;
+ }
+ // Do nothing if the function has an `await` expression in its body.
+ if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() {
+ return None;
+ }
+
+ // Remove the `async` keyword plus whitespace after it, if any.
+ let async_range = {
+ let async_token = function.async_token()?;
+ let next_token = async_token.next_token()?;
+ if matches!(next_token.kind(), SyntaxKind::WHITESPACE) {
+ TextRange::new(async_token.text_range().start(), next_token.text_range().end())
+ } else {
+ async_token.text_range()
+ }
+ };
+
+ // Otherwise, we may remove the `async` keyword.
+ acc.add(
+ AssistId("unnecessary_async", AssistKind::QuickFix),
+ "Remove unnecessary async",
+ async_range,
+ |edit| {
+ // Remove async on the function definition.
+ edit.replace(async_range, "");
+
+ // Remove all `.await`s from calls to the function we remove `async` from.
+ if let Some(fn_def) = ctx.sema.to_def(&function) {
+ for await_expr in find_all_references(ctx, &Definition::Function(fn_def))
+ // Keep only references that correspond NameRefs.
+ .filter_map(|(_, reference)| match reference.name {
+ NameLike::NameRef(nameref) => Some(nameref),
+ _ => None,
+ })
+ // Keep only references that correspond to await expressions
+ .filter_map(|nameref| find_await_expression(ctx, &nameref))
+ {
+ if let Some(await_token) = &await_expr.await_token() {
+ edit.replace(await_token.text_range(), "");
+ }
+ if let Some(dot_token) = &await_expr.dot_token() {
+ edit.replace(dot_token.text_range(), "");
+ }
+ }
+ }
+ },
+ )
+}
+
+fn find_all_references(
+ ctx: &AssistContext<'_>,
+ def: &Definition,
+) -> impl Iterator<Item = (FileId, FileReference)> {
+ def.usages(&ctx.sema).all().into_iter().flat_map(|(file_id, references)| {
+ references.into_iter().map(move |reference| (file_id, reference))
+ })
+}
+
+/// Finds the await expression for the given `NameRef`.
+/// If no await expression is found, returns None.
+fn find_await_expression(ctx: &AssistContext<'_>, nameref: &NameRef) -> Option<ast::AwaitExpr> {
+ // From the nameref, walk up the tree to the await expression.
+ let await_expr = if let Some(path) = full_path_of_name_ref(&nameref) {
+ // Function calls.
+ path.syntax()
+ .parent()
+ .and_then(ast::PathExpr::cast)?
+ .syntax()
+ .parent()
+ .and_then(ast::CallExpr::cast)?
+ .syntax()
+ .parent()
+ .and_then(ast::AwaitExpr::cast)
+ } else {
+ // Method calls.
+ nameref
+ .syntax()
+ .parent()
+ .and_then(ast::MethodCallExpr::cast)?
+ .syntax()
+ .parent()
+ .and_then(ast::AwaitExpr::cast)
+ };
+
+ ctx.sema.original_ast_node(await_expr?)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn applies_on_empty_function() {
+ check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}")
+ }
+
+ #[test]
+ fn applies_and_removes_whitespace() {
+ check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}")
+ }
+
+ #[test]
+ fn does_not_apply_on_non_async_function() {
+ check_assist_not_applicable(unnecessary_async, "pub f$0n f() {}")
+ }
+
+ #[test]
+ fn applies_on_function_with_a_non_await_expr() {
+ check_assist(unnecessary_async, "pub async f$0n f() { f2() }", "pub fn f() { f2() }")
+ }
+
+ #[test]
+ fn does_not_apply_on_function_with_an_await_expr() {
+ check_assist_not_applicable(unnecessary_async, "pub async f$0n f() { f2().await }")
+ }
+
+ #[test]
+ fn applies_and_removes_await_on_reference() {
+ check_assist(
+ unnecessary_async,
+ r#"
+pub async fn f4() { }
+pub async f$0n f2() { }
+pub async fn f() { f2().await }
+pub async fn f3() { f2().await }"#,
+ r#"
+pub async fn f4() { }
+pub fn f2() { }
+pub async fn f() { f2() }
+pub async fn f3() { f2() }"#,
+ )
+ }
+
+ #[test]
+ fn applies_and_removes_await_from_within_module() {
+ check_assist(
+ unnecessary_async,
+ r#"
+pub async fn f4() { }
+mod a { pub async f$0n f2() { } }
+pub async fn f() { a::f2().await }
+pub async fn f3() { a::f2().await }"#,
+ r#"
+pub async fn f4() { }
+mod a { pub fn f2() { } }
+pub async fn f() { a::f2() }
+pub async fn f3() { a::f2() }"#,
+ )
+ }
+
+ #[test]
+ fn applies_and_removes_await_on_inner_await() {
+ check_assist(
+ unnecessary_async,
+ // Ensure that it is the first await on the 3rd line that is removed
+ r#"
+pub async fn f() { f2().await }
+pub async f$0n f2() -> i32 { 1 }
+pub async fn f3() { f4(f2().await).await }
+pub async fn f4(i: i32) { }"#,
+ r#"
+pub async fn f() { f2() }
+pub fn f2() -> i32 { 1 }
+pub async fn f3() { f4(f2()).await }
+pub async fn f4(i: i32) { }"#,
+ )
+ }
+
+ #[test]
+ fn applies_and_removes_await_on_outer_await() {
+ check_assist(
+ unnecessary_async,
+ // Ensure that it is the second await on the 3rd line that is removed
+ r#"
+pub async fn f() { f2().await }
+pub async f$0n f2(i: i32) { }
+pub async fn f3() { f2(f4().await).await }
+pub async fn f4() -> i32 { 1 }"#,
+ r#"
+pub async fn f() { f2() }
+pub fn f2(i: i32) { }
+pub async fn f3() { f2(f4().await) }
+pub async fn f4() -> i32 { 1 }"#,
+ )
+ }
+
+ #[test]
+ fn applies_on_method_call() {
+ check_assist(
+ unnecessary_async,
+ r#"
+pub struct S { }
+impl S { pub async f$0n f2(&self) { } }
+pub async fn f(s: &S) { s.f2().await }"#,
+ r#"
+pub struct S { }
+impl S { pub fn f2(&self) { } }
+pub async fn f(s: &S) { s.f2() }"#,
+ )
+ }
+
+ #[test]
+ fn does_not_apply_on_function_with_a_nested_await_expr() {
+ check_assist_not_applicable(
+ unnecessary_async,
+ "async f$0n f() { if true { loop { f2().await } } }",
+ )
+ }
+
+ #[test]
+ fn does_not_apply_when_not_on_prototype() {
+ check_assist_not_applicable(unnecessary_async, "pub async fn f() { $0f2() }")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
new file mode 100644
index 000000000..7969a4918
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
@@ -0,0 +1,719 @@
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ },
+ AstNode, SyntaxKind, TextRange, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: unwrap_block
+//
+// This assist removes if...else, for, while and loop control statements to just keep the body.
+//
+// ```
+// fn foo() {
+// if true {$0
+// println!("foo");
+// }
+// }
+// ```
+// ->
+// ```
+// fn foo() {
+// println!("foo");
+// }
+// ```
+pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite);
+ let assist_label = "Unwrap block";
+
+ let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
+ let mut block = ast::BlockExpr::cast(l_curly_token.parent_ancestors().nth(1)?)?;
+ let target = block.syntax().text_range();
+ let mut parent = block.syntax().parent()?;
+ if ast::MatchArm::can_cast(parent.kind()) {
+ parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))?
+ }
+
+ if matches!(parent.kind(), SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) {
+ return acc.add(assist_id, assist_label, target, |builder| {
+ builder.replace(block.syntax().text_range(), update_expr_string(block.to_string()));
+ });
+ }
+
+ let parent = ast::Expr::cast(parent)?;
+
+ match parent.clone() {
+ ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (),
+ ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)),
+ ast::Expr::IfExpr(if_expr) => {
+ let then_branch = if_expr.then_branch()?;
+ if then_branch == block {
+ if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) {
+ // For `else if` blocks
+ let ancestor_then_branch = ancestor.then_branch()?;
+
+ return acc.add(assist_id, assist_label, target, |edit| {
+ let range_to_del_else_if = TextRange::new(
+ ancestor_then_branch.syntax().text_range().end(),
+ l_curly_token.text_range().start(),
+ );
+ let range_to_del_rest = TextRange::new(
+ then_branch.syntax().text_range().end(),
+ if_expr.syntax().text_range().end(),
+ );
+
+ edit.delete(range_to_del_rest);
+ edit.delete(range_to_del_else_if);
+ edit.replace(
+ target,
+ update_expr_string_without_newline(then_branch.to_string()),
+ );
+ });
+ }
+ } else {
+ return acc.add(assist_id, assist_label, target, |edit| {
+ let range_to_del = TextRange::new(
+ then_branch.syntax().text_range().end(),
+ l_curly_token.text_range().start(),
+ );
+
+ edit.delete(range_to_del);
+ edit.replace(target, update_expr_string_without_newline(block.to_string()));
+ });
+ }
+ }
+ _ => return None,
+ };
+
+ acc.add(assist_id, assist_label, target, |builder| {
+ builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string()));
+ })
+}
+
+fn update_expr_string(expr_string: String) -> String {
+ update_expr_string_with_pat(expr_string, &[' ', '\n'])
+}
+
+fn update_expr_string_without_newline(expr_string: String) -> String {
+ update_expr_string_with_pat(expr_string, &[' '])
+}
+
+fn update_expr_string_with_pat(expr_str: String, whitespace_pat: &[char]) -> String {
+ // Remove leading whitespace, index [1..] to remove the leading '{',
+ // then continue to remove leading whitespace.
+ let expr_str =
+ expr_str.trim_start_matches(whitespace_pat)[1..].trim_start_matches(whitespace_pat);
+
+ // Remove trailing whitespace, index [..expr_str.len() - 1] to remove the trailing '}',
+ // then continue to remove trailing whitespace.
+ let expr_str = expr_str.trim_end_matches(whitespace_pat);
+ let expr_str = expr_str[..expr_str.len() - 1].trim_end_matches(whitespace_pat);
+
+ expr_str
+ .lines()
+ .map(|line| line.replacen(" ", "", 1)) // Delete indentation
+ .collect::<Vec<String>>()
+ .join("\n")
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn unwrap_tail_expr_block() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ $0{
+ 92
+ }
+}
+"#,
+ r#"
+fn main() {
+ 92
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unwrap_stmt_expr_block() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ $0{
+ 92;
+ }
+ ()
+}
+"#,
+ r#"
+fn main() {
+ 92;
+ ()
+}
+"#,
+ );
+ // Pedantically, we should add an `;` here...
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ $0{
+ 92
+ }
+ ()
+}
+"#,
+ r#"
+fn main() {
+ 92
+ ()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ bar();
+ if true {$0
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ r#"
+fn main() {
+ bar();
+ foo();
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ bar();
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {$0
+ println!("bar");
+ }
+}
+"#,
+ r#"
+fn main() {
+ bar();
+ if true {
+ foo();
+
+ // comment
+ bar();
+ }
+ println!("bar");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else_if() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {$0
+ println!("bar");
+ } else {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ }
+ println!("bar");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else_if_nested() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ } else if true {$0
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ }
+ println!("foo");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else_if_nested_else() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ } else if true {
+ println!("foo");
+ } else {$0
+ println!("else");
+ }
+}
+"#,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ } else if true {
+ println!("foo");
+ }
+ println!("else");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else_if_nested_middle() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ } else if true {$0
+ println!("foo");
+ } else {
+ println!("else");
+ }
+}
+"#,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ }
+ println!("foo");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_bad_cursor_position() {
+ check_assist_not_applicable(
+ unwrap_block,
+ r#"
+fn main() {
+ bar();$0
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_for() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ for i in 0..5 {$0
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_in_for() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ for i in 0..5 {
+ if true {$0
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ for i in 0..5 {
+ foo();
+
+ // comment
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_loop() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ loop {$0
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_while() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ while true {$0
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_match_arm() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ match rel_path {
+ Ok(rel_path) => {$0
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ }
+ Err(_) => None,
+ }
+}
+"#,
+ r#"
+fn main() {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_in_while_bad_cursor_position() {
+ check_assist_not_applicable(
+ unwrap_block,
+ r#"
+fn main() {
+ while true {
+ if true {
+ foo();$0
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_single_line() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ {$0 0 }
+}
+"#,
+ r#"
+fn main() {
+ 0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_nested_block() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ $0{
+ {
+ 3
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ {
+ 3
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_single_line() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ {$0 { println!("foo"); } }
+}
+"#,
+ r#"
+fn main() {
+ { println!("foo"); }
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ {$0 { 0 } }
+}
+"#,
+ r#"
+fn main() {
+ { 0 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_single_line() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ if true {$0 /* foo */ foo() } else { bar() /* bar */}
+}
+"#,
+ r#"
+fn main() {
+ /* foo */ foo()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn if_single_statement() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ if true {$0
+ return 3;
+ }
+}
+"#,
+ r#"
+fn main() {
+ return 3;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_statements() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() -> i32 {
+ if 2 > 1 {$0
+ let a = 5;
+ return 3;
+ }
+ 5
+}
+"#,
+ r#"
+fn main() -> i32 {
+ let a = 5;
+ return 3;
+ 5
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
new file mode 100644
index 000000000..9ef4ae047
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
@@ -0,0 +1,1020 @@
+use ide_db::{
+ famous_defs::FamousDefs,
+ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, Expr},
+ match_ast, AstNode, TextRange, TextSize,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: unwrap_result_return_type
+//
+// Unwrap the function's return type.
+//
+// ```
+// # //- minicore: result
+// fn foo() -> Result<i32>$0 { Ok(42i32) }
+// ```
+// ->
+// ```
+// fn foo() -> i32 { 42i32 }
+// ```
+pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
+ let parent = ret_type.syntax().parent()?;
+ let body = match_ast! {
+ match parent {
+ ast::Fn(func) => func.body()?,
+ ast::ClosureExpr(closure) => match closure.body()? {
+ Expr::BlockExpr(block) => block,
+ // closures require a block when a return type is specified
+ _ => return None,
+ },
+ _ => return None,
+ }
+ };
+
+ let type_ref = &ret_type.ty()?;
+ let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
+ let result_enum =
+ FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;
+
+ if !matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == result_enum) {
+ return None;
+ }
+
+ acc.add(
+ AssistId("unwrap_result_return_type", AssistKind::RefactorRewrite),
+ "Unwrap Result return type",
+ type_ref.syntax().text_range(),
+ |builder| {
+ let body = ast::Expr::BlockExpr(body);
+
+ let mut exprs_to_unwrap = Vec::new();
+ let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e);
+ walk_expr(&body, &mut |expr| {
+ if let Expr::ReturnExpr(ret_expr) = expr {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, tail_cb);
+ }
+ }
+ });
+ for_each_tail_expr(&body, tail_cb);
+
+ let mut is_unit_type = false;
+ if let Some((_, inner_type)) = type_ref.to_string().split_once('<') {
+ let inner_type = match inner_type.split_once(',') {
+ Some((success_inner_type, _)) => success_inner_type,
+ None => inner_type,
+ };
+ let new_ret_type = inner_type.strip_suffix('>').unwrap_or(inner_type);
+ if new_ret_type == "()" {
+ is_unit_type = true;
+ let text_range = TextRange::new(
+ ret_type.syntax().text_range().start(),
+ ret_type.syntax().text_range().end() + TextSize::from(1u32),
+ );
+ builder.delete(text_range)
+ } else {
+ builder.replace(
+ type_ref.syntax().text_range(),
+ inner_type.strip_suffix('>').unwrap_or(inner_type),
+ )
+ }
+ }
+
+ for ret_expr_arg in exprs_to_unwrap {
+ let ret_expr_str = ret_expr_arg.to_string();
+ if ret_expr_str.starts_with("Ok(") || ret_expr_str.starts_with("Err(") {
+ let arg_list = ret_expr_arg.syntax().children().find_map(ast::ArgList::cast);
+ if let Some(arg_list) = arg_list {
+ if is_unit_type {
+ match ret_expr_arg.syntax().prev_sibling_or_token() {
+ // Useful to delete the entire line without leaving trailing whitespaces
+ Some(whitespace) => {
+ let new_range = TextRange::new(
+ whitespace.text_range().start(),
+ ret_expr_arg.syntax().text_range().end(),
+ );
+ builder.delete(new_range);
+ }
+ None => {
+ builder.delete(ret_expr_arg.syntax().text_range());
+ }
+ }
+ } else {
+ builder.replace(
+ ret_expr_arg.syntax().text_range(),
+ arg_list.args().join(", "),
+ );
+ }
+ }
+ }
+ }
+ },
+ )
+}
+
+fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
+ match e {
+ Expr::BreakExpr(break_expr) => {
+ if let Some(break_expr_arg) = break_expr.expr() {
+ for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
+ }
+ }
+ Expr::ReturnExpr(ret_expr) => {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
+ }
+ }
+ e => acc.push(e.clone()),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn unwrap_result_return_type_simple() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i3$02> {
+ let test = "test";
+ return Ok(42i32);
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_unit_type() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), Box<dyn Error$0>> {
+ Ok(())
+}
+"#,
+ r#"
+fn foo() {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_ending_with_parent() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32, Box<dyn Error$0>> {
+ if true {
+ Ok(42)
+ } else {
+ foo()
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ if true {
+ 42
+ } else {
+ foo()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_return_type_break_split_tail() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i3$02, String> {
+ loop {
+ break if true {
+ Ok(1)
+ } else {
+ Ok(0)
+ };
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ loop {
+ break if true {
+ 1
+ } else {
+ 0
+ };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_closure() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> Result<i32$0> {
+ let test = "test";
+ return Ok(42i32);
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> i32 {
+ let test = "test";
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_return_type_bad_cursor() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> i32 {
+ let test = "test";$0
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_return_type_bad_cursor_closure() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32 {
+ let test = "test";$0
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_closure_non_block() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() { || -> i$032 3; }
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_return_type_already_not_result_std() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_return_type_already_not_result_closure() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32$0 {
+ let test = "test";
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() ->$0 Result<i32> {
+ let test = "test";
+ Ok(42i32)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let test = "test";
+ 42i32
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_closure() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || ->$0 Result<i32, String> {
+ let test = "test";
+ Ok(42i32)
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> i32 {
+ let test = "test";
+ 42i32
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_only() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> { Ok(42i32) }
+"#,
+ r#"
+fn foo() -> i32 { 42i32 }
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32>$0 {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(24i32)
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ if true {
+ 42i32
+ } else {
+ 24i32
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_without_block_closure() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> Result<i32, String>$0 {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(24i32)
+ }
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> i32 {
+ if true {
+ 42i32
+ } else {
+ 24i32
+ }
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_nested_if() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32>$0 {
+ if true {
+ if false {
+ Ok(1)
+ } else {
+ Ok(2)
+ }
+ } else {
+ Ok(24i32)
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ if true {
+ if false {
+ 1
+ } else {
+ 2
+ }
+ } else {
+ 24i32
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_await() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+async fn foo() -> Result<i$032> {
+ if true {
+ if false {
+ Ok(1.await)
+ } else {
+ Ok(2.await)
+ }
+ } else {
+ Ok(24i32.await)
+ }
+}
+"#,
+ r#"
+async fn foo() -> i32 {
+ if true {
+ if false {
+ 1.await
+ } else {
+ 2.await
+ }
+ } else {
+ 24i32.await
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_array() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<[i32; 3]$0> { Ok([1, 2, 3]) }
+"#,
+ r#"
+fn foo() -> [i32; 3] { [1, 2, 3] }
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_cast() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -$0> Result<i32> {
+ if true {
+ if false {
+ Ok(1 as i32)
+ } else {
+ Ok(2 as i32)
+ }
+ } else {
+ Ok(24 as i32)
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ if true {
+ if false {
+ 1 as i32
+ } else {
+ 2 as i32
+ }
+ } else {
+ 24 as i32
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like_match() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = 5;
+ match my_var {
+ 5 => Ok(42i32),
+ _ => Ok(24i32),
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ match my_var {
+ 5 => 42i32,
+ _ => 24i32,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_loop_with_tail() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = 5;
+ loop {
+ println!("test");
+ 5
+ }
+ Ok(my_var)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ loop {
+ println!("test");
+ 5
+ }
+ my_var
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_loop_in_let_stmt() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = let x = loop {
+ break 1;
+ };
+ Ok(my_var)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = let x = loop {
+ break 1;
+ };
+ my_var
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like_match_return_expr() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32>$0 {
+ let my_var = 5;
+ let res = match my_var {
+ 5 => 42i32,
+ _ => return Ok(24i32),
+ };
+ Ok(res)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ let res = match my_var {
+ 5 => 42i32,
+ _ => return 24i32,
+ };
+ res
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = 5;
+ let res = if my_var == 5 {
+ 42i32
+ } else {
+ return Ok(24i32);
+ };
+ Ok(res)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ let res = if my_var == 5 {
+ 42i32
+ } else {
+ return 24i32;
+ };
+ res
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like_match_deeper() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = 5;
+ match my_var {
+ 5 => {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(25i32)
+ }
+ },
+ _ => {
+ let test = "test";
+ if test == "test" {
+ return Ok(bar());
+ }
+ Ok(53i32)
+ },
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ match my_var {
+ 5 => {
+ if true {
+ 42i32
+ } else {
+ 25i32
+ }
+ },
+ _ => {
+ let test = "test";
+ if test == "test" {
+ return bar();
+ }
+ 53i32
+ },
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like_early_return() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let test = "test";
+ if test == "test" {
+ return Ok(24i32);
+ }
+ Ok(53i32)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let test = "test";
+ if test == "test" {
+ return 24i32;
+ }
+ 53i32
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_closure() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ let true_closure = || { return true; };
+ if the_field < 5 {
+ let mut i = 0;
+ if true_closure() {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ let true_closure = || { return true; };
+ if the_field < 5 {
+ let mut i = 0;
+ if true_closure() {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ the_field
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ let true_closure = || {
+ return true;
+ };
+ if the_field < 5 {
+ let mut i = 0;
+
+
+ if true_closure() {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ let t = None;
+
+ Ok(t.unwrap_or_else(|| the_field))
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ let true_closure = || {
+ return true;
+ };
+ if the_field < 5 {
+ let mut i = 0;
+
+
+ if true_closure() {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ let t = None;
+
+ t.unwrap_or_else(|| the_field)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_weird_forms() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let test = "test";
+ if test == "test" {
+ return Ok(24i32);
+ }
+ let mut i = 0;
+ loop {
+ if i == 1 {
+ break Ok(55);
+ }
+ i += 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let test = "test";
+ if test == "test" {
+ return 24i32;
+ }
+ let mut i = 0;
+ loop {
+ if i == 1 {
+ break 55;
+ }
+ i += 1;
+ }
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ if the_field < 5 {
+ let mut i = 0;
+ loop {
+ if i > 5 {
+ return Ok(55u32);
+ }
+ i += 3;
+ }
+ match i {
+ 5 => return Ok(99),
+ _ => return Ok(0),
+ };
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ loop {
+ if i > 5 {
+ return 55u32;
+ }
+ i += 3;
+ }
+ match i {
+ 5 => return 99,
+ _ => return 0,
+ };
+ }
+ the_field
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ if the_field < 5 {
+ let mut i = 0;
+ match i {
+ 5 => return Ok(99),
+ _ => return Ok(0),
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ match i {
+ 5 => return 99,
+ _ => return 0,
+ }
+ }
+ the_field
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return Ok(99)
+ } else {
+ return Ok(0)
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return 99
+ } else {
+ return 0
+ }
+ }
+ the_field
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u3$02> {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ the_field
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
new file mode 100644
index 000000000..83446387d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
@@ -0,0 +1,980 @@
+use std::iter;
+
+use ide_db::{
+ famous_defs::FamousDefs,
+ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+};
+use syntax::{
+ ast::{self, make, Expr},
+ match_ast, AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: wrap_return_type_in_result
+//
+// Wrap the function's return type into Result.
+//
+// ```
+// # //- minicore: result
+// fn foo() -> i32$0 { 42i32 }
+// ```
+// ->
+// ```
+// fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
+// ```
+pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
+ let parent = ret_type.syntax().parent()?;
+ let body = match_ast! {
+ match parent {
+ ast::Fn(func) => func.body()?,
+ ast::ClosureExpr(closure) => match closure.body()? {
+ Expr::BlockExpr(block) => block,
+ // closures require a block when a return type is specified
+ _ => return None,
+ },
+ _ => return None,
+ }
+ };
+
+ let type_ref = &ret_type.ty()?;
+ let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
+ let result_enum =
+ FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;
+
+ if matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == result_enum) {
+ cov_mark::hit!(wrap_return_type_in_result_simple_return_type_already_result);
+ return None;
+ }
+
+ acc.add(
+ AssistId("wrap_return_type_in_result", AssistKind::RefactorRewrite),
+ "Wrap return type in Result",
+ type_ref.syntax().text_range(),
+ |builder| {
+ let body = ast::Expr::BlockExpr(body);
+
+ let mut exprs_to_wrap = Vec::new();
+ let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
+ walk_expr(&body, &mut |expr| {
+ if let Expr::ReturnExpr(ret_expr) = expr {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, tail_cb);
+ }
+ }
+ });
+ for_each_tail_expr(&body, tail_cb);
+
+ for ret_expr_arg in exprs_to_wrap {
+ let ok_wrapped = make::expr_call(
+ make::expr_path(make::ext::ident_path("Ok")),
+ make::arg_list(iter::once(ret_expr_arg.clone())),
+ );
+ builder.replace_ast(ret_expr_arg, ok_wrapped);
+ }
+
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snippet = format!("Result<{}, ${{0:_}}>", type_ref);
+ builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet)
+ }
+ None => builder
+ .replace(type_ref.syntax().text_range(), format!("Result<{}, _>", type_ref)),
+ }
+ },
+ )
+}
+
+fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
+ match e {
+ Expr::BreakExpr(break_expr) => {
+ if let Some(break_expr_arg) = break_expr.expr() {
+ for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
+ }
+ }
+ Expr::ReturnExpr(ret_expr) => {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
+ }
+ }
+ e => acc.push(e.clone()),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn wrap_return_type_in_result_simple() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i3$02 {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ return Ok(42i32);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_break_split_tail() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i3$02 {
+ loop {
+ break if true {
+ 1
+ } else {
+ 0
+ };
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ loop {
+ break if true {
+ Ok(1)
+ } else {
+ Ok(0)
+ };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_closure() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32$0 {
+ let test = "test";
+ return 42i32;
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> Result<i32, ${0:_}> {
+ let test = "test";
+ return Ok(42i32);
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_bad_cursor() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32 {
+ let test = "test";$0
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_bad_cursor_closure() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32 {
+ let test = "test";$0
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_closure_non_block() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() { || -> i$032 3; }
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_already_result_std() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> core::result::Result<i32$0, String> {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_already_result() {
+ cov_mark::check!(wrap_return_type_in_result_simple_return_type_already_result);
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0, String> {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_already_result_closure() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> Result<i32$0, String> {
+ let test = "test";
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_cursor() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> $0i32 {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ return Ok(42i32);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() ->$0 i32 {
+ let test = "test";
+ 42i32
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ Ok(42i32)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_closure() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || ->$0 i32 {
+ let test = "test";
+ 42i32
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> Result<i32, ${0:_}> {
+ let test = "test";
+ Ok(42i32)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_only() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 { 42i32 }
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ if true {
+ 42i32
+ } else {
+ 24i32
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(24i32)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_without_block_closure() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32$0 {
+ if true {
+ 42i32
+ } else {
+ 24i32
+ }
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> Result<i32, ${0:_}> {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(24i32)
+ }
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_nested_if() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ if true {
+ if false {
+ 1
+ } else {
+ 2
+ }
+ } else {
+ 24i32
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ if true {
+ if false {
+ Ok(1)
+ } else {
+ Ok(2)
+ }
+ } else {
+ Ok(24i32)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_await() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+async fn foo() -> i$032 {
+ if true {
+ if false {
+ 1.await
+ } else {
+ 2.await
+ }
+ } else {
+ 24i32.await
+ }
+}
+"#,
+ r#"
+async fn foo() -> Result<i32, ${0:_}> {
+ if true {
+ if false {
+ Ok(1.await)
+ } else {
+ Ok(2.await)
+ }
+ } else {
+ Ok(24i32.await)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_array() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> [i32;$0 3] { [1, 2, 3] }
+"#,
+ r#"
+fn foo() -> Result<[i32; 3], ${0:_}> { Ok([1, 2, 3]) }
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_cast() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -$0> i32 {
+ if true {
+ if false {
+ 1 as i32
+ } else {
+ 2 as i32
+ }
+ } else {
+ 24 as i32
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ if true {
+ if false {
+ Ok(1 as i32)
+ } else {
+ Ok(2 as i32)
+ }
+ } else {
+ Ok(24 as i32)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like_match() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ match my_var {
+ 5 => 42i32,
+ _ => 24i32,
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ match my_var {
+ 5 => Ok(42i32),
+ _ => Ok(24i32),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_loop_with_tail() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ loop {
+ println!("test");
+ 5
+ }
+ my_var
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ loop {
+ println!("test");
+ 5
+ }
+ Ok(my_var)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_loop_in_let_stmt() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = let x = loop {
+ break 1;
+ };
+ my_var
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = let x = loop {
+ break 1;
+ };
+ Ok(my_var)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like_match_return_expr() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ let res = match my_var {
+ 5 => 42i32,
+ _ => return 24i32,
+ };
+ res
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ let res = match my_var {
+ 5 => 42i32,
+ _ => return Ok(24i32),
+ };
+ Ok(res)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ let res = if my_var == 5 {
+ 42i32
+ } else {
+ return 24i32;
+ };
+ res
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ let res = if my_var == 5 {
+ 42i32
+ } else {
+ return Ok(24i32);
+ };
+ Ok(res)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like_match_deeper() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ match my_var {
+ 5 => {
+ if true {
+ 42i32
+ } else {
+ 25i32
+ }
+ },
+ _ => {
+ let test = "test";
+ if test == "test" {
+ return bar();
+ }
+ 53i32
+ },
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ match my_var {
+ 5 => {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(25i32)
+ }
+ },
+ _ => {
+ let test = "test";
+ if test == "test" {
+ return Ok(bar());
+ }
+ Ok(53i32)
+ },
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like_early_return() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i$032 {
+ let test = "test";
+ if test == "test" {
+ return 24i32;
+ }
+ 53i32
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ if test == "test" {
+ return Ok(24i32);
+ }
+ Ok(53i32)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_closure() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) ->$0 u32 {
+ let true_closure = || { return true; };
+ if the_field < 5 {
+ let mut i = 0;
+ if true_closure() {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ let true_closure = || { return true; };
+ if the_field < 5 {
+ let mut i = 0;
+ if true_closure() {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> u32$0 {
+ let true_closure = || {
+ return true;
+ };
+ if the_field < 5 {
+ let mut i = 0;
+
+
+ if true_closure() {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ let t = None;
+
+ t.unwrap_or_else(|| the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ let true_closure = || {
+ return true;
+ };
+ if the_field < 5 {
+ let mut i = 0;
+
+
+ if true_closure() {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ let t = None;
+
+ Ok(t.unwrap_or_else(|| the_field))
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_weird_forms() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let test = "test";
+ if test == "test" {
+ return 24i32;
+ }
+ let mut i = 0;
+ loop {
+ if i == 1 {
+ break 55;
+ }
+ i += 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ if test == "test" {
+ return Ok(24i32);
+ }
+ let mut i = 0;
+ loop {
+ if i == 1 {
+ break Ok(55);
+ }
+ i += 1;
+ }
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> u32$0 {
+ if the_field < 5 {
+ let mut i = 0;
+ loop {
+ if i > 5 {
+ return 55u32;
+ }
+ i += 3;
+ }
+ match i {
+ 5 => return 99,
+ _ => return 0,
+ };
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ if the_field < 5 {
+ let mut i = 0;
+ loop {
+ if i > 5 {
+ return Ok(55u32);
+ }
+ i += 3;
+ }
+ match i {
+ 5 => return Ok(99),
+ _ => return Ok(0),
+ };
+ }
+ Ok(the_field)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> u3$02 {
+ if the_field < 5 {
+ let mut i = 0;
+ match i {
+ 5 => return 99,
+ _ => return 0,
+ }
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ if the_field < 5 {
+ let mut i = 0;
+ match i {
+ 5 => return Ok(99),
+ _ => return Ok(0),
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> u32$0 {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return 99
+ } else {
+ return 0
+ }
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return Ok(99)
+ } else {
+ return Ok(0)
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> $0u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
new file mode 100644
index 000000000..fe87aa15f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -0,0 +1,309 @@
+//! `assists` crate provides a bunch of code assists, also known as code actions
+//! (in LSP) or intentions (in IntelliJ).
+//!
+//! An assist is a micro-refactoring, which is automatically activated in
+//! certain context. For example, if the cursor is over `,`, a "swap `,`" assist
+//! becomes available.
+//!
+//! ## Assists Guidelines
+//!
+//! Assists are the main mechanism to deliver advanced IDE features to the user,
+//! so we should pay extra attention to the UX.
+//!
+//! The power of assists comes from their context-awareness. The main problem
+//! with IDE features is that there are a lot of them, and it's hard to teach
+//! the user what's available. Assists solve this problem nicely: 💡 signifies
+//! that *something* is possible, and clicking on it reveals a *short* list of
+//! actions. Contrast it with Emacs `M-x`, which just spits an infinite list of
+//! all the features.
+//!
+//! Here are some considerations when creating a new assist:
+//!
+//! * It's good to preserve semantics, and it's good to keep the code compiling,
+//! but it isn't necessary. Example: "flip binary operation" might change
+//! semantics.
+//! * Assist shouldn't necessary make the code "better". A lot of assist come in
+//! pairs: "if let <-> match".
+//! * Assists should have as narrow scope as possible. Each new assists greatly
+//! improves UX for cases where the user actually invokes it, but it makes UX
+//! worse for every case where the user clicks 💡 to invoke some *other*
+//! assist. So, a rarely useful assist which is always applicable can be a net
+//! negative.
+//! * Rarely useful actions are tricky. Sometimes there are features which are
+//! clearly useful to some users, but are just noise most of the time. We
+//! don't have a good solution here, our current approach is to make this
+//! functionality available only if assist is applicable to the whole
+//! selection. Example: `sort_items` sorts items alphabetically. Naively, it
+//! should be available more or less everywhere, which isn't useful. So
+//! instead we only show it if the user *selects* the items they want to sort.
+//! * Consider grouping related assists together (see [`Assists::add_group`]).
+//! * Make assists robust. If the assist depends on results of type-inference too
+//! much, it might only fire in fully-correct code. This makes assist less
+//! useful and (worse) less predictable. The user should have a clear
+//! intuition when each particular assist is available.
+//! * Make small assists, which compose. Example: rather than auto-importing
+//! enums in `add_missing_match_arms`, we use fully-qualified names. There's a
+//! separate assist to shorten a fully-qualified name.
+//! * Distinguish between assists and fixits for diagnostics. Internally, fixits
+//! and assists are equivalent. They have the same "show a list + invoke a
+//! single element" workflow, and both use [`Assist`] data structure. The main
+//! difference is in the UX: while 💡 looks only at the cursor position,
+//! diagnostics squigglies and fixits are calculated for the whole file and
+//! are presented to the user eagerly. So, diagnostics should be fixable
+//! errors, while assists can be just suggestions for an alternative way to do
+//! something. If something *could* be a diagnostic, it should be a
+//! diagnostic. Conversely, it might be valuable to turn a diagnostic with a
+//! lot of false errors into an assist.
+//!
+//! See also this post:
+//! <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod assist_config;
+mod assist_context;
+#[cfg(test)]
+mod tests;
+pub mod utils;
+
+use hir::Semantics;
+use ide_db::{base_db::FileRange, RootDatabase};
+use syntax::TextRange;
+
+pub(crate) use crate::assist_context::{AssistContext, Assists};
+
+pub use assist_config::AssistConfig;
+pub use ide_db::assists::{
+ Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel, SingleResolve,
+};
+
+/// Return all the assists applicable at the given position.
+///
+// NOTE: We don't have a `Feature: ` section for assists, they are special-cased
+// in the manual.
+pub fn assists(
+ db: &RootDatabase,
+ config: &AssistConfig,
+ resolve: AssistResolveStrategy,
+ range: FileRange,
+) -> Vec<Assist> {
+ let sema = Semantics::new(db);
+ let ctx = AssistContext::new(sema, config, range);
+ let mut acc = Assists::new(&ctx, resolve);
+ handlers::all().iter().for_each(|handler| {
+ handler(&mut acc, &ctx);
+ });
+ acc.finish()
+}
+
+mod handlers {
+ use crate::{AssistContext, Assists};
+
+ pub(crate) type Handler = fn(&mut Assists, &AssistContext<'_>) -> Option<()>;
+
+ mod add_explicit_type;
+ mod add_label_to_loop;
+ mod add_lifetime_to_type;
+ mod add_missing_impl_members;
+ mod add_turbo_fish;
+ mod apply_demorgan;
+ mod auto_import;
+ mod change_visibility;
+ mod convert_bool_then;
+ mod convert_comment_block;
+ mod convert_integer_literal;
+ mod convert_into_to_from;
+ mod convert_iter_for_each_to_for;
+ mod convert_let_else_to_match;
+ mod convert_tuple_struct_to_named_struct;
+ mod convert_to_guarded_return;
+ mod convert_while_to_loop;
+ mod destructure_tuple_binding;
+ mod expand_glob_import;
+ mod extract_function;
+ mod extract_module;
+ mod extract_struct_from_enum_variant;
+ mod extract_type_alias;
+ mod extract_variable;
+ mod add_missing_match_arms;
+ mod fix_visibility;
+ mod flip_binexpr;
+ mod flip_comma;
+ mod flip_trait_bound;
+ mod generate_constant;
+ mod generate_default_from_enum_variant;
+ mod generate_default_from_new;
+ mod generate_deref;
+ mod generate_derive;
+ mod generate_documentation_template;
+ mod generate_enum_is_method;
+ mod generate_enum_projection_method;
+ mod generate_enum_variant;
+ mod generate_from_impl_for_enum;
+ mod generate_function;
+ mod generate_getter;
+ mod generate_impl;
+ mod generate_is_empty_from_len;
+ mod generate_new;
+ mod generate_setter;
+ mod generate_delegate_methods;
+ mod add_return_type;
+ mod inline_call;
+ mod inline_local_variable;
+ mod inline_type_alias;
+ mod introduce_named_lifetime;
+ mod invert_if;
+ mod merge_imports;
+ mod merge_match_arms;
+ mod move_bounds;
+ mod move_guard;
+ mod move_module_to_file;
+ mod move_to_mod_rs;
+ mod move_from_mod_rs;
+ mod number_representation;
+ mod promote_local_to_const;
+ mod pull_assignment_up;
+ mod qualify_path;
+ mod qualify_method_call;
+ mod raw_string;
+ mod remove_dbg;
+ mod remove_mut;
+ mod remove_unused_param;
+ mod reorder_fields;
+ mod reorder_impl_items;
+ mod replace_try_expr_with_match;
+ mod replace_derive_with_manual_impl;
+ mod replace_if_let_with_match;
+ mod introduce_named_generic;
+ mod replace_let_with_if_let;
+ mod replace_qualified_name_with_use;
+ mod replace_string_with_char;
+ mod replace_turbofish_with_explicit_type;
+ mod split_import;
+ mod sort_items;
+ mod toggle_ignore;
+ mod unmerge_use;
+ mod unnecessary_async;
+ mod unwrap_block;
+ mod unwrap_result_return_type;
+ mod wrap_return_type_in_result;
+
+ pub(crate) fn all() -> &'static [Handler] {
+ &[
+ // These are alphabetic for the foolish consistency
+ add_explicit_type::add_explicit_type,
+ add_label_to_loop::add_label_to_loop,
+ add_missing_match_arms::add_missing_match_arms,
+ add_lifetime_to_type::add_lifetime_to_type,
+ add_return_type::add_return_type,
+ add_turbo_fish::add_turbo_fish,
+ apply_demorgan::apply_demorgan,
+ auto_import::auto_import,
+ change_visibility::change_visibility,
+ convert_bool_then::convert_bool_then_to_if,
+ convert_bool_then::convert_if_to_bool_then,
+ convert_comment_block::convert_comment_block,
+ convert_integer_literal::convert_integer_literal,
+ convert_into_to_from::convert_into_to_from,
+ convert_iter_for_each_to_for::convert_iter_for_each_to_for,
+ convert_iter_for_each_to_for::convert_for_loop_with_for_each,
+ convert_let_else_to_match::convert_let_else_to_match,
+ convert_to_guarded_return::convert_to_guarded_return,
+ convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct,
+ convert_while_to_loop::convert_while_to_loop,
+ destructure_tuple_binding::destructure_tuple_binding,
+ expand_glob_import::expand_glob_import,
+ extract_struct_from_enum_variant::extract_struct_from_enum_variant,
+ extract_type_alias::extract_type_alias,
+ fix_visibility::fix_visibility,
+ flip_binexpr::flip_binexpr,
+ flip_comma::flip_comma,
+ flip_trait_bound::flip_trait_bound,
+ generate_constant::generate_constant,
+ generate_default_from_enum_variant::generate_default_from_enum_variant,
+ generate_default_from_new::generate_default_from_new,
+ generate_derive::generate_derive,
+ generate_documentation_template::generate_documentation_template,
+ generate_documentation_template::generate_doc_example,
+ generate_enum_is_method::generate_enum_is_method,
+ generate_enum_projection_method::generate_enum_as_method,
+ generate_enum_projection_method::generate_enum_try_into_method,
+ generate_enum_variant::generate_enum_variant,
+ generate_from_impl_for_enum::generate_from_impl_for_enum,
+ generate_function::generate_function,
+ generate_impl::generate_impl,
+ generate_is_empty_from_len::generate_is_empty_from_len,
+ generate_new::generate_new,
+ inline_call::inline_call,
+ inline_call::inline_into_callers,
+ inline_local_variable::inline_local_variable,
+ inline_type_alias::inline_type_alias,
+ introduce_named_generic::introduce_named_generic,
+ introduce_named_lifetime::introduce_named_lifetime,
+ invert_if::invert_if,
+ merge_imports::merge_imports,
+ merge_match_arms::merge_match_arms,
+ move_bounds::move_bounds_to_where_clause,
+ move_guard::move_arm_cond_to_match_guard,
+ move_guard::move_guard_to_arm_body,
+ move_module_to_file::move_module_to_file,
+ move_to_mod_rs::move_to_mod_rs,
+ move_from_mod_rs::move_from_mod_rs,
+ number_representation::reformat_number_literal,
+ pull_assignment_up::pull_assignment_up,
+ promote_local_to_const::promote_local_to_const,
+ qualify_path::qualify_path,
+ qualify_method_call::qualify_method_call,
+ raw_string::add_hash,
+ raw_string::make_usual_string,
+ raw_string::remove_hash,
+ remove_dbg::remove_dbg,
+ remove_mut::remove_mut,
+ remove_unused_param::remove_unused_param,
+ reorder_fields::reorder_fields,
+ reorder_impl_items::reorder_impl_items,
+ replace_try_expr_with_match::replace_try_expr_with_match,
+ replace_derive_with_manual_impl::replace_derive_with_manual_impl,
+ replace_if_let_with_match::replace_if_let_with_match,
+ replace_if_let_with_match::replace_match_with_if_let,
+ replace_let_with_if_let::replace_let_with_if_let,
+ replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type,
+ replace_qualified_name_with_use::replace_qualified_name_with_use,
+ sort_items::sort_items,
+ split_import::split_import,
+ toggle_ignore::toggle_ignore,
+ unmerge_use::unmerge_use,
+ unnecessary_async::unnecessary_async,
+ unwrap_block::unwrap_block,
+ unwrap_result_return_type::unwrap_result_return_type,
+ wrap_return_type_in_result::wrap_return_type_in_result,
+ // These are manually sorted for better priorities. By default,
+ // priority is determined by the size of the target range (smaller
+ // target wins). If the ranges are equal, position in this list is
+ // used as a tie-breaker.
+ add_missing_impl_members::add_missing_impl_members,
+ add_missing_impl_members::add_missing_default_members,
+ //
+ replace_string_with_char::replace_string_with_char,
+ replace_string_with_char::replace_char_with_string,
+ raw_string::make_raw_string,
+ //
+ extract_variable::extract_variable,
+ extract_function::extract_function,
+ extract_module::extract_module,
+ //
+ generate_getter::generate_getter,
+ generate_getter::generate_getter_mut,
+ generate_setter::generate_setter,
+ generate_delegate_methods::generate_delegate_methods,
+ generate_deref::generate_deref,
+ // Are you sure you want to add new assist here, and not to the
+ // sorted list above?
+ ]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
new file mode 100644
index 000000000..9cd66c6b3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -0,0 +1,558 @@
+mod generated;
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen;
+
+use expect_test::expect;
+use hir::{db::DefDatabase, Semantics};
+use ide_db::{
+ base_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt},
+ imports::insert_use::{ImportGranularity, InsertUseConfig},
+ source_change::FileSystemEdit,
+ RootDatabase, SnippetCap,
+};
+use stdx::{format_to, trim_indent};
+use syntax::TextRange;
+use test_utils::{assert_eq_text, extract_offset};
+
+use crate::{
+ assists, handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind,
+ AssistResolveStrategy, Assists, SingleResolve,
+};
+
+pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
+ snippet_cap: SnippetCap::new(true),
+ allowed: None,
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::Plain,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+};
+
+pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
+ RootDatabase::with_single_file(text)
+}
+
+#[track_caller]
+pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) {
+ let ra_fixture_after = trim_indent(ra_fixture_after);
+ check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after), None);
+}
+
+// There is no way to choose what assist within a group you want to test against,
+// so this is here to allow you choose.
+pub(crate) fn check_assist_by_label(
+ assist: Handler,
+ ra_fixture_before: &str,
+ ra_fixture_after: &str,
+ label: &str,
+) {
+ let ra_fixture_after = trim_indent(ra_fixture_after);
+ check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after), Some(label));
+}
+
+// FIXME: instead of having a separate function here, maybe use
+// `extract_ranges` and mark the target as `<target> </target>` in the
+// fixture?
+#[track_caller]
+pub(crate) fn check_assist_target(assist: Handler, ra_fixture: &str, target: &str) {
+ check(assist, ra_fixture, ExpectedResult::Target(target), None);
+}
+
+#[track_caller]
+pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) {
+ check(assist, ra_fixture, ExpectedResult::NotApplicable, None);
+}
+
+/// Check assist in unresolved state. Useful to check assists for lazy computation.
+#[track_caller]
+pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
+ check(assist, ra_fixture, ExpectedResult::Unresolved, None);
+}
+
+#[track_caller]
+fn check_doc_test(assist_id: &str, before: &str, after: &str) {
+ let after = trim_indent(after);
+ let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
+ let before = db.file_text(file_id).to_string();
+ let frange = FileRange { file_id, range: selection.into() };
+
+ let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange)
+ .into_iter()
+ .find(|assist| assist.id.0 == assist_id)
+ .unwrap_or_else(|| {
+ panic!(
+ "\n\nAssist is not applicable: {}\nAvailable assists: {}",
+ assist_id,
+ assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange)
+ .into_iter()
+ .map(|assist| assist.id.0)
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ });
+
+ let actual = {
+ let source_change =
+ assist.source_change.expect("Assist did not contain any source changes");
+ let mut actual = before;
+ if let Some(source_file_edit) = source_change.get_source_edit(file_id) {
+ source_file_edit.apply(&mut actual);
+ }
+ actual
+ };
+ assert_eq_text!(&after, &actual);
+}
+
+enum ExpectedResult<'a> {
+ NotApplicable,
+ Unresolved,
+ After(&'a str),
+ Target(&'a str),
+}
+
+#[track_caller]
+fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_label: Option<&str>) {
+ let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
+ db.set_enable_proc_attr_macros(true);
+ let text_without_caret = db.file_text(file_with_caret_id).to_string();
+
+ let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
+
+ let sema = Semantics::new(&db);
+ let config = TEST_CONFIG;
+ let ctx = AssistContext::new(sema, &config, frange);
+ let resolve = match expected {
+ ExpectedResult::Unresolved => AssistResolveStrategy::None,
+ _ => AssistResolveStrategy::All,
+ };
+ let mut acc = Assists::new(&ctx, resolve);
+ handler(&mut acc, &ctx);
+ let mut res = acc.finish();
+
+ let assist = match assist_label {
+ Some(label) => res.into_iter().find(|resolved| resolved.label == label),
+ None => res.pop(),
+ };
+
+ match (assist, expected) {
+ (Some(assist), ExpectedResult::After(after)) => {
+ let source_change =
+ assist.source_change.expect("Assist did not contain any source changes");
+ let skip_header = source_change.source_file_edits.len() == 1
+ && source_change.file_system_edits.len() == 0;
+
+ let mut buf = String::new();
+ for (file_id, edit) in source_change.source_file_edits {
+ let mut text = db.file_text(file_id).as_ref().to_owned();
+ edit.apply(&mut text);
+ if !skip_header {
+ let sr = db.file_source_root(file_id);
+ let sr = db.source_root(sr);
+ let path = sr.path_for_file(&file_id).unwrap();
+ format_to!(buf, "//- {}\n", path)
+ }
+ buf.push_str(&text);
+ }
+
+ for file_system_edit in source_change.file_system_edits {
+ let (dst, contents) = match file_system_edit {
+ FileSystemEdit::CreateFile { dst, initial_contents } => (dst, initial_contents),
+ FileSystemEdit::MoveFile { src, dst } => {
+ (dst, db.file_text(src).as_ref().to_owned())
+ }
+ FileSystemEdit::MoveDir { src, src_id, dst } => {
+ // temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet.
+ (dst, format!("{:?}\n{:?}", src_id, src))
+ }
+ };
+ let sr = db.file_source_root(dst.anchor);
+ let sr = db.source_root(sr);
+ let mut base = sr.path_for_file(&dst.anchor).unwrap().clone();
+ base.pop();
+ let created_file_path = base.join(&dst.path).unwrap();
+ format_to!(buf, "//- {}\n", created_file_path);
+ buf.push_str(&contents);
+ }
+
+ assert_eq_text!(after, &buf);
+ }
+ (Some(assist), ExpectedResult::Target(target)) => {
+ let range = assist.target;
+ assert_eq_text!(&text_without_caret[range], target);
+ }
+ (Some(assist), ExpectedResult::Unresolved) => assert!(
+ assist.source_change.is_none(),
+ "unresolved assist should not contain source changes"
+ ),
+ (Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"),
+ (
+ None,
+ ExpectedResult::After(_) | ExpectedResult::Target(_) | ExpectedResult::Unresolved,
+ ) => {
+ panic!("code action is not applicable")
+ }
+ (None, ExpectedResult::NotApplicable) => (),
+ };
+}
+
+fn labels(assists: &[Assist]) -> String {
+ let mut labels = assists
+ .iter()
+ .map(|assist| {
+ let mut label = match &assist.group {
+ Some(g) => g.0.clone(),
+ None => assist.label.to_string(),
+ };
+ label.push('\n');
+ label
+ })
+ .collect::<Vec<_>>();
+ labels.dedup();
+ labels.into_iter().collect::<String>()
+}
+
+#[test]
+fn assist_order_field_struct() {
+ let before = "struct Foo { $0bar: u32 }";
+ let (before_cursor_pos, before) = extract_offset(before);
+ let (db, file_id) = with_single_file(&before);
+ let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) };
+ let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
+ let mut assists = assists.iter();
+
+ assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)");
+ assert_eq!(assists.next().expect("expected assist").label, "Generate a getter method");
+ assert_eq!(assists.next().expect("expected assist").label, "Generate a mut getter method");
+ assert_eq!(assists.next().expect("expected assist").label, "Generate a setter method");
+ assert_eq!(assists.next().expect("expected assist").label, "Add `#[derive]`");
+}
+
+#[test]
+fn assist_order_if_expr() {
+ let (db, frange) = RootDatabase::with_range(
+ r#"
+pub fn test_some_range(a: int) -> bool {
+ if let 2..6 = $05$0 {
+ true
+ } else {
+ false
+ }
+}
+"#,
+ );
+
+ let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
+ let expected = labels(&assists);
+
+ expect![[r#"
+ Convert integer base
+ Extract into variable
+ Extract into function
+ Replace if let with match
+ "#]]
+ .assert_eq(&expected);
+}
+
+#[test]
+fn assist_filter_works() {
+ let (db, frange) = RootDatabase::with_range(
+ r#"
+pub fn test_some_range(a: int) -> bool {
+ if let 2..6 = $05$0 {
+ true
+ } else {
+ false
+ }
+}
+"#,
+ );
+ {
+ let mut cfg = TEST_CONFIG;
+ cfg.allowed = Some(vec![AssistKind::Refactor]);
+
+ let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
+ let expected = labels(&assists);
+
+ expect![[r#"
+ Convert integer base
+ Extract into variable
+ Extract into function
+ Replace if let with match
+ "#]]
+ .assert_eq(&expected);
+ }
+
+ {
+ let mut cfg = TEST_CONFIG;
+ cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
+ let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
+ let expected = labels(&assists);
+
+ expect![[r#"
+ Extract into variable
+ Extract into function
+ "#]]
+ .assert_eq(&expected);
+ }
+
+ {
+ let mut cfg = TEST_CONFIG;
+ cfg.allowed = Some(vec![AssistKind::QuickFix]);
+ let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
+ let expected = labels(&assists);
+
+ expect![[r#""#]].assert_eq(&expected);
+ }
+}
+
+#[test]
+fn various_resolve_strategies() {
+ let (db, frange) = RootDatabase::with_range(
+ r#"
+pub fn test_some_range(a: int) -> bool {
+ if let 2..6 = $05$0 {
+ true
+ } else {
+ false
+ }
+}
+"#,
+ );
+
+ let mut cfg = TEST_CONFIG;
+ cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
+
+ {
+ let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let extract_into_variable_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_variable",
+ RefactorExtract,
+ ),
+ label: "Extract into variable",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_variable_assist);
+
+ let extract_into_function_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_function",
+ RefactorExtract,
+ ),
+ label: "Extract into function",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_function_assist);
+ }
+
+ {
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::Single(SingleResolve {
+ assist_id: "SOMETHING_MISMATCHING".to_string(),
+ assist_kind: AssistKind::RefactorExtract,
+ }),
+ frange,
+ );
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let extract_into_variable_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_variable",
+ RefactorExtract,
+ ),
+ label: "Extract into variable",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_variable_assist);
+
+ let extract_into_function_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_function",
+ RefactorExtract,
+ ),
+ label: "Extract into function",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_function_assist);
+ }
+
+ {
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::Single(SingleResolve {
+ assist_id: "extract_variable".to_string(),
+ assist_kind: AssistKind::RefactorExtract,
+ }),
+ frange,
+ );
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let extract_into_variable_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_variable",
+ RefactorExtract,
+ ),
+ label: "Extract into variable",
+ group: None,
+ target: 59..60,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "let $0var_name = 5;\n ",
+ delete: 45..45,
+ },
+ Indel {
+ insert: "var_name",
+ delete: 59..60,
+ },
+ ],
+ },
+ },
+ file_system_edits: [],
+ is_snippet: true,
+ },
+ ),
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_variable_assist);
+
+ let extract_into_function_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_function",
+ RefactorExtract,
+ ),
+ label: "Extract into function",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_function_assist);
+ }
+
+ {
+ let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange);
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let extract_into_variable_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_variable",
+ RefactorExtract,
+ ),
+ label: "Extract into variable",
+ group: None,
+ target: 59..60,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "let $0var_name = 5;\n ",
+ delete: 45..45,
+ },
+ Indel {
+ insert: "var_name",
+ delete: 59..60,
+ },
+ ],
+ },
+ },
+ file_system_edits: [],
+ is_snippet: true,
+ },
+ ),
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_variable_assist);
+
+ let extract_into_function_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_function",
+ RefactorExtract,
+ ),
+ label: "Extract into function",
+ group: None,
+ target: 59..60,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "fun_name()",
+ delete: 59..60,
+ },
+ Indel {
+ insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
+ delete: 110..110,
+ },
+ ],
+ },
+ },
+ file_system_edits: [],
+ is_snippet: true,
+ },
+ ),
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_function_assist);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
new file mode 100644
index 000000000..6eaab48a3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -0,0 +1,2259 @@
+//! Generated by `sourcegen_assists_docs`, do not edit by hand.
+
+use super::check_doc_test;
+
+#[test]
+fn doctest_add_explicit_type() {
+ check_doc_test(
+ "add_explicit_type",
+ r#####"
+fn main() {
+ let x$0 = 92;
+}
+"#####,
+ r#####"
+fn main() {
+ let x: i32 = 92;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_hash() {
+ check_doc_test(
+ "add_hash",
+ r#####"
+fn main() {
+ r#"Hello,$0 World!"#;
+}
+"#####,
+ r#####"
+fn main() {
+ r##"Hello, World!"##;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_impl_default_members() {
+ check_doc_test(
+ "add_impl_default_members",
+ r#####"
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = ();
+ fn foo(&self) {}$0
+}
+"#####,
+ r#####"
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = ();
+ fn foo(&self) {}
+
+ $0fn bar(&self) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_impl_missing_members() {
+ check_doc_test(
+ "add_impl_missing_members",
+ r#####"
+trait Trait<T> {
+ type X;
+ fn foo(&self) -> T;
+ fn bar(&self) {}
+}
+
+impl Trait<u32> for () {$0
+
+}
+"#####,
+ r#####"
+trait Trait<T> {
+ type X;
+ fn foo(&self) -> T;
+ fn bar(&self) {}
+}
+
+impl Trait<u32> for () {
+ $0type X;
+
+ fn foo(&self) -> u32 {
+ todo!()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_label_to_loop() {
+ check_doc_test(
+ "add_label_to_loop",
+ r#####"
+fn main() {
+ loop$0 {
+ break;
+ continue;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_lifetime_to_type() {
+ check_doc_test(
+ "add_lifetime_to_type",
+ r#####"
+struct Point {
+ x: &$0u32,
+ y: u32,
+}
+"#####,
+ r#####"
+struct Point<'a> {
+ x: &'a u32,
+ y: u32,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_missing_match_arms() {
+ check_doc_test(
+ "add_missing_match_arms",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0Action::Move { distance } => todo!(),
+ Action::Stop => todo!(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_return_type() {
+ check_doc_test(
+ "add_return_type",
+ r#####"
+fn foo() { 4$02i32 }
+"#####,
+ r#####"
+fn foo() -> i32 { 42i32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_turbo_fish() {
+ check_doc_test(
+ "add_turbo_fish",
+ r#####"
+fn make<T>() -> T { todo!() }
+fn main() {
+ let x = make$0();
+}
+"#####,
+ r#####"
+fn make<T>() -> T { todo!() }
+fn main() {
+ let x = make::<${0:_}>();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_apply_demorgan() {
+ check_doc_test(
+ "apply_demorgan",
+ r#####"
+fn main() {
+ if x != 4 ||$0 y < 3.14 {}
+}
+"#####,
+ r#####"
+fn main() {
+ if !(x == 4 && y >= 3.14) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_auto_import() {
+ check_doc_test(
+ "auto_import",
+ r#####"
+fn main() {
+ let map = HashMap$0::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ r#####"
+use std::collections::HashMap;
+
+fn main() {
+ let map = HashMap::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_change_visibility() {
+ check_doc_test(
+ "change_visibility",
+ r#####"
+$0fn frobnicate() {}
+"#####,
+ r#####"
+pub(crate) fn frobnicate() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_bool_then_to_if() {
+ check_doc_test(
+ "convert_bool_then_to_if",
+ r#####"
+//- minicore: bool_impl
+fn main() {
+ (0 == 0).then$0(|| val)
+}
+"#####,
+ r#####"
+fn main() {
+ if 0 == 0 {
+ Some(val)
+ } else {
+ None
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_for_loop_with_for_each() {
+ check_doc_test(
+ "convert_for_loop_with_for_each",
+ r#####"
+fn main() {
+ let x = vec![1, 2, 3];
+ for$0 v in x {
+ let y = v * 2;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ let x = vec![1, 2, 3];
+ x.into_iter().for_each(|v| {
+ let y = v * 2;
+ });
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_if_to_bool_then() {
+ check_doc_test(
+ "convert_if_to_bool_then",
+ r#####"
+//- minicore: option
+fn main() {
+ if$0 cond {
+ Some(val)
+ } else {
+ None
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ cond.then(|| val)
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_integer_literal() {
+ check_doc_test(
+ "convert_integer_literal",
+ r#####"
+const _: i32 = 10$0;
+"#####,
+ r#####"
+const _: i32 = 0b1010;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_into_to_from() {
+ check_doc_test(
+ "convert_into_to_from",
+ r#####"
+//- minicore: from
+impl $0Into<Thing> for usize {
+ fn into(self) -> Thing {
+ Thing {
+ b: self.to_string(),
+ a: self
+ }
+ }
+}
+"#####,
+ r#####"
+impl From<usize> for Thing {
+ fn from(val: usize) -> Self {
+ Thing {
+ b: val.to_string(),
+ a: val
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_iter_for_each_to_for() {
+ check_doc_test(
+ "convert_iter_for_each_to_for",
+ r#####"
+//- minicore: iterators
+use core::iter;
+fn main() {
+ let iter = iter::repeat((9, 2));
+ iter.for_each$0(|(x, y)| {
+ println!("x: {}, y: {}", x, y);
+ });
+}
+"#####,
+ r#####"
+use core::iter;
+fn main() {
+ let iter = iter::repeat((9, 2));
+ for (x, y) in iter {
+ println!("x: {}, y: {}", x, y);
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_let_else_to_match() {
+ check_doc_test(
+ "convert_let_else_to_match",
+ r#####"
+fn main() {
+ let Ok(mut x) = f() else$0 { return };
+}
+"#####,
+ r#####"
+fn main() {
+ let mut x = match f() {
+ Ok(x) => x,
+ _ => return,
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_to_guarded_return() {
+ check_doc_test(
+ "convert_to_guarded_return",
+ r#####"
+fn main() {
+ $0if cond {
+ foo();
+ bar();
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ if !cond {
+ return;
+ }
+ foo();
+ bar();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_tuple_struct_to_named_struct() {
+ check_doc_test(
+ "convert_tuple_struct_to_named_struct",
+ r#####"
+struct Point$0(f32, f32);
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point(x, y)
+ }
+
+ pub fn x(&self) -> f32 {
+ self.0
+ }
+
+ pub fn y(&self) -> f32 {
+ self.1
+ }
+}
+"#####,
+ r#####"
+struct Point { field1: f32, field2: f32 }
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point { field1: x, field2: y }
+ }
+
+ pub fn x(&self) -> f32 {
+ self.field1
+ }
+
+ pub fn y(&self) -> f32 {
+ self.field2
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_while_to_loop() {
+ check_doc_test(
+ "convert_while_to_loop",
+ r#####"
+fn main() {
+ $0while cond {
+ foo();
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ loop {
+ if !cond {
+ break;
+ }
+ foo();
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_destructure_tuple_binding() {
+ check_doc_test(
+ "destructure_tuple_binding",
+ r#####"
+fn main() {
+ let $0t = (1,2);
+ let v = t.0;
+}
+"#####,
+ r#####"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = _0;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_expand_glob_import() {
+ check_doc_test(
+ "expand_glob_import",
+ r#####"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::*$0;
+
+fn qux(bar: Bar, baz: Baz) {}
+"#####,
+ r#####"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::{Bar, Baz};
+
+fn qux(bar: Bar, baz: Baz) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_function() {
+ check_doc_test(
+ "extract_function",
+ r#####"
+fn main() {
+ let n = 1;
+ $0let m = n + 2;
+ // calculate
+ let k = m + n;$0
+ let g = 3;
+}
+"#####,
+ r#####"
+fn main() {
+ let n = 1;
+ fun_name(n);
+ let g = 3;
+}
+
+fn $0fun_name(n: i32) {
+ let m = n + 2;
+ // calculate
+ let k = m + n;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_module() {
+ check_doc_test(
+ "extract_module",
+ r#####"
+$0fn foo(name: i32) -> i32 {
+ name + 1
+}$0
+
+fn bar(name: i32) -> i32 {
+ name + 2
+}
+"#####,
+ r#####"
+mod modname {
+ pub(crate) fn foo(name: i32) -> i32 {
+ name + 1
+ }
+}
+
+fn bar(name: i32) -> i32 {
+ name + 2
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_struct_from_enum_variant() {
+ check_doc_test(
+ "extract_struct_from_enum_variant",
+ r#####"
+enum A { $0One(u32, u32) }
+"#####,
+ r#####"
+struct One(u32, u32);
+
+enum A { One(One) }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_type_alias() {
+ check_doc_test(
+ "extract_type_alias",
+ r#####"
+struct S {
+ field: $0(u8, u8, u8)$0,
+}
+"#####,
+ r#####"
+type $0Type = (u8, u8, u8);
+
+struct S {
+ field: Type,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_variable() {
+ check_doc_test(
+ "extract_variable",
+ r#####"
+fn main() {
+ $0(1 + 2)$0 * 4;
+}
+"#####,
+ r#####"
+fn main() {
+ let $0var_name = (1 + 2);
+ var_name * 4;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_fix_visibility() {
+ check_doc_test(
+ "fix_visibility",
+ r#####"
+mod m {
+ fn frobnicate() {}
+}
+fn main() {
+ m::frobnicate$0() {}
+}
+"#####,
+ r#####"
+mod m {
+ $0pub(crate) fn frobnicate() {}
+}
+fn main() {
+ m::frobnicate() {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_binexpr() {
+ check_doc_test(
+ "flip_binexpr",
+ r#####"
+fn main() {
+ let _ = 90 +$0 2;
+}
+"#####,
+ r#####"
+fn main() {
+ let _ = 2 + 90;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_comma() {
+ check_doc_test(
+ "flip_comma",
+ r#####"
+fn main() {
+ ((1, 2),$0 (3, 4));
+}
+"#####,
+ r#####"
+fn main() {
+ ((3, 4), (1, 2));
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_trait_bound() {
+ check_doc_test(
+ "flip_trait_bound",
+ r#####"
+fn foo<T: Clone +$0 Copy>() { }
+"#####,
+ r#####"
+fn foo<T: Copy + Clone>() { }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_constant() {
+ check_doc_test(
+ "generate_constant",
+ r#####"
+struct S { i: usize }
+impl S { pub fn new(n: usize) {} }
+fn main() {
+ let v = S::new(CAPA$0CITY);
+}
+"#####,
+ r#####"
+struct S { i: usize }
+impl S { pub fn new(n: usize) {} }
+fn main() {
+ const CAPACITY: usize = $0;
+ let v = S::new(CAPACITY);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_default_from_enum_variant() {
+ check_doc_test(
+ "generate_default_from_enum_variant",
+ r#####"
+enum Version {
+ Undefined,
+ Minor$0,
+ Major,
+}
+"#####,
+ r#####"
+enum Version {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Default for Version {
+ fn default() -> Self {
+ Self::Minor
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_default_from_new() {
+ check_doc_test(
+ "generate_default_from_new",
+ r#####"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+}
+"#####,
+ r#####"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_delegate_methods() {
+ check_doc_test(
+ "generate_delegate_methods",
+ r#####"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ ag$0e: Age,
+}
+"#####,
+ r#####"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ age: Age,
+}
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_deref() {
+ check_doc_test(
+ "generate_deref",
+ r#####"
+//- minicore: deref, deref_mut
+struct A;
+struct B {
+ $0a: A
+}
+"#####,
+ r#####"
+struct A;
+struct B {
+ a: A
+}
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.a
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_derive() {
+ check_doc_test(
+ "generate_derive",
+ r#####"
+struct Point {
+ x: u32,
+ y: u32,$0
+}
+"#####,
+ r#####"
+#[derive($0)]
+struct Point {
+ x: u32,
+ y: u32,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_doc_example() {
+ check_doc_test(
+ "generate_doc_example",
+ r#####"
+/// Adds two numbers.$0
+pub fn add(a: i32, b: i32) -> i32 { a + b }
+"#####,
+ r#####"
+/// Adds two numbers.
+///
+/// # Examples
+///
+/// ```
+/// use test::add;
+///
+/// assert_eq!(add(a, b), );
+/// ```
+pub fn add(a: i32, b: i32) -> i32 { a + b }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_documentation_template() {
+ check_doc_test(
+ "generate_documentation_template",
+ r#####"
+pub struct S;
+impl S {
+ pub unsafe fn set_len$0(&mut self, len: usize) -> Result<(), std::io::Error> {
+ /* ... */
+ }
+}
+"#####,
+ r#####"
+pub struct S;
+impl S {
+ /// Sets the length of this [`S`].
+ ///
+ /// # Errors
+ ///
+ /// This function will return an error if .
+ ///
+ /// # Safety
+ ///
+ /// .
+ pub unsafe fn set_len(&mut self, len: usize) -> Result<(), std::io::Error> {
+ /* ... */
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_as_method() {
+ check_doc_test(
+ "generate_enum_as_method",
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}
+"#####,
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn as_text(&self) -> Option<&String> {
+ if let Self::Text(v) = self {
+ Some(v)
+ } else {
+ None
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_is_method() {
+ check_doc_test(
+ "generate_enum_is_method",
+ r#####"
+enum Version {
+ Undefined,
+ Minor$0,
+ Major,
+}
+"#####,
+ r#####"
+enum Version {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Version {
+ /// Returns `true` if the version is [`Minor`].
+ ///
+ /// [`Minor`]: Version::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_try_into_method() {
+ check_doc_test(
+ "generate_enum_try_into_method",
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}
+"#####,
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text(v) = self {
+ Ok(v)
+ } else {
+ Err(self)
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_variant() {
+ check_doc_test(
+ "generate_enum_variant",
+ r#####"
+enum Countries {
+ Ghana,
+}
+
+fn main() {
+ let country = Countries::Lesotho$0;
+}
+"#####,
+ r#####"
+enum Countries {
+ Ghana,
+ Lesotho,
+}
+
+fn main() {
+ let country = Countries::Lesotho;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_from_impl_for_enum() {
+ check_doc_test(
+ "generate_from_impl_for_enum",
+ r#####"
+enum A { $0One(u32) }
+"#####,
+ r#####"
+enum A { One(u32) }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_function() {
+ check_doc_test(
+ "generate_function",
+ r#####"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar$0("", baz());
+}
+
+"#####,
+ r#####"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar("", baz());
+}
+
+fn bar(arg: &str, baz: Baz) ${0:-> _} {
+ todo!()
+}
+
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_getter() {
+ check_doc_test(
+ "generate_getter",
+ r#####"
+//- minicore: as_ref
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn $0name(&self) -> &str {
+ self.name.as_ref()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_getter_mut() {
+ check_doc_test(
+ "generate_getter_mut",
+ r#####"
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn $0name_mut(&mut self) -> &mut String {
+ &mut self.name
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_impl() {
+ check_doc_test(
+ "generate_impl",
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,$0
+}
+"#####,
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Ctx<T> {
+ $0
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_is_empty_from_len() {
+ check_doc_test(
+ "generate_is_empty_from_len",
+ r#####"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+}
+"#####,
+ r#####"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_new() {
+ check_doc_test(
+ "generate_new",
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,$0
+}
+"#####,
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Ctx<T> {
+ fn $0new(data: T) -> Self { Self { data } }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_setter() {
+ check_doc_test(
+ "generate_setter",
+ r#####"
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn set_name(&mut self, name: String) {
+ self.name = name;
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_call() {
+ check_doc_test(
+ "inline_call",
+ r#####"
+//- minicore: option
+fn foo(name: Option<&str>) {
+ let name = name.unwrap$0();
+}
+"#####,
+ r#####"
+fn foo(name: Option<&str>) {
+ let name = match name {
+ Some(val) => val,
+ None => panic!("called `Option::unwrap()` on a `None` value"),
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_into_callers() {
+ check_doc_test(
+ "inline_into_callers",
+ r#####"
+fn print(_: &str) {}
+fn foo$0(word: &str) {
+ if !word.is_empty() {
+ print(word);
+ }
+}
+fn bar() {
+ foo("안녕하세요");
+ foo("여러분");
+}
+"#####,
+ r#####"
+fn print(_: &str) {}
+
+fn bar() {
+ {
+ let word = "안녕하세요";
+ if !word.is_empty() {
+ print(word);
+ }
+ };
+ {
+ let word = "여러분";
+ if !word.is_empty() {
+ print(word);
+ }
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_local_variable() {
+ check_doc_test(
+ "inline_local_variable",
+ r#####"
+fn main() {
+ let x$0 = 1 + 2;
+ x * 4;
+}
+"#####,
+ r#####"
+fn main() {
+ (1 + 2) * 4;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_type_alias() {
+ check_doc_test(
+ "inline_type_alias",
+ r#####"
+type A<T = u32> = Vec<T>;
+
+fn main() {
+ let a: $0A;
+}
+"#####,
+ r#####"
+type A<T = u32> = Vec<T>;
+
+fn main() {
+ let a: Vec<u32>;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_introduce_named_generic() {
+ check_doc_test(
+ "introduce_named_generic",
+ r#####"
+fn foo(bar: $0impl Bar) {}
+"#####,
+ r#####"
+fn foo<B: Bar>(bar: B) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_introduce_named_lifetime() {
+ check_doc_test(
+ "introduce_named_lifetime",
+ r#####"
+impl Cursor<'_$0> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+"#####,
+ r#####"
+impl<'a> Cursor<'a> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_invert_if() {
+ check_doc_test(
+ "invert_if",
+ r#####"
+fn main() {
+ if$0 !y { A } else { B }
+}
+"#####,
+ r#####"
+fn main() {
+ if y { B } else { A }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_line_to_block() {
+ check_doc_test(
+ "line_to_block",
+ r#####"
+ // Multi-line$0
+ // comment
+"#####,
+ r#####"
+ /*
+ Multi-line
+ comment
+ */
+"#####,
+ )
+}
+
+#[test]
+fn doctest_make_raw_string() {
+ check_doc_test(
+ "make_raw_string",
+ r#####"
+fn main() {
+ "Hello,$0 World!";
+}
+"#####,
+ r#####"
+fn main() {
+ r#"Hello, World!"#;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_make_usual_string() {
+ check_doc_test(
+ "make_usual_string",
+ r#####"
+fn main() {
+ r#"Hello,$0 "World!""#;
+}
+"#####,
+ r#####"
+fn main() {
+ "Hello, \"World!\"";
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_merge_imports() {
+ check_doc_test(
+ "merge_imports",
+ r#####"
+use std::$0fmt::Formatter;
+use std::io;
+"#####,
+ r#####"
+use std::{fmt::Formatter, io};
+"#####,
+ )
+}
+
+#[test]
+fn doctest_merge_match_arms() {
+ check_doc_test(
+ "merge_match_arms",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0Action::Move(..) => foo(),
+ Action::Stop => foo(),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) | Action::Stop => foo(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_arm_cond_to_match_guard() {
+ check_doc_test(
+ "move_arm_cond_to_match_guard",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => $0if distance > 10 { foo() },
+ _ => (),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } if distance > 10 => foo(),
+ _ => (),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_bounds_to_where_clause() {
+ check_doc_test(
+ "move_bounds_to_where_clause",
+ r#####"
+fn apply<T, U, $0F: FnOnce(T) -> U>(f: F, x: T) -> U {
+ f(x)
+}
+"#####,
+ r#####"
+fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
+ f(x)
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_from_mod_rs() {
+ check_doc_test(
+ "move_from_mod_rs",
+ r#####"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn t() {}$0
+"#####,
+ r#####"
+fn t() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_guard_to_arm_body() {
+ check_doc_test(
+ "move_guard_to_arm_body",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } $0if distance > 10 => foo(),
+ _ => (),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => if distance > 10 {
+ foo()
+ },
+ _ => (),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_module_to_file() {
+ check_doc_test(
+ "move_module_to_file",
+ r#####"
+mod $0foo {
+ fn t() {}
+}
+"#####,
+ r#####"
+mod foo;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_to_mod_rs() {
+ check_doc_test(
+ "move_to_mod_rs",
+ r#####"
+//- /main.rs
+mod a;
+//- /a.rs
+$0fn t() {}$0
+"#####,
+ r#####"
+fn t() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_promote_local_to_const() {
+ check_doc_test(
+ "promote_local_to_const",
+ r#####"
+fn main() {
+ let foo$0 = true;
+
+ if foo {
+ println!("It's true");
+ } else {
+ println!("It's false");
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ const $0FOO: bool = true;
+
+ if FOO {
+ println!("It's true");
+ } else {
+ println!("It's false");
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_pull_assignment_up() {
+ check_doc_test(
+ "pull_assignment_up",
+ r#####"
+fn main() {
+ let mut foo = 6;
+
+ if true {
+ $0foo = 5;
+ } else {
+ foo = 4;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ let mut foo = 6;
+
+ foo = if true {
+ 5
+ } else {
+ 4
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_qualify_method_call() {
+ check_doc_test(
+ "qualify_method_call",
+ r#####"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn main() {
+ let foo = Foo;
+ foo.fo$0o();
+}
+"#####,
+ r#####"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn main() {
+ let foo = Foo;
+ Foo::foo(&foo);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_qualify_path() {
+ check_doc_test(
+ "qualify_path",
+ r#####"
+fn main() {
+ let map = HashMap$0::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ r#####"
+fn main() {
+ let map = std::collections::HashMap::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reformat_number_literal() {
+ check_doc_test(
+ "reformat_number_literal",
+ r#####"
+const _: i32 = 1012345$0;
+"#####,
+ r#####"
+const _: i32 = 1_012_345;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_dbg() {
+ check_doc_test(
+ "remove_dbg",
+ r#####"
+fn main() {
+ $0dbg!(92);
+}
+"#####,
+ r#####"
+fn main() {
+ 92;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_hash() {
+ check_doc_test(
+ "remove_hash",
+ r#####"
+fn main() {
+ r#"Hello,$0 World!"#;
+}
+"#####,
+ r#####"
+fn main() {
+ r"Hello, World!";
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_mut() {
+ check_doc_test(
+ "remove_mut",
+ r#####"
+impl Walrus {
+ fn feed(&mut$0 self, amount: u32) {}
+}
+"#####,
+ r#####"
+impl Walrus {
+ fn feed(&self, amount: u32) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_unused_param() {
+ check_doc_test(
+ "remove_unused_param",
+ r#####"
+fn frobnicate(x: i32$0) {}
+
+fn main() {
+ frobnicate(92);
+}
+"#####,
+ r#####"
+fn frobnicate() {}
+
+fn main() {
+ frobnicate();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reorder_fields() {
+ check_doc_test(
+ "reorder_fields",
+ r#####"
+struct Foo {foo: i32, bar: i32};
+const test: Foo = $0Foo {bar: 0, foo: 1}
+"#####,
+ r#####"
+struct Foo {foo: i32, bar: i32};
+const test: Foo = Foo {foo: 1, bar: 0}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reorder_impl_items() {
+ check_doc_test(
+ "reorder_impl_items",
+ r#####"
+trait Foo {
+ type A;
+ const B: u8;
+ fn c();
+}
+
+struct Bar;
+$0impl Foo for Bar {
+ const B: u8 = 17;
+ fn c() {}
+ type A = String;
+}
+"#####,
+ r#####"
+trait Foo {
+ type A;
+ const B: u8;
+ fn c();
+}
+
+struct Bar;
+impl Foo for Bar {
+ type A = String;
+ const B: u8 = 17;
+ fn c() {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_char_with_string() {
+ check_doc_test(
+ "replace_char_with_string",
+ r#####"
+fn main() {
+ find('{$0');
+}
+"#####,
+ r#####"
+fn main() {
+ find("{");
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_derive_with_manual_impl() {
+ check_doc_test(
+ "replace_derive_with_manual_impl",
+ r#####"
+//- minicore: derive
+trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+#[derive(Deb$0ug, Display)]
+struct S;
+"#####,
+ r#####"
+trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+#[derive(Display)]
+struct S;
+
+impl Debug for S {
+ $0fn fmt(&self, f: &mut Formatter) -> Result<()> {
+ f.debug_struct("S").finish()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_if_let_with_match() {
+ check_doc_test(
+ "replace_if_let_with_match",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ $0if let Action::Move { distance } = action {
+ foo(distance)
+ } else {
+ bar()
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => foo(distance),
+ _ => bar(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_let_with_if_let() {
+ check_doc_test(
+ "replace_let_with_if_let",
+ r#####"
+enum Option<T> { Some(T), None }
+
+fn main(action: Action) {
+ $0let x = compute();
+}
+
+fn compute() -> Option<i32> { None }
+"#####,
+ r#####"
+enum Option<T> { Some(T), None }
+
+fn main(action: Action) {
+ if let Some(x) = compute() {
+ }
+}
+
+fn compute() -> Option<i32> { None }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_match_with_if_let() {
+ check_doc_test(
+ "replace_match_with_if_let",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ $0match action {
+ Action::Move { distance } => foo(distance),
+ _ => bar(),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ if let Action::Move { distance } = action {
+ foo(distance)
+ } else {
+ bar()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_qualified_name_with_use() {
+ check_doc_test(
+ "replace_qualified_name_with_use",
+ r#####"
+mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+fn process(map: std::collections::$0HashMap<String, String>) {}
+"#####,
+ r#####"
+use std::collections::HashMap;
+
+mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+fn process(map: HashMap<String, String>) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_string_with_char() {
+ check_doc_test(
+ "replace_string_with_char",
+ r#####"
+fn main() {
+ find("{$0");
+}
+"#####,
+ r#####"
+fn main() {
+ find('{');
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_try_expr_with_match() {
+ check_doc_test(
+ "replace_try_expr_with_match",
+ r#####"
+//- minicore:option
+fn handle() {
+ let pat = Some(true)$0?;
+}
+"#####,
+ r#####"
+fn handle() {
+ let pat = match Some(true) {
+ Some(it) => it,
+ None => return None,
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_turbofish_with_explicit_type() {
+ check_doc_test(
+ "replace_turbofish_with_explicit_type",
+ r#####"
+fn make<T>() -> T { ) }
+fn main() {
+ let a = make$0::<i32>();
+}
+"#####,
+ r#####"
+fn make<T>() -> T { ) }
+fn main() {
+ let a: i32 = make();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+struct $0Foo$0 { second: u32, first: String }
+"#####,
+ r#####"
+struct Foo { first: String, second: u32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_1() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+trait $0Bar$0 {
+ fn second(&self) -> u32;
+ fn first(&self) -> String;
+}
+"#####,
+ r#####"
+trait Bar {
+ fn first(&self) -> String;
+ fn second(&self) -> u32;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_2() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+struct Baz;
+impl $0Baz$0 {
+ fn second(&self) -> u32;
+ fn first(&self) -> String;
+}
+"#####,
+ r#####"
+struct Baz;
+impl Baz {
+ fn first(&self) -> String;
+ fn second(&self) -> u32;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_3() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+enum $0Animal$0 {
+ Dog(String, f64),
+ Cat { weight: f64, name: String },
+}
+"#####,
+ r#####"
+enum Animal {
+ Cat { weight: f64, name: String },
+ Dog(String, f64),
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_4() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+enum Animal {
+ Dog(String, f64),
+ Cat $0{ weight: f64, name: String }$0,
+}
+"#####,
+ r#####"
+enum Animal {
+ Dog(String, f64),
+ Cat { name: String, weight: f64 },
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_split_import() {
+ check_doc_test(
+ "split_import",
+ r#####"
+use std::$0collections::HashMap;
+"#####,
+ r#####"
+use std::{collections::HashMap};
+"#####,
+ )
+}
+
+#[test]
+fn doctest_toggle_ignore() {
+ check_doc_test(
+ "toggle_ignore",
+ r#####"
+$0#[test]
+fn arithmetics {
+ assert_eq!(2 + 2, 5);
+}
+"#####,
+ r#####"
+#[test]
+#[ignore]
+fn arithmetics {
+ assert_eq!(2 + 2, 5);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unmerge_use() {
+ check_doc_test(
+ "unmerge_use",
+ r#####"
+use std::fmt::{Debug, Display$0};
+"#####,
+ r#####"
+use std::fmt::{Debug};
+use std::fmt::Display;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unnecessary_async() {
+ check_doc_test(
+ "unnecessary_async",
+ r#####"
+pub async f$0n foo() {}
+pub async fn bar() { foo().await }
+"#####,
+ r#####"
+pub fn foo() {}
+pub async fn bar() { foo() }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unwrap_block() {
+ check_doc_test(
+ "unwrap_block",
+ r#####"
+fn foo() {
+ if true {$0
+ println!("foo");
+ }
+}
+"#####,
+ r#####"
+fn foo() {
+ println!("foo");
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unwrap_result_return_type() {
+ check_doc_test(
+ "unwrap_result_return_type",
+ r#####"
+//- minicore: result
+fn foo() -> Result<i32>$0 { Ok(42i32) }
+"#####,
+ r#####"
+fn foo() -> i32 { 42i32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_wrap_return_type_in_result() {
+ check_doc_test(
+ "wrap_return_type_in_result",
+ r#####"
+//- minicore: result
+fn foo() -> i32$0 { 42i32 }
+"#####,
+ r#####"
+fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
+"#####,
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs
new file mode 100644
index 000000000..070b83d3c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs
@@ -0,0 +1,195 @@
+//! Generates `assists.md` documentation.
+
+use std::{fmt, fs, path::Path};
+
+use test_utils::project_root;
+
+#[test]
+fn sourcegen_assists_docs() {
+ let assists = Assist::collect();
+
+ {
+ // Generate doctests.
+
+ let mut buf = "
+use super::check_doc_test;
+"
+ .to_string();
+ for assist in assists.iter() {
+ for (idx, section) in assist.sections.iter().enumerate() {
+ let test_id =
+ if idx == 0 { assist.id.clone() } else { format!("{}_{}", &assist.id, idx) };
+ let test = format!(
+ r######"
+#[test]
+fn doctest_{}() {{
+ check_doc_test(
+ "{}",
+r#####"
+{}"#####, r#####"
+{}"#####)
+}}
+"######,
+ &test_id,
+ &assist.id,
+ reveal_hash_comments(&section.before),
+ reveal_hash_comments(&section.after)
+ );
+
+ buf.push_str(&test)
+ }
+ }
+ let buf = sourcegen::add_preamble("sourcegen_assists_docs", sourcegen::reformat(buf));
+ sourcegen::ensure_file_contents(
+ &project_root().join("crates/ide-assists/src/tests/generated.rs"),
+ &buf,
+ );
+ }
+
+ {
+ // Generate assists manual. Note that we do _not_ commit manual to the
+ // git repo. Instead, `cargo xtask release` runs this test before making
+ // a release.
+
+ let contents = sourcegen::add_preamble(
+ "sourcegen_assists_docs",
+ assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"),
+ );
+ let dst = project_root().join("docs/user/generated_assists.adoc");
+ fs::write(dst, contents).unwrap();
+ }
+}
+
+#[derive(Debug)]
+struct Section {
+ doc: String,
+ before: String,
+ after: String,
+}
+
+#[derive(Debug)]
+struct Assist {
+ id: String,
+ location: sourcegen::Location,
+ sections: Vec<Section>,
+}
+
+impl Assist {
+ fn collect() -> Vec<Assist> {
+ let handlers_dir = project_root().join("crates/ide-assists/src/handlers");
+
+ let mut res = Vec::new();
+ for path in sourcegen::list_rust_files(&handlers_dir) {
+ collect_file(&mut res, path.as_path());
+ }
+ res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
+ return res;
+
+ fn collect_file(acc: &mut Vec<Assist>, path: &Path) {
+ let text = fs::read_to_string(path).unwrap();
+ let comment_blocks = sourcegen::CommentBlock::extract("Assist", &text);
+
+ for block in comment_blocks {
+ // FIXME: doesn't support blank lines yet, need to tweak
+ // `extract_comment_blocks` for that.
+ let id = block.id;
+ assert!(
+ id.chars().all(|it| it.is_ascii_lowercase() || it == '_'),
+ "invalid assist id: {:?}",
+ id
+ );
+ let mut lines = block.contents.iter().peekable();
+ let location = sourcegen::Location { file: path.to_path_buf(), line: block.line };
+ let mut assist = Assist { id, location, sections: Vec::new() };
+
+ while lines.peek().is_some() {
+ let doc = take_until(lines.by_ref(), "```").trim().to_string();
+ assert!(
+ (doc.chars().next().unwrap().is_ascii_uppercase() && doc.ends_with('.'))
+ || assist.sections.len() > 0,
+ "\n\n{}: assist docs should be proper sentences, with capitalization and a full stop at the end.\n\n{}\n\n",
+ &assist.id,
+ doc,
+ );
+
+ let before = take_until(lines.by_ref(), "```");
+
+ assert_eq!(lines.next().unwrap().as_str(), "->");
+ assert_eq!(lines.next().unwrap().as_str(), "```");
+ let after = take_until(lines.by_ref(), "```");
+
+ assist.sections.push(Section { doc, before, after });
+ }
+
+ acc.push(assist)
+ }
+ }
+
+ fn take_until<'a>(lines: impl Iterator<Item = &'a String>, marker: &str) -> String {
+ let mut buf = Vec::new();
+ for line in lines {
+ if line == marker {
+ break;
+ }
+ buf.push(line.clone());
+ }
+ buf.join("\n")
+ }
+ }
+}
+
+impl fmt::Display for Assist {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let _ = writeln!(
+ f,
+ "[discrete]\n=== `{}`
+**Source:** {}",
+ self.id, self.location,
+ );
+
+ for section in &self.sections {
+ let before = section.before.replace("$0", "┃"); // Unicode pseudo-graphics bar
+ let after = section.after.replace("$0", "┃");
+ let _ = writeln!(
+ f,
+ "
+{}
+
+.Before
+```rust
+{}```
+
+.After
+```rust
+{}```",
+ section.doc,
+ hide_hash_comments(&before),
+ hide_hash_comments(&after)
+ );
+ }
+
+ Ok(())
+ }
+}
+
+fn hide_hash_comments(text: &str) -> String {
+ text.split('\n') // want final newline
+ .filter(|&it| !(it.starts_with("# ") || it == "#"))
+ .map(|it| format!("{}\n", it))
+ .collect()
+}
+
+fn reveal_hash_comments(text: &str) -> String {
+ text.split('\n') // want final newline
+ .map(|it| {
+ if let Some(stripped) = it.strip_prefix("# ") {
+ stripped
+ } else if it == "#" {
+ ""
+ } else {
+ it
+ }
+ })
+ .map(|it| format!("{}\n", it))
+ .collect()
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
new file mode 100644
index 000000000..3e61d0741
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -0,0 +1,703 @@
+//! Assorted functions shared by several assists.
+
+use std::ops;
+
+use itertools::Itertools;
+
+pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
+use hir::{db::HirDatabase, HirDisplay, Semantics};
+use ide_db::{famous_defs::FamousDefs, path_transform::PathTransform, RootDatabase, SnippetCap};
+use stdx::format_to;
+use syntax::{
+ ast::{
+ self,
+ edit::{self, AstNodeEdit},
+ edit_in_place::AttrsOwnerEdit,
+ make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
+ },
+ ted, AstNode, AstToken, Direction, SmolStr, SourceFile,
+ SyntaxKind::*,
+ SyntaxNode, TextRange, TextSize, T,
+};
+
+use crate::assist_context::{AssistBuilder, AssistContext};
+
+pub(crate) mod suggest_name;
+mod gen_trait_fn_body;
+
+/// Replaces a block expression with its single trivial inner expression,
+/// provided that expression fits on one line; otherwise returns the block
+/// unchanged (as an `ast::Expr`).
+pub(crate) fn unwrap_trivial_block(block_expr: ast::BlockExpr) -> ast::Expr {
+    extract_trivial_expression(&block_expr)
+        .filter(|expr| !expr.syntax().text().contains_char('\n'))
+        .unwrap_or_else(|| block_expr.into())
+}
+
+/// Returns the sole expression of `block_expr` if the block contains nothing
+/// else besides braces and whitespace and has no block modifier. A lone
+/// `continue;`/`break;`/`return;` statement is also unwrapped.
+pub fn extract_trivial_expression(block_expr: &ast::BlockExpr) -> Option<ast::Expr> {
+    if block_expr.modifier().is_some() {
+        return None;
+    }
+    let stmt_list = block_expr.stmt_list()?;
+    // True if the statement list contains any child besides `thing`,
+    // the braces, and whitespace.
+    let has_anything_else = |thing: &SyntaxNode| -> bool {
+        let mut non_trivial_children =
+            stmt_list.syntax().children_with_tokens().filter(|it| match it.kind() {
+                WHITESPACE | T!['{'] | T!['}'] => false,
+                _ => it.as_node() != Some(thing),
+            });
+        non_trivial_children.next().is_some()
+    };
+
+    if let Some(expr) = stmt_list.tail_expr() {
+        if has_anything_else(expr.syntax()) {
+            return None;
+        }
+        return Some(expr);
+    }
+    // Unwrap `{ continue; }`
+    let stmt = stmt_list.statements().next()?;
+    if let ast::Stmt::ExprStmt(expr_stmt) = stmt {
+        if has_anything_else(expr_stmt.syntax()) {
+            return None;
+        }
+        let expr = expr_stmt.expr()?;
+        // Only control-flow expressions are unwrapped from statement position.
+        if matches!(expr.syntax().kind(), CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR) {
+            return Some(expr);
+        }
+    }
+    None
+}
+
+/// This is a method with a heuristics to support test methods annotated with custom test annotations, such as
+/// `#[test_case(...)]`, `#[tokio::test]` and similar.
+/// Also a regular `#[test]` annotation is supported.
+///
+/// It may produce false positives, for example, `#[wasm_bindgen_test]` requires a different command to run the test,
+/// but it's better than not to have the runnables for the tests at all.
+pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
+    fn_def.attrs().find_map(|attr| {
+        let path = attr.path()?;
+        // Heuristic: accept any attribute whose path text begins or ends with
+        // "test" (covers `test`, `test_case`, `tokio::test`, ...).
+        let text = path.syntax().text().to_string();
+        if text.starts_with("test") || text.ends_with("test") {
+            Some(attr)
+        } else {
+            None
+        }
+    })
+}
+
+/// Selects which associated items `filter_assoc_items` keeps: only those
+/// with a default body (`Only`), or only those without one (`No`).
+#[derive(Copy, Clone, PartialEq)]
+pub enum DefaultMethods {
+    Only,
+    No,
+}
+
+/// Maps `items` to their source ASTs, dropping items without a source or a
+/// name, and keeping only default methods (`DefaultMethods::Only`) or only
+/// items without a default body (`DefaultMethods::No`).
+pub fn filter_assoc_items(
+    sema: &Semantics<'_, RootDatabase>,
+    items: &[hir::AssocItem],
+    default_methods: DefaultMethods,
+) -> Vec<ast::AssocItem> {
+    // True if the AST item carries a name (macro calls never do here).
+    fn has_def_name(item: &ast::AssocItem) -> bool {
+        match item {
+            ast::AssocItem::Fn(def) => def.name(),
+            ast::AssocItem::TypeAlias(def) => def.name(),
+            ast::AssocItem::Const(def) => def.name(),
+            ast::AssocItem::MacroCall(_) => None,
+        }
+        .is_some()
+    }
+
+    items
+        .iter()
+        // Note: This throws away items with no source.
+        .filter_map(|&i| {
+            let item = match i {
+                hir::AssocItem::Function(i) => ast::AssocItem::Fn(sema.source(i)?.value),
+                hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAlias(sema.source(i)?.value),
+                hir::AssocItem::Const(i) => ast::AssocItem::Const(sema.source(i)?.value),
+            };
+            Some(item)
+        })
+        .filter(has_def_name)
+        .filter(|it| match it {
+            // For fns, "default" means "has a body"; other item kinds are
+            // only kept when non-default items were requested.
+            ast::AssocItem::Fn(def) => matches!(
+                (default_methods, def.body()),
+                (DefaultMethods::Only, Some(_)) | (DefaultMethods::No, None)
+            ),
+            _ => default_methods == DefaultMethods::No,
+        })
+        .collect::<Vec<_>>()
+}
+
+/// Adds the given trait `items` to a fresh mutable copy of `impl_`:
+/// rewrites paths for the target scope, strips attributes and doc comments,
+/// fills bodiless fns with a `todo!()` body and drops type-alias bounds.
+/// Returns the updated impl together with the first added item.
+///
+/// Panics if `items` is empty (the final `unwrap`).
+pub fn add_trait_assoc_items_to_impl(
+    sema: &Semantics<'_, RootDatabase>,
+    items: Vec<ast::AssocItem>,
+    trait_: hir::Trait,
+    impl_: ast::Impl,
+    target_scope: hir::SemanticsScope<'_>,
+) -> (ast::Impl, ast::AssocItem) {
+    let source_scope = sema.scope_for_def(trait_);
+
+    let transform = PathTransform::trait_impl(&target_scope, &source_scope, trait_, impl_.clone());
+
+    // Rewrite paths from the trait's scope into the impl's scope and drop
+    // attrs/docs that should not be copied into the impl.
+    let items = items.into_iter().map(|assoc_item| {
+        transform.apply(assoc_item.syntax());
+        assoc_item.remove_attrs_and_docs();
+        assoc_item
+    });
+
+    let res = impl_.clone_for_update();
+
+    let assoc_item_list = res.get_or_create_assoc_item_list();
+    let mut first_item = None;
+    for item in items {
+        first_item.get_or_insert_with(|| item.clone());
+        match &item {
+            // Trait methods without a default body get a `todo!()` stub.
+            ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
+                let body = make::block_expr(None, Some(make::ext::expr_todo()))
+                    .indent(edit::IndentLevel(1));
+                ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax())
+            }
+            // `type Assoc: Bound;` -> `type Assoc;` (bounds are illegal in impls).
+            ast::AssocItem::TypeAlias(type_alias) => {
+                if let Some(type_bound_list) = type_alias.type_bound_list() {
+                    type_bound_list.remove()
+                }
+            }
+            _ => {}
+        }
+
+        assoc_item_list.add_item(item)
+    }
+
+    (res, first_item.unwrap())
+}
+
+/// Where the snippet cursor goes relative to a node (see `render_snippet`):
+/// either the node is replaced by a `${0:...}` placeholder, or `$0` is
+/// inserted just before it.
+#[derive(Clone, Copy, Debug)]
+pub(crate) enum Cursor<'a> {
+    Replace(&'a SyntaxNode),
+    Before(&'a SyntaxNode),
+}
+
+impl<'a> Cursor<'a> {
+    /// The node the cursor is anchored to, for either variant.
+    fn node(self) -> &'a SyntaxNode {
+        match self {
+            Cursor::Replace(node) | Cursor::Before(node) => node,
+        }
+    }
+}
+
+/// Renders `node` as an LSP snippet string with a tab stop at `cursor`:
+/// `Cursor::Replace` wraps the node text in `${0:...}`, `Cursor::Before`
+/// prepends `$0`. `cursor` must be a descendant of `node` (asserted).
+pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor<'_>) -> String {
+    assert!(cursor.node().ancestors().any(|it| it == *node));
+    // Range of the cursor node, relative to the start of `node`.
+    let range = cursor.node().text_range() - node.text_range().start();
+    let range: ops::Range<usize> = range.into();
+
+    // Escape snippet metacharacters in the placeholder text first.
+    let mut placeholder = cursor.node().to_string();
+    escape(&mut placeholder);
+    let tab_stop = match cursor {
+        Cursor::Replace(placeholder) => format!("${{0:{}}}", placeholder),
+        Cursor::Before(placeholder) => format!("$0{}", placeholder),
+    };
+
+    let mut buf = node.to_string();
+    buf.replace_range(range, &tab_stop);
+    return buf;
+
+    // `{`, `}` and `$` are special inside LSP snippets.
+    fn escape(buf: &mut String) {
+        stdx::replace(buf, '{', r"\{");
+        stdx::replace(buf, '}', r"\}");
+        stdx::replace(buf, '$', r"\$");
+    }
+}
+
+/// Returns the offset at which a visibility modifier could be inserted for
+/// `node`: after any leading whitespace, comments and attributes, falling
+/// back to the node's own start.
+pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize {
+    node.children_with_tokens()
+        .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
+        .map(|it| it.text_range().start())
+        .unwrap_or_else(|| node.text_range().start())
+}
+
+/// Logically negates `expr`, preferring a structural inversion (e.g.
+/// `==` -> `!=`, `is_some()` -> `is_none()`) and falling back to prefixing
+/// the expression with `!`.
+pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr {
+    invert_special_case(&expr).unwrap_or_else(|| make::expr_prefix(T![!], expr))
+}
+
+/// Structural negations that read better than a `!`-prefix; returns `None`
+/// when no special form applies (the caller then falls back to `!expr`).
+fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
+    match expr {
+        // Comparison operators: flip the operator token in place.
+        ast::Expr::BinExpr(bin) => {
+            let bin = bin.clone_for_update();
+            let op_token = bin.op_token()?;
+            let rev_token = match op_token.kind() {
+                T![==] => T![!=],
+                T![!=] => T![==],
+                T![<] => T![>=],
+                T![<=] => T![>],
+                T![>] => T![<=],
+                T![>=] => T![<],
+                // Parenthesize other expressions before prefixing `!`
+                _ => return Some(make::expr_prefix(T![!], make::expr_paren(expr.clone()))),
+            };
+            ted::replace(op_token, make::token(rev_token));
+            Some(bin.into())
+        }
+        // Well-known predicate methods with a complementary twin.
+        ast::Expr::MethodCallExpr(mce) => {
+            let receiver = mce.receiver()?;
+            let method = mce.name_ref()?;
+            let arg_list = mce.arg_list()?;
+
+            let method = match method.text().as_str() {
+                "is_some" => "is_none",
+                "is_none" => "is_some",
+                "is_ok" => "is_err",
+                "is_err" => "is_ok",
+                _ => return None,
+            };
+            Some(make::expr_method_call(receiver, make::name_ref(method), arg_list))
+        }
+        // `!x` -> `x`, unwrapping parentheses that the `!` made necessary.
+        ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => match pe.expr()? {
+            ast::Expr::ParenExpr(parexpr) => parexpr.expr(),
+            _ => pe.expr(),
+        },
+        // `true` <-> `false`.
+        ast::Expr::Literal(lit) => match lit.kind() {
+            ast::LiteralKind::Bool(b) => match b {
+                true => Some(ast::Expr::Literal(make::expr_literal("false"))),
+                false => Some(ast::Expr::Literal(make::expr_literal("true"))),
+            },
+            _ => None,
+        },
+        _ => None,
+    }
+}
+
+/// Yields both sibling-traversal directions, `Next` first.
+pub(crate) fn next_prev() -> impl Iterator<Item = Direction> {
+    [Direction::Next, Direction::Prev].into_iter()
+}
+
+/// Approximates whether `pat` matches the same enum variant as `var` by
+/// comparing the text of each pattern's first child node (the "head",
+/// typically the variant path). For a bare `IdentPat` binding without an
+/// inner pattern, the whole pattern texts are compared instead.
+pub(crate) fn does_pat_match_variant(pat: &ast::Pat, var: &ast::Pat) -> bool {
+    let first_node_text = |pat: &ast::Pat| pat.syntax().first_child().map(|node| node.text());
+
+    let pat_head = match pat {
+        ast::Pat::IdentPat(bind_pat) => match bind_pat.pat() {
+            Some(p) => first_node_text(&p),
+            None => return pat.syntax().text() == var.syntax().text(),
+        },
+        pat => first_node_text(pat),
+    };
+
+    let var_head = first_node_text(var);
+
+    pat_head == var_head
+}
+
+/// Returns `true` if `pat` nests deeper than one level; only
+/// `TupleStructPat` nesting is currently counted (see `calc_depth`).
+pub(crate) fn does_nested_pattern(pat: &ast::Pat) -> bool {
+    let depth = calc_depth(pat, 0);
+
+    if 1 < depth {
+        return true;
+    }
+    false
+}
+
+/// Computes the maximum nesting depth of `pat`, starting from `depth`.
+/// Only `TupleStructPat` recurses into its fields; every other pattern kind
+/// terminates at the current depth (see FIXME below).
+fn calc_depth(pat: &ast::Pat, depth: usize) -> usize {
+    match pat {
+        ast::Pat::IdentPat(_)
+        | ast::Pat::BoxPat(_)
+        | ast::Pat::RestPat(_)
+        | ast::Pat::LiteralPat(_)
+        | ast::Pat::MacroPat(_)
+        | ast::Pat::OrPat(_)
+        | ast::Pat::ParenPat(_)
+        | ast::Pat::PathPat(_)
+        | ast::Pat::WildcardPat(_)
+        | ast::Pat::RangePat(_)
+        | ast::Pat::RecordPat(_)
+        | ast::Pat::RefPat(_)
+        | ast::Pat::SlicePat(_)
+        | ast::Pat::TuplePat(_)
+        | ast::Pat::ConstBlockPat(_) => depth,
+
+        // FIXME: Other patterns may also be nested. Currently it simply supports only `TupleStructPat`
+        ast::Pat::TupleStructPat(pat) => {
+            let mut max_depth = depth;
+            for p in pat.fields() {
+                let d = calc_depth(&p, depth + 1);
+                if d > max_depth {
+                    max_depth = d
+                }
+            }
+            max_depth
+        }
+    }
+}
+
+// Uses a syntax-driven approach to find any impl blocks for the struct that
+// exist within the module/file
+//
+// Returns `None` if we've found an existing fn
+//
+// FIXME: change the new fn checking to a more semantic approach when that's more
+// viable (e.g. we process proc macros, etc)
+// FIXME: this partially overlaps with `find_impl_block_*`
+pub(crate) fn find_struct_impl(
+    ctx: &AssistContext<'_>,
+    adt: &ast::Adt,
+    name: &str,
+) -> Option<Option<ast::Impl>> {
+    let db = ctx.db();
+    // Search only within the ADT's enclosing syntax parent (module/file).
+    let module = adt.syntax().parent()?;
+
+    let struct_def = ctx.sema.to_def(adt)?;
+
+    let block = module.descendants().filter_map(ast::Impl::cast).find_map(|impl_blk| {
+        let blk = ctx.sema.to_def(&impl_blk)?;
+
+        // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
+        // (we currently use the wrong type parameter)
+        // also we wouldn't want to use e.g. `impl S<u32>`
+
+        // Only inherent impls whose self type is exactly this ADT qualify.
+        let same_ty = match blk.self_ty(db).as_adt() {
+            Some(def) => def == struct_def,
+            None => false,
+        };
+        let not_trait_impl = blk.trait_(db).is_none();
+
+        if !(same_ty && not_trait_impl) {
+            None
+        } else {
+            Some(impl_blk)
+        }
+    });
+
+    // If the impl already defines a fn called `name`, signal the caller to
+    // bail out entirely (outer `None`).
+    if let Some(ref impl_blk) = block {
+        if has_fn(impl_blk, name) {
+            return None;
+        }
+    }
+
+    // `Some(None)` means: no existing impl, generate a fresh one.
+    Some(block)
+}
+
+/// Checks whether `imp` already contains a fn named `rhs_name`.
+/// Note: the comparison is ASCII-case-insensitive.
+fn has_fn(imp: &ast::Impl, rhs_name: &str) -> bool {
+    if let Some(il) = imp.assoc_item_list() {
+        for item in il.assoc_items() {
+            if let ast::AssocItem::Fn(f) = item {
+                if let Some(name) = f.name() {
+                    if name.text().eq_ignore_ascii_case(rhs_name) {
+                        return true;
+                    }
+                }
+            }
+        }
+    }
+
+    false
+}
+
+/// Find the start of the `impl` block for the given `ast::Impl`, i.e. the
+/// offset just after its `{`. Also appends a newline to `buf` as a side
+/// effect (the caller inserts `buf` at the returned offset).
+//
+// FIXME: this partially overlaps with `find_struct_impl`
+pub(crate) fn find_impl_block_start(impl_def: ast::Impl, buf: &mut String) -> Option<TextSize> {
+    buf.push('\n');
+    let start = impl_def.assoc_item_list().and_then(|it| it.l_curly_token())?.text_range().end();
+    Some(start)
+}
+
+/// Find the end of the `impl` block for the given `ast::Impl`, i.e. the end
+/// of the last element before its `}`. Also appends a newline to `buf` as a
+/// side effect (the caller inserts `buf` at the returned offset).
+//
+// FIXME: this partially overlaps with `find_struct_impl`
+pub(crate) fn find_impl_block_end(impl_def: ast::Impl, buf: &mut String) -> Option<TextSize> {
+    buf.push('\n');
+    let end = impl_def
+        .assoc_item_list()
+        .and_then(|it| it.r_curly_token())?
+        .prev_sibling_or_token()?
+        .text_range()
+        .end();
+    Some(end)
+}
+
+// Generates the surrounding `impl Type { <code> }` including type and lifetime
+// parameters
+pub(crate) fn generate_impl_text(adt: &ast::Adt, code: &str) -> String {
+    generate_impl_text_inner(adt, None, code)
+}
+
+// Generates the surrounding `impl <trait> for Type { <code> }` including type
+// and lifetime parameters
+pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &str) -> String {
+    generate_impl_text_inner(adt, Some(trait_text), code)
+}
+
+/// Shared worker for `generate_impl_text`/`generate_trait_impl_text`:
+/// renders `impl<params> [Trait for] Name<args> [where ...] { code }` as a
+/// string, carrying over the ADT's `#[cfg(...)]` attributes and generics.
+fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str) -> String {
+    let generic_params = adt.generic_param_list();
+    let mut buf = String::with_capacity(code.len());
+    buf.push_str("\n\n");
+    // Copy `#[cfg(...)]` attributes so the impl is gated like the ADT itself.
+    adt.attrs()
+        .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false))
+        .for_each(|attr| buf.push_str(format!("{}\n", attr).as_str()));
+    buf.push_str("impl");
+    if let Some(generic_params) = &generic_params {
+        // Parameter declarations: lifetimes and const params verbatim,
+        // type params with their bounds kept.
+        let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax()));
+        let toc_params = generic_params.type_or_const_params().map(|toc_param| {
+            let type_param = match toc_param {
+                ast::TypeOrConstParam::Type(x) => x,
+                ast::TypeOrConstParam::Const(x) => return x.syntax().to_string(),
+            };
+            let mut buf = String::new();
+            if let Some(it) = type_param.name() {
+                format_to!(buf, "{}", it.syntax());
+            }
+            if let Some(it) = type_param.colon_token() {
+                format_to!(buf, "{} ", it);
+            }
+            if let Some(it) = type_param.type_bound_list() {
+                format_to!(buf, "{}", it.syntax());
+            }
+            buf
+        });
+        let generics = lifetimes.chain(toc_params).format(", ");
+        format_to!(buf, "<{}>", generics);
+    }
+    buf.push(' ');
+    if let Some(trait_text) = trait_text {
+        buf.push_str(trait_text);
+        buf.push_str(" for ");
+    }
+    buf.push_str(&adt.name().unwrap().text());
+    if let Some(generic_params) = generic_params {
+        // Argument list on the self type: parameter names only, no bounds.
+        let lifetime_params = generic_params
+            .lifetime_params()
+            .filter_map(|it| it.lifetime())
+            .map(|it| SmolStr::from(it.text()));
+        let toc_params = generic_params
+            .type_or_const_params()
+            .filter_map(|it| it.name())
+            .map(|it| SmolStr::from(it.text()));
+        format_to!(buf, "<{}>", lifetime_params.chain(toc_params).format(", "))
+    }
+
+    match adt.where_clause() {
+        Some(where_clause) => {
+            format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code);
+        }
+        None => {
+            format_to!(buf, " {{\n{}\n}}", code);
+        }
+    }
+
+    buf
+}
+
+/// Inserts `method` into an existing `impl_def` when one is given, otherwise
+/// wraps it in a brand-new inherent impl for `adt`; the resulting text edit
+/// is recorded in `builder`.
+pub(crate) fn add_method_to_adt(
+    builder: &mut AssistBuilder,
+    adt: &ast::Adt,
+    impl_def: Option<ast::Impl>,
+    method: &str,
+) {
+    let mut buf = String::with_capacity(method.len() + 2);
+    if impl_def.is_some() {
+        buf.push('\n');
+    }
+    buf.push_str(method);
+
+    // Either append inside the existing impl, or generate a fresh impl block
+    // right after the ADT definition.
+    let start_offset = impl_def
+        .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
+        .unwrap_or_else(|| {
+            buf = generate_impl_text(adt, &buf);
+            adt.syntax().text_range().end()
+        });
+
+    builder.insert(start_offset, buf);
+}
+
+/// A chosen way of converting a borrowed field type into a friendlier getter
+/// return type (see `convert_reference_type`), together with the field type
+/// it applies to.
+#[derive(Debug)]
+pub(crate) struct ReferenceConversion {
+    conversion: ReferenceConversionType,
+    ty: hir::Type,
+}
+
+/// The concrete conversion strategy; each variant documents the shape of the
+/// conversion it stands for.
+#[derive(Debug)]
+enum ReferenceConversionType {
+    // reference can be stripped if the type is Copy
+    Copy,
+    // &String -> &str
+    AsRefStr,
+    // &Vec<T> -> &[T]
+    AsRefSlice,
+    // &Box<T> -> &T
+    Dereferenced,
+    // &Option<T> -> Option<&T>
+    Option,
+    // &Result<T, E> -> Result<&T, &E>
+    Result,
+}
+
+impl ReferenceConversion {
+    /// Renders the getter's return type as a string, according to the chosen
+    /// conversion (e.g. `&str`, `&[T]`, `Option<&T>`).
+    pub(crate) fn convert_type(&self, db: &dyn HirDatabase) -> String {
+        match self.conversion {
+            ReferenceConversionType::Copy => self.ty.display(db).to_string(),
+            ReferenceConversionType::AsRefStr => "&str".to_string(),
+            ReferenceConversionType::AsRefSlice => {
+                let type_argument_name =
+                    self.ty.type_arguments().next().unwrap().display(db).to_string();
+                format!("&[{}]", type_argument_name)
+            }
+            ReferenceConversionType::Dereferenced => {
+                let type_argument_name =
+                    self.ty.type_arguments().next().unwrap().display(db).to_string();
+                format!("&{}", type_argument_name)
+            }
+            ReferenceConversionType::Option => {
+                let type_argument_name =
+                    self.ty.type_arguments().next().unwrap().display(db).to_string();
+                format!("Option<&{}>", type_argument_name)
+            }
+            ReferenceConversionType::Result => {
+                let mut type_arguments = self.ty.type_arguments();
+                let first_type_argument_name =
+                    type_arguments.next().unwrap().display(db).to_string();
+                let second_type_argument_name =
+                    type_arguments.next().unwrap().display(db).to_string();
+                format!("Result<&{}, &{}>", first_type_argument_name, second_type_argument_name)
+            }
+        }
+    }
+
+    /// Renders the getter's body expression for `field_name`: a plain field
+    /// access for `Copy`, `self.<field>.as_ref()` for all other conversions.
+    pub(crate) fn getter(&self, field_name: String) -> String {
+        match self.conversion {
+            ReferenceConversionType::Copy => format!("self.{}", field_name),
+            ReferenceConversionType::AsRefStr
+            | ReferenceConversionType::AsRefSlice
+            | ReferenceConversionType::Dereferenced
+            | ReferenceConversionType::Option
+            | ReferenceConversionType::Result => format!("self.{}.as_ref()", field_name),
+        }
+    }
+}
+
+// FIXME: It should return a new hir::Type, but currently constructing new types is too cumbersome
+// and all users of this function operate on string type names, so they can do the conversion
+// itself themselves.
+/// Picks the first applicable reference conversion for `ty`, trying the
+/// strategies in order from cheapest (`Copy`) to most specific.
+pub(crate) fn convert_reference_type(
+    ty: hir::Type,
+    db: &RootDatabase,
+    famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversion> {
+    handle_copy(&ty, db)
+        .or_else(|| handle_as_ref_str(&ty, db, famous_defs))
+        .or_else(|| handle_as_ref_slice(&ty, db, famous_defs))
+        .or_else(|| handle_dereferenced(&ty, db, famous_defs))
+        .or_else(|| handle_option_as_ref(&ty, db, famous_defs))
+        .or_else(|| handle_result_as_ref(&ty, db, famous_defs))
+        .map(|conversion| ReferenceConversion { ty, conversion })
+}
+
+/// `&T` -> `T` when `T: Copy`.
+fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<ReferenceConversionType> {
+    ty.is_copy(db).then(|| ReferenceConversionType::Copy)
+}
+
+/// `&String` -> `&str` when the type implements `AsRef<str>`.
+fn handle_as_ref_str(
+    ty: &hir::Type,
+    db: &dyn HirDatabase,
+    famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+    let str_type = hir::BuiltinType::str().ty(db);
+
+    ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type])
+        .then(|| ReferenceConversionType::AsRefStr)
+}
+
+/// `&Vec<T>` -> `&[T]` when the type implements `AsRef<[T]>` for its first
+/// type argument.
+fn handle_as_ref_slice(
+    ty: &hir::Type,
+    db: &dyn HirDatabase,
+    famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+    let type_argument = ty.type_arguments().next()?;
+    let slice_type = hir::Type::new_slice(type_argument);
+
+    ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type])
+        .then(|| ReferenceConversionType::AsRefSlice)
+}
+
+/// `&Box<T>` -> `&T` when the type implements `AsRef<T>` for its first type
+/// argument.
+fn handle_dereferenced(
+    ty: &hir::Type,
+    db: &dyn HirDatabase,
+    famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+    let type_argument = ty.type_arguments().next()?;
+
+    ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument])
+        .then(|| ReferenceConversionType::Dereferenced)
+}
+
+/// `&Option<T>` -> `Option<&T>` when the ADT is `core::option::Option`.
+fn handle_option_as_ref(
+    ty: &hir::Type,
+    db: &dyn HirDatabase,
+    famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+    if ty.as_adt() == famous_defs.core_option_Option()?.ty(db).as_adt() {
+        Some(ReferenceConversionType::Option)
+    } else {
+        None
+    }
+}
+
+/// `&Result<T, E>` -> `Result<&T, &E>` when the ADT is `core::result::Result`.
+fn handle_result_as_ref(
+    ty: &hir::Type,
+    db: &dyn HirDatabase,
+    famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+    if ty.as_adt() == famous_defs.core_result_Result()?.ty(db).as_adt() {
+        Some(ReferenceConversionType::Result)
+    } else {
+        None
+    }
+}
+
+/// Returns all named `fn` items of `items`; other associated item kinds and
+/// nameless fns are skipped.
+pub(crate) fn get_methods(items: &ast::AssocItemList) -> Vec<ast::Fn> {
+    items
+        .assoc_items()
+        .flat_map(|i| match i {
+            ast::AssocItem::Fn(f) => Some(f),
+            _ => None,
+        })
+        .filter(|f| f.name().is_some())
+        .collect()
+}
+
+/// Trim(remove leading and trailing whitespace) `initial_range` in `source_file`, return the trimmed range.
+pub(crate) fn trimmed_text_range(source_file: &SourceFile, initial_range: TextRange) -> TextRange {
+    let mut trimmed_range = initial_range;
+    // Advance the start one character at a time while it sits in whitespace,
+    // without letting the range invert.
+    while source_file
+        .syntax()
+        .token_at_offset(trimmed_range.start())
+        .find_map(Whitespace::cast)
+        .is_some()
+        && trimmed_range.start() < trimmed_range.end()
+    {
+        let start = trimmed_range.start() + TextSize::from(1);
+        trimmed_range = TextRange::new(start, trimmed_range.end());
+    }
+    // Same for the end, retreating one character at a time.
+    while source_file
+        .syntax()
+        .token_at_offset(trimmed_range.end())
+        .find_map(Whitespace::cast)
+        .is_some()
+        && trimmed_range.start() < trimmed_range.end()
+    {
+        let end = trimmed_range.end() - TextSize::from(1);
+        trimmed_range = TextRange::new(trimmed_range.start(), end);
+    }
+    trimmed_range
+}
+
+/// Convert a list of function params to a list of arguments that can be passed
+/// into a function call.
+///
+/// Only simple `ident` patterns are forwarded as arguments; any other
+/// parameter pattern is silently skipped.
+pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgList {
+    let mut args = vec![];
+    for param in list.params() {
+        if let Some(ast::Pat::IdentPat(pat)) = param.pat() {
+            if let Some(name) = pat.name() {
+                let name = name.to_string();
+                let expr = make::expr_path(make::ext::ident_path(&name));
+                args.push(expr);
+            }
+        }
+    }
+    make::arg_list(args)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
new file mode 100644
index 000000000..7a0c91295
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
@@ -0,0 +1,661 @@
+//! This module contains functions to generate default trait impl function bodies where possible.
+
+use syntax::{
+ ast::{self, edit::AstNodeEdit, make, AstNode, BinaryOp, CmpOp, HasName, LogicOp},
+ ted,
+};
+
+/// Generate custom trait bodies without default implementation where possible.
+///
+/// Returns `Option` so that we can use `?` rather than `if let Some`. Returning
+/// `None` means that generating a custom trait body failed, and the body will remain
+/// as `todo!` instead.
+pub(crate) fn gen_trait_fn_body(
+    func: &ast::Fn,
+    trait_path: &ast::Path,
+    adt: &ast::Adt,
+) -> Option<()> {
+    // Dispatch on the final segment of the trait path.
+    match trait_path.segment()?.name_ref()?.text().as_str() {
+        "Clone" => gen_clone_impl(adt, func),
+        "Debug" => gen_debug_impl(adt, func),
+        "Default" => gen_default_impl(adt, func),
+        "Hash" => gen_hash_impl(adt, func),
+        "PartialEq" => gen_partial_eq(adt, func),
+        "PartialOrd" => gen_partial_ord(adt, func),
+        _ => None,
+    }
+}
+
+/// Generate a `Clone` impl based on the fields and members of the target type.
+fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+    // Sanity check: this generator is only meant for `fn clone`.
+    stdx::always!(func.name().map_or(false, |name| name.text() == "clone"));
+    // Builds `<target>.clone()`.
+    fn gen_clone_call(target: ast::Expr) -> ast::Expr {
+        let method = make::name_ref("clone");
+        make::expr_method_call(target, method, make::arg_list(None))
+    }
+    let expr = match adt {
+        // `Clone` cannot be derived for unions, so no default impl can be provided.
+        ast::Adt::Union(_) => return None,
+        ast::Adt::Enum(enum_) => {
+            let list = enum_.variant_list()?;
+            let mut arms = vec![];
+            for variant in list.variants() {
+                let name = variant.name()?;
+                let variant_name = make::ext::path_from_idents(["Self", &format!("{}", name)])?;
+
+                match variant.field_list() {
+                    // => match self { Self::Name { x } => Self::Name { x: x.clone() } }
+                    Some(ast::FieldList::RecordFieldList(list)) => {
+                        let mut pats = vec![];
+                        let mut fields = vec![];
+                        for field in list.fields() {
+                            let field_name = field.name()?;
+                            let pat = make::ident_pat(false, false, field_name.clone());
+                            pats.push(pat.into());
+
+                            let path = make::ext::ident_path(&field_name.to_string());
+                            let method_call = gen_clone_call(make::expr_path(path));
+                            let name_ref = make::name_ref(&field_name.to_string());
+                            let field = make::record_expr_field(name_ref, Some(method_call));
+                            fields.push(field);
+                        }
+                        let pat = make::record_pat(variant_name.clone(), pats.into_iter());
+                        let fields = make::record_expr_field_list(fields);
+                        let record_expr = make::record_expr(variant_name, fields).into();
+                        arms.push(make::match_arm(Some(pat.into()), None, record_expr));
+                    }
+
+                    // => match self { Self::Name(arg1) => Self::Name(arg1.clone()) }
+                    Some(ast::FieldList::TupleFieldList(list)) => {
+                        let mut pats = vec![];
+                        let mut fields = vec![];
+                        for (i, _) in list.fields().enumerate() {
+                            // Tuple fields are bound to synthetic `argN` names.
+                            let field_name = format!("arg{}", i);
+                            let pat = make::ident_pat(false, false, make::name(&field_name));
+                            pats.push(pat.into());
+
+                            let f_path = make::expr_path(make::ext::ident_path(&field_name));
+                            fields.push(gen_clone_call(f_path));
+                        }
+                        let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
+                        let struct_name = make::expr_path(variant_name);
+                        let tuple_expr = make::expr_call(struct_name, make::arg_list(fields));
+                        arms.push(make::match_arm(Some(pat.into()), None, tuple_expr));
+                    }
+
+                    // => match self { Self::Name => Self::Name }
+                    None => {
+                        let pattern = make::path_pat(variant_name.clone());
+                        let variant_expr = make::expr_path(variant_name);
+                        arms.push(make::match_arm(Some(pattern), None, variant_expr));
+                    }
+                }
+            }
+
+            let match_target = make::expr_path(make::ext::ident_path("self"));
+            let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+            make::expr_match(match_target, list)
+        }
+        ast::Adt::Struct(strukt) => {
+            match strukt.field_list() {
+                // => Self { name: self.name.clone() }
+                Some(ast::FieldList::RecordFieldList(field_list)) => {
+                    let mut fields = vec![];
+                    for field in field_list.fields() {
+                        let base = make::expr_path(make::ext::ident_path("self"));
+                        let target = make::expr_field(base, &field.name()?.to_string());
+                        let method_call = gen_clone_call(target);
+                        let name_ref = make::name_ref(&field.name()?.to_string());
+                        let field = make::record_expr_field(name_ref, Some(method_call));
+                        fields.push(field);
+                    }
+                    let struct_name = make::ext::ident_path("Self");
+                    let fields = make::record_expr_field_list(fields);
+                    make::record_expr(struct_name, fields).into()
+                }
+                // => Self(self.0.clone(), self.1.clone())
+                Some(ast::FieldList::TupleFieldList(field_list)) => {
+                    let mut fields = vec![];
+                    for (i, _) in field_list.fields().enumerate() {
+                        let f_path = make::expr_path(make::ext::ident_path("self"));
+                        let target = make::expr_field(f_path, &format!("{}", i));
+                        fields.push(gen_clone_call(target));
+                    }
+                    let struct_name = make::expr_path(make::ext::ident_path("Self"));
+                    make::expr_call(struct_name, make::arg_list(fields))
+                }
+                // => Self { }
+                None => {
+                    let struct_name = make::ext::ident_path("Self");
+                    let fields = make::record_expr_field_list(None);
+                    make::record_expr(struct_name, fields).into()
+                }
+            }
+        }
+    };
+    // Swap the function's existing body for the generated one, in place.
+    let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+    ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+    Some(())
+}
+
+/// Generate a `Debug` impl based on the fields and members of the target type.
+fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+    let annotated_name = adt.name()?;
+    match adt {
+        // `Debug` cannot be derived for unions, so no default impl can be provided.
+        ast::Adt::Union(_) => None,
+
+        // => match self { Self::Variant => write!(f, "Variant") }
+        ast::Adt::Enum(enum_) => {
+            let list = enum_.variant_list()?;
+            let mut arms = vec![];
+            for variant in list.variants() {
+                let name = variant.name()?;
+                let variant_name = make::ext::path_from_idents(["Self", &format!("{}", name)])?;
+                let target = make::expr_path(make::ext::ident_path("f"));
+
+                match variant.field_list() {
+                    Some(ast::FieldList::RecordFieldList(list)) => {
+                        // => f.debug_struct(name)
+                        let target = make::expr_path(make::ext::ident_path("f"));
+                        let method = make::name_ref("debug_struct");
+                        let struct_name = format!("\"{}\"", name);
+                        let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
+                        let mut expr = make::expr_method_call(target, method, args);
+
+                        let mut pats = vec![];
+                        for field in list.fields() {
+                            let field_name = field.name()?;
+
+                            // create a field pattern for use in `MyStruct { fields.. }`
+                            let pat = make::ident_pat(false, false, field_name.clone());
+                            pats.push(pat.into());
+
+                            // => <expr>.field("field_name", field)
+                            let method_name = make::name_ref("field");
+                            let name = make::expr_literal(&(format!("\"{}\"", field_name))).into();
+                            let path = &format!("{}", field_name);
+                            let path = make::expr_path(make::ext::ident_path(path));
+                            let args = make::arg_list(vec![name, path]);
+                            expr = make::expr_method_call(expr, method_name, args);
+                        }
+
+                        // => <expr>.finish()
+                        let method = make::name_ref("finish");
+                        let expr = make::expr_method_call(expr, method, make::arg_list(None));
+
+                        // => MyStruct { fields.. } => f.debug_struct("MyStruct")...finish(),
+                        let pat = make::record_pat(variant_name.clone(), pats.into_iter());
+                        arms.push(make::match_arm(Some(pat.into()), None, expr));
+                    }
+                    Some(ast::FieldList::TupleFieldList(list)) => {
+                        // => f.debug_tuple(name)
+                        let target = make::expr_path(make::ext::ident_path("f"));
+                        let method = make::name_ref("debug_tuple");
+                        let struct_name = format!("\"{}\"", name);
+                        let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
+                        let mut expr = make::expr_method_call(target, method, args);
+
+                        let mut pats = vec![];
+                        for (i, _) in list.fields().enumerate() {
+                            // Tuple fields are bound to synthetic `argN` names.
+                            let name = format!("arg{}", i);
+
+                            // create a field pattern for use in `MyStruct(fields..)`
+                            let field_name = make::name(&name);
+                            let pat = make::ident_pat(false, false, field_name.clone());
+                            pats.push(pat.into());
+
+                            // => <expr>.field(field)
+                            let method_name = make::name_ref("field");
+                            let field_path = &name.to_string();
+                            let field_path = make::expr_path(make::ext::ident_path(field_path));
+                            let args = make::arg_list(vec![field_path]);
+                            expr = make::expr_method_call(expr, method_name, args);
+                        }
+
+                        // => <expr>.finish()
+                        let method = make::name_ref("finish");
+                        let expr = make::expr_method_call(expr, method, make::arg_list(None));
+
+                        // => MyStruct (fields..) => f.debug_tuple("MyStruct")...finish(),
+                        let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
+                        arms.push(make::match_arm(Some(pat.into()), None, expr));
+                    }
+                    None => {
+                        // Unit variant: => Self::Name => write!(f, "Name")
+                        let fmt_string = make::expr_literal(&(format!("\"{}\"", name))).into();
+                        let args = make::arg_list([target, fmt_string]);
+                        let macro_name = make::expr_path(make::ext::ident_path("write"));
+                        let macro_call = make::expr_macro_call(macro_name, args);
+
+                        let variant_name = make::path_pat(variant_name);
+                        arms.push(make::match_arm(Some(variant_name), None, macro_call));
+                    }
+                }
+            }
+
+            let match_target = make::expr_path(make::ext::ident_path("self"));
+            let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+            let match_expr = make::expr_match(match_target, list);
+
+            // Swap the function's existing body for the generated match.
+            let body = make::block_expr(None, Some(match_expr));
+            let body = body.indent(ast::edit::IndentLevel(1));
+            ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+            Some(())
+        }
+
+        ast::Adt::Struct(strukt) => {
+            let name = format!("\"{}\"", annotated_name);
+            let args = make::arg_list(Some(make::expr_literal(&name).into()));
+            let target = make::expr_path(make::ext::ident_path("f"));
+
+            let expr = match strukt.field_list() {
+                // => f.debug_struct("Name").finish()
+                None => make::expr_method_call(target, make::name_ref("debug_struct"), args),
+
+                // => f.debug_struct("Name").field("foo", &self.foo).finish()
+                Some(ast::FieldList::RecordFieldList(field_list)) => {
+                    let method = make::name_ref("debug_struct");
+                    let mut expr = make::expr_method_call(target, method, args);
+                    for field in field_list.fields() {
+                        let name = field.name()?;
+                        let f_name = make::expr_literal(&(format!("\"{}\"", name))).into();
+                        let f_path = make::expr_path(make::ext::ident_path("self"));
+                        let f_path = make::expr_ref(f_path, false);
+                        let f_path = make::expr_field(f_path, &format!("{}", name));
+                        let args = make::arg_list([f_name, f_path]);
+                        expr = make::expr_method_call(expr, make::name_ref("field"), args);
+                    }
+                    expr
+                }
+
+                // => f.debug_tuple("Name").field(self.0).finish()
+                Some(ast::FieldList::TupleFieldList(field_list)) => {
+                    let method = make::name_ref("debug_tuple");
+                    let mut expr = make::expr_method_call(target, method, args);
+                    for (i, _) in field_list.fields().enumerate() {
+                        let f_path = make::expr_path(make::ext::ident_path("self"));
+                        let f_path = make::expr_ref(f_path, false);
+                        let f_path = make::expr_field(f_path, &format!("{}", i));
+                        let method = make::name_ref("field");
+                        expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path)));
+                    }
+                    expr
+                }
+            };
+
+            // Terminate the builder chain and install the generated body.
+            let method = make::name_ref("finish");
+            let expr = make::expr_method_call(expr, method, make::arg_list(None));
+            let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+            ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+            Some(())
+        }
+    }
+}
+
+/// Generate a `Default` impl based on the fields and members of the target type.
+fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ fn gen_default_call() -> Option<ast::Expr> {
+ let fn_name = make::ext::path_from_idents(["Default", "default"])?;
+ Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)))
+ }
+ match adt {
+ // `Default` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => None,
+ // Deriving `Default` for enums is not stable yet.
+ ast::Adt::Enum(_) => None,
+ ast::Adt::Struct(strukt) => {
+ let expr = match strukt.field_list() {
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut fields = vec![];
+ for field in field_list.fields() {
+ let method_call = gen_default_call()?;
+ let name_ref = make::name_ref(&field.name()?.to_string());
+ let field = make::record_expr_field(name_ref, Some(method_call));
+ fields.push(field);
+ }
+ let struct_name = make::ext::ident_path("Self");
+ let fields = make::record_expr_field_list(fields);
+ make::record_expr(struct_name, fields).into()
+ }
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let struct_name = make::expr_path(make::ext::ident_path("Self"));
+ let fields = field_list
+ .fields()
+ .map(|_| gen_default_call())
+ .collect::<Option<Vec<ast::Expr>>>()?;
+ make::expr_call(struct_name, make::arg_list(fields))
+ }
+ None => {
+ let struct_name = make::ext::ident_path("Self");
+ let fields = make::record_expr_field_list(None);
+ make::record_expr(struct_name, fields).into()
+ }
+ };
+ let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+ }
+ }
+}
+
+/// Generate a `Hash` impl based on the fields and members of the target type.
+fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ stdx::always!(func.name().map_or(false, |name| name.text() == "hash"));
+ fn gen_hash_call(target: ast::Expr) -> ast::Stmt {
+ let method = make::name_ref("hash");
+ let arg = make::expr_path(make::ext::ident_path("state"));
+ let expr = make::expr_method_call(target, method, make::arg_list(Some(arg)));
+ make::expr_stmt(expr).into()
+ }
+
+ let body = match adt {
+ // `Hash` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => return None,
+
+ // => std::mem::discriminant(self).hash(state);
+ ast::Adt::Enum(_) => {
+ let fn_name = make_discriminant()?;
+
+ let arg = make::expr_path(make::ext::ident_path("self"));
+ let fn_call = make::expr_call(fn_name, make::arg_list(Some(arg)));
+ let stmt = gen_hash_call(fn_call);
+
+ make::block_expr(Some(stmt), None).indent(ast::edit::IndentLevel(1))
+ }
+ ast::Adt::Struct(strukt) => match strukt.field_list() {
+ // => self.<field>.hash(state);
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut stmts = vec![];
+ for field in field_list.fields() {
+ let base = make::expr_path(make::ext::ident_path("self"));
+ let target = make::expr_field(base, &field.name()?.to_string());
+ stmts.push(gen_hash_call(target));
+ }
+ make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
+ }
+
+ // => self.<field_index>.hash(state);
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let mut stmts = vec![];
+ for (i, _) in field_list.fields().enumerate() {
+ let base = make::expr_path(make::ext::ident_path("self"));
+ let target = make::expr_field(base, &format!("{}", i));
+ stmts.push(gen_hash_call(target));
+ }
+ make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
+ }
+
+ // No fields in the body means there's nothing to hash.
+ None => return None,
+ },
+ };
+
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+}
+
+/// Generate a `PartialEq` impl based on the fields and members of the target type.
+fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ stdx::always!(func.name().map_or(false, |name| name.text() == "eq"));
+ fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
+ match expr {
+ Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)),
+ None => Some(cmp),
+ }
+ }
+
+ fn gen_record_pat_field(field_name: &str, pat_name: &str) -> ast::RecordPatField {
+ let pat = make::ext::simple_ident_pat(make::name(pat_name));
+ let name_ref = make::name_ref(field_name);
+ make::record_pat_field(name_ref, pat.into())
+ }
+
+ fn gen_record_pat(record_name: ast::Path, fields: Vec<ast::RecordPatField>) -> ast::RecordPat {
+ let list = make::record_pat_field_list(fields);
+ make::record_pat_with_fields(record_name, list)
+ }
+
+ fn gen_variant_path(variant: &ast::Variant) -> Option<ast::Path> {
+ make::ext::path_from_idents(["Self", &variant.name()?.to_string()])
+ }
+
+ fn gen_tuple_field(field_name: &String) -> ast::Pat {
+ ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name)))
+ }
+
+ // FIXME: return `None` if the trait carries a generic type; we can only
+ // generate this code for `Self` for the time being.
+
+ let body = match adt {
+ // `PartialEq` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => return None,
+
+ ast::Adt::Enum(enum_) => {
+ // => std::mem::discriminant(self) == std::mem::discriminant(other)
+ let lhs_name = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_call(make_discriminant()?, make::arg_list(Some(lhs_name.clone())));
+ let rhs_name = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_call(make_discriminant()?, make::arg_list(Some(rhs_name.clone())));
+ let eq_check =
+ make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+
+ let mut n_cases = 0;
+ let mut arms = vec![];
+ for variant in enum_.variant_list()?.variants() {
+ n_cases += 1;
+ match variant.field_list() {
+ // => (Self::Bar { bin: l_bin }, Self::Bar { bin: r_bin }) => l_bin == r_bin,
+ Some(ast::FieldList::RecordFieldList(list)) => {
+ let mut expr = None;
+ let mut l_fields = vec![];
+ let mut r_fields = vec![];
+
+ for field in list.fields() {
+ let field_name = field.name()?.to_string();
+
+ let l_name = &format!("l_{}", field_name);
+ l_fields.push(gen_record_pat_field(&field_name, l_name));
+
+ let r_name = &format!("r_{}", field_name);
+ r_fields.push(gen_record_pat_field(&field_name, r_name));
+
+ let lhs = make::expr_path(make::ext::ident_path(l_name));
+ let rhs = make::expr_path(make::ext::ident_path(r_name));
+ let cmp = make::expr_bin_op(
+ lhs,
+ BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
+ rhs,
+ );
+ expr = gen_eq_chain(expr, cmp);
+ }
+
+ let left = gen_record_pat(gen_variant_path(&variant)?, l_fields);
+ let right = gen_record_pat(gen_variant_path(&variant)?, r_fields);
+ let tuple = make::tuple_pat(vec![left.into(), right.into()]);
+
+ if let Some(expr) = expr {
+ arms.push(make::match_arm(Some(tuple.into()), None, expr));
+ }
+ }
+
+ Some(ast::FieldList::TupleFieldList(list)) => {
+ let mut expr = None;
+ let mut l_fields = vec![];
+ let mut r_fields = vec![];
+
+ for (i, _) in list.fields().enumerate() {
+ let field_name = format!("{}", i);
+
+ let l_name = format!("l{}", field_name);
+ l_fields.push(gen_tuple_field(&l_name));
+
+ let r_name = format!("r{}", field_name);
+ r_fields.push(gen_tuple_field(&r_name));
+
+ let lhs = make::expr_path(make::ext::ident_path(&l_name));
+ let rhs = make::expr_path(make::ext::ident_path(&r_name));
+ let cmp = make::expr_bin_op(
+ lhs,
+ BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
+ rhs,
+ );
+ expr = gen_eq_chain(expr, cmp);
+ }
+
+ let left = make::tuple_struct_pat(gen_variant_path(&variant)?, l_fields);
+ let right = make::tuple_struct_pat(gen_variant_path(&variant)?, r_fields);
+ let tuple = make::tuple_pat(vec![left.into(), right.into()]);
+
+ if let Some(expr) = expr {
+ arms.push(make::match_arm(Some(tuple.into()), None, expr));
+ }
+ }
+ None => continue,
+ }
+ }
+
+ let expr = match arms.len() {
+ 0 => eq_check,
+ _ => {
+ if n_cases > arms.len() {
+ let lhs = make::wildcard_pat().into();
+ arms.push(make::match_arm(Some(lhs), None, eq_check));
+ }
+
+ let match_target = make::expr_tuple(vec![lhs_name, rhs_name]);
+ let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ make::expr_match(match_target, list)
+ }
+ };
+
+ make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ }
+ ast::Adt::Struct(strukt) => match strukt.field_list() {
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut expr = None;
+ for field in field_list.fields() {
+ let lhs = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_field(lhs, &field.name()?.to_string());
+ let rhs = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_field(rhs, &field.name()?.to_string());
+ let cmp =
+ make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+ expr = gen_eq_chain(expr, cmp);
+ }
+ make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
+ }
+
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let mut expr = None;
+ for (i, _) in field_list.fields().enumerate() {
+ let idx = format!("{}", i);
+ let lhs = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_field(lhs, &idx);
+ let rhs = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_field(rhs, &idx);
+ let cmp =
+ make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+ expr = gen_eq_chain(expr, cmp);
+ }
+ make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
+ }
+
+ // No fields in the body means there's nothing to compare.
+ None => {
+ let expr = make::expr_literal("true").into();
+ make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ }
+ },
+ };
+
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+}
+
+fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ stdx::always!(func.name().map_or(false, |name| name.text() == "partial_cmp"));
+ fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
+ let mut arms = vec![];
+
+ let variant_name =
+ make::path_pat(make::ext::path_from_idents(["core", "cmp", "Ordering", "Equal"])?);
+ let lhs = make::tuple_struct_pat(make::ext::path_from_idents(["Some"])?, [variant_name]);
+ arms.push(make::match_arm(Some(lhs.into()), None, make::expr_empty_block()));
+
+ arms.push(make::match_arm(
+ [make::ident_pat(false, false, make::name("ord")).into()],
+ None,
+ make::expr_return(Some(make::expr_path(make::ext::ident_path("ord")))),
+ ));
+ let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ Some(make::expr_stmt(make::expr_match(match_target, list)).into())
+ }
+
+ fn gen_partial_cmp_call(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
+ let rhs = make::expr_ref(rhs, false);
+ let method = make::name_ref("partial_cmp");
+ make::expr_method_call(lhs, method, make::arg_list(Some(rhs)))
+ }
+
+ // FIXME: return `None` if the trait carries a generic type; we can only
+ // generate this code for `Self` for the time being.
+
+ let body = match adt {
+ // `PartialOrd` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => return None,
+ // `core::mem::Discriminant` does not implement `PartialOrd` in stable Rust today.
+ ast::Adt::Enum(_) => return None,
+ ast::Adt::Struct(strukt) => match strukt.field_list() {
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut exprs = vec![];
+ for field in field_list.fields() {
+ let lhs = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_field(lhs, &field.name()?.to_string());
+ let rhs = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_field(rhs, &field.name()?.to_string());
+ let ord = gen_partial_cmp_call(lhs, rhs);
+ exprs.push(ord);
+ }
+
+ let tail = exprs.pop();
+ let stmts = exprs
+ .into_iter()
+ .map(gen_partial_eq_match)
+ .collect::<Option<Vec<ast::Stmt>>>()?;
+ make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
+ }
+
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let mut exprs = vec![];
+ for (i, _) in field_list.fields().enumerate() {
+ let idx = format!("{}", i);
+ let lhs = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_field(lhs, &idx);
+ let rhs = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_field(rhs, &idx);
+ let ord = gen_partial_cmp_call(lhs, rhs);
+ exprs.push(ord);
+ }
+ let tail = exprs.pop();
+ let stmts = exprs
+ .into_iter()
+ .map(gen_partial_eq_match)
+ .collect::<Option<Vec<ast::Stmt>>>()?;
+ make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
+ }
+
+ // No fields in the body means there's nothing to compare.
+ None => {
+ let expr = make::expr_literal("true").into();
+ make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ }
+ },
+ };
+
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+}
+
+fn make_discriminant() -> Option<ast::Expr> {
+ Some(make::expr_path(make::ext::path_from_idents(["core", "mem", "discriminant"])?))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
new file mode 100644
index 000000000..779cdbc93
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
@@ -0,0 +1,775 @@
+//! This module contains functions to suggest names for expressions, functions and other items
+
+use hir::Semantics;
+use ide_db::RootDatabase;
+use itertools::Itertools;
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{self, HasName},
+ match_ast, AstNode, SmolStr,
+};
+
+/// Trait names, that will be ignored when in `impl Trait` and `dyn Trait`
+const USELESS_TRAITS: &[&str] = &["Send", "Sync", "Copy", "Clone", "Eq", "PartialEq"];
+
+/// Identifier names that won't be suggested, ever
+///
+/// **NOTE**: they all must be snake lower case
+const USELESS_NAMES: &[&str] =
+ &["new", "default", "option", "some", "none", "ok", "err", "str", "string"];
+
+/// Generic types replaced by their first argument
+///
+/// # Examples
+/// `Option<Name>` -> `Name`
+/// `Result<User, Error>` -> `User`
+const WRAPPER_TYPES: &[&str] = &["Box", "Option", "Result"];
+
+/// Prefixes to strip from methods names
+///
+/// # Examples
+/// `vec.as_slice()` -> `slice`
+/// `args.into_config()` -> `config`
+/// `bytes.to_vec()` -> `vec`
+const USELESS_METHOD_PREFIXES: &[&str] = &["into_", "as_", "to_"];
+
+/// Useless methods that are stripped from expression
+///
+/// # Examples
+/// `var.name().to_string()` -> `var.name()`
+const USELESS_METHODS: &[&str] = &[
+ "to_string",
+ "as_str",
+ "to_owned",
+ "as_ref",
+ "clone",
+ "cloned",
+ "expect",
+ "expect_none",
+ "unwrap",
+ "unwrap_none",
+ "unwrap_or",
+ "unwrap_or_default",
+ "unwrap_or_else",
+ "unwrap_unchecked",
+ "iter",
+ "into_iter",
+ "iter_mut",
+];
+
+pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
+ let c = ty
+ .type_bound_list()
+ .and_then(|bounds| bounds.syntax().text().char_at(0.into()))
+ .unwrap_or('T');
+ c.encode_utf8(&mut [0; 4]).into()
+}
+
+/// Suggest name of variable for given expression
+///
+/// **NOTE**: it is caller's responsibility to guarantee uniqueness of the name.
+/// I.e. it doesn't look for names in scope.
+///
+/// # Current implementation
+///
+/// In current implementation, the function tries to get the name from
+/// the following sources:
+///
+/// * if expr is an argument to function/method, use parameter name
+/// * if expr is a function/method call, use function name
+/// * expression type name if it exists (E.g. `()`, `fn() -> ()` or `!` do not have names)
+/// * fallback: `var_name`
+///
+/// It also applies heuristics to filter out less informative names
+///
+/// Currently it sticks to the first name found.
+// FIXME: Microoptimize and return a `SmolStr` here.
+pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> String {
+ // `from_param` does not benefit from stripping,
+ // as it needs the largest context possible,
+ // so we check it first
+ if let Some(name) = from_param(expr, sema) {
+ return name;
+ }
+
+ let mut next_expr = Some(expr.clone());
+ while let Some(expr) = next_expr {
+ let name =
+ from_call(&expr).or_else(|| from_type(&expr, sema)).or_else(|| from_field_name(&expr));
+ if let Some(name) = name {
+ return name;
+ }
+
+ match expr {
+ ast::Expr::RefExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::BoxExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::AwaitExpr(inner) => next_expr = inner.expr(),
+ // ast::Expr::BlockExpr(block) => expr = block.tail_expr(),
+ ast::Expr::CastExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::MethodCallExpr(method) if is_useless_method(&method) => {
+ next_expr = method.receiver();
+ }
+ ast::Expr::ParenExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::TryExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(ast::UnaryOp::Deref) => {
+ next_expr = prefix.expr()
+ }
+ _ => break,
+ }
+ }
+
+ "var_name".to_string()
+}
+
+fn normalize(name: &str) -> Option<String> {
+ let name = to_lower_snake_case(name);
+
+ if USELESS_NAMES.contains(&name.as_str()) {
+ return None;
+ }
+
+ if !is_valid_name(&name) {
+ return None;
+ }
+
+ Some(name)
+}
+
+fn is_valid_name(name: &str) -> bool {
+ match ide_db::syntax_helpers::LexedStr::single_token(name) {
+ Some((syntax::SyntaxKind::IDENT, _error)) => true,
+ _ => false,
+ }
+}
+
+fn is_useless_method(method: &ast::MethodCallExpr) -> bool {
+ let ident = method.name_ref().and_then(|it| it.ident_token());
+
+ match ident {
+ Some(ident) => USELESS_METHODS.contains(&ident.text()),
+ None => false,
+ }
+}
+
+fn from_call(expr: &ast::Expr) -> Option<String> {
+ from_func_call(expr).or_else(|| from_method_call(expr))
+}
+
+fn from_func_call(expr: &ast::Expr) -> Option<String> {
+ let call = match expr {
+ ast::Expr::CallExpr(call) => call,
+ _ => return None,
+ };
+ let func = match call.expr()? {
+ ast::Expr::PathExpr(path) => path,
+ _ => return None,
+ };
+ let ident = func.path()?.segment()?.name_ref()?.ident_token()?;
+ normalize(ident.text())
+}
+
+fn from_method_call(expr: &ast::Expr) -> Option<String> {
+ let method = match expr {
+ ast::Expr::MethodCallExpr(call) => call,
+ _ => return None,
+ };
+ let ident = method.name_ref()?.ident_token()?;
+ let mut name = ident.text();
+
+ if USELESS_METHODS.contains(&name) {
+ return None;
+ }
+
+ for prefix in USELESS_METHOD_PREFIXES {
+ if let Some(suffix) = name.strip_prefix(prefix) {
+ name = suffix;
+ break;
+ }
+ }
+
+ normalize(name)
+}
+
+fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {
+ let arg_list = expr.syntax().parent().and_then(ast::ArgList::cast)?;
+ let args_parent = arg_list.syntax().parent()?;
+ let func = match_ast! {
+ match args_parent {
+ ast::CallExpr(call) => {
+ let func = call.expr()?;
+ let func_ty = sema.type_of_expr(&func)?.adjusted();
+ func_ty.as_callable(sema.db)?
+ },
+ ast::MethodCallExpr(method) => sema.resolve_method_call_as_callable(&method)?,
+ _ => return None,
+ }
+ };
+
+ let (idx, _) = arg_list.args().find_position(|it| it == expr).unwrap();
+ let (pat, _) = func.params(sema.db).into_iter().nth(idx)?;
+ let pat = match pat? {
+ either::Either::Right(pat) => pat,
+ _ => return None,
+ };
+ let name = var_name_from_pat(&pat)?;
+ normalize(&name.to_string())
+}
+
+fn var_name_from_pat(pat: &ast::Pat) -> Option<ast::Name> {
+ match pat {
+ ast::Pat::IdentPat(var) => var.name(),
+ ast::Pat::RefPat(ref_pat) => var_name_from_pat(&ref_pat.pat()?),
+ ast::Pat::BoxPat(box_pat) => var_name_from_pat(&box_pat.pat()?),
+ _ => None,
+ }
+}
+
+fn from_type(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {
+ let ty = sema.type_of_expr(expr)?.adjusted();
+ let ty = ty.remove_ref().unwrap_or(ty);
+
+ name_of_type(&ty, sema.db)
+}
+
+fn name_of_type(ty: &hir::Type, db: &RootDatabase) -> Option<String> {
+ let name = if let Some(adt) = ty.as_adt() {
+ let name = adt.name(db).to_string();
+
+ if WRAPPER_TYPES.contains(&name.as_str()) {
+ let inner_ty = ty.type_arguments().next()?;
+ return name_of_type(&inner_ty, db);
+ }
+
+ name
+ } else if let Some(trait_) = ty.as_dyn_trait() {
+ trait_name(&trait_, db)?
+ } else if let Some(traits) = ty.as_impl_traits(db) {
+ let mut iter = traits.filter_map(|t| trait_name(&t, db));
+ let name = iter.next()?;
+ if iter.next().is_some() {
+ return None;
+ }
+ name
+ } else {
+ return None;
+ };
+ normalize(&name)
+}
+
+fn trait_name(trait_: &hir::Trait, db: &RootDatabase) -> Option<String> {
+ let name = trait_.name(db).to_string();
+ if USELESS_TRAITS.contains(&name.as_str()) {
+ return None;
+ }
+ Some(name)
+}
+
+fn from_field_name(expr: &ast::Expr) -> Option<String> {
+ let field = match expr {
+ ast::Expr::FieldExpr(field) => field,
+ _ => return None,
+ };
+ let ident = field.name_ref()?.ident_token()?;
+ normalize(ident.text())
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::{fixture::WithFixture, FileRange};
+
+ use super::*;
+
+ #[track_caller]
+ fn check(ra_fixture: &str, expected: &str) {
+ let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture);
+ let frange = FileRange { file_id, range: range_or_offset.into() };
+
+ let sema = Semantics::new(&db);
+ let source_file = sema.parse(frange.file_id);
+ let element = source_file.syntax().covering_element(frange.range);
+ let expr =
+ element.ancestors().find_map(ast::Expr::cast).expect("selection is not an expression");
+ assert_eq!(
+ expr.syntax().text_range(),
+ frange.range,
+ "selection is not an expression(yet contained in one)"
+ );
+ let name = for_variable(&expr, &sema);
+ assert_eq!(&name, expected);
+ }
+
+ #[test]
+ fn no_args() {
+ check(r#"fn foo() { $0bar()$0 }"#, "bar");
+ check(r#"fn foo() { $0bar.frobnicate()$0 }"#, "frobnicate");
+ }
+
+ #[test]
+ fn single_arg() {
+ check(r#"fn foo() { $0bar(1)$0 }"#, "bar");
+ }
+
+ #[test]
+ fn many_args() {
+ check(r#"fn foo() { $0bar(1, 2, 3)$0 }"#, "bar");
+ }
+
+ #[test]
+ fn path() {
+ check(r#"fn foo() { $0i32::bar(1, 2, 3)$0 }"#, "bar");
+ }
+
+ #[test]
+ fn generic_params() {
+ check(r#"fn foo() { $0bar::<i32>(1, 2, 3)$0 }"#, "bar");
+ check(r#"fn foo() { $0bar.frobnicate::<i32, u32>()$0 }"#, "frobnicate");
+ }
+
+ #[test]
+ fn to_name() {
+ check(
+ r#"
+struct Args;
+struct Config;
+impl Args {
+ fn to_config(&self) -> Config {}
+}
+fn foo() {
+ $0Args.to_config()$0;
+}
+"#,
+ "config",
+ );
+ }
+
+ #[test]
+ fn plain_func() {
+ check(
+ r#"
+fn bar(n: i32, m: u32);
+fn foo() { bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn mut_param() {
+ check(
+ r#"
+fn bar(mut n: i32, m: u32);
+fn foo() { bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn func_does_not_exist() {
+ check(r#"fn foo() { bar($01$0, 2) }"#, "var_name");
+ }
+
+ #[test]
+ fn unnamed_param() {
+ check(
+ r#"
+fn bar(_: i32, m: u32);
+fn foo() { bar($01$0, 2) }
+"#,
+ "var_name",
+ );
+ }
+
+ #[test]
+ fn tuple_pat() {
+ check(
+ r#"
+fn bar((n, k): (i32, i32), m: u32);
+fn foo() {
+ bar($0(1, 2)$0, 3)
+}
+"#,
+ "var_name",
+ );
+ }
+
+ #[test]
+ fn ref_pat() {
+ check(
+ r#"
+fn bar(&n: &i32, m: u32);
+fn foo() { bar($0&1$0, 3) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn box_pat() {
+ check(
+ r#"
+fn bar(box n: &i32, m: u32);
+fn foo() { bar($01$0, 3) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn param_out_of_index() {
+ check(
+ r#"
+fn bar(n: i32, m: u32);
+fn foo() { bar(1, 2, $03$0) }
+"#,
+ "var_name",
+ );
+ }
+
+ #[test]
+ fn generic_param_resolved() {
+ check(
+ r#"
+fn bar<T>(n: T, m: u32);
+fn foo() { bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn generic_param_unresolved() {
+ check(
+ r#"
+fn bar<T>(n: T, m: u32);
+fn foo<T>(x: T) { bar($0x$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn method() {
+ check(
+ r#"
+struct S;
+impl S { fn bar(&self, n: i32, m: u32); }
+fn foo() { S.bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn method_on_impl_trait() {
+ check(
+ r#"
+struct S;
+trait T {
+ fn bar(&self, n: i32, m: u32);
+}
+impl T for S { fn bar(&self, n: i32, m: u32); }
+fn foo() { S.bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn method_ufcs() {
+ check(
+ r#"
+struct S;
+impl S { fn bar(&self, n: i32, m: u32); }
+fn foo() { S::bar(&S, $01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn method_self() {
+ check(
+ r#"
+struct S;
+impl S { fn bar(&self, n: i32, m: u32); }
+fn foo() { S::bar($0&S$0, 1, 2) }
+"#,
+ "s",
+ );
+ }
+
+ #[test]
+ fn method_self_named() {
+ check(
+ r#"
+struct S;
+impl S { fn bar(strukt: &Self, n: i32, m: u32); }
+fn foo() { S::bar($0&S$0, 1, 2) }
+"#,
+ "strukt",
+ );
+ }
+
+ #[test]
+ fn i32() {
+ check(r#"fn foo() { let _: i32 = $01$0; }"#, "var_name");
+ }
+
+ #[test]
+ fn u64() {
+ check(r#"fn foo() { let _: u64 = $01$0; }"#, "var_name");
+ }
+
+ #[test]
+ fn bool() {
+ check(r#"fn foo() { let _: bool = $0true$0; }"#, "var_name");
+ }
+
+ #[test]
+ fn struct_unit() {
+ check(
+ r#"
+struct Seed;
+fn foo() { let _ = $0Seed$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn struct_unit_to_snake() {
+ check(
+ r#"
+struct SeedState;
+fn foo() { let _ = $0SeedState$0; }
+"#,
+ "seed_state",
+ );
+ }
+
+ #[test]
+ fn struct_single_arg() {
+ check(
+ r#"
+struct Seed(u32);
+fn foo() { let _ = $0Seed(0)$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn struct_with_fields() {
+ check(
+ r#"
+struct Seed { value: u32 }
+fn foo() { let _ = $0Seed { value: 0 }$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn enum_() {
+ check(
+ r#"
+enum Kind { A, B }
+fn foo() { let _ = $0Kind::A$0; }
+"#,
+ "kind",
+ );
+ }
+
+ #[test]
+ fn enum_generic_resolved() {
+ check(
+ r#"
+enum Kind<T> { A { x: T }, B }
+fn foo() { let _ = $0Kind::A { x:1 }$0; }
+"#,
+ "kind",
+ );
+ }
+
+ #[test]
+ fn enum_generic_unresolved() {
+ check(
+ r#"
+enum Kind<T> { A { x: T }, B }
+fn foo<T>(x: T) { let _ = $0Kind::A { x }$0; }
+"#,
+ "kind",
+ );
+ }
+
+ #[test]
+ fn dyn_trait() {
+ check(
+ r#"
+trait DynHandler {}
+fn bar() -> dyn DynHandler {}
+fn foo() { $0(bar())$0; }
+"#,
+ "dyn_handler",
+ );
+ }
+
+ #[test]
+ fn impl_trait() {
+ check(
+ r#"
+trait StaticHandler {}
+fn bar() -> impl StaticHandler {}
+fn foo() { $0(bar())$0; }
+"#,
+ "static_handler",
+ );
+ }
+
+ #[test]
+ fn impl_trait_plus_clone() {
+ check(
+ r#"
+trait StaticHandler {}
+trait Clone {}
+fn bar() -> impl StaticHandler + Clone {}
+fn foo() { $0(bar())$0; }
+"#,
+ "static_handler",
+ );
+ }
+
+ #[test]
+ fn impl_trait_plus_lifetime() {
+ check(
+ r#"
+trait StaticHandler {}
+trait Clone {}
+fn bar<'a>(&'a i32) -> impl StaticHandler + 'a {}
+fn foo() { $0(bar(&1))$0; }
+"#,
+ "static_handler",
+ );
+ }
+
+ #[test]
+ fn impl_trait_plus_trait() {
+ check(
+ r#"
+trait Handler {}
+trait StaticHandler {}
+fn bar() -> impl StaticHandler + Handler {}
+fn foo() { $0(bar())$0; }
+"#,
+ "bar",
+ );
+ }
+
+ #[test]
+ fn ref_value() {
+ check(
+ r#"
+struct Seed;
+fn bar() -> &Seed {}
+fn foo() { $0(bar())$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn box_value() {
+ check(
+ r#"
+struct Box<T>(*const T);
+struct Seed;
+fn bar() -> Box<Seed> {}
+fn foo() { $0(bar())$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn box_generic() {
+ check(
+ r#"
+struct Box<T>(*const T);
+fn bar<T>() -> Box<T> {}
+fn foo<T>() { $0(bar::<T>())$0; }
+"#,
+ "bar",
+ );
+ }
+
+ #[test]
+ fn option_value() {
+ check(
+ r#"
+enum Option<T> { Some(T) }
+struct Seed;
+fn bar() -> Option<Seed> {}
+fn foo() { $0(bar())$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn result_value() {
+ check(
+ r#"
+enum Result<T, E> { Ok(T), Err(E) }
+struct Seed;
+struct Error;
+fn bar() -> Result<Seed, Error> {}
+fn foo() { $0(bar())$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn ref_call() {
+ check(
+ r#"
+fn foo() { $0&bar(1, 3)$0 }
+"#,
+ "bar",
+ );
+ }
+
+ #[test]
+ fn name_to_string() {
+ check(
+ r#"
+fn foo() { $0function.name().to_string()$0 }
+"#,
+ "name",
+ );
+ }
+
+ #[test]
+ fn nested_useless_method() {
+ check(
+ r#"
+fn foo() { $0function.name().as_ref().unwrap().to_string()$0 }
+"#,
+ "name",
+ );
+ }
+
+ #[test]
+ fn struct_field_name() {
+ check(
+ r#"
+struct S<T> {
+ some_field: T;
+}
+fn foo<T>(some_struct: S<T>) { $0some_struct.some_field$0 }
+"#,
+ "some_field",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
new file mode 100644
index 000000000..8c9d6b228
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+name = "ide-completion"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.3"
+
+once_cell = "1.12.0"
+smallvec = "1.9.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+
+# completions crate should depend only on the top-level `hir` package. if you need
+# something from some `hir-xxx` subpackage, reexport the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+test-utils = { path = "../test-utils" }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
new file mode 100644
index 000000000..72579e602
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
@@ -0,0 +1,691 @@
+//! This module defines an accumulator for completions which are going to be presented to user.
+
+pub(crate) mod attribute;
+pub(crate) mod dot;
+pub(crate) mod expr;
+pub(crate) mod extern_abi;
+pub(crate) mod field;
+pub(crate) mod flyimport;
+pub(crate) mod fn_param;
+pub(crate) mod format_string;
+pub(crate) mod item_list;
+pub(crate) mod keyword;
+pub(crate) mod lifetime;
+pub(crate) mod mod_;
+pub(crate) mod pattern;
+pub(crate) mod postfix;
+pub(crate) mod record;
+pub(crate) mod snippet;
+pub(crate) mod r#type;
+pub(crate) mod use_;
+pub(crate) mod vis;
+
+use std::iter;
+
+use hir::{known, ScopeDef};
+use ide_db::{imports::import_assets::LocatedImport, SymbolKind};
+use syntax::ast;
+
+use crate::{
+ context::{
+ DotAccess, ItemListKind, NameContext, NameKind, NameRefContext, NameRefKind,
+ PathCompletionCtx, PathKind, PatternContext, TypeLocation, Visible,
+ },
+ item::Builder,
+ render::{
+ const_::render_const,
+ function::{render_fn, render_method},
+ literal::{render_struct_literal, render_variant_lit},
+ macro_::render_macro,
+ pattern::{render_struct_pat, render_variant_pat},
+ render_field, render_path_resolution, render_pattern_resolution, render_tuple_field,
+ type_alias::{render_type_alias, render_type_alias_with_eq},
+ union_literal::render_union_literal,
+ RenderContext,
+ },
+ CompletionContext, CompletionItem, CompletionItemKind,
+};
+
+/// Represents an in-progress set of completions being built.
+#[derive(Debug, Default)]
+pub struct Completions {
+    // Items accumulated so far, in insertion order.
+    buf: Vec<CompletionItem>,
+}
+
+// Finalization: surrender the accumulated items to the caller.
+impl From<Completions> for Vec<CompletionItem> {
+    fn from(val: Completions) -> Self {
+        val.buf
+    }
+}
+
+impl Builder {
+    /// Convenience method that builds the item and adds it straight into the
+    /// accumulator, without binding the finished item to a variable first.
+    pub(crate) fn add_to(self, acc: &mut Completions) {
+        acc.add(self.build())
+    }
+}
+
+impl Completions {
+    /// Push a single finished item into the accumulator.
+    fn add(&mut self, item: CompletionItem) {
+        self.buf.push(item)
+    }
+
+    /// Push the item if rendering produced one, otherwise do nothing.
+    fn add_opt(&mut self, item: Option<CompletionItem>) {
+        if let Some(item) = item {
+            self.buf.push(item)
+        }
+    }
+
+    /// Push every item of an iterator, converting each into a `CompletionItem`.
+    pub(crate) fn add_all<I>(&mut self, items: I)
+    where
+        I: IntoIterator,
+        I::Item: Into<CompletionItem>,
+    {
+        items.into_iter().for_each(|item| self.add(item.into()))
+    }
+
+    /// Add a bare keyword completion (no snippet).
+    pub(crate) fn add_keyword(&mut self, ctx: &CompletionContext<'_>, keyword: &'static str) {
+        let item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), keyword);
+        item.add_to(self);
+    }
+
+    /// Add the path-qualifier keywords (`self::`, `crate::`, and `super::` when
+    /// the current module is below the crate root).
+    pub(crate) fn add_nameref_keywords_with_colon(&mut self, ctx: &CompletionContext<'_>) {
+        ["self::", "crate::"].into_iter().for_each(|kw| self.add_keyword(ctx, kw));
+
+        if ctx.depth_from_crate_root > 0 {
+            self.add_keyword(ctx, "super::");
+        }
+    }
+
+    /// Same as `add_nameref_keywords_with_colon`, but without the trailing `::`.
+    pub(crate) fn add_nameref_keywords(&mut self, ctx: &CompletionContext<'_>) {
+        ["self", "crate"].into_iter().for_each(|kw| self.add_keyword(ctx, kw));
+
+        if ctx.depth_from_crate_root > 0 {
+            self.add_keyword(ctx, "super");
+        }
+    }
+
+    /// Offer another `super::` segment, but only while the existing `super`
+    /// chain can still go further up towards the crate root.
+    pub(crate) fn add_super_keyword(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        super_chain_len: Option<usize>,
+    ) {
+        if let Some(len) = super_chain_len {
+            if len > 0 && len < ctx.depth_from_crate_root {
+                self.add_keyword(ctx, "super::");
+            }
+        }
+    }
+
+    /// Add a keyword completion that expands to `snippet` in expression
+    /// position. Without snippet support the plain keyword is inserted instead
+    /// whenever the snippet contains tab stops (`$`).
+    pub(crate) fn add_keyword_snippet_expr(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        incomplete_let: bool,
+        kw: &str,
+        snippet: &str,
+    ) {
+        let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
+
+        match ctx.config.snippet_cap {
+            Some(cap) => {
+                if incomplete_let && snippet.ends_with('}') {
+                    // complete block expression snippets with a trailing semicolon, if inside an incomplete let
+                    cov_mark::hit!(let_semi);
+                    item.insert_snippet(cap, format!("{};", snippet));
+                } else {
+                    item.insert_snippet(cap, snippet);
+                }
+            }
+            None => {
+                item.insert_text(if snippet.contains('$') { kw } else { snippet });
+            }
+        };
+        item.add_to(self);
+    }
+
+    /// Add a keyword completion with a snippet body (item/other positions).
+    pub(crate) fn add_keyword_snippet(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        kw: &str,
+        snippet: &str,
+    ) {
+        let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
+
+        match ctx.config.snippet_cap {
+            Some(cap) => item.insert_snippet(cap, snippet),
+            None => item.insert_text(if snippet.contains('$') { kw } else { snippet }),
+        };
+        item.add_to(self);
+    }
+
+    /// Add all crate-root modules reachable from the current scope.
+    pub(crate) fn add_crate_roots(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+    ) {
+        ctx.process_all_names(&mut |name, res| match res {
+            ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) if m.is_crate_root(ctx.db) => {
+                self.add_module(ctx, path_ctx, m, name);
+            }
+            _ => (),
+        });
+    }
+
+    /// Render `resolution` as a path completion. Items invisible from the
+    /// completion site are skipped entirely; `Visible::Editable` (visible only
+    /// because the defining crate is editable) is passed through to rendering.
+    pub(crate) fn add_path_resolution(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        local_name: hir::Name,
+        resolution: hir::ScopeDef,
+    ) {
+        let is_private_editable = match ctx.def_is_visible(&resolution) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        self.add(
+            render_path_resolution(
+                RenderContext::new(ctx).private_editable(is_private_editable),
+                path_ctx,
+                local_name,
+                resolution,
+            )
+            .build(),
+        );
+    }
+
+    /// Render `resolution` as a pattern completion, with the same visibility
+    /// gating as `add_path_resolution`.
+    pub(crate) fn add_pattern_resolution(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        pattern_ctx: &PatternContext,
+        local_name: hir::Name,
+        resolution: hir::ScopeDef,
+    ) {
+        let is_private_editable = match ctx.def_is_visible(&resolution) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        self.add(
+            render_pattern_resolution(
+                RenderContext::new(ctx).private_editable(is_private_editable),
+                pattern_ctx,
+                local_name,
+                resolution,
+            )
+            .build(),
+        );
+    }
+
+    /// Add a completion for every variant of `e`.
+    pub(crate) fn add_enum_variants(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        e: hir::Enum,
+    ) {
+        e.variants(ctx.db)
+            .into_iter()
+            .for_each(|variant| self.add_enum_variant(ctx, path_ctx, variant, None));
+    }
+
+    /// Add a module completion under `local_name`.
+    pub(crate) fn add_module(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        module: hir::Module,
+        local_name: hir::Name,
+    ) {
+        self.add_path_resolution(
+            ctx,
+            path_ctx,
+            local_name,
+            hir::ScopeDef::ModuleDef(module.into()),
+        );
+    }
+
+    /// Add a macro completion; invisible macros are skipped.
+    pub(crate) fn add_macro(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        mac: hir::Macro,
+        local_name: hir::Name,
+    ) {
+        let is_private_editable = match ctx.is_visible(&mac) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        self.add(
+            render_macro(
+                RenderContext::new(ctx).private_editable(is_private_editable),
+                path_ctx,
+                local_name,
+                mac,
+            )
+            .build(),
+        );
+    }
+
+    /// Add a free-function (path position) completion; invisible functions are skipped.
+    pub(crate) fn add_function(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        func: hir::Function,
+        local_name: Option<hir::Name>,
+    ) {
+        let is_private_editable = match ctx.is_visible(&func) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        self.add(
+            render_fn(
+                RenderContext::new(ctx).private_editable(is_private_editable),
+                path_ctx,
+                local_name,
+                func,
+            )
+            .build(),
+        );
+    }
+
+    /// Add a method (dot-access position) completion; invisible methods are skipped.
+    pub(crate) fn add_method(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        dot_access: &DotAccess,
+        func: hir::Function,
+        receiver: Option<hir::Name>,
+        local_name: Option<hir::Name>,
+    ) {
+        let is_private_editable = match ctx.is_visible(&func) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        self.add(
+            render_method(
+                RenderContext::new(ctx).private_editable(is_private_editable),
+                dot_access,
+                receiver,
+                local_name,
+                func,
+            )
+            .build(),
+        );
+    }
+
+    /// Add a method completion whose trait/defining item needs `import` added
+    /// to the file (flyimport).
+    pub(crate) fn add_method_with_import(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        dot_access: &DotAccess,
+        func: hir::Function,
+        import: LocatedImport,
+    ) {
+        let is_private_editable = match ctx.is_visible(&func) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        self.add(
+            render_method(
+                RenderContext::new(ctx)
+                    .private_editable(is_private_editable)
+                    .import_to_add(Some(import)),
+                dot_access,
+                None,
+                None,
+                func,
+            )
+            .build(),
+        );
+    }
+
+    /// Add a `const` completion; invisible consts are skipped.
+    pub(crate) fn add_const(&mut self, ctx: &CompletionContext<'_>, konst: hir::Const) {
+        let is_private_editable = match ctx.is_visible(&konst) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        self.add_opt(render_const(
+            RenderContext::new(ctx).private_editable(is_private_editable),
+            konst,
+        ));
+    }
+
+    /// Add a type-alias completion; invisible aliases are skipped.
+    pub(crate) fn add_type_alias(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        type_alias: hir::TypeAlias,
+    ) {
+        let is_private_editable = match ctx.is_visible(&type_alias) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        self.add_opt(render_type_alias(
+            RenderContext::new(ctx).private_editable(is_private_editable),
+            type_alias,
+        ));
+    }
+
+    /// Add a `Alias = ` style type-alias completion (used in trait impls).
+    pub(crate) fn add_type_alias_with_eq(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        type_alias: hir::TypeAlias,
+    ) {
+        self.add_opt(render_type_alias_with_eq(RenderContext::new(ctx), type_alias));
+    }
+
+    /// Add an enum variant completion spelled with an explicit qualifying `path`.
+    pub(crate) fn add_qualified_enum_variant(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        variant: hir::Variant,
+        path: hir::ModPath,
+    ) {
+        if let Some(builder) =
+            render_variant_lit(RenderContext::new(ctx), path_ctx, None, variant, Some(path))
+        {
+            self.add(builder.build());
+        }
+    }
+
+    /// Add an enum variant completion; in pattern position this is rendered as
+    /// a variant pattern instead of a literal.
+    pub(crate) fn add_enum_variant(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        variant: hir::Variant,
+        local_name: Option<hir::Name>,
+    ) {
+        if let PathCompletionCtx { kind: PathKind::Pat { pat_ctx }, .. } = path_ctx {
+            cov_mark::hit!(enum_variant_pattern_path);
+            self.add_variant_pat(ctx, pat_ctx, Some(path_ctx), variant, local_name);
+            return;
+        }
+
+        if let Some(builder) =
+            render_variant_lit(RenderContext::new(ctx), path_ctx, local_name, variant, None)
+        {
+            self.add(builder.build());
+        }
+    }
+
+    /// Add a named-field (dot-access) completion; invisible fields are skipped.
+    pub(crate) fn add_field(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        dot_access: &DotAccess,
+        receiver: Option<hir::Name>,
+        field: hir::Field,
+        ty: &hir::Type,
+    ) {
+        let is_private_editable = match ctx.is_visible(&field) {
+            Visible::Yes => false,
+            Visible::Editable => true,
+            Visible::No => return,
+        };
+        let item = render_field(
+            RenderContext::new(ctx).private_editable(is_private_editable),
+            dot_access,
+            receiver,
+            field,
+            ty,
+        );
+        self.add(item);
+    }
+
+    /// Add a `Struct { .. }` literal completion.
+    pub(crate) fn add_struct_literal(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        strukt: hir::Struct,
+        path: Option<hir::ModPath>,
+        local_name: Option<hir::Name>,
+    ) {
+        if let Some(builder) =
+            render_struct_literal(RenderContext::new(ctx), path_ctx, strukt, path, local_name)
+        {
+            self.add(builder.build());
+        }
+    }
+
+    /// Add a `Union { .. }` literal completion.
+    pub(crate) fn add_union_literal(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        un: hir::Union,
+        path: Option<hir::ModPath>,
+        local_name: Option<hir::Name>,
+    ) {
+        let item = render_union_literal(RenderContext::new(ctx), un, path, local_name);
+        self.add_opt(item);
+    }
+
+    /// Add a positional tuple-field (`.0`, `.1`, …) completion.
+    pub(crate) fn add_tuple_field(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        receiver: Option<hir::Name>,
+        field: usize,
+        ty: &hir::Type,
+    ) {
+        let item = render_tuple_field(RenderContext::new(ctx), receiver, field, ty);
+        self.add(item);
+    }
+
+    /// Add a lifetime-parameter completion.
+    pub(crate) fn add_lifetime(&mut self, ctx: &CompletionContext<'_>, name: hir::Name) {
+        CompletionItem::new(SymbolKind::LifetimeParam, ctx.source_range(), name.to_smol_str())
+            .add_to(self)
+    }
+
+    /// Add a loop/block label completion.
+    pub(crate) fn add_label(&mut self, ctx: &CompletionContext<'_>, name: hir::Name) {
+        CompletionItem::new(SymbolKind::Label, ctx.source_range(), name.to_smol_str()).add_to(self)
+    }
+
+    /// Add an enum-variant pattern completion.
+    pub(crate) fn add_variant_pat(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        pattern_ctx: &PatternContext,
+        path_ctx: Option<&PathCompletionCtx>,
+        variant: hir::Variant,
+        local_name: Option<hir::Name>,
+    ) {
+        self.add_opt(render_variant_pat(
+            RenderContext::new(ctx),
+            pattern_ctx,
+            path_ctx,
+            variant,
+            // `local_name` is owned and unused afterwards, no need to clone it.
+            local_name,
+            None,
+        ));
+    }
+
+    /// Add an enum-variant pattern completion spelled with an explicit `path`.
+    pub(crate) fn add_qualified_variant_pat(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        pattern_ctx: &PatternContext,
+        variant: hir::Variant,
+        path: hir::ModPath,
+    ) {
+        let path = Some(&path);
+        self.add_opt(render_variant_pat(
+            RenderContext::new(ctx),
+            pattern_ctx,
+            None,
+            variant,
+            None,
+            path,
+        ));
+    }
+
+    /// Add a struct pattern (`S { .. }`) completion.
+    pub(crate) fn add_struct_pat(
+        &mut self,
+        ctx: &CompletionContext<'_>,
+        pattern_ctx: &PatternContext,
+        strukt: hir::Struct,
+        local_name: Option<hir::Name>,
+    ) {
+        self.add_opt(render_struct_pat(RenderContext::new(ctx), pattern_ctx, strukt, local_name));
+    }
+}
+
+/// Calls the callback for each variant of the provided enum with the path to the variant.
+/// Skips variants that are visible with single segment paths.
+fn enum_variants_with_paths(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    enum_: hir::Enum,
+    impl_: &Option<ast::Impl>,
+    cb: impl Fn(&mut Completions, &CompletionContext<'_>, hir::Variant, hir::ModPath),
+) {
+    let variants = enum_.variants(ctx.db);
+
+    // Inside an `impl` of the enum itself, additionally offer `Self::Variant` paths.
+    if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
+        if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) {
+            for &variant in &variants {
+                let self_path = hir::ModPath::from_segments(
+                    hir::PathKind::Plain,
+                    iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))),
+                );
+                cb(acc, ctx, variant, self_path);
+            }
+        }
+    }
+
+    for variant in variants {
+        if let Some(path) = ctx.module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) {
+            // Variants with trivial paths are already added by the existing completion logic,
+            // so we should avoid adding these twice
+            if path.segments().len() > 1 {
+                cb(acc, ctx, variant, path);
+            }
+        }
+    }
+}
+
+/// Entry point for completions triggered on a `Name` (a *defining* occurrence
+/// of an identifier). Dispatches on what kind of item is being named.
+pub(super) fn complete_name(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    NameContext { name, kind }: &NameContext,
+) {
+    match kind {
+        NameKind::Const => {
+            item_list::trait_impl::complete_trait_impl_const(acc, ctx, name);
+        }
+        NameKind::Function => {
+            item_list::trait_impl::complete_trait_impl_fn(acc, ctx, name);
+        }
+        NameKind::IdentPat(pattern_ctx) => {
+            // Don't offer pattern completions when the "name" is the wildcard `_`.
+            if ctx.token.kind() != syntax::T![_] {
+                complete_patterns(acc, ctx, pattern_ctx)
+            }
+        }
+        NameKind::Module(mod_under_caret) => {
+            mod_::complete_mod(acc, ctx, mod_under_caret);
+        }
+        NameKind::TypeAlias => {
+            item_list::trait_impl::complete_trait_impl_type_alias(acc, ctx, name);
+        }
+        NameKind::RecordField => {
+            field::complete_field_list_record_variant(acc, ctx);
+        }
+        // Defining occurrences of these items get no completions.
+        NameKind::ConstParam
+        | NameKind::Enum
+        | NameKind::MacroDef
+        | NameKind::MacroRules
+        | NameKind::Rename
+        | NameKind::SelfParam
+        | NameKind::Static
+        | NameKind::Struct
+        | NameKind::Trait
+        | NameKind::TypeParam
+        | NameKind::Union
+        | NameKind::Variant => (),
+    }
+}
+
+/// Entry point for completions triggered on a `NameRef` (a *referencing*
+/// occurrence of an identifier). Dispatches on the syntactic position of the
+/// reference and fans out to the individual completion modules.
+pub(super) fn complete_name_ref(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    NameRefContext { nameref, kind }: &NameRefContext,
+) {
+    match kind {
+        NameRefKind::Path(path_ctx) => {
+            // Flyimport runs for every path position, in addition to the
+            // kind-specific completions below.
+            flyimport::import_on_the_fly_path(acc, ctx, path_ctx);
+
+            match &path_ctx.kind {
+                PathKind::Expr { expr_ctx } => {
+                    expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx);
+
+                    dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx);
+                    item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx);
+                    record::complete_record_expr_func_update(acc, ctx, path_ctx, expr_ctx);
+                    snippet::complete_expr_snippet(acc, ctx, path_ctx, expr_ctx);
+                }
+                PathKind::Type { location } => {
+                    r#type::complete_type_path(acc, ctx, path_ctx, location);
+
+                    match location {
+                        TypeLocation::TupleField => {
+                            field::complete_field_list_tuple_variant(acc, ctx, path_ctx);
+                        }
+                        TypeLocation::TypeAscription(ascription) => {
+                            r#type::complete_ascribed_type(acc, ctx, path_ctx, ascription);
+                        }
+                        TypeLocation::GenericArgList(_)
+                        | TypeLocation::TypeBound
+                        | TypeLocation::ImplTarget
+                        | TypeLocation::ImplTrait
+                        | TypeLocation::Other => (),
+                    }
+                }
+                PathKind::Attr { attr_ctx } => {
+                    attribute::complete_attribute_path(acc, ctx, path_ctx, attr_ctx);
+                }
+                PathKind::Derive { existing_derives } => {
+                    attribute::complete_derive_path(acc, ctx, path_ctx, existing_derives);
+                }
+                PathKind::Item { kind } => {
+                    item_list::complete_item_list(acc, ctx, path_ctx, kind);
+
+                    snippet::complete_item_snippet(acc, ctx, path_ctx, kind);
+                    if let ItemListKind::TraitImpl(impl_) = kind {
+                        item_list::trait_impl::complete_trait_impl_item_by_name(
+                            acc, ctx, path_ctx, nameref, impl_,
+                        );
+                    }
+                }
+                PathKind::Pat { .. } => {
+                    pattern::complete_pattern_path(acc, ctx, path_ctx);
+                }
+                PathKind::Vis { has_in_token } => {
+                    vis::complete_vis_path(acc, ctx, path_ctx, has_in_token);
+                }
+                PathKind::Use => {
+                    use_::complete_use_path(acc, ctx, path_ctx, nameref);
+                }
+            }
+        }
+        NameRefKind::DotAccess(dot_access) => {
+            flyimport::import_on_the_fly_dot(acc, ctx, dot_access);
+            dot::complete_dot(acc, ctx, dot_access);
+            postfix::complete_postfix(acc, ctx, dot_access);
+        }
+        NameRefKind::Keyword(item) => {
+            keyword::complete_for_and_where(acc, ctx, item);
+        }
+        NameRefKind::RecordExpr { dot_prefix, expr } => {
+            record::complete_record_expr_fields(acc, ctx, expr, dot_prefix);
+        }
+        NameRefKind::Pattern(pattern_ctx) => complete_patterns(acc, ctx, pattern_ctx),
+    }
+}
+
+/// Shared fan-out for all pattern positions (from both `Name` and `NameRef` entry points).
+fn complete_patterns(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    pattern_ctx: &PatternContext,
+) {
+    flyimport::import_on_the_fly_pat(acc, ctx, pattern_ctx);
+    fn_param::complete_fn_param(acc, ctx, pattern_ctx);
+    pattern::complete_pattern(acc, ctx, pattern_ctx);
+    record::complete_record_pattern_fields(acc, ctx, pattern_ctx);
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs
new file mode 100644
index 000000000..d9fe94cb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs
@@ -0,0 +1,380 @@
+//! Completion for (built-in) attributes, derives and lints.
+//!
+//! This module uses a bit of static metadata to provide completions for builtin-in attributes and lints.
+
+use ide_db::{
+ generated::lints::{
+ Lint, CLIPPY_LINTS, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, FEATURES, RUSTDOC_LINTS,
+ },
+ syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
+ FxHashMap, SymbolKind,
+};
+use itertools::Itertools;
+use once_cell::sync::Lazy;
+use syntax::{
+ ast::{self, AttrKind},
+ AstNode, SyntaxKind, T,
+};
+
+use crate::{
+ context::{AttrCtx, CompletionContext, PathCompletionCtx, Qualified},
+ item::CompletionItem,
+ Completions,
+};
+
+mod cfg;
+mod derive;
+mod lint;
+mod repr;
+
+pub(crate) use self::derive::complete_derive_path;
+
+/// Complete inputs to known builtin attributes as well as derive attributes
+pub(crate) fn complete_known_attribute_input(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    &colon_prefix: &bool,
+    fake_attribute_under_caret: &ast::Attr,
+) -> Option<()> {
+    let attribute = fake_attribute_under_caret;
+    // Only single-segment attribute paths (`#[repr(...)]`, not `#[foo::bar(...)]`)
+    // are handled here.
+    let name_ref = match attribute.path() {
+        Some(p) => Some(p.as_single_name_ref()?),
+        None => None,
+    };
+    let (path, tt) = name_ref.zip(attribute.token_tree())?;
+    // Bail until the attribute actually has an opened `(...)` input to complete in.
+    if tt.l_paren_token().is_none() {
+        return None;
+    }
+
+    match path.text().as_str() {
+        "repr" => repr::complete_repr(acc, ctx, tt),
+        "feature" => {
+            lint::complete_lint(acc, ctx, colon_prefix, &parse_tt_as_comma_sep_paths(tt)?, FEATURES)
+        }
+        "allow" | "warn" | "deny" | "forbid" => {
+            let existing_lints = parse_tt_as_comma_sep_paths(tt)?;
+
+            // Lint-level attributes accept default, clippy (incl. groups) and rustdoc lints.
+            let lints: Vec<Lint> = CLIPPY_LINT_GROUPS
+                .iter()
+                .map(|g| &g.lint)
+                .chain(DEFAULT_LINTS)
+                .chain(CLIPPY_LINTS)
+                .chain(RUSTDOC_LINTS)
+                .cloned()
+                .collect();
+
+            lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints);
+        }
+        "cfg" => cfg::complete_cfg(acc, ctx),
+        _ => (),
+    }
+    Some(())
+}
+
+/// Completes the *path* of an attribute: attribute proc-macros, modules that may
+/// contain them, and (for unqualified paths) the built-in attribute names.
+pub(crate) fn complete_attribute_path(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+    &AttrCtx { kind, annotated_item_kind }: &AttrCtx,
+) {
+    let is_inner = kind == AttrKind::Inner;
+
+    match qualified {
+        // Qualified path resolving to a module: offer its attribute macros and submodules.
+        Qualified::With {
+            resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
+            super_chain_len,
+            ..
+        } => {
+            acc.add_super_keyword(ctx, *super_chain_len);
+
+            for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
+                match def {
+                    hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_attr(ctx.db) => {
+                        acc.add_macro(ctx, path_ctx, m, name)
+                    }
+                    hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+                        acc.add_module(ctx, path_ctx, m, name)
+                    }
+                    _ => (),
+                }
+            }
+            return;
+        }
+        // fresh use tree with leading colon2, only show crate roots
+        Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+        // only show modules in a fresh UseTree
+        Qualified::No => {
+            ctx.process_all_names(&mut |name, def| match def {
+                hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_attr(ctx.db) => {
+                    acc.add_macro(ctx, path_ctx, m, name)
+                }
+                hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+                    acc.add_module(ctx, path_ctx, m, name)
+                }
+                _ => (),
+            });
+            acc.add_nameref_keywords_with_colon(ctx);
+        }
+        Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
+    }
+
+    // Built-in attributes valid for the annotated item kind; `None` means the
+    // kind is unknown and we fall back to offering everything (filtered by
+    // inner/outer position below).
+    let attributes = annotated_item_kind.and_then(|kind| {
+        if ast::Expr::can_cast(kind) {
+            Some(EXPR_ATTRIBUTES)
+        } else {
+            KIND_TO_ATTRIBUTES.get(&kind).copied()
+        }
+    });
+
+    let add_completion = |attr_completion: &AttrCompletion| {
+        let mut item =
+            CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), attr_completion.label);
+
+        if let Some(lookup) = attr_completion.lookup {
+            item.lookup_by(lookup);
+        }
+
+        if let Some((snippet, cap)) = attr_completion.snippet.zip(ctx.config.snippet_cap) {
+            item.insert_snippet(cap, snippet);
+        }
+
+        // Inner-only attributes are suppressed in outer (`#[...]`) position.
+        if is_inner || !attr_completion.prefer_inner {
+            item.add_to(acc);
+        }
+    };
+
+    match attributes {
+        // The binary search relies on ATTRIBUTES being sorted by key — enforced
+        // by the `attributes_are_sorted` test below.
+        Some(applicable) => applicable
+            .iter()
+            .flat_map(|name| ATTRIBUTES.binary_search_by(|attr| attr.key().cmp(name)).ok())
+            .flat_map(|idx| ATTRIBUTES.get(idx))
+            .for_each(add_completion),
+        None if is_inner => ATTRIBUTES.iter().for_each(add_completion),
+        None => ATTRIBUTES.iter().filter(|compl| !compl.prefer_inner).for_each(add_completion),
+    }
+}
+
+/// One entry in the built-in attribute table.
+struct AttrCompletion {
+    // Human-visible label, e.g. `allow(…)`.
+    label: &'static str,
+    // Filter/sort key when it differs from the label, e.g. `allow`.
+    lookup: Option<&'static str>,
+    // Snippet inserted when the client supports snippets.
+    snippet: Option<&'static str>,
+    // Attribute only makes sense in inner (`#![...]`) position.
+    prefer_inner: bool,
+}
+
+impl AttrCompletion {
+    /// Sort/search key: the explicit lookup string, falling back to the label.
+    fn key(&self) -> &'static str {
+        self.lookup.unwrap_or(self.label)
+    }
+
+    /// Marks the entry as inner-attribute-only (builder style).
+    const fn prefer_inner(self) -> AttrCompletion {
+        AttrCompletion { prefer_inner: true, ..self }
+    }
+}
+
+/// Shorthand constructor for a (non-inner) `AttrCompletion` table entry.
+const fn attr(
+    label: &'static str,
+    lookup: Option<&'static str>,
+    snippet: Option<&'static str>,
+) -> AttrCompletion {
+    AttrCompletion { label, lookup, snippet, prefer_inner: false }
+}
+
+/// Builds a `&'static [&'static str]` of attribute names. Always seeds the list
+/// with the universal lint/cfg attributes, then accumulates named groups
+/// (`item`, `adt`, `linkable`) and/or individual string literals.
+macro_rules! attrs {
+    // attributes applicable to all items
+    [@ { item $($tt:tt)* } {$($acc:tt)*}] => {
+        attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "must_use", "no_mangle" })
+    };
+    // attributes applicable to all adts
+    [@ { adt $($tt:tt)* } {$($acc:tt)*}] => {
+        attrs!(@ { $($tt)* } { $($acc)*, "derive", "repr" })
+    };
+    // attributes applicable to all linkable things aka functions/statics
+    [@ { linkable $($tt:tt)* } {$($acc:tt)*}] => {
+        attrs!(@ { $($tt)* } { $($acc)*, "export_name", "link_name", "link_section" })
+    };
+    // error fallback for nicer error message
+    [@ { $ty:ident $($tt:tt)* } {$($acc:tt)*}] => {
+        compile_error!(concat!("unknown attr subtype ", stringify!($ty)))
+    };
+    // general push down accumulation
+    [@ { $lit:literal $($tt:tt)*} {$($acc:tt)*}] => {
+        attrs!(@ { $($tt)* } { $($acc)*, $lit })
+    };
+    [@ {$($tt:tt)+} {$($tt2:tt)*}] => {
+        compile_error!(concat!("Unexpected input ", stringify!($($tt)+)))
+    };
+    // final output construction
+    [@ {} {$($tt:tt)*}] => { &[$($tt)*] as _ };
+    // starting matcher
+    [$($tt:tt),*] => {
+        attrs!(@ { $($tt)* } { "allow", "cfg", "cfg_attr", "deny", "forbid", "warn" })
+    };
+}
+
+/// Built-in attributes applicable to each annotatable syntax node kind.
+#[rustfmt::skip]
+static KIND_TO_ATTRIBUTES: Lazy<FxHashMap<SyntaxKind, &[&str]>> = Lazy::new(|| {
+    use SyntaxKind::*;
+    [
+        (
+            SOURCE_FILE,
+            attrs!(
+                item,
+                "crate_name", "feature", "no_implicit_prelude", "no_main", "no_std",
+                "recursion_limit", "type_length_limit", "windows_subsystem"
+            ),
+        ),
+        (MODULE, attrs!(item, "macro_use", "no_implicit_prelude", "path")),
+        (ITEM_LIST, attrs!(item, "no_implicit_prelude")),
+        (MACRO_RULES, attrs!(item, "macro_export", "macro_use")),
+        (MACRO_DEF, attrs!(item)),
+        (EXTERN_CRATE, attrs!(item, "macro_use", "no_link")),
+        (USE, attrs!(item)),
+        (TYPE_ALIAS, attrs!(item)),
+        (STRUCT, attrs!(item, adt, "non_exhaustive")),
+        (ENUM, attrs!(item, adt, "non_exhaustive")),
+        (UNION, attrs!(item, adt)),
+        (CONST, attrs!(item)),
+        (
+            FN,
+            attrs!(
+                item, linkable,
+                "cold", "ignore", "inline", "must_use", "panic_handler", "proc_macro",
+                "proc_macro_derive", "proc_macro_attribute", "should_panic", "target_feature",
+                "test", "track_caller"
+            ),
+        ),
+        (STATIC, attrs!(item, linkable, "global_allocator", "used")),
+        (TRAIT, attrs!(item, "must_use")),
+        (IMPL, attrs!(item, "automatically_derived")),
+        (ASSOC_ITEM_LIST, attrs!(item)),
+        (EXTERN_BLOCK, attrs!(item, "link")),
+        (EXTERN_ITEM_LIST, attrs!(item, "link")),
+        // The remaining kinds only take the universal lint/cfg attributes
+        // (plus `non_exhaustive` on enum variants).
+        (MACRO_CALL, attrs!()),
+        (SELF_PARAM, attrs!()),
+        (PARAM, attrs!()),
+        (RECORD_FIELD, attrs!()),
+        (VARIANT, attrs!("non_exhaustive")),
+        (TYPE_PARAM, attrs!()),
+        (CONST_PARAM, attrs!()),
+        (LIFETIME_PARAM, attrs!()),
+        (LET_STMT, attrs!()),
+        (EXPR_STMT, attrs!()),
+        (LITERAL, attrs!()),
+        (RECORD_EXPR_FIELD_LIST, attrs!()),
+        (RECORD_EXPR_FIELD, attrs!()),
+        (MATCH_ARM_LIST, attrs!()),
+        (MATCH_ARM, attrs!()),
+        (IDENT_PAT, attrs!()),
+        (RECORD_PAT_FIELD, attrs!()),
+    ]
+    .into_iter()
+    .collect()
+});
+// Attributes valid in expression position: just the universal lint/cfg set.
+const EXPR_ATTRIBUTES: &[&str] = attrs!();
+
+/// <https://doc.rust-lang.org/reference/attributes.html#built-in-attributes-index>
+// Keep these sorted for the binary search!
+// (Sorted by `AttrCompletion::key()`, i.e. lookup-or-label; enforced by the
+// `attributes_are_sorted` test below.)
+const ATTRIBUTES: &[AttrCompletion] = &[
+    attr("allow(…)", Some("allow"), Some("allow(${0:lint})")),
+    attr("automatically_derived", None, None),
+    attr("cfg(…)", Some("cfg"), Some("cfg(${0:predicate})")),
+    attr("cfg_attr(…)", Some("cfg_attr"), Some("cfg_attr(${1:predicate}, ${0:attr})")),
+    attr("cold", None, None),
+    attr(r#"crate_name = """#, Some("crate_name"), Some(r#"crate_name = "${0:crate_name}""#))
+        .prefer_inner(),
+    attr("deny(…)", Some("deny"), Some("deny(${0:lint})")),
+    attr(r#"deprecated"#, Some("deprecated"), Some(r#"deprecated"#)),
+    attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)),
+    attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)),
+    attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)),
+    attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)),
+    attr(
+        r#"export_name = "…""#,
+        Some("export_name"),
+        Some(r#"export_name = "${0:exported_symbol_name}""#),
+    ),
+    attr("feature(…)", Some("feature"), Some("feature(${0:flag})")).prefer_inner(),
+    attr("forbid(…)", Some("forbid"), Some("forbid(${0:lint})")),
+    attr("global_allocator", None, None),
+    attr(r#"ignore = "…""#, Some("ignore"), Some(r#"ignore = "${0:reason}""#)),
+    attr("inline", Some("inline"), Some("inline")),
+    attr("link", None, None),
+    attr(r#"link_name = "…""#, Some("link_name"), Some(r#"link_name = "${0:symbol_name}""#)),
+    attr(
+        r#"link_section = "…""#,
+        Some("link_section"),
+        Some(r#"link_section = "${0:section_name}""#),
+    ),
+    attr("macro_export", None, None),
+    attr("macro_use", None, None),
+    attr(r#"must_use"#, Some("must_use"), Some(r#"must_use"#)),
+    attr("no_implicit_prelude", None, None).prefer_inner(),
+    attr("no_link", None, None).prefer_inner(),
+    attr("no_main", None, None).prefer_inner(),
+    attr("no_mangle", None, None),
+    attr("no_std", None, None).prefer_inner(),
+    attr("non_exhaustive", None, None),
+    attr("panic_handler", None, None),
+    // Note: snippet had a missing space after `=` (`path ="…"`), now consistent
+    // with the label and the other `key = "…"` snippets.
+    attr(r#"path = "…""#, Some("path"), Some(r#"path = "${0:path}""#)),
+    attr("proc_macro", None, None),
+    attr("proc_macro_attribute", None, None),
+    attr("proc_macro_derive(…)", Some("proc_macro_derive"), Some("proc_macro_derive(${0:Trait})")),
+    attr(
+        r#"recursion_limit = "…""#,
+        Some("recursion_limit"),
+        Some(r#"recursion_limit = "${0:128}""#),
+    )
+    .prefer_inner(),
+    attr("repr(…)", Some("repr"), Some("repr(${0:C})")),
+    attr("should_panic", Some("should_panic"), Some(r#"should_panic"#)),
+    attr(
+        r#"target_feature(enable = "…")"#,
+        Some("target_feature"),
+        Some(r#"target_feature(enable = "${0:feature}")"#),
+    ),
+    attr("test", None, None),
+    attr("track_caller", None, None),
+    attr("type_length_limit = …", Some("type_length_limit"), Some("type_length_limit = ${0:128}"))
+        .prefer_inner(),
+    attr("used", None, None),
+    attr("warn(…)", Some("warn"), Some("warn(${0:lint})")),
+    attr(
+        r#"windows_subsystem = "…""#,
+        Some("windows_subsystem"),
+        Some(r#"windows_subsystem = "${0:subsystem}""#),
+    )
+    .prefer_inner(),
+];
+
+/// Splits the contents of an attribute token tree on `,` and re-parses each
+/// group as an expression, skipping the opening `(` and stopping at the
+/// matching `)`. Groups that fail to parse as an expression are dropped.
+fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
+    let r_paren = input.r_paren_token()?;
+    let tokens = input
+        .syntax()
+        .children_with_tokens()
+        .skip(1)
+        .take_while(|it| it.as_token() != Some(&r_paren));
+    let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
+    Some(
+        input_expressions
+            .into_iter()
+            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+            .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+            .collect::<Vec<ast::Expr>>(),
+    )
+}
+
+#[test]
+// `complete_attribute_path` binary-searches ATTRIBUTES by key, so the table
+// must stay strictly sorted; this test catches out-of-order insertions.
+fn attributes_are_sorted() {
+    let mut attrs = ATTRIBUTES.iter().map(|attr| attr.key());
+    let mut prev = attrs.next().unwrap();
+
+    attrs.for_each(|next| {
+        assert!(
+            prev < next,
+            r#"ATTRIBUTES array is not sorted, "{}" should come after "{}""#,
+            prev,
+            next
+        );
+        prev = next;
+    });
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
new file mode 100644
index 000000000..311060143
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
@@ -0,0 +1,93 @@
+//! Completion for cfg
+
+use std::iter;
+
+use ide_db::SymbolKind;
+use itertools::Itertools;
+use syntax::SyntaxKind;
+
+use crate::{completions::Completions, context::CompletionContext, CompletionItem};
+
+/// Completes cfg keys and, after `key =`, the values for that key.
+pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
+    // Inserts the value wrapped in double quotes, as cfg values are strings.
+    let add_completion = |item: &str| {
+        let mut completion = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), item);
+        completion.insert_text(format!(r#""{}""#, item));
+        acc.add(completion.build());
+    };
+
+    // Walk backwards over `=` and trivia to find the cfg key identifier, if any.
+    let previous = iter::successors(ctx.original_token.prev_token(), |t| {
+        (matches!(t.kind(), SyntaxKind::EQ) || t.kind().is_trivia())
+            .then(|| t.prev_token())
+            .flatten()
+    })
+    .find(|t| matches!(t.kind(), SyntaxKind::IDENT));
+
+    match previous.as_ref().map(|p| p.text()) {
+        // Well-known target keys get their hardcoded value lists.
+        Some("target_arch") => KNOWN_ARCH.iter().copied().for_each(add_completion),
+        Some("target_env") => KNOWN_ENV.iter().copied().for_each(add_completion),
+        Some("target_os") => KNOWN_OS.iter().copied().for_each(add_completion),
+        Some("target_vendor") => KNOWN_VENDOR.iter().copied().for_each(add_completion),
+        Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion),
+        // Any other key: query the crate's potential cfg values for it.
+        Some(name) => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| {
+            let insert_text = format!(r#""{}""#, s);
+            let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
+            item.insert_text(insert_text);
+
+            acc.add(item.build());
+        }),
+        // No key to the left: complete cfg keys themselves (unquoted).
+        None => ctx.krate.potential_cfg(ctx.db).get_cfg_keys().cloned().unique().for_each(|s| {
+            let item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
+            acc.add(item.build());
+        }),
+    };
+}
+
+// Hardcoded value lists for the well-known target cfg keys used above.
+const KNOWN_ARCH: [&str; 19] = [
+    "aarch64",
+    "arm",
+    "avr",
+    "hexagon",
+    "mips",
+    "mips64",
+    "msp430",
+    "nvptx64",
+    "powerpc",
+    "powerpc64",
+    "riscv32",
+    "riscv64",
+    "s390x",
+    "sparc",
+    "sparc64",
+    "wasm32",
+    "wasm64",
+    "x86",
+    "x86_64",
+];
+
+const KNOWN_ENV: [&str; 7] = ["eabihf", "gnu", "gnueabihf", "msvc", "relibc", "sgx", "uclibc"];
+
+const KNOWN_OS: [&str; 20] = [
+    "cuda",
+    "dragonfly",
+    "emscripten",
+    "freebsd",
+    "fuchsia",
+    "haiku",
+    "hermit",
+    "illumos",
+    "l4re",
+    "linux",
+    "netbsd",
+    "none",
+    "openbsd",
+    "psp",
+    "redox",
+    "solaris",
+    "uefi",
+    "unknown",
+    "vxworks",
+    "windows",
+];
+
+const KNOWN_VENDOR: [&str; 8] =
+    ["apple", "fortanix", "nvidia", "pc", "sony", "unknown", "wrs", "uwp"];
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs
new file mode 100644
index 000000000..793c22630
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs
@@ -0,0 +1,116 @@
+//! Completion for derives
+use hir::{HasAttrs, ScopeDef};
+use ide_db::SymbolKind;
+use itertools::Itertools;
+use syntax::SmolStr;
+
+use crate::{
+ context::{CompletionContext, ExistingDerives, PathCompletionCtx, Qualified},
+ item::CompletionItem,
+ Completions,
+};
+
+pub(crate) fn complete_derive_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ existing_derives: &ExistingDerives,
+) {
+ let core = ctx.famous_defs().core();
+
+ match qualified {
+ Qualified::With {
+ resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
+ super_chain_len,
+ ..
+ } => {
+ acc.add_super_keyword(ctx, *super_chain_len);
+
+ for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
+ match def {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac))
+ if !existing_derives.contains(&mac) && mac.is_derive(ctx.db) =>
+ {
+ acc.add_macro(ctx, path_ctx, mac, name)
+ }
+ ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+ acc.add_module(ctx, path_ctx, m, name)
+ }
+ _ => (),
+ }
+ }
+ }
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ // only show modules in a fresh UseTree
+ Qualified::No => {
+ ctx.process_all_names(&mut |name, def| {
+ let mac = match def {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac))
+ if !existing_derives.contains(&mac) && mac.is_derive(ctx.db) =>
+ {
+ mac
+ }
+ ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+ return acc.add_module(ctx, path_ctx, m, name);
+ }
+ _ => return,
+ };
+
+ match (core, mac.module(ctx.db).krate()) {
+ // show derive dependencies for `core`/`std` derives
+ (Some(core), mac_krate) if core == mac_krate => {}
+ _ => return acc.add_macro(ctx, path_ctx, mac, name),
+ };
+
+ let name_ = name.to_smol_str();
+ let find = DEFAULT_DERIVE_DEPENDENCIES
+ .iter()
+ .find(|derive_completion| derive_completion.label == name_);
+
+ match find {
+ Some(derive_completion) => {
+ let mut components = vec![derive_completion.label];
+ components.extend(derive_completion.dependencies.iter().filter(
+ |&&dependency| {
+ !existing_derives
+ .iter()
+ .map(|it| it.name(ctx.db))
+ .any(|it| it.to_smol_str() == dependency)
+ },
+ ));
+ let lookup = components.join(", ");
+ let label = Itertools::intersperse(components.into_iter().rev(), ", ");
+
+ let mut item = CompletionItem::new(
+ SymbolKind::Derive,
+ ctx.source_range(),
+ SmolStr::from_iter(label),
+ );
+ if let Some(docs) = mac.docs(ctx.db) {
+ item.documentation(docs);
+ }
+ item.lookup_by(lookup);
+ item.add_to(acc);
+ }
+ None => acc.add_macro(ctx, path_ctx, mac, name),
+ }
+ });
+ acc.add_nameref_keywords_with_colon(ctx);
+ }
+ Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
+ }
+}
+
+struct DeriveDependencies {
+ label: &'static str,
+ dependencies: &'static [&'static str],
+}
+
+/// Standard Rust derives that have dependencies
+/// (the dependencies are needed so that the main derive doesn't break the compilation when added)
+const DEFAULT_DERIVE_DEPENDENCIES: &[DeriveDependencies] = &[
+ DeriveDependencies { label: "Copy", dependencies: &["Clone"] },
+ DeriveDependencies { label: "Eq", dependencies: &["PartialEq"] },
+ DeriveDependencies { label: "Ord", dependencies: &["PartialOrd", "Eq", "PartialEq"] },
+ DeriveDependencies { label: "PartialOrd", dependencies: &["PartialEq"] },
+];
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
new file mode 100644
index 000000000..967f6ddd9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
@@ -0,0 +1,61 @@
+//! Completion for lints
+use ide_db::{generated::lints::Lint, SymbolKind};
+use syntax::ast;
+
+use crate::{context::CompletionContext, item::CompletionItem, Completions};
+
+pub(super) fn complete_lint(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ is_qualified: bool,
+ existing_lints: &[ast::Path],
+ lints_completions: &[Lint],
+) {
+ for &Lint { label, description } in lints_completions {
+ let (qual, name) = {
+ // FIXME: change `Lint`'s label to not store a path in it but split the prefix off instead?
+ let mut parts = label.split("::");
+ let ns_or_label = match parts.next() {
+ Some(it) => it,
+ None => continue,
+ };
+ let label = parts.next();
+ match label {
+ Some(label) => (Some(ns_or_label), label),
+ None => (None, ns_or_label),
+ }
+ };
+ if qual.is_none() && is_qualified {
+ // qualified completion requested, but this lint is unqualified
+ continue;
+ }
+ let lint_already_annotated = existing_lints
+ .iter()
+ .filter_map(|path| {
+ let q = path.qualifier();
+ if q.as_ref().and_then(|it| it.qualifier()).is_some() {
+ return None;
+ }
+ Some((q.and_then(|it| it.as_single_name_ref()), path.segment()?.name_ref()?))
+ })
+ .any(|(q, name_ref)| {
+ let qualifier_matches = match (q, qual) {
+ (None, None) => true,
+ (None, Some(_)) => false,
+ (Some(_), None) => false,
+ (Some(q), Some(ns)) => q.text() == ns,
+ };
+ qualifier_matches && name_ref.text() == name
+ });
+ if lint_already_annotated {
+ continue;
+ }
+ let label = match qual {
+ Some(qual) if !is_qualified => format!("{}::{}", qual, name),
+ _ => name.to_owned(),
+ };
+ let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label);
+ item.documentation(hir::Documentation::new(description.to_owned()));
+ item.add_to(acc)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs
new file mode 100644
index 000000000..a29417133
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs
@@ -0,0 +1,74 @@
+//! Completion for representations.
+
+use ide_db::SymbolKind;
+use syntax::ast;
+
+use crate::{context::CompletionContext, item::CompletionItem, Completions};
+
+pub(super) fn complete_repr(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ input: ast::TokenTree,
+) {
+ if let Some(existing_reprs) = super::parse_comma_sep_expr(input) {
+ for &ReprCompletion { label, snippet, lookup, collides } in REPR_COMPLETIONS {
+ let repr_already_annotated = existing_reprs
+ .iter()
+ .filter_map(|expr| match expr {
+ ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+ ast::Expr::CallExpr(call) => match call.expr()? {
+ ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+ _ => None,
+ },
+ _ => None,
+ })
+ .any(|it| {
+ let text = it.text();
+ lookup.unwrap_or(label) == text || collides.contains(&text.as_str())
+ });
+ if repr_already_annotated {
+ continue;
+ }
+
+ let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), label);
+ if let Some(lookup) = lookup {
+ item.lookup_by(lookup);
+ }
+ if let Some((snippet, cap)) = snippet.zip(ctx.config.snippet_cap) {
+ item.insert_snippet(cap, snippet);
+ }
+ item.add_to(acc);
+ }
+ }
+}
+
+struct ReprCompletion {
+ label: &'static str,
+ snippet: Option<&'static str>,
+ lookup: Option<&'static str>,
+ collides: &'static [&'static str],
+}
+
+const fn attr(label: &'static str, collides: &'static [&'static str]) -> ReprCompletion {
+ ReprCompletion { label, snippet: None, lookup: None, collides }
+}
+
+#[rustfmt::skip]
+const REPR_COMPLETIONS: &[ReprCompletion] = &[
+ ReprCompletion { label: "align($0)", snippet: Some("align($0)"), lookup: Some("align"), collides: &["transparent", "packed"] },
+ attr("packed", &["transparent", "align"]),
+ attr("transparent", &["C", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("C", &["transparent"]),
+ attr("u8", &["transparent", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("u16", &["transparent", "u8", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("u32", &["transparent", "u8", "u16", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("u64", &["transparent", "u8", "u16", "u32", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("u128", &["transparent", "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("usize", &["transparent", "u8", "u16", "u32", "u64", "u128", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("i8", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i16", "i32", "i64", "i128", "isize"]),
+ attr("i16", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i32", "i64", "i128", "isize"]),
+ attr("i32", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i64", "i128", "isize"]),
+ attr("i64", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i128", "isize"]),
+    attr("i128",  &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64",         "isize"]),
+ attr("isize", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128"]),
+];
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
new file mode 100644
index 000000000..cf40ca489
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
@@ -0,0 +1,947 @@
+//! Completes references after dot (fields and method calls).
+
+use ide_db::FxHashSet;
+
+use crate::{
+ context::{CompletionContext, DotAccess, DotAccessKind, ExprCtx, PathCompletionCtx, Qualified},
+ CompletionItem, CompletionItemKind, Completions,
+};
+
+/// Complete dot accesses, i.e. fields or methods.
+pub(crate) fn complete_dot(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+) {
+ let receiver_ty = match dot_access {
+ DotAccess { receiver_ty: Some(receiver_ty), .. } => &receiver_ty.original,
+ _ => return,
+ };
+
+ // Suggest .await syntax for types that implement Future trait
+ if receiver_ty.impls_future(ctx.db) {
+ let mut item =
+ CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), "await");
+ item.detail("expr.await");
+ item.add_to(acc);
+ }
+
+ if let DotAccessKind::Method { .. } = dot_access.kind {
+ cov_mark::hit!(test_no_struct_field_completion_for_method_call);
+ } else {
+ complete_fields(
+ acc,
+ ctx,
+ &receiver_ty,
+ |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
+ |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
+ );
+ }
+ complete_methods(ctx, &receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
+}
+
+pub(crate) fn complete_undotted_self(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ expr_ctx: &ExprCtx,
+) {
+ if !ctx.config.enable_self_on_the_fly {
+ return;
+ }
+ if !path_ctx.is_trivial_path() {
+ return;
+ }
+ if !ctx.qualifier_ctx.none() {
+ return;
+ }
+ if !matches!(path_ctx.qualified, Qualified::No) {
+ return;
+ }
+ let self_param = match expr_ctx {
+ ExprCtx { self_param: Some(self_param), .. } => self_param,
+ _ => return,
+ };
+
+ let ty = self_param.ty(ctx.db);
+ complete_fields(
+ acc,
+ ctx,
+ &ty,
+ |acc, field, ty| {
+ acc.add_field(
+ ctx,
+ &DotAccess {
+ receiver: None,
+ receiver_ty: None,
+ kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal: false },
+ },
+ Some(hir::known::SELF_PARAM),
+ field,
+ &ty,
+ )
+ },
+ |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
+ );
+ complete_methods(ctx, &ty, |func| {
+ acc.add_method(
+ ctx,
+ &DotAccess {
+ receiver: None,
+ receiver_ty: None,
+ kind: DotAccessKind::Method { has_parens: false },
+ },
+ func,
+ Some(hir::known::SELF_PARAM),
+ None,
+ )
+ });
+}
+
+fn complete_fields(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ receiver: &hir::Type,
+ mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
+ mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
+) {
+ for receiver in receiver.autoderef(ctx.db) {
+ for (field, ty) in receiver.fields(ctx.db) {
+ named_field(acc, field, ty);
+ }
+ for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() {
+ // Tuple fields are always public (tuple struct fields are handled above).
+ tuple_index(acc, i, ty);
+ }
+ }
+}
+
+fn complete_methods(
+ ctx: &CompletionContext<'_>,
+ receiver: &hir::Type,
+ mut f: impl FnMut(hir::Function),
+) {
+ let mut seen_methods = FxHashSet::default();
+ receiver.iterate_method_candidates(
+ ctx.db,
+ &ctx.scope,
+ &ctx.traits_in_scope(),
+ Some(ctx.module),
+ None,
+ |func| {
+ if func.self_param(ctx.db).is_some() && seen_methods.insert(func.name(ctx.db)) {
+ f(func);
+ }
+ None::<()>
+ },
+ );
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{
+ check_edit, completion_list_no_kw, completion_list_no_kw_with_private_editable,
+ };
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ fn check_with_private_editable(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw_with_private_editable(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn test_struct_field_and_method_completion() {
+ check(
+ r#"
+struct S { foo: u32 }
+impl S {
+ fn bar(&self) {}
+}
+fn foo(s: S) { s.$0 }
+"#,
+ expect![[r#"
+ fd foo u32
+ me bar() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_field_completion_self() {
+ check(
+ r#"
+struct S { the_field: (u32,) }
+impl S {
+ fn foo(self) { self.$0 }
+}
+"#,
+ expect![[r#"
+ fd the_field (u32,)
+ me foo() fn(self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_struct_field_completion_autoderef() {
+ check(
+ r#"
+struct A { the_field: (u32, i32) }
+impl A {
+ fn foo(&self) { self.$0 }
+}
+"#,
+ expect![[r#"
+ fd the_field (u32, i32)
+ me foo() fn(&self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_no_struct_field_completion_for_method_call() {
+ cov_mark::check!(test_no_struct_field_completion_for_method_call);
+ check(
+ r#"
+struct A { the_field: u32 }
+fn foo(a: A) { a.$0() }
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn test_visibility_filtering() {
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:local
+pub mod m {
+ pub struct A {
+ private_field: u32,
+ pub pub_field: u32,
+ pub(crate) crate_field: u32,
+ pub(super) super_field: u32,
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd pub_field u32
+ "#]],
+ );
+
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub mod m {
+ pub struct A {
+ private_field: u32,
+ pub pub_field: u32,
+ pub(crate) crate_field: u32,
+ pub(super) super_field: u32,
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd pub_field u32
+ "#]],
+ );
+
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub mod m {
+ pub struct A(
+ i32,
+ pub f64,
+ );
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd 1 f64
+ "#]],
+ );
+
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:local
+pub struct A {}
+mod m {
+ impl super::A {
+ fn private_method(&self) {}
+ pub(crate) fn crate_method(&self) {}
+ pub fn pub_method(&self) {}
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::A) { a.$0 }
+"#,
+ expect![[r#"
+ me pub_method() fn(&self)
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub struct A {}
+mod m {
+ impl super::A {
+ fn private_method(&self) {}
+ pub(crate) fn crate_method(&self) {}
+ pub fn pub_method(&self) {}
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::A) { a.$0 }
+"#,
+ expect![[r#"
+ me pub_method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_visibility_filtering_with_private_editable_enabled() {
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:local
+pub mod m {
+ pub struct A {
+ private_field: u32,
+ pub pub_field: u32,
+ pub(crate) crate_field: u32,
+ pub(super) super_field: u32,
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd crate_field u32
+ fd private_field u32
+ fd pub_field u32
+ fd super_field u32
+ "#]],
+ );
+
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub mod m {
+ pub struct A {
+ private_field: u32,
+ pub pub_field: u32,
+ pub(crate) crate_field: u32,
+ pub(super) super_field: u32,
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd pub_field u32
+ "#]],
+ );
+
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub mod m {
+ pub struct A(
+ i32,
+ pub f64,
+ );
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd 1 f64
+ "#]],
+ );
+
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:local
+pub struct A {}
+mod m {
+ impl super::A {
+ fn private_method(&self) {}
+ pub(crate) fn crate_method(&self) {}
+ pub fn pub_method(&self) {}
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::A) { a.$0 }
+"#,
+ expect![[r#"
+ me crate_method() fn(&self)
+ me private_method() fn(&self)
+ me pub_method() fn(&self)
+ "#]],
+ );
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub struct A {}
+mod m {
+ impl super::A {
+ fn private_method(&self) {}
+ pub(crate) fn crate_method(&self) {}
+ pub fn pub_method(&self) {}
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::A) { a.$0 }
+"#,
+ expect![[r#"
+ me pub_method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_local_impls() {
+ check(
+ r#"
+//- /lib.rs crate:lib
+pub struct A {}
+mod m {
+ impl super::A {
+ pub fn pub_module_method(&self) {}
+ }
+ fn f() {
+ impl super::A {
+ pub fn pub_foreign_local_method(&self) {}
+ }
+ }
+}
+//- /main.rs crate:main deps:lib
+fn foo(a: lib::A) {
+ impl lib::A {
+ fn local_method(&self) {}
+ }
+ a.$0
+}
+"#,
+ expect![[r#"
+ me local_method() fn(&self)
+ me pub_module_method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_doc_hidden_filtering() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep
+fn foo(a: dep::A) { a.$0 }
+//- /dep.rs crate:dep
+pub struct A {
+ #[doc(hidden)]
+ pub hidden_field: u32,
+ pub pub_field: u32,
+}
+
+impl A {
+ pub fn pub_method(&self) {}
+
+ #[doc(hidden)]
+ pub fn hidden_method(&self) {}
+}
+ "#,
+ expect![[r#"
+ fd pub_field u32
+ me pub_method() fn(&self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_union_field_completion() {
+ check(
+ r#"
+union U { field: u8, other: u16 }
+fn foo(u: U) { u.$0 }
+"#,
+ expect![[r#"
+ fd field u8
+ fd other u16
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_method_completion_only_fitting_impls() {
+ check(
+ r#"
+struct A<T> {}
+impl A<u32> {
+ fn the_method(&self) {}
+}
+impl A<i32> {
+ fn the_other_method(&self) {}
+}
+fn foo(a: A<u32>) { a.$0 }
+"#,
+ expect![[r#"
+ me the_method() fn(&self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_trait_method_completion() {
+ check(
+ r#"
+struct A {}
+trait Trait { fn the_method(&self); }
+impl Trait for A {}
+fn foo(a: A) { a.$0 }
+"#,
+ expect![[r#"
+ me the_method() (as Trait) fn(&self)
+ "#]],
+ );
+ check_edit(
+ "the_method",
+ r#"
+struct A {}
+trait Trait { fn the_method(&self); }
+impl Trait for A {}
+fn foo(a: A) { a.$0 }
+"#,
+ r#"
+struct A {}
+trait Trait { fn the_method(&self); }
+impl Trait for A {}
+fn foo(a: A) { a.the_method()$0 }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_trait_method_completion_deduplicated() {
+ check(
+ r"
+struct A {}
+trait Trait { fn the_method(&self); }
+impl<T> Trait for T {}
+fn foo(a: &A) { a.$0 }
+",
+ expect![[r#"
+ me the_method() (as Trait) fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_trait_method_from_other_module() {
+ check(
+ r"
+struct A {}
+mod m {
+ pub trait Trait { fn the_method(&self); }
+}
+use m::Trait;
+impl Trait for A {}
+fn foo(a: A) { a.$0 }
+",
+ expect![[r#"
+ me the_method() (as Trait) fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_no_non_self_method() {
+ check(
+ r#"
+struct A {}
+impl A {
+ fn the_method() {}
+}
+fn foo(a: A) {
+ a.$0
+}
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn test_tuple_field_completion() {
+ check(
+ r#"
+fn foo() {
+ let b = (0, 3.14);
+ b.$0
+}
+"#,
+ expect![[r#"
+ fd 0 i32
+ fd 1 f64
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_tuple_struct_field_completion() {
+ check(
+ r#"
+struct S(i32, f64);
+fn foo() {
+ let b = S(0, 3.14);
+ b.$0
+}
+"#,
+ expect![[r#"
+ fd 0 i32
+ fd 1 f64
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_tuple_field_inference() {
+ check(
+ r#"
+pub struct S;
+impl S { pub fn blah(&self) {} }
+
+struct T(S);
+
+impl T {
+ fn foo(&self) {
+ // FIXME: This doesn't work without the trailing `a` as `0.` is a float
+ self.0.a$0
+ }
+}
+"#,
+ expect![[r#"
+ me blah() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_completion_works_in_consts() {
+ check(
+ r#"
+struct A { the_field: u32 }
+const X: u32 = {
+ A { the_field: 92 }.$0
+};
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_in_simple_macro_1() {
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+struct A { the_field: u32 }
+fn foo(a: A) {
+ m!(a.x$0)
+}
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_in_simple_macro_2() {
+ // this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+struct A { the_field: u32 }
+fn foo(a: A) {
+ m!(a.$0)
+}
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_in_simple_macro_recursive_1() {
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+struct A { the_field: u32 }
+fn foo(a: A) {
+ m!(m!(m!(a.x$0)))
+}
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expansion_resilient() {
+ check(
+ r#"
+macro_rules! d {
+ () => {};
+ ($val:expr) => {
+ match $val { tmp => { tmp } }
+ };
+ // Trailing comma with single argument is ignored
+ ($val:expr,) => { $crate::d!($val) };
+ ($($val:expr),+ $(,)?) => {
+ ($($crate::d!($val)),+,)
+ };
+}
+struct A { the_field: u32 }
+fn foo(a: A) {
+ d!(a.$0)
+}
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_method_completion_issue_3547() {
+ check(
+ r#"
+struct HashSet<T> {}
+impl<T> HashSet<T> {
+ pub fn the_method(&self) {}
+}
+fn foo() {
+ let s: HashSet<_>;
+ s.$0
+}
+"#,
+ expect![[r#"
+ me the_method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_method_call_when_receiver_is_a_macro_call() {
+ check(
+ r#"
+struct S;
+impl S { fn foo(&self) {} }
+macro_rules! make_s { () => { S }; }
+fn main() { make_s!().f$0; }
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn completes_after_macro_call_in_submodule() {
+ check(
+ r#"
+macro_rules! empty {
+ () => {};
+}
+
+mod foo {
+ #[derive(Debug, Default)]
+ struct Template2 {}
+
+ impl Template2 {
+ fn private(&self) {}
+ }
+ fn baz() {
+ let goo: Template2 = Template2 {};
+ empty!();
+ goo.$0
+ }
+}
+ "#,
+ expect![[r#"
+ me private() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn issue_8931() {
+ check(
+ r#"
+//- minicore: fn
+struct S;
+
+struct Foo;
+impl Foo {
+ fn foo(&self) -> &[u8] { loop {} }
+}
+
+impl S {
+ fn indented(&mut self, f: impl FnOnce(&mut Self)) {
+ }
+
+ fn f(&mut self, v: Foo) {
+ self.indented(|this| v.$0)
+ }
+}
+ "#,
+ expect![[r#"
+ me foo() fn(&self) -> &[u8]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_bare_fields_and_methods_in_methods() {
+ check(
+ r#"
+struct Foo { field: i32 }
+
+impl Foo { fn foo(&self) { $0 } }"#,
+ expect![[r#"
+ fd self.field i32
+ lc self &Foo
+ sp Self
+ st Foo
+ bt u32
+ me self.foo() fn(&self)
+ "#]],
+ );
+ check(
+ r#"
+struct Foo(i32);
+
+impl Foo { fn foo(&mut self) { $0 } }"#,
+ expect![[r#"
+ fd self.0 i32
+ lc self &mut Foo
+ sp Self
+ st Foo
+ bt u32
+ me self.foo() fn(&mut self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_completion_after_dot() {
+ check(
+ r#"
+macro_rules! m {
+ ($e:expr) => { $e };
+}
+
+struct Completable;
+
+impl Completable {
+ fn method(&self) {}
+}
+
+fn f() {
+ let c = Completable;
+ m!(c.$0);
+}
+ "#,
+ expect![[r#"
+ me method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_method_call_when_receiver_type_has_errors_issue_10297() {
+ check(
+ r#"
+//- minicore: iterator, sized
+struct Vec<T>;
+impl<T> IntoIterator for Vec<T> {
+ type Item = ();
+ type IntoIter = ();
+ fn into_iter(self);
+}
+fn main() {
+ let x: Vec<_>;
+ x.$0;
+}
+"#,
+ expect![[r#"
+ me into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter
+ "#]],
+ )
+ }
+
+ #[test]
+ fn postfix_drop_completion() {
+ cov_mark::check!(postfix_drop_completion);
+ check_edit(
+ "drop",
+ r#"
+//- minicore: drop
+struct Vec<T>(T);
+impl<T> Drop for Vec<T> {
+ fn drop(&mut self) {}
+}
+fn main() {
+ let x = Vec(0u32)
+ x.$0;
+}
+"#,
+ r"
+struct Vec<T>(T);
+impl<T> Drop for Vec<T> {
+ fn drop(&mut self) {}
+}
+fn main() {
+ let x = Vec(0u32)
+ drop($0x);
+}
+",
+ )
+ }
+
+ #[test]
+ fn issue_12484() {
+ check(
+ r#"
+//- minicore: sized
+trait SizeUser {
+ type Size;
+}
+trait Closure: SizeUser {}
+trait Encrypt: SizeUser {
+ fn encrypt(self, _: impl Closure<Size = Self::Size>);
+}
+fn test(thing: impl Encrypt) {
+ thing.$0;
+}
+ "#,
+ expect![[r#"
+ me encrypt(…) (as Encrypt) fn(self, impl Closure<Size = <Self as SizeUser>::Size>)
+ "#]],
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
new file mode 100644
index 000000000..5d0ddaaf2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
@@ -0,0 +1,280 @@
+//! Completion of names from the current scope in expression position.
+
+use hir::ScopeDef;
+
+use crate::{
+ context::{ExprCtx, PathCompletionCtx, Qualified},
+ CompletionContext, Completions,
+};
+
+pub(crate) fn complete_expr_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ expr_ctx: &ExprCtx,
+) {
+ let _p = profile::span("complete_expr_path");
+ if !ctx.qualifier_ctx.none() {
+ return;
+ }
+
+ let &ExprCtx {
+ in_block_expr,
+ in_loop_body,
+ after_if_expr,
+ in_condition,
+ incomplete_let,
+ ref ref_expr_parent,
+ ref is_func_update,
+ ref innermost_ret_ty,
+ ref impl_,
+ in_match_guard,
+ ..
+ } = expr_ctx;
+
+ let wants_mut_token =
+ ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false);
+
+ let scope_def_applicable = |def| match def {
+ ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) | ScopeDef::Label(_) => false,
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => mac.is_fn_like(ctx.db),
+ _ => true,
+ };
+
+ let add_assoc_item = |acc: &mut Completions, item| match item {
+ hir::AssocItem::Function(func) => acc.add_function(ctx, path_ctx, func, None),
+ hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
+ hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
+ };
+
+ match qualified {
+ Qualified::TypeAnchor { ty: None, trait_: None } => ctx
+ .traits_in_scope()
+ .iter()
+ .flat_map(|&it| hir::Trait::from(it).items(ctx.sema.db))
+ .for_each(|item| add_assoc_item(acc, item)),
+ Qualified::TypeAnchor { trait_: Some(trait_), .. } => {
+ trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item))
+ }
+ Qualified::TypeAnchor { ty: Some(ty), trait_: None } => {
+ if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+ cov_mark::hit!(completes_variant_through_alias);
+ acc.add_enum_variants(ctx, path_ctx, e);
+ }
+
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+
+ // Iterate assoc types separately
+ ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ if let hir::AssocItem::TypeAlias(ty) = item {
+ acc.add_type_alias(ctx, ty)
+ }
+ None::<()>
+ });
+ }
+ Qualified::With { resolution: None, .. } => {}
+ Qualified::With { resolution: Some(resolution), .. } => {
+ // Add associated types on type parameters and `Self`.
+ ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| {
+ acc.add_type_alias(ctx, alias);
+ None::<()>
+ });
+ match resolution {
+ hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
+ let module_scope = module.scope(ctx.db, Some(ctx.module));
+ for (name, def) in module_scope {
+ if scope_def_applicable(def) {
+ acc.add_path_resolution(ctx, path_ctx, name, def);
+ }
+ }
+ }
+ hir::PathResolution::Def(
+ def @ (hir::ModuleDef::Adt(_)
+ | hir::ModuleDef::TypeAlias(_)
+ | hir::ModuleDef::BuiltinType(_)),
+ ) => {
+ let ty = match def {
+ hir::ModuleDef::Adt(adt) => adt.ty(ctx.db),
+ hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db),
+ hir::ModuleDef::BuiltinType(builtin) => {
+ cov_mark::hit!(completes_primitive_assoc_const);
+ builtin.ty(ctx.db)
+ }
+ _ => return,
+ };
+
+ if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+ cov_mark::hit!(completes_variant_through_alias);
+ acc.add_enum_variants(ctx, path_ctx, e);
+ }
+
+ // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType.
+ // (where AssocType is defined on a trait, not an inherent impl)
+
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+
+ // Iterate assoc types separately
+ ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ if let hir::AssocItem::TypeAlias(ty) = item {
+ acc.add_type_alias(ctx, ty)
+ }
+ None::<()>
+ });
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Trait(t)) => {
+ // Handles `Trait::assoc` as well as `<Ty as Trait>::assoc`.
+ for item in t.items(ctx.db) {
+ add_assoc_item(acc, item);
+ }
+ }
+ hir::PathResolution::TypeParam(_) | hir::PathResolution::SelfType(_) => {
+ let ty = match resolution {
+ hir::PathResolution::TypeParam(param) => param.ty(ctx.db),
+ hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db),
+ _ => return,
+ };
+
+ if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+ cov_mark::hit!(completes_variant_through_self);
+ acc.add_enum_variants(ctx, path_ctx, e);
+ }
+
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+ }
+ _ => (),
+ }
+ }
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ Qualified::No => {
+ acc.add_nameref_keywords_with_colon(ctx);
+ if let Some(adt) =
+ ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
+ {
+ let self_ty = (|| ctx.sema.to_def(impl_.as_ref()?)?.self_ty(ctx.db).as_adt())();
+ let complete_self = self_ty == Some(adt);
+
+ match adt {
+ hir::Adt::Struct(strukt) => {
+ let path = ctx
+ .module
+ .find_use_path(ctx.db, hir::ModuleDef::from(strukt))
+ .filter(|it| it.len() > 1);
+
+ acc.add_struct_literal(ctx, path_ctx, strukt, path, None);
+
+ if complete_self {
+ acc.add_struct_literal(
+ ctx,
+ path_ctx,
+ strukt,
+ None,
+ Some(hir::known::SELF_TYPE),
+ );
+ }
+ }
+ hir::Adt::Union(un) => {
+ let path = ctx
+ .module
+ .find_use_path(ctx.db, hir::ModuleDef::from(un))
+ .filter(|it| it.len() > 1);
+
+ acc.add_union_literal(ctx, un, path, None);
+ if complete_self {
+ acc.add_union_literal(ctx, un, None, Some(hir::known::SELF_TYPE));
+ }
+ }
+ hir::Adt::Enum(e) => {
+ super::enum_variants_with_paths(
+ acc,
+ ctx,
+ e,
+ impl_,
+ |acc, ctx, variant, path| {
+ acc.add_qualified_enum_variant(ctx, path_ctx, variant, path)
+ },
+ );
+ }
+ }
+ }
+ ctx.process_all_names(&mut |name, def| match def {
+ ScopeDef::ModuleDef(hir::ModuleDef::Trait(t)) => {
+ let assocs = t.items_with_supertraits(ctx.db);
+ match &*assocs {
+ // traits with no assoc items are unusable as expressions since
+ // there is no associated item path that can be constructed with them
+ [] => (),
+ // FIXME: Render the assoc item with the trait qualified
+ &[_item] => acc.add_path_resolution(ctx, path_ctx, name, def),
+ // FIXME: Append `::` to the thing here, since a trait on its own won't work
+ [..] => acc.add_path_resolution(ctx, path_ctx, name, def),
+ }
+ }
+ _ if scope_def_applicable(def) => acc.add_path_resolution(ctx, path_ctx, name, def),
+ _ => (),
+ });
+
+ if is_func_update.is_none() {
+ let mut add_keyword =
+ |kw, snippet| acc.add_keyword_snippet_expr(ctx, incomplete_let, kw, snippet);
+
+ if !in_block_expr {
+ add_keyword("unsafe", "unsafe {\n $0\n}");
+ }
+ add_keyword("match", "match $1 {\n $0\n}");
+ add_keyword("while", "while $1 {\n $0\n}");
+ add_keyword("while let", "while let $1 = $2 {\n $0\n}");
+ add_keyword("loop", "loop {\n $0\n}");
+ if in_match_guard {
+ add_keyword("if", "if $0");
+ } else {
+ add_keyword("if", "if $1 {\n $0\n}");
+ }
+ add_keyword("if let", "if let $1 = $2 {\n $0\n}");
+ add_keyword("for", "for $1 in $2 {\n $0\n}");
+ add_keyword("true", "true");
+ add_keyword("false", "false");
+
+ if in_condition || in_block_expr {
+ add_keyword("let", "let");
+ }
+
+ if after_if_expr {
+ add_keyword("else", "else {\n $0\n}");
+ add_keyword("else if", "else if $1 {\n $0\n}");
+ }
+
+ if wants_mut_token {
+ add_keyword("mut", "mut ");
+ }
+
+ if in_loop_body {
+ if in_block_expr {
+ add_keyword("continue", "continue;");
+ add_keyword("break", "break;");
+ } else {
+ add_keyword("continue", "continue");
+ add_keyword("break", "break");
+ }
+ }
+
+ if let Some(ty) = innermost_ret_ty {
+ add_keyword(
+ "return",
+ match (in_block_expr, ty.is_unit()) {
+ (true, true) => "return ;",
+ (true, false) => "return;",
+ (false, true) => "return $0",
+ (false, false) => "return",
+ },
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
new file mode 100644
index 000000000..4e89ef696
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
@@ -0,0 +1,108 @@
+//! Completes function abi strings.
+use syntax::{
+ ast::{self, IsString},
+ AstNode, AstToken,
+};
+
+use crate::{
+ completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind,
+};
+
+// Most of these are feature gated, we should filter/add feature gate completions once we have them.
+// Calling-convention strings offered verbatim as completion items inside an
+// `extern "..."` ABI literal.
+// NOTE(review): this list presumably mirrors the ABIs rustc accepted at the
+// time of writing — confirm against `rustc_target::spec::abi` when updating.
+const SUPPORTED_CALLING_CONVENTIONS: &[&str] = &[
+    "Rust",
+    "C",
+    "C-unwind",
+    "cdecl",
+    "stdcall",
+    "stdcall-unwind",
+    "fastcall",
+    "vectorcall",
+    "thiscall",
+    "thiscall-unwind",
+    "aapcs",
+    "win64",
+    "sysv64",
+    "ptx-kernel",
+    "msp430-interrupt",
+    "x86-interrupt",
+    "amdgpu-kernel",
+    "efiapi",
+    "avr-interrupt",
+    "avr-non-blocking-interrupt",
+    "C-cmse-nonsecure-call",
+    "wasm",
+    "system",
+    "system-unwind",
+    "rust-intrinsic",
+    "rust-call",
+    "platform-intrinsic",
+    "unadjusted",
+];
+
+/// Completes calling-convention names inside the string literal of an
+/// `extern "..."` specifier.
+///
+/// Returns `None` (producing no completions) when the string is not the
+/// direct child of an `ast::Abi` node, i.e. not in ABI position.
+pub(crate) fn complete_extern_abi(
+    acc: &mut Completions,
+    _ctx: &CompletionContext<'_>,
+    expanded: &ast::String,
+) -> Option<()> {
+    // Only fire when the literal actually is the ABI of an `extern` specifier.
+    if !expanded.syntax().parent().map_or(false, |it| ast::Abi::can_cast(it.kind())) {
+        return None;
+    }
+    let abi_str = expanded;
+    // Replace only the text between the quotes, keeping the quotes themselves.
+    let source_range = abi_str.text_range_between_quotes()?;
+    for &abi in SUPPORTED_CALLING_CONVENTIONS {
+        CompletionItem::new(CompletionItemKind::Keyword, source_range, abi).add_to(acc);
+    }
+    Some(())
+}
+
+#[cfg(test)]
+mod tests {
+    use expect_test::{expect, Expect};
+
+    use crate::tests::{check_edit, completion_list_no_kw};
+
+    fn check(ra_fixture: &str, expect: Expect) {
+        let actual = completion_list_no_kw(ra_fixture);
+        expect.assert_eq(&actual);
+    }
+
+    // Outside a string literal no ABI completions are offered.
+    #[test]
+    fn only_completes_in_string_literals() {
+        check(
+            r#"
+$0 fn foo {}
+"#,
+            expect![[]],
+        );
+    }
+
+    // A string literal that is not preceded by `extern` is not an ABI position.
+    #[test]
+    fn requires_extern_prefix() {
+        check(
+            r#"
+"$0" fn foo {}
+"#,
+            expect![[]],
+        );
+    }
+
+    // Inside `extern "$0"` the list is offered and an edit fills in the ABI.
+    #[test]
+    fn works() {
+        check(
+            r#"
+extern "$0" fn foo {}
+"#,
+            expect![[]],
+        );
+        check_edit(
+            "Rust",
+            r#"
+extern "$0" fn foo {}
+"#,
+            r#"
+extern "Rust" fn foo {}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs
new file mode 100644
index 000000000..870df63b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs
@@ -0,0 +1,43 @@
+//! Completion of field list position.
+
+use crate::{
+ context::{PathCompletionCtx, Qualified},
+ CompletionContext, Completions,
+};
+
+/// Completes visibility modifiers (`pub`, `pub(crate)`, `pub(super)`) in
+/// tuple-struct / tuple-variant field list position.
+///
+/// Only fires for a bare, unqualified path (no macro bang, no qualifier, no
+/// parent path, no type args) and when no visibility node is already written.
+pub(crate) fn complete_field_list_tuple_variant(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx: &PathCompletionCtx,
+) {
+    // A visibility modifier is already present — nothing to add.
+    if ctx.qualifier_ctx.vis_node.is_some() {
+        return;
+    }
+    match path_ctx {
+        PathCompletionCtx {
+            has_macro_bang: false,
+            qualified: Qualified::No,
+            parent: None,
+            has_type_args: false,
+            ..
+        } => {
+            let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+            add_keyword("pub(crate)", "pub(crate)");
+            add_keyword("pub(super)", "pub(super)");
+            add_keyword("pub", "pub");
+        }
+        _ => (),
+    }
+}
+
+/// Completes visibility modifiers (`pub`, `pub(crate)`, `pub(super)`) in
+/// record-struct / record-variant field list position, unless a visibility
+/// node has already been written.
+pub(crate) fn complete_field_list_record_variant(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+) {
+    if ctx.qualifier_ctx.vis_node.is_none() {
+        let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+        add_keyword("pub(crate)", "pub(crate)");
+        add_keyword("pub(super)", "pub(super)");
+        add_keyword("pub", "pub");
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
new file mode 100644
index 000000000..f04cc15d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -0,0 +1,407 @@
+//! See [`import_on_the_fly`].
+use hir::{ItemInNs, ModuleDef};
+use ide_db::imports::{
+ import_assets::{ImportAssets, LocatedImport},
+ insert_use::ImportScope,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self},
+ AstNode, SyntaxNode, T,
+};
+
+use crate::{
+ context::{
+ CompletionContext, DotAccess, PathCompletionCtx, PathKind, PatternContext, Qualified,
+ TypeLocation,
+ },
+ render::{render_resolution_with_import, render_resolution_with_import_pat, RenderContext},
+};
+
+use super::Completions;
+
+// Feature: Completion With Autoimport
+//
+// When completing names in the current scope, proposes additional imports from other modules or crates,
+// if they can be qualified in the scope, and their name contains all symbols from the completion input.
+//
+// To be considered applicable, the name must contain all input symbols in the given order, not necessarily adjacent.
+// If any input symbol is not lowercased, the name must contain all symbols in exact case; otherwise the containing is checked case-insensitively.
+//
+// ```
+// fn main() {
+// pda$0
+// }
+// # pub mod std { pub mod marker { pub struct PhantomData { } } }
+// ```
+// ->
+// ```
+// use std::marker::PhantomData;
+//
+// fn main() {
+// PhantomData
+// }
+// # pub mod std { pub mod marker { pub struct PhantomData { } } }
+// ```
+//
+// Also completes associated items, that require trait imports.
+// If any unresolved and/or partially-qualified path precedes the input, it will be taken into account.
+// Currently, only the imports with their import path ending with the whole qualifier will be proposed
+// (no fuzzy matching for qualifier).
+//
+// ```
+// mod foo {
+// pub mod bar {
+// pub struct Item;
+//
+// impl Item {
+// pub const TEST_ASSOC: usize = 3;
+// }
+// }
+// }
+//
+// fn main() {
+// bar::Item::TEST_A$0
+// }
+// ```
+// ->
+// ```
+// use foo::bar;
+//
+// mod foo {
+// pub mod bar {
+// pub struct Item;
+//
+// impl Item {
+// pub const TEST_ASSOC: usize = 3;
+// }
+// }
+// }
+//
+// fn main() {
+// bar::Item::TEST_ASSOC
+// }
+// ```
+//
+// NOTE: currently, if an assoc item comes from a trait that's not currently imported, and it also has an unresolved and/or partially-qualified path,
+// no imports will be proposed.
+//
+// .Fuzzy search details
+//
+// To avoid an excessive amount of the results returned, completion input is checked for inclusion in the names only
+// (i.e. in `HashMap` in the `std::collections::HashMap` path).
+// For the same reasons, avoids searching for any path imports for inputs with their length less than 2 symbols
+// (but shows all associated items for any input length).
+//
+// .Import configuration
+//
+// It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
+// Mimics the corresponding behavior of the `Auto Import` feature.
+//
+// .LSP and performance implications
+//
+// The feature is enabled only if the LSP client supports LSP protocol version 3.16+ and reports the `additionalTextEdits`
+// (case-sensitive) resolve client capability in its client capabilities.
+// This way the server is able to defer the costly computations, doing them for a selected completion item only.
+// For clients with no such support, all edits have to be calculated on the completion request, including the fuzzy search completion ones,
+// which might be slow; hence the feature is automatically disabled in that case.
+//
+// .Feature toggle
+//
+// The feature can be forcefully turned off in the settings with the `rust-analyzer.completion.autoimport.enable` flag.
+// Note that having this flag set to `true` does not guarantee that the feature is enabled: your client needs to have the corresponding
+// capability enabled.
+/// Flyimport entry point for path positions (expressions, types, attrs,
+/// derives, items and patterns spelled as paths).
+///
+/// Bails out when the feature is disabled in the config or the path kind is
+/// not supported; otherwise builds [`ImportAssets`] from the typed prefix and
+/// any existing qualifier and delegates to [`import_on_the_fly`].
+pub(crate) fn import_on_the_fly_path(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx: &PathCompletionCtx,
+) -> Option<()> {
+    if !ctx.config.enable_imports_on_the_fly {
+        return None;
+    }
+    let qualified = match path_ctx {
+        PathCompletionCtx {
+            kind:
+                PathKind::Expr { .. }
+                | PathKind::Type { .. }
+                | PathKind::Attr { .. }
+                | PathKind::Derive { .. }
+                | PathKind::Item { .. }
+                | PathKind::Pat { .. },
+            qualified,
+            ..
+        } => qualified,
+        _ => return None,
+    };
+    let potential_import_name = import_name(ctx);
+    // A partially written qualifier (e.g. `bar::Item::TEST_A`) restricts the search.
+    let qualifier = match qualified {
+        Qualified::With { path, .. } => Some(path.clone()),
+        _ => None,
+    };
+    let import_assets = import_assets_for_path(ctx, &potential_import_name, qualifier.clone())?;
+
+    import_on_the_fly(
+        acc,
+        ctx,
+        path_ctx,
+        import_assets,
+        qualifier.map(|it| it.syntax().clone()).or_else(|| ctx.original_token.parent())?,
+        potential_import_name,
+    )
+}
+
+/// Flyimport entry point for (non-path) pattern positions.
+///
+/// Skipped inside record patterns and when the feature is disabled.
+pub(crate) fn import_on_the_fly_pat(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    pattern_ctx: &PatternContext,
+) -> Option<()> {
+    if !ctx.config.enable_imports_on_the_fly {
+        return None;
+    }
+    if let PatternContext { record_pat: Some(_), .. } = pattern_ctx {
+        return None;
+    }
+
+    let potential_import_name = import_name(ctx);
+    let import_assets = import_assets_for_path(ctx, &potential_import_name, None)?;
+
+    import_on_the_fly_pat_(
+        acc,
+        ctx,
+        pattern_ctx,
+        import_assets,
+        ctx.original_token.parent()?,
+        potential_import_name,
+    )
+}
+
+/// Flyimport entry point for method completion after `.`: searches for trait
+/// methods applicable to the receiver's type that require an import.
+pub(crate) fn import_on_the_fly_dot(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    dot_access: &DotAccess,
+) -> Option<()> {
+    if !ctx.config.enable_imports_on_the_fly {
+        return None;
+    }
+    let receiver = dot_access.receiver.as_ref()?;
+    let ty = dot_access.receiver_ty.as_ref()?;
+    let potential_import_name = import_name(ctx);
+    // Search is driven by the receiver's original (unadjusted) type.
+    let import_assets = ImportAssets::for_fuzzy_method_call(
+        ctx.module,
+        ty.original.clone(),
+        potential_import_name.clone(),
+        receiver.syntax().clone(),
+    )?;
+
+    import_on_the_fly_method(
+        acc,
+        ctx,
+        dot_access,
+        import_assets,
+        receiver.syntax().clone(),
+        potential_import_name,
+    )
+}
+
+/// Shared driver for flyimport in path position: filters located imports by
+/// namespace applicability for the current path kind, drops hidden items, and
+/// orders results by how early the user input occurs in the proposed name.
+fn import_on_the_fly(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx @ PathCompletionCtx { kind, .. }: &PathCompletionCtx,
+    import_assets: ImportAssets,
+    position: SyntaxNode,
+    potential_import_name: String,
+) -> Option<()> {
+    let _p = profile::span("import_on_the_fly").detail(|| potential_import_name.clone());
+
+    // No scope to insert a `use` item into — flyimport cannot apply here.
+    if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
+        return None;
+    }
+
+    // Decides, per path kind, which namespaces the imported item may come from.
+    let ns_filter = |import: &LocatedImport| {
+        match (kind, import.original_item) {
+            // Aren't handled in flyimport
+            (PathKind::Vis { .. } | PathKind::Use, _) => false,
+            // modules are always fair game
+            (_, ItemInNs::Types(hir::ModuleDef::Module(_))) => true,
+            // and so are macros(except for attributes)
+            (
+                PathKind::Expr { .. }
+                | PathKind::Type { .. }
+                | PathKind::Item { .. }
+                | PathKind::Pat { .. },
+                ItemInNs::Macros(mac),
+            ) => mac.is_fn_like(ctx.db),
+            (PathKind::Item { .. }, ..) => false,
+
+            (PathKind::Expr { .. }, ItemInNs::Types(_) | ItemInNs::Values(_)) => true,
+
+            (PathKind::Pat { .. }, ItemInNs::Types(_)) => true,
+            (PathKind::Pat { .. }, ItemInNs::Values(def)) => {
+                matches!(def, hir::ModuleDef::Const(_))
+            }
+
+            (PathKind::Type { location }, ItemInNs::Types(ty)) => {
+                if matches!(location, TypeLocation::TypeBound) {
+                    matches!(ty, ModuleDef::Trait(_))
+                } else {
+                    true
+                }
+            }
+            (PathKind::Type { .. }, ItemInNs::Values(_)) => false,
+
+            (PathKind::Attr { .. }, ItemInNs::Macros(mac)) => mac.is_attr(ctx.db),
+            (PathKind::Attr { .. }, _) => false,
+
+            (PathKind::Derive { existing_derives }, ItemInNs::Macros(mac)) => {
+                mac.is_derive(ctx.db) && !existing_derives.contains(&mac)
+            }
+            (PathKind::Derive { .. }, _) => false,
+        }
+    };
+    let user_input_lowercased = potential_import_name.to_lowercase();
+
+    acc.add_all(
+        import_assets
+            .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+            .into_iter()
+            .filter(ns_filter)
+            .filter(|import| {
+                !ctx.is_item_hidden(&import.item_to_import)
+                    && !ctx.is_item_hidden(&import.original_item)
+            })
+            .sorted_by_key(|located_import| {
+                compute_fuzzy_completion_order_key(
+                    &located_import.import_path,
+                    &user_input_lowercased,
+                )
+            })
+            .filter_map(|import| {
+                render_resolution_with_import(RenderContext::new(ctx), path_ctx, import)
+            })
+            .map(|builder| builder.build()),
+    );
+    Some(())
+}
+
+/// Shared driver for flyimport in pattern position. Mirrors
+/// [`import_on_the_fly`] but with a fixed namespace filter (fn-like macros,
+/// any type, and `const` values — the only items valid in patterns) and
+/// pattern-specific rendering.
+fn import_on_the_fly_pat_(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    pattern_ctx: &PatternContext,
+    import_assets: ImportAssets,
+    position: SyntaxNode,
+    potential_import_name: String,
+) -> Option<()> {
+    let _p = profile::span("import_on_the_fly_pat").detail(|| potential_import_name.clone());
+
+    // No scope to insert a `use` item into — flyimport cannot apply here.
+    if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
+        return None;
+    }
+
+    let ns_filter = |import: &LocatedImport| match import.original_item {
+        ItemInNs::Macros(mac) => mac.is_fn_like(ctx.db),
+        ItemInNs::Types(_) => true,
+        ItemInNs::Values(def) => matches!(def, hir::ModuleDef::Const(_)),
+    };
+    let user_input_lowercased = potential_import_name.to_lowercase();
+
+    acc.add_all(
+        import_assets
+            .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+            .into_iter()
+            .filter(ns_filter)
+            .filter(|import| {
+                !ctx.is_item_hidden(&import.item_to_import)
+                    && !ctx.is_item_hidden(&import.original_item)
+            })
+            .sorted_by_key(|located_import| {
+                compute_fuzzy_completion_order_key(
+                    &located_import.import_path,
+                    &user_input_lowercased,
+                )
+            })
+            .filter_map(|import| {
+                render_resolution_with_import_pat(RenderContext::new(ctx), pattern_ctx, import)
+            })
+            .map(|builder| builder.build()),
+    );
+    Some(())
+}
+
+/// Shared driver for flyimport after `.`: of the located imports, only
+/// function values are offered, as importable methods on the receiver.
+fn import_on_the_fly_method(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    dot_access: &DotAccess,
+    import_assets: ImportAssets,
+    position: SyntaxNode,
+    potential_import_name: String,
+) -> Option<()> {
+    let _p = profile::span("import_on_the_fly_method").detail(|| potential_import_name.clone());
+
+    // No scope to insert a `use` item into — flyimport cannot apply here.
+    if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
+        return None;
+    }
+
+    let user_input_lowercased = potential_import_name.to_lowercase();
+
+    import_assets
+        .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+        .into_iter()
+        .filter(|import| {
+            !ctx.is_item_hidden(&import.item_to_import)
+                && !ctx.is_item_hidden(&import.original_item)
+        })
+        .sorted_by_key(|located_import| {
+            compute_fuzzy_completion_order_key(&located_import.import_path, &user_input_lowercased)
+        })
+        .for_each(|import| match import.original_item {
+            ItemInNs::Values(hir::ModuleDef::Function(f)) => {
+                acc.add_method_with_import(ctx, dot_access, f, import);
+            }
+            _ => (),
+        });
+    Some(())
+}
+
+/// The partial name typed so far, used as the fuzzy-search needle.
+/// Empty when the cursor sits directly after `.` or `::` (nothing typed yet).
+fn import_name(ctx: &CompletionContext<'_>) -> String {
+    let token_kind = ctx.token.kind();
+    if matches!(token_kind, T![.] | T![::]) {
+        String::new()
+    } else {
+        ctx.token.to_string()
+    }
+}
+
+/// Builds [`ImportAssets`] for fuzzy path search from the typed prefix and an
+/// optional qualifier.
+///
+/// Inputs shorter than 3 characters are downgraded from fuzzy to exact
+/// matching to keep the result set from exploding.
+fn import_assets_for_path(
+    ctx: &CompletionContext<'_>,
+    potential_import_name: &str,
+    qualifier: Option<ast::Path>,
+) -> Option<ImportAssets> {
+    let fuzzy_name_length = potential_import_name.len();
+    let mut assets_for_path = ImportAssets::for_fuzzy_path(
+        ctx.module,
+        qualifier,
+        potential_import_name.to_owned(),
+        &ctx.sema,
+        ctx.token.parent()?,
+    )?;
+    if fuzzy_name_length < 3 {
+        cov_mark::hit!(flyimport_exact_on_short_path);
+        assets_for_path.path_fuzzy_name_to_exact(false);
+    }
+    Some(assets_for_path)
+}
+
+/// Sort key for flyimport results: the byte position at which the lowercased
+/// user input first occurs in the (lowercased) last segment of the proposed
+/// path — earlier matches sort first; `usize::MAX` when it does not occur or
+/// the path has no segments.
+fn compute_fuzzy_completion_order_key(
+    proposed_mod_path: &hir::ModPath,
+    user_input_lowercased: &str,
+) -> usize {
+    cov_mark::hit!(certain_fuzzy_order_test);
+    let import_name = match proposed_mod_path.segments().last() {
+        Some(name) => name.to_smol_str().to_lowercase(),
+        None => return usize::MAX,
+    };
+    match import_name.match_indices(user_input_lowercased).next() {
+        Some((first_matching_index, _)) => first_matching_index,
+        None => usize::MAX,
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs
new file mode 100644
index 000000000..f0ecc595a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs
@@ -0,0 +1,196 @@
+//! See [`complete_fn_param`].
+
+use hir::HirDisplay;
+use ide_db::FxHashMap;
+use syntax::{
+ algo,
+ ast::{self, HasModuleItem},
+ match_ast, AstNode, Direction, SyntaxKind, TextRange, TextSize,
+};
+
+use crate::{
+ context::{ParamContext, ParamKind, PatternContext},
+ CompletionContext, CompletionItem, CompletionItemKind, Completions,
+};
+
+// FIXME: Make this a submodule of [`pattern`]
+/// Complete repeated parameters, both name and type. For example, if all
+/// functions in a file have a `spam: &mut Spam` parameter, a completion with
+/// `spam: &mut Spam` insert text/label will be suggested.
+///
+/// Also complete parameters for closure or local functions from the surrounding defined locals.
+pub(crate) fn complete_fn_param(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    pattern_ctx: &PatternContext,
+) -> Option<()> {
+    // Only applicable when the pattern being completed is a fn/closure parameter.
+    let (ParamContext { param_list, kind, .. }, impl_) = match pattern_ctx {
+        PatternContext { param_ctx: Some(kind), impl_, .. } => (kind, impl_),
+        _ => return None,
+    };
+
+    let comma_wrapper = comma_wrapper(ctx);
+    let mut add_new_item_to_acc = |label: &str| {
+        let mk_item = |label: &str, range: TextRange| {
+            CompletionItem::new(CompletionItemKind::Binding, range, label)
+        };
+        // If the cursor sits inside an existing `Param` node, wrap the label
+        // with the commas needed to keep the list well-formed and replace the
+        // whole param node; otherwise insert at the plain source range.
+        let item = match &comma_wrapper {
+            Some((fmt, range)) => mk_item(&fmt(label), *range),
+            None => mk_item(label, ctx.source_range()),
+        };
+        // Completion lookup is omitted intentionally here.
+        // See the full discussion: https://github.com/rust-lang/rust-analyzer/issues/12073
+        item.add_to(acc)
+    };
+
+    match kind {
+        ParamKind::Function(function) => {
+            fill_fn_params(ctx, function, param_list, impl_, add_new_item_to_acc);
+        }
+        ParamKind::Closure(closure) => {
+            // For closures, offer `name: Type` for locals visible in the
+            // enclosing statement list.
+            let stmt_list = closure.syntax().ancestors().find_map(ast::StmtList::cast)?;
+            params_from_stmt_list_scope(ctx, stmt_list, |name, ty| {
+                add_new_item_to_acc(&format!("{name}: {ty}"));
+            });
+        }
+    }
+
+    Some(())
+}
+
+/// Collects `pattern: Type` parameter suggestions for a function by scanning
+/// every other `fn` in the surrounding source file / item list / assoc-item
+/// list, plus locals of an enclosing statement list, then removes entries the
+/// current parameter list already declares. Also offers the `self` forms when
+/// appropriate (first parameter position inside an impl).
+fn fill_fn_params(
+    ctx: &CompletionContext<'_>,
+    function: &ast::Fn,
+    param_list: &ast::ParamList,
+    impl_: &Option<ast::Impl>,
+    mut add_new_item_to_acc: impl FnMut(&str),
+) {
+    // Maps whole-param text (`spam: &mut Spam`) -> binding text (`spam`).
+    let mut file_params = FxHashMap::default();
+
+    let mut extract_params = |f: ast::Fn| {
+        f.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
+            if let Some(pat) = param.pat() {
+                // FIXME: We should be able to turn these into SmolStr without having to allocate a String
+                let whole_param = param.syntax().text().to_string();
+                let binding = pat.syntax().text().to_string();
+                file_params.entry(whole_param).or_insert(binding);
+            }
+        });
+    };
+
+    // Harvest params from sibling functions at whichever item level encloses the cursor.
+    for node in ctx.token.parent_ancestors() {
+        match_ast! {
+            match node {
+                ast::SourceFile(it) => it.items().filter_map(|item| match item {
+                    ast::Item::Fn(it) => Some(it),
+                    _ => None,
+                }).for_each(&mut extract_params),
+                ast::ItemList(it) => it.items().filter_map(|item| match item {
+                    ast::Item::Fn(it) => Some(it),
+                    _ => None,
+                }).for_each(&mut extract_params),
+                ast::AssocItemList(it) => it.assoc_items().filter_map(|item| match item {
+                    ast::AssocItem::Fn(it) => Some(it),
+                    _ => None,
+                }).for_each(&mut extract_params),
+                _ => continue,
+            }
+        };
+    }
+
+    // For local functions, also offer the surrounding block's locals as params.
+    if let Some(stmt_list) = function.syntax().parent().and_then(ast::StmtList::cast) {
+        params_from_stmt_list_scope(ctx, stmt_list, |name, ty| {
+            file_params.entry(format!("{name}: {ty}")).or_insert(name.to_string());
+        });
+    }
+    remove_duplicated(&mut file_params, param_list.params());
+    let self_completion_items = ["self", "&self", "mut self", "&mut self"];
+    if should_add_self_completions(ctx.token.text_range().start(), param_list, impl_) {
+        self_completion_items.into_iter().for_each(|self_item| add_new_item_to_acc(self_item));
+    }
+
+    file_params.keys().for_each(|whole_param| add_new_item_to_acc(whole_param));
+}
+
+/// Invokes `cb` with `(name, rendered_type)` for every local binding in scope
+/// at the end of `stmt_list`. Locals whose type cannot be rendered as source
+/// code are silently skipped.
+fn params_from_stmt_list_scope(
+    ctx: &CompletionContext<'_>,
+    stmt_list: ast::StmtList,
+    mut cb: impl FnMut(hir::Name, String),
+) {
+    // Anchor the scope lookup at the end of the last child so that all
+    // preceding bindings are visible.
+    let syntax_node = match stmt_list.syntax().last_child() {
+        Some(it) => it,
+        None => return,
+    };
+    if let Some(scope) =
+        ctx.sema.scope_at_offset(stmt_list.syntax(), syntax_node.text_range().end())
+    {
+        let module = scope.module().into();
+        scope.process_all_names(&mut |name, def| {
+            if let hir::ScopeDef::Local(local) = def {
+                if let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module) {
+                    cb(name, ty);
+                }
+            }
+        });
+    }
+}
+
+/// Drops suggestions that would duplicate parameters already present in the
+/// current parameter list — both exact `pattern: type` matches and any
+/// suggestion re-using an already-bound pattern name.
+fn remove_duplicated(
+    file_params: &mut FxHashMap<String, String>,
+    fn_params: ast::AstChildren<ast::Param>,
+) {
+    fn_params.for_each(|param| {
+        let whole_param = param.syntax().text().to_string();
+        file_params.remove(&whole_param);
+
+        match param.pat() {
+            // remove suggestions for patterns that already exist
+            // if the type is missing we are checking the current param to be completed
+            // in which case this would find itself removing the suggestions due to itself
+            Some(pattern) if param.ty().is_some() => {
+                let binding = pattern.syntax().text().to_string();
+                file_params.retain(|_, v| v != &binding);
+            }
+            _ => (),
+        }
+    })
+}
+
+/// `self` / `&self` / `mut self` / `&mut self` are only offered inside an
+/// `impl`, when no `self` param exists yet, and when the cursor is at the
+/// first parameter position (either an empty list or within the first
+/// parameter's pattern).
+fn should_add_self_completions(
+    cursor: TextSize,
+    param_list: &ast::ParamList,
+    impl_: &Option<ast::Impl>,
+) -> bool {
+    if impl_.is_none() || param_list.self_param().is_some() {
+        return false;
+    }
+    match param_list.params().next() {
+        Some(first) => first.pat().map_or(false, |pat| pat.syntax().text_range().contains(cursor)),
+        None => true,
+    }
+}
+
+/// When the cursor is inside an existing `Param` node, returns a formatter
+/// that adds whatever leading/trailing comma is needed to keep the parameter
+/// list well-formed, together with the text range of that whole param (which
+/// the completion will replace). `None` when not inside a param.
+fn comma_wrapper(ctx: &CompletionContext<'_>) -> Option<(impl Fn(&str) -> String, TextRange)> {
+    let param = ctx.token.parent_ancestors().find(|node| node.kind() == SyntaxKind::PARAM)?;
+
+    // Look past whitespace on both sides to decide whether commas already exist.
+    let next_token_kind = {
+        let t = param.last_token()?.next_token()?;
+        let t = algo::skip_whitespace_token(t, Direction::Next)?;
+        t.kind()
+    };
+    let prev_token_kind = {
+        let t = param.first_token()?.prev_token()?;
+        let t = algo::skip_whitespace_token(t, Direction::Prev)?;
+        t.kind()
+    };
+
+    // `(`, `)` and `|` (closure params) count as list boundaries: no comma needed.
+    let has_trailing_comma =
+        matches!(next_token_kind, SyntaxKind::COMMA | SyntaxKind::R_PAREN | SyntaxKind::PIPE);
+    let trailing = if has_trailing_comma { "" } else { "," };
+
+    let has_leading_comma =
+        matches!(prev_token_kind, SyntaxKind::COMMA | SyntaxKind::L_PAREN | SyntaxKind::PIPE);
+    let leading = if has_leading_comma { "" } else { ", " };
+
+    Some((move |label: &_| (format!("{}{}{}", leading, label, trailing)), param.text_range()))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs
new file mode 100644
index 000000000..038bdb427
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs
@@ -0,0 +1,130 @@
+//! Completes identifiers in format string literals.
+
+use ide_db::syntax_helpers::format_string::is_format_string;
+use itertools::Itertools;
+use syntax::{ast, AstToken, TextRange, TextSize};
+
+use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, Completions};
+
+/// Complete identifiers in format strings.
+///
+/// Walks backwards from the cursor over the partial identifier typed so far;
+/// once an unescaped `{` is found, offers every local in scope, replacing the
+/// text between that `{` and the cursor.
+pub(crate) fn format_string(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    original: &ast::String,
+    expanded: &ast::String,
+) {
+    if !is_format_string(&expanded) {
+        return;
+    }
+    // Offsets are computed relative to the literal's start in the original file.
+    let cursor = ctx.position.offset;
+    let lit_start = ctx.original_token.text_range().start();
+    let cursor_in_lit = cursor - lit_start;
+
+    let prefix = &original.text()[..cursor_in_lit.into()];
+    // NOTE(review): `is_alphanumeric` does not skip `_`, so the backward walk
+    // stops at an underscore in a partially typed identifier — confirm intended.
+    let braces = prefix.char_indices().rev().skip_while(|&(_, c)| c.is_alphanumeric()).next_tuple();
+    let brace_offset = match braces {
+        // escaped brace
+        Some(((_, '{'), (_, '{'))) => return,
+        Some(((idx, '{'), _)) => lit_start + TextSize::from(idx as u32 + 1),
+        _ => return,
+    };
+
+    let source_range = TextRange::new(brace_offset, cursor);
+    ctx.locals.iter().for_each(|(name, _)| {
+        CompletionItem::new(CompletionItemKind::Binding, source_range, name.to_smol_str())
+            .add_to(acc);
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use expect_test::{expect, Expect};
+
+    use crate::tests::{check_edit, completion_list_no_kw};
+
+    fn check(ra_fixture: &str, expect: Expect) {
+        let actual = completion_list_no_kw(ra_fixture);
+        expect.assert_eq(&actual);
+    }
+
+    // Completion also fires when the format string reaches us through a
+    // wrapping macro (`print!` forwarding to `format_args!`).
+    #[test]
+    fn works_when_wrapped() {
+        check(
+            r#"
+macro_rules! format_args {
+    ($lit:literal $(tt:tt)*) => { 0 },
+}
+macro_rules! print {
+    ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+}
+fn main() {
+    let foobar = 1;
+    print!("f$0");
+}
+"#,
+            expect![[]],
+        );
+    }
+
+    // Without a preceding `{` there is no interpolation position to complete.
+    #[test]
+    fn no_completion_without_brace() {
+        check(
+            r#"
+macro_rules! format_args {
+    ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+    let foobar = 1;
+    format_args!("f$0");
+}
+"#,
+            expect![[]],
+        );
+    }
+
+    // Locals are completed both after a typed prefix and directly after `{`.
+    #[test]
+    fn completes_locals() {
+        check_edit(
+            "foobar",
+            r#"
+macro_rules! format_args {
+    ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+    let foobar = 1;
+    format_args!("{f$0");
+}
+"#,
+            r#"
+macro_rules! format_args {
+    ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+    let foobar = 1;
+    format_args!("{foobar");
+}
+"#,
+        );
+        check_edit(
+            "foobar",
+            r#"
+macro_rules! format_args {
+    ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+    let foobar = 1;
+    format_args!("{$0");
+}
+"#,
+            r#"
+macro_rules! format_args {
+    ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+    let foobar = 1;
+    format_args!("{foobar");
+}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
new file mode 100644
index 000000000..60d05ae46
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
@@ -0,0 +1,133 @@
+//! Completion of paths and keywords at item list position.
+
+use crate::{
+ context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified},
+ CompletionContext, Completions,
+};
+
+pub(crate) mod trait_impl;
+
+/// Offers item keywords when an item is being started inside a block
+/// expression; only fires for a trivial (single-segment, unqualified) path.
+pub(crate) fn complete_item_list_in_expr(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx: &PathCompletionCtx,
+    expr_ctx: &ExprCtx,
+) {
+    if !expr_ctx.in_block_expr {
+        return;
+    }
+    if !path_ctx.is_trivial_path() {
+        return;
+    }
+    // `kind: None` signals block-expression position to `add_keywords`.
+    add_keywords(acc, ctx, None);
+}
+
+/// Completes paths and keywords in item-list position (source file, module
+/// body, impl/trait body, extern block).
+///
+/// For qualified paths, only modules and fn-like macros from the resolved
+/// module's scope are offered (those are the only path-prefixed things that
+/// can start an item).
+pub(crate) fn complete_item_list(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+    kind: &ItemListKind,
+) {
+    let _p = profile::span("complete_item_list");
+    if path_ctx.is_trivial_path() {
+        add_keywords(acc, ctx, Some(kind));
+    }
+
+    match qualified {
+        Qualified::With {
+            resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
+            super_chain_len,
+            ..
+        } => {
+            for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
+                match def {
+                    hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_fn_like(ctx.db) => {
+                        acc.add_macro(ctx, path_ctx, m, name)
+                    }
+                    hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+                        acc.add_module(ctx, path_ctx, m, name)
+                    }
+                    _ => (),
+                }
+            }
+
+            // `super::super::...` can be extended by one more `super`.
+            acc.add_super_keyword(ctx, *super_chain_len);
+        }
+        Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+        Qualified::No if ctx.qualifier_ctx.none() => {
+            ctx.process_all_names(&mut |name, def| match def {
+                hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_fn_like(ctx.db) => {
+                    acc.add_macro(ctx, path_ctx, m, name)
+                }
+                hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+                    acc.add_module(ctx, path_ctx, m, name)
+                }
+                _ => (),
+            });
+            acc.add_nameref_keywords_with_colon(ctx);
+        }
+        Qualified::TypeAnchor { .. } | Qualified::No | Qualified::With { .. } => {}
+    }
+}
+
+/// Adds the item keywords valid for the given list position.
+/// `kind == None` means "item inside a block expression".
+fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option<&ItemListKind>) {
+    let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+
+    let in_item_list = matches!(kind, Some(ItemListKind::SourceFile | ItemListKind::Module) | None);
+    let in_assoc_non_trait_impl = matches!(kind, Some(ItemListKind::Impl | ItemListKind::Trait));
+    let in_extern_block = matches!(kind, Some(ItemListKind::ExternBlock));
+    let in_trait = matches!(kind, Some(ItemListKind::Trait));
+    let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_)));
+    let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl));
+    let no_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
+    let in_block = matches!(kind, None);
+
+    // Trait-impl bodies get no generic keywords: their completions come from
+    // the trait's missing items instead.
+    if !in_trait_impl {
+        // After an already-typed `unsafe`, only the unsafe-able items apply.
+        if ctx.qualifier_ctx.unsafe_tok.is_some() {
+            if in_item_list || in_assoc_non_trait_impl {
+                add_keyword("fn", "fn $1($2) {\n    $0\n}");
+            }
+            if in_item_list {
+                add_keyword("trait", "trait $1 {\n    $0\n}");
+                if no_qualifiers {
+                    add_keyword("impl", "impl $1 {\n    $0\n}");
+                }
+            }
+            return;
+        }
+
+        if in_item_list {
+            add_keyword("enum", "enum $1 {\n    $0\n}");
+            add_keyword("mod", "mod $0");
+            add_keyword("static", "static $0");
+            add_keyword("struct", "struct $0");
+            add_keyword("trait", "trait $1 {\n    $0\n}");
+            add_keyword("union", "union $1 {\n    $0\n}");
+            add_keyword("use", "use $0");
+            if no_qualifiers {
+                add_keyword("impl", "impl $1 {\n    $0\n}");
+            }
+        }
+
+        if !in_trait && !in_block && no_qualifiers {
+            add_keyword("pub(crate)", "pub(crate)");
+            add_keyword("pub(super)", "pub(super)");
+            add_keyword("pub", "pub");
+        }
+
+        if in_extern_block {
+            // Extern-block fns are declarations only — no body snippet.
+            add_keyword("fn", "fn $1($2);");
+        } else {
+            if !in_inherent_impl {
+                if !in_trait {
+                    add_keyword("extern", "extern $0");
+                }
+                add_keyword("type", "type $0");
+            }
+
+            add_keyword("fn", "fn $1($2) {\n    $0\n}");
+            add_keyword("unsafe", "unsafe");
+            add_keyword("const", "const $0");
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
new file mode 100644
index 000000000..e9256803c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -0,0 +1,1160 @@
+//! Completion for associated items in a trait implementation.
+//!
+//! This module adds the completion items related to implementing associated
+//! items within an `impl Trait for Struct` block. The current context node
+//! must be within either a `FN`, `TYPE_ALIAS`, or `CONST` node
+//! and a direct child of an `IMPL`.
+//!
+//! # Examples
+//!
+//! Considering the following trait `impl`:
+//!
+//! ```ignore
+//! trait SomeTrait {
+//! fn foo();
+//! }
+//!
+//! impl SomeTrait for () {
+//! fn f$0
+//! }
+//! ```
+//!
+//! may result in the completion of the following method:
+//!
+//! ```ignore
+//! # trait SomeTrait {
+//! # fn foo();
+//! # }
+//!
+//! impl SomeTrait for () {
+//! fn foo() {}$0
+//! }
+//! ```
+
+use hir::{self, HasAttrs};
+use ide_db::{
+ path_transform::PathTransform, syntax_helpers::insert_whitespace_into_node,
+ traits::get_missing_assoc_items, SymbolKind,
+};
+use syntax::{
+ ast::{self, edit_in_place::AttrsOwnerEdit},
+ AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, T,
+};
+use text_edit::TextEdit;
+
+use crate::{
+ context::PathCompletionCtx, CompletionContext, CompletionItem, CompletionItemKind,
+ CompletionRelevance, Completions,
+};
+
+/// Which kind of associated item the caret sits in; used to filter the
+/// offered completions to the matching item kind (e.g. only functions after
+/// the user typed `fn `).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum ImplCompletionKind {
+    // Offer every missing associated item regardless of its kind.
+    All,
+    Fn,
+    TypeAlias,
+    Const,
+}
+
+/// Completes missing associated consts when the caret is in a `const $0`
+/// position inside a trait impl.
+pub(crate) fn complete_trait_impl_const(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    name: &Option<ast::Name>,
+) -> Option<()> {
+    complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::Const)
+}
+
+/// Completes missing associated types when the caret is in a `type $0`
+/// position inside a trait impl.
+pub(crate) fn complete_trait_impl_type_alias(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    name: &Option<ast::Name>,
+) -> Option<()> {
+    complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::TypeAlias)
+}
+
+/// Completes missing associated functions when the caret is in an `fn $0`
+/// position inside a trait impl.
+pub(crate) fn complete_trait_impl_fn(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    name: &Option<ast::Name>,
+) -> Option<()> {
+    complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::Fn)
+}
+
+/// Shared implementation of the `const`/`type`/`fn` name completions above.
+///
+/// Locates the syntax node of the associated item being typed — either the
+/// parent of the already-parsed `name`, or the node around the cursor token —
+/// and delegates to [`complete_trait_impl`] with the enclosing `impl`.
+fn complete_trait_impl_name(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    name: &Option<ast::Name>,
+    kind: ImplCompletionKind,
+) -> Option<()> {
+    let token = ctx.token.clone();
+    // With no name typed yet the cursor may sit in trailing whitespace after
+    // the keyword, so anchor on the previous token in that case.
+    let item = match name {
+        Some(name) => name.syntax().parent(),
+        None => if token.kind() == SyntaxKind::WHITESPACE { token.prev_token()? } else { token }
+            .parent(),
+    }?;
+    complete_trait_impl(
+        acc,
+        ctx,
+        kind,
+        replacement_range(ctx, &item),
+        // item -> ASSOC_ITEM_LIST -> IMPL
+        &ast::Impl::cast(item.parent()?.parent()?)?,
+    );
+    Some(())
+}
+
+/// Completes missing trait items when the cursor is at a bare-name position
+/// inside a trait `impl` block, e.g. `impl Tr for S { f$0 }`.
+pub(crate) fn complete_trait_impl_item_by_name(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx: &PathCompletionCtx,
+    name_ref: &Option<ast::NameRef>,
+    impl_: &Option<ast::Impl>,
+) {
+    // Only a plain, unqualified path can be the start of an associated item.
+    if !path_ctx.is_trivial_path() {
+        return;
+    }
+    let impl_ = match impl_ {
+        Some(it) => it,
+        None => return,
+    };
+    // Replace the partially typed name if there is one, otherwise just the
+    // completion position itself.
+    let range = name_ref
+        .as_ref()
+        .map_or_else(|| ctx.source_range(), |name| name.syntax().text_range());
+    complete_trait_impl(acc, ctx, ImplCompletionKind::All, range, impl_);
+}
+
+/// Core entry point: adds one completion per associated item that the trait
+/// requires but `impl_def` does not yet provide, filtered by `kind`.
+fn complete_trait_impl(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    kind: ImplCompletionKind,
+    replacement_range: TextRange,
+    impl_def: &ast::Impl,
+) {
+    if let Some(hir_impl) = ctx.sema.to_def(impl_def) {
+        get_missing_assoc_items(&ctx.sema, impl_def).into_iter().for_each(|item| {
+            use self::ImplCompletionKind::*;
+            // Pair each missing item with the requested kind; mismatching
+            // combinations fall through to the no-op arm.
+            match (item, kind) {
+                (hir::AssocItem::Function(func), All | Fn) => {
+                    add_function_impl(acc, ctx, replacement_range, func, hir_impl)
+                }
+                (hir::AssocItem::TypeAlias(type_alias), All | TypeAlias) => {
+                    add_type_alias_impl(acc, ctx, replacement_range, type_alias)
+                }
+                (hir::AssocItem::Const(const_), All | Const) => {
+                    add_const_impl(acc, ctx, replacement_range, const_, hir_impl)
+                }
+                _ => {}
+            }
+        });
+    }
+}
+
+/// Adds a completion item for a missing trait function, rendering its full
+/// signature (with impl generics inlined) as the inserted text.
+fn add_function_impl(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    replacement_range: TextRange,
+    func: hir::Function,
+    impl_def: hir::Impl,
+) {
+    let fn_name = func.name(ctx.db);
+
+    // The label abbreviates the parameter list to `..` when non-empty.
+    let label = format!(
+        "fn {}({})",
+        fn_name,
+        if func.assoc_fn_params(ctx.db).is_empty() { "" } else { ".." }
+    );
+
+    let completion_kind = if func.has_self_param(ctx.db) {
+        CompletionItemKind::Method
+    } else {
+        CompletionItemKind::SymbolKind(SymbolKind::Function)
+    };
+
+    let mut item = CompletionItem::new(completion_kind, replacement_range, label);
+    // Match the item while the user types either `fn` or the function name.
+    item.lookup_by(format!("fn {}", fn_name))
+        .set_documentation(func.docs(ctx.db))
+        .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
+
+    if let Some(source) = ctx.sema.source(func) {
+        let assoc_item = ast::AssocItem::Fn(source.value);
+        if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
+            let transformed_fn = match transformed_item {
+                ast::AssocItem::Fn(func) => func,
+                _ => unreachable!(),
+            };
+
+            // Macro-generated sources lack whitespace between tokens and need
+            // it re-inserted before rendering (see `function_declaration`).
+            let function_decl = function_declaration(&transformed_fn, source.file_id.is_macro());
+            match ctx.config.snippet_cap {
+                Some(cap) => {
+                    // Snippet clients get a body with a tabstop inside it.
+                    let snippet = format!("{} {{\n    $0\n}}", function_decl);
+                    item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
+                }
+                None => {
+                    // Plain-text fallback: just the header and opening brace.
+                    let header = format!("{} {{", function_decl);
+                    item.text_edit(TextEdit::replace(replacement_range, header));
+                }
+            };
+            item.add_to(acc);
+        }
+    }
+}
+
+/// Transform a relevant associated item to inline generics from the impl, remove attrs and docs, etc.
+/// Transform a relevant associated item to inline generics from the impl, remove attrs and docs, etc.
+///
+/// Returns `None` when the impl has no trait or no source to resolve against.
+fn get_transformed_assoc_item(
+    ctx: &CompletionContext<'_>,
+    assoc_item: ast::AssocItem,
+    impl_def: hir::Impl,
+) -> Option<ast::AssocItem> {
+    // Mutable clone so `PathTransform` and attr removal can edit it in place.
+    let assoc_item = assoc_item.clone_for_update();
+    let trait_ = impl_def.trait_(ctx.db)?;
+    // Resolve the impl's source once; the previous code performed this
+    // semantic lookup twice for the same `impl_def`.
+    let impl_source = ctx.sema.source(impl_def)?;
+    let source_scope = &ctx.sema.scope_for_def(trait_);
+    let target_scope = &ctx.sema.scope(impl_source.syntax().value)?;
+    let transform =
+        PathTransform::trait_impl(target_scope, source_scope, trait_, impl_source.value);
+
+    transform.apply(assoc_item.syntax());
+    if let ast::AssocItem::Fn(func) = &assoc_item {
+        // Attributes/docs from the trait declaration should not be copied
+        // into the completed impl item.
+        func.remove_attrs_and_docs();
+    }
+    Some(assoc_item)
+}
+
+/// Adds a completion item for a missing associated type, inserting
+/// `type Name = ` with the cursor placed before the `;`.
+fn add_type_alias_impl(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    replacement_range: TextRange,
+    type_alias: hir::TypeAlias,
+) {
+    let alias_name = type_alias.name(ctx.db);
+    // Display the raw name in the label, but insert the escaped form (raw
+    // identifiers) into the edit.
+    let (alias_name, escaped_name) = (alias_name.to_smol_str(), alias_name.escaped().to_smol_str());
+
+    let label = format!("type {} =", alias_name);
+    let replacement = format!("type {} = ", escaped_name);
+
+    let mut item = CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label);
+    item.lookup_by(format!("type {}", alias_name))
+        .set_documentation(type_alias.docs(ctx.db))
+        .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
+    match ctx.config.snippet_cap {
+        // Snippet clients additionally get a tabstop before the `;`.
+        Some(cap) => item
+            .snippet_edit(cap, TextEdit::replace(replacement_range, format!("{}$0;", replacement))),
+        None => item.text_edit(TextEdit::replace(replacement_range, replacement)),
+    };
+    item.add_to(acc);
+}
+
+/// Adds a completion item for a missing associated const, inserting
+/// `const NAME: Ty = ` with the cursor placed before the `;`.
+fn add_const_impl(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    replacement_range: TextRange,
+    const_: hir::Const,
+    impl_def: hir::Impl,
+) {
+    // Unnamed consts (`const _: () = ...`) have no name and are skipped.
+    let const_name = const_.name(ctx.db).map(|n| n.to_smol_str());
+
+    if let Some(const_name) = const_name {
+        if let Some(source) = ctx.sema.source(const_) {
+            let assoc_item = ast::AssocItem::Const(source.value);
+            if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
+                let transformed_const = match transformed_item {
+                    ast::AssocItem::Const(const_) => const_,
+                    _ => unreachable!(),
+                };
+
+                // Render the declaration up to the `=`/`;` (macro sources get
+                // whitespace re-inserted first).
+                let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro());
+                let replacement = format!("{} ", label);
+
+                let mut item = CompletionItem::new(SymbolKind::Const, replacement_range, label);
+                item.lookup_by(format!("const {}", const_name))
+                    .set_documentation(const_.docs(ctx.db))
+                    .set_relevance(CompletionRelevance {
+                        is_item_from_trait: true,
+                        ..Default::default()
+                    });
+                match ctx.config.snippet_cap {
+                    // Snippet clients additionally get a tabstop before the `;`.
+                    Some(cap) => item.snippet_edit(
+                        cap,
+                        TextEdit::replace(replacement_range, format!("{}$0;", replacement)),
+                    ),
+                    None => item.text_edit(TextEdit::replace(replacement_range, replacement)),
+                };
+                item.add_to(acc);
+            }
+        }
+    }
+}
+
+/// Renders `const_` as label text: the declaration up to (but not including)
+/// its initializer `=` or trailing `;`, with ` =` appended.
+fn make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> String {
+    const_.remove_attrs_and_docs();
+    // Nodes originating from macro expansion have no whitespace between
+    // tokens; re-insert it so the rendered text is readable.
+    let const_ = if needs_whitespace {
+        insert_whitespace_into_node::insert_ws_into(const_.syntax().clone())
+    } else {
+        const_.syntax().clone()
+    };
+
+    let start = const_.text_range().start();
+    let const_end = const_.text_range().end();
+
+    // Cut off at the first `;` or `=` token; fall back to the full node.
+    let end = const_
+        .children_with_tokens()
+        .find(|s| s.kind() == T![;] || s.kind() == T![=])
+        .map_or(const_end, |f| f.text_range().start());
+
+    // Slice relative to the node's own text (which starts at offset 0).
+    let len = end - start;
+    let range = TextRange::new(0.into(), len);
+
+    let syntax = const_.text().slice(range).to_string();
+
+    format!("{} =", syntax.trim_end())
+}
+
+/// Renders `node`'s signature as text, stripping attrs/docs and cutting off
+/// the trailing `;` or body block.
+fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String {
+    node.remove_attrs_and_docs();
+
+    // Nodes originating from macro expansion have no whitespace between
+    // tokens; re-insert it so the rendered text is readable.
+    let node = if needs_whitespace {
+        insert_whitespace_into_node::insert_ws_into(node.syntax().clone())
+    } else {
+        node.syntax().clone()
+    };
+
+    let start = node.text_range().start();
+    let end = node.text_range().end();
+
+    // A fn's last child is either the `;` (trait decl) or the body block;
+    // stop before it, keeping just the signature.
+    let end = node
+        .last_child_or_token()
+        .filter(|s| s.kind() == T![;] || s.kind() == SyntaxKind::BLOCK_EXPR)
+        .map_or(end, |f| f.text_range().start());
+
+    // Slice relative to the node's own text (which starts at offset 0).
+    let len = end - start;
+    let range = TextRange::new(0.into(), len);
+
+    let syntax = node.text().slice(range).to_string();
+
+    syntax.trim_end().to_owned()
+}
+
+/// Computes the range the completion edit replaces: from the first non-trivia
+/// child of `item` (comments, whitespace and attributes are preserved) to the
+/// end of the cursor position.
+fn replacement_range(ctx: &CompletionContext<'_>, item: &SyntaxNode) -> TextRange {
+    let is_trivia =
+        |kind| matches!(kind, SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR);
+    let start = item
+        .children_with_tokens()
+        .find(|child| !is_trivia(child.kind()))
+        .map_or_else(|| item.text_range().start(), |child| child.text_range().start());
+
+    TextRange::new(start, ctx.source_range().end())
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list_no_kw};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual)
+ }
+
+ #[test]
+ fn no_completion_inside_fn() {
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ t$0
+ }
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ fn t$0
+ }
+}
+",
+ expect![[""]],
+ );
+
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ fn $0
+ }
+}
+",
+ expect![[""]],
+ );
+
+ // https://github.com/rust-lang/rust-analyzer/pull/5976#issuecomment-692332191
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ foo.$0
+ }
+}
+",
+ expect![[r#""#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(_: i32); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test(t$0)
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ bn &mut self
+ bn &self
+ bn mut self
+ bn self
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(_: fn()); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test(f: fn $0)
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ "#]],
+ );
+ }
+
+ #[test]
+ fn no_completion_inside_const() {
+ check(
+ r"
+trait Test { const TEST: fn(); const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: fn $0
+}
+",
+ expect![[r#""#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: T$0
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = f$0
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ t$0
+ };
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ fn $0
+ };
+}
+",
+ expect![[""]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ fn t$0
+ };
+}
+",
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn no_completion_inside_type() {
+ check(
+ r"
+trait Test { type Test; type Test2; fn test(); }
+struct T;
+
+impl Test for T {
+ type Test = T$0;
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { type Test; type Test2; fn test(); }
+struct T;
+
+impl Test for T {
+ type Test = fn $0;
+}
+",
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn name_ref_single_function() {
+ check_edit(
+ "fn test",
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ t$0
+}
+"#,
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn test() {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_function() {
+ check_edit(
+ "fn test",
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn t$0
+}
+"#,
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn test() {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generic_fn() {
+ check_edit(
+ "fn foo",
+ r#"
+trait Test {
+ fn foo<T>();
+}
+struct T;
+
+impl Test for T {
+ fn f$0
+}
+"#,
+ r#"
+trait Test {
+ fn foo<T>();
+}
+struct T;
+
+impl Test for T {
+ fn foo<T>() {
+ $0
+}
+}
+"#,
+ );
+ check_edit(
+ "fn foo",
+ r#"
+trait Test {
+ fn foo<T>() where T: Into<String>;
+}
+struct T;
+
+impl Test for T {
+ fn f$0
+}
+"#,
+ r#"
+trait Test {
+ fn foo<T>() where T: Into<String>;
+}
+struct T;
+
+impl Test for T {
+ fn foo<T>() where T: Into<String> {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_type() {
+ check_edit(
+ "type SomeType",
+ r#"
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type S$0
+}
+"#,
+ "
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type SomeType = $0;\n\
+}
+",
+ );
+ check_edit(
+ "type SomeType",
+ r#"
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type$0
+}
+"#,
+ "
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type SomeType = $0;\n\
+}
+",
+ );
+ }
+
+ #[test]
+ fn associated_const() {
+ check_edit(
+ "const SOME_CONST",
+ r#"
+trait Test {
+ const SOME_CONST: u16;
+}
+
+impl Test for () {
+ const S$0
+}
+"#,
+ "
+trait Test {
+ const SOME_CONST: u16;
+}
+
+impl Test for () {
+ const SOME_CONST: u16 = $0;\n\
+}
+",
+ );
+
+ check_edit(
+ "const SOME_CONST",
+ r#"
+trait Test {
+ const SOME_CONST: u16 = 92;
+}
+
+impl Test for () {
+ const S$0
+}
+"#,
+ "
+trait Test {
+ const SOME_CONST: u16 = 92;
+}
+
+impl Test for () {
+ const SOME_CONST: u16 = $0;\n\
+}
+",
+ );
+ }
+
+ #[test]
+ fn complete_without_name() {
+ let test = |completion: &str, hint: &str, completed: &str, next_sibling: &str| {
+ check_edit(
+ completion,
+ &format!(
+ r#"
+trait Test {{
+ type Foo;
+ const CONST: u16;
+ fn bar();
+}}
+struct T;
+
+impl Test for T {{
+ {}
+ {}
+}}
+"#,
+ hint, next_sibling
+ ),
+ &format!(
+ r#"
+trait Test {{
+ type Foo;
+ const CONST: u16;
+ fn bar();
+}}
+struct T;
+
+impl Test for T {{
+ {}
+ {}
+}}
+"#,
+ completed, next_sibling
+ ),
+ )
+ };
+
+ // Enumerate some possible next siblings.
+ for next_sibling in &[
+ "",
+ "fn other_fn() {}", // `const $0 fn` -> `const fn`
+ "type OtherType = i32;",
+ "const OTHER_CONST: i32 = 0;",
+ "async fn other_fn() {}",
+ "unsafe fn other_fn() {}",
+ "default fn other_fn() {}",
+ "default type OtherType = i32;",
+ "default const OTHER_CONST: i32 = 0;",
+ ] {
+ test("fn bar", "fn $0", "fn bar() {\n $0\n}", next_sibling);
+ test("type Foo", "type $0", "type Foo = $0;", next_sibling);
+ test("const CONST", "const $0", "const CONST: u16 = $0;", next_sibling);
+ }
+ }
+
+ #[test]
+ fn snippet_does_not_overwrite_comment_or_attr() {
+ let test = |completion: &str, hint: &str, completed: &str| {
+ check_edit(
+ completion,
+ &format!(
+ r#"
+trait Foo {{
+ type Type;
+ fn function();
+ const CONST: i32 = 0;
+}}
+struct T;
+
+impl Foo for T {{
+ // Comment
+ #[bar]
+ {}
+}}
+"#,
+ hint
+ ),
+ &format!(
+ r#"
+trait Foo {{
+ type Type;
+ fn function();
+ const CONST: i32 = 0;
+}}
+struct T;
+
+impl Foo for T {{
+ // Comment
+ #[bar]
+ {}
+}}
+"#,
+ completed
+ ),
+ )
+ };
+ test("fn function", "fn f$0", "fn function() {\n $0\n}");
+ test("type Type", "type T$0", "type Type = $0;");
+ test("const CONST", "const C$0", "const CONST: i32 = $0;");
+ }
+
+ #[test]
+ fn generics_are_inlined_in_return_type() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function() -> T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function() -> T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function() -> u32 {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_parameter() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function(bar: T);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function(bar: T);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function(bar: u32) {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_when_part_of_other_types() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function(bar: Vec<T>);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function(bar: Vec<T>);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function(bar: Vec<u32>) {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_complex() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T, U, V> {
+ fn function(bar: Vec<T>, baz: U) -> Arc<Vec<V>>;
+}
+struct Bar;
+
+impl Foo<u32, Vec<usize>, u8> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T, U, V> {
+ fn function(bar: Vec<T>, baz: U) -> Arc<Vec<V>>;
+}
+struct Bar;
+
+impl Foo<u32, Vec<usize>, u8> for Bar {
+ fn function(bar: Vec<u32>, baz: Vec<usize>) -> Arc<Vec<u8>> {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_associated_const() {
+ check_edit(
+ "const BAR",
+ r#"
+trait Foo<T> {
+ const BAR: T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ const B$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ const BAR: T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ const BAR: u32 = $0;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_where_clause() {
+ check_edit(
+ "fn function",
+ r#"
+trait SomeTrait<T> {}
+
+trait Foo<T> {
+ fn function()
+ where Self: SomeTrait<T>;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait SomeTrait<T> {}
+
+trait Foo<T> {
+ fn function()
+ where Self: SomeTrait<T>;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function()
+ where Self: SomeTrait<u32> {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn works_directly_in_impl() {
+ check(
+ r#"
+trait Tr {
+ fn required();
+}
+
+impl Tr for () {
+ $0
+}
+"#,
+ expect![[r#"
+ fn fn required()
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ fn provided() {}
+ fn required();
+}
+
+impl Tr for () {
+ fn provided() {}
+ $0
+}
+"#,
+ expect![[r#"
+ fn fn required()
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fixes_up_macro_generated() {
+ check_edit(
+ "fn foo",
+ r#"
+macro_rules! noop {
+ ($($item: item)*) => {
+ $($item)*
+ }
+}
+
+noop! {
+ trait Foo {
+ fn foo(&mut self, bar: i64, baz: &mut u32) -> Result<(), u32>;
+ }
+}
+
+struct Test;
+
+impl Foo for Test {
+ $0
+}
+"#,
+ r#"
+macro_rules! noop {
+ ($($item: item)*) => {
+ $($item)*
+ }
+}
+
+noop! {
+ trait Foo {
+ fn foo(&mut self, bar: i64, baz: &mut u32) -> Result<(), u32>;
+ }
+}
+
+struct Test;
+
+impl Foo for Test {
+ fn foo(&mut self,bar:i64,baz: &mut u32) -> Result<(),u32> {
+ $0
+}
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
new file mode 100644
index 000000000..3989a451b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
@@ -0,0 +1,237 @@
+//! Completes `where` and `for` keywords.
+
+use syntax::ast::{self, Item};
+
+use crate::{CompletionContext, Completions};
+
+/// Offers the `for` and `where` keywords after the header of an item that
+/// supports them.
+pub(crate) fn complete_for_and_where(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    keyword_item: &ast::Item,
+) {
+    let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+
+    match keyword_item {
+        Item::Impl(it) => {
+            // `for` only fits an `impl $0` that already has a self type but no
+            // trait path or `for` token yet (`impl Trait for Type`).
+            let for_applicable =
+                it.for_token().is_none() && it.trait_().is_none() && it.self_ty().is_some();
+            if for_applicable {
+                add_keyword("for", "for");
+            }
+            add_keyword("where", "where");
+        }
+        // Every generic-parameter-carrying item accepts a `where` clause.
+        Item::Enum(_)
+        | Item::Fn(_)
+        | Item::Struct(_)
+        | Item::Trait(_)
+        | Item::TypeAlias(_)
+        | Item::Union(_) => add_keyword("where", "where"),
+        _ => {}
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+ }
+
+ #[test]
+ fn test_else_edit_after_if() {
+ check_edit(
+ "else",
+ r#"fn quux() { if true { () } $0 }"#,
+ r#"fn quux() { if true { () } else {
+ $0
+} }"#,
+ );
+ }
+
+ #[test]
+ fn test_keywords_after_unsafe_in_block_expr() {
+ check(
+ r"fn my_fn() { unsafe $0 }",
+ expect![[r#"
+ kw fn
+ kw impl
+ kw trait
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_completion_await_impls_future() {
+ check(
+ r#"
+//- minicore: future
+use core::future::*;
+struct A {}
+impl Future for A {}
+fn foo(a: A) { a.$0 }
+"#,
+ expect![[r#"
+ kw await expr.await
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ );
+
+ check(
+ r#"
+//- minicore: future
+use std::future::*;
+fn foo() {
+ let a = async {};
+ a.$0
+}
+"#,
+ expect![[r#"
+ kw await expr.await
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+ }
+
+ #[test]
+ fn let_semi() {
+ cov_mark::check!(let_semi);
+ check_edit(
+ "match",
+ r#"
+fn main() { let x = $0 }
+"#,
+ r#"
+fn main() { let x = match $1 {
+ $0
+}; }
+"#,
+ );
+
+ check_edit(
+ "if",
+ r#"
+fn main() {
+ let x = $0
+ let y = 92;
+}
+"#,
+ r#"
+fn main() {
+ let x = if $1 {
+ $0
+};
+ let y = 92;
+}
+"#,
+ );
+
+ check_edit(
+ "loop",
+ r#"
+fn main() {
+ let x = $0
+ bar();
+}
+"#,
+ r#"
+fn main() {
+ let x = loop {
+ $0
+};
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn if_completion_in_match_guard() {
+ check_edit(
+ "if",
+ r"
+fn main() {
+ match () {
+ () $0
+ }
+}
+",
+ r"
+fn main() {
+ match () {
+ () if $0
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn if_completion_in_match_arm_expr() {
+ check_edit(
+ "if",
+ r"
+fn main() {
+ match () {
+ () => $0
+ }
+}
+",
+ r"
+fn main() {
+ match () {
+ () => if $1 {
+ $0
+}
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn if_completion_in_match_arm_expr_block() {
+ check_edit(
+ "if",
+ r"
+fn main() {
+ match () {
+ () => {
+ $0
+ }
+ }
+}
+",
+ r"
+fn main() {
+ match () {
+ () => {
+ if $1 {
+ $0
+}
+ }
+ }
+}
+",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs
new file mode 100644
index 000000000..3b79def63
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs
@@ -0,0 +1,341 @@
+//! Completes lifetimes and labels.
+//!
+//! These completions work a bit differently in that they are only shown when what the user types
+//! has a `'` preceding it, as our fake syntax tree is invalid otherwise (due to us not inserting
+//! a lifetime but an ident for obvious reasons).
+//! Due to this all the tests for lifetimes and labels live in this module for the time being as
+//! there is no value in lifting these out into the outline module test since they will either not
+//! show up for normal completions, or they won't show completions other than lifetimes depending
+//! on the fixture input.
+use hir::{known, ScopeDef};
+use syntax::{ast, TokenText};
+
+use crate::{
+ completions::Completions,
+ context::{CompletionContext, LifetimeContext, LifetimeKind},
+};
+
+/// Completes lifetimes.
+pub(crate) fn complete_lifetime(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    lifetime_ctx: &LifetimeContext,
+) {
+    // Complete in plain lifetime position, or in a lifetime-param position
+    // that is a use (bound) rather than a declaration; in `<'a: 'b$0>` the
+    // param `lp` is the `'a` whose bound is being completed.
+    let (lp, lifetime) = match lifetime_ctx {
+        LifetimeContext { kind: LifetimeKind::Lifetime, lifetime } => (None, lifetime),
+        LifetimeContext {
+            kind: LifetimeKind::LifetimeParam { is_decl: false, param },
+            lifetime,
+        } => (Some(param), lifetime),
+        _ => return,
+    };
+    let param_lifetime = match (lifetime, lp.and_then(|lp| lp.lifetime())) {
+        // The cursor lifetime is the param's own lifetime token — nothing
+        // sensible to complete here.
+        (Some(lt), Some(lp)) if lp == lt.clone() => return,
+        (Some(_), Some(lp)) => Some(lp),
+        _ => None,
+    };
+    let param_lifetime = param_lifetime.as_ref().map(ast::Lifetime::text);
+    let param_lifetime = param_lifetime.as_ref().map(TokenText::as_str);
+
+    // Suggest all in-scope lifetime params except the one being bounded.
+    ctx.process_all_names_raw(&mut |name, res| {
+        if matches!(
+            res,
+            ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_))
+                if param_lifetime != Some(&*name.to_smol_str())
+        ) {
+            acc.add_lifetime(ctx, name);
+        }
+    });
+    // `'static` is only offered in plain lifetime position, not when bounding
+    // another lifetime param (matches the `complete_lifetime_in_param_list`
+    // test expectations below).
+    if param_lifetime.is_none() {
+        acc.add_lifetime(ctx, known::STATIC_LIFETIME);
+    }
+}
+
+/// Completes labels (`'label`) at `break`/`continue` reference positions.
+pub(crate) fn complete_label(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    lifetime_ctx: &LifetimeContext,
+) {
+    // Only offer labels where a label reference is syntactically valid.
+    if let LifetimeContext { kind: LifetimeKind::LabelRef, .. } = lifetime_ctx {
+        ctx.process_all_names_raw(&mut |name, res| {
+            if matches!(res, ScopeDef::Label(_)) {
+                acc.add_label(ctx, name);
+            }
+        });
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn check_lifetime_edit() {
+ check_edit(
+ "'lifetime",
+ r#"
+fn func<'lifetime>(foo: &'li$0) {}
+"#,
+ r#"
+fn func<'lifetime>(foo: &'lifetime) {}
+"#,
+ );
+ cov_mark::check!(completes_if_lifetime_without_idents);
+ check_edit(
+ "'lifetime",
+ r#"
+fn func<'lifetime>(foo: &'$0) {}
+"#,
+ r#"
+fn func<'lifetime>(foo: &'lifetime) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_ref() {
+ check(
+ r#"
+fn foo<'lifetime>(foo: &'a$0 usize) {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_ref_missing_ty() {
+ check(
+ r#"
+fn foo<'lifetime>(foo: &'a$0) {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+ #[test]
+ fn complete_lifetime_in_self_ref() {
+ check(
+ r#"
+struct Foo;
+impl<'impl> Foo {
+ fn foo<'func>(&'a$0 self) {}
+}
+"#,
+ expect![[r#"
+ lt 'func
+ lt 'impl
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_arg_list() {
+ check(
+ r#"
+struct Foo<'lt>;
+fn foo<'lifetime>(_: Foo<'a$0>) {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_where_pred() {
+ check(
+ r#"
+fn foo2<'lifetime, T>() where 'a$0 {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_ty_bound() {
+ check(
+ r#"
+fn foo2<'lifetime, T>() where T: 'a$0 {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ check(
+ r#"
+fn foo2<'lifetime, T>() where T: Trait<'a$0> {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn dont_complete_lifetime_in_assoc_ty_bound() {
+ check(
+ r#"
+fn foo2<'lifetime, T>() where T: Trait<Item = 'a$0> {}
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_param_list() {
+ check(
+ r#"
+fn foo<'$0>() {}
+"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+fn foo<'a$0>() {}
+"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+fn foo<'footime, 'lifetime: 'a$0>() {}
+"#,
+ expect![[r#"
+ lt 'footime
+ "#]],
+ );
+ }
+
+ #[test]
+ fn check_label_edit() {
+ check_edit(
+ "'label",
+ r#"
+fn foo() {
+ 'label: loop {
+ break '$0
+ }
+}
+"#,
+ r#"
+fn foo() {
+ 'label: loop {
+ break 'label
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_label_in_loop() {
+ check(
+ r#"
+fn foo() {
+ 'foop: loop {
+ break '$0
+ }
+}
+"#,
+ expect![[r#"
+ lb 'foop
+ "#]],
+ );
+ check(
+ r#"
+fn foo() {
+ 'foop: loop {
+ continue '$0
+ }
+}
+"#,
+ expect![[r#"
+ lb 'foop
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_label_in_block_nested() {
+ check(
+ r#"
+fn foo() {
+ 'foop: {
+ 'baap: {
+ break '$0
+ }
+ }
+}
+"#,
+ expect![[r#"
+ lb 'baap
+ lb 'foop
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_label_in_loop_with_value() {
+ check(
+ r#"
+fn foo() {
+ 'foop: loop {
+ break '$0 i32;
+ }
+}
+"#,
+ expect![[r#"
+ lb 'foop
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_label_in_while_cond() {
+ check(
+ r#"
+fn foo() {
+ 'outer: while { 'inner: loop { break '$0 } } {}
+}
+"#,
+ expect![[r#"
+ lb 'inner
+ lb 'outer
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_label_in_for_iterable() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in [{ 'inner: loop { break '$0 } }] {}
+}
+"#,
+ expect![[r#"
+ lb 'inner
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
new file mode 100644
index 000000000..9c975b929
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -0,0 +1,354 @@
+//! Completes mod declarations.
+
+use std::iter;
+
+use hir::{Module, ModuleSource};
+use ide_db::{
+ base_db::{SourceDatabaseExt, VfsPath},
+ FxHashSet, RootDatabase, SymbolKind,
+};
+use syntax::{ast, AstNode, SyntaxKind};
+
+use crate::{context::CompletionContext, CompletionItem, Completions};
+
+/// Complete mod declaration, i.e. `mod $0;`
+pub(crate) fn complete_mod(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ mod_under_caret: &ast::Module,
+) -> Option<()> {
+ if mod_under_caret.item_list().is_some() {
+ return None;
+ }
+
+ let _p = profile::span("completion::complete_mod");
+
+ let mut current_module = ctx.module;
+    // For `mod $0`, `ctx.module` is already its parent, while for `mod f$0` it is `mod f`
+    // itself; in either case it is the parent module we are interested in.
+ if ctx.original_token.kind() == SyntaxKind::IDENT {
+ if let Some(module) =
+ ctx.original_token.parent_ancestors().nth(1).and_then(ast::Module::cast)
+ {
+ match ctx.sema.to_def(&module) {
+ Some(module) if module == current_module => {
+ if let Some(parent) = current_module.parent(ctx.db) {
+ current_module = parent;
+ }
+ }
+ _ => {}
+ }
+ }
+ }
+
+ let module_definition_file =
+ current_module.definition_source(ctx.db).file_id.original_file(ctx.db);
+ let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file));
+ let directory_to_look_for_submodules = directory_to_look_for_submodules(
+ current_module,
+ ctx.db,
+ source_root.path_for_file(&module_definition_file)?,
+ )?;
+
+ let existing_mod_declarations = current_module
+ .children(ctx.db)
+ .filter_map(|module| Some(module.name(ctx.db)?.to_string()))
+ .collect::<FxHashSet<_>>();
+
+ let module_declaration_file =
+ current_module.declaration_source(ctx.db).map(|module_declaration_source_file| {
+ module_declaration_source_file.file_id.original_file(ctx.db)
+ });
+
+ source_root
+ .iter()
+ .filter(|submodule_candidate_file| submodule_candidate_file != &module_definition_file)
+ .filter(|submodule_candidate_file| {
+ Some(submodule_candidate_file) != module_declaration_file.as_ref()
+ })
+ .filter_map(|submodule_file| {
+ let submodule_path = source_root.path_for_file(&submodule_file)?;
+ let directory_with_submodule = submodule_path.parent()?;
+ let (name, ext) = submodule_path.name_and_extension()?;
+ if ext != Some("rs") {
+ return None;
+ }
+ match name {
+ "lib" | "main" => None,
+ "mod" => {
+ if directory_with_submodule.parent()? == directory_to_look_for_submodules {
+ match directory_with_submodule.name_and_extension()? {
+ (directory_name, None) => Some(directory_name.to_owned()),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ }
+ file_name if directory_with_submodule == directory_to_look_for_submodules => {
+ Some(file_name.to_owned())
+ }
+ _ => None,
+ }
+ })
+ .filter(|name| !existing_mod_declarations.contains(name))
+ .for_each(|submodule_name| {
+ let mut label = submodule_name;
+ if mod_under_caret.semicolon_token().is_none() {
+ label.push(';');
+ }
+ let item = CompletionItem::new(SymbolKind::Module, ctx.source_range(), &label);
+ item.add_to(acc)
+ });
+
+ Some(())
+}
+
+fn directory_to_look_for_submodules(
+ module: Module,
+ db: &RootDatabase,
+ module_file_path: &VfsPath,
+) -> Option<VfsPath> {
+ let directory_with_module_path = module_file_path.parent()?;
+ let (name, ext) = module_file_path.name_and_extension()?;
+ if ext != Some("rs") {
+ return None;
+ }
+ let base_directory = match name {
+ "mod" | "lib" | "main" => Some(directory_with_module_path),
+ regular_rust_file_name => {
+ if matches!(
+ (
+ directory_with_module_path
+ .parent()
+ .as_ref()
+ .and_then(|path| path.name_and_extension()),
+ directory_with_module_path.name_and_extension(),
+ ),
+ (Some(("src", None)), Some(("bin", None)))
+ ) {
+ // files in /src/bin/ can import each other directly
+ Some(directory_with_module_path)
+ } else {
+ directory_with_module_path.join(regular_rust_file_name)
+ }
+ }
+ }?;
+
+ module_chain_to_containing_module_file(module, db)
+ .into_iter()
+ .filter_map(|module| module.name(db))
+ .try_fold(base_directory, |path, name| path.join(&name.to_smol_str()))
+}
+
+fn module_chain_to_containing_module_file(
+ current_module: Module,
+ db: &RootDatabase,
+) -> Vec<Module> {
+ let mut path =
+ iter::successors(Some(current_module), |current_module| current_module.parent(db))
+ .take_while(|current_module| {
+ matches!(current_module.definition_source(db).value, ModuleSource::Module(_))
+ })
+ .collect::<Vec<_>>();
+ path.reverse();
+ path
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::completion_list;
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn lib_module_completion() {
+ check(
+ r#"
+//- /lib.rs
+mod $0
+//- /foo.rs
+fn foo() {}
+//- /foo/ignored_foo.rs
+fn ignored_foo() {}
+//- /bar/mod.rs
+fn bar() {}
+//- /bar/ignored_bar.rs
+fn ignored_bar() {}
+"#,
+ expect![[r#"
+ md bar;
+ md foo;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn no_module_completion_with_module_body() {
+ check(
+ r#"
+//- /lib.rs
+mod $0 {
+
+}
+//- /foo.rs
+fn foo() {}
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn main_module_completion() {
+ check(
+ r#"
+//- /main.rs
+mod $0
+//- /foo.rs
+fn foo() {}
+//- /foo/ignored_foo.rs
+fn ignored_foo() {}
+//- /bar/mod.rs
+fn bar() {}
+//- /bar/ignored_bar.rs
+fn ignored_bar() {}
+"#,
+ expect![[r#"
+ md bar;
+ md foo;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn main_test_module_completion() {
+ check(
+ r#"
+//- /main.rs
+mod tests {
+ mod $0;
+}
+//- /tests/foo.rs
+fn foo() {}
+"#,
+ expect![[r#"
+ md foo
+ "#]],
+ );
+ }
+
+ #[test]
+ fn directly_nested_module_completion() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+mod $0;
+//- /foo/bar.rs
+fn bar() {}
+//- /foo/bar/ignored_bar.rs
+fn ignored_bar() {}
+//- /foo/baz/mod.rs
+fn baz() {}
+//- /foo/moar/ignored_moar.rs
+fn ignored_moar() {}
+"#,
+ expect![[r#"
+ md bar
+ md baz
+ "#]],
+ );
+ }
+
+ #[test]
+ fn nested_in_source_module_completion() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+mod bar {
+ mod $0
+}
+//- /foo/bar/baz.rs
+fn baz() {}
+"#,
+ expect![[r#"
+ md baz;
+ "#]],
+ );
+ }
+
+ // FIXME binary modules are not supported in tests properly
+    // Binary modules are a bit special: they allow importing the modules from `/src/bin`,
+    // which makes them good for testing two things:
+    // * no cycles are allowed in mod declarations
+    // * no modules from the parent directory are proposed
+    // Unfortunately, binary module support lives in cargo, not rustc,
+    // hence the test does not work for now
+ //
+ // #[test]
+ // fn regular_bin_module_completion() {
+ // check(
+ // r#"
+ // //- /src/bin.rs
+ // fn main() {}
+ // //- /src/bin/foo.rs
+ // mod $0
+ // //- /src/bin/bar.rs
+ // fn bar() {}
+ // //- /src/bin/bar/bar_ignored.rs
+ // fn bar_ignored() {}
+ // "#,
+ // expect![[r#"
+ // md bar;
+ // "#]],foo
+ // );
+ // }
+
+ #[test]
+ fn already_declared_bin_module_completion_omitted() {
+ check(
+ r#"
+//- /src/bin.rs crate:main
+fn main() {}
+//- /src/bin/foo.rs
+mod $0
+//- /src/bin/bar.rs
+mod foo;
+fn bar() {}
+//- /src/bin/bar/bar_ignored.rs
+fn bar_ignored() {}
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn name_partially_typed() {
+ check(
+ r#"
+//- /lib.rs
+mod f$0
+//- /foo.rs
+fn foo() {}
+//- /foo/ignored_foo.rs
+fn ignored_foo() {}
+//- /bar/mod.rs
+fn bar() {}
+//- /bar/ignored_bar.rs
+fn ignored_bar() {}
+"#,
+ expect![[r#"
+ md bar;
+ md foo;
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
new file mode 100644
index 000000000..71d2d9d43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
@@ -0,0 +1,185 @@
+//! Completes constants and paths in unqualified patterns.
+
+use hir::{db::DefDatabase, AssocItem, ScopeDef};
+use syntax::ast::Pat;
+
+use crate::{
+ context::{PathCompletionCtx, PatternContext, PatternRefutability, Qualified},
+ CompletionContext, Completions,
+};
+
+/// Completes constants and paths in unqualified patterns.
+pub(crate) fn complete_pattern(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+) {
+ match pattern_ctx.parent_pat.as_ref() {
+ Some(Pat::RangePat(_) | Pat::BoxPat(_)) => (),
+ Some(Pat::RefPat(r)) => {
+ if r.mut_token().is_none() {
+ acc.add_keyword(ctx, "mut");
+ }
+ }
+ _ => {
+ let tok = ctx.token.text_range().start();
+ match (pattern_ctx.ref_token.as_ref(), pattern_ctx.mut_token.as_ref()) {
+ (None, None) => {
+ acc.add_keyword(ctx, "ref");
+ acc.add_keyword(ctx, "mut");
+ }
+ (None, Some(m)) if tok < m.text_range().start() => {
+ acc.add_keyword(ctx, "ref");
+ }
+ (Some(r), None) if tok > r.text_range().end() => {
+ acc.add_keyword(ctx, "mut");
+ }
+ _ => (),
+ }
+ }
+ }
+
+ if pattern_ctx.record_pat.is_some() {
+ return;
+ }
+
+ let refutable = pattern_ctx.refutability == PatternRefutability::Refutable;
+ let single_variant_enum = |enum_: hir::Enum| ctx.db.enum_data(enum_.into()).variants.len() == 1;
+
+ if let Some(hir::Adt::Enum(e)) =
+ ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
+ {
+ if refutable || single_variant_enum(e) {
+ super::enum_variants_with_paths(
+ acc,
+ ctx,
+ e,
+ &pattern_ctx.impl_,
+ |acc, ctx, variant, path| {
+ acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path);
+ },
+ );
+ }
+ }
+
+ // FIXME: ideally, we should look at the type we are matching against and
+ // suggest variants + auto-imports
+ ctx.process_all_names(&mut |name, res| {
+ let add_simple_path = match res {
+ hir::ScopeDef::ModuleDef(def) => match def {
+ hir::ModuleDef::Adt(hir::Adt::Struct(strukt)) => {
+ acc.add_struct_pat(ctx, pattern_ctx, strukt, Some(name.clone()));
+ true
+ }
+ hir::ModuleDef::Variant(variant)
+ if refutable || single_variant_enum(variant.parent_enum(ctx.db)) =>
+ {
+ acc.add_variant_pat(ctx, pattern_ctx, None, variant, Some(name.clone()));
+ true
+ }
+ hir::ModuleDef::Adt(hir::Adt::Enum(e)) => refutable || single_variant_enum(e),
+ hir::ModuleDef::Const(..) => refutable,
+ hir::ModuleDef::Module(..) => true,
+ hir::ModuleDef::Macro(mac) => mac.is_fn_like(ctx.db),
+ _ => false,
+ },
+ hir::ScopeDef::ImplSelfType(impl_) => match impl_.self_ty(ctx.db).as_adt() {
+ Some(hir::Adt::Struct(strukt)) => {
+ acc.add_struct_pat(ctx, pattern_ctx, strukt, Some(name.clone()));
+ true
+ }
+ Some(hir::Adt::Enum(e)) => refutable || single_variant_enum(e),
+ Some(hir::Adt::Union(_)) => true,
+ _ => false,
+ },
+ ScopeDef::GenericParam(hir::GenericParam::ConstParam(_)) => true,
+ ScopeDef::GenericParam(_)
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Local(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown => false,
+ };
+ if add_simple_path {
+ acc.add_pattern_resolution(ctx, pattern_ctx, name, res);
+ }
+ });
+}
+
+pub(crate) fn complete_pattern_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+) {
+ match qualified {
+ Qualified::With { resolution: Some(resolution), super_chain_len, .. } => {
+ acc.add_super_keyword(ctx, *super_chain_len);
+
+ match resolution {
+ hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
+ let module_scope = module.scope(ctx.db, Some(ctx.module));
+ for (name, def) in module_scope {
+ let add_resolution = match def {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => {
+ mac.is_fn_like(ctx.db)
+ }
+ ScopeDef::ModuleDef(_) => true,
+ _ => false,
+ };
+
+ if add_resolution {
+ acc.add_path_resolution(ctx, path_ctx, name, def);
+ }
+ }
+ }
+ res => {
+ let ty = match res {
+ hir::PathResolution::TypeParam(param) => param.ty(ctx.db),
+ hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db),
+ hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Struct(s))) => {
+ s.ty(ctx.db)
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e))) => {
+ e.ty(ctx.db)
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Union(u))) => {
+ u.ty(ctx.db)
+ }
+ hir::PathResolution::Def(hir::ModuleDef::BuiltinType(ty)) => ty.ty(ctx.db),
+ _ => return,
+ };
+
+ if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+ acc.add_enum_variants(ctx, path_ctx, e);
+ }
+
+ ctx.iterate_path_candidates(&ty, |item| match item {
+ AssocItem::TypeAlias(ta) => acc.add_type_alias(ctx, ta),
+ AssocItem::Const(c) => acc.add_const(ctx, c),
+ _ => {}
+ });
+ }
+ }
+ }
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ Qualified::No => {
+ // this will only be hit if there are brackets or braces, otherwise this will be parsed as an ident pattern
+ ctx.process_all_names(&mut |name, res| {
+ // FIXME: we should check what kind of pattern we are in and filter accordingly
+ let add_completion = match res {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => mac.is_fn_like(ctx.db),
+ ScopeDef::ModuleDef(hir::ModuleDef::Adt(_)) => true,
+ ScopeDef::ModuleDef(hir::ModuleDef::Variant(_)) => true,
+ ScopeDef::ModuleDef(hir::ModuleDef::Module(_)) => true,
+ ScopeDef::ImplSelfType(_) => true,
+ _ => false,
+ };
+ if add_completion {
+ acc.add_path_resolution(ctx, path_ctx, name, res);
+ }
+ });
+
+ acc.add_nameref_keywords_with_colon(ctx);
+ }
+ Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
new file mode 100644
index 000000000..9a891cea2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -0,0 +1,616 @@
+//! Postfix completions, like `Ok(10).ifl$0` => `if let Ok() = Ok(10) { $0 }`.
+
+mod format_like;
+
+use hir::{Documentation, HasAttrs};
+use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap};
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ SyntaxKind::{EXPR_STMT, STMT_LIST},
+ TextRange, TextSize,
+};
+use text_edit::TextEdit;
+
+use crate::{
+ completions::postfix::format_like::add_format_like_completions,
+ context::{CompletionContext, DotAccess, DotAccessKind},
+ item::{Builder, CompletionRelevancePostfixMatch},
+ CompletionItem, CompletionItemKind, CompletionRelevance, Completions, SnippetScope,
+};
+
+pub(crate) fn complete_postfix(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+) {
+ if !ctx.config.enable_postfix_completions {
+ return;
+ }
+
+ let (dot_receiver, receiver_ty, receiver_is_ambiguous_float_literal) = match dot_access {
+ DotAccess { receiver_ty: Some(ty), receiver: Some(it), kind, .. } => (
+ it,
+ &ty.original,
+ match *kind {
+ DotAccessKind::Field { receiver_is_ambiguous_float_literal } => {
+ receiver_is_ambiguous_float_literal
+ }
+ DotAccessKind::Method { .. } => false,
+ },
+ ),
+ _ => return,
+ };
+
+ let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal);
+
+ let cap = match ctx.config.snippet_cap {
+ Some(it) => it,
+ None => return,
+ };
+
+ let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, dot_receiver) {
+ Some(it) => it,
+ None => return,
+ };
+
+ if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() {
+ if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) {
+ if let &[hir::AssocItem::Function(drop_fn)] = &*drop_trait.items(ctx.db) {
+ cov_mark::hit!(postfix_drop_completion);
+ // FIXME: check that `drop` is in scope, use fully qualified path if it isn't/if shadowed
+ let mut item = postfix_snippet(
+ "drop",
+ "fn drop(&mut self)",
+ &format!("drop($0{})", receiver_text),
+ );
+ item.set_documentation(drop_fn.docs(ctx.db));
+ item.add_to(acc);
+ }
+ }
+ }
+
+ if !ctx.config.snippets.is_empty() {
+ add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
+ }
+
+ let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
+ if let Some(try_enum) = &try_enum {
+ match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "ifl",
+ "if let Ok {}",
+ &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+
+ postfix_snippet(
+ "while",
+ "while let Ok {}",
+ &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "ifl",
+ "if let Some {}",
+ &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+
+ postfix_snippet(
+ "while",
+ "while let Some {}",
+ &format!("while let Some($1) = {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ }
+ } else if receiver_ty.is_bool() || receiver_ty.is_unknown() {
+ postfix_snippet("if", "if expr {}", &format!("if {} {{\n $0\n}}", receiver_text))
+ .add_to(acc);
+ postfix_snippet(
+ "while",
+ "while expr {}",
+ &format!("while {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+ postfix_snippet("not", "!expr", &format!("!{}", receiver_text)).add_to(acc);
+ } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() {
+ if receiver_ty.impls_trait(ctx.db, trait_, &[]) {
+ postfix_snippet(
+ "for",
+ "for ele in expr {}",
+ &format!("for ele in {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ }
+
+ postfix_snippet("ref", "&expr", &format!("&{}", receiver_text)).add_to(acc);
+ postfix_snippet("refm", "&mut expr", &format!("&mut {}", receiver_text)).add_to(acc);
+
+ // The rest of the postfix completions create an expression that moves an argument,
+ // so it's better to consider references now to avoid breaking the compilation
+ let dot_receiver = include_references(dot_receiver);
+ let receiver_text = get_receiver_text(&dot_receiver, receiver_is_ambiguous_float_literal);
+ let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, &dot_receiver) {
+ Some(it) => it,
+ None => return,
+ };
+
+ match try_enum {
+ Some(try_enum) => match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!("match {} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!(
+ "match {} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}",
+ receiver_text
+ ),
+ )
+ .add_to(acc);
+ }
+ },
+ None => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!("match {} {{\n ${{1:_}} => {{$0}},\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ }
+
+ postfix_snippet("box", "Box::new(expr)", &format!("Box::new({})", receiver_text)).add_to(acc);
+ postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc); // fixme
+ postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{})", receiver_text)).add_to(acc);
+ postfix_snippet("call", "function(expr)", &format!("${{1}}({})", receiver_text)).add_to(acc);
+
+ if let Some(parent) = dot_receiver.syntax().parent().and_then(|p| p.parent()) {
+ if matches!(parent.kind(), STMT_LIST | EXPR_STMT) {
+ postfix_snippet("let", "let", &format!("let $0 = {};", receiver_text)).add_to(acc);
+ postfix_snippet("letm", "let mut", &format!("let mut $0 = {};", receiver_text))
+ .add_to(acc);
+ }
+ }
+
+ if let ast::Expr::Literal(literal) = dot_receiver.clone() {
+ if let Some(literal_text) = ast::String::cast(literal.token()) {
+ add_format_like_completions(acc, ctx, &dot_receiver, cap, &literal_text);
+ }
+ }
+}
+
+fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String {
+ let text = if receiver_is_ambiguous_float_literal {
+ let text = receiver.syntax().text();
+ let without_dot = ..text.len() - TextSize::of('.');
+ text.slice(without_dot).to_string()
+ } else {
+ receiver.to_string()
+ };
+
+ // The receiver texts should be interpreted as-is, as they are expected to be
+ // normal Rust expressions. We escape '\' and '$' so they don't get treated as
+ // snippet-specific constructs.
+ //
+ // Note that we don't need to escape the other characters that can be escaped,
+ // because they wouldn't be treated as snippet-specific constructs without '$'.
+ text.replace('\\', "\\\\").replace('$', "\\$")
+}
+
+fn include_references(initial_element: &ast::Expr) -> ast::Expr {
+ let mut resulting_element = initial_element.clone();
+ while let Some(parent_ref_element) =
+ resulting_element.syntax().parent().and_then(ast::RefExpr::cast)
+ {
+ resulting_element = ast::Expr::from(parent_ref_element);
+ }
+ resulting_element
+}
+
+fn build_postfix_snippet_builder<'ctx>(
+ ctx: &'ctx CompletionContext<'_>,
+ cap: SnippetCap,
+ receiver: &'ctx ast::Expr,
+) -> Option<impl Fn(&str, &str, &str) -> Builder + 'ctx> {
+ let receiver_syntax = receiver.syntax();
+ let receiver_range = ctx.sema.original_range_opt(receiver_syntax)?.range;
+ if ctx.source_range().end() < receiver_range.start() {
+ // This shouldn't happen, yet it does. I assume this might be due to an incorrect token mapping.
+ return None;
+ }
+ let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end());
+
+ // Wrapping impl Fn in an option ruins lifetime inference for the parameters in a way that
+ // can't be annotated for the closure, hence fix it by constructing it without the Option first
+ fn build<'ctx>(
+ ctx: &'ctx CompletionContext<'_>,
+ cap: SnippetCap,
+ delete_range: TextRange,
+ ) -> impl Fn(&str, &str, &str) -> Builder + 'ctx {
+ move |label, detail, snippet| {
+ let edit = TextEdit::replace(delete_range, snippet.to_string());
+ let mut item =
+ CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label);
+ item.detail(detail).snippet_edit(cap, edit);
+ let postfix_match = if ctx.original_token.text() == label {
+ cov_mark::hit!(postfix_exact_match_is_high_priority);
+ Some(CompletionRelevancePostfixMatch::Exact)
+ } else {
+ cov_mark::hit!(postfix_inexact_match_is_low_priority);
+ Some(CompletionRelevancePostfixMatch::NonExact)
+ };
+ let relevance = CompletionRelevance { postfix_match, ..Default::default() };
+ item.set_relevance(relevance);
+ item
+ }
+ }
+ Some(build(ctx, cap, delete_range))
+}
+
+fn add_custom_postfix_completions(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ postfix_snippet: impl Fn(&str, &str, &str) -> Builder,
+ receiver_text: &str,
+) -> Option<()> {
+ if ImportScope::find_insert_use_container(&ctx.token.parent()?, &ctx.sema).is_none() {
+ return None;
+ }
+ ctx.config.postfix_snippets().filter(|(_, snip)| snip.scope == SnippetScope::Expr).for_each(
+ |(trigger, snippet)| {
+ let imports = match snippet.imports(ctx) {
+ Some(imports) => imports,
+ None => return,
+ };
+ let body = snippet.postfix_snippet(receiver_text);
+ let mut builder =
+ postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
+ builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
+ for import in imports.into_iter() {
+ builder.add_import(import);
+ }
+ builder.add_to(acc);
+ },
+ );
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::{
+ tests::{check_edit, check_edit_with_config, completion_list, TEST_CONFIG},
+ CompletionConfig, Snippet,
+ };
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+ }
+
+ #[test]
+ fn postfix_completion_works_for_trivial_path_expression() {
+ check(
+ r#"
+fn main() {
+ let bar = true;
+ bar.$0
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn if if expr {}
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn while while expr {}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn postfix_completion_works_for_function_calln() {
+ check(
+ r#"
+fn foo(elt: bool) -> bool {
+ !elt
+}
+
+fn main() {
+ let bar = true;
+ foo(bar.$0)
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn if if expr {}
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn while while expr {}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn postfix_type_filtering() {
+ check(
+ r#"
+fn main() {
+ let bar: u8 = 12;
+ bar.$0
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+ }
+
+ #[test]
+ fn let_middle_block() {
+ check(
+ r#"
+fn main() {
+ baz.l$0
+ res
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn if if expr {}
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn while while expr {}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn option_iflet() {
+ check_edit(
+ "ifl",
+ r#"
+//- minicore: option
+fn main() {
+ let bar = Some(true);
+ bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = Some(true);
+ if let Some($1) = bar {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn result_match() {
+ check_edit(
+ "match",
+ r#"
+//- minicore: result
+fn main() {
+ let bar = Ok(true);
+ bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = Ok(true);
+ match bar {
+ Ok(${1:_}) => {$2},
+ Err(${3:_}) => {$0},
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn postfix_completion_works_for_ambiguous_float_literal() {
+ check_edit("refm", r#"fn main() { 42.$0 }"#, r#"fn main() { &mut 42 }"#)
+ }
+
+ #[test]
+ fn works_in_simple_macro() {
+ check_edit(
+ "dbg",
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn main() {
+ let bar: u8 = 12;
+ m!(bar.d$0)
+}
+"#,
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn main() {
+ let bar: u8 = 12;
+ m!(dbg!(bar))
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn postfix_completion_for_references() {
+ check_edit("dbg", r#"fn main() { &&42.$0 }"#, r#"fn main() { dbg!(&&42) }"#);
+ check_edit("refm", r#"fn main() { &&42.$0 }"#, r#"fn main() { &&&mut 42 }"#);
+ check_edit(
+ "ifl",
+ r#"
+//- minicore: option
+fn main() {
+ let bar = &Some(true);
+ bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = &Some(true);
+ if let Some($1) = bar {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn custom_postfix_completion() {
+ let config = CompletionConfig {
+ snippets: vec![Snippet::new(
+ &[],
+ &["break".into()],
+ &["ControlFlow::Break(${receiver})".into()],
+ "",
+ &["core::ops::ControlFlow".into()],
+ crate::SnippetScope::Expr,
+ )
+ .unwrap()],
+ ..TEST_CONFIG
+ };
+
+ check_edit_with_config(
+ config.clone(),
+ "break",
+ r#"
+//- minicore: try
+fn main() { 42.$0 }
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn main() { ControlFlow::Break(42) }
+"#,
+ );
+
+ // The receiver texts should be escaped, see comments in `get_receiver_text()`
+ // for detail.
+ //
+ // Note that the last argument is what *lsp clients would see* rather than
+ // what users would see. Unescaping happens thereafter.
+ check_edit_with_config(
+ config.clone(),
+ "break",
+ r#"
+//- minicore: try
+fn main() { '\\'.$0 }
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn main() { ControlFlow::Break('\\\\') }
+"#,
+ );
+
+ check_edit_with_config(
+ config.clone(),
+ "break",
+ r#"
+//- minicore: try
+fn main() {
+ match true {
+ true => "${1:placeholder}",
+ false => "\$",
+ }.$0
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn main() {
+ ControlFlow::Break(match true {
+ true => "\${1:placeholder}",
+ false => "\\\$",
+ })
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn postfix_completion_for_format_like_strings() {
+ check_edit(
+ "format",
+ r#"fn main() { "{some_var:?}".$0 }"#,
+ r#"fn main() { format!("{:?}", some_var) }"#,
+ );
+ check_edit(
+ "panic",
+ r#"fn main() { "Panic with {a}".$0 }"#,
+ r#"fn main() { panic!("Panic with {}", a) }"#,
+ );
+ check_edit(
+ "println",
+ r#"fn main() { "{ 2+2 } { SomeStruct { val: 1, other: 32 } :?}".$0 }"#,
+ r#"fn main() { println!("{} {:?}", 2+2, SomeStruct { val: 1, other: 32 }) }"#,
+ );
+ check_edit(
+ "loge",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::error!("{}", 2+2) }"#,
+ );
+ check_edit(
+ "logt",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::trace!("{}", 2+2) }"#,
+ );
+ check_edit(
+ "logd",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::debug!("{}", 2+2) }"#,
+ );
+ check_edit("logi", r#"fn main() { "{2+2}".$0 }"#, r#"fn main() { log::info!("{}", 2+2) }"#);
+ check_edit("logw", r#"fn main() { "{2+2}".$0 }"#, r#"fn main() { log::warn!("{}", 2+2) }"#);
+ check_edit(
+ "loge",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::error!("{}", 2+2) }"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
new file mode 100644
index 000000000..6b94347e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
@@ -0,0 +1,311 @@
+// Feature: Format String Completion
+//
+// `"Result {result} is {2 + 2}"` is expanded to the `"Result {} is {}", result, 2 + 2`.
+//
+// The following postfix snippets are available:
+//
+// * `format` -> `format!(...)`
+// * `panic` -> `panic!(...)`
+// * `println` -> `println!(...)`
+// * `log`:
+// ** `logd` -> `log::debug!(...)`
+// ** `logt` -> `log::trace!(...)`
+// ** `logi` -> `log::info!(...)`
+// ** `logw` -> `log::warn!(...)`
+// ** `loge` -> `log::error!(...)`
+//
+// image::https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif[]
+
+use ide_db::SnippetCap;
+use syntax::ast::{self, AstToken};
+
+use crate::{
+ completions::postfix::build_postfix_snippet_builder, context::CompletionContext, Completions,
+};
+
+/// Mapping ("postfix completion item" => "macro to use")
+static KINDS: &[(&str, &str)] = &[
+ ("format", "format!"),
+ ("panic", "panic!"),
+ ("println", "println!"),
+ ("eprintln", "eprintln!"),
+ ("logd", "log::debug!"),
+ ("logt", "log::trace!"),
+ ("logi", "log::info!"),
+ ("logw", "log::warn!"),
+ ("loge", "log::error!"),
+];
+
+/// Adds postfix completions (`format`, `println`, `log*`, ...) for a string
+/// literal receiver, e.g. `"{x}".format` -> `format!("{}", x)`.
+///
+/// Does nothing if the receiver is not a plain string literal, if the literal
+/// does not parse as a format-like string, or if a postfix snippet builder
+/// cannot be constructed for this context.
+pub(crate) fn add_format_like_completions(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    dot_receiver: &ast::Expr,
+    cap: SnippetCap,
+    receiver_text: &ast::String,
+) {
+    let input = match string_literal_contents(receiver_text) {
+        // It's not a string literal, do not parse input.
+        Some(input) => input,
+        None => return,
+    };
+
+    let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, dot_receiver) {
+        Some(it) => it,
+        None => return,
+    };
+    let mut parser = FormatStrParser::new(input);
+
+    // Only offer the completions when the whole literal parses cleanly;
+    // otherwise we could emit malformed macro calls.
+    if parser.parse().is_ok() {
+        for (label, macro_name) in KINDS {
+            let snippet = parser.to_suggestion(macro_name);
+
+            postfix_snippet(label, macro_name, &snippet).add_to(acc);
+        }
+    }
+}
+
+/// Returns the contents of a double-quoted string literal (the text between
+/// the surrounding quotes), or `None` if the token is not a plain `"..."`
+/// literal.
+fn string_literal_contents(item: &ast::String) -> Option<String> {
+    let item = item.text();
+    if item.len() >= 2 && item.starts_with('\"') && item.ends_with('\"') {
+        return Some(item[1..item.len() - 1].to_owned());
+    }
+
+    None
+}
+
+/// Parser for a format-like string. It is more allowing in terms of string contents,
+/// as we expect variable placeholders to be filled with expressions.
+#[derive(Debug)]
+pub(crate) struct FormatStrParser {
+    /// Raw literal contents to parse (without the surrounding quotes).
+    input: String,
+    /// Resulting format string, with expressions replaced by `{}`/`{:spec}`.
+    output: String,
+    /// Expressions extracted from placeholders, in order of appearance.
+    extracted_expressions: Vec<String>,
+    /// Current state of the character-by-character state machine.
+    state: State,
+    /// Set to `true` once `parse()` has completed successfully.
+    parsed: bool,
+}
+
+/// States of the format-string state machine.
+#[derive(Debug, Clone, Copy, PartialEq)]
+enum State {
+    /// Plain text, outside any `{...}` placeholder.
+    NotExpr,
+    /// Just saw `{`; could begin an expression, a `{{` escape, or `{}`.
+    MaybeExpr,
+    /// Inside a placeholder expression.
+    Expr,
+    /// Just saw a lone `}`; only valid if followed by another `}` (escape).
+    MaybeIncorrect,
+    /// Inside the format-specifier part after `:`, e.g. in `{expr:?}`.
+    FormatOpts,
+}
+
+impl FormatStrParser {
+    /// Creates a parser over the raw (still escaped) literal contents.
+    pub(crate) fn new(input: String) -> Self {
+        Self {
+            input,
+            output: String::new(),
+            extracted_expressions: Vec::new(),
+            state: State::NotExpr,
+            parsed: false,
+        }
+    }
+
+    /// Runs the state machine over `input`, filling `output` and
+    /// `extracted_expressions`. Returns `Err(())` on malformed input such as
+    /// unbalanced braces. Must succeed before `to_suggestion` may be called.
+    pub(crate) fn parse(&mut self) -> Result<(), ()> {
+        let mut current_expr = String::new();
+
+        // 1-based counter used to number empty `{}` placeholders as snippet
+        // tab stops (`$1`, `$2`, ...).
+        let mut placeholder_id = 1;
+
+        // Count of open braces inside of an expression.
+        // We assume that user knows what they're doing, thus we treat it like a correct pattern, e.g.
+        // "{MyStruct { val_a: 0, val_b: 1 }}".
+        let mut inexpr_open_count = 0;
+
+        // We need to escape '\' and '$'. See the comments on `get_receiver_text()` for detail.
+        let mut chars = self.input.chars().peekable();
+        while let Some(chr) = chars.next() {
+            match (self.state, chr) {
+                (State::NotExpr, '{') => {
+                    self.output.push(chr);
+                    self.state = State::MaybeExpr;
+                }
+                (State::NotExpr, '}') => {
+                    self.output.push(chr);
+                    self.state = State::MaybeIncorrect;
+                }
+                (State::NotExpr, _) => {
+                    if matches!(chr, '\\' | '$') {
+                        self.output.push('\\');
+                    }
+                    self.output.push(chr);
+                }
+                (State::MaybeIncorrect, '}') => {
+                    // It's okay, we met "}}".
+                    self.output.push(chr);
+                    self.state = State::NotExpr;
+                }
+                (State::MaybeIncorrect, _) => {
+                    // Error in the string.
+                    return Err(());
+                }
+                (State::MaybeExpr, '{') => {
+                    // "{{" is an escaped brace, not a placeholder.
+                    self.output.push(chr);
+                    self.state = State::NotExpr;
+                }
+                (State::MaybeExpr, '}') => {
+                    // This is an empty sequence '{}'. Replace it with placeholder.
+                    self.output.push(chr);
+                    self.extracted_expressions.push(format!("${}", placeholder_id));
+                    placeholder_id += 1;
+                    self.state = State::NotExpr;
+                }
+                (State::MaybeExpr, _) => {
+                    if matches!(chr, '\\' | '$') {
+                        current_expr.push('\\');
+                    }
+                    current_expr.push(chr);
+                    self.state = State::Expr;
+                }
+                (State::Expr, '}') => {
+                    if inexpr_open_count == 0 {
+                        self.output.push(chr);
+                        self.extracted_expressions.push(current_expr.trim().into());
+                        current_expr = String::new();
+                        self.state = State::NotExpr;
+                    } else {
+                        // We're closing one brace met before inside of the expression.
+                        current_expr.push(chr);
+                        inexpr_open_count -= 1;
+                    }
+                }
+                (State::Expr, ':') if chars.peek().copied() == Some(':') => {
+                    // path separator
+                    current_expr.push_str("::");
+                    chars.next();
+                }
+                (State::Expr, ':') => {
+                    if inexpr_open_count == 0 {
+                        // We're outside of braces, thus assume that it's a specifier, like "{Some(value):?}"
+                        self.output.push(chr);
+                        self.extracted_expressions.push(current_expr.trim().into());
+                        current_expr = String::new();
+                        self.state = State::FormatOpts;
+                    } else {
+                        // We're inside of braced expression, assume that it's a struct field name/value delimiter.
+                        current_expr.push(chr);
+                    }
+                }
+                (State::Expr, '{') => {
+                    current_expr.push(chr);
+                    inexpr_open_count += 1;
+                }
+                (State::Expr, _) => {
+                    if matches!(chr, '\\' | '$') {
+                        current_expr.push('\\');
+                    }
+                    current_expr.push(chr);
+                }
+                (State::FormatOpts, '}') => {
+                    self.output.push(chr);
+                    self.state = State::NotExpr;
+                }
+                (State::FormatOpts, _) => {
+                    if matches!(chr, '\\' | '$') {
+                        self.output.push('\\');
+                    }
+                    self.output.push(chr);
+                }
+            }
+        }
+
+        // Ending anywhere but in plain text means an unterminated placeholder
+        // or a dangling '}'.
+        if self.state != State::NotExpr {
+            return Err(());
+        }
+
+        self.parsed = true;
+        Ok(())
+    }
+
+    /// Renders the parsed string as a `macro_name!("fmt", args...)` call.
+    ///
+    /// Panics if called before a successful `parse()`.
+    pub(crate) fn to_suggestion(&self, macro_name: &str) -> String {
+        assert!(self.parsed, "Attempt to get a suggestion from not parsed expression");
+
+        let expressions_as_string = self.extracted_expressions.join(", ");
+        format!(r#"{}("{}", {})"#, macro_name, self.output, expressions_as_string)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use expect_test::{expect, Expect};
+
+    /// Parses `input` and snapshot-compares the result: on success the repr is
+    /// "output" or "output; expr_1, expr_2", on parse failure it is "-".
+    fn check(input: &str, expect: &Expect) {
+        let mut parser = FormatStrParser::new((*input).to_owned());
+        let outcome_repr = if parser.parse().is_ok() {
+            // Parsing should be OK, expected repr is "string; expr_1, expr_2".
+            if parser.extracted_expressions.is_empty() {
+                parser.output
+            } else {
+                format!("{}; {}", parser.output, parser.extracted_expressions.join(", "))
+            }
+        } else {
+            // Parsing should fail, expected repr is "-".
+            "-".to_owned()
+        };
+
+        expect.assert_eq(&outcome_repr);
+    }
+
+    #[test]
+    fn format_str_parser() {
+        let test_vector = &[
+            ("no expressions", expect![["no expressions"]]),
+            // '\' and '$' must be escaped for the snippet engine.
+            (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]),
+            ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]),
+            ("{expr:?}", expect![["{:?}; expr"]]),
+            ("{expr:1$}", expect![[r"{:1\$}; expr"]]),
+            ("{$0}", expect![[r"{}; \$0"]]),
+            ("{malformed", expect![["-"]]),
+            ("malformed}", expect![["-"]]),
+            ("{{correct", expect![["{{correct"]]),
+            ("correct}}", expect![["correct}}"]]),
+            ("{correct}}}", expect![["{}}}; correct"]]),
+            ("{correct}}}}}", expect![["{}}}}}; correct"]]),
+            ("{incorrect}}", expect![["-"]]),
+            // Empty `{}` placeholders become numbered snippet tab stops.
+            ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]),
+            ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]),
+            (
+                "{SomeStruct { val_a: 0, val_b: 1 }}",
+                expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]],
+            ),
+            ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]),
+            (
+                "{SomeStruct { val_a: 0, val_b: 1 }:?}",
+                expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]],
+            ),
+            ("{ 2 + 2 }", expect![["{}; 2 + 2"]]),
+            ("{strsim::jaro_winkle(a)}", expect![["{}; strsim::jaro_winkle(a)"]]),
+            ("{foo::bar::baz()}", expect![["{}; foo::bar::baz()"]]),
+            ("{foo::bar():?}", expect![["{:?}; foo::bar()"]]),
+        ];
+
+        for (input, output) in test_vector {
+            check(input, output)
+        }
+    }
+
+    #[test]
+    fn test_into_suggestion() {
+        let test_vector = &[
+            ("println!", "{}", r#"println!("{}", $1)"#),
+            ("eprintln!", "{}", r#"eprintln!("{}", $1)"#),
+            (
+                "log::info!",
+                "{} {expr} {} {2 + 2}",
+                r#"log::info!("{} {} {} {}", $1, expr, $2, 2 + 2)"#,
+            ),
+            ("format!", "{expr:?}", r#"format!("{:?}", expr)"#),
+        ];
+
+        for (kind, input, output) in test_vector {
+            let mut parser = FormatStrParser::new((*input).to_owned());
+            parser.parse().expect("Parsing must succeed");
+
+            assert_eq!(&parser.to_suggestion(*kind), output);
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
new file mode 100644
index 000000000..1c9042390
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
@@ -0,0 +1,369 @@
+//! Complete fields in record literals and patterns.
+use ide_db::SymbolKind;
+use syntax::ast::{self, Expr};
+
+use crate::{
+ context::{DotAccess, DotAccessKind, ExprCtx, PathCompletionCtx, PatternContext, Qualified},
+ CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance,
+ CompletionRelevancePostfixMatch, Completions,
+};
+
+/// Completes the fields that are not yet bound in a record pattern,
+/// e.g. the remaining fields in `Foo { bar, $0 }`.
+pub(crate) fn complete_record_pattern_fields(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    pattern_ctx: &PatternContext,
+) {
+    if let PatternContext { record_pat: Some(record_pat), .. } = pattern_ctx {
+        complete_fields(acc, ctx, ctx.sema.record_pattern_missing_fields(record_pat));
+    }
+}
+
+/// Completes missing fields inside a record literal, e.g. `Foo { $0 }`.
+///
+/// For unions, offers the full field list only while no field has been
+/// written yet (a union literal sets exactly one field). `dot_prefix` is true
+/// when the cursor sits after a `.` (start of `..base`/`..Default::default()`).
+pub(crate) fn complete_record_expr_fields(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    record_expr: &ast::RecordExpr,
+    &dot_prefix: &bool,
+) {
+    let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_expr.clone()));
+
+    let missing_fields = match ty.as_ref().and_then(|t| t.original.as_adt()) {
+        Some(hir::Adt::Union(un)) => {
+            // ctx.sema.record_literal_missing_fields will always return
+            // an empty Vec on a union literal. This is normally
+            // reasonable, but here we'd like to present the full list
+            // of fields if the literal is empty.
+            let were_fields_specified =
+                record_expr.record_expr_field_list().and_then(|fl| fl.fields().next()).is_some();
+
+            match were_fields_specified {
+                false => un.fields(ctx.db).into_iter().map(|f| (f, f.ty(ctx.db))).collect(),
+                true => return,
+            }
+        }
+        _ => {
+            let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
+            add_default_update(acc, ctx, ty, &missing_fields);
+            if dot_prefix {
+                // After `.` only the functional-update syntax makes sense;
+                // offer a `..` snippet and skip plain field completions.
+                let mut item =
+                    CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), "..");
+                item.insert_text(".");
+                item.add_to(acc);
+                return;
+            }
+            missing_fields
+        }
+    };
+    complete_fields(acc, ctx, missing_fields);
+}
+
+// FIXME: This should probably be part of complete_path_expr
+/// Completes `..Default::default()` in functional-update position,
+/// i.e. after `..` inside a record literal.
+pub(crate) fn complete_record_expr_func_update(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx: &PathCompletionCtx,
+    expr_ctx: &ExprCtx,
+) {
+    if !matches!(path_ctx.qualified, Qualified::No) {
+        return;
+    }
+    if let ExprCtx { is_func_update: Some(record_expr), .. } = expr_ctx {
+        let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_expr.clone()));
+
+        match ty.as_ref().and_then(|t| t.original.as_adt()) {
+            // Unions have no functional update syntax.
+            Some(hir::Adt::Union(_)) => (),
+            _ => {
+                let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
+                add_default_update(acc, ctx, ty, &missing_fields);
+            }
+        };
+    }
+}
+
+/// Offers a `..Default::default()` completion when the record's type
+/// implements `Default` and there are still fields left to fill.
+fn add_default_update(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    ty: Option<hir::TypeInfo>,
+    missing_fields: &[(hir::Field, hir::Type)],
+) {
+    let default_trait = ctx.famous_defs().core_default_Default();
+    let impl_default_trait = default_trait
+        .zip(ty.as_ref())
+        .map_or(false, |(default_trait, ty)| ty.original.impls_trait(ctx.db, default_trait, &[]));
+    if impl_default_trait && !missing_fields.is_empty() {
+        // FIXME: This should make use of scope_def like completions so we get all the other goodies
+        let completion_text = "..Default::default()";
+        let mut item = CompletionItem::new(SymbolKind::Field, ctx.source_range(), completion_text);
+        // Strip the part the user has already typed (e.g. a leading `..`)
+        // so the inserted text completes rather than duplicates it.
+        let completion_text =
+            completion_text.strip_prefix(ctx.token.text()).unwrap_or(completion_text);
+        item.insert_text(completion_text).set_relevance(CompletionRelevance {
+            postfix_match: Some(CompletionRelevancePostfixMatch::Exact),
+            ..Default::default()
+        });
+        item.add_to(acc);
+    }
+}
+
+/// Adds a field completion item for every missing `(field, type)` pair.
+fn complete_fields(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    missing_fields: Vec<(hir::Field, hir::Type)>,
+) {
+    for (field, ty) in missing_fields {
+        acc.add_field(
+            ctx,
+            // Synthesize a field-access context: record completion has no
+            // actual receiver expression.
+            &DotAccess {
+                receiver: None,
+                receiver_ty: None,
+                kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal: false },
+            },
+            None,
+            field,
+            &ty,
+        );
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::check_edit;
+
+    #[test]
+    fn literal_struct_completion_edit() {
+        check_edit(
+            "FooDesc {…}",
+            r#"
+struct FooDesc { pub bar: bool }
+
+fn create_foo(foo_desc: &FooDesc) -> () { () }
+
+fn baz() {
+    let foo = create_foo(&$0);
+}
+            "#,
+            r#"
+struct FooDesc { pub bar: bool }
+
+fn create_foo(foo_desc: &FooDesc) -> () { () }
+
+fn baz() {
+    let foo = create_foo(&FooDesc { bar: ${1:()} }$0);
+}
+            "#,
+        )
+    }
+
+    #[test]
+    fn literal_struct_impl_self_completion() {
+        check_edit(
+            "Self {…}",
+            r#"
+struct Foo {
+    bar: u64,
+}
+
+impl Foo {
+    fn new() -> Foo {
+        Self$0
+    }
+}
+            "#,
+            r#"
+struct Foo {
+    bar: u64,
+}
+
+impl Foo {
+    fn new() -> Foo {
+        Self { bar: ${1:()} }$0
+    }
+}
+            "#,
+        );
+
+        // Tuple structs complete as `Self(...)` rather than `Self {...}`.
+        check_edit(
+            "Self(…)",
+            r#"
+mod submod {
+    pub struct Foo(pub u64);
+}
+
+impl submod::Foo {
+    fn new() -> submod::Foo {
+        Self$0
+    }
+}
+            "#,
+            r#"
+mod submod {
+    pub struct Foo(pub u64);
+}
+
+impl submod::Foo {
+    fn new() -> submod::Foo {
+        Self(${1:()})$0
+    }
+}
+            "#,
+        )
+    }
+
+    #[test]
+    fn literal_struct_completion_from_sub_modules() {
+        check_edit(
+            "submod::Struct {…}",
+            r#"
+mod submod {
+    pub struct Struct {
+        pub a: u64,
+    }
+}
+
+fn f() -> submod::Struct {
+    Stru$0
+}
+            "#,
+            r#"
+mod submod {
+    pub struct Struct {
+        pub a: u64,
+    }
+}
+
+fn f() -> submod::Struct {
+    submod::Struct { a: ${1:()} }$0
+}
+            "#,
+        )
+    }
+
+    // NOTE(review): "complexion" in this test name is likely a typo for
+    // "completion"; left as-is since renaming would change the code.
+    #[test]
+    fn literal_struct_complexion_module() {
+        check_edit(
+            "FooDesc {…}",
+            r#"
+mod _69latrick {
+    pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, pub bar: bool }
+    pub fn create_foo(foo_desc: &FooDesc) -> () { () }
+}
+
+fn baz() {
+    use _69latrick::*;
+
+    let foo = create_foo(&$0);
+}
+            "#,
+            r#"
+mod _69latrick {
+    pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, pub bar: bool }
+    pub fn create_foo(foo_desc: &FooDesc) -> () { () }
+}
+
+fn baz() {
+    use _69latrick::*;
+
+    let foo = create_foo(&FooDesc { six: ${1:()}, neuf: ${2:()}, bar: ${3:()} }$0);
+}
+            "#,
+        );
+    }
+
+    #[test]
+    fn default_completion_edit() {
+        // `..Default::default()` completes whether the user typed `.`, `..`,
+        // or nothing at all in functional-update position.
+        check_edit(
+            "..Default::default()",
+            r#"
+//- minicore: default
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+    fn default() -> Self {}
+}
+
+fn foo() {
+    let other = Struct {
+        foo: 5,
+        .$0
+    };
+}
+"#,
+            r#"
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+    fn default() -> Self {}
+}
+
+fn foo() {
+    let other = Struct {
+        foo: 5,
+        ..Default::default()
+    };
+}
+"#,
+        );
+        check_edit(
+            "..Default::default()",
+            r#"
+//- minicore: default
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+    fn default() -> Self {}
+}
+
+fn foo() {
+    let other = Struct {
+        foo: 5,
+        $0
+    };
+}
+"#,
+            r#"
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+    fn default() -> Self {}
+}
+
+fn foo() {
+    let other = Struct {
+        foo: 5,
+        ..Default::default()
+    };
+}
+"#,
+        );
+        check_edit(
+            "..Default::default()",
+            r#"
+//- minicore: default
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+    fn default() -> Self {}
+}
+
+fn foo() {
+    let other = Struct {
+        foo: 5,
+        ..$0
+    };
+}
+"#,
+            r#"
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+    fn default() -> Self {}
+}
+
+fn foo() {
+    let other = Struct {
+        foo: 5,
+        ..Default::default()
+    };
+}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
new file mode 100644
index 000000000..66adb4286
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
@@ -0,0 +1,189 @@
+//! This file provides snippet completions, like `pd` => `eprintln!(...)`.
+
+use hir::Documentation;
+use ide_db::{imports::insert_use::ImportScope, SnippetCap};
+
+use crate::{
+ context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified},
+ item::Builder,
+ CompletionContext, CompletionItem, CompletionItemKind, Completions, SnippetScope,
+};
+
+/// Completes snippets valid in expression position (custom expression-scope
+/// snippets plus built-ins like `pd`/`ppd`/`macro_rules`).
+///
+/// Bails out for qualified paths, when a qualifier context is present, or
+/// when the client does not support snippets.
+pub(crate) fn complete_expr_snippet(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx: &PathCompletionCtx,
+    &ExprCtx { in_block_expr, .. }: &ExprCtx,
+) {
+    if !matches!(path_ctx.qualified, Qualified::No) {
+        return;
+    }
+    if !ctx.qualifier_ctx.none() {
+        return;
+    }
+
+    let cap = match ctx.config.snippet_cap {
+        Some(it) => it,
+        None => return,
+    };
+
+    if !ctx.config.snippets.is_empty() {
+        add_custom_completions(acc, ctx, cap, SnippetScope::Expr);
+    }
+
+    // Statement-like snippets only make sense directly inside a block.
+    if in_block_expr {
+        snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc);
+        snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc);
+        let item = snippet(
+            ctx,
+            cap,
+            "macro_rules",
+            "\
+macro_rules! $1 {
+    ($2) => {
+        $0
+    };
+}",
+        );
+        item.add_to(acc);
+    }
+}
+
+/// Completes snippets valid in item position (custom item-scope snippets plus
+/// built-ins `tmod`, `tfn` and `macro_rules`).
+pub(crate) fn complete_item_snippet(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx: &PathCompletionCtx,
+    kind: &ItemListKind,
+) {
+    if !matches!(path_ctx.qualified, Qualified::No) {
+        return;
+    }
+    if !ctx.qualifier_ctx.none() {
+        return;
+    }
+    let cap = match ctx.config.snippet_cap {
+        Some(it) => it,
+        None => return,
+    };
+
+    if !ctx.config.snippets.is_empty() {
+        add_custom_completions(acc, ctx, cap, SnippetScope::Item);
+    }
+
+    // Test-related snippets shouldn't be shown in blocks.
+    if let ItemListKind::SourceFile | ItemListKind::Module = kind {
+        let mut item = snippet(
+            ctx,
+            cap,
+            "tmod (Test module)",
+            "\
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn ${1:test_name}() {
+        $0
+    }
+}",
+        );
+        item.lookup_by("tmod");
+        item.add_to(acc);
+
+        let mut item = snippet(
+            ctx,
+            cap,
+            "tfn (Test function)",
+            "\
+#[test]
+fn ${1:feature}() {
+    $0
+}",
+        );
+        item.lookup_by("tfn");
+        item.add_to(acc);
+
+        let item = snippet(
+            ctx,
+            cap,
+            "macro_rules",
+            "\
+macro_rules! $1 {
+    ($2) => {
+        $0
+    };
+}",
+        );
+        item.add_to(acc);
+    }
+}
+
+/// Builds a snippet completion item with the given `label` and snippet text.
+fn snippet(ctx: &CompletionContext<'_>, cap: SnippetCap, label: &str, snippet: &str) -> Builder {
+    let mut item = CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label);
+    item.insert_snippet(cap, snippet);
+    item
+}
+
+/// Adds the user-configured snippets for `scope`, attaching each snippet's
+/// required imports and a rendered code-block doc.
+///
+/// Always returns `None`; the `Option` return only enables early exit via `?`
+/// when no use-statement insertion point exists.
+fn add_custom_completions(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    cap: SnippetCap,
+    scope: SnippetScope,
+) -> Option<()> {
+    // Snippets may need to insert imports, so bail if there is nowhere to put them.
+    if ImportScope::find_insert_use_container(&ctx.token.parent()?, &ctx.sema).is_none() {
+        return None;
+    }
+    ctx.config.prefix_snippets().filter(|(_, snip)| snip.scope == scope).for_each(
+        |(trigger, snip)| {
+            let imports = match snip.imports(ctx) {
+                Some(imports) => imports,
+                None => return,
+            };
+            let body = snip.snippet();
+            let mut builder = snippet(ctx, cap, trigger, &body);
+            builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
+            for import in imports.into_iter() {
+                builder.add_import(import);
+            }
+            builder.set_detail(snip.description.clone());
+            builder.add_to(acc);
+        },
+    );
+    None
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{
+        tests::{check_edit_with_config, TEST_CONFIG},
+        CompletionConfig, Snippet,
+    };
+
+    #[test]
+    fn custom_snippet_completion() {
+        // A user-defined snippet should expand in expression position and pull
+        // in its required import (`core::ops::ControlFlow`) on insertion.
+        check_edit_with_config(
+            CompletionConfig {
+                snippets: vec![Snippet::new(
+                    &["break".into()],
+                    &[],
+                    &["ControlFlow::Break(())".into()],
+                    "",
+                    &["core::ops::ControlFlow".into()],
+                    crate::SnippetScope::Expr,
+                )
+                .unwrap()],
+                ..TEST_CONFIG
+            },
+            "break",
+            r#"
+//- minicore: try
+fn main() { $0 }
+"#,
+            r#"
+use core::ops::ControlFlow;
+
+fn main() { ControlFlow::Break(()) }
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
new file mode 100644
index 000000000..8f9db2f94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
@@ -0,0 +1,246 @@
+//! Completion of names from the current scope in type position.
+
+use hir::{HirDisplay, ScopeDef};
+use syntax::{ast, AstNode, SyntaxKind};
+
+use crate::{
+ context::{PathCompletionCtx, Qualified, TypeAscriptionTarget, TypeLocation},
+ render::render_type_inference,
+ CompletionContext, Completions,
+};
+
+/// Completes names reachable in type position for the given path and
+/// `location` (impl target, generic argument list, type bound, ...).
+pub(crate) fn complete_type_path(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+    location: &TypeLocation,
+) {
+    let _p = profile::span("complete_type_path");
+
+    // Filters scope entries down to the ones that make sense in type position.
+    let scope_def_applicable = |def| {
+        use hir::{GenericParam::*, ModuleDef::*};
+        match def {
+            ScopeDef::GenericParam(LifetimeParam(_)) | ScopeDef::Label(_) => false,
+            // no values in type places
+            ScopeDef::ModuleDef(Function(_) | Variant(_) | Static(_)) | ScopeDef::Local(_) => false,
+            // unless its a constant in a generic arg list position
+            ScopeDef::ModuleDef(Const(_)) | ScopeDef::GenericParam(ConstParam(_)) => {
+                matches!(location, TypeLocation::GenericArgList(_))
+            }
+            ScopeDef::ImplSelfType(_) => {
+                !matches!(location, TypeLocation::ImplTarget | TypeLocation::ImplTrait)
+            }
+            // Don't suggest attribute macros and derives.
+            ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db),
+            // Type things are fine
+            ScopeDef::ModuleDef(BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TypeAlias(_))
+            | ScopeDef::AdtSelfType(_)
+            | ScopeDef::Unknown
+            | ScopeDef::GenericParam(TypeParam(_)) => true,
+        }
+    };
+
+    // Associated consts are only valid inside generic argument lists;
+    // associated functions never are; type aliases always are.
+    let add_assoc_item = |acc: &mut Completions, item| match item {
+        hir::AssocItem::Const(ct) if matches!(location, TypeLocation::GenericArgList(_)) => {
+            acc.add_const(ctx, ct)
+        }
+        hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => (),
+        hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
+    };
+
+    match qualified {
+        // `<_>::` — no anchor type or trait known, offer items of all traits in scope.
+        Qualified::TypeAnchor { ty: None, trait_: None } => ctx
+            .traits_in_scope()
+            .iter()
+            .flat_map(|&it| hir::Trait::from(it).items(ctx.sema.db))
+            .for_each(|item| add_assoc_item(acc, item)),
+        Qualified::TypeAnchor { trait_: Some(trait_), .. } => {
+            trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item))
+        }
+        Qualified::TypeAnchor { ty: Some(ty), trait_: None } => {
+            ctx.iterate_path_candidates(&ty, |item| {
+                add_assoc_item(acc, item);
+            });
+
+            // Iterate assoc types separately
+            ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+                if let hir::AssocItem::TypeAlias(ty) = item {
+                    acc.add_type_alias(ctx, ty)
+                }
+                None::<()>
+            });
+        }
+        Qualified::With { resolution: None, .. } => {}
+        Qualified::With { resolution: Some(resolution), .. } => {
+            // Add associated types on type parameters and `Self`.
+            ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| {
+                acc.add_type_alias(ctx, alias);
+                None::<()>
+            });
+
+            match resolution {
+                hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
+                    let module_scope = module.scope(ctx.db, Some(ctx.module));
+                    for (name, def) in module_scope {
+                        if scope_def_applicable(def) {
+                            acc.add_path_resolution(ctx, path_ctx, name, def);
+                        }
+                    }
+                }
+                hir::PathResolution::Def(
+                    def @ (hir::ModuleDef::Adt(_)
+                    | hir::ModuleDef::TypeAlias(_)
+                    | hir::ModuleDef::BuiltinType(_)),
+                ) => {
+                    let ty = match def {
+                        hir::ModuleDef::Adt(adt) => adt.ty(ctx.db),
+                        hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db),
+                        hir::ModuleDef::BuiltinType(builtin) => builtin.ty(ctx.db),
+                        _ => return,
+                    };
+
+                    // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType.
+                    // (where AssocType is defined on a trait, not an inherent impl)
+
+                    ctx.iterate_path_candidates(&ty, |item| {
+                        add_assoc_item(acc, item);
+                    });
+
+                    // Iterate assoc types separately
+                    ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+                        if let hir::AssocItem::TypeAlias(ty) = item {
+                            acc.add_type_alias(ctx, ty)
+                        }
+                        None::<()>
+                    });
+                }
+                hir::PathResolution::Def(hir::ModuleDef::Trait(t)) => {
+                    // Handles `Trait::assoc` as well as `<Ty as Trait>::assoc`.
+                    for item in t.items(ctx.db) {
+                        add_assoc_item(acc, item);
+                    }
+                }
+                hir::PathResolution::TypeParam(_) | hir::PathResolution::SelfType(_) => {
+                    let ty = match resolution {
+                        hir::PathResolution::TypeParam(param) => param.ty(ctx.db),
+                        hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db),
+                        _ => return,
+                    };
+
+                    ctx.iterate_path_candidates(&ty, |item| {
+                        add_assoc_item(acc, item);
+                    });
+                }
+                _ => (),
+            }
+        }
+        Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+        Qualified::No => {
+            match location {
+                TypeLocation::TypeBound => {
+                    // Only traits, modules and fn-like macros are meaningful
+                    // in bound position.
+                    acc.add_nameref_keywords_with_colon(ctx);
+                    ctx.process_all_names(&mut |name, res| {
+                        let add_resolution = match res {
+                            ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => {
+                                mac.is_fn_like(ctx.db)
+                            }
+                            ScopeDef::ModuleDef(
+                                hir::ModuleDef::Trait(_) | hir::ModuleDef::Module(_),
+                            ) => true,
+                            _ => false,
+                        };
+                        if add_resolution {
+                            acc.add_path_resolution(ctx, path_ctx, name, res);
+                        }
+                    });
+                    return;
+                }
+                TypeLocation::GenericArgList(Some(arg_list)) => {
+                    let in_assoc_type_arg = ctx
+                        .original_token
+                        .parent_ancestors()
+                        .any(|node| node.kind() == SyntaxKind::ASSOC_TYPE_ARG);
+
+                    if !in_assoc_type_arg {
+                        if let Some(path_seg) =
+                            arg_list.syntax().parent().and_then(ast::PathSegment::cast)
+                        {
+                            if path_seg
+                                .syntax()
+                                .ancestors()
+                                .find_map(ast::TypeBound::cast)
+                                .is_some()
+                            {
+                                if let Some(hir::PathResolution::Def(hir::ModuleDef::Trait(
+                                    trait_,
+                                ))) = ctx.sema.resolve_path(&path_seg.parent_path())
+                                {
+                                    // Position of the cursor among the generic
+                                    // arguments already written.
+                                    let arg_idx = arg_list
+                                        .generic_args()
+                                        .filter(|arg| {
+                                            arg.syntax().text_range().end()
+                                                < ctx.original_token.text_range().start()
+                                        })
+                                        .count();
+
+                                    let n_required_params =
+                                        trait_.type_or_const_param_count(ctx.sema.db, true);
+                                    if arg_idx >= n_required_params {
+                                        trait_
+                                            .items_with_supertraits(ctx.sema.db)
+                                            .into_iter()
+                                            .for_each(|it| {
+                                                if let hir::AssocItem::TypeAlias(alias) = it {
+                                                    cov_mark::hit!(
+                                                        complete_assoc_type_in_generics_list
+                                                    );
+                                                    acc.add_type_alias_with_eq(ctx, alias);
+                                                }
+                                            });
+
+                                        let n_params =
+                                            trait_.type_or_const_param_count(ctx.sema.db, false);
+                                        if arg_idx >= n_params {
+                                            return; // only show assoc types
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+                _ => {}
+            };
+
+            acc.add_nameref_keywords_with_colon(ctx);
+            ctx.process_all_names(&mut |name, def| {
+                if scope_def_applicable(def) {
+                    acc.add_path_resolution(ctx, path_ctx, name, def);
+                }
+            });
+        }
+    }
+}
+
+/// Completes the inferred type at a type-ascription site (`let x: $0`,
+/// fn params, consts, return types) as a single inference-based item.
+///
+/// Always returns `None`; the `Option` return only enables early exit via `?`.
+pub(crate) fn complete_ascribed_type(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx: &PathCompletionCtx,
+    ascription: &TypeAscriptionTarget,
+) -> Option<()> {
+    if !path_ctx.is_trivial_path() {
+        return None;
+    }
+    let x = match ascription {
+        TypeAscriptionTarget::Let(pat) | TypeAscriptionTarget::FnParam(pat) => {
+            ctx.sema.type_of_pat(pat.as_ref()?)
+        }
+        TypeAscriptionTarget::Const(exp) | TypeAscriptionTarget::RetType(exp) => {
+            ctx.sema.type_of_expr(exp.as_ref()?)
+        }
+    }?
+    .adjusted();
+    // Bail (via `?`) if the type cannot be rendered as source code.
+    let ty_string = x.display_source_code(ctx.db, ctx.module.into()).ok()?;
+    acc.add(render_type_inference(ty_string, ctx));
+    None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
new file mode 100644
index 000000000..2555c34aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
@@ -0,0 +1,120 @@
+//! Completion for use trees
+
+use hir::ScopeDef;
+use ide_db::{FxHashSet, SymbolKind};
+use syntax::{ast, AstNode};
+
+use crate::{
+ context::{CompletionContext, PathCompletionCtx, Qualified},
+ item::Builder,
+ CompletionItem, CompletionItemKind, CompletionRelevance, Completions,
+};
+
+/// Completes path segments inside a `use` tree.
+///
+/// `name_ref` is the partially-typed segment under the cursor, used to avoid
+/// suggesting the segment currently being written (`use self::foo$0`).
+pub(crate) fn complete_use_path(
+    acc: &mut Completions,
+    ctx: &CompletionContext<'_>,
+    path_ctx @ PathCompletionCtx { qualified, use_tree_parent, .. }: &PathCompletionCtx,
+    name_ref: &Option<ast::NameRef>,
+) {
+    match qualified {
+        Qualified::With { path, resolution: Some(resolution), super_chain_len } => {
+            acc.add_super_keyword(ctx, *super_chain_len);
+
+            // only show `self` in a new use-tree when the qualifier doesn't end in self
+            let not_preceded_by_self = *use_tree_parent
+                && !matches!(
+                    path.segment().and_then(|it| it.kind()),
+                    Some(ast::PathSegmentKind::SelfKw)
+                );
+            if not_preceded_by_self {
+                acc.add_keyword(ctx, "self");
+            }
+
+            // Collect sibling names in the surrounding `use {...}` list so
+            // already-imported items can be down-ranked below.
+            let mut already_imported_names = FxHashSet::default();
+            if let Some(list) = ctx.token.parent_ancestors().find_map(ast::UseTreeList::cast) {
+                let use_tree = list.parent_use_tree();
+                if use_tree.path().as_ref() == Some(path) {
+                    for tree in list.use_trees().filter(|tree| tree.is_simple_path()) {
+                        if let Some(name) = tree.path().and_then(|path| path.as_single_name_ref()) {
+                            already_imported_names.insert(name.to_string());
+                        }
+                    }
+                }
+            }
+
+            match resolution {
+                hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
+                    let module_scope = module.scope(ctx.db, Some(ctx.module));
+                    let unknown_is_current = |name: &hir::Name| {
+                        matches!(
+                            name_ref,
+                            Some(name_ref) if name_ref.syntax().text() == name.to_smol_str().as_str()
+                        )
+                    };
+                    for (name, def) in module_scope {
+                        let is_name_already_imported = name
+                            .as_text()
+                            .map_or(false, |text| already_imported_names.contains(text.as_str()));
+
+                        let add_resolution = match def {
+                            ScopeDef::Unknown if unknown_is_current(&name) => {
+                                // for `use self::foo$0`, don't suggest `foo` as a completion
+                                cov_mark::hit!(dont_complete_current_use);
+                                continue;
+                            }
+                            ScopeDef::ModuleDef(_) | ScopeDef::Unknown => true,
+                            _ => false,
+                        };
+
+                        if add_resolution {
+                            let mut builder = Builder::from_resolution(ctx, path_ctx, name, def);
+                            builder.set_relevance(CompletionRelevance {
+                                is_name_already_imported,
+                                ..Default::default()
+                            });
+                            acc.add(builder.build());
+                        }
+                    }
+                }
+                hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e))) => {
+                    cov_mark::hit!(enum_plain_qualified_use_tree);
+                    acc.add_enum_variants(ctx, path_ctx, *e);
+                }
+                _ => {}
+            }
+        }
+        // fresh use tree with leading colon2, only show crate roots
+        Qualified::Absolute => {
+            cov_mark::hit!(use_tree_crate_roots_only);
+            acc.add_crate_roots(ctx, path_ctx);
+        }
+        // only show modules and non-std enum in a fresh UseTree
+        Qualified::No => {
+            cov_mark::hit!(unqualified_path_selected_only);
+            ctx.process_all_names(&mut |name, res| {
+                match res {
+                    ScopeDef::ModuleDef(hir::ModuleDef::Module(module)) => {
+                        acc.add_module(ctx, path_ctx, module, name);
+                    }
+                    ScopeDef::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(e))) => {
+                        // exclude prelude enum
+                        let is_builtin =
+                            res.krate(ctx.db).map_or(false, |krate| krate.is_builtin(ctx.db));
+
+                        if !is_builtin {
+                            let item = CompletionItem::new(
+                                CompletionItemKind::SymbolKind(SymbolKind::Enum),
+                                ctx.source_range(),
+                                format!("{}::", e.name(ctx.db)),
+                            );
+                            acc.add(item.build());
+                        }
+                    }
+                    _ => {}
+                };
+            });
+            acc.add_nameref_keywords_with_colon(ctx);
+        }
+        Qualified::TypeAnchor { .. } | Qualified::With { resolution: None, .. } => {}
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs
new file mode 100644
index 000000000..5e6cf4bf9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs
@@ -0,0 +1,41 @@
+//! Completion for visibility specifiers.
+
+use crate::{
+ context::{CompletionContext, PathCompletionCtx, Qualified},
+ Completions,
+};
+
+pub(crate) fn complete_vis_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ &has_in_token: &bool,
+) {
+ match qualified {
+ Qualified::With {
+ resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
+ super_chain_len,
+ ..
+ } => {
+ // Try completing next child module of the path that is still a parent of the current module
+ let next_towards_current =
+ ctx.module.path_to_root(ctx.db).into_iter().take_while(|it| it != module).last();
+ if let Some(next) = next_towards_current {
+ if let Some(name) = next.name(ctx.db) {
+ cov_mark::hit!(visibility_qualified);
+ acc.add_module(ctx, path_ctx, next, name);
+ }
+ }
+
+ acc.add_super_keyword(ctx, *super_chain_len);
+ }
+ Qualified::Absolute | Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
+ Qualified::No => {
+ if !has_in_token {
+ cov_mark::hit!(kw_completion_in);
+ acc.add_keyword(ctx, "in");
+ }
+ acc.add_nameref_keywords(ctx);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
new file mode 100644
index 000000000..80d6af281
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
@@ -0,0 +1,41 @@
+//! Settings for tweaking completion.
+//!
+//! The fun thing here is `SnippetCap` -- this type can only be created in this
+//! module, and we use it to statically check that we only produce snippet
+//! completions if we are allowed to.
+
+use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
+
+use crate::snippet::Snippet;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct CompletionConfig {
+ pub enable_postfix_completions: bool,
+ pub enable_imports_on_the_fly: bool,
+ pub enable_self_on_the_fly: bool,
+ pub enable_private_editable: bool,
+ pub callable: Option<CallableSnippets>,
+ pub snippet_cap: Option<SnippetCap>,
+ pub insert_use: InsertUseConfig,
+ pub snippets: Vec<Snippet>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum CallableSnippets {
+ FillArguments,
+ AddParentheses,
+}
+
+impl CompletionConfig {
+ pub fn postfix_snippets(&self) -> impl Iterator<Item = (&str, &Snippet)> {
+ self.snippets
+ .iter()
+ .flat_map(|snip| snip.postfix_triggers.iter().map(move |trigger| (&**trigger, snip)))
+ }
+
+ pub fn prefix_snippets(&self) -> impl Iterator<Item = (&str, &Snippet)> {
+ self.snippets
+ .iter()
+ .flat_map(|snip| snip.prefix_triggers.iter().map(move |trigger| (&**trigger, snip)))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
new file mode 100644
index 000000000..e35f79d2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -0,0 +1,639 @@
+//! See `CompletionContext` structure.
+
+mod analysis;
+#[cfg(test)]
+mod tests;
+
+use std::iter;
+
+use base_db::SourceDatabaseExt;
+use hir::{
+ HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
+};
+use ide_db::{
+ base_db::{FilePosition, SourceDatabase},
+ famous_defs::FamousDefs,
+ FxHashMap, FxHashSet, RootDatabase,
+};
+use syntax::{
+ ast::{self, AttrKind, NameOrNameRef},
+ AstNode,
+ SyntaxKind::{self, *},
+ SyntaxToken, TextRange, TextSize,
+};
+use text_edit::Indel;
+
+use crate::CompletionConfig;
+
+const COMPLETION_MARKER: &str = "intellijRulezz";
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum PatternRefutability {
+ Refutable,
+ Irrefutable,
+}
+
+#[derive(Debug)]
+pub(crate) enum Visible {
+ Yes,
+ Editable,
+ No,
+}
+
+/// Existing qualifiers for the thing we are currently completing.
+#[derive(Debug, Default)]
+pub(super) struct QualifierCtx {
+ pub(super) unsafe_tok: Option<SyntaxToken>,
+ pub(super) vis_node: Option<ast::Visibility>,
+}
+
+impl QualifierCtx {
+ pub(super) fn none(&self) -> bool {
+ self.unsafe_tok.is_none() && self.vis_node.is_none()
+ }
+}
+
+/// The state of the path we are currently completing.
+#[derive(Debug)]
+pub(crate) struct PathCompletionCtx {
+ /// If this is a call with () already there (or {} in case of record patterns)
+ pub(super) has_call_parens: bool,
+ /// If this has a macro call bang !
+ pub(super) has_macro_bang: bool,
+ /// The qualifier of the current path.
+ pub(super) qualified: Qualified,
+ /// The parent of the path we are completing.
+ pub(super) parent: Option<ast::Path>,
+ /// The path of which we are completing the segment
+ pub(super) path: ast::Path,
+ pub(super) kind: PathKind,
+ /// Whether the path segment has type args or not.
+ pub(super) has_type_args: bool,
+ /// Whether the qualifier comes from a use tree parent or not
+ pub(crate) use_tree_parent: bool,
+}
+
+impl PathCompletionCtx {
+ pub(super) fn is_trivial_path(&self) -> bool {
+ matches!(
+ self,
+ PathCompletionCtx {
+ has_call_parens: false,
+ has_macro_bang: false,
+ qualified: Qualified::No,
+ parent: None,
+ has_type_args: false,
+ ..
+ }
+ )
+ }
+}
+
+/// The kind of path we are completing right now.
+#[derive(Debug, PartialEq, Eq)]
+pub(super) enum PathKind {
+ Expr {
+ expr_ctx: ExprCtx,
+ },
+ Type {
+ location: TypeLocation,
+ },
+ Attr {
+ attr_ctx: AttrCtx,
+ },
+ Derive {
+ existing_derives: ExistingDerives,
+ },
+ /// Path in item position, that is inside an (Assoc)ItemList
+ Item {
+ kind: ItemListKind,
+ },
+ Pat {
+ pat_ctx: PatternContext,
+ },
+ Vis {
+ has_in_token: bool,
+ },
+ Use,
+}
+
+pub(crate) type ExistingDerives = FxHashSet<hir::Macro>;
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) struct AttrCtx {
+ pub(crate) kind: AttrKind,
+ pub(crate) annotated_item_kind: Option<SyntaxKind>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) struct ExprCtx {
+ pub(crate) in_block_expr: bool,
+ pub(crate) in_loop_body: bool,
+ pub(crate) after_if_expr: bool,
+ /// Whether this expression is the direct condition of an if or while expression
+ pub(crate) in_condition: bool,
+ pub(crate) incomplete_let: bool,
+ pub(crate) ref_expr_parent: Option<ast::RefExpr>,
+ pub(crate) is_func_update: Option<ast::RecordExpr>,
+ pub(crate) self_param: Option<hir::SelfParam>,
+ pub(crate) innermost_ret_ty: Option<hir::Type>,
+ pub(crate) impl_: Option<ast::Impl>,
+ /// Whether this expression occurs in match arm guard position: before the
+ /// fat arrow token
+ pub(crate) in_match_guard: bool,
+}
+
+/// Original file ast nodes
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum TypeLocation {
+ TupleField,
+ TypeAscription(TypeAscriptionTarget),
+ GenericArgList(Option<ast::GenericArgList>),
+ TypeBound,
+ ImplTarget,
+ ImplTrait,
+ Other,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum TypeAscriptionTarget {
+ Let(Option<ast::Pat>),
+ FnParam(Option<ast::Pat>),
+ RetType(Option<ast::Expr>),
+ Const(Option<ast::Expr>),
+}
+
+/// The kind of item list a [`PathKind::Item`] belongs to.
+#[derive(Debug, PartialEq, Eq)]
+pub(super) enum ItemListKind {
+ SourceFile,
+ Module,
+ Impl,
+ TraitImpl(Option<ast::Impl>),
+ Trait,
+ ExternBlock,
+}
+
+#[derive(Debug)]
+pub(super) enum Qualified {
+ No,
+ With {
+ path: ast::Path,
+ resolution: Option<PathResolution>,
+ /// How many `super` segments are present in the path
+ ///
+ /// This would be None, if path is not solely made of
+ /// `super` segments, e.g.
+ ///
+ /// ```rust
+ /// use super::foo;
+ /// ```
+ ///
+ /// Otherwise it should be Some(count of `super`)
+ super_chain_len: Option<usize>,
+ },
+ /// <_>::
+ TypeAnchor {
+ ty: Option<hir::Type>,
+ trait_: Option<hir::Trait>,
+ },
+ /// Whether the path is an absolute path
+ Absolute,
+}
+
+/// The state of the pattern we are completing.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(super) struct PatternContext {
+ pub(super) refutability: PatternRefutability,
+ pub(super) param_ctx: Option<ParamContext>,
+ pub(super) has_type_ascription: bool,
+ pub(super) parent_pat: Option<ast::Pat>,
+ pub(super) ref_token: Option<SyntaxToken>,
+ pub(super) mut_token: Option<SyntaxToken>,
+ /// The record pattern this name or ref is a field of
+ pub(super) record_pat: Option<ast::RecordPat>,
+ pub(super) impl_: Option<ast::Impl>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(super) struct ParamContext {
+ pub(super) param_list: ast::ParamList,
+ pub(super) param: ast::Param,
+ pub(super) kind: ParamKind,
+}
+
+/// The state of the lifetime we are completing.
+#[derive(Debug)]
+pub(super) struct LifetimeContext {
+ pub(super) lifetime: Option<ast::Lifetime>,
+ pub(super) kind: LifetimeKind,
+}
+
+/// The kind of lifetime we are completing.
+#[derive(Debug)]
+pub(super) enum LifetimeKind {
+ LifetimeParam { is_decl: bool, param: ast::LifetimeParam },
+ Lifetime,
+ LabelRef,
+ LabelDef,
+}
+
+/// The state of the name we are completing.
+#[derive(Debug)]
+pub(super) struct NameContext {
+ #[allow(dead_code)]
+ pub(super) name: Option<ast::Name>,
+ pub(super) kind: NameKind,
+}
+
+/// The kind of the name we are completing.
+#[derive(Debug)]
+#[allow(dead_code)]
+pub(super) enum NameKind {
+ Const,
+ ConstParam,
+ Enum,
+ Function,
+ IdentPat(PatternContext),
+ MacroDef,
+ MacroRules,
+ /// Fake node
+ Module(ast::Module),
+ RecordField,
+ Rename,
+ SelfParam,
+ Static,
+ Struct,
+ Trait,
+ TypeAlias,
+ TypeParam,
+ Union,
+ Variant,
+}
+
+/// The state of the NameRef we are completing.
+#[derive(Debug)]
+pub(super) struct NameRefContext {
+ /// NameRef syntax in the original file
+ pub(super) nameref: Option<ast::NameRef>,
+ pub(super) kind: NameRefKind,
+}
+
+/// The kind of the NameRef we are completing.
+#[derive(Debug)]
+pub(super) enum NameRefKind {
+ Path(PathCompletionCtx),
+ DotAccess(DotAccess),
+ /// Position where we are only interested in keyword completions
+ Keyword(ast::Item),
+ /// The record expression this nameref is a field of and whether a dot precedes the completion identifier.
+ RecordExpr {
+ dot_prefix: bool,
+ expr: ast::RecordExpr,
+ },
+ Pattern(PatternContext),
+}
+
+/// The identifier we are currently completing.
+#[derive(Debug)]
+pub(super) enum CompletionAnalysis {
+ Name(NameContext),
+ NameRef(NameRefContext),
+ Lifetime(LifetimeContext),
+ /// The string the cursor is currently inside
+ String {
+ /// original token
+ original: ast::String,
+ /// fake token
+ expanded: Option<ast::String>,
+ },
+ /// Set if we are currently completing in an unexpanded attribute, this usually implies a builtin attribute like `allow($0)`
+ UnexpandedAttrTT {
+ colon_prefix: bool,
+ fake_attribute_under_caret: Option<ast::Attr>,
+ },
+}
+
+/// Information about the field or method access we are completing.
+#[derive(Debug)]
+pub(super) struct DotAccess {
+ pub(super) receiver: Option<ast::Expr>,
+ pub(super) receiver_ty: Option<TypeInfo>,
+ pub(super) kind: DotAccessKind,
+}
+
+#[derive(Debug)]
+pub(super) enum DotAccessKind {
+ Field {
+ /// True if the receiver is an integer and there is no ident in the original file after it yet
+ /// like `0.$0`
+ receiver_is_ambiguous_float_literal: bool,
+ },
+ Method {
+ has_parens: bool,
+ },
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum ParamKind {
+ Function(ast::Fn),
+ Closure(ast::ClosureExpr),
+}
+
+/// `CompletionContext` is created early during completion to figure out, where
+/// exactly is the cursor, syntax-wise.
+#[derive(Debug)]
+pub(crate) struct CompletionContext<'a> {
+ pub(super) sema: Semantics<'a, RootDatabase>,
+ pub(super) scope: SemanticsScope<'a>,
+ pub(super) db: &'a RootDatabase,
+ pub(super) config: &'a CompletionConfig,
+ pub(super) position: FilePosition,
+
+ /// The token before the cursor, in the original file.
+ pub(super) original_token: SyntaxToken,
+ /// The token before the cursor, in the macro-expanded file.
+ pub(super) token: SyntaxToken,
+ /// The crate of the current file.
+ pub(super) krate: hir::Crate,
+ /// The module of the `scope`.
+ pub(super) module: hir::Module,
+
+ /// The expected name of what we are completing.
+ /// This is usually the parameter name of the function argument we are completing.
+ pub(super) expected_name: Option<NameOrNameRef>,
+ /// The expected type of what we are completing.
+ pub(super) expected_type: Option<Type>,
+
+ pub(super) qualifier_ctx: QualifierCtx,
+
+ pub(super) locals: FxHashMap<Name, Local>,
+
+ /// The module depth of the current module of the cursor position.
+ /// - crate-root
+ /// - mod foo
+ /// - mod bar
+ /// Here depth will be 2
+ pub(super) depth_from_crate_root: usize,
+}
+
+impl<'a> CompletionContext<'a> {
+ /// The range of the identifier that is being completed.
+ pub(crate) fn source_range(&self) -> TextRange {
+ // check kind of macro-expanded token, but use range of original token
+ let kind = self.token.kind();
+ match kind {
+ CHAR => {
+ // assume we are completing a lifetime but the user has only typed the '
+ cov_mark::hit!(completes_if_lifetime_without_idents);
+ TextRange::at(self.original_token.text_range().start(), TextSize::from(1))
+ }
+ IDENT | LIFETIME_IDENT | UNDERSCORE => self.original_token.text_range(),
+ _ if kind.is_keyword() => self.original_token.text_range(),
+ _ => TextRange::empty(self.position.offset),
+ }
+ }
+
+ pub(crate) fn famous_defs(&self) -> FamousDefs<'_, '_> {
+ FamousDefs(&self.sema, self.krate)
+ }
+
+ /// Checks if an item is visible and not `doc(hidden)` at the completion site.
+ pub(crate) fn def_is_visible(&self, item: &ScopeDef) -> Visible {
+ match item {
+ ScopeDef::ModuleDef(def) => match def {
+ hir::ModuleDef::Module(it) => self.is_visible(it),
+ hir::ModuleDef::Function(it) => self.is_visible(it),
+ hir::ModuleDef::Adt(it) => self.is_visible(it),
+ hir::ModuleDef::Variant(it) => self.is_visible(it),
+ hir::ModuleDef::Const(it) => self.is_visible(it),
+ hir::ModuleDef::Static(it) => self.is_visible(it),
+ hir::ModuleDef::Trait(it) => self.is_visible(it),
+ hir::ModuleDef::TypeAlias(it) => self.is_visible(it),
+ hir::ModuleDef::Macro(it) => self.is_visible(it),
+ hir::ModuleDef::BuiltinType(_) => Visible::Yes,
+ },
+ ScopeDef::GenericParam(_)
+ | ScopeDef::ImplSelfType(_)
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Local(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown => Visible::Yes,
+ }
+ }
+
+ /// Checks if an item is visible and not `doc(hidden)` at the completion site.
+ pub(crate) fn is_visible<I>(&self, item: &I) -> Visible
+ where
+ I: hir::HasVisibility + hir::HasAttrs + hir::HasCrate + Copy,
+ {
+ let vis = item.visibility(self.db);
+ let attrs = item.attrs(self.db);
+ self.is_visible_impl(&vis, &attrs, item.krate(self.db))
+ }
+
+ /// Check if an item is `#[doc(hidden)]`.
+ pub(crate) fn is_item_hidden(&self, item: &hir::ItemInNs) -> bool {
+ let attrs = item.attrs(self.db);
+ let krate = item.krate(self.db);
+ match (attrs, krate) {
+ (Some(attrs), Some(krate)) => self.is_doc_hidden(&attrs, krate),
+ _ => false,
+ }
+ }
+
+ /// Whether the given trait is an operator trait or not.
+ pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
+ match trait_.attrs(self.db).lang() {
+ Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
+ None => false,
+ }
+ }
+
+ /// Returns the traits in scope, with the [`Drop`] trait removed.
+ pub(crate) fn traits_in_scope(&self) -> hir::VisibleTraits {
+ let mut traits_in_scope = self.scope.visible_traits();
+ if let Some(drop) = self.famous_defs().core_ops_Drop() {
+ traits_in_scope.0.remove(&drop.into());
+ }
+ traits_in_scope
+ }
+
+ pub(crate) fn iterate_path_candidates(
+ &self,
+ ty: &hir::Type,
+ mut cb: impl FnMut(hir::AssocItem),
+ ) {
+ let mut seen = FxHashSet::default();
+ ty.iterate_path_candidates(
+ self.db,
+ &self.scope,
+ &self.traits_in_scope(),
+ Some(self.module),
+ None,
+ |item| {
+ // We might iterate candidates of a trait multiple times here, so deduplicate
+ // them.
+ if seen.insert(item) {
+ cb(item)
+ }
+ None::<()>
+ },
+ );
+ }
+
+ /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items.
+ pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ let _p = profile::span("CompletionContext::process_all_names");
+ self.scope.process_all_names(&mut |name, def| {
+ if self.is_scope_def_hidden(def) {
+ return;
+ }
+
+ f(name, def);
+ });
+ }
+
+ pub(crate) fn process_all_names_raw(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ let _p = profile::span("CompletionContext::process_all_names_raw");
+ self.scope.process_all_names(&mut |name, def| f(name, def));
+ }
+
+ fn is_scope_def_hidden(&self, scope_def: ScopeDef) -> bool {
+ if let (Some(attrs), Some(krate)) = (scope_def.attrs(self.db), scope_def.krate(self.db)) {
+ return self.is_doc_hidden(&attrs, krate);
+ }
+
+ false
+ }
+
+ fn is_visible_impl(
+ &self,
+ vis: &hir::Visibility,
+ attrs: &hir::Attrs,
+ defining_crate: hir::Crate,
+ ) -> Visible {
+ if !vis.is_visible_from(self.db, self.module.into()) {
+ if !self.config.enable_private_editable {
+ return Visible::No;
+ }
+ // If the definition location is editable, also show private items
+ let root_file = defining_crate.root_file(self.db);
+ let source_root_id = self.db.file_source_root(root_file);
+ let is_editable = !self.db.source_root(source_root_id).is_library;
+ return if is_editable { Visible::Editable } else { Visible::No };
+ }
+
+ if self.is_doc_hidden(attrs, defining_crate) {
+ Visible::No
+ } else {
+ Visible::Yes
+ }
+ }
+
+ fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
+ // `doc(hidden)` items are only completed within the defining crate.
+ self.krate != defining_crate && attrs.has_doc_hidden()
+ }
+}
+
+// CompletionContext construction
+impl<'a> CompletionContext<'a> {
+ pub(super) fn new(
+ db: &'a RootDatabase,
+ position @ FilePosition { file_id, offset }: FilePosition,
+ config: &'a CompletionConfig,
+ ) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
+ let _p = profile::span("CompletionContext::new");
+ let sema = Semantics::new(db);
+
+ let original_file = sema.parse(file_id);
+
+ // Insert a fake ident to get a valid parse tree. We will use this file
+ // to determine context, though the original_file will be used for
+ // actual completion.
+ let file_with_fake_ident = {
+ let parse = db.parse(file_id);
+ let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
+ parse.reparse(&edit).tree()
+ };
+ let fake_ident_token =
+ file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?;
+
+ let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
+ let token = sema.descend_into_macros_single(original_token.clone());
+
+ // adjust for macro input, this still fails if there is no token written yet
+ let scope_offset = if original_token == token { offset } else { token.text_range().end() };
+ let scope = sema.scope_at_offset(&token.parent()?, scope_offset)?;
+
+ let krate = scope.krate();
+ let module = scope.module();
+
+ let mut locals = FxHashMap::default();
+ scope.process_all_names(&mut |name, scope| {
+ if let ScopeDef::Local(local) = scope {
+ locals.insert(name, local);
+ }
+ });
+
+ let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();
+
+ let mut ctx = CompletionContext {
+ sema,
+ scope,
+ db,
+ config,
+ position,
+ original_token,
+ token,
+ krate,
+ module,
+ expected_name: None,
+ expected_type: None,
+ qualifier_ctx: Default::default(),
+ locals,
+ depth_from_crate_root,
+ };
+ let ident_ctx = ctx.expand_and_analyze(
+ original_file.syntax().clone(),
+ file_with_fake_ident.syntax().clone(),
+ offset,
+ fake_ident_token,
+ )?;
+ Some((ctx, ident_ctx))
+ }
+}
+
+const OP_TRAIT_LANG_NAMES: &[&str] = &[
+ "add_assign",
+ "add",
+ "bitand_assign",
+ "bitand",
+ "bitor_assign",
+ "bitor",
+ "bitxor_assign",
+ "bitxor",
+ "deref_mut",
+ "deref",
+ "div_assign",
+ "div",
+ "eq",
+ "fn_mut",
+ "fn_once",
+ "fn",
+ "index_mut",
+ "index",
+ "mul_assign",
+ "mul",
+ "neg",
+ "not",
+ "partial_ord",
+ "rem_assign",
+ "rem",
+ "shl_assign",
+ "shl",
+ "shr_assign",
+ "shr",
+ "sub",
+];
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
new file mode 100644
index 000000000..22ec7cead
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -0,0 +1,1293 @@
+//! Module responsible for analyzing the code surrounding the cursor for completion.
+use std::iter;
+
+use hir::{Semantics, Type, TypeInfo};
+use ide_db::{active_parameter::ActiveParameter, RootDatabase};
+use syntax::{
+ algo::{find_node_at_offset, non_trivia_sibling},
+ ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef},
+ match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
+ SyntaxToken, TextRange, TextSize, T,
+};
+
+use crate::context::{
+ AttrCtx, CompletionAnalysis, CompletionContext, DotAccess, DotAccessKind, ExprCtx,
+ ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext,
+ NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathKind, PatternContext,
+ PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation,
+ COMPLETION_MARKER,
+};
+
+impl<'a> CompletionContext<'a> {
+ /// Expand attributes and macro calls at the current cursor position for both the original file
+ /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
+ /// and speculative states stay in sync.
+ pub(super) fn expand_and_analyze(
+ &mut self,
+ mut original_file: SyntaxNode,
+ mut speculative_file: SyntaxNode,
+ mut offset: TextSize,
+ mut fake_ident_token: SyntaxToken,
+ ) -> Option<CompletionAnalysis> {
+ let _p = profile::span("CompletionContext::expand_and_fill");
+ let mut derive_ctx = None;
+
+ 'expansion: loop {
+ let parent_item =
+ |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+ let ancestor_items = iter::successors(
+ Option::zip(
+ find_node_at_offset::<ast::Item>(&original_file, offset),
+ find_node_at_offset::<ast::Item>(&speculative_file, offset),
+ ),
+ |(a, b)| parent_item(a).zip(parent_item(b)),
+ );
+
+ // first try to expand attributes as these are always the outermost macro calls
+ 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
+ match (
+ self.sema.expand_attr_macro(&actual_item),
+ self.sema.speculative_expand_attr_macro(
+ &actual_item,
+ &item_with_fake_ident,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ // maybe parent items have attributes, so continue walking the ancestors
+ (None, None) => continue 'ancestors,
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ break 'expansion;
+ }
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue 'expansion;
+ }
+ // exactly one expansion failed, inconsistent state so stop expanding completely
+ _ => break 'expansion,
+ }
+ }
+
+ // No attributes have been expanded, so look for macro_call! token trees or derive token trees
+ let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
+ Some(it) => it,
+ None => break 'expansion,
+ };
+ let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
+ Some(it) => it,
+ None => break 'expansion,
+ };
+
+ // Expand pseudo-derive expansion
+ if let (Some(orig_attr), Some(spec_attr)) = (
+ orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ ) {
+ if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
+ self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
+ self.sema.speculative_expand_derive_as_pseudo_attr_macro(
+ &orig_attr,
+ &spec_attr,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ derive_ctx = Some((
+ actual_expansion,
+ fake_expansion,
+ fake_mapped_token.text_range().start(),
+ orig_attr,
+ ));
+ }
+ // at this point we won't have any more successful expansions, so stop
+ break 'expansion;
+ }
+
+ // Expand fn-like macro calls
+ if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
+ orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ ) {
+ let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
+ let mac_call_path1 =
+ macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+
+ // inconsistent state, stop expanding
+ if mac_call_path0 != mac_call_path1 {
+ break 'expansion;
+ }
+ let speculative_args = match macro_call_with_fake_ident.token_tree() {
+ Some(tt) => tt,
+ None => break 'expansion,
+ };
+
+ match (
+ self.sema.expand(&actual_macro_call),
+ self.sema.speculative_expand(
+ &actual_macro_call,
+ &speculative_args,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ break 'expansion;
+ }
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue 'expansion;
+ }
+ // at least one expansion failed, we won't have anything to expand from this point
+ // onwards so break out
+ _ => break 'expansion,
+ }
+ }
+
+ // none of our states have changed so stop the loop
+ break 'expansion;
+ }
+
+ self.analyze(&original_file, speculative_file, offset, derive_ctx)
+ }
+
+ /// Calculate the expected type and name of the cursor position.
+ fn expected_type_and_name(
+ &self,
+ name_like: &ast::NameLike,
+ ) -> (Option<Type>, Option<NameOrNameRef>) {
+ let mut node = match self.token.parent() {
+ Some(it) => it,
+ None => return (None, None),
+ };
+
+ let strip_refs = |mut ty: Type| match name_like {
+ ast::NameLike::NameRef(n) => {
+ let p = match n.syntax().parent() {
+ Some(it) => it,
+ None => return ty,
+ };
+ let top_syn = match_ast! {
+ match p {
+ ast::FieldExpr(e) => e
+ .syntax()
+ .ancestors()
+ .map_while(ast::FieldExpr::cast)
+ .last()
+ .map(|it| it.syntax().clone()),
+ ast::PathSegment(e) => e
+ .syntax()
+ .ancestors()
+ .skip(1)
+ .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
+ .find_map(ast::PathExpr::cast)
+ .map(|it| it.syntax().clone()),
+ _ => None
+ }
+ };
+ let top_syn = match top_syn {
+ Some(it) => it,
+ None => return ty,
+ };
+ for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
+ cov_mark::hit!(expected_type_fn_param_ref);
+ ty = ty.strip_reference();
+ }
+ ty
+ }
+ _ => ty,
+ };
+
+ loop {
+ break match_ast! {
+ match node {
+ ast::LetStmt(it) => {
+ cov_mark::hit!(expected_type_let_with_leading_char);
+ cov_mark::hit!(expected_type_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| self.sema.type_of_pat(&pat))
+ .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ let name = match it.pat() {
+ Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
+ Some(_) | None => None,
+ };
+
+ (ty, name)
+ },
+ ast::LetExpr(it) => {
+ cov_mark::hit!(expected_type_if_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| self.sema.type_of_pat(&pat))
+ .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::ArgList(_) => {
+ cov_mark::hit!(expected_type_fn_param);
+ ActiveParameter::at_token(
+ &self.sema,
+ self.token.clone(),
+ ).map(|ap| {
+ let name = ap.ident().map(NameOrNameRef::Name);
+
+ let ty = strip_refs(ap.ty);
+ (Some(ty), name)
+ })
+ .unwrap_or((None, None))
+ },
+ ast::RecordExprFieldList(it) => {
+ // wouldn't try {} be nice...
+ (|| {
+ if self.token.kind() == T![..]
+ || self.token.prev_token().map(|t| t.kind()) == Some(T![..])
+ {
+ cov_mark::hit!(expected_type_struct_func_update);
+ let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
+ let ty = self.sema.type_of_expr(&record_expr.into())?;
+ Some((
+ Some(ty.original),
+ None
+ ))
+ } else {
+ cov_mark::hit!(expected_type_struct_field_without_leading_char);
+ let expr_field = self.token.prev_sibling_or_token()?
+ .into_node()
+ .and_then(ast::RecordExprField::cast)?;
+ let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
+ Some((
+ Some(ty),
+ expr_field.field_name().map(NameOrNameRef::NameRef),
+ ))
+ }
+ })().unwrap_or((None, None))
+ },
+ ast::RecordExprField(it) => {
+ if let Some(expr) = it.expr() {
+ cov_mark::hit!(expected_type_struct_field_with_leading_char);
+ (
+ self.sema.type_of_expr(&expr).map(TypeInfo::original),
+ it.field_name().map(NameOrNameRef::NameRef),
+ )
+ } else {
+ cov_mark::hit!(expected_type_struct_field_followed_by_comma);
+ let ty = self.sema.resolve_record_field(&it)
+ .map(|(_, _, ty)| ty);
+ (
+ ty,
+ it.field_name().map(NameOrNameRef::NameRef),
+ )
+ }
+ },
+ // match foo { $0 }
+ // match foo { ..., pat => $0 }
+ ast::MatchExpr(it) => {
+ let on_arrow = previous_non_trivia_token(self.token.clone()).map_or(false, |it| T![=>] == it.kind());
+
+ let ty = if on_arrow {
+ // match foo { ..., pat => $0 }
+ cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
+ cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
+ self.sema.type_of_expr(&it.into())
+ } else {
+ // match foo { $0 }
+ cov_mark::hit!(expected_type_match_arm_without_leading_char);
+ it.expr().and_then(|e| self.sema.type_of_expr(&e))
+ }.map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::IfExpr(it) => {
+ let ty = it.condition()
+ .and_then(|e| self.sema.type_of_expr(&e))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::IdentPat(it) => {
+ cov_mark::hit!(expected_type_if_let_with_leading_char);
+ cov_mark::hit!(expected_type_match_arm_with_leading_char);
+ let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::Fn(it) => {
+ cov_mark::hit!(expected_type_fn_ret_with_leading_char);
+ cov_mark::hit!(expected_type_fn_ret_without_leading_char);
+ let def = self.sema.to_def(&it);
+ (def.map(|def| def.ret_type(self.db)), None)
+ },
+ ast::ClosureExpr(it) => {
+ let ty = self.sema.type_of_expr(&it.into());
+ ty.and_then(|ty| ty.original.as_callable(self.db))
+ .map(|c| (Some(c.return_type()), None))
+ .unwrap_or((None, None))
+ },
+ ast::ParamList(_) => (None, None),
+ ast::Stmt(_) => (None, None),
+ ast::Item(_) => (None, None),
+ _ => {
+ match node.parent() {
+ Some(n) => {
+ node = n;
+ continue;
+ },
+ None => (None, None),
+ }
+ },
+ }
+ };
+ }
+ }
+
+ /// Fill the completion context, this is what does semantic reasoning about the surrounding context
+ /// of the completion location.
+ ///
+ /// Returns `None` when the position offers nothing to complete (e.g. directly
+ /// after the pattern of a `for` loop, where only the `in` keyword can follow).
+ fn analyze(
+ &mut self,
+ original_file: &SyntaxNode,
+ file_with_fake_ident: SyntaxNode,
+ offset: TextSize,
+ derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
+ ) -> Option<CompletionAnalysis> {
+ let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased()?;
+ let syntax_element = NodeOrToken::Token(fake_ident_token);
+ if is_in_token_of_for_loop(syntax_element.clone()) {
+ // for pat $0
+ // there is nothing to complete here except `in` keyword
+ // don't bother populating the context
+ // FIXME: the completion calculations should end up good enough
+ // such that this special case becomes unnecessary
+ return None;
+ }
+
+ // Overwrite the path kind for derives
+ if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
+ if let Some(ast::NameLike::NameRef(name_ref)) =
+ find_node_at_offset(&file_with_fake_ident, offset)
+ {
+ let parent = name_ref.syntax().parent()?;
+ let (mut nameref_ctx, _) =
+ Self::classify_name_ref(&self.sema, &original_file, name_ref, parent)?;
+ if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
+ path_ctx.kind = PathKind::Derive {
+ existing_derives: self
+ .sema
+ .resolve_derive_macro(&origin_attr)
+ .into_iter()
+ .flatten()
+ .flatten()
+ .collect(),
+ };
+ }
+ return Some(CompletionAnalysis::NameRef(nameref_ctx));
+ }
+ return None;
+ }
+
+ // No name-like node at the offset: we may still be inside a string literal
+ // or inside the token tree of an unexpanded attribute.
+ let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
+ Some(it) => it,
+ None => {
+ let analysis =
+ if let Some(original) = ast::String::cast(self.original_token.clone()) {
+ CompletionAnalysis::String {
+ original,
+ expanded: ast::String::cast(self.token.clone()),
+ }
+ } else {
+ // Fix up trailing whitespace problem
+ // #[attr(foo = $0
+ let token =
+ syntax::algo::skip_trivia_token(self.token.clone(), Direction::Prev)?;
+ let p = token.parent()?;
+ if p.kind() == SyntaxKind::TOKEN_TREE
+ && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
+ {
+ let colon_prefix = previous_non_trivia_token(self.token.clone())
+ .map_or(false, |it| T![:] == it.kind());
+ CompletionAnalysis::UnexpandedAttrTT {
+ fake_attribute_under_caret: syntax_element
+ .ancestors()
+ .find_map(ast::Attr::cast),
+ colon_prefix,
+ }
+ } else {
+ return None;
+ }
+ };
+ return Some(analysis);
+ }
+ };
+ // Record the expected type/name before classifying the found node.
+ (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like);
+ let analysis = match name_like {
+ ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime(
+ Self::classify_lifetime(&self.sema, original_file, lifetime)?,
+ ),
+ ast::NameLike::NameRef(name_ref) => {
+ let parent = name_ref.syntax().parent()?;
+ let (nameref_ctx, qualifier_ctx) =
+ Self::classify_name_ref(&self.sema, &original_file, name_ref, parent.clone())?;
+
+ self.qualifier_ctx = qualifier_ctx;
+ CompletionAnalysis::NameRef(nameref_ctx)
+ }
+ ast::NameLike::Name(name) => {
+ let name_ctx = Self::classify_name(&self.sema, original_file, name)?;
+ CompletionAnalysis::Name(name_ctx)
+ }
+ };
+ Some(analysis)
+ }
+
+ /// Classifies how the lifetime under the completion position is used:
+ /// a lifetime-parameter declaration, a loop-label definition/reference, or a
+ /// plain lifetime reference. Returns `None` when the parent is an error node.
+ fn classify_lifetime(
+ _sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ lifetime: ast::Lifetime,
+ ) -> Option<LifetimeContext> {
+ let parent = lifetime.syntax().parent()?;
+ if parent.kind() == SyntaxKind::ERROR {
+ return None;
+ }
+
+ let kind = match_ast! {
+ match parent {
+ ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
+ is_decl: param.lifetime().as_ref() == Some(&lifetime),
+ param
+ },
+ ast::BreakExpr(_) => LifetimeKind::LabelRef,
+ ast::ContinueExpr(_) => LifetimeKind::LabelRef,
+ ast::Label(_) => LifetimeKind::LabelDef,
+ _ => LifetimeKind::Lifetime,
+ }
+ };
+ // Map the lifetime node back into the original (non-fake-ident) file.
+ let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
+
+ Some(LifetimeContext { lifetime, kind })
+ }
+
+ /// Classifies what kind of item the `Name` node under the completion position
+ /// introduces (struct, fn, module, pattern binding, …), based on its parent
+ /// node. Returns `None` for parents that are not name-introducing items.
+ fn classify_name(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ name: ast::Name,
+ ) -> Option<NameContext> {
+ let parent = name.syntax().parent()?;
+ let kind = match_ast! {
+ match parent {
+ ast::Const(_) => NameKind::Const,
+ ast::ConstParam(_) => NameKind::ConstParam,
+ ast::Enum(_) => NameKind::Enum,
+ ast::Fn(_) => NameKind::Function,
+ ast::IdentPat(bind_pat) => {
+ // A binding also gets full pattern context; additionally resolve
+ // the enclosing record pattern for shorthand field patterns.
+ let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
+ if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
+ pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
+ }
+
+ NameKind::IdentPat(pat_ctx)
+ },
+ ast::MacroDef(_) => NameKind::MacroDef,
+ ast::MacroRules(_) => NameKind::MacroRules,
+ ast::Module(module) => NameKind::Module(module),
+ ast::RecordField(_) => NameKind::RecordField,
+ ast::Rename(_) => NameKind::Rename,
+ ast::SelfParam(_) => NameKind::SelfParam,
+ ast::Static(_) => NameKind::Static,
+ ast::Struct(_) => NameKind::Struct,
+ ast::Trait(_) => NameKind::Trait,
+ ast::TypeAlias(_) => NameKind::TypeAlias,
+ ast::TypeParam(_) => NameKind::TypeParam,
+ ast::Union(_) => NameKind::Union,
+ ast::Variant(_) => NameKind::Variant,
+ _ => return None,
+ }
+ };
+ // Map the name node back into the original (non-fake-ident) file.
+ let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
+ Some(NameContext { name, kind })
+ }
+
+ /// Classifies the `NameRef` under the completion position, producing the
+ /// [`NameRefContext`] describing what is being completed (record-expr field,
+ /// record-pat field, dot access, or a path with its inferred [`PathKind`])
+ /// together with any qualifier context (`unsafe`/visibility tokens preceding
+ /// an item-position path).
+ fn classify_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ name_ref: ast::NameRef,
+ parent: SyntaxNode,
+ ) -> Option<(NameRefContext, QualifierCtx)> {
+ let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
+
+ let make_res =
+ |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
+
+ // Record expression field position: `S { fie$0 }`.
+ if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
+ let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
+ .map_or(false, |it| T![.] == it.kind());
+
+ return find_node_in_file_compensated(
+ sema,
+ original_file,
+ &record_field.parent_record_lit(),
+ )
+ .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
+ .map(make_res);
+ }
+ // Record pattern field position: `S { fie$0 } => ...`.
+ if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
+ let kind = NameRefKind::Pattern(PatternContext {
+ param_ctx: None,
+ has_type_ascription: false,
+ ref_token: None,
+ mut_token: None,
+ record_pat: find_node_in_file_compensated(
+ sema,
+ original_file,
+ &record_field.parent_record_pat(),
+ ),
+ ..pattern_context_for(
+ sema,
+ original_file,
+ record_field.parent_record_pat().clone().into(),
+ )
+ });
+ return Some(make_res(kind));
+ }
+
+ // Beyond the special cases above we only handle path segments and dot
+ // accesses; field/method accesses are resolved and returned right here.
+ let segment = match_ast! {
+ match parent {
+ ast::PathSegment(segment) => segment,
+ ast::FieldExpr(field) => {
+ let receiver = find_opt_node_in_file(original_file, field.expr());
+ let receiver_is_ambiguous_float_literal = match &receiver {
+ Some(ast::Expr::Literal(l)) => matches! {
+ l.kind(),
+ ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
+ },
+ _ => false,
+ };
+ let kind = NameRefKind::DotAccess(DotAccess {
+ receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
+ receiver
+ });
+ return Some(make_res(kind));
+ },
+ ast::MethodCallExpr(method) => {
+ let receiver = find_opt_node_in_file(original_file, method.receiver());
+ let kind = NameRefKind::DotAccess(DotAccess {
+ receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
+ receiver
+ });
+ return Some(make_res(kind));
+ },
+ _ => return None,
+ }
+ };
+
+ let path = segment.parent_path();
+ // Default path context; `kind`, parens, qualifiers etc. are refined below.
+ let mut path_ctx = PathCompletionCtx {
+ has_call_parens: false,
+ has_macro_bang: false,
+ qualified: Qualified::No,
+ parent: None,
+ path: path.clone(),
+ kind: PathKind::Item { kind: ItemListKind::SourceFile },
+ has_type_args: false,
+ use_tree_parent: false,
+ };
+
+ // Helper: is this node a statement-position child of a block?
+ let is_in_block = |it: &SyntaxNode| {
+ it.parent()
+ .map(|node| {
+ ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
+ })
+ .unwrap_or(false)
+ };
+ // Helper: the record expression this path is a functional-update (`..`) of, if any.
+ let func_update_record = |syn: &SyntaxNode| {
+ if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
+ find_node_in_file_compensated(sema, original_file, &record_expr)
+ } else {
+ None
+ }
+ };
+ // Helper: does an `if` expression statement immediately precede this node?
+ let after_if_expr = |node: SyntaxNode| {
+ let prev_expr = (|| {
+ let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
+ ast::ExprStmt::cast(prev_sibling)?.expr()
+ })();
+ matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
+ };
+
+ // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
+ // ex. trait Foo $0 {}
+ // in these cases parser recovery usually kicks in for our inserted identifier, causing it
+ // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
+ // expression or an item list.
+ // The following code checks if the body is missing, if it is we either cut off the body
+ // from the item or it was missing in the first place
+ let inbetween_body_and_decl_check = |node: SyntaxNode| {
+ if let Some(NodeOrToken::Node(n)) =
+ syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
+ {
+ if let Some(item) = ast::Item::cast(n) {
+ let is_inbetween = match &item {
+ ast::Item::Const(it) => it.body().is_none(),
+ ast::Item::Enum(it) => it.variant_list().is_none(),
+ ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
+ ast::Item::Fn(it) => it.body().is_none(),
+ ast::Item::Impl(it) => it.assoc_item_list().is_none(),
+ ast::Item::Module(it) => it.item_list().is_none(),
+ ast::Item::Static(it) => it.body().is_none(),
+ ast::Item::Struct(it) => it.field_list().is_none(),
+ ast::Item::Trait(it) => it.assoc_item_list().is_none(),
+ ast::Item::TypeAlias(it) => it.ty().is_none(),
+ ast::Item::Union(it) => it.record_field_list().is_none(),
+ _ => false,
+ };
+ if is_inbetween {
+ return Some(item);
+ }
+ }
+ }
+ None
+ };
+
+ // Helper: where a type path sits (ascription, impl header, bounds, generics, ...).
+ let type_location = |node: &SyntaxNode| {
+ let parent = node.parent()?;
+ let res = match_ast! {
+ match parent {
+ ast::Const(it) => {
+ let name = find_opt_node_in_file(original_file, it.name())?;
+ let original = ast::Const::cast(name.syntax().parent()?)?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
+ },
+ ast::RetType(it) => {
+ if it.thin_arrow_token().is_none() {
+ return None;
+ }
+ let parent = match ast::Fn::cast(parent.parent()?) {
+ Some(x) => x.param_list(),
+ None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
+ };
+
+ let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
+ match parent {
+ ast::ClosureExpr(it) => {
+ it.body()
+ },
+ ast::Fn(it) => {
+ it.body().map(ast::Expr::BlockExpr)
+ },
+ _ => return None,
+ }
+ }))
+ },
+ ast::Param(it) => {
+ if it.colon_token().is_none() {
+ return None;
+ }
+ TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
+ },
+ ast::LetStmt(it) => {
+ if it.colon_token().is_none() {
+ return None;
+ }
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
+ },
+ ast::Impl(it) => {
+ match it.trait_() {
+ Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
+ _ => match it.self_ty() {
+ Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
+ _ => return None,
+ },
+ }
+ },
+ ast::TypeBound(_) => TypeLocation::TypeBound,
+ // is this case needed?
+ ast::TypeBoundList(_) => TypeLocation::TypeBound,
+ ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
+ // is this case needed?
+ ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
+ ast::TupleField(_) => TypeLocation::TupleField,
+ _ => return None,
+ }
+ };
+ Some(res)
+ };
+
+ // Helper: is the expression the condition of an enclosing `if`/`while`?
+ let is_in_condition = |it: &ast::Expr| {
+ (|| {
+ let parent = it.syntax().parent()?;
+ if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
+ Some(expr.condition()? == *it)
+ } else if let Some(expr) = ast::IfExpr::cast(parent) {
+ Some(expr.condition()? == *it)
+ } else {
+ None
+ }
+ })()
+ .unwrap_or(false)
+ };
+
+ // Builds the `PathKind::Expr` context for an expression-position path.
+ let make_path_kind_expr = |expr: ast::Expr| {
+ let it = expr.syntax();
+ let in_block_expr = is_in_block(it);
+ let in_loop_body = is_in_loop_body(it);
+ let after_if_expr = after_if_expr(it.clone());
+ let ref_expr_parent =
+ path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
+ let (innermost_ret_ty, self_param) = {
+ let find_ret_ty = |it: SyntaxNode| {
+ if let Some(item) = ast::Item::cast(it.clone()) {
+ match item {
+ ast::Item::Fn(f) => {
+ Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
+ }
+ ast::Item::MacroCall(_) => None,
+ _ => Some(None),
+ }
+ } else {
+ let expr = ast::Expr::cast(it)?;
+ let callable = match expr {
+ // FIXME
+ // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
+ ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
+ _ => return None,
+ };
+ Some(
+ callable
+ .and_then(|c| c.adjusted().as_callable(sema.db))
+ .map(|it| it.return_type()),
+ )
+ }
+ };
+ let find_fn_self_param = |it| match it {
+ ast::Item::Fn(fn_) => {
+ Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
+ }
+ ast::Item::MacroCall(_) => None,
+ _ => Some(None),
+ };
+
+ match find_node_in_file_compensated(sema, original_file, &expr) {
+ Some(it) => {
+ let innermost_ret_ty = sema
+ .ancestors_with_macros(it.syntax().clone())
+ .find_map(find_ret_ty)
+ .flatten();
+
+ let self_param = sema
+ .ancestors_with_macros(it.syntax().clone())
+ .filter_map(ast::Item::cast)
+ .find_map(find_fn_self_param)
+ .flatten();
+ (innermost_ret_ty, self_param)
+ }
+ None => (None, None),
+ }
+ };
+ let is_func_update = func_update_record(it);
+ let in_condition = is_in_condition(&expr);
+ let incomplete_let = it
+ .parent()
+ .and_then(ast::LetStmt::cast)
+ .map_or(false, |it| it.semicolon_token().is_none());
+ let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
+
+ let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
+ Some(arm) => arm
+ .fat_arrow_token()
+ .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
+ None => false,
+ };
+
+ PathKind::Expr {
+ expr_ctx: ExprCtx {
+ in_block_expr,
+ in_loop_body,
+ after_if_expr,
+ in_condition,
+ ref_expr_parent,
+ is_func_update,
+ innermost_ret_ty,
+ self_param,
+ incomplete_let,
+ impl_,
+ in_match_guard,
+ },
+ }
+ };
+ // Builds the `PathKind::Type` context for a type-position path.
+ let make_path_kind_type = |ty: ast::Type| {
+ let location = type_location(ty.syntax());
+ PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
+ };
+
+ let mut kind_macro_call = |it: ast::MacroCall| {
+ path_ctx.has_macro_bang = it.excl_token().is_some();
+ let parent = it.syntax().parent()?;
+ // Any path in an item list will be treated as a macro call by the parser
+ let kind = match_ast! {
+ match parent {
+ ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
+ ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
+ ast::MacroType(ty) => make_path_kind_type(ty.into()),
+ ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
+ ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
+ Some(it) => match_ast! {
+ match it {
+ ast::Trait(_) => ItemListKind::Trait,
+ ast::Impl(it) => if it.trait_().is_some() {
+ ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
+ } else {
+ ItemListKind::Impl
+ },
+ _ => return None
+ }
+ },
+ None => return None,
+ } },
+ ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
+ ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
+ _ => return None,
+ }
+ };
+ Some(kind)
+ };
+ // Builds the `PathKind::Attr` context for a path inside an attribute.
+ let make_path_kind_attr = |meta: ast::Meta| {
+ let attr = meta.parent_attr()?;
+ let kind = attr.kind();
+ let attached = attr.syntax().parent()?;
+ let is_trailing_outer_attr = kind != AttrKind::Inner
+ && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next)
+ .is_none();
+ let annotated_item_kind =
+ if is_trailing_outer_attr { None } else { Some(attached.kind()) };
+ Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
+ };
+
+ // Infer the path kind
+ let parent = path.syntax().parent()?;
+ let kind = match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ if let Some(p) = it.syntax().parent() {
+ if ast::ExprStmt::can_cast(p.kind()) {
+ if let Some(kind) = inbetween_body_and_decl_check(p) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+ }
+ }
+
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ // A macro call in this position is usually a result of parsing recovery, so check that
+ if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ // completing inside a qualifier
+ ast::Path(parent) => {
+ path_ctx.parent = Some(parent.clone());
+ let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
+ }
+ },
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
+ };
+
+ path_ctx.kind = kind;
+ path_ctx.has_type_args = segment.generic_arg_list().is_some();
+
+ // calculate the qualifier context
+ if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
+ path_ctx.use_tree_parent = use_tree_parent;
+ if !use_tree_parent && segment.coloncolon_token().is_some() {
+ path_ctx.qualified = Qualified::Absolute;
+ } else {
+ let qualifier = qualifier
+ .segment()
+ .and_then(|it| find_node_in_file(original_file, &it))
+ .map(|it| it.parent_path());
+ if let Some(qualifier) = qualifier {
+ let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
+ Some(ast::PathSegmentKind::Type {
+ type_ref: Some(type_ref),
+ trait_ref,
+ }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)),
+ _ => None,
+ };
+
+ path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
+ let ty = match ty {
+ ast::Type::InferType(_) => None,
+ ty => sema.resolve_type(&ty),
+ };
+ let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
+ Qualified::TypeAnchor { ty, trait_ }
+ } else {
+ let res = sema.resolve_path(&qualifier);
+
+ // For understanding how and why super_chain_len is calculated the way it
+ // is check the documentation at it's definition
+ let mut segment_count = 0;
+ let super_count =
+ iter::successors(Some(qualifier.clone()), |p| p.qualifier())
+ .take_while(|p| {
+ p.segment()
+ .and_then(|s| {
+ segment_count += 1;
+ s.super_token()
+ })
+ .is_some()
+ })
+ .count();
+
+ let super_chain_len =
+ if segment_count > super_count { None } else { Some(super_count) };
+
+ Qualified::With { path: qualifier, resolution: res, super_chain_len }
+ }
+ };
+ }
+ } else if let Some(segment) = path.segment() {
+ if segment.coloncolon_token().is_some() {
+ path_ctx.qualified = Qualified::Absolute;
+ }
+ }
+
+ let mut qualifier_ctx = QualifierCtx::default();
+ if path_ctx.is_trivial_path() {
+ // fetch the full expression that may have qualifiers attached to it
+ let top_node = match path_ctx.kind {
+ PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
+ parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
+ let parent = p.parent()?;
+ if ast::StmtList::can_cast(parent.kind()) {
+ Some(p)
+ } else if ast::ExprStmt::can_cast(parent.kind()) {
+ Some(parent)
+ } else {
+ None
+ }
+ })
+ }
+ PathKind::Item { .. } => {
+ parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
+ }
+ _ => None,
+ };
+ if let Some(top) = top_node {
+ // Look for `unsafe`/visibility tokens that parser recovery left in a
+ // preceding error node.
+ if let Some(NodeOrToken::Node(error_node)) =
+ syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
+ {
+ if error_node.kind() == SyntaxKind::ERROR {
+ qualifier_ctx.unsafe_tok = error_node
+ .children_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .find(|it| it.kind() == T![unsafe]);
+ qualifier_ctx.vis_node =
+ error_node.children().find_map(ast::Visibility::cast);
+ }
+ }
+
+ if let PathKind::Item { .. } = path_ctx.kind {
+ if qualifier_ctx.none() {
+ if let Some(t) = top.first_token() {
+ if let Some(prev) = t
+ .prev_token()
+ .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
+ {
+ if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
+ // This was inferred to be an item position path, but it seems
+ // to be part of some other broken node which leaked into an item
+ // list
+ return None;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
+ }
+}
+
+/// Computes the [`PatternContext`] for `pat`: whether its position requires an
+/// irrefutable pattern, the enclosing fn/closure parameter (if the pattern is a
+/// parameter), type ascription, `ref`/`mut` tokens, the parent pattern, and the
+/// immediately enclosing `impl`, if any.
+fn pattern_context_for(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ pat: ast::Pat,
+) -> PatternContext {
+ let mut param_ctx = None;
+ // Inspect the first non-pattern ancestor to decide refutability and
+ // whether a type ascription is present.
+ let (refutability, has_type_ascription) =
+ pat
+ .syntax()
+ .ancestors()
+ .skip_while(|it| ast::Pat::can_cast(it.kind()))
+ .next()
+ .map_or((PatternRefutability::Irrefutable, false), |node| {
+ let refutability = match_ast! {
+ match node {
+ ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
+ ast::Param(param) => {
+ let has_type_ascription = param.ty().is_some();
+ // Resolve the parameter list back into the original file
+ // and record whether it belongs to a fn or a closure.
+ param_ctx = (|| {
+ let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
+ let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
+ let param_list_owner = param_list.syntax().parent()?;
+ let kind = match_ast! {
+ match param_list_owner {
+ ast::ClosureExpr(closure) => ParamKind::Closure(closure),
+ ast::Fn(fn_) => ParamKind::Function(fn_),
+ _ => return None,
+ }
+ };
+ Some(ParamContext {
+ param_list, param, kind
+ })
+ })();
+ return (PatternRefutability::Irrefutable, has_type_ascription)
+ },
+ ast::MatchArm(_) => PatternRefutability::Refutable,
+ ast::LetExpr(_) => PatternRefutability::Refutable,
+ ast::ForExpr(_) => PatternRefutability::Irrefutable,
+ _ => PatternRefutability::Irrefutable,
+ }
+ };
+ (refutability, false)
+ });
+ let (ref_token, mut_token) = match &pat {
+ ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
+ _ => (None, None),
+ };
+
+ PatternContext {
+ refutability,
+ param_ctx,
+ has_type_ascription,
+ parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
+ mut_token,
+ ref_token,
+ record_pat: None,
+ impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
+ }
+}
+
+/// Returns the `impl` block that immediately encloses `node`, tolerating at
+/// most one intervening associated item (const/fn/type alias) and skipping
+/// macro-call items. Any other intervening item means the node is not
+/// directly inside an impl, and `None` is returned.
+fn fetch_immediate_impl(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ node: &SyntaxNode,
+) -> Option<ast::Impl> {
+ let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
+ .filter_map(ast::Item::cast)
+ .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
+
+ match ancestors.next()? {
+ ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
+ ast::Item::Impl(it) => return Some(it),
+ _ => return None,
+ }
+ match ancestors.next()? {
+ ast::Item::Impl(it) => Some(it),
+ _ => None,
+ }
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range.
+/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
+/// `Option`-accepting convenience wrapper around [`find_node_in_file`].
+fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
+ find_node_in_file(syntax, &node?)
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range.
+/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
+fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
+ let syntax_range = syntax.text_range();
+ let range = node.syntax().text_range();
+ // Clip the node's range to the file, then cast the covering element back to `N`.
+ let intersection = range.intersect(syntax_range)?;
+ syntax.covering_element(intersection).ancestors().find_map(N::cast)
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
+/// for the offset introduced by the fake ident.
+/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
+fn find_node_in_file_compensated<N: AstNode>(
+ sema: &Semantics<'_, RootDatabase>,
+ in_file: &SyntaxNode,
+ node: &N,
+) -> Option<N> {
+ ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
+}
+
+/// Returns the macro-aware ancestors of `node` located inside `in_file`,
+/// shrinking `node`'s range by the length of the inserted completion marker
+/// before looking it up. Returns `None` when the compensated range is invalid
+/// or falls outside `in_file`.
+fn ancestors_in_file_compensated<'sema>(
+ sema: &'sema Semantics<'_, RootDatabase>,
+ in_file: &SyntaxNode,
+ node: &SyntaxNode,
+) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
+ let syntax_range = in_file.text_range();
+ let range = node.text_range();
+ // Drop the fake-ident marker's length from the end of the range.
+ let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
+ if end < range.start() {
+ return None;
+ }
+ let range = TextRange::new(range.start(), end);
+ // our inserted ident could cause `range` to go outside of the original syntax, so cap it
+ let intersection = range.intersect(syntax_range)?;
+ let node = match in_file.covering_element(intersection) {
+ NodeOrToken::Node(node) => node,
+ NodeOrToken::Token(tok) => tok.parent()?,
+ };
+ Some(sema.ancestors_with_macros(node))
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
+/// for the offset introduced by the fake ident.
+/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
+/// `Option`-accepting convenience wrapper around [`find_node_in_file_compensated`].
+fn find_opt_node_in_file_compensated<N: AstNode>(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ node: Option<N>,
+) -> Option<N> {
+ find_node_in_file_compensated(sema, syntax, &node?)
+}
+
+/// Returns the qualifier of `path` paired with `false`, or — when the path is
+/// unqualified but sits inside a use-tree list — the parent use tree's path
+/// paired with `true`.
+fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
+ if let Some(qual) = path.qualifier() {
+ return Some((qual, false));
+ }
+ let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
+ let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
+ Some((use_tree.path()?, true))
+}
+
+/// Heuristically determines whether `element` is the token sitting where the
+/// `in` keyword of a `for` loop belongs (i.e. right after the loop pattern).
+pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
+ // oh my ...
+ (|| {
+ let syntax_token = element.into_token()?;
+ let range = syntax_token.text_range();
+ let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;
+
+ // check if the current token is the `in` token of a for loop
+ if let Some(token) = for_expr.in_token() {
+ return Some(syntax_token == token);
+ }
+ let pat = for_expr.pat()?;
+ if range.end() < pat.syntax().text_range().end() {
+ // if we are inside or before the pattern we can't be at the `in` token position
+ return None;
+ }
+ let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
+ Some(match next_sibl {
+ // the loop body is some node, if our token is at the start we are at the `in` position,
+ // otherwise we could be in a recovered expression, we don't wanna ruin completions there
+ syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
+ // the loop body consists of a single token, if we are this we are certainly at the `in` token position
+ syntax::NodeOrToken::Token(t) => t == syntax_token,
+ })
+ })()
+ .unwrap_or(false)
+}
+
+#[test]
+fn test_for_is_prev2() {
+ // `$0` sits right after the `for` pattern, so the predicate must hold there.
+ crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
+}
+
+/// Whether `node` lies inside the body of an enclosing `for`/`while`/`loop`,
+/// without crossing a fn or closure boundary (a loop outside the nearest
+/// fn/closure does not count).
+pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
+ node.ancestors()
+ .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
+ .find_map(|it| {
+ let loop_body = match_ast! {
+ match it {
+ ast::ForExpr(it) => it.loop_body(),
+ ast::WhileExpr(it) => it.loop_body(),
+ ast::LoopExpr(it) => it.loop_body(),
+ _ => None,
+ }
+ };
+ // Only count it when the node is actually within the body range.
+ loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
+ })
+ .is_some()
+}
+
+/// Walks backwards from `e` (a node's first token, or a token itself) and
+/// returns the first preceding token that is not trivia (whitespace/comments).
+fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
+ let mut token = match e.into() {
+ SyntaxElement::Node(n) => n.first_token()?,
+ SyntaxElement::Token(t) => t,
+ }
+ .prev_token();
+ while let Some(inner) = token {
+ if !inner.kind().is_trivia() {
+ return Some(inner);
+ } else {
+ token = inner.prev_token();
+ }
+ }
+ None
+}
+
+/// Returns the next sibling element of `ele` that is not trivia
+/// (whitespace/comments), if any.
+fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
+ let mut e = ele.next_sibling_or_token();
+ while let Some(inner) = e {
+ if !inner.kind().is_trivia() {
+ return Some(inner);
+ } else {
+ e = inner.next_sibling_or_token();
+ }
+ }
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
new file mode 100644
index 000000000..50845b388
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
@@ -0,0 +1,413 @@
+use expect_test::{expect, Expect};
+use hir::HirDisplay;
+
+use crate::{
+ context::CompletionContext,
+ tests::{position, TEST_CONFIG},
+};
+
+/// Builds a `CompletionContext` at the `$0` marker in `ra_fixture` and checks
+/// the inferred expected type and name against `expect`; `?` stands for
+/// "nothing inferred".
+fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) {
+    let (db, pos) = position(ra_fixture);
+    let config = TEST_CONFIG;
+    let (completion_context, _analysis) = CompletionContext::new(&db, pos, &config).unwrap();
+
+    let ty = completion_context
+        .expected_type
+        .map(|t| t.display_test(&db).to_string())
+        .unwrap_or("?".to_owned());
+
+    let name =
+        completion_context.expected_name.map_or_else(|| "?".to_owned(), |name| name.to_string());
+
+    expect.assert_eq(&format!("ty: {}, name: {}", ty, name));
+}
+
+// Expected type/name inference for `let` bindings and function-call argument
+// positions, both with and without a partially typed identifier at the cursor.
+#[test]
+fn expected_type_let_without_leading_char() {
+    cov_mark::check!(expected_type_let_without_leading_char);
+    check_expected_type_and_name(
+        r#"
+fn foo() {
+    let x: u32 = $0;
+}
+"#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+}
+
+#[test]
+fn expected_type_let_with_leading_char() {
+    cov_mark::check!(expected_type_let_with_leading_char);
+    check_expected_type_and_name(
+        r#"
+fn foo() {
+    let x: u32 = c$0;
+}
+"#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+}
+
+#[test]
+fn expected_type_let_pat() {
+    // Pattern position of a `let`: the type flows from the initializer.
+    check_expected_type_and_name(
+        r#"
+fn foo() {
+    let x$0 = 0u32;
+}
+"#,
+        expect![[r#"ty: u32, name: ?"#]],
+    );
+    check_expected_type_and_name(
+        r#"
+fn foo() {
+    let $0 = 0u32;
+}
+"#,
+        expect![[r#"ty: u32, name: ?"#]],
+    );
+}
+
+#[test]
+fn expected_type_fn_param() {
+    cov_mark::check!(expected_type_fn_param);
+    check_expected_type_and_name(
+        r#"
+fn foo() { bar($0); }
+fn bar(x: u32) {}
+"#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+    check_expected_type_and_name(
+        r#"
+fn foo() { bar(c$0); }
+fn bar(x: u32) {}
+"#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+}
+
+#[test]
+fn expected_type_fn_param_ref() {
+    // `&`/`&mut` at the call site is peeled off: the expected type is the
+    // referent, not the reference.
+    cov_mark::check!(expected_type_fn_param_ref);
+    check_expected_type_and_name(
+        r#"
+fn foo() { bar(&$0); }
+fn bar(x: &u32) {}
+"#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+    check_expected_type_and_name(
+        r#"
+fn foo() { bar(&mut $0); }
+fn bar(x: &mut u32) {}
+"#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+    check_expected_type_and_name(
+        r#"
+fn foo() { bar(& c$0); }
+fn bar(x: &u32) {}
+    "#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+    check_expected_type_and_name(
+        r#"
+fn foo() { bar(&mut c$0); }
+fn bar(x: &mut u32) {}
+"#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+    check_expected_type_and_name(
+        r#"
+fn foo() { bar(&c$0); }
+fn bar(x: &u32) {}
+    "#,
+        expect![[r#"ty: u32, name: x"#]],
+    );
+}
+
+// Expected type/name inference in struct literal fields and in `match` arms,
+// both in pattern position and in arm-body position.
+#[test]
+fn expected_type_struct_field_without_leading_char() {
+    cov_mark::check!(expected_type_struct_field_without_leading_char);
+    check_expected_type_and_name(
+        r#"
+struct Foo { a: u32 }
+fn foo() {
+    Foo { a: $0 };
+}
+"#,
+        expect![[r#"ty: u32, name: a"#]],
+    )
+}
+
+#[test]
+fn expected_type_struct_field_followed_by_comma() {
+    cov_mark::check!(expected_type_struct_field_followed_by_comma);
+    check_expected_type_and_name(
+        r#"
+struct Foo { a: u32 }
+fn foo() {
+    Foo { a: $0, };
+}
+"#,
+        expect![[r#"ty: u32, name: a"#]],
+    )
+}
+
+#[test]
+fn expected_type_generic_struct_field() {
+    // Generic parameter `T` is instantiated via the function's return type.
+    check_expected_type_and_name(
+        r#"
+struct Foo<T> { a: T }
+fn foo() -> Foo<u32> {
+    Foo { a: $0 }
+}
+"#,
+        expect![[r#"ty: u32, name: a"#]],
+    )
+}
+
+#[test]
+fn expected_type_struct_field_with_leading_char() {
+    cov_mark::check!(expected_type_struct_field_with_leading_char);
+    check_expected_type_and_name(
+        r#"
+struct Foo { a: u32 }
+fn foo() {
+    Foo { a: c$0 };
+}
+"#,
+        expect![[r#"ty: u32, name: a"#]],
+    );
+}
+
+#[test]
+fn expected_type_match_arm_without_leading_char() {
+    cov_mark::check!(expected_type_match_arm_without_leading_char);
+    check_expected_type_and_name(
+        r#"
+enum E { X }
+fn foo() {
+    match E::X { $0 }
+}
+"#,
+        expect![[r#"ty: E, name: ?"#]],
+    );
+}
+
+#[test]
+fn expected_type_match_arm_with_leading_char() {
+    cov_mark::check!(expected_type_match_arm_with_leading_char);
+    check_expected_type_and_name(
+        r#"
+enum E { X }
+fn foo() {
+    match E::X { c$0 }
+}
+"#,
+        expect![[r#"ty: E, name: ?"#]],
+    );
+}
+
+#[test]
+fn expected_type_match_arm_body_without_leading_char() {
+    // Arm bodies take the type of the whole `match` expression.
+    cov_mark::check!(expected_type_match_arm_body_without_leading_char);
+    check_expected_type_and_name(
+        r#"
+struct Foo;
+enum E { X }
+fn foo() -> Foo {
+    match E::X { E::X => $0 }
+}
+"#,
+        expect![[r#"ty: Foo, name: ?"#]],
+    );
+}
+
+#[test]
+fn expected_type_match_body_arm_with_leading_char() {
+    cov_mark::check!(expected_type_match_arm_body_with_leading_char);
+    check_expected_type_and_name(
+        r#"
+struct Foo;
+enum E { X }
+fn foo() -> Foo {
+    match E::X { E::X => c$0 }
+}
+"#,
+        expect![[r#"ty: Foo, name: ?"#]],
+    );
+}
+
+// Expected type/name inference for the remaining positions: `if let` patterns,
+// function return tails, closure bodies, generic calls, functional update
+// syntax, parameter patterns, and `&mut` field access.
+#[test]
+fn expected_type_if_let_without_leading_char() {
+    cov_mark::check!(expected_type_if_let_without_leading_char);
+    check_expected_type_and_name(
+        r#"
+enum Foo { Bar, Baz, Quux }
+
+fn foo() {
+    let f = Foo::Quux;
+    if let $0 = f { }
+}
+"#,
+        expect![[r#"ty: Foo, name: ?"#]],
+    )
+}
+
+#[test]
+fn expected_type_if_let_with_leading_char() {
+    cov_mark::check!(expected_type_if_let_with_leading_char);
+    check_expected_type_and_name(
+        r#"
+enum Foo { Bar, Baz, Quux }
+
+fn foo() {
+    let f = Foo::Quux;
+    if let c$0 = f { }
+}
+"#,
+        expect![[r#"ty: Foo, name: ?"#]],
+    )
+}
+
+#[test]
+fn expected_type_fn_ret_without_leading_char() {
+    cov_mark::check!(expected_type_fn_ret_without_leading_char);
+    check_expected_type_and_name(
+        r#"
+fn foo() -> u32 {
+    $0
+}
+"#,
+        expect![[r#"ty: u32, name: ?"#]],
+    )
+}
+
+#[test]
+fn expected_type_fn_ret_with_leading_char() {
+    cov_mark::check!(expected_type_fn_ret_with_leading_char);
+    check_expected_type_and_name(
+        r#"
+fn foo() -> u32 {
+    c$0
+}
+"#,
+        expect![[r#"ty: u32, name: ?"#]],
+    )
+}
+
+#[test]
+fn expected_type_fn_ret_fn_ref_fully_typed() {
+    check_expected_type_and_name(
+        r#"
+fn foo() -> u32 {
+    foo$0
+}
+"#,
+        expect![[r#"ty: u32, name: ?"#]],
+    )
+}
+
+#[test]
+fn expected_type_closure_param_return() {
+    // FIXME: make this work with `|| $0`
+    check_expected_type_and_name(
+        r#"
+//- minicore: fn
+fn foo() {
+    bar(|| a$0);
+}
+
+fn bar(f: impl FnOnce() -> u32) {}
+"#,
+        expect![[r#"ty: u32, name: ?"#]],
+    );
+}
+
+#[test]
+fn expected_type_generic_function() {
+    // Turbofish fixes `T`, which fixes the expected argument type.
+    check_expected_type_and_name(
+        r#"
+fn foo() {
+    bar::<u32>($0);
+}
+
+fn bar<T>(t: T) {}
+"#,
+        expect![[r#"ty: u32, name: t"#]],
+    );
+}
+
+#[test]
+fn expected_type_generic_method() {
+    // `T` is inferred from the receiver `S(1u32)`.
+    check_expected_type_and_name(
+        r#"
+fn foo() {
+    S(1u32).bar($0);
+}
+
+struct S<T>(T);
+impl<T> S<T> {
+    fn bar(self, t: T) {}
+}
+"#,
+        expect![[r#"ty: u32, name: t"#]],
+    );
+}
+
+#[test]
+fn expected_type_functional_update() {
+    // `..$0` in a struct literal expects a value of the struct type itself.
+    cov_mark::check!(expected_type_struct_func_update);
+    check_expected_type_and_name(
+        r#"
+struct Foo { field: u32 }
+fn foo() {
+    Foo {
+        ..$0
+    }
+}
+"#,
+        expect![[r#"ty: Foo, name: ?"#]],
+    );
+}
+
+#[test]
+fn expected_type_param_pat() {
+    check_expected_type_and_name(
+        r#"
+struct Foo { field: u32 }
+fn foo(a$0: Foo) {}
+"#,
+        expect![[r#"ty: Foo, name: ?"#]],
+    );
+    check_expected_type_and_name(
+        r#"
+struct Foo { field: u32 }
+fn foo($0: Foo) {}
+"#,
+        // FIXME make this work, currently fails due to pattern recovery eating the `:`
+        expect![[r#"ty: ?, name: ?"#]],
+    );
+}
+
+#[test]
+fn expected_type_ref_prefix_on_field() {
+    check_expected_type_and_name(
+        r#"
+fn foo(_: &mut i32) {}
+struct S {
+    field: i32,
+}
+
+fn main() {
+    let s = S {
+        field: 100,
+    };
+    foo(&mut s.f$0);
+}
+"#,
+        expect!["ty: i32, name: ?"],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
new file mode 100644
index 000000000..27c3ccb35
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -0,0 +1,637 @@
+//! See `CompletionItem` structure.
+
+use std::fmt;
+
+use hir::{Documentation, Mutability};
+use ide_db::{imports::import_assets::LocatedImport, SnippetCap, SymbolKind};
+use smallvec::SmallVec;
+use stdx::{impl_from, never};
+use syntax::{SmolStr, TextRange, TextSize};
+use text_edit::TextEdit;
+
+use crate::{
+ context::{CompletionContext, PathCompletionCtx},
+ render::{render_path_resolution, RenderContext},
+};
+
+/// `CompletionItem` describes a single completion variant in the editor pop-up.
+/// It is basically a POD with various properties. To construct a
+/// `CompletionItem`, use `new` method and the `Builder` struct.
+#[derive(Clone)]
+pub struct CompletionItem {
+    /// Label in the completion pop up which identifies completion.
+    label: SmolStr,
+    /// Range of identifier that is being completed.
+    ///
+    /// It should be used primarily for UI, but we also use this to convert
+    /// generic TextEdit into LSP's completion edit (see conv.rs).
+    ///
+    /// `source_range` must contain the completion offset. `text_edit` should
+    /// start with what `source_range` points to, or VSCode will filter out the
+    /// completion silently.
+    source_range: TextRange,
+    /// What happens when user selects this item.
+    ///
+    /// Typically, replaces `source_range` with new identifier.
+    text_edit: TextEdit,
+    /// Whether `text_edit` contains snippet markers such as `$0`.
+    is_snippet: bool,
+
+    /// What item (struct, function, etc) are we completing.
+    kind: CompletionItemKind,
+
+    /// Lookup is used to check if completion item indeed can complete current
+    /// ident.
+    ///
+    /// That is, in `foo.bar$0` lookup of `abracadabra` will be accepted (it
+    /// contains `bar` sub sequence), and `quux` will be rejected.
+    lookup: Option<SmolStr>,
+
+    /// Additional info to show in the UI pop up.
+    detail: Option<String>,
+    documentation: Option<Documentation>,
+
+    /// Whether this item is marked as deprecated
+    deprecated: bool,
+
+    /// If completing a function call, ask the editor to show parameter popup
+    /// after completion.
+    trigger_call_info: bool,
+
+    /// We use this to sort completion. Relevance records facts like "do the
+    /// types align precisely?". We can't sort by relevances directly, they are
+    /// only partially ordered.
+    ///
+    /// Note that Relevance ignores fuzzy match score. We compute Relevance for
+    /// all possible items, and then separately build an ordered completion list
+    /// based on relevance and fuzzy matching with the already typed identifier.
+    relevance: CompletionRelevance,
+
+    /// Indicates that a reference or mutable reference to this variable is a
+    /// possible match.
+    ref_match: Option<(Mutability, TextSize)>,
+
+    /// The import data to add to completion's edits.
+    import_to_add: SmallVec<[LocatedImport; 1]>,
+}
+
+// We use custom debug for CompletionItem to make snapshot tests more readable.
+impl fmt::Debug for CompletionItem {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let mut s = f.debug_struct("CompletionItem");
+        s.field("label", &self.label()).field("source_range", &self.source_range());
+        if self.text_edit().len() == 1 {
+            // A single-atom edit is flattened into delete/insert fields for brevity.
+            let atom = &self.text_edit().iter().next().unwrap();
+            s.field("delete", &atom.delete);
+            s.field("insert", &atom.insert);
+        } else {
+            s.field("text_edit", &self.text_edit);
+        }
+        s.field("kind", &self.kind());
+        // Optional fields are only printed when they carry information, which
+        // keeps the expect-test snapshots short.
+        if self.lookup() != self.label() {
+            s.field("lookup", &self.lookup());
+        }
+        if let Some(detail) = self.detail() {
+            s.field("detail", &detail);
+        }
+        if let Some(documentation) = self.documentation() {
+            s.field("documentation", &documentation);
+        }
+        if self.deprecated {
+            s.field("deprecated", &true);
+        }
+
+        if self.relevance != CompletionRelevance::default() {
+            s.field("relevance", &self.relevance);
+        }
+
+        if let Some((mutability, offset)) = &self.ref_match {
+            s.field("ref_match", &format!("&{}@{offset:?}", mutability.as_keyword_for_ref()));
+        }
+        if self.trigger_call_info {
+            s.field("trigger_call_info", &true);
+        }
+        s.finish()
+    }
+}
+
+/// Facts about a completion item that feed into its relevance score; see
+/// `CompletionRelevance::score` for how each flag is weighted.
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
+pub struct CompletionRelevance {
+    /// This is set in cases like these:
+    ///
+    /// ```
+    /// fn f(spam: String) {}
+    /// fn main {
+    ///     let spam = 92;
+    ///     f($0) // name of local matches the name of param
+    /// }
+    /// ```
+    pub exact_name_match: bool,
+    /// See CompletionRelevanceTypeMatch doc comments for cases where this is set.
+    pub type_match: Option<CompletionRelevanceTypeMatch>,
+    /// This is set in cases like these:
+    ///
+    /// ```
+    /// fn foo(a: u32) {
+    ///     let b = 0;
+    ///     $0 // `a` and `b` are local
+    /// }
+    /// ```
+    pub is_local: bool,
+    /// This is set when trait items are completed in an impl of that trait.
+    pub is_item_from_trait: bool,
+    /// This is set when an import is suggested whose name is already imported.
+    pub is_name_already_imported: bool,
+    /// This is set for completions that will insert a `use` item.
+    pub requires_import: bool,
+    /// Set for method completions of the `core::ops` and `core::cmp` family.
+    pub is_op_method: bool,
+    /// Set for item completions that are private but in the workspace.
+    pub is_private_editable: bool,
+    /// Set for postfix snippet item completions
+    pub postfix_match: Option<CompletionRelevancePostfixMatch>,
+    /// This is set for type inference results
+    pub is_definite: bool,
+}
+
+/// How well the completed item's type matches the expected type at the cursor.
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum CompletionRelevanceTypeMatch {
+    /// This is set in cases like these:
+    ///
+    /// ```
+    /// enum Option<T> { Some(T), None }
+    /// fn f(a: Option<u32>) {}
+    /// fn main {
+    ///     f(Option::N$0) // type `Option<T>` could unify with `Option<u32>`
+    /// }
+    /// ```
+    CouldUnify,
+    /// This is set in cases like these:
+    ///
+    /// ```
+    /// fn f(spam: String) {}
+    /// fn main {
+    ///     let foo = String::new();
+    ///     f($0) // type of local matches the type of param
+    /// }
+    /// ```
+    Exact,
+}
+
+/// How well a postfix snippet's name matches what the user typed after `.`.
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum CompletionRelevancePostfixMatch {
+    /// Set in cases when item is postfix, but not exact
+    NonExact,
+    /// This is set in cases like these:
+    ///
+    /// ```
+    /// (a > b).not$0
+    /// ```
+    ///
+    /// Basically, we want to guarantee that postfix snippets always take
+    /// precedence over everything else.
+    Exact,
+}
+
+impl CompletionRelevance {
+    /// Provides a relevance score. Higher values are more relevant.
+    ///
+    /// The absolute value of the relevance score is not meaningful, for
+    /// example a value of 0 doesn't mean "not relevant", rather
+    /// it means "least relevant". The score value should only be used
+    /// for relative ordering.
+    ///
+    /// See is_relevant if you need to make some judgement about score
+    /// in an absolute sense.
+    pub fn score(self) -> u32 {
+        let mut score = 0;
+        // Full destructuring forces this function to be revisited whenever a
+        // new relevance field is added.
+        let CompletionRelevance {
+            exact_name_match,
+            type_match,
+            is_local,
+            is_item_from_trait,
+            is_name_already_imported,
+            requires_import,
+            is_op_method,
+            is_private_editable,
+            postfix_match,
+            is_definite,
+        } = self;
+
+        // lower rank private things
+        if !is_private_editable {
+            score += 1;
+        }
+        // lower rank trait op methods
+        if !is_op_method {
+            score += 10;
+        }
+        // lower rank for conflicting import names
+        if !is_name_already_imported {
+            score += 1;
+        }
+        // lower rank for items that don't need an import
+        if !requires_import {
+            score += 1;
+        }
+        if exact_name_match {
+            score += 10;
+        }
+        score += match postfix_match {
+            Some(CompletionRelevancePostfixMatch::Exact) => 100,
+            Some(CompletionRelevancePostfixMatch::NonExact) => 0,
+            None => 3,
+        };
+        score += match type_match {
+            Some(CompletionRelevanceTypeMatch::Exact) => 8,
+            Some(CompletionRelevanceTypeMatch::CouldUnify) => 3,
+            None => 0,
+        };
+        // slightly prefer locals
+        if is_local {
+            score += 1;
+        }
+        if is_item_from_trait {
+            score += 1;
+        }
+        if is_definite {
+            score += 10;
+        }
+        score
+    }
+
+    /// Returns true when the score is above some threshold such that we
+    /// think the item is especially likely to be relevant.
+    pub fn is_relevant(&self) -> bool {
+        self.score() > 0
+    }
+}
+
+/// The type of the completion item.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub enum CompletionItemKind {
+    SymbolKind(SymbolKind),
+    Binding,
+    BuiltinType,
+    InferredType,
+    Keyword,
+    Method,
+    Snippet,
+    UnresolvedReference,
+}
+
+// Allow `SymbolKind` values to convert directly into `CompletionItemKind::SymbolKind`.
+impl_from!(SymbolKind for CompletionItemKind);
+
+impl CompletionItemKind {
+    /// Short two-character marker for this kind, used to label items in
+    /// test snapshots.
+    #[cfg(test)]
+    pub(crate) fn tag(&self) -> &'static str {
+        match self {
+            CompletionItemKind::SymbolKind(kind) => match kind {
+                SymbolKind::Attribute => "at",
+                SymbolKind::BuiltinAttr => "ba",
+                SymbolKind::Const => "ct",
+                SymbolKind::ConstParam => "cp",
+                SymbolKind::Derive => "de",
+                SymbolKind::DeriveHelper => "dh",
+                SymbolKind::Enum => "en",
+                SymbolKind::Field => "fd",
+                SymbolKind::Function => "fn",
+                SymbolKind::Impl => "im",
+                SymbolKind::Label => "lb",
+                SymbolKind::LifetimeParam => "lt",
+                SymbolKind::Local => "lc",
+                SymbolKind::Macro => "ma",
+                SymbolKind::Module => "md",
+                SymbolKind::SelfParam => "sp",
+                SymbolKind::SelfType => "sy",
+                SymbolKind::Static => "sc",
+                SymbolKind::Struct => "st",
+                SymbolKind::ToolModule => "tm",
+                SymbolKind::Trait => "tt",
+                SymbolKind::TypeAlias => "ta",
+                SymbolKind::TypeParam => "tp",
+                SymbolKind::Union => "un",
+                SymbolKind::ValueParam => "vp",
+                SymbolKind::Variant => "ev",
+            },
+            CompletionItemKind::Binding => "bn",
+            CompletionItemKind::BuiltinType => "bt",
+            CompletionItemKind::InferredType => "it",
+            CompletionItemKind::Keyword => "kw",
+            CompletionItemKind::Method => "me",
+            CompletionItemKind::Snippet => "sn",
+            CompletionItemKind::UnresolvedReference => "??",
+        }
+    }
+}
+
+impl CompletionItem {
+    /// Starts building a new completion item; the remaining properties are
+    /// set on the returned `Builder` and finalized with `Builder::build`.
+    pub(crate) fn new(
+        kind: impl Into<CompletionItemKind>,
+        source_range: TextRange,
+        label: impl Into<SmolStr>,
+    ) -> Builder {
+        let label = label.into();
+        Builder {
+            source_range,
+            label,
+            insert_text: None,
+            is_snippet: false,
+            trait_name: None,
+            detail: None,
+            documentation: None,
+            lookup: None,
+            kind: kind.into(),
+            text_edit: None,
+            deprecated: false,
+            trigger_call_info: false,
+            relevance: CompletionRelevance::default(),
+            ref_match: None,
+            imports_to_add: Default::default(),
+        }
+    }
+
+    /// What user sees in pop-up in the UI.
+    pub fn label(&self) -> &str {
+        &self.label
+    }
+    pub fn source_range(&self) -> TextRange {
+        self.source_range
+    }
+
+    pub fn text_edit(&self) -> &TextEdit {
+        &self.text_edit
+    }
+    /// Whether `text_edit` is a snippet (contains `$0` markers).
+    pub fn is_snippet(&self) -> bool {
+        self.is_snippet
+    }
+
+    /// Short one-line additional information, like a type
+    pub fn detail(&self) -> Option<&str> {
+        self.detail.as_deref()
+    }
+    /// A doc-comment
+    pub fn documentation(&self) -> Option<Documentation> {
+        self.documentation.clone()
+    }
+    /// What string is used for filtering.
+    pub fn lookup(&self) -> &str {
+        self.lookup.as_deref().unwrap_or(&self.label)
+    }
+
+    pub fn kind(&self) -> CompletionItemKind {
+        self.kind
+    }
+
+    pub fn deprecated(&self) -> bool {
+        self.deprecated
+    }
+
+    pub fn relevance(&self) -> CompletionRelevance {
+        self.relevance
+    }
+
+    pub fn trigger_call_info(&self) -> bool {
+        self.trigger_call_info
+    }
+
+    /// The `&`/`&mut` alternative for this item, if any, together with the
+    /// relevance that alternative would have.
+    pub fn ref_match(&self) -> Option<(Mutability, TextSize, CompletionRelevance)> {
+        // Relevance of the ref match should be the same as the original
+        // match, but with exact type match set because self.ref_match
+        // is only set if there is an exact type match.
+        let mut relevance = self.relevance;
+        relevance.type_match = Some(CompletionRelevanceTypeMatch::Exact);
+
+        self.ref_match.map(|(mutability, offset)| (mutability, offset, relevance))
+    }
+
+    /// Imports that should be inserted alongside this item's edit.
+    pub fn imports_to_add(&self) -> &[LocatedImport] {
+        &self.import_to_add
+    }
+}
+
+/// A helper to make `CompletionItem`s.
+///
+/// Mirrors `CompletionItem`'s fields plus construction-only state
+/// (`insert_text`, `trait_name`) that `build` folds into the final item.
+#[must_use]
+#[derive(Clone)]
+pub(crate) struct Builder {
+    source_range: TextRange,
+    imports_to_add: SmallVec<[LocatedImport; 1]>,
+    trait_name: Option<SmolStr>,
+    label: SmolStr,
+    insert_text: Option<String>,
+    is_snippet: bool,
+    detail: Option<String>,
+    documentation: Option<Documentation>,
+    lookup: Option<SmolStr>,
+    kind: CompletionItemKind,
+    text_edit: Option<TextEdit>,
+    deprecated: bool,
+    trigger_call_info: bool,
+    relevance: CompletionRelevance,
+    ref_match: Option<(Mutability, TextSize)>,
+}
+
+impl Builder {
+    /// Renders a path resolution (`hir::ScopeDef`) into a completion item builder.
+    pub(crate) fn from_resolution(
+        ctx: &CompletionContext<'_>,
+        path_ctx: &PathCompletionCtx,
+        local_name: hir::Name,
+        resolution: hir::ScopeDef,
+    ) -> Self {
+        render_path_resolution(RenderContext::new(ctx), path_ctx, local_name, resolution)
+    }
+
+    /// Finalizes the item: decorates the label with import/trait info and
+    /// synthesizes the text edit if one wasn't set explicitly.
+    pub(crate) fn build(self) -> CompletionItem {
+        let _p = profile::span("item::Builder::build");
+
+        let mut label = self.label;
+        let mut lookup = self.lookup;
+        let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
+
+        if let [import_edit] = &*self.imports_to_add {
+            // snippets can have multiple imports, but normal completions only have up to one
+            if let Some(original_path) = import_edit.original_path.as_ref() {
+                // Keep the undecorated label as the filter text so typing the
+                // plain name still matches.
+                lookup = lookup.or_else(|| Some(label.clone()));
+                label = SmolStr::from(format!("{} (use {})", label, original_path));
+            }
+        } else if let Some(trait_name) = self.trait_name {
+            label = SmolStr::from(format!("{} (as {})", label, trait_name));
+        }
+
+        let text_edit = match self.text_edit {
+            Some(it) => it,
+            None => TextEdit::replace(self.source_range, insert_text),
+        };
+
+        CompletionItem {
+            source_range: self.source_range,
+            label,
+            text_edit,
+            is_snippet: self.is_snippet,
+            detail: self.detail,
+            documentation: self.documentation,
+            lookup,
+            kind: self.kind,
+            deprecated: self.deprecated,
+            trigger_call_info: self.trigger_call_info,
+            relevance: self.relevance,
+            ref_match: self.ref_match,
+            import_to_add: self.imports_to_add,
+        }
+    }
+    pub(crate) fn lookup_by(&mut self, lookup: impl Into<SmolStr>) -> &mut Builder {
+        self.lookup = Some(lookup.into());
+        self
+    }
+    pub(crate) fn label(&mut self, label: impl Into<SmolStr>) -> &mut Builder {
+        self.label = label.into();
+        self
+    }
+    pub(crate) fn trait_name(&mut self, trait_name: SmolStr) -> &mut Builder {
+        self.trait_name = Some(trait_name);
+        self
+    }
+    pub(crate) fn insert_text(&mut self, insert_text: impl Into<String>) -> &mut Builder {
+        self.insert_text = Some(insert_text.into());
+        self
+    }
+    pub(crate) fn insert_snippet(
+        &mut self,
+        cap: SnippetCap,
+        snippet: impl Into<String>,
+    ) -> &mut Builder {
+        // `cap` is unused at runtime; requiring it proves the client supports snippets.
+        let _ = cap;
+        self.is_snippet = true;
+        self.insert_text(snippet)
+    }
+    pub(crate) fn text_edit(&mut self, edit: TextEdit) -> &mut Builder {
+        self.text_edit = Some(edit);
+        self
+    }
+    pub(crate) fn snippet_edit(&mut self, _cap: SnippetCap, edit: TextEdit) -> &mut Builder {
+        self.is_snippet = true;
+        self.text_edit(edit)
+    }
+    pub(crate) fn detail(&mut self, detail: impl Into<String>) -> &mut Builder {
+        self.set_detail(Some(detail))
+    }
+    pub(crate) fn set_detail(&mut self, detail: Option<impl Into<String>>) -> &mut Builder {
+        self.detail = detail.map(Into::into);
+        if let Some(detail) = &self.detail {
+            // Details must stay single-line; truncate (and log via `never!`) otherwise.
+            if never!(detail.contains('\n'), "multiline detail:\n{}", detail) {
+                self.detail = Some(detail.splitn(2, '\n').next().unwrap().to_string());
+            }
+        }
+        self
+    }
+    #[allow(unused)]
+    pub(crate) fn documentation(&mut self, docs: Documentation) -> &mut Builder {
+        self.set_documentation(Some(docs))
+    }
+    pub(crate) fn set_documentation(&mut self, docs: Option<Documentation>) -> &mut Builder {
+        self.documentation = docs.map(Into::into);
+        self
+    }
+    pub(crate) fn set_deprecated(&mut self, deprecated: bool) -> &mut Builder {
+        self.deprecated = deprecated;
+        self
+    }
+    pub(crate) fn set_relevance(&mut self, relevance: CompletionRelevance) -> &mut Builder {
+        self.relevance = relevance;
+        self
+    }
+    pub(crate) fn trigger_call_info(&mut self) -> &mut Builder {
+        self.trigger_call_info = true;
+        self
+    }
+    pub(crate) fn add_import(&mut self, import_to_add: LocatedImport) -> &mut Builder {
+        self.imports_to_add.push(import_to_add);
+        self
+    }
+    pub(crate) fn ref_match(&mut self, mutability: Mutability, offset: TextSize) -> &mut Builder {
+        self.ref_match = Some((mutability, offset));
+        self
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use itertools::Itertools;
+    use test_utils::assert_eq_text;
+
+    use super::{
+        CompletionRelevance, CompletionRelevancePostfixMatch, CompletionRelevanceTypeMatch,
+    };
+
+    /// Check that these CompletionRelevance values are sorted in ascending
+    /// order by their relevance score.
+    ///
+    /// We want to avoid making assertions about the absolute score of any
+    /// item, but we do want to assert whether each is >, <, or == to the
+    /// others.
+    ///
+    /// If provided vec![vec![a], vec![b, c], vec![d]], then this will assert:
+    ///     a.score < b.score == c.score < d.score
+    fn check_relevance_score_ordered(expected_relevance_order: Vec<Vec<CompletionRelevance>>) {
+        let expected = format!("{:#?}", &expected_relevance_order);
+
+        // Re-derive the grouping purely from the computed scores: sort by
+        // score, then bucket equal scores together.
+        let actual_relevance_order = expected_relevance_order
+            .into_iter()
+            .flatten()
+            .map(|r| (r.score(), r))
+            .sorted_by_key(|(score, _r)| *score)
+            .fold(
+                (u32::MIN, vec![vec![]]),
+                |(mut currently_collecting_score, mut out), (score, r)| {
+                    if currently_collecting_score == score {
+                        out.last_mut().unwrap().push(r);
+                    } else {
+                        currently_collecting_score = score;
+                        out.push(vec![r]);
+                    }
+                    (currently_collecting_score, out)
+                },
+            )
+            .1;
+
+        let actual = format!("{:#?}", &actual_relevance_order);
+
+        assert_eq_text!(&expected, &actual);
+    }
+
+    #[test]
+    fn relevance_score() {
+        use CompletionRelevance as Cr;
+        let default = Cr::default();
+        // This test asserts that the relevance score for these items is ascending, and
+        // that any items in the same vec have the same score.
+        let expected_relevance_order = vec![
+            vec![],
+            vec![Cr { is_op_method: true, is_private_editable: true, ..default }],
+            vec![Cr { is_op_method: true, ..default }],
+            vec![Cr { postfix_match: Some(CompletionRelevancePostfixMatch::NonExact), ..default }],
+            vec![Cr { is_private_editable: true, ..default }],
+            vec![default],
+            vec![Cr { is_local: true, ..default }],
+            vec![Cr { type_match: Some(CompletionRelevanceTypeMatch::CouldUnify), ..default }],
+            vec![Cr { type_match: Some(CompletionRelevanceTypeMatch::Exact), ..default }],
+            vec![Cr { exact_name_match: true, ..default }],
+            vec![Cr { exact_name_match: true, is_local: true, ..default }],
+            vec![Cr {
+                exact_name_match: true,
+                type_match: Some(CompletionRelevanceTypeMatch::Exact),
+                ..default
+            }],
+            vec![Cr {
+                exact_name_match: true,
+                type_match: Some(CompletionRelevanceTypeMatch::Exact),
+                is_local: true,
+                ..default
+            }],
+            vec![Cr { postfix_match: Some(CompletionRelevancePostfixMatch::Exact), ..default }],
+        ];
+
+        check_relevance_score_ordered(expected_relevance_order);
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
new file mode 100644
index 000000000..ae1a440d0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -0,0 +1,247 @@
+//! `completions` crate provides utilities for generating completions of user input.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod completions;
+mod config;
+mod context;
+mod item;
+mod render;
+
+#[cfg(test)]
+mod tests;
+mod snippet;
+
+use ide_db::{
+ base_db::FilePosition,
+ helpers::mod_path_to_ast,
+ imports::{
+ import_assets::NameToImport,
+ insert_use::{self, ImportScope},
+ },
+ items_locator, RootDatabase,
+};
+use syntax::algo;
+use text_edit::TextEdit;
+
+use crate::{
+ completions::Completions,
+ context::{
+ CompletionAnalysis, CompletionContext, NameRefContext, NameRefKind, PathCompletionCtx,
+ PathKind,
+ },
+};
+
+pub use crate::{
+ config::{CallableSnippets, CompletionConfig},
+ item::{
+ CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch,
+ },
+ snippet::{Snippet, SnippetScope},
+};
+
+//FIXME: split the following feature into fine-grained features.
+
+// Feature: Magic Completions
+//
+// In addition to usual reference completion, rust-analyzer provides some ✨magic✨
+// completions as well:
+//
+// Keywords like `if`, `else`, `while`, `loop` are completed with braces, and cursor
+// is placed at the appropriate position. Even though `if` is easy to type, you
+// still want to complete it, to get ` { }` for free! `return` is inserted with a
+// space or `;` depending on the return type of the function.
+//
+// When completing a function call, `()` are automatically inserted. If a function
+// takes arguments, the cursor is positioned inside the parenthesis.
+//
+// There are postfix completions, which can be triggered by typing something like
+// `foo().if`. The word after `.` determines postfix completion. Possible variants are:
+//
+// - `expr.if` -> `if expr {}` or `if let ... {}` for `Option` or `Result`
+// - `expr.match` -> `match expr {}`
+// - `expr.while` -> `while expr {}` or `while let ... {}` for `Option` or `Result`
+// - `expr.ref` -> `&expr`
+// - `expr.refm` -> `&mut expr`
+// - `expr.let` -> `let $0 = expr;`
+// - `expr.letm` -> `let mut $0 = expr;`
+// - `expr.not` -> `!expr`
+// - `expr.dbg` -> `dbg!(expr)`
+// - `expr.dbgr` -> `dbg!(&expr)`
+// - `expr.call` -> `(expr)`
+//
+// There are also snippet completions:
+//
+// .Expressions
+// - `pd` -> `eprintln!(" = {:?}", );`
+// - `ppd` -> `eprintln!(" = {:#?}", );`
+//
+// .Items
+// - `tfn` -> `#[test] fn feature(){}`
+// - `tmod` ->
+// ```rust
+// #[cfg(test)]
+// mod tests {
+// use super::*;
+//
+// #[test]
+// fn test_name() {}
+// }
+// ```
+//
+// And the auto import completions, enabled with the `rust-analyzer.completion.autoimport.enable` setting and the corresponding LSP client capabilities.
+// Those are the additional completion options with automatic `use` import and options from all project importable items,
+// fuzzy matched against the completion input.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020667-b72ab880-917a-11eb-8778-716cf26a0eb3.gif[]
+
+/// Main entry point for completion. We run completion as a two-phase process.
+///
+/// First, we look at the position and collect a so-called `CompletionContext`.
+/// This is a somewhat messy process, because, during completion, syntax tree is
+/// incomplete and can look really weird.
+///
+/// Once the context is collected, we run a series of completion routines which
+/// look at the context and produce completion items. One subtlety about this
+/// phase is that completion engine should not filter by the substring which is
+/// already present, it should give all possible variants for the identifier at
+/// the caret. In other words, for
+///
+/// ```no_run
+/// fn f() {
+///     let foo = 92;
+///     let _ = bar$0
+/// }
+/// ```
+///
+/// `foo` *should* be present among the completion variants. Filtering by
+/// identifier prefix/fuzzy match should be done higher in the stack, together
+/// with ordering of completions (currently this is done by the client).
+///
+/// # Speculative Completion Problem
+///
+/// There's a curious unsolved problem in the current implementation. Often, you
+/// want to compute completions on a *slightly different* text document.
+///
+/// In the simplest case, when the code looks like `let x = `, you want to
+/// insert a fake identifier to get a better syntax tree: `let x = complete_me`.
+///
+/// We do this in `CompletionContext`, and it works OK-enough for *syntax*
+/// analysis. However, we might want to, eg, ask for the type of `complete_me`
+/// variable, and that's where our current infrastructure breaks down. salsa
+/// doesn't allow such "phantom" inputs.
+///
+/// Another case where this would be instrumental is macro expansion. We want to
+/// insert a fake ident and re-expand code. There's `expand_speculative` as a
+/// work-around for this.
+///
+/// A different use-case is completion of injection (examples and links in doc
+/// comments). When computing completion for a path in a doc-comment, you want
+/// to inject a fake path expression into the item being documented and complete
+/// that.
+///
+/// IntelliJ has CodeFragment/Context infrastructure for that. You can create a
+/// temporary PSI node, and say that the context ("parent") of this node is some
+/// existing node. Asking for, eg, type of this `CodeFragment` node works
+/// correctly, as the underlying infrastructure makes use of contexts to do
+/// analysis.
+pub fn completions(
+    db: &RootDatabase,
+    config: &CompletionConfig,
+    position: FilePosition,
+    trigger_character: Option<char>,
+) -> Option<Vec<CompletionItem>> {
+    let (ctx, analysis) = &CompletionContext::new(db, position, config)?;
+    let mut completions = Completions::default();
+
+    // prevent `(` from triggering unwanted completion noise
+    if trigger_character == Some('(') {
+        if let CompletionAnalysis::NameRef(NameRefContext { kind, .. }) = &analysis {
+            if let NameRefKind::Path(
+                path_ctx @ PathCompletionCtx { kind: PathKind::Vis { has_in_token }, .. },
+            ) = kind
+            {
+                completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token);
+            }
+        }
+        // `pub($0)` is the only position where `(` should produce completions;
+        // return early so no other routines run for this trigger.
+        return Some(completions.into());
+    }
+
+    {
+        let acc = &mut completions;
+
+        // Dispatch on what kind of syntax surrounds the cursor.
+        match &analysis {
+            CompletionAnalysis::Name(name_ctx) => completions::complete_name(acc, ctx, name_ctx),
+            CompletionAnalysis::NameRef(name_ref_ctx) => {
+                completions::complete_name_ref(acc, ctx, name_ref_ctx)
+            }
+            CompletionAnalysis::Lifetime(lifetime_ctx) => {
+                completions::lifetime::complete_label(acc, ctx, lifetime_ctx);
+                completions::lifetime::complete_lifetime(acc, ctx, lifetime_ctx);
+            }
+            CompletionAnalysis::String { original, expanded: Some(expanded) } => {
+                completions::extern_abi::complete_extern_abi(acc, ctx, expanded);
+                completions::format_string::format_string(acc, ctx, original, expanded);
+            }
+            CompletionAnalysis::UnexpandedAttrTT {
+                colon_prefix,
+                fake_attribute_under_caret: Some(attr),
+            } => {
+                completions::attribute::complete_known_attribute_input(
+                    acc,
+                    ctx,
+                    colon_prefix,
+                    attr,
+                );
+            }
+            CompletionAnalysis::UnexpandedAttrTT { .. } | CompletionAnalysis::String { .. } => (),
+        }
+    }
+
+    Some(completions.into())
+}
+
+/// Resolves additional completion data at the position given.
+/// This is used for import insertion done via completions like flyimport and custom user snippets.
+///
+/// For each `(full_import_path, imported_name)` pair, looks the name up among
+/// importable items and, when the resolved path matches `full_import_path`,
+/// inserts the corresponding `use` item. Returns the accumulated text edit.
+pub fn resolve_completion_edits(
+    db: &RootDatabase,
+    config: &CompletionConfig,
+    FilePosition { file_id, offset }: FilePosition,
+    imports: impl IntoIterator<Item = (String, String)>,
+) -> Option<Vec<TextEdit>> {
+    let _p = profile::span("resolve_completion_edits");
+    let sema = hir::Semantics::new(db);
+
+    let original_file = sema.parse(file_id);
+    let original_token =
+        syntax::AstNode::syntax(&original_file).token_at_offset(offset).left_biased()?;
+    let position_for_import = &original_token.parent()?;
+    let scope = ImportScope::find_insert_use_container(position_for_import, &sema)?;
+
+    let current_module = sema.scope(position_for_import)?.module();
+    let current_crate = current_module.krate();
+    // Mutate a copy of the import scope, then diff it against the original to
+    // produce the minimal text edit.
+    let new_ast = scope.clone_for_update();
+    let mut import_insert = TextEdit::builder();
+
+    imports.into_iter().for_each(|(full_import_path, imported_name)| {
+        let items_with_name = items_locator::items_with_name(
+            &sema,
+            current_crate,
+            NameToImport::exact_case_sensitive(imported_name),
+            items_locator::AssocItemSearch::Include,
+            Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+        );
+        let import = items_with_name
+            .filter_map(|candidate| {
+                current_module.find_use_path_prefixed(db, candidate, config.insert_use.prefix_kind)
+            })
+            .find(|mod_path| mod_path.to_string() == full_import_path);
+        if let Some(import_path) = import {
+            insert_use::insert_use(&new_ast, mod_path_to_ast(&import_path), &config.insert_use);
+        }
+    });
+
+    algo::diff(scope.as_syntax_node(), new_ast.as_syntax_node()).into_text_edit(&mut import_insert);
+    Some(vec![import_insert.finish()])
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
new file mode 100644
index 000000000..946134b0f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -0,0 +1,1910 @@
+//! `render` module provides utilities for rendering completion suggestions
+//! into code pieces that will be presented to user.
+
+pub(crate) mod macro_;
+pub(crate) mod function;
+pub(crate) mod const_;
+pub(crate) mod pattern;
+pub(crate) mod type_alias;
+pub(crate) mod variant;
+pub(crate) mod union_literal;
+pub(crate) mod literal;
+
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
+use ide_db::{
+ helpers::item_name, imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind,
+};
+use syntax::{AstNode, SmolStr, SyntaxKind, TextRange};
+
+use crate::{
+ context::{DotAccess, PathCompletionCtx, PathKind, PatternContext},
+ item::{Builder, CompletionRelevanceTypeMatch},
+ render::{
+ function::render_fn,
+ literal::render_variant_lit,
+ macro_::{render_macro, render_macro_pat},
+ },
+ CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance,
+};
+/// Interface for data and methods required for items rendering.
+#[derive(Debug, Clone)]
+pub(crate) struct RenderContext<'a> {
+ completion: &'a CompletionContext<'a>,
+ is_private_editable: bool,
+ import_to_add: Option<LocatedImport>,
+}
+
+impl<'a> RenderContext<'a> {
+ pub(crate) fn new(completion: &'a CompletionContext<'a>) -> RenderContext<'a> {
+ RenderContext { completion, is_private_editable: false, import_to_add: None }
+ }
+
+ pub(crate) fn private_editable(mut self, private_editable: bool) -> Self {
+ self.is_private_editable = private_editable;
+ self
+ }
+
+ pub(crate) fn import_to_add(mut self, import_to_add: Option<LocatedImport>) -> Self {
+ self.import_to_add = import_to_add;
+ self
+ }
+
+ fn snippet_cap(&self) -> Option<SnippetCap> {
+ self.completion.config.snippet_cap
+ }
+
+ fn db(&self) -> &'a RootDatabase {
+ self.completion.db
+ }
+
+ fn source_range(&self) -> TextRange {
+ self.completion.source_range()
+ }
+
+ fn completion_relevance(&self) -> CompletionRelevance {
+ CompletionRelevance {
+ is_private_editable: self.is_private_editable,
+ requires_import: self.import_to_add.is_some(),
+ ..Default::default()
+ }
+ }
+
+ fn is_immediately_after_macro_bang(&self) -> bool {
+ self.completion.token.kind() == SyntaxKind::BANG
+ && self
+ .completion
+ .token
+ .parent()
+ .map_or(false, |it| it.kind() == SyntaxKind::MACRO_CALL)
+ }
+
+ fn is_deprecated(&self, def: impl HasAttrs) -> bool {
+ let attrs = def.attrs(self.db());
+ attrs.by_key("deprecated").exists()
+ }
+
+ fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
+ let db = self.db();
+ let assoc = match as_assoc_item.as_assoc_item(db) {
+ Some(assoc) => assoc,
+ None => return false,
+ };
+
+ let is_assoc_deprecated = match assoc {
+ hir::AssocItem::Function(it) => self.is_deprecated(it),
+ hir::AssocItem::Const(it) => self.is_deprecated(it),
+ hir::AssocItem::TypeAlias(it) => self.is_deprecated(it),
+ };
+ is_assoc_deprecated
+ || assoc
+ .containing_trait_or_trait_impl(db)
+ .map(|trait_| self.is_deprecated(trait_))
+ .unwrap_or(false)
+ }
+
+ // FIXME: remove this
+ fn docs(&self, def: impl HasAttrs) -> Option<hir::Documentation> {
+ def.docs(self.db())
+ }
+}
+
+pub(crate) fn render_field(
+ ctx: RenderContext<'_>,
+ dot_access: &DotAccess,
+ receiver: Option<hir::Name>,
+ field: hir::Field,
+ ty: &hir::Type,
+) -> CompletionItem {
+ let is_deprecated = ctx.is_deprecated(field);
+ let name = field.name(ctx.db());
+ let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let mut item = CompletionItem::new(
+ SymbolKind::Field,
+ ctx.source_range(),
+ field_with_receiver(receiver.as_ref(), &name),
+ );
+ item.set_relevance(CompletionRelevance {
+ type_match: compute_type_match(ctx.completion, ty),
+ exact_name_match: compute_exact_name_match(ctx.completion, name.as_str()),
+ ..CompletionRelevance::default()
+ });
+ item.detail(ty.display(ctx.db()).to_string())
+ .set_documentation(field.docs(ctx.db()))
+ .set_deprecated(is_deprecated)
+ .lookup_by(name.clone());
+ item.insert_text(field_with_receiver(receiver.as_ref(), &escaped_name));
+ if let Some(receiver) = &dot_access.receiver {
+ if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) {
+ if let Some(ref_match) = compute_ref_match(ctx.completion, ty) {
+ item.ref_match(ref_match, original.syntax().text_range().start());
+ }
+ }
+ }
+ item.build()
+}
+
+fn field_with_receiver(receiver: Option<&hir::Name>, field_name: &str) -> SmolStr {
+ receiver
+ .map_or_else(|| field_name.into(), |receiver| format!("{}.{}", receiver, field_name).into())
+}
+
+pub(crate) fn render_tuple_field(
+ ctx: RenderContext<'_>,
+ receiver: Option<hir::Name>,
+ field: usize,
+ ty: &hir::Type,
+) -> CompletionItem {
+ let mut item = CompletionItem::new(
+ SymbolKind::Field,
+ ctx.source_range(),
+ field_with_receiver(receiver.as_ref(), &field.to_string()),
+ );
+ item.detail(ty.display(ctx.db()).to_string()).lookup_by(field.to_string());
+ item.build()
+}
+
+pub(crate) fn render_type_inference(
+ ty_string: String,
+ ctx: &CompletionContext<'_>,
+) -> CompletionItem {
+ let mut builder =
+ CompletionItem::new(CompletionItemKind::InferredType, ctx.source_range(), ty_string);
+ builder.set_relevance(CompletionRelevance { is_definite: true, ..Default::default() });
+ builder.build()
+}
+
+pub(crate) fn render_path_resolution(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: hir::Name,
+ resolution: ScopeDef,
+) -> Builder {
+ render_resolution_path(ctx, path_ctx, local_name, None, resolution)
+}
+
+pub(crate) fn render_pattern_resolution(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ local_name: hir::Name,
+ resolution: ScopeDef,
+) -> Builder {
+ render_resolution_pat(ctx, pattern_ctx, local_name, None, resolution)
+}
+
+pub(crate) fn render_resolution_with_import(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ import_edit: LocatedImport,
+) -> Option<Builder> {
+ let resolution = ScopeDef::from(import_edit.original_item);
+ let local_name = scope_def_to_name(resolution, &ctx, &import_edit)?;
+
+ Some(render_resolution_path(ctx, path_ctx, local_name, Some(import_edit), resolution))
+}
+
+pub(crate) fn render_resolution_with_import_pat(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ import_edit: LocatedImport,
+) -> Option<Builder> {
+ let resolution = ScopeDef::from(import_edit.original_item);
+ let local_name = scope_def_to_name(resolution, &ctx, &import_edit)?;
+ Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution))
+}
+
+fn scope_def_to_name(
+ resolution: ScopeDef,
+ ctx: &RenderContext<'_>,
+ import_edit: &LocatedImport,
+) -> Option<hir::Name> {
+ Some(match resolution {
+ ScopeDef::ModuleDef(hir::ModuleDef::Function(f)) => f.name(ctx.completion.db),
+ ScopeDef::ModuleDef(hir::ModuleDef::Const(c)) => c.name(ctx.completion.db)?,
+ ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(t)) => t.name(ctx.completion.db),
+ _ => item_name(ctx.db(), import_edit.original_item)?,
+ })
+}
+
+fn render_resolution_pat(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ local_name: hir::Name,
+ import_to_add: Option<LocatedImport>,
+ resolution: ScopeDef,
+) -> Builder {
+ let _p = profile::span("render_resolution");
+ use hir::ModuleDef::*;
+
+ match resolution {
+ ScopeDef::ModuleDef(Macro(mac)) => {
+ let ctx = ctx.import_to_add(import_to_add);
+ return render_macro_pat(ctx, pattern_ctx, local_name, mac);
+ }
+ _ => (),
+ }
+
+ render_resolution_simple_(ctx, &local_name, import_to_add, resolution)
+}
+
+fn render_resolution_path(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: hir::Name,
+ import_to_add: Option<LocatedImport>,
+ resolution: ScopeDef,
+) -> Builder {
+ let _p = profile::span("render_resolution");
+ use hir::ModuleDef::*;
+
+ match resolution {
+ ScopeDef::ModuleDef(Macro(mac)) => {
+ let ctx = ctx.import_to_add(import_to_add);
+ return render_macro(ctx, path_ctx, local_name, mac);
+ }
+ ScopeDef::ModuleDef(Function(func)) => {
+ let ctx = ctx.import_to_add(import_to_add);
+ return render_fn(ctx, path_ctx, Some(local_name), func);
+ }
+ ScopeDef::ModuleDef(Variant(var)) => {
+ let ctx = ctx.clone().import_to_add(import_to_add.clone());
+ if let Some(item) =
+ render_variant_lit(ctx, path_ctx, Some(local_name.clone()), var, None)
+ {
+ return item;
+ }
+ }
+ _ => (),
+ }
+
+ let completion = ctx.completion;
+ let cap = ctx.snippet_cap();
+ let db = completion.db;
+ let config = completion.config;
+
+ let name = local_name.to_smol_str();
+ let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
+ if local_name.escaped().is_escaped() {
+ item.insert_text(local_name.escaped().to_smol_str());
+ }
+ // Add `<>` for generic types
+ let type_path_no_ty_args = matches!(
+ path_ctx,
+ PathCompletionCtx { kind: PathKind::Type { .. }, has_type_args: false, .. }
+ ) && config.callable.is_some();
+ if type_path_no_ty_args {
+ if let Some(cap) = cap {
+ let has_non_default_type_params = match resolution {
+ ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db),
+ ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => {
+ it.has_non_default_type_params(db)
+ }
+ _ => false,
+ };
+
+ if has_non_default_type_params {
+ cov_mark::hit!(inserts_angle_brackets_for_generics);
+ item.lookup_by(name.clone())
+ .label(SmolStr::from_iter([&name, "<…>"]))
+ .trigger_call_info()
+ .insert_snippet(cap, format!("{}<$0>", local_name.escaped()));
+ }
+ }
+ }
+ if let ScopeDef::Local(local) = resolution {
+ let ty = local.ty(db);
+ if !ty.is_unknown() {
+ item.detail(ty.display(db).to_string());
+ }
+
+ item.set_relevance(CompletionRelevance {
+ type_match: compute_type_match(completion, &ty),
+ exact_name_match: compute_exact_name_match(completion, &name),
+ is_local: true,
+ ..CompletionRelevance::default()
+ });
+
+ if let Some(ref_match) = compute_ref_match(completion, &ty) {
+ item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
+ }
+ };
+ item
+}
+
+fn render_resolution_simple_(
+ ctx: RenderContext<'_>,
+ local_name: &hir::Name,
+ import_to_add: Option<LocatedImport>,
+ resolution: ScopeDef,
+) -> Builder {
+ let _p = profile::span("render_resolution");
+
+ let db = ctx.db();
+ let ctx = ctx.import_to_add(import_to_add);
+ let kind = res_to_kind(resolution);
+
+ let mut item = CompletionItem::new(kind, ctx.source_range(), local_name.to_smol_str());
+ item.set_relevance(ctx.completion_relevance())
+ .set_documentation(scope_def_docs(db, resolution))
+ .set_deprecated(scope_def_is_deprecated(&ctx, resolution));
+
+ if let Some(import_to_add) = ctx.import_to_add {
+ item.add_import(import_to_add);
+ }
+ item
+}
+
+fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
+ use hir::ModuleDef::*;
+ match resolution {
+ ScopeDef::Unknown => CompletionItemKind::UnresolvedReference,
+ ScopeDef::ModuleDef(Function(_)) => CompletionItemKind::SymbolKind(SymbolKind::Function),
+ ScopeDef::ModuleDef(Variant(_)) => CompletionItemKind::SymbolKind(SymbolKind::Variant),
+ ScopeDef::ModuleDef(Macro(_)) => CompletionItemKind::SymbolKind(SymbolKind::Macro),
+ ScopeDef::ModuleDef(Module(..)) => CompletionItemKind::SymbolKind(SymbolKind::Module),
+ ScopeDef::ModuleDef(Adt(adt)) => CompletionItemKind::SymbolKind(match adt {
+ hir::Adt::Struct(_) => SymbolKind::Struct,
+ hir::Adt::Union(_) => SymbolKind::Union,
+ hir::Adt::Enum(_) => SymbolKind::Enum,
+ }),
+ ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::SymbolKind(SymbolKind::Const),
+ ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::SymbolKind(SymbolKind::Static),
+ ScopeDef::ModuleDef(Trait(..)) => CompletionItemKind::SymbolKind(SymbolKind::Trait),
+ ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::SymbolKind(SymbolKind::TypeAlias),
+ ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType,
+ ScopeDef::GenericParam(param) => CompletionItemKind::SymbolKind(match param {
+ hir::GenericParam::TypeParam(_) => SymbolKind::TypeParam,
+ hir::GenericParam::ConstParam(_) => SymbolKind::ConstParam,
+ hir::GenericParam::LifetimeParam(_) => SymbolKind::LifetimeParam,
+ }),
+ ScopeDef::Local(..) => CompletionItemKind::SymbolKind(SymbolKind::Local),
+ ScopeDef::Label(..) => CompletionItemKind::SymbolKind(SymbolKind::Label),
+ ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => {
+ CompletionItemKind::SymbolKind(SymbolKind::SelfParam)
+ }
+ }
+}
+
+fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<hir::Documentation> {
+ use hir::ModuleDef::*;
+ match resolution {
+ ScopeDef::ModuleDef(Module(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Adt(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Variant(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Const(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Static(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Trait(it)) => it.docs(db),
+ ScopeDef::ModuleDef(TypeAlias(it)) => it.docs(db),
+ _ => None,
+ }
+}
+
+fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> bool {
+ match resolution {
+ ScopeDef::ModuleDef(it) => ctx.is_deprecated_assoc_item(it),
+ ScopeDef::GenericParam(it) => ctx.is_deprecated(it),
+ ScopeDef::AdtSelfType(it) => ctx.is_deprecated(it),
+ _ => false,
+ }
+}
+
+fn compute_type_match(
+ ctx: &CompletionContext<'_>,
+ completion_ty: &hir::Type,
+) -> Option<CompletionRelevanceTypeMatch> {
+ let expected_type = ctx.expected_type.as_ref()?;
+
+ // We don't ever consider unit type to be an exact type match, since
+ // nearly always this is not meaningful to the user.
+ if expected_type.is_unit() {
+ return None;
+ }
+
+ if completion_ty == expected_type {
+ Some(CompletionRelevanceTypeMatch::Exact)
+ } else if expected_type.could_unify_with(ctx.db, completion_ty) {
+ Some(CompletionRelevanceTypeMatch::CouldUnify)
+ } else {
+ None
+ }
+}
+
+fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool {
+ ctx.expected_name.as_ref().map_or(false, |name| name.text() == completion_name)
+}
+
+fn compute_ref_match(
+ ctx: &CompletionContext<'_>,
+ completion_ty: &hir::Type,
+) -> Option<hir::Mutability> {
+ let expected_type = ctx.expected_type.as_ref()?;
+ if completion_ty != expected_type {
+ let expected_type_without_ref = expected_type.remove_ref()?;
+ if completion_ty.autoderef(ctx.db).any(|deref_ty| deref_ty == expected_type_without_ref) {
+ cov_mark::hit!(suggest_ref);
+ let mutability = if expected_type.is_mutable_reference() {
+ hir::Mutability::Mut
+ } else {
+ hir::Mutability::Shared
+ };
+ return Some(mutability);
+ };
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use std::cmp;
+
+ use expect_test::{expect, Expect};
+ use ide_db::SymbolKind;
+ use itertools::Itertools;
+
+ use crate::{
+ item::CompletionRelevanceTypeMatch,
+ tests::{check_edit, do_completion, get_all_items, TEST_CONFIG},
+ CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch,
+ };
+
+ #[track_caller]
+ fn check(ra_fixture: &str, kind: impl Into<CompletionItemKind>, expect: Expect) {
+ let actual = do_completion(ra_fixture, kind.into());
+ expect.assert_debug_eq(&actual);
+ }
+
+ #[track_caller]
+ fn check_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
+ let actual: Vec<_> =
+ kinds.iter().flat_map(|&kind| do_completion(ra_fixture, kind)).collect();
+ expect.assert_debug_eq(&actual);
+ }
+
+ #[track_caller]
+ fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
+ let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
+ actual.retain(|it| kinds.contains(&it.kind()));
+ actual.sort_by_key(|it| cmp::Reverse(it.relevance().score()));
+ check_relevance_(actual, expect);
+ }
+
+ #[track_caller]
+ fn check_relevance(ra_fixture: &str, expect: Expect) {
+ let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
+ actual.retain(|it| it.kind() != CompletionItemKind::Snippet);
+ actual.retain(|it| it.kind() != CompletionItemKind::Keyword);
+ actual.retain(|it| it.kind() != CompletionItemKind::BuiltinType);
+ actual.sort_by_key(|it| cmp::Reverse(it.relevance().score()));
+ check_relevance_(actual, expect);
+ }
+
+ #[track_caller]
+ fn check_relevance_(actual: Vec<CompletionItem>, expect: Expect) {
+ let actual = actual
+ .into_iter()
+ .flat_map(|it| {
+ let mut items = vec![];
+
+ let tag = it.kind().tag();
+ let relevance = display_relevance(it.relevance());
+ items.push(format!("{} {} {}\n", tag, it.label(), relevance));
+
+ if let Some((mutability, _offset, relevance)) = it.ref_match() {
+ let label = format!("&{}{}", mutability.as_keyword_for_ref(), it.label());
+ let relevance = display_relevance(relevance);
+
+ items.push(format!("{} {} {}\n", tag, label, relevance));
+ }
+
+ items
+ })
+ .collect::<String>();
+
+ expect.assert_eq(&actual);
+
+ fn display_relevance(relevance: CompletionRelevance) -> String {
+ let relevance_factors = vec![
+ (relevance.type_match == Some(CompletionRelevanceTypeMatch::Exact), "type"),
+ (
+ relevance.type_match == Some(CompletionRelevanceTypeMatch::CouldUnify),
+ "type_could_unify",
+ ),
+ (relevance.exact_name_match, "name"),
+ (relevance.is_local, "local"),
+ (
+ relevance.postfix_match == Some(CompletionRelevancePostfixMatch::Exact),
+ "snippet",
+ ),
+ (relevance.is_op_method, "op_method"),
+ (relevance.requires_import, "requires_import"),
+ ]
+ .into_iter()
+ .filter_map(|(cond, desc)| if cond { Some(desc) } else { None })
+ .join("+");
+
+ format!("[{}]", relevance_factors)
+ }
+ }
+
+ #[test]
+ fn enum_detail_includes_record_fields() {
+ check(
+ r#"
+enum Foo { Foo { x: i32, y: i32 } }
+
+fn main() { Foo::Fo$0 }
+"#,
+ SymbolKind::Variant,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "Foo {…}",
+ source_range: 54..56,
+ delete: 54..56,
+ insert: "Foo { x: ${1:()}, y: ${2:()} }$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: "Foo { x: i32, y: i32 }",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn enum_detail_includes_tuple_fields() {
+ check(
+ r#"
+enum Foo { Foo (i32, i32) }
+
+fn main() { Foo::Fo$0 }
+"#,
+ SymbolKind::Variant,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "Foo(…)",
+ source_range: 46..48,
+ delete: 46..48,
+ insert: "Foo(${1:()}, ${2:()})$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: "Foo(i32, i32)",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fn_detail_includes_args_and_return_type() {
+ check(
+ r#"
+fn foo<T>(a: u32, b: u32, t: T) -> (u32, T) { (a, t) }
+
+fn main() { fo$0 }
+"#,
+ SymbolKind::Function,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "foo(…)",
+ source_range: 68..70,
+ delete: 68..70,
+ insert: "foo(${1:a}, ${2:b}, ${3:t})$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "foo",
+ detail: "fn(u32, u32, T) -> (u32, T)",
+ trigger_call_info: true,
+ },
+ CompletionItem {
+ label: "main()",
+ source_range: 68..70,
+ delete: 68..70,
+ insert: "main()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "main",
+ detail: "fn()",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn enum_detail_just_name_for_unit() {
+ check(
+ r#"
+enum Foo { Foo }
+
+fn main() { Foo::Fo$0 }
+"#,
+ SymbolKind::Variant,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "Foo",
+ source_range: 35..37,
+ delete: 35..37,
+ insert: "Foo$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: "Foo",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn lookup_enums_by_two_qualifiers() {
+ check_kinds(
+ r#"
+mod m {
+ pub enum Spam { Foo, Bar(i32) }
+}
+fn main() { let _: m::Spam = S$0 }
+"#,
+ &[
+ CompletionItemKind::SymbolKind(SymbolKind::Function),
+ CompletionItemKind::SymbolKind(SymbolKind::Module),
+ CompletionItemKind::SymbolKind(SymbolKind::Variant),
+ ],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "main()",
+ source_range: 75..76,
+ delete: 75..76,
+ insert: "main()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "main",
+ detail: "fn()",
+ },
+ CompletionItem {
+ label: "m",
+ source_range: 75..76,
+ delete: 75..76,
+ insert: "m",
+ kind: SymbolKind(
+ Module,
+ ),
+ },
+ CompletionItem {
+ label: "m::Spam::Bar(…)",
+ source_range: 75..76,
+ delete: 75..76,
+ insert: "m::Spam::Bar(${1:()})$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ lookup: "Spam::Bar(…)",
+ detail: "m::Spam::Bar(i32)",
+ relevance: CompletionRelevance {
+ exact_name_match: false,
+ type_match: Some(
+ Exact,
+ ),
+ is_local: false,
+ is_item_from_trait: false,
+ is_name_already_imported: false,
+ requires_import: false,
+ is_op_method: false,
+ is_private_editable: false,
+ postfix_match: None,
+ is_definite: false,
+ },
+ },
+ CompletionItem {
+ label: "m::Spam::Foo",
+ source_range: 75..76,
+ delete: 75..76,
+ insert: "m::Spam::Foo$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ lookup: "Spam::Foo",
+ detail: "m::Spam::Foo",
+ relevance: CompletionRelevance {
+ exact_name_match: false,
+ type_match: Some(
+ Exact,
+ ),
+ is_local: false,
+ is_item_from_trait: false,
+ is_name_already_imported: false,
+ requires_import: false,
+ is_op_method: false,
+ is_private_editable: false,
+ postfix_match: None,
+ is_definite: false,
+ },
+ },
+ ]
+ "#]],
+ )
+ }
+
+ #[test]
+ fn sets_deprecated_flag_in_items() {
+ check(
+ r#"
+#[deprecated]
+fn something_deprecated() {}
+
+fn main() { som$0 }
+"#,
+ SymbolKind::Function,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "main()",
+ source_range: 56..59,
+ delete: 56..59,
+ insert: "main()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "main",
+ detail: "fn()",
+ },
+ CompletionItem {
+ label: "something_deprecated()",
+ source_range: 56..59,
+ delete: 56..59,
+ insert: "something_deprecated()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "something_deprecated",
+ detail: "fn()",
+ deprecated: true,
+ },
+ ]
+ "#]],
+ );
+
+ check(
+ r#"
+struct A { #[deprecated] the_field: u32 }
+fn foo() { A { the$0 } }
+"#,
+ SymbolKind::Field,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "the_field",
+ source_range: 57..60,
+ delete: 57..60,
+ insert: "the_field",
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "u32",
+ deprecated: true,
+ relevance: CompletionRelevance {
+ exact_name_match: false,
+ type_match: Some(
+ CouldUnify,
+ ),
+ is_local: false,
+ is_item_from_trait: false,
+ is_name_already_imported: false,
+ requires_import: false,
+ is_op_method: false,
+ is_private_editable: false,
+ postfix_match: None,
+ is_definite: false,
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn renders_docs() {
+ check_kinds(
+ r#"
+struct S {
+ /// Field docs
+ foo:
+}
+impl S {
+ /// Method docs
+ fn bar(self) { self.$0 }
+}"#,
+ &[CompletionItemKind::Method, CompletionItemKind::SymbolKind(SymbolKind::Field)],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "bar()",
+ source_range: 94..94,
+ delete: 94..94,
+ insert: "bar()$0",
+ kind: Method,
+ lookup: "bar",
+ detail: "fn(self)",
+ documentation: Documentation(
+ "Method docs",
+ ),
+ },
+ CompletionItem {
+ label: "foo",
+ source_range: 94..94,
+ delete: 94..94,
+ insert: "foo",
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "{unknown}",
+ documentation: Documentation(
+ "Field docs",
+ ),
+ },
+ ]
+ "#]],
+ );
+
+ check_kinds(
+ r#"
+use self::my$0;
+
+/// mod docs
+mod my { }
+
+/// enum docs
+enum E {
+ /// variant docs
+ V
+}
+use self::E::*;
+"#,
+ &[
+ CompletionItemKind::SymbolKind(SymbolKind::Module),
+ CompletionItemKind::SymbolKind(SymbolKind::Variant),
+ CompletionItemKind::SymbolKind(SymbolKind::Enum),
+ ],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "my",
+ source_range: 10..12,
+ delete: 10..12,
+ insert: "my",
+ kind: SymbolKind(
+ Module,
+ ),
+ documentation: Documentation(
+ "mod docs",
+ ),
+ },
+ CompletionItem {
+ label: "V",
+ source_range: 10..12,
+ delete: 10..12,
+ insert: "V$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: "V",
+ documentation: Documentation(
+ "variant docs",
+ ),
+ },
+ CompletionItem {
+ label: "E",
+ source_range: 10..12,
+ delete: 10..12,
+ insert: "E",
+ kind: SymbolKind(
+ Enum,
+ ),
+ documentation: Documentation(
+ "enum docs",
+ ),
+ },
+ ]
+ "#]],
+ )
+ }
+
+ #[test]
+ fn dont_render_attrs() {
+ check(
+ r#"
+struct S;
+impl S {
+ #[inline]
+ fn the_method(&self) { }
+}
+fn foo(s: S) { s.$0 }
+"#,
+ CompletionItemKind::Method,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "the_method()",
+ source_range: 81..81,
+ delete: 81..81,
+ insert: "the_method()$0",
+ kind: Method,
+ lookup: "the_method",
+ detail: "fn(&self)",
+ },
+ ]
+ "#]],
+ )
+ }
+
+ #[test]
+ fn no_call_parens_if_fn_ptr_needed() {
+ cov_mark::check!(no_call_parens_if_fn_ptr_needed);
+ check_edit(
+ "foo",
+ r#"
+fn foo(foo: u8, bar: u8) {}
+struct ManualVtable { f: fn(u8, u8) }
+
+fn main() -> ManualVtable {
+ ManualVtable { f: f$0 }
+}
+"#,
+ r#"
+fn foo(foo: u8, bar: u8) {}
+struct ManualVtable { f: fn(u8, u8) }
+
+fn main() -> ManualVtable {
+ ManualVtable { f: foo }
+}
+"#,
+ );
+ check_edit(
+ "type",
+ r#"
+struct RawIdentTable { r#type: u32 }
+
+fn main() -> RawIdentTable {
+ RawIdentTable { t$0: 42 }
+}
+"#,
+ r#"
+struct RawIdentTable { r#type: u32 }
+
+fn main() -> RawIdentTable {
+ RawIdentTable { r#type: 42 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_parens_in_use_item() {
+ check_edit(
+ "foo",
+ r#"
+mod m { pub fn foo() {} }
+use crate::m::f$0;
+"#,
+ r#"
+mod m { pub fn foo() {} }
+use crate::m::foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn no_parens_in_call() {
+ check_edit(
+ "foo",
+ r#"
+fn foo(x: i32) {}
+fn main() { f$0(); }
+"#,
+ r#"
+fn foo(x: i32) {}
+fn main() { foo(); }
+"#,
+ );
+ check_edit(
+ "foo",
+ r#"
+struct Foo;
+impl Foo { fn foo(&self){} }
+fn f(foo: &Foo) { foo.f$0(); }
+"#,
+ r#"
+struct Foo;
+impl Foo { fn foo(&self){} }
+fn f(foo: &Foo) { foo.foo(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn inserts_angle_brackets_for_generics() {
+ cov_mark::check!(inserts_angle_brackets_for_generics);
+ check_edit(
+ "Vec",
+ r#"
+struct Vec<T> {}
+fn foo(xs: Ve$0)
+"#,
+ r#"
+struct Vec<T> {}
+fn foo(xs: Vec<$0>)
+"#,
+ );
+ check_edit(
+ "Vec",
+ r#"
+type Vec<T> = (T,);
+fn foo(xs: Ve$0)
+"#,
+ r#"
+type Vec<T> = (T,);
+fn foo(xs: Vec<$0>)
+"#,
+ );
+ check_edit(
+ "Vec",
+ r#"
+struct Vec<T = i128> {}
+fn foo(xs: Ve$0)
+"#,
+ r#"
+struct Vec<T = i128> {}
+fn foo(xs: Vec)
+"#,
+ );
+ check_edit(
+ "Vec",
+ r#"
+struct Vec<T> {}
+fn foo(xs: Ve$0<i128>)
+"#,
+ r#"
+struct Vec<T> {}
+fn foo(xs: Vec<i128>)
+"#,
+ );
+ }
+
+ #[test]
+ fn active_param_relevance() {
+ check_relevance(
+ r#"
+struct S { foo: i64, bar: u32, baz: u32 }
+fn test(bar: u32) { }
+fn foo(s: S) { test(s.$0) }
+"#,
+ expect![[r#"
+ fd bar [type+name]
+ fd baz [type]
+ fd foo []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn record_field_relevances() {
+ check_relevance(
+ r#"
+struct A { foo: i64, bar: u32, baz: u32 }
+struct B { x: (), y: f32, bar: u32 }
+fn foo(a: A) { B { bar: a.$0 }; }
+"#,
+ expect![[r#"
+ fd bar [type+name]
+ fd baz [type]
+ fd foo []
+ "#]],
+ )
+ }
+
+ #[test]
+ fn record_field_and_call_relevances() {
+ check_relevance(
+ r#"
+struct A { foo: i64, bar: u32, baz: u32 }
+struct B { x: (), y: f32, bar: u32 }
+fn f(foo: i64) { }
+fn foo(a: A) { B { bar: f(a.$0) }; }
+"#,
+ expect![[r#"
+ fd foo [type+name]
+ fd bar []
+ fd baz []
+ "#]],
+ );
+ check_relevance(
+ r#"
+struct A { foo: i64, bar: u32, baz: u32 }
+struct B { x: (), y: f32, bar: u32 }
+fn f(foo: i64) { }
+fn foo(a: A) { f(B { bar: a.$0 }); }
+"#,
+ expect![[r#"
+ fd bar [type+name]
+ fd baz [type]
+ fd foo []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn prioritize_exact_ref_match() {
+ check_relevance(
+ r#"
+struct WorldSnapshot { _f: () };
+fn go(world: &WorldSnapshot) { go(w$0) }
+"#,
+ expect![[r#"
+ lc world [type+name+local]
+ st WorldSnapshot {…} []
+ st &WorldSnapshot {…} [type]
+ st WorldSnapshot []
+ fn go(…) []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn too_many_arguments() {
+ cov_mark::check!(too_many_arguments);
+ check_relevance(
+ r#"
+struct Foo;
+fn f(foo: &Foo) { f(foo, w$0) }
+"#,
+ expect![[r#"
+ lc foo [local]
+ st Foo []
+ fn f(…) []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn score_fn_type_and_name_match() {
+ check_relevance(
+ r#"
+struct A { bar: u8 }
+fn baz() -> u8 { 0 }
+fn bar() -> u8 { 0 }
+fn f() { A { bar: b$0 }; }
+"#,
+ expect![[r#"
+ fn bar() [type+name]
+ fn baz() [type]
+ st A []
+ fn f() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn score_method_type_and_name_match() {
+ check_relevance(
+ r#"
+fn baz(aaa: u32){}
+struct Foo;
+impl Foo {
+fn aaa(&self) -> u32 { 0 }
+fn bbb(&self) -> u32 { 0 }
+fn ccc(&self) -> u64 { 0 }
+}
+fn f() {
+ baz(Foo.$0
+}
+"#,
+ expect![[r#"
+ me aaa() [type+name]
+ me bbb() [type]
+ me ccc() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn score_method_name_match_only() {
+ check_relevance(
+ r#"
+fn baz(aaa: u32){}
+struct Foo;
+impl Foo {
+fn aaa(&self) -> u64 { 0 }
+}
+fn f() {
+ baz(Foo.$0
+}
+"#,
+ expect![[r#"
+ me aaa() [name]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn suggest_ref_mut() {
+ cov_mark::check!(suggest_ref);
+ check_relevance(
+ r#"
+struct S;
+fn foo(s: &mut S) {}
+fn main() {
+ let mut s = S;
+ foo($0);
+}
+ "#,
+ expect![[r#"
+ lc s [name+local]
+ lc &mut s [type+name+local]
+ st S []
+ st &mut S [type]
+ st S []
+ fn foo(…) []
+ fn main() []
+ "#]],
+ );
+ check_relevance(
+ r#"
+struct S;
+fn foo(s: &mut S) {}
+fn main() {
+ let mut s = S;
+ foo(&mut $0);
+}
+ "#,
+ expect![[r#"
+ lc s [type+name+local]
+ st S [type]
+ st S []
+ fn foo(…) []
+ fn main() []
+ "#]],
+ );
+ check_relevance(
+ r#"
+struct S;
+fn foo(s: &mut S) {}
+fn main() {
+ let mut ssss = S;
+ foo(&mut s$0);
+}
+ "#,
+ expect![[r#"
+ lc ssss [type+local]
+ st S [type]
+ st S []
+ fn foo(…) []
+ fn main() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn suggest_deref() {
+ check_relevance(
+ r#"
+//- minicore: deref
+struct S;
+struct T(S);
+
+impl core::ops::Deref for T {
+ type Target = S;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn foo(s: &S) {}
+
+fn main() {
+ let t = T(S);
+ let m = 123;
+
+ foo($0);
+}
+ "#,
+ expect![[r#"
+ lc m [local]
+ lc t [local]
+ lc &t [type+local]
+ st S []
+ st &S [type]
+ st S []
+ st T []
+ fn foo(…) []
+ fn main() []
+ md core []
+ "#]],
+ )
+ }
+
+ #[test]
+ fn suggest_deref_mut() {
+ check_relevance(
+ r#"
+//- minicore: deref_mut
+struct S;
+struct T(S);
+
+impl core::ops::Deref for T {
+ type Target = S;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl core::ops::DerefMut for T {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+}
+
+fn foo(s: &mut S) {}
+
+fn main() {
+ let t = T(S);
+ let m = 123;
+
+ foo($0);
+}
+ "#,
+ expect![[r#"
+ lc m [local]
+ lc t [local]
+ lc &mut t [type+local]
+ st S []
+ st &mut S [type]
+ st S []
+ st T []
+ fn foo(…) []
+ fn main() []
+ md core []
+ "#]],
+ )
+ }
+
+ #[test]
+ fn locals() {
+ check_relevance(
+ r#"
+fn foo(bar: u32) {
+ let baz = 0;
+
+ f$0
+}
+"#,
+ expect![[r#"
+ lc baz [local]
+ lc bar [local]
+ fn foo(…) []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn enum_owned() {
+ check_relevance(
+ r#"
+enum Foo { A, B }
+fn foo() {
+ bar($0);
+}
+fn bar(t: Foo) {}
+"#,
+ expect![[r#"
+ ev Foo::A [type]
+ ev Foo::B [type]
+ en Foo []
+ fn bar(…) []
+ fn foo() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn enum_ref() {
+ check_relevance(
+ r#"
+enum Foo { A, B }
+fn foo() {
+ bar($0);
+}
+fn bar(t: &Foo) {}
+"#,
+ expect![[r#"
+ ev Foo::A []
+ ev &Foo::A [type]
+ ev Foo::B []
+ ev &Foo::B [type]
+ en Foo []
+ fn bar(…) []
+ fn foo() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn suggest_deref_fn_ret() {
+ check_relevance(
+ r#"
+//- minicore: deref
+struct S;
+struct T(S);
+
+impl core::ops::Deref for T {
+ type Target = S;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn foo(s: &S) {}
+fn bar() -> T {}
+
+fn main() {
+ foo($0);
+}
+"#,
+ expect![[r#"
+ st S []
+ st &S [type]
+ st S []
+ st T []
+ fn bar() []
+ fn &bar() [type]
+ fn foo(…) []
+ fn main() []
+ md core []
+ "#]],
+ )
+ }
+
+ #[test]
+ fn op_function_relevances() {
+ check_relevance(
+ r#"
+#[lang = "sub"]
+trait Sub {
+ fn sub(self, other: Self) -> Self { self }
+}
+impl Sub for u32 {}
+fn foo(a: u32) { a.$0 }
+"#,
+ expect![[r#"
+ me sub(…) (as Sub) [op_method]
+ "#]],
+ );
+ check_relevance(
+ r#"
+struct Foo;
+impl Foo {
+ fn new() -> Self {}
+}
+#[lang = "eq"]
+pub trait PartialEq<Rhs: ?Sized = Self> {
+ fn eq(&self, other: &Rhs) -> bool;
+ fn ne(&self, other: &Rhs) -> bool;
+}
+
+impl PartialEq for Foo {}
+fn main() {
+ Foo::$0
+}
+"#,
+ expect![[r#"
+ fn new() []
+ me eq(…) (as PartialEq) [op_method]
+ me ne(…) (as PartialEq) [op_method]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn struct_field_method_ref() {
+ check_kinds(
+ r#"
+struct Foo { bar: u32 }
+impl Foo { fn baz(&self) -> u32 { 0 } }
+
+fn foo(f: Foo) { let _: &u32 = f.b$0 }
+"#,
+ &[CompletionItemKind::Method, CompletionItemKind::SymbolKind(SymbolKind::Field)],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "baz()",
+ source_range: 98..99,
+ delete: 98..99,
+ insert: "baz()$0",
+ kind: Method,
+ lookup: "baz",
+ detail: "fn(&self) -> u32",
+ ref_match: "&@96",
+ },
+ CompletionItem {
+ label: "bar",
+ source_range: 98..99,
+ delete: 98..99,
+ insert: "bar",
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "u32",
+ ref_match: "&@96",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn qualified_path_ref() {
+ check_kinds(
+ r#"
+struct S;
+
+struct T;
+impl T {
+ fn foo() -> S {}
+}
+
+fn bar(s: &S) {}
+
+fn main() {
+ bar(T::$0);
+}
+"#,
+ &[CompletionItemKind::SymbolKind(SymbolKind::Function)],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "foo()",
+ source_range: 95..95,
+ delete: 95..95,
+ insert: "foo()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "foo",
+ detail: "fn() -> S",
+ ref_match: "&@92",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_enum() {
+ check_relevance(
+ r#"
+enum Foo<T> { A(T), B }
+// bar() should not be an exact type match
+// because the generic parameters are different
+fn bar() -> Foo<u8> { Foo::B }
+// FIXME baz() should be an exact type match
+// because the types could unify, but it currently
+// is not. This is due to the T here being
+// TyKind::Placeholder rather than TyKind::Missing.
+fn baz<T>() -> Foo<T> { Foo::B }
+fn foo() {
+ let foo: Foo<u32> = Foo::B;
+ let _: Foo<u32> = f$0;
+}
+"#,
+ expect![[r#"
+ lc foo [type+local]
+ ev Foo::A(…) [type_could_unify]
+ ev Foo::B [type_could_unify]
+ fn foo() []
+ en Foo []
+ fn bar() []
+ fn baz() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn postfix_exact_match_is_high_priority() {
+ cov_mark::check!(postfix_exact_match_is_high_priority);
+ check_relevance_for_kinds(
+ r#"
+mod ops {
+ pub trait Not {
+ type Output;
+ fn not(self) -> Self::Output;
+ }
+
+ impl Not for bool {
+ type Output = bool;
+ fn not(self) -> bool { if self { false } else { true }}
+ }
+}
+
+fn main() {
+ let _: bool = (9 > 2).not$0;
+}
+ "#,
+ &[CompletionItemKind::Snippet, CompletionItemKind::Method],
+ expect![[r#"
+ sn not [snippet]
+ me not() (use ops::Not) [type_could_unify+requires_import]
+ sn if []
+ sn while []
+ sn ref []
+ sn refm []
+ sn match []
+ sn box []
+ sn dbg []
+ sn dbgr []
+ sn call []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn postfix_inexact_match_is_low_priority() {
+ cov_mark::check!(postfix_inexact_match_is_low_priority);
+ check_relevance_for_kinds(
+ r#"
+struct S;
+impl S {
+ fn f(&self) {}
+}
+fn main() {
+ S.$0
+}
+ "#,
+ &[CompletionItemKind::Snippet, CompletionItemKind::Method],
+ expect![[r#"
+ me f() []
+ sn ref []
+ sn refm []
+ sn match []
+ sn box []
+ sn dbg []
+ sn dbgr []
+ sn call []
+ sn let []
+ sn letm []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn flyimport_reduced_relevance() {
+ check_relevance(
+ r#"
+mod std {
+ pub mod io {
+ pub trait BufRead {}
+ pub struct BufReader;
+ pub struct BufWriter;
+ }
+}
+struct Buffer;
+
+fn f() {
+ Buf$0
+}
+"#,
+ expect![[r#"
+ st Buffer []
+ fn f() []
+ md std []
+ tt BufRead (use std::io::BufRead) [requires_import]
+ st BufReader (use std::io::BufReader) [requires_import]
+ st BufWriter (use std::io::BufWriter) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_struct_with_raw_identifier() {
+ check_edit(
+ "type",
+ r#"
+mod m { pub struct r#type {} }
+fn main() {
+ let r#type = m::t$0;
+}
+"#,
+ r#"
+mod m { pub struct r#type {} }
+fn main() {
+ let r#type = m::r#type;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_fn_with_raw_identifier() {
+ check_edit(
+ "type",
+ r#"
+mod m { pub fn r#type {} }
+fn main() {
+ m::t$0
+}
+"#,
+ r#"
+mod m { pub fn r#type {} }
+fn main() {
+ m::r#type()$0
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_macro_with_raw_identifier() {
+ check_edit(
+ "let!",
+ r#"
+macro_rules! r#let { () => {} }
+fn main() {
+ $0
+}
+"#,
+ r#"
+macro_rules! r#let { () => {} }
+fn main() {
+ r#let!($0)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_variant_with_raw_identifier() {
+ check_edit(
+ "type",
+ r#"
+enum A { r#type }
+fn main() {
+ let a = A::t$0
+}
+"#,
+ r#"
+enum A { r#type }
+fn main() {
+ let a = A::r#type$0
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_field_with_raw_identifier() {
+ check_edit(
+ "fn",
+ r#"
+mod r#type {
+ pub struct r#struct {
+ pub r#fn: u32
+ }
+}
+
+fn main() {
+ let a = r#type::r#struct {};
+ a.$0
+}
+"#,
+ r#"
+mod r#type {
+ pub struct r#struct {
+ pub r#fn: u32
+ }
+}
+
+fn main() {
+ let a = r#type::r#struct {};
+ a.r#fn
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_const_with_raw_identifier() {
+ check_edit(
+ "type",
+ r#"
+struct r#struct {}
+impl r#struct { pub const r#type: u8 = 1; }
+fn main() {
+ r#struct::t$0
+}
+"#,
+ r#"
+struct r#struct {}
+impl r#struct { pub const r#type: u8 = 1; }
+fn main() {
+ r#struct::r#type
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_type_alias_with_raw_identifier() {
+ check_edit(
+ "type type",
+ r#"
+struct r#struct {}
+trait r#trait { type r#type; }
+impl r#trait for r#struct { type t$0 }
+"#,
+ r#"
+struct r#struct {}
+trait r#trait { type r#type; }
+impl r#trait for r#struct { type r#type = $0; }
+"#,
+ )
+ }
+
+ #[test]
+ fn field_access_includes_self() {
+ check_edit(
+ "length",
+ r#"
+struct S {
+ length: i32
+}
+
+impl S {
+ fn some_fn(&self) {
+ let l = len$0
+ }
+}
+"#,
+ r#"
+struct S {
+ length: i32
+}
+
+impl S {
+ fn some_fn(&self) {
+ let l = self.length
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs
new file mode 100644
index 000000000..a810eef18
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs
@@ -0,0 +1,33 @@
+//! Renderer for `const` fields.
+
+use hir::{AsAssocItem, HirDisplay};
+use ide_db::SymbolKind;
+
+use crate::{item::CompletionItem, render::RenderContext};
+
+pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> {
+ let _p = profile::span("render_const");
+ render(ctx, const_)
+}
+
+fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> {
+ let db = ctx.db();
+ let name = const_.name(db)?;
+ let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let detail = const_.display(db).to_string();
+
+ let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name.clone());
+ item.set_documentation(ctx.docs(const_))
+ .set_deprecated(ctx.is_deprecated(const_) || ctx.is_deprecated_assoc_item(const_))
+ .detail(detail)
+ .set_relevance(ctx.completion_relevance());
+
+ if let Some(actm) = const_.as_assoc_item(db) {
+ if let Some(trt) = actm.containing_trait_or_trait_impl(db) {
+ item.trait_name(trt.name(db).to_smol_str());
+ }
+ }
+ item.insert_text(escaped_name);
+
+ Some(item.build())
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
new file mode 100644
index 000000000..4b5535718
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
@@ -0,0 +1,671 @@
+//! Renderer for function calls.
+
+use hir::{db::HirDatabase, AsAssocItem, HirDisplay};
+use ide_db::{SnippetCap, SymbolKind};
+use itertools::Itertools;
+use stdx::{format_to, to_lower_snake_case};
+use syntax::{AstNode, SmolStr};
+
+use crate::{
+ context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind},
+ item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance},
+ render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext},
+ CallableSnippets,
+};
+
+#[derive(Debug)]
+enum FuncKind<'ctx> {
+ Function(&'ctx PathCompletionCtx),
+ Method(&'ctx DotAccess, Option<hir::Name>),
+}
+
+pub(crate) fn render_fn(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: Option<hir::Name>,
+ func: hir::Function,
+) -> Builder {
+ let _p = profile::span("render_fn");
+ render(ctx, local_name, func, FuncKind::Function(path_ctx))
+}
+
+pub(crate) fn render_method(
+ ctx: RenderContext<'_>,
+ dot_access: &DotAccess,
+ receiver: Option<hir::Name>,
+ local_name: Option<hir::Name>,
+ func: hir::Function,
+) -> Builder {
+ let _p = profile::span("render_method");
+ render(ctx, local_name, func, FuncKind::Method(dot_access, receiver))
+}
+
+fn render(
+ ctx @ RenderContext { completion, .. }: RenderContext<'_>,
+ local_name: Option<hir::Name>,
+ func: hir::Function,
+ func_kind: FuncKind<'_>,
+) -> Builder {
+ let db = completion.db;
+
+ let name = local_name.unwrap_or_else(|| func.name(db));
+
+ let (call, escaped_call) = match &func_kind {
+ FuncKind::Method(_, Some(receiver)) => (
+ format!("{}.{}", receiver, &name).into(),
+ format!("{}.{}", receiver.escaped(), name.escaped()).into(),
+ ),
+ _ => (name.to_smol_str(), name.escaped().to_smol_str()),
+ };
+ let mut item = CompletionItem::new(
+ if func.self_param(db).is_some() {
+ CompletionItemKind::Method
+ } else {
+ CompletionItemKind::SymbolKind(SymbolKind::Function)
+ },
+ ctx.source_range(),
+ call.clone(),
+ );
+
+ let ret_type = func.ret_type(db);
+ let is_op_method = func
+ .as_assoc_item(ctx.db())
+ .and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
+ .map_or(false, |trait_| completion.is_ops_trait(trait_));
+ item.set_relevance(CompletionRelevance {
+ type_match: compute_type_match(completion, &ret_type),
+ exact_name_match: compute_exact_name_match(completion, &call),
+ is_op_method,
+ ..ctx.completion_relevance()
+ });
+
+ if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
+ match func_kind {
+ FuncKind::Function(path_ctx) => {
+ item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
+ }
+ FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
+ if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
+ item.ref_match(ref_match, original_expr.syntax().text_range().start());
+ }
+ }
+ _ => (),
+ }
+ }
+
+ item.set_documentation(ctx.docs(func))
+ .set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func))
+ .detail(detail(db, func))
+ .lookup_by(name.to_smol_str());
+
+ match ctx.completion.config.snippet_cap {
+ Some(cap) => {
+ let complete_params = match func_kind {
+ FuncKind::Function(PathCompletionCtx {
+ kind: PathKind::Expr { .. },
+ has_call_parens: false,
+ ..
+ }) => Some(false),
+ FuncKind::Method(
+ DotAccess {
+ kind:
+ DotAccessKind::Method { has_parens: false } | DotAccessKind::Field { .. },
+ ..
+ },
+ _,
+ ) => Some(true),
+ _ => None,
+ };
+ if let Some(has_dot_receiver) = complete_params {
+ if let Some((self_param, params)) =
+ params(ctx.completion, func, &func_kind, has_dot_receiver)
+ {
+ add_call_parens(
+ &mut item,
+ completion,
+ cap,
+ call,
+ escaped_call,
+ self_param,
+ params,
+ );
+ }
+ }
+ }
+ _ => (),
+ };
+
+ match ctx.import_to_add {
+ Some(import_to_add) => {
+ item.add_import(import_to_add);
+ }
+ None => {
+ if let Some(actm) = func.as_assoc_item(db) {
+ if let Some(trt) = actm.containing_trait_or_trait_impl(db) {
+ item.trait_name(trt.name(db).to_smol_str());
+ }
+ }
+ }
+ }
+ item
+}
+
+pub(super) fn add_call_parens<'b>(
+ builder: &'b mut Builder,
+ ctx: &CompletionContext<'_>,
+ cap: SnippetCap,
+ name: SmolStr,
+ escaped_name: SmolStr,
+ self_param: Option<hir::SelfParam>,
+ params: Vec<hir::Param>,
+) -> &'b mut Builder {
+ cov_mark::hit!(inserts_parens_for_function_calls);
+
+ let (snippet, label_suffix) = if self_param.is_none() && params.is_empty() {
+ (format!("{}()$0", escaped_name), "()")
+ } else {
+ builder.trigger_call_info();
+ let snippet = if let Some(CallableSnippets::FillArguments) = ctx.config.callable {
+ let offset = if self_param.is_some() { 2 } else { 1 };
+ let function_params_snippet =
+ params.iter().enumerate().format_with(", ", |(index, param), f| {
+ match param.name(ctx.db) {
+ Some(n) => {
+ let smol_str = n.to_smol_str();
+ let text = smol_str.as_str().trim_start_matches('_');
+ let ref_ = ref_of_param(ctx, text, param.ty());
+ f(&format_args!("${{{}:{}{}}}", index + offset, ref_, text))
+ }
+ None => {
+ let name = match param.ty().as_adt() {
+ None => "_".to_string(),
+ Some(adt) => adt
+ .name(ctx.db)
+ .as_text()
+ .map(|s| to_lower_snake_case(s.as_str()))
+ .unwrap_or_else(|| "_".to_string()),
+ };
+ f(&format_args!("${{{}:{}}}", index + offset, name))
+ }
+ }
+ });
+ match self_param {
+ Some(self_param) => {
+ format!(
+ "{}(${{1:{}}}{}{})$0",
+ escaped_name,
+ self_param.display(ctx.db),
+ if params.is_empty() { "" } else { ", " },
+ function_params_snippet
+ )
+ }
+ None => {
+ format!("{}({})$0", escaped_name, function_params_snippet)
+ }
+ }
+ } else {
+ cov_mark::hit!(suppress_arg_snippets);
+ format!("{}($0)", escaped_name)
+ };
+
+ (snippet, "(…)")
+ };
+ builder.label(SmolStr::from_iter([&name, label_suffix])).insert_snippet(cap, snippet)
+}
+
+fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'static str {
+ if let Some(derefed_ty) = ty.remove_ref() {
+ for (name, local) in ctx.locals.iter() {
+ if name.as_text().as_deref() == Some(arg) {
+ return if local.ty(ctx.db) == derefed_ty {
+ if ty.is_mutable_reference() {
+ "&mut "
+ } else {
+ "&"
+ }
+ } else {
+ ""
+ };
+ }
+ }
+ }
+ ""
+}
+
+fn detail(db: &dyn HirDatabase, func: hir::Function) -> String {
+ let mut ret_ty = func.ret_type(db);
+ let mut detail = String::new();
+
+ if func.is_const(db) {
+ format_to!(detail, "const ");
+ }
+ if func.is_async(db) {
+ format_to!(detail, "async ");
+ if let Some(async_ret) = func.async_ret_type(db) {
+ ret_ty = async_ret;
+ }
+ }
+ if func.is_unsafe_to_call(db) {
+ format_to!(detail, "unsafe ");
+ }
+
+ format_to!(detail, "fn({})", params_display(db, func));
+ if !ret_ty.is_unit() {
+ format_to!(detail, " -> {}", ret_ty.display(db));
+ }
+ detail
+}
+
+fn params_display(db: &dyn HirDatabase, func: hir::Function) -> String {
+ if let Some(self_param) = func.self_param(db) {
+ let assoc_fn_params = func.assoc_fn_params(db);
+ let params = assoc_fn_params
+ .iter()
+ .skip(1) // skip the self param because we are manually handling that
+ .map(|p| p.ty().display(db));
+ format!(
+ "{}{}",
+ self_param.display(db),
+ params.format_with("", |display, f| {
+ f(&", ")?;
+ f(&display)
+ })
+ )
+ } else {
+ let assoc_fn_params = func.assoc_fn_params(db);
+ assoc_fn_params.iter().map(|p| p.ty().display(db)).join(", ")
+ }
+}
+
+fn params(
+ ctx: &CompletionContext<'_>,
+ func: hir::Function,
+ func_kind: &FuncKind<'_>,
+ has_dot_receiver: bool,
+) -> Option<(Option<hir::SelfParam>, Vec<hir::Param>)> {
+ if ctx.config.callable.is_none() {
+ return None;
+ }
+
+ // Don't add parentheses if the expected type is some function reference.
+ if let Some(ty) = &ctx.expected_type {
+ // FIXME: check signature matches?
+ if ty.is_fn() {
+ cov_mark::hit!(no_call_parens_if_fn_ptr_needed);
+ return None;
+ }
+ }
+
+ let self_param = if has_dot_receiver || matches!(func_kind, FuncKind::Method(_, Some(_))) {
+ None
+ } else {
+ func.self_param(ctx.db)
+ };
+ Some((self_param, func.params_without_self(ctx.db)))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ tests::{check_edit, check_edit_with_config, TEST_CONFIG},
+ CallableSnippets, CompletionConfig,
+ };
+
+ #[test]
+ fn inserts_parens_for_function_calls() {
+ cov_mark::check!(inserts_parens_for_function_calls);
+ check_edit(
+ "no_args",
+ r#"
+fn no_args() {}
+fn main() { no_$0 }
+"#,
+ r#"
+fn no_args() {}
+fn main() { no_args()$0 }
+"#,
+ );
+
+ check_edit(
+ "with_args",
+ r#"
+fn with_args(x: i32, y: String) {}
+fn main() { with_$0 }
+"#,
+ r#"
+fn with_args(x: i32, y: String) {}
+fn main() { with_args(${1:x}, ${2:y})$0 }
+"#,
+ );
+
+ check_edit(
+ "foo",
+ r#"
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn bar(s: &S) { s.f$0 }
+"#,
+ r#"
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn bar(s: &S) { s.foo()$0 }
+"#,
+ );
+
+ check_edit(
+ "foo",
+ r#"
+struct S {}
+impl S {
+ fn foo(&self, x: i32) {}
+}
+fn bar(s: &S) {
+ s.f$0
+}
+"#,
+ r#"
+struct S {}
+impl S {
+ fn foo(&self, x: i32) {}
+}
+fn bar(s: &S) {
+ s.foo(${1:x})$0
+}
+"#,
+ );
+
+ check_edit(
+ "foo",
+ r#"
+struct S {}
+impl S {
+ fn foo(&self, x: i32) {
+ $0
+ }
+}
+"#,
+ r#"
+struct S {}
+impl S {
+ fn foo(&self, x: i32) {
+ self.foo(${1:x})$0
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn parens_for_method_call_as_assoc_fn() {
+ check_edit(
+ "foo",
+ r#"
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn main() { S::f$0 }
+"#,
+ r#"
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn main() { S::foo(${1:&self})$0 }
+"#,
+ );
+ }
+
+ #[test]
+ fn suppress_arg_snippets() {
+ cov_mark::check!(suppress_arg_snippets);
+ check_edit_with_config(
+ CompletionConfig { callable: Some(CallableSnippets::AddParentheses), ..TEST_CONFIG },
+ "with_args",
+ r#"
+fn with_args(x: i32, y: String) {}
+fn main() { with_$0 }
+"#,
+ r#"
+fn with_args(x: i32, y: String) {}
+fn main() { with_args($0) }
+"#,
+ );
+ }
+
+ #[test]
+ fn strips_underscores_from_args() {
+ check_edit(
+ "foo",
+ r#"
+fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {}
+fn main() { f$0 }
+"#,
+ r#"
+fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {}
+fn main() { foo(${1:foo}, ${2:bar}, ${3:ho_ge_})$0 }
+"#,
+ );
+ }
+
+ #[test]
+ fn insert_ref_when_matching_local_in_scope() {
+ check_edit(
+ "ref_arg",
+ r#"
+struct Foo {}
+fn ref_arg(x: &Foo) {}
+fn main() {
+ let x = Foo {};
+ ref_ar$0
+}
+"#,
+ r#"
+struct Foo {}
+fn ref_arg(x: &Foo) {}
+fn main() {
+ let x = Foo {};
+ ref_arg(${1:&x})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn insert_mut_ref_when_matching_local_in_scope() {
+ check_edit(
+ "ref_arg",
+ r#"
+struct Foo {}
+fn ref_arg(x: &mut Foo) {}
+fn main() {
+ let x = Foo {};
+ ref_ar$0
+}
+"#,
+ r#"
+struct Foo {}
+fn ref_arg(x: &mut Foo) {}
+fn main() {
+ let x = Foo {};
+ ref_arg(${1:&mut x})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn insert_ref_when_matching_local_in_scope_for_method() {
+ check_edit(
+ "apply_foo",
+ r#"
+struct Foo {}
+struct Bar {}
+impl Bar {
+ fn apply_foo(&self, x: &Foo) {}
+}
+
+fn main() {
+ let x = Foo {};
+ let y = Bar {};
+ y.$0
+}
+"#,
+ r#"
+struct Foo {}
+struct Bar {}
+impl Bar {
+ fn apply_foo(&self, x: &Foo) {}
+}
+
+fn main() {
+ let x = Foo {};
+ let y = Bar {};
+ y.apply_foo(${1:&x})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trim_mut_keyword_in_func_completion() {
+ check_edit(
+ "take_mutably",
+ r#"
+fn take_mutably(mut x: &i32) {}
+
+fn main() {
+ take_m$0
+}
+"#,
+ r#"
+fn take_mutably(mut x: &i32) {}
+
+fn main() {
+ take_mutably(${1:x})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_pattern_args_with_type_name_if_adt() {
+ check_edit(
+ "qux",
+ r#"
+struct Foo {
+ bar: i32
+}
+
+fn qux(Foo { bar }: Foo) {
+ println!("{}", bar);
+}
+
+fn main() {
+ qu$0
+}
+"#,
+ r#"
+struct Foo {
+ bar: i32
+}
+
+fn qux(Foo { bar }: Foo) {
+ println!("{}", bar);
+}
+
+fn main() {
+ qux(${1:foo})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_fn_param() {
+ // has mut kw
+ check_edit(
+ "mut bar: u32",
+ r#"
+fn f(foo: (), mut bar: u32) {}
+fn g(foo: (), mut ba$0)
+"#,
+ r#"
+fn f(foo: (), mut bar: u32) {}
+fn g(foo: (), mut bar: u32)
+"#,
+ );
+
+ // has type param
+ check_edit(
+ "mut bar: u32",
+ r#"
+fn g(foo: (), mut ba$0: u32)
+fn f(foo: (), mut bar: u32) {}
+"#,
+ r#"
+fn g(foo: (), mut bar: u32)
+fn f(foo: (), mut bar: u32) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_fn_mut_param_add_comma() {
+ // add leading and trailing comma
+ check_edit(
+ ", mut bar: u32,",
+ r#"
+fn f(foo: (), mut bar: u32) {}
+fn g(foo: ()mut ba$0 baz: ())
+"#,
+ r#"
+fn f(foo: (), mut bar: u32) {}
+fn g(foo: (), mut bar: u32, baz: ())
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_fn_mut_param_has_attribute() {
+ check_edit(
+ r#"#[baz = "qux"] mut bar: u32"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), mut ba$0)
+"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut bar: u32)
+"#,
+ );
+
+ check_edit(
+ r#"#[baz = "qux"] mut bar: u32"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut ba$0)
+"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut bar: u32)
+"#,
+ );
+
+ check_edit(
+ r#", #[baz = "qux"] mut bar: u32"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: ()#[baz = "qux"] mut ba$0)
+"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut bar: u32)
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
new file mode 100644
index 000000000..91a253f8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
@@ -0,0 +1,191 @@
+//! Renderer for `enum` variants.
+
+use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind};
+use ide_db::SymbolKind;
+use syntax::AstNode;
+
+use crate::{
+ context::{CompletionContext, PathCompletionCtx, PathKind},
+ item::{Builder, CompletionItem},
+ render::{
+ compute_ref_match, compute_type_match,
+ variant::{
+ format_literal_label, render_record_lit, render_tuple_lit, visible_fields,
+ RenderedLiteral,
+ },
+ RenderContext,
+ },
+ CompletionItemKind, CompletionRelevance,
+};
+
+pub(crate) fn render_variant_lit(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: Option<hir::Name>,
+ variant: hir::Variant,
+ path: Option<hir::ModPath>,
+) -> Option<Builder> {
+ let _p = profile::span("render_enum_variant");
+ let db = ctx.db();
+
+ let name = local_name.unwrap_or_else(|| variant.name(db));
+ render(ctx, path_ctx, Variant::EnumVariant(variant), name, path)
+}
+
+pub(crate) fn render_struct_literal(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ strukt: hir::Struct,
+ path: Option<hir::ModPath>,
+ local_name: Option<hir::Name>,
+) -> Option<Builder> {
+ let _p = profile::span("render_struct_literal");
+ let db = ctx.db();
+
+ let name = local_name.unwrap_or_else(|| strukt.name(db));
+ render(ctx, path_ctx, Variant::Struct(strukt), name, path)
+}
+
+fn render(
+ ctx @ RenderContext { completion, .. }: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ thing: Variant,
+ name: hir::Name,
+ path: Option<hir::ModPath>,
+) -> Option<Builder> {
+ let db = completion.db;
+ let mut kind = thing.kind(db);
+ let should_add_parens = match &path_ctx {
+ PathCompletionCtx { has_call_parens: true, .. } => false,
+ PathCompletionCtx { kind: PathKind::Use | PathKind::Type { .. }, .. } => false,
+ _ => true,
+ };
+
+ let fields = thing.fields(completion)?;
+ let (qualified_name, short_qualified_name, qualified) = match path {
+ Some(path) => {
+ let short = hir::ModPath::from_segments(
+ hir::PathKind::Plain,
+ path.segments().iter().skip(path.segments().len().saturating_sub(2)).cloned(),
+ );
+ (path, short, true)
+ }
+ None => (name.clone().into(), name.into(), false),
+ };
+ let (qualified_name, escaped_qualified_name) =
+ (qualified_name.to_string(), qualified_name.escaped().to_string());
+ let snippet_cap = ctx.snippet_cap();
+
+ let mut rendered = match kind {
+ StructKind::Tuple if should_add_parens => {
+ render_tuple_lit(db, snippet_cap, &fields, &escaped_qualified_name)
+ }
+ StructKind::Record if should_add_parens => {
+ render_record_lit(db, snippet_cap, &fields, &escaped_qualified_name)
+ }
+ _ => RenderedLiteral {
+ literal: escaped_qualified_name.clone(),
+ detail: escaped_qualified_name.clone(),
+ },
+ };
+
+ if snippet_cap.is_some() {
+ rendered.literal.push_str("$0");
+ }
+
+ // only show name in label if not adding parens
+ if !should_add_parens {
+ kind = StructKind::Unit;
+ }
+
+ let mut item = CompletionItem::new(
+ CompletionItemKind::SymbolKind(thing.symbol_kind()),
+ ctx.source_range(),
+ format_literal_label(&qualified_name, kind),
+ );
+
+ item.detail(rendered.detail);
+
+ match snippet_cap {
+ Some(snippet_cap) => item.insert_snippet(snippet_cap, rendered.literal),
+ None => item.insert_text(rendered.literal),
+ };
+
+ if qualified {
+ item.lookup_by(format_literal_label(&short_qualified_name.to_string(), kind));
+ }
+ item.set_documentation(thing.docs(db)).set_deprecated(thing.is_deprecated(&ctx));
+
+ let ty = thing.ty(db);
+ item.set_relevance(CompletionRelevance {
+ type_match: compute_type_match(ctx.completion, &ty),
+ ..ctx.completion_relevance()
+ });
+ if let Some(ref_match) = compute_ref_match(completion, &ty) {
+ item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
+ }
+
+ if let Some(import_to_add) = ctx.import_to_add {
+ item.add_import(import_to_add);
+ }
+ Some(item)
+}
+
+#[derive(Clone, Copy)]
+enum Variant {
+ Struct(hir::Struct),
+ EnumVariant(hir::Variant),
+}
+
+impl Variant {
+ fn fields(self, ctx: &CompletionContext<'_>) -> Option<Vec<hir::Field>> {
+ let fields = match self {
+ Variant::Struct(it) => it.fields(ctx.db),
+ Variant::EnumVariant(it) => it.fields(ctx.db),
+ };
+ let (visible_fields, fields_omitted) = match self {
+ Variant::Struct(it) => visible_fields(ctx, &fields, it)?,
+ Variant::EnumVariant(it) => visible_fields(ctx, &fields, it)?,
+ };
+ if !fields_omitted {
+ Some(visible_fields)
+ } else {
+ None
+ }
+ }
+
+ fn kind(self, db: &dyn HirDatabase) -> StructKind {
+ match self {
+ Variant::Struct(it) => it.kind(db),
+ Variant::EnumVariant(it) => it.kind(db),
+ }
+ }
+
+ fn symbol_kind(self) -> SymbolKind {
+ match self {
+ Variant::Struct(_) => SymbolKind::Struct,
+ Variant::EnumVariant(_) => SymbolKind::Variant,
+ }
+ }
+
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ match self {
+ Variant::Struct(it) => it.docs(db),
+ Variant::EnumVariant(it) => it.docs(db),
+ }
+ }
+
+ fn is_deprecated(self, ctx: &RenderContext<'_>) -> bool {
+ match self {
+ Variant::Struct(it) => ctx.is_deprecated(it),
+ Variant::EnumVariant(it) => ctx.is_deprecated(it),
+ }
+ }
+
+ fn ty(self, db: &dyn HirDatabase) -> hir::Type {
+ match self {
+ Variant::Struct(it) => it.ty(db),
+ Variant::EnumVariant(it) => it.parent_enum(db).ty(db),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
new file mode 100644
index 000000000..ca2269f13
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
@@ -0,0 +1,270 @@
+//! Renderer for macro invocations.
+
+use hir::{Documentation, HirDisplay};
+use ide_db::SymbolKind;
+use syntax::SmolStr;
+
+use crate::{
+ context::{PathCompletionCtx, PathKind, PatternContext},
+ item::{Builder, CompletionItem},
+ render::RenderContext,
+};
+
+pub(crate) fn render_macro(
+ ctx: RenderContext<'_>,
+ PathCompletionCtx { kind, has_macro_bang, has_call_parens, .. }: &PathCompletionCtx,
+
+ name: hir::Name,
+ macro_: hir::Macro,
+) -> Builder {
+ let _p = profile::span("render_macro");
+ render(ctx, *kind == PathKind::Use, *has_macro_bang, *has_call_parens, name, macro_)
+}
+
+pub(crate) fn render_macro_pat(
+ ctx: RenderContext<'_>,
+ _pattern_ctx: &PatternContext,
+ name: hir::Name,
+ macro_: hir::Macro,
+) -> Builder {
+ let _p = profile::span("render_macro");
+ render(ctx, false, false, false, name, macro_)
+}
+
+fn render(
+ ctx @ RenderContext { completion, .. }: RenderContext<'_>,
+ is_use_path: bool,
+ has_macro_bang: bool,
+ has_call_parens: bool,
+ name: hir::Name,
+ macro_: hir::Macro,
+) -> Builder {
+ let source_range = if ctx.is_immediately_after_macro_bang() {
+ cov_mark::hit!(completes_macro_call_if_cursor_at_bang_token);
+ completion.token.parent().map_or_else(|| ctx.source_range(), |it| it.text_range())
+ } else {
+ ctx.source_range()
+ };
+
+ let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let docs = ctx.docs(macro_);
+ let docs_str = docs.as_ref().map(Documentation::as_str).unwrap_or_default();
+ let is_fn_like = macro_.is_fn_like(completion.db);
+ let (bra, ket) = if is_fn_like { guess_macro_braces(&name, docs_str) } else { ("", "") };
+
+ let needs_bang = is_fn_like && !is_use_path && !has_macro_bang;
+
+ let mut item = CompletionItem::new(
+ SymbolKind::from(macro_.kind(completion.db)),
+ source_range,
+ label(&ctx, needs_bang, bra, ket, &name),
+ );
+ item.set_deprecated(ctx.is_deprecated(macro_))
+ .detail(macro_.display(completion.db).to_string())
+ .set_documentation(docs)
+ .set_relevance(ctx.completion_relevance());
+
+ match ctx.snippet_cap() {
+ Some(cap) if needs_bang && !has_call_parens => {
+ let snippet = format!("{}!{}$0{}", escaped_name, bra, ket);
+ let lookup = banged_name(&name);
+ item.insert_snippet(cap, snippet).lookup_by(lookup);
+ }
+ _ if needs_bang => {
+ item.insert_text(banged_name(&escaped_name)).lookup_by(banged_name(&name));
+ }
+ _ => {
+ cov_mark::hit!(dont_insert_macro_call_parens_unncessary);
+ item.insert_text(escaped_name);
+ }
+ };
+ if let Some(import_to_add) = ctx.import_to_add {
+ item.add_import(import_to_add);
+ }
+
+ item
+}
+
+fn label(
+ ctx: &RenderContext<'_>,
+ needs_bang: bool,
+ bra: &str,
+ ket: &str,
+ name: &SmolStr,
+) -> SmolStr {
+ if needs_bang {
+ if ctx.snippet_cap().is_some() {
+ SmolStr::from_iter([&*name, "!", bra, "…", ket])
+ } else {
+ banged_name(name)
+ }
+ } else {
+ name.clone()
+ }
+}
+
+fn banged_name(name: &str) -> SmolStr {
+ SmolStr::from_iter([name, "!"])
+}
+
+fn guess_macro_braces(macro_name: &str, docs: &str) -> (&'static str, &'static str) {
+ let mut votes = [0, 0, 0];
+ for (idx, s) in docs.match_indices(&macro_name) {
+ let (before, after) = (&docs[..idx], &docs[idx + s.len()..]);
+ // Ensure to match the full word
+ if after.starts_with('!')
+ && !before.ends_with(|c: char| c == '_' || c.is_ascii_alphanumeric())
+ {
+ // It may have spaces before the braces like `foo! {}`
+ match after[1..].chars().find(|&c| !c.is_whitespace()) {
+ Some('{') => votes[0] += 1,
+ Some('[') => votes[1] += 1,
+ Some('(') => votes[2] += 1,
+ _ => {}
+ }
+ }
+ }
+
+ // Insert a space before `{}`.
+ // We prefer the last one when some votes equal.
+ let (_vote, (bra, ket)) = votes
+ .iter()
+ .zip(&[(" {", "}"), ("[", "]"), ("(", ")")])
+ .max_by_key(|&(&vote, _)| vote)
+ .unwrap();
+ (*bra, *ket)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_edit;
+
+ #[test]
+ fn dont_insert_macro_call_parens_unncessary() {
+ cov_mark::check!(dont_insert_macro_call_parens_unncessary);
+ check_edit(
+ "frobnicate",
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::$0;
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! frobnicate { () => () }
+"#,
+ r#"
+use foo::frobnicate;
+"#,
+ );
+
+ check_edit(
+ "frobnicate",
+ r#"
+macro_rules! frobnicate { () => () }
+fn main() { frob$0!(); }
+"#,
+ r#"
+macro_rules! frobnicate { () => () }
+fn main() { frobnicate!(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn add_bang_to_parens() {
+ check_edit(
+ "frobnicate!",
+ r#"
+macro_rules! frobnicate { () => () }
+fn main() {
+ frob$0()
+}
+"#,
+ r#"
+macro_rules! frobnicate { () => () }
+fn main() {
+ frobnicate!()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_macro_braces() {
+ check_edit(
+ "vec!",
+ r#"
+/// Creates a [`Vec`] containing the arguments.
+///
+/// ```
+/// let v = vec![1, 2, 3];
+/// assert_eq!(v[0], 1);
+/// assert_eq!(v[1], 2);
+/// assert_eq!(v[2], 3);
+/// ```
+macro_rules! vec { () => {} }
+
+fn main() { v$0 }
+"#,
+ r#"
+/// Creates a [`Vec`] containing the arguments.
+///
+/// ```
+/// let v = vec![1, 2, 3];
+/// assert_eq!(v[0], 1);
+/// assert_eq!(v[1], 2);
+/// assert_eq!(v[2], 3);
+/// ```
+macro_rules! vec { () => {} }
+
+fn main() { vec![$0] }
+"#,
+ );
+
+ check_edit(
+ "foo!",
+ r#"
+/// Foo
+///
+/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`,
+/// call as `let _=foo! { hello world };`
+macro_rules! foo { () => {} }
+fn main() { $0 }
+"#,
+ r#"
+/// Foo
+///
+/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`,
+/// call as `let _=foo! { hello world };`
+macro_rules! foo { () => {} }
+fn main() { foo! {$0} }
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_macro_call_if_cursor_at_bang_token() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/9904
+ cov_mark::check!(completes_macro_call_if_cursor_at_bang_token);
+ check_edit(
+ "foo!",
+ r#"
+macro_rules! foo {
+ () => {}
+}
+
+fn main() {
+ foo!$0
+}
+"#,
+ r#"
+macro_rules! foo {
+ () => {}
+}
+
+fn main() {
+ foo!($0)
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
new file mode 100644
index 000000000..34a384f2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
@@ -0,0 +1,193 @@
+//! Renderer for patterns.
+
+use hir::{db::HirDatabase, HasAttrs, Name, StructKind};
+use ide_db::SnippetCap;
+use itertools::Itertools;
+use syntax::SmolStr;
+
+use crate::{
+ context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext},
+ render::{
+ variant::{format_literal_label, visible_fields},
+ RenderContext,
+ },
+ CompletionItem, CompletionItemKind,
+};
+
+pub(crate) fn render_struct_pat(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ strukt: hir::Struct,
+ local_name: Option<Name>,
+) -> Option<CompletionItem> {
+ let _p = profile::span("render_struct_pat");
+
+ let fields = strukt.fields(ctx.db());
+ let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, strukt)?;
+
+ if visible_fields.is_empty() {
+ // Matching a struct without matching its fields is pointless, unlike matching a Variant without its fields
+ return None;
+ }
+
+ let name = local_name.unwrap_or_else(|| strukt.name(ctx.db()));
+ let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let kind = strukt.kind(ctx.db());
+ let label = format_literal_label(name.as_str(), kind);
+ let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?;
+
+ Some(build_completion(ctx, label, pat, strukt))
+}
+
+pub(crate) fn render_variant_pat(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ path_ctx: Option<&PathCompletionCtx>,
+ variant: hir::Variant,
+ local_name: Option<Name>,
+ path: Option<&hir::ModPath>,
+) -> Option<CompletionItem> {
+ let _p = profile::span("render_variant_pat");
+
+ let fields = variant.fields(ctx.db());
+ let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?;
+
+ let (name, escaped_name) = match path {
+ Some(path) => (path.to_string().into(), path.escaped().to_string().into()),
+ None => {
+ let name = local_name.unwrap_or_else(|| variant.name(ctx.db()));
+ (name.to_smol_str(), name.escaped().to_smol_str())
+ }
+ };
+
+ let (label, pat) = match path_ctx {
+ Some(PathCompletionCtx { has_call_parens: true, .. }) => (name, escaped_name.to_string()),
+ _ => {
+ let kind = variant.kind(ctx.db());
+ let label = format_literal_label(name.as_str(), kind);
+ let pat = render_pat(
+ &ctx,
+ pattern_ctx,
+ &escaped_name,
+ kind,
+ &visible_fields,
+ fields_omitted,
+ )?;
+ (label, pat)
+ }
+ };
+
+ Some(build_completion(ctx, label, pat, variant))
+}
+
+fn build_completion(
+ ctx: RenderContext<'_>,
+ label: SmolStr,
+ pat: String,
+ def: impl HasAttrs + Copy,
+) -> CompletionItem {
+ let mut item = CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label);
+ item.set_documentation(ctx.docs(def))
+ .set_deprecated(ctx.is_deprecated(def))
+ .detail(&pat)
+ .set_relevance(ctx.completion_relevance());
+ match ctx.snippet_cap() {
+ Some(snippet_cap) => item.insert_snippet(snippet_cap, pat),
+ None => item.insert_text(pat),
+ };
+ item.build()
+}
+
+fn render_pat(
+ ctx: &RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ name: &str,
+ kind: StructKind,
+ fields: &[hir::Field],
+ fields_omitted: bool,
+) -> Option<String> {
+ let mut pat = match kind {
+ StructKind::Tuple => render_tuple_as_pat(ctx.snippet_cap(), fields, name, fields_omitted),
+ StructKind::Record => {
+ render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted)
+ }
+ StructKind::Unit => name.to_string(),
+ };
+
+ let needs_ascription = matches!(
+ pattern_ctx,
+ PatternContext {
+ param_ctx: Some(ParamContext { kind: ParamKind::Function(_), .. }),
+ has_type_ascription: false,
+ ..
+ }
+ );
+ if needs_ascription {
+ pat.push(':');
+ pat.push(' ');
+ pat.push_str(name);
+ }
+ if ctx.snippet_cap().is_some() {
+ pat.push_str("$0");
+ }
+ Some(pat)
+}
+
+fn render_record_as_pat(
+ db: &dyn HirDatabase,
+ snippet_cap: Option<SnippetCap>,
+ fields: &[hir::Field],
+ name: &str,
+ fields_omitted: bool,
+) -> String {
+ let fields = fields.iter();
+ match snippet_cap {
+ Some(_) => {
+ format!(
+ "{name} {{ {}{} }}",
+ fields.enumerate().format_with(", ", |(idx, field), f| {
+ f(&format_args!("{}${}", field.name(db).escaped(), idx + 1))
+ }),
+ if fields_omitted { ", .." } else { "" },
+ name = name
+ )
+ }
+ None => {
+ format!(
+ "{name} {{ {}{} }}",
+ fields.map(|field| field.name(db).escaped().to_smol_str()).format(", "),
+ if fields_omitted { ", .." } else { "" },
+ name = name
+ )
+ }
+ }
+}
+
+fn render_tuple_as_pat(
+ snippet_cap: Option<SnippetCap>,
+ fields: &[hir::Field],
+ name: &str,
+ fields_omitted: bool,
+) -> String {
+ let fields = fields.iter();
+ match snippet_cap {
+ Some(_) => {
+ format!(
+ "{name}({}{})",
+ fields
+ .enumerate()
+ .format_with(", ", |(idx, _), f| { f(&format_args!("${}", idx + 1)) }),
+ if fields_omitted { ", .." } else { "" },
+ name = name
+ )
+ }
+ None => {
+ format!(
+ "{name}({}{})",
+ fields.enumerate().map(|(idx, _)| idx).format(", "),
+ if fields_omitted { ", .." } else { "" },
+ name = name
+ )
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs
new file mode 100644
index 000000000..f1b23c76e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs
@@ -0,0 +1,57 @@
+//! Renderer for type aliases.
+
+use hir::{AsAssocItem, HirDisplay};
+use ide_db::SymbolKind;
+use syntax::SmolStr;
+
+use crate::{item::CompletionItem, render::RenderContext};
+
+pub(crate) fn render_type_alias(
+ ctx: RenderContext<'_>,
+ type_alias: hir::TypeAlias,
+) -> Option<CompletionItem> {
+ let _p = profile::span("render_type_alias");
+ render(ctx, type_alias, false)
+}
+
+pub(crate) fn render_type_alias_with_eq(
+ ctx: RenderContext<'_>,
+ type_alias: hir::TypeAlias,
+) -> Option<CompletionItem> {
+ let _p = profile::span("render_type_alias_with_eq");
+ render(ctx, type_alias, true)
+}
+
+fn render(
+ ctx: RenderContext<'_>,
+ type_alias: hir::TypeAlias,
+ with_eq: bool,
+) -> Option<CompletionItem> {
+ let db = ctx.db();
+
+ let name = type_alias.name(db);
+ let (name, escaped_name) = if with_eq {
+ (
+ SmolStr::from_iter([&name.to_smol_str(), " = "]),
+ SmolStr::from_iter([&name.escaped().to_smol_str(), " = "]),
+ )
+ } else {
+ (name.to_smol_str(), name.escaped().to_smol_str())
+ };
+ let detail = type_alias.display(db).to_string();
+
+ let mut item = CompletionItem::new(SymbolKind::TypeAlias, ctx.source_range(), name.clone());
+ item.set_documentation(ctx.docs(type_alias))
+ .set_deprecated(ctx.is_deprecated(type_alias) || ctx.is_deprecated_assoc_item(type_alias))
+ .detail(detail)
+ .set_relevance(ctx.completion_relevance());
+
+ if let Some(actm) = type_alias.as_assoc_item(db) {
+ if let Some(trt) = actm.containing_trait_or_trait_impl(db) {
+ item.trait_name(trt.name(db).to_smol_str());
+ }
+ }
+ item.insert_text(escaped_name);
+
+ Some(item.build())
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
new file mode 100644
index 000000000..9c9540a9b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
@@ -0,0 +1,77 @@
+//! Renderer for `union` literals.
+
+use hir::{HirDisplay, Name, StructKind};
+use ide_db::SymbolKind;
+use itertools::Itertools;
+
+use crate::{
+ render::{
+ variant::{format_literal_label, visible_fields},
+ RenderContext,
+ },
+ CompletionItem, CompletionItemKind,
+};
+
+pub(crate) fn render_union_literal(
+ ctx: RenderContext<'_>,
+ un: hir::Union,
+ path: Option<hir::ModPath>,
+ local_name: Option<Name>,
+) -> Option<CompletionItem> {
+ let name = local_name.unwrap_or_else(|| un.name(ctx.db()));
+
+ let (qualified_name, escaped_qualified_name) = match path {
+ Some(p) => (p.to_string(), p.escaped().to_string()),
+ None => (name.to_string(), name.escaped().to_string()),
+ };
+
+ let mut item = CompletionItem::new(
+ CompletionItemKind::SymbolKind(SymbolKind::Union),
+ ctx.source_range(),
+ format_literal_label(&name.to_smol_str(), StructKind::Record),
+ );
+
+ let fields = un.fields(ctx.db());
+ let (fields, fields_omitted) = visible_fields(ctx.completion, &fields, un)?;
+
+ if fields.is_empty() {
+ return None;
+ }
+
+ let literal = if ctx.snippet_cap().is_some() {
+ format!(
+ "{} {{ ${{1|{}|}}: ${{2:()}} }}$0",
+ escaped_qualified_name,
+ fields.iter().map(|field| field.name(ctx.db()).escaped().to_smol_str()).format(",")
+ )
+ } else {
+ format!(
+ "{} {{ {} }}",
+ escaped_qualified_name,
+ fields.iter().format_with(", ", |field, f| {
+ f(&format_args!("{}: ()", field.name(ctx.db()).escaped()))
+ })
+ )
+ };
+
+ let detail = format!(
+ "{} {{ {}{} }}",
+ qualified_name,
+ fields.iter().format_with(", ", |field, f| {
+ f(&format_args!("{}: {}", field.name(ctx.db()), field.ty(ctx.db()).display(ctx.db())))
+ }),
+ if fields_omitted { ", .." } else { "" }
+ );
+
+ item.set_documentation(ctx.docs(un))
+ .set_deprecated(ctx.is_deprecated(un))
+ .detail(&detail)
+ .set_relevance(ctx.completion_relevance());
+
+ match ctx.snippet_cap() {
+ Some(snippet_cap) => item.insert_snippet(snippet_cap, literal),
+ None => item.insert_text(literal),
+ };
+
+ Some(item.build())
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
new file mode 100644
index 000000000..003a0c11e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
@@ -0,0 +1,96 @@
+//! Code common to structs, unions, and enum variants.
+
+use crate::context::CompletionContext;
+use hir::{db::HirDatabase, HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind};
+use ide_db::SnippetCap;
+use itertools::Itertools;
+use syntax::SmolStr;
+
+/// A rendered struct, union, or enum variant, split into fields for actual
+/// auto-completion (`literal`, using `field: ()`) and display in the
+/// completions menu (`detail`, using `field: type`).
+pub(crate) struct RenderedLiteral {
+ pub(crate) literal: String,
+ pub(crate) detail: String,
+}
+
+/// Render a record type (or sub-type) to a `RenderedCompound`. Use `None` for
+/// the `name` argument for an anonymous type.
+pub(crate) fn render_record_lit(
+ db: &dyn HirDatabase,
+ snippet_cap: Option<SnippetCap>,
+ fields: &[hir::Field],
+ path: &str,
+) -> RenderedLiteral {
+ let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| {
+ if snippet_cap.is_some() {
+ f(&format_args!("{}: ${{{}:()}}", field.name(db).escaped(), idx + 1))
+ } else {
+ f(&format_args!("{}: ()", field.name(db).escaped()))
+ }
+ });
+
+ let types = fields.iter().format_with(", ", |field, f| {
+ f(&format_args!("{}: {}", field.name(db), field.ty(db).display(db)))
+ });
+
+ RenderedLiteral {
+ literal: format!("{} {{ {} }}", path, completions),
+ detail: format!("{} {{ {} }}", path, types),
+ }
+}
+
+/// Render a tuple type (or sub-type) to a `RenderedCompound`. Use `None` for
+/// the `name` argument for an anonymous type.
+pub(crate) fn render_tuple_lit(
+ db: &dyn HirDatabase,
+ snippet_cap: Option<SnippetCap>,
+ fields: &[hir::Field],
+ path: &str,
+) -> RenderedLiteral {
+ let completions = fields.iter().enumerate().format_with(", ", |(idx, _), f| {
+ if snippet_cap.is_some() {
+ f(&format_args!("${{{}:()}}", idx + 1))
+ } else {
+ f(&format_args!("()"))
+ }
+ });
+
+ let types = fields.iter().format_with(", ", |field, f| f(&field.ty(db).display(db)));
+
+ RenderedLiteral {
+ literal: format!("{}({})", path, completions),
+ detail: format!("{}({})", path, types),
+ }
+}
+
+/// Find all the visible fields in a given list. Returns the list of visible
+/// fields, plus a boolean for whether the list is comprehensive (contains no
+/// private fields and its item is not marked `#[non_exhaustive]`).
+pub(crate) fn visible_fields(
+ ctx: &CompletionContext<'_>,
+ fields: &[hir::Field],
+ item: impl HasAttrs + HasCrate + Copy,
+) -> Option<(Vec<hir::Field>, bool)> {
+ let module = ctx.module;
+ let n_fields = fields.len();
+ let fields = fields
+ .iter()
+ .filter(|field| field.is_visible_from(ctx.db, module))
+ .copied()
+ .collect::<Vec<_>>();
+ let has_invisible_field = n_fields - fields.len() > 0;
+ let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key("non_exhaustive").exists()
+ && item.krate(ctx.db) != module.krate();
+ let fields_omitted = has_invisible_field || is_foreign_non_exhaustive;
+ Some((fields, fields_omitted))
+}
+
+/// Format a struct, etc. literal option for display in the completions menu.
+pub(crate) fn format_literal_label(name: &str, kind: StructKind) -> SmolStr {
+ match kind {
+ StructKind::Tuple => SmolStr::from_iter([name, "(…)"]),
+ StructKind::Record => SmolStr::from_iter([name, " {…}"]),
+ StructKind::Unit => name.into(),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
new file mode 100644
index 000000000..dc1039fa6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
@@ -0,0 +1,214 @@
+//! User (postfix)-snippet definitions.
+//!
+//! Actual logic is implemented in [`crate::completions::postfix`] and [`crate::completions::snippet`] respectively.
+
+// Feature: User Snippet Completions
+//
+// rust-analyzer allows the user to define custom (postfix)-snippets that may depend on items to be accessible for the current scope to be applicable.
+//
+// A custom snippet can be defined by adding it to the `rust-analyzer.completion.snippets.custom` object respectively.
+//
+// [source,json]
+// ----
+// {
+// "rust-analyzer.completion.snippets.custom": {
+// "thread spawn": {
+// "prefix": ["spawn", "tspawn"],
+// "body": [
+// "thread::spawn(move || {",
+// "\t$0",
+// "});",
+// ],
+// "description": "Insert a thread::spawn call",
+// "requires": "std::thread",
+// "scope": "expr",
+// }
+// }
+// }
+// ----
+//
+// In the example above:
+//
+// * `"thread spawn"` is the name of the snippet.
+//
+// * `prefix` defines one or more trigger words that will trigger the snippets completion.
+// Using `postfix` will instead create a postfix snippet.
+//
+// * `body` is one or more lines of content joined via newlines for the final output.
+//
+// * `description` is an optional description of the snippet, if unset the snippet name will be used.
+//
+// * `requires` is an optional list of item paths that have to be resolvable in the current crate where the completion is rendered.
+// On failure of resolution the snippet won't be applicable, otherwise the snippet will insert an import for the items on insertion if
+// the items aren't yet in scope.
+//
+// * `scope` is an optional filter for when the snippet should be applicable. Possible values are:
+// ** for Snippet-Scopes: `expr`, `item` (default: `item`)
+// ** for Postfix-Snippet-Scopes: `expr`, `type` (default: `expr`)
+//
+// The `body` field also has access to placeholders as visible in the example as `$0`.
+// These placeholders take the form of `$number` or `${number:placeholder_text}` which can be traversed as tabstop in ascending order starting from 1,
+// with `$0` being a special case that always comes last.
+//
+// There is also a special placeholder, `${receiver}`, which will be replaced by the receiver expression for postfix snippets, or a `$0` tabstop in case of normal snippets.
+// This replacement for normal snippets allows you to reuse a snippet for both post- and prefix in a single definition.
+//
+// For the VSCode editor, rust-analyzer also ships with a small set of defaults which can be removed
+// by overwriting the settings object mentioned above, the defaults are:
+// [source,json]
+// ----
+// {
+// "Arc::new": {
+// "postfix": "arc",
+// "body": "Arc::new(${receiver})",
+// "requires": "std::sync::Arc",
+// "description": "Put the expression into an `Arc`",
+// "scope": "expr"
+// },
+// "Rc::new": {
+// "postfix": "rc",
+// "body": "Rc::new(${receiver})",
+// "requires": "std::rc::Rc",
+// "description": "Put the expression into an `Rc`",
+// "scope": "expr"
+// },
+// "Box::pin": {
+// "postfix": "pinbox",
+// "body": "Box::pin(${receiver})",
+// "requires": "std::boxed::Box",
+// "description": "Put the expression into a pinned `Box`",
+// "scope": "expr"
+// },
+// "Ok": {
+// "postfix": "ok",
+// "body": "Ok(${receiver})",
+// "description": "Wrap the expression in a `Result::Ok`",
+// "scope": "expr"
+// },
+// "Err": {
+// "postfix": "err",
+// "body": "Err(${receiver})",
+// "description": "Wrap the expression in a `Result::Err`",
+// "scope": "expr"
+// },
+// "Some": {
+// "postfix": "some",
+// "body": "Some(${receiver})",
+// "description": "Wrap the expression in an `Option::Some`",
+// "scope": "expr"
+// }
+// }
+// ----
+
+use ide_db::imports::import_assets::LocatedImport;
+use itertools::Itertools;
+use syntax::{ast, AstNode, GreenNode, SyntaxNode};
+
+use crate::context::CompletionContext;
+
+/// A snippet scope describing where a snippet may apply to.
+/// These may differ slightly in meaning depending on the snippet trigger.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum SnippetScope {
+ Item,
+ Expr,
+ Type,
+}
+
+/// A user supplied snippet.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Snippet {
+ pub postfix_triggers: Box<[Box<str>]>,
+ pub prefix_triggers: Box<[Box<str>]>,
+ pub scope: SnippetScope,
+ pub description: Option<Box<str>>,
+ snippet: String,
+ // These are `ast::Path`'s but due to SyntaxNodes not being Send we store these
+ // and reconstruct them on demand instead. This is cheaper than reparsing them
+ // from strings
+ requires: Box<[GreenNode]>,
+}
+
+impl Snippet {
+ pub fn new(
+ prefix_triggers: &[String],
+ postfix_triggers: &[String],
+ snippet: &[String],
+ description: &str,
+ requires: &[String],
+ scope: SnippetScope,
+ ) -> Option<Self> {
+ if prefix_triggers.is_empty() && postfix_triggers.is_empty() {
+ return None;
+ }
+ let (requires, snippet, description) = validate_snippet(snippet, description, requires)?;
+ Some(Snippet {
+ // Box::into doesn't work as that has a Copy bound 😒
+ postfix_triggers: postfix_triggers.iter().map(String::as_str).map(Into::into).collect(),
+ prefix_triggers: prefix_triggers.iter().map(String::as_str).map(Into::into).collect(),
+ scope,
+ snippet,
+ description,
+ requires,
+ })
+ }
+
+ /// Returns [`None`] if the required items do not resolve.
+ pub(crate) fn imports(&self, ctx: &CompletionContext<'_>) -> Option<Vec<LocatedImport>> {
+ import_edits(ctx, &self.requires)
+ }
+
+ pub fn snippet(&self) -> String {
+ self.snippet.replace("${receiver}", "$0")
+ }
+
+ pub fn postfix_snippet(&self, receiver: &str) -> String {
+ self.snippet.replace("${receiver}", receiver)
+ }
+}
+
+fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<Vec<LocatedImport>> {
+ let resolve = |import: &GreenNode| {
+ let path = ast::Path::cast(SyntaxNode::new_root(import.clone()))?;
+ let item = match ctx.scope.speculative_resolve(&path)? {
+ hir::PathResolution::Def(def) => def.into(),
+ _ => return None,
+ };
+ let path =
+ ctx.module.find_use_path_prefixed(ctx.db, item, ctx.config.insert_use.prefix_kind)?;
+ Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None)))
+ };
+ let mut res = Vec::with_capacity(requires.len());
+ for import in requires {
+ match resolve(import) {
+ Some(first) => res.extend(first),
+ None => return None,
+ }
+ }
+ Some(res)
+}
+
+fn validate_snippet(
+ snippet: &[String],
+ description: &str,
+ requires: &[String],
+) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
+ let mut imports = Vec::with_capacity(requires.len());
+ for path in requires.iter() {
+ let use_path = ast::SourceFile::parse(&format!("use {};", path))
+ .syntax_node()
+ .descendants()
+ .find_map(ast::Path::cast)?;
+ if use_path.syntax().text() != path.as_str() {
+ return None;
+ }
+ let green = use_path.syntax().green().into_owned();
+ imports.push(green);
+ }
+ let snippet = snippet.iter().join("\n");
+ let description = (!description.is_empty())
+ .then(|| description.split_once('\n').map_or(description, |(it, _)| it))
+ .map(ToOwned::to_owned)
+ .map(Into::into);
+ Some((imports.into_boxed_slice(), snippet, description))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
new file mode 100644
index 000000000..cf826648d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -0,0 +1,305 @@
+//! Tests and test utilities for completions.
+//!
+//! Most tests live in this module or its submodules. The tests in these submodules are "location"
+//! oriented, that is they try to check completions for something like type position, param position
+//! etc.
+//! Tests that are more orientated towards specific completion types like visibility checks of path
+//! completions or `check_edit` tests usually live in their respective completion modules instead.
+//! This gives this test module and its submodules here the main purpose of giving the developer an
+//! overview of whats being completed where, not how.
+
+mod attribute;
+mod expression;
+mod flyimport;
+mod fn_param;
+mod item_list;
+mod item;
+mod pattern;
+mod predicate;
+mod proc_macros;
+mod record;
+mod special;
+mod type_pos;
+mod use_tree;
+mod visibility;
+
+use hir::{db::DefDatabase, PrefixKind, Semantics};
+use ide_db::{
+ base_db::{fixture::ChangeFixture, FileLoader, FilePosition},
+ imports::insert_use::{ImportGranularity, InsertUseConfig},
+ RootDatabase, SnippetCap,
+};
+use itertools::Itertools;
+use stdx::{format_to, trim_indent};
+use syntax::{AstNode, NodeOrToken, SyntaxElement};
+use test_utils::assert_eq_text;
+
+use crate::{
+ resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionItem,
+ CompletionItemKind,
+};
+
+/// Lots of basic item definitions
+const BASE_ITEMS_FIXTURE: &str = r#"
+enum Enum { TupleV(u32), RecordV { field: u32 }, UnitV }
+use self::Enum::TupleV;
+mod module {}
+
+trait Trait {}
+static STATIC: Unit = Unit;
+const CONST: Unit = Unit;
+struct Record { field: u32 }
+struct Tuple(u32);
+struct Unit;
+#[macro_export]
+macro_rules! makro {}
+#[rustc_builtin_macro]
+pub macro Clone {}
+fn function() {}
+union Union { field: i32 }
+"#;
+
+pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: false,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: PrefixKind::Plain,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+};
+
+pub(crate) fn completion_list(ra_fixture: &str) -> String {
+ completion_list_with_config(TEST_CONFIG, ra_fixture, true, None)
+}
+
+pub(crate) fn completion_list_no_kw(ra_fixture: &str) -> String {
+ completion_list_with_config(TEST_CONFIG, ra_fixture, false, None)
+}
+
+pub(crate) fn completion_list_no_kw_with_private_editable(ra_fixture: &str) -> String {
+ let mut config = TEST_CONFIG.clone();
+ config.enable_private_editable = true;
+ completion_list_with_config(config, ra_fixture, false, None)
+}
+
+pub(crate) fn completion_list_with_trigger_character(
+ ra_fixture: &str,
+ trigger_character: Option<char>,
+) -> String {
+ completion_list_with_config(TEST_CONFIG, ra_fixture, true, trigger_character)
+}
+
+fn completion_list_with_config(
+ config: CompletionConfig,
+ ra_fixture: &str,
+ include_keywords: bool,
+ trigger_character: Option<char>,
+) -> String {
+ // filter out all but one builtintype completion for smaller test outputs
+ let items = get_all_items(config, ra_fixture, trigger_character);
+ let items = items
+ .into_iter()
+ .filter(|it| it.kind() != CompletionItemKind::BuiltinType || it.label() == "u32")
+ .filter(|it| include_keywords || it.kind() != CompletionItemKind::Keyword)
+ .filter(|it| include_keywords || it.kind() != CompletionItemKind::Snippet)
+ .sorted_by_key(|it| (it.kind(), it.label().to_owned(), it.detail().map(ToOwned::to_owned)))
+ .collect();
+ render_completion_list(items)
+}
+
+/// Creates analysis from a multi-file fixture, returns positions marked with $0.
+pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ let mut database = RootDatabase::default();
+ database.set_enable_proc_attr_macros(true);
+ database.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+ (database, FilePosition { file_id, offset })
+}
+
+pub(crate) fn do_completion(code: &str, kind: CompletionItemKind) -> Vec<CompletionItem> {
+ do_completion_with_config(TEST_CONFIG, code, kind)
+}
+
+pub(crate) fn do_completion_with_config(
+ config: CompletionConfig,
+ code: &str,
+ kind: CompletionItemKind,
+) -> Vec<CompletionItem> {
+ get_all_items(config, code, None)
+ .into_iter()
+ .filter(|c| c.kind() == kind)
+ .sorted_by(|l, r| l.label().cmp(r.label()))
+ .collect()
+}
+
+fn render_completion_list(completions: Vec<CompletionItem>) -> String {
+ fn monospace_width(s: &str) -> usize {
+ s.chars().count()
+ }
+ let label_width =
+ completions.iter().map(|it| monospace_width(it.label())).max().unwrap_or_default().min(22);
+ completions
+ .into_iter()
+ .map(|it| {
+ let tag = it.kind().tag();
+ let var_name = format!("{} {}", tag, it.label());
+ let mut buf = var_name;
+ if let Some(detail) = it.detail() {
+ let width = label_width.saturating_sub(monospace_width(it.label()));
+ format_to!(buf, "{:width$} {}", "", detail, width = width);
+ }
+ if it.deprecated() {
+ format_to!(buf, " DEPRECATED");
+ }
+ format_to!(buf, "\n");
+ buf
+ })
+ .collect()
+}
+
+#[track_caller]
+pub(crate) fn check_edit(what: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_edit_with_config(TEST_CONFIG, what, ra_fixture_before, ra_fixture_after)
+}
+
+#[track_caller]
+pub(crate) fn check_edit_with_config(
+ config: CompletionConfig,
+ what: &str,
+ ra_fixture_before: &str,
+ ra_fixture_after: &str,
+) {
+ let ra_fixture_after = trim_indent(ra_fixture_after);
+ let (db, position) = position(ra_fixture_before);
+ let completions: Vec<CompletionItem> =
+ crate::completions(&db, &config, position, None).unwrap().into();
+ let (completion,) = completions
+ .iter()
+ .filter(|it| it.lookup() == what)
+ .collect_tuple()
+ .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions));
+ let mut actual = db.file_text(position.file_id).to_string();
+
+ let mut combined_edit = completion.text_edit().to_owned();
+
+ resolve_completion_edits(
+ &db,
+ &config,
+ position,
+ completion.imports_to_add().iter().filter_map(|import_edit| {
+ let import_path = &import_edit.import_path;
+ let import_name = import_path.segments().last()?;
+ Some((import_path.to_string(), import_name.to_string()))
+ }),
+ )
+ .into_iter()
+ .flatten()
+ .for_each(|text_edit| {
+ combined_edit.union(text_edit).expect(
+ "Failed to apply completion resolve changes: change ranges overlap, but should not",
+ )
+ });
+
+ combined_edit.apply(&mut actual);
+ assert_eq_text!(&ra_fixture_after, &actual)
+}
+
+pub(crate) fn check_pattern_is_applicable(code: &str, check: impl FnOnce(SyntaxElement) -> bool) {
+ let (db, pos) = position(code);
+
+ let sema = Semantics::new(&db);
+ let original_file = sema.parse(pos.file_id);
+ let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap();
+ assert!(check(NodeOrToken::Token(token)));
+}
+
+pub(crate) fn get_all_items(
+ config: CompletionConfig,
+ code: &str,
+ trigger_character: Option<char>,
+) -> Vec<CompletionItem> {
+ let (db, position) = position(code);
+ let res = crate::completions(&db, &config, position, trigger_character)
+ .map_or_else(Vec::default, Into::into);
+ // validate
+ res.iter().for_each(|it| {
+ let sr = it.source_range();
+ assert!(
+ sr.contains_inclusive(position.offset),
+ "source range {sr:?} does not contain the offset {:?} of the completion request: {it:?}",
+ position.offset
+ );
+ });
+ res
+}
+
+#[test]
+fn test_no_completions_required() {
+ assert_eq!(completion_list(r#"fn foo() { for i i$0 }"#), String::new());
+}
+
+#[test]
+fn regression_10042() {
+ completion_list(
+ r#"
+macro_rules! preset {
+ ($($x:ident)&&*) => {
+ {
+ let mut v = Vec::new();
+ $(
+ v.push($x.into());
+ )*
+ v
+ }
+ };
+}
+
+fn foo() {
+ preset!(foo$0);
+}
+"#,
+ );
+}
+
+#[test]
+fn no_completions_in_comments() {
+ assert_eq!(
+ completion_list(
+ r#"
+fn test() {
+let x = 2; // A comment$0
+}
+"#,
+ ),
+ String::new(),
+ );
+ assert_eq!(
+ completion_list(
+ r#"
+/*
+Some multi-line comment$0
+*/
+"#,
+ ),
+ String::new(),
+ );
+ assert_eq!(
+ completion_list(
+ r#"
+/// Some doc comment
+/// let test$0 = 1
+"#,
+ ),
+ String::new(),
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
new file mode 100644
index 000000000..1578ba2c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
@@ -0,0 +1,1016 @@
+//! Completion tests for attributes.
+use expect_test::{expect, Expect};
+
+use crate::tests::{check_edit, completion_list};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn proc_macros() {
+ check(
+ r#"
+//- proc_macros: identity
+#[$0]
+struct Foo;
+"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at derive(…)
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at non_exhaustive
+ at repr(…)
+ at warn(…)
+ md proc_macros
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn proc_macros_on_comment() {
+ check(
+ r#"
+//- proc_macros: identity
+/// $0
+#[proc_macros::identity]
+struct Foo;
+"#,
+ expect![[r#""#]],
+ )
+}
+
+#[test]
+fn proc_macros_qualified() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::$0]
+struct Foo;
+"#,
+ expect![[r#"
+ at identity proc_macro identity
+ "#]],
+ )
+}
+
+#[test]
+fn inside_nested_attr() {
+ check(r#"#[cfg($0)]"#, expect![[]])
+}
+
+#[test]
+fn with_existing_attr() {
+ check(
+ r#"#[no_mangle] #[$0] mcall!();"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at forbid(…)
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn attr_on_source_file() {
+ check(
+ r#"#![$0]"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at crate_name = ""
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at feature(…)
+ at forbid(…)
+ at must_use
+ at no_implicit_prelude
+ at no_main
+ at no_mangle
+ at no_std
+ at recursion_limit = "…"
+ at type_length_limit = …
+ at warn(…)
+ at windows_subsystem = "…"
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_module() {
+ check(
+ r#"#[$0] mod foo;"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at macro_use
+ at must_use
+ at no_mangle
+ at path = "…"
+ at warn(…)
+ kw crate::
+ kw self::
+ kw super::
+ "#]],
+ );
+ check(
+ r#"mod foo {#![$0]}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_implicit_prelude
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ kw super::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_macro_rules() {
+ check(
+ r#"#[$0] macro_rules! foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at macro_export
+ at macro_use
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_macro_def() {
+ check(
+ r#"#[$0] macro foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_extern_crate() {
+ check(
+ r#"#[$0] extern crate foo;"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at macro_use
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_use() {
+ check(
+ r#"#[$0] use foo;"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_type_alias() {
+ check(
+ r#"#[$0] type foo = ();"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_struct() {
+ check(
+ r#"
+//- minicore:derive
+#[$0]
+struct Foo;
+"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at derive macro derive
+ at derive(…)
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at non_exhaustive
+ at repr(…)
+ at warn(…)
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_enum() {
+ check(
+ r#"#[$0] enum Foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at derive(…)
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at non_exhaustive
+ at repr(…)
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_const() {
+ check(
+ r#"#[$0] const FOO: () = ();"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_static() {
+ check(
+ r#"#[$0] static FOO: () = ()"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at export_name = "…"
+ at forbid(…)
+ at global_allocator
+ at link_name = "…"
+ at link_section = "…"
+ at must_use
+ at no_mangle
+ at used
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_trait() {
+ check(
+ r#"#[$0] trait Foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_impl() {
+ check(
+ r#"#[$0] impl () {}"#,
+ expect![[r#"
+ at allow(…)
+ at automatically_derived
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"impl () {#![$0]}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_extern_block() {
+ check(
+ r#"#[$0] extern {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at link
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"extern {#![$0]}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at link
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_variant() {
+ check(
+ r#"enum Foo { #[$0] Bar }"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at forbid(…)
+ at non_exhaustive
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_fn() {
+ check(
+ r#"#[$0] fn main() {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at cold
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at export_name = "…"
+ at forbid(…)
+ at ignore = "…"
+ at inline
+ at link_name = "…"
+ at link_section = "…"
+ at must_use
+ at must_use
+ at no_mangle
+ at panic_handler
+ at proc_macro
+ at proc_macro_attribute
+ at proc_macro_derive(…)
+ at should_panic
+ at target_feature(enable = "…")
+ at test
+ at track_caller
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_in_source_file_end() {
+ check(
+ r#"#[$0]"#,
+ expect![[r#"
+ at allow(…)
+ at automatically_derived
+ at cfg(…)
+ at cfg_attr(…)
+ at cold
+ at deny(…)
+ at deprecated
+ at derive(…)
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at export_name = "…"
+ at forbid(…)
+ at global_allocator
+ at ignore = "…"
+ at inline
+ at link
+ at link_name = "…"
+ at link_section = "…"
+ at macro_export
+ at macro_use
+ at must_use
+ at no_mangle
+ at non_exhaustive
+ at panic_handler
+ at path = "…"
+ at proc_macro
+ at proc_macro_attribute
+ at proc_macro_derive(…)
+ at repr(…)
+ at should_panic
+ at target_feature(enable = "…")
+ at test
+ at track_caller
+ at used
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+mod cfg {
+ use super::*;
+
+ #[test]
+ fn cfg_target_endian() {
+ check(
+ r#"#[cfg(target_endian = $0"#,
+ expect![[r#"
+ ba big
+ ba little
+ "#]],
+ );
+ }
+}
+
+mod derive {
+ use super::*;
+
+ fn check_derive(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn no_completion_for_incorrect_derive() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive{$0)] struct Test;
+"#,
+ expect![[]],
+ )
+ }
+
+ #[test]
+ fn empty_derive() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive($0)] struct Test;
+"#,
+ expect![[r#"
+ de Clone macro Clone
+ de Clone, Copy
+ de Default macro Default
+ de PartialEq macro PartialEq
+ de PartialEq, Eq
+ de PartialEq, Eq, PartialOrd, Ord
+ de PartialEq, PartialOrd
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive_with_input_before() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive(serde::Serialize, PartialEq, $0)] struct Test;
+"#,
+ expect![[r#"
+ de Clone macro Clone
+ de Clone, Copy
+ de Default macro Default
+ de Eq
+ de Eq, PartialOrd, Ord
+ de PartialOrd
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ )
+ }
+
+ #[test]
+ fn derive_with_input_after() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive($0 serde::Serialize, PartialEq)] struct Test;
+"#,
+ expect![[r#"
+ de Clone macro Clone
+ de Clone, Copy
+ de Default macro Default
+ de Eq
+ de Eq, PartialOrd, Ord
+ de PartialOrd
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive_with_existing_derives() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive(PartialEq, Eq, Or$0)] struct Test;
+"#,
+ expect![[r#"
+ de Clone macro Clone
+ de Clone, Copy
+ de Default macro Default
+ de PartialOrd
+ de PartialOrd, Ord
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive_flyimport() {
+ check_derive(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+#[derive(der$0)] struct Test;
+"#,
+ expect![[r#"
+ de DeriveIdentity (use proc_macros::DeriveIdentity) proc_macro DeriveIdentity
+ md core
+ md proc_macros
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check_derive(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+use proc_macros::DeriveIdentity;
+#[derive(der$0)] struct Test;
+"#,
+ expect![[r#"
+ de DeriveIdentity proc_macro DeriveIdentity
+ md core
+ md proc_macros
+ kw crate::
+ kw self::
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive_flyimport_edit() {
+ check_edit(
+ "DeriveIdentity",
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+#[derive(der$0)] struct Test;
+"#,
+ r#"
+use proc_macros::DeriveIdentity;
+
+#[derive(DeriveIdentity)] struct Test;
+"#,
+ );
+ }
+
+ #[test]
+ fn qualified() {
+ check_derive(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive, copy, clone
+#[derive(proc_macros::$0)] struct Test;
+"#,
+ expect![[r#"
+ de DeriveIdentity proc_macro DeriveIdentity
+ "#]],
+ );
+ check_derive(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive, copy, clone
+#[derive(proc_macros::C$0)] struct Test;
+"#,
+ expect![[r#"
+ de DeriveIdentity proc_macro DeriveIdentity
+ "#]],
+ );
+ }
+}
+
+mod lint {
+ use super::*;
+
+ #[test]
+ fn lint_empty() {
+ check_edit(
+ "deprecated",
+ r#"#[allow($0)] struct Test;"#,
+ r#"#[allow(deprecated)] struct Test;"#,
+ )
+ }
+
+ #[test]
+ fn lint_with_existing() {
+ check_edit(
+ "deprecated",
+ r#"#[allow(keyword_idents, $0)] struct Test;"#,
+ r#"#[allow(keyword_idents, deprecated)] struct Test;"#,
+ )
+ }
+
+ #[test]
+ fn lint_qualified() {
+ check_edit(
+ "deprecated",
+ r#"#[allow(keyword_idents, $0)] struct Test;"#,
+ r#"#[allow(keyword_idents, deprecated)] struct Test;"#,
+ )
+ }
+
+ #[test]
+ fn lint_feature() {
+ check_edit(
+ "box_syntax",
+ r#"#[feature(box_$0)] struct Test;"#,
+ r#"#[feature(box_syntax)] struct Test;"#,
+ )
+ }
+
+ #[test]
+ fn lint_clippy_unqualified() {
+ check_edit(
+ "clippy::as_conversions",
+ r#"#[allow($0)] struct Test;"#,
+ r#"#[allow(clippy::as_conversions)] struct Test;"#,
+ );
+ }
+
+ #[test]
+ fn lint_clippy_qualified() {
+ check_edit(
+ "as_conversions",
+ r#"#[allow(clippy::$0)] struct Test;"#,
+ r#"#[allow(clippy::as_conversions)] struct Test;"#,
+ );
+ }
+
+ #[test]
+ fn lint_rustdoc_unqualified() {
+ check_edit(
+ "rustdoc::bare_urls",
+ r#"#[allow($0)] struct Test;"#,
+ r#"#[allow(rustdoc::bare_urls)] struct Test;"#,
+ );
+ }
+
+ #[test]
+ fn lint_rustdoc_qualified() {
+ check_edit(
+ "bare_urls",
+ r#"#[allow(rustdoc::$0)] struct Test;"#,
+ r#"#[allow(rustdoc::bare_urls)] struct Test;"#,
+ );
+ }
+
+ #[test]
+ fn lint_unclosed() {
+ check_edit(
+ "deprecated",
+ r#"#[allow(dep$0 struct Test;"#,
+ r#"#[allow(deprecated struct Test;"#,
+ );
+ check_edit(
+ "bare_urls",
+ r#"#[allow(rustdoc::$0 struct Test;"#,
+ r#"#[allow(rustdoc::bare_urls struct Test;"#,
+ );
+ }
+}
+
+mod repr {
+ use super::*;
+
+ fn check_repr(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn no_completion_for_incorrect_repr() {
+ check_repr(r#"#[repr{$0)] struct Test;"#, expect![[]])
+ }
+
+ #[test]
+ fn empty() {
+ check_repr(
+ r#"#[repr($0)] struct Test;"#,
+ expect![[r#"
+ ba C
+ ba align($0)
+ ba i16
+ ba i28
+ ba i32
+ ba i64
+ ba i8
+ ba isize
+ ba packed
+ ba transparent
+ ba u128
+ ba u16
+ ba u32
+ ba u64
+ ba u8
+ ba usize
+ "#]],
+ );
+ }
+
+ #[test]
+ fn transparent() {
+ check_repr(r#"#[repr(transparent, $0)] struct Test;"#, expect![[r#""#]]);
+ }
+
+ #[test]
+ fn align() {
+ check_repr(
+ r#"#[repr(align(1), $0)] struct Test;"#,
+ expect![[r#"
+ ba C
+ ba i16
+ ba i28
+ ba i32
+ ba i64
+ ba i8
+ ba isize
+ ba transparent
+ ba u128
+ ba u16
+ ba u32
+ ba u64
+ ba u8
+ ba usize
+ "#]],
+ );
+ }
+
+ #[test]
+ fn packed() {
+ check_repr(
+ r#"#[repr(packed, $0)] struct Test;"#,
+ expect![[r#"
+ ba C
+ ba i16
+ ba i28
+ ba i32
+ ba i64
+ ba i8
+ ba isize
+ ba transparent
+ ba u128
+ ba u16
+ ba u32
+ ba u64
+ ba u8
+ ba usize
+ "#]],
+ );
+ }
+
+ #[test]
+ fn c() {
+ check_repr(
+ r#"#[repr(C, $0)] struct Test;"#,
+ expect![[r#"
+ ba align($0)
+ ba i16
+ ba i28
+ ba i32
+ ba i64
+ ba i8
+ ba isize
+ ba packed
+ ba u128
+ ba u16
+ ba u32
+ ba u64
+ ba u8
+ ba usize
+ "#]],
+ );
+ }
+
+ #[test]
+ fn prim() {
+ check_repr(
+ r#"#[repr(usize, $0)] struct Test;"#,
+ expect![[r#"
+ ba C
+ ba align($0)
+ ba packed
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
new file mode 100644
index 000000000..925081ebf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -0,0 +1,672 @@
+//! Completion tests for expressions.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+fn check_empty(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn complete_literal_struct_with_a_private_field() {
+ // `FooDesc.bar` is private, the completion should not be triggered.
+ check(
+ r#"
+mod _69latrick {
+ pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, bar: bool }
+ pub fn create_foo(foo_desc: &FooDesc) -> () { () }
+}
+
+fn baz() {
+ use _69latrick::*;
+
+ let foo = create_foo(&$0);
+}
+ "#,
+ // This should not contain `FooDesc {…}`.
+ expect![[r#"
+ ct CONST
+ en Enum
+ fn baz() fn()
+ fn create_foo(…) fn(&FooDesc)
+ fn function() fn()
+ ma makro!(…) macro_rules! makro
+ md _69latrick
+ md module
+ sc STATIC
+ st FooDesc
+ st Record
+ st Tuple
+ st Unit
+ un Union
+ ev TupleV(…) TupleV(u32)
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw mut
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ )
+}
+
+#[test]
+fn completes_various_bindings() {
+ check_empty(
+ r#"
+fn func(param0 @ (param1, param2): (i32, i32)) {
+ let letlocal = 92;
+ if let ifletlocal = 100 {
+ match 0 {
+ matcharm => 1 + $0,
+ otherwise => (),
+ }
+ }
+ let letlocal2 = 44;
+}
+"#,
+ expect![[r#"
+ fn func(…) fn((i32, i32))
+ lc ifletlocal i32
+ lc letlocal i32
+ lc matcharm i32
+ lc param0 (i32, i32)
+ lc param1 i32
+ lc param2 i32
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+}
+
+#[test]
+fn completes_all_the_things_in_fn_body() {
+ check(
+ r#"
+use non_existant::Unresolved;
+mod qualified { pub enum Enum { Variant } }
+
+impl Unit {
+ fn foo<'lifetime, TypeParam, const CONST_PARAM: usize>(self) {
+ fn local_func() {}
+ $0
+ }
+}
+"#,
+ // `self` is in here twice, once as the module, once as the local
+ expect![[r#"
+ ct CONST
+ cp CONST_PARAM
+ en Enum
+ fn function() fn()
+ fn local_func() fn()
+ lc self Unit
+ ma makro!(…) macro_rules! makro
+ md module
+ md qualified
+ sp Self
+ sc STATIC
+ st Record
+ st Tuple
+ st Unit
+ tp TypeParam
+ un Union
+ ev TupleV(…) TupleV(u32)
+ bt u32
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ me self.foo() fn(self)
+ sn macro_rules
+ sn pd
+ sn ppd
+ ?? Unresolved
+ "#]],
+ );
+ check(
+ r#"
+use non_existant::Unresolved;
+mod qualified { pub enum Enum { Variant } }
+
+impl Unit {
+ fn foo<'lifetime, TypeParam, const CONST_PARAM: usize>(self) {
+ fn local_func() {}
+ self::$0
+ }
+}
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ fn function() fn()
+ ma makro!(…) macro_rules! makro
+ md module
+ md qualified
+ sc STATIC
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ ev TupleV(…) TupleV(u32)
+ ?? Unresolved
+ "#]],
+ );
+}
+
+#[test]
+fn complete_in_block() {
+ check_empty(
+ r#"
+ fn foo() {
+ if true {
+ $0
+ }
+ }
+"#,
+ expect![[r#"
+ fn foo() fn()
+ bt u32
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ )
+}
+
+#[test]
+fn complete_after_if_expr() {
+ check_empty(
+ r#"
+ fn foo() {
+ if true {}
+ $0
+ }
+"#,
+ expect![[r#"
+ fn foo() fn()
+ bt u32
+ kw const
+ kw crate::
+ kw else
+ kw else if
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ )
+}
+
+#[test]
+fn complete_in_match_arm() {
+ check_empty(
+ r#"
+ fn foo() {
+ match () {
+ () => $0
+ }
+ }
+"#,
+ expect![[r#"
+ fn foo() fn()
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ )
+}
+
+#[test]
+fn completes_in_loop_ctx() {
+ check_empty(
+ r"fn my() { loop { $0 } }",
+ expect![[r#"
+ fn my() fn()
+ bt u32
+ kw break
+ kw const
+ kw continue
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ );
+}
+
+#[test]
+fn completes_in_let_initializer() {
+ check_empty(
+ r#"fn main() { let _ = $0 }"#,
+ expect![[r#"
+ fn main() fn()
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ )
+}
+
+#[test]
+fn struct_initializer_field_expr() {
+ check_empty(
+ r#"
+struct Foo {
+ pub f: i32,
+}
+fn foo() {
+ Foo {
+ f: $0
+ }
+}
+"#,
+ expect![[r#"
+ fn foo() fn()
+ st Foo
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+}
+
+#[test]
+fn shadowing_shows_single_completion() {
+ cov_mark::check!(shadowing_shows_single_completion);
+
+ check_empty(
+ r#"
+fn foo() {
+ let bar = 92;
+ {
+ let bar = 62;
+ drop($0)
+ }
+}
+"#,
+ expect![[r#"
+ fn foo() fn()
+ lc bar i32
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+}
+
+#[test]
+fn in_macro_expr_frag() {
+ check_empty(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn quux(x: i32) {
+ m!($0);
+}
+"#,
+ expect![[r#"
+ fn quux(…) fn(i32)
+ lc x i32
+ ma m!(…) macro_rules! m
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+ check_empty(
+ r"
+macro_rules! m { ($e:expr) => { $e } }
+fn quux(x: i32) {
+ m!(x$0);
+}
+",
+ expect![[r#"
+ fn quux(…) fn(i32)
+ lc x i32
+ ma m!(…) macro_rules! m
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+ check_empty(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn quux(x: i32) {
+ let y = 92;
+ m!(x$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn enum_qualified() {
+ check(
+ r#"
+impl Enum {
+ type AssocType = ();
+ const ASSOC_CONST: () = ();
+ fn assoc_fn() {}
+}
+fn func() {
+ Enum::$0
+}
+"#,
+ expect![[r#"
+ ct ASSOC_CONST const ASSOC_CONST: ()
+ fn assoc_fn() fn()
+ ta AssocType type AssocType = ()
+ ev RecordV {…} RecordV { field: u32 }
+ ev TupleV(…) TupleV(u32)
+ ev UnitV UnitV
+ "#]],
+ );
+}
+
+#[test]
+fn ty_qualified_no_drop() {
+ check_empty(
+ r#"
+//- minicore: drop
+struct Foo;
+impl Drop for Foo {
+ fn drop(&mut self) {}
+}
+fn func() {
+ Foo::$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn with_parens() {
+ check_empty(
+ r#"
+enum Enum {
+ Variant()
+}
+impl Enum {
+ fn variant() -> Self { Enum::Variant() }
+}
+fn func() {
+ Enum::$0()
+}
+"#,
+ expect![[r#"
+ fn variant fn() -> Enum
+ ev Variant Variant
+ "#]],
+ );
+}
+
+#[test]
+fn detail_impl_trait_in_return_position() {
+ check_empty(
+ r"
+//- minicore: sized
+trait Trait<T> {}
+fn foo<U>() -> impl Trait<U> {}
+fn main() {
+ self::$0
+}
+",
+ expect![[r#"
+ fn foo() fn() -> impl Trait<U>
+ fn main() fn()
+ tt Trait
+ "#]],
+ );
+}
+
+#[test]
+fn detail_async_fn() {
+ check_empty(
+ r#"
+//- minicore: future, sized
+trait Trait<T> {}
+async fn foo() -> u8 {}
+async fn bar<U>() -> impl Trait<U> {}
+fn main() {
+ self::$0
+}
+"#,
+ expect![[r#"
+ fn bar() async fn() -> impl Trait<U>
+ fn foo() async fn() -> u8
+ fn main() fn()
+ tt Trait
+ "#]],
+ );
+}
+
+#[test]
+fn detail_impl_trait_in_argument_position() {
+ check_empty(
+ r"
+//- minicore: sized
+trait Trait<T> {}
+struct Foo;
+impl Foo {
+ fn bar<U>(_: impl Trait<U>) {}
+}
+fn main() {
+ Foo::$0
+}
+",
+ expect![[r"
+ fn bar(…) fn(impl Trait<U>)
+ "]],
+ );
+}
+
+#[test]
+fn complete_record_expr_path() {
+ check(
+ r#"
+struct Zulu;
+impl Zulu {
+ fn test() -> Self { }
+}
+fn boi(val: Zulu) { }
+fn main() {
+ boi(Zulu:: $0 {});
+}
+"#,
+ expect![[r#"
+ fn test() fn() -> Zulu
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
new file mode 100644
index 000000000..0bba7f245
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
@@ -0,0 +1,1232 @@
+use expect_test::{expect, Expect};
+
+use crate::{
+ context::{CompletionAnalysis, NameContext, NameKind, NameRefKind},
+ tests::{check_edit, check_edit_with_config, TEST_CONFIG},
+};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let config = TEST_CONFIG;
+ let (db, position) = crate::tests::position(ra_fixture);
+ let (ctx, analysis) = crate::context::CompletionContext::new(&db, position, &config).unwrap();
+
+ let mut acc = crate::completions::Completions::default();
+ if let CompletionAnalysis::Name(NameContext { kind: NameKind::IdentPat(pat_ctx), .. }) =
+ &analysis
+ {
+ crate::completions::flyimport::import_on_the_fly_pat(&mut acc, &ctx, pat_ctx);
+ }
+ if let CompletionAnalysis::NameRef(name_ref_ctx) = &analysis {
+ match &name_ref_ctx.kind {
+ NameRefKind::Path(path) => {
+ crate::completions::flyimport::import_on_the_fly_path(&mut acc, &ctx, path);
+ }
+ NameRefKind::DotAccess(dot_access) => {
+ crate::completions::flyimport::import_on_the_fly_dot(&mut acc, &ctx, dot_access);
+ }
+ NameRefKind::Pattern(pattern) => {
+ crate::completions::flyimport::import_on_the_fly_pat(&mut acc, &ctx, pattern);
+ }
+ _ => (),
+ }
+ }
+
+ expect.assert_eq(&super::render_completion_list(Vec::from(acc)));
+}
+
+#[test]
+fn function_fuzzy_completion() {
+ check_edit(
+ "stdin",
+ r#"
+//- /lib.rs crate:dep
+pub mod io {
+ pub fn stdin() {}
+};
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ stdi$0
+}
+"#,
+ r#"
+use dep::io::stdin;
+
+fn main() {
+ stdin()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn macro_fuzzy_completion() {
+ check_edit(
+ "macro_with_curlies!",
+ r#"
+//- /lib.rs crate:dep
+/// Please call me as macro_with_curlies! {}
+#[macro_export]
+macro_rules! macro_with_curlies {
+ () => {}
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ curli$0
+}
+"#,
+ r#"
+use dep::macro_with_curlies;
+
+fn main() {
+ macro_with_curlies! {$0}
+}
+"#,
+ );
+}
+
+#[test]
+fn struct_fuzzy_completion() {
+ check_edit(
+ "ThirdStruct",
+ r#"
+//- /lib.rs crate:dep
+pub struct FirstStruct;
+pub mod some_module {
+ pub struct SecondStruct;
+ pub struct ThirdStruct;
+}
+
+//- /main.rs crate:main deps:dep
+use dep::{FirstStruct, some_module::SecondStruct};
+
+fn main() {
+ this$0
+}
+"#,
+ r#"
+use dep::{FirstStruct, some_module::{SecondStruct, ThirdStruct}};
+
+fn main() {
+ ThirdStruct
+}
+"#,
+ );
+}
+
+#[test]
+fn short_paths_are_ignored() {
+ cov_mark::check!(flyimport_exact_on_short_path);
+
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub struct Bar;
+pub struct Rcar;
+pub struct Rc;
+pub mod some_module {
+ pub struct Bar;
+ pub struct Rcar;
+ pub struct Rc;
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ rc$0
+}
+"#,
+ expect![[r#"
+ st Rc (use dep::Rc)
+ st Rc (use dep::some_module::Rc)
+ "#]],
+ );
+}
+
+#[test]
+fn fuzzy_completions_come_in_specific_order() {
+ cov_mark::check!(certain_fuzzy_order_test);
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub struct FirstStruct;
+pub mod some_module {
+ // already imported, omitted
+ pub struct SecondStruct;
+ // does not contain all letters from the query, omitted
+ pub struct UnrelatedOne;
+ // contains all letters from the query, but not in sequence, displayed last
+ pub struct ThiiiiiirdStruct;
+ // contains all letters from the query, but not in the beginning, displayed second
+ pub struct AfterThirdStruct;
+    // contains all letters from the query in the beginning, displayed first
+ pub struct ThirdStruct;
+}
+
+//- /main.rs crate:main deps:dep
+use dep::{FirstStruct, some_module::SecondStruct};
+
+fn main() {
+ hir$0
+}
+"#,
+ expect![[r#"
+ st ThirdStruct (use dep::some_module::ThirdStruct)
+ st AfterThirdStruct (use dep::some_module::AfterThirdStruct)
+ st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct)
+ "#]],
+ );
+}
+
+#[test]
+fn trait_function_fuzzy_completion() {
+ let fixture = r#"
+ //- /lib.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+ }
+
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::wei$0
+ }
+ "#;
+
+ check(
+ fixture,
+ expect![[r#"
+ fn weird_function() (use dep::test_mod::TestTrait) fn()
+ "#]],
+ );
+
+ check_edit(
+ "weird_function",
+ fixture,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ dep::test_mod::TestStruct::weird_function()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn trait_const_fuzzy_completion() {
+ let fixture = r#"
+ //- /lib.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+ }
+
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::spe$0
+ }
+ "#;
+
+ check(
+ fixture,
+ expect![[r#"
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait)
+ "#]],
+ );
+
+ check_edit(
+ "SPECIAL_CONST",
+ fixture,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ dep::test_mod::TestStruct::SPECIAL_CONST
+}
+"#,
+ );
+}
+
+#[test]
+fn trait_method_fuzzy_completion() {
+ let fixture = r#"
+ //- /lib.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+ }
+
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.ran$0
+ }
+ "#;
+
+ check(
+ fixture,
+ expect![[r#"
+ me random_method() (use dep::test_mod::TestTrait) fn(&self)
+ "#]],
+ );
+
+ check_edit(
+ "random_method",
+ fixture,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.random_method()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn trait_method_from_alias() {
+ let fixture = r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ fn random_method();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn random_method() {}
+ }
+ pub type TestAlias = TestStruct;
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestAlias::ran$0
+}
+"#;
+
+ check(
+ fixture,
+ expect![[r#"
+ fn random_method() (use dep::test_mod::TestTrait) fn()
+ "#]],
+ );
+
+ check_edit(
+ "random_method",
+ fixture,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ dep::test_mod::TestAlias::random_method()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn no_trait_type_fuzzy_completion() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::hum$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn does_not_propose_names_in_scope() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+use dep::test_mod::TestStruct;
+fn main() {
+ TestSt$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn does_not_propose_traits_in_scope() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+use dep::test_mod::{TestStruct, TestTrait};
+fn main() {
+ dep::test_mod::TestStruct::hum$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn blanket_trait_impl_import() {
+ check_edit(
+ "another_function",
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub struct TestStruct {}
+ pub trait TestTrait {
+ fn another_function();
+ }
+ impl<T> TestTrait for T {
+ fn another_function() {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::ano$0
+}
+"#,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ dep::test_mod::TestStruct::another_function()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn zero_input_deprecated_assoc_item_completion() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ #[deprecated]
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.$0
+}
+ "#,
+ expect![[r#"
+ me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
+ "#]],
+ );
+
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ #[deprecated]
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::$0
+}
+"#,
+ expect![[r#"
+ fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED
+ "#]],
+ );
+}
+
+#[test]
+fn no_completions_in_use_statements() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod io {
+ pub fn stdin() {}
+};
+
+//- /main.rs crate:main deps:dep
+use stdi$0
+
+fn main() {}
+"#,
+ expect![[]],
+ );
+}
+
+#[test]
+fn prefix_config_usage() {
+ let fixture = r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+ }
+}
+
+use crate::foo::bar;
+
+fn main() {
+ Ite$0
+}"#;
+ let mut config = TEST_CONFIG;
+
+ config.insert_use.prefix_kind = hir::PrefixKind::ByCrate;
+ check_edit_with_config(
+ config.clone(),
+ "Item",
+ fixture,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+ }
+}
+
+use crate::foo::bar::{self, Item};
+
+fn main() {
+ Item
+}"#,
+ );
+
+ config.insert_use.prefix_kind = hir::PrefixKind::BySelf;
+ check_edit_with_config(
+ config.clone(),
+ "Item",
+ fixture,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+ }
+}
+
+use crate::foo::bar;
+
+use self::foo::bar::Item;
+
+fn main() {
+ Item
+}"#,
+ );
+
+ config.insert_use.prefix_kind = hir::PrefixKind::Plain;
+ check_edit_with_config(
+ config,
+ "Item",
+ fixture,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+ }
+}
+
+use foo::bar::Item;
+
+use crate::foo::bar;
+
+fn main() {
+ Item
+}"#,
+ );
+}
+
+#[test]
+fn unresolved_qualifier() {
+ let fixture = r#"
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Item;
+ }
+ }
+}
+
+fn main() {
+ bar::baz::Ite$0
+}"#;
+
+ check(
+ fixture,
+ expect![[r#"
+ st Item (use foo::bar::baz::Item)
+ "#]],
+ );
+
+ check_edit(
+ "Item",
+ fixture,
+ r#"
+ use foo::bar;
+
+ mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Item;
+ }
+ }
+ }
+
+ fn main() {
+ bar::baz::Item
+ }"#,
+ );
+}
+
+#[test]
+fn unresolved_assoc_item_container() {
+ let fixture = r#"
+mod foo {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+}
+
+fn main() {
+ Item::TEST_A$0
+}"#;
+
+ check(
+ fixture,
+ expect![[r#"
+ ct TEST_ASSOC (use foo::Item)
+ "#]],
+ );
+
+ check_edit(
+ "TEST_ASSOC",
+ fixture,
+ r#"
+use foo::Item;
+
+mod foo {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+}
+
+fn main() {
+ Item::TEST_ASSOC
+}"#,
+ );
+}
+
+#[test]
+fn unresolved_assoc_item_container_with_path() {
+ let fixture = r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+ }
+}
+
+fn main() {
+ bar::Item::TEST_A$0
+}"#;
+
+ check(
+ fixture,
+ expect![[r#"
+ ct TEST_ASSOC (use foo::bar::Item)
+ "#]],
+ );
+
+ check_edit(
+ "TEST_ASSOC",
+ fixture,
+ r#"
+use foo::bar;
+
+mod foo {
+ pub mod bar {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+ }
+}
+
+fn main() {
+ bar::Item::TEST_ASSOC
+}"#,
+ );
+}
+
+#[test]
+fn fuzzy_unresolved_path() {
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+ }
+}
+
+fn main() {
+ bar::ASS$0
+}"#,
+ expect![[]],
+ )
+}
+
+#[test]
+fn unqualified_assoc_items_are_omitted() {
+ check(
+ r#"
+mod something {
+ pub trait BaseTrait {
+ fn test_function() -> i32;
+ }
+
+ pub struct Item1;
+ pub struct Item2;
+
+ impl BaseTrait for Item1 {
+ fn test_function() -> i32 {
+ 1
+ }
+ }
+
+ impl BaseTrait for Item2 {
+ fn test_function() -> i32 {
+ 2
+ }
+ }
+}
+
+fn main() {
+ test_f$0
+}"#,
+ expect![[]],
+ )
+}
+
+#[test]
+fn case_matters() {
+ check(
+ r#"
+mod foo {
+ pub const TEST_CONST: usize = 3;
+ pub fn test_function() -> i32 {
+ 4
+ }
+}
+
+fn main() {
+ TES$0
+}"#,
+ expect![[r#"
+ ct TEST_CONST (use foo::TEST_CONST)
+ "#]],
+ );
+
+ check(
+ r#"
+mod foo {
+ pub const TEST_CONST: usize = 3;
+ pub fn test_function() -> i32 {
+ 4
+ }
+}
+
+fn main() {
+ tes$0
+}"#,
+ expect![[r#"
+ ct TEST_CONST (use foo::TEST_CONST)
+ fn test_function() (use foo::test_function) fn() -> i32
+ "#]],
+ );
+
+ check(
+ r#"
+mod foo {
+ pub const TEST_CONST: usize = 3;
+ pub fn test_function() -> i32 {
+ 4
+ }
+}
+
+fn main() {
+ Te$0
+}"#,
+ expect![[]],
+ );
+}
+
+#[test]
+fn no_fuzzy_during_fields_of_record_lit_syntax() {
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+struct Foo {
+ some_field: i32,
+}
+fn main() {
+ let _ = Foo { so$0 };
+}
+"#,
+ expect![[]],
+ );
+}
+
+#[test]
+fn fuzzy_after_fields_of_record_lit_syntax() {
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+struct Foo {
+ some_field: i32,
+}
+fn main() {
+ let _ = Foo { some_field: som$0 };
+}
+"#,
+ expect![[r#"
+ fn some_fn() (use m::some_fn) fn() -> i32
+ "#]],
+ );
+}
+
+#[test]
+fn no_flyimports_in_traits_and_impl_declarations() {
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+trait Foo {
+ som$0
+}
+"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+struct Foo;
+impl Foo {
+ som$0
+}
+"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+struct Foo;
+trait Bar {}
+impl Bar for Foo {
+ som$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn no_inherent_candidates_proposed() {
+ check(
+ r#"
+mod baz {
+ pub trait DefDatabase {
+ fn method1(&self);
+ }
+ pub trait HirDatabase: DefDatabase {
+ fn method2(&self);
+ }
+}
+
+mod bar {
+ fn test(db: &dyn crate::baz::HirDatabase) {
+ db.metho$0
+ }
+}
+ "#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+mod baz {
+ pub trait DefDatabase {
+ fn method1(&self);
+ }
+ pub trait HirDatabase: DefDatabase {
+ fn method2(&self);
+ }
+}
+
+mod bar {
+ fn test(db: &impl crate::baz::HirDatabase) {
+ db.metho$0
+ }
+}
+"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+mod baz {
+ pub trait DefDatabase {
+ fn method1(&self);
+ }
+ pub trait HirDatabase: DefDatabase {
+ fn method2(&self);
+ }
+}
+
+mod bar {
+ fn test<T: crate::baz::HirDatabase>(db: T) {
+ db.metho$0
+ }
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn respects_doc_hidden() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep
+fn f() {
+ ().fro$0
+}
+
+//- /dep.rs crate:dep
+#[doc(hidden)]
+pub trait Private {
+ fn frob(&self) {}
+}
+
+impl<T> Private for T {}
+ "#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep
+fn f() {
+ ().fro$0
+}
+
+//- /dep.rs crate:dep
+pub trait Private {
+ #[doc(hidden)]
+ fn frob(&self) {}
+}
+
+impl<T> Private for T {}
+ "#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn regression_9760() {
+ check(
+ r#"
+struct Struct;
+fn main() {}
+
+mod mud {
+ fn func() {
+ let struct_instance = Stru$0
+ }
+}
+"#,
+ expect![[r#"
+ st Struct (use crate::Struct)
+ "#]],
+ );
+}
+
+#[test]
+fn flyimport_pattern() {
+ check(
+ r#"
+mod module {
+ pub struct FooStruct {}
+ pub const FooConst: () = ();
+ pub fn foo_fun() {}
+}
+fn function() {
+ let foo$0
+}
+"#,
+ expect![[r#"
+ ct FooConst (use module::FooConst)
+ st FooStruct (use module::FooStruct)
+ "#]],
+ );
+}
+
+#[test]
+fn flyimport_item_name() {
+ check(
+ r#"
+mod module {
+ pub struct Struct;
+}
+struct Str$0
+ "#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn flyimport_rename() {
+ check(
+ r#"
+mod module {
+ pub struct Struct;
+}
+use self as Str$0;
+ "#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn flyimport_enum_variant() {
+ check(
+ r#"
+mod foo {
+ pub struct Barbara;
+}
+
+enum Foo {
+ Barba$0()
+}
+}"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+mod foo {
+ pub struct Barbara;
+}
+
+enum Foo {
+ Barba(Barba$0)
+}
+}"#,
+ expect![[r#"
+ st Barbara (use foo::Barbara)
+ "#]],
+ )
+}
+
+#[test]
+fn flyimport_attribute() {
+ check(
+ r#"
+//- proc_macros:identity
+#[ide$0]
+struct Foo;
+"#,
+ expect![[r#"
+ at identity (use proc_macros::identity) proc_macro identity
+ "#]],
+ );
+ check_edit(
+ "identity",
+ r#"
+//- proc_macros:identity
+#[ide$0]
+struct Foo;
+"#,
+ r#"
+use proc_macros::identity;
+
+#[identity]
+struct Foo;
+"#,
+ );
+}
+
+#[test]
+fn flyimport_in_type_bound_omits_types() {
+ check(
+ r#"
+mod module {
+ pub struct CompletemeStruct;
+ pub type CompletemeType = ();
+ pub enum CompletemeEnum {}
+ pub trait CompletemeTrait {}
+}
+
+fn f<T>() where T: Comp$0
+"#,
+ expect![[r#"
+ tt CompletemeTrait (use module::CompletemeTrait)
+ "#]],
+ );
+}
+
+#[test]
+fn flyimport_source_file() {
+ check(
+ r#"
+//- /main.rs crate:main deps:dep
+def$0
+//- /lib.rs crate:dep
+#[macro_export]
+macro_rules! define_struct {
+ () => {
+ pub struct Foo;
+ };
+}
+"#,
+ expect![[r#"
+ ma define_struct!(…) (use dep::define_struct) macro_rules! define_struct
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs
new file mode 100644
index 000000000..cce74604c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs
@@ -0,0 +1,274 @@
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, completion_list_with_trigger_character};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+fn check_with_trigger_character(ra_fixture: &str, trigger_character: char, expect: Expect) {
+ let actual = completion_list_with_trigger_character(ra_fixture, Some(trigger_character));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn only_param() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: usize) {}
+fn baz(file$0) {}
+"#,
+ expect![[r#"
+ bn file_id: usize
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn last_param() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: usize) {}
+fn baz(foo: (), file$0) {}
+"#,
+ expect![[r#"
+ bn file_id: usize
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn first_param() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: usize) {}
+fn baz(file$0 id: u32) {}
+"#,
+ expect![[r#"
+ bn file_id: usize,
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn repeated_param_name() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: u32, $0) {}
+"#,
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ );
+
+ check(
+ r#"
+fn f(#[foo = "bar"] baz: u32,) {}
+fn g(baz: (), ba$0)
+"#,
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn trait_param() {
+ check(
+ r#"
+pub(crate) trait SourceRoot {
+ pub fn contains(file_id: usize) -> bool;
+ pub fn syntax(file$0)
+}
+"#,
+ expect![[r#"
+ bn file_id: usize
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn in_inner_function() {
+ check(
+ r#"
+fn outer(text: &str) {
+ fn inner($0)
+}
+"#,
+ expect![[r#"
+ bn text: &str
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn trigger_by_l_paren() {
+ check_with_trigger_character(
+ r#"
+fn foo($0)
+"#,
+ '(',
+ expect![[]],
+ )
+}
+
+#[test]
+fn shows_non_ident_pat_param() {
+ check(
+ r#"
+struct Bar { bar: u32 }
+fn foo(Bar { bar }: Bar) {}
+fn foo2($0) {}
+"#,
+ expect![[r#"
+ st Bar
+ bn Bar { bar }: Bar
+ bn Bar {…} Bar { bar$1 }: Bar$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn in_impl_only_param() {
+ check(
+ r#"
+struct A {}
+
+impl A {
+ fn foo(file_id: usize) {}
+ fn new($0) {}
+}
+"#,
+ expect![[r#"
+ sp Self
+ st A
+ bn &mut self
+ bn &self
+ bn file_id: usize
+ bn mut self
+ bn self
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn in_impl_after_self() {
+ check(
+ r#"
+struct A {}
+
+impl A {
+ fn foo(file_id: usize) {}
+ fn new(self, $0) {}
+}
+"#,
+ expect![[r#"
+ sp Self
+ st A
+ bn file_id: usize
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+// doesn't complete qux due to there being no expression after
+// see source_analyzer::adjust comment
+#[test]
+fn local_fn_shows_locals_for_params() {
+ check(
+ r#"
+fn outer() {
+ let foo = 3;
+ {
+ let bar = 3;
+ fn inner($0) {}
+ let baz = 3;
+ let qux = 3;
+ }
+ let fez = 3;
+}
+"#,
+ expect![[r#"
+ bn bar: i32
+ bn baz: i32
+ bn foo: i32
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn closure_shows_locals_for_params() {
+ check(
+ r#"
+fn outer() {
+ let foo = 3;
+ {
+ let bar = 3;
+ |$0| {};
+ let baz = 3;
+ let qux = 3;
+ }
+ let fez = 3;
+}
+"#,
+ expect![[r#"
+ bn bar: i32
+ bn baz: i32
+ bn foo: i32
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_fully_equal() {
+ check(
+ r#"
+fn foo(bar: u32) {}
+fn bar(bar$0) {}
+"#,
+ expect![[r#"
+ bn bar: u32
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_for_params_with_attributes() {
+ check(
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut ba$0)
+"#,
+ expect![[r##"
+ bn #[baz = "qux"] mut bar: u32
+ "##]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
new file mode 100644
index 000000000..409413c1d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
@@ -0,0 +1,154 @@
+//! Completion tests for item specifics overall.
+//!
+//! Except for use items which are tested in [super::use_tree] and mod declarations with are tested
+//! in [crate::completions::mod_].
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn target_type_or_trait_in_impl_block() {
+ check(
+ r#"
+impl Tra$0
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn target_type_in_trait_impl_block() {
+ check(
+ r#"
+impl Trait for Str$0
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn after_trait_name_in_trait_def() {
+ check(
+ r"trait A $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+}
+
+#[test]
+fn after_target_name_in_impl() {
+ check(
+ r"impl Trait $0",
+ expect![[r#"
+ kw for
+ kw where
+ "#]],
+ );
+ check(
+ r"impl Trait f$0",
+ expect![[r#"
+ kw for
+ kw where
+ "#]],
+ );
+ check(
+ r"impl Trait for Type $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+}
+
+#[test]
+fn completes_where() {
+ check(
+ r"struct Struct $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"struct Struct $0 {}",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ // FIXME: This shouldn't be completed here
+ check(
+ r"struct Struct $0 ()",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"fn func() $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"enum Enum $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"enum Enum $0 {}",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"trait Trait $0 {}",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+}
+
+#[test]
+fn before_record_field() {
+ check(
+ r#"
+struct Foo {
+ $0
+ pub f: i32,
+}
+"#,
+ expect![[r#"
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
new file mode 100644
index 000000000..5076c6e86
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
@@ -0,0 +1,247 @@
+//! Completion tests for item list position.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn in_mod_item_list() {
+ check(
+ r#"mod tests { $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw super::
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_source_file_item_list() {
+ check(
+ r#"$0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_item_list_after_attr() {
+ check(
+ r#"#[attr] $0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_qualified_path() {
+ check(
+ r#"crate::$0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ "#]],
+ )
+}
+
+#[test]
+fn after_unsafe_token() {
+ check(
+ r#"unsafe $0"#,
+ expect![[r#"
+ kw fn
+ kw impl
+ kw trait
+ "#]],
+ );
+}
+
+#[test]
+fn after_visibility() {
+ check(
+ r#"pub $0"#,
+ expect![[r#"
+ kw const
+ kw enum
+ kw extern
+ kw fn
+ kw mod
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ "#]],
+ );
+}
+
+#[test]
+fn after_visibility_unsafe() {
+ check(
+ r#"pub unsafe $0"#,
+ expect![[r#"
+ kw fn
+ kw trait
+ "#]],
+ );
+}
+
+#[test]
+fn in_impl_assoc_item_list() {
+ check(
+ r#"impl Struct { $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw unsafe
+ "#]],
+ )
+}
+
+#[test]
+fn in_impl_assoc_item_list_after_attr() {
+ check(
+ r#"impl Struct { #[attr] $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw unsafe
+ "#]],
+ )
+}
+
+#[test]
+fn in_trait_assoc_item_list() {
+ check(
+ r"trait Foo { $0 }",
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw self::
+ kw type
+ kw unsafe
+ "#]],
+ );
+}
+
+#[test]
+fn in_trait_impl_assoc_item_list() {
+ check(
+ r#"
+trait Test {
+ type Type0;
+ type Type1;
+ const CONST0: ();
+ const CONST1: ();
+ fn function0();
+ fn function1();
+}
+
+impl Test for () {
+ type Type0 = ();
+ const CONST0: () = ();
+ fn function0() {}
+ $0
+}
+"#,
+ expect![[r#"
+ ct const CONST1: () =
+ fn fn function1()
+ ma makro!(…) macro_rules! makro
+ md module
+ ta type Type1 =
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
new file mode 100644
index 000000000..30ddbe2dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
@@ -0,0 +1,716 @@
+//! Completion tests for pattern position.
+use expect_test::{expect, Expect};
+
+use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE};
+
+fn check_empty(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn wildcard() {
+ check(
+ r#"
+fn quux() {
+ let _$0
+}
+"#,
+ expect![""],
+ );
+}
+
+#[test]
+fn ident_rebind_pat() {
+ check_empty(
+ r#"
+fn quux() {
+ let en$0 @ x
+}
+"#,
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn ident_ref_pat() {
+ check_empty(
+ r#"
+fn quux() {
+ let ref en$0
+}
+"#,
+ expect![[r#"
+ kw mut
+ "#]],
+ );
+ check_empty(
+ r#"
+fn quux() {
+ let ref en$0 @ x
+}
+"#,
+ expect![[r#"
+ kw mut
+ "#]],
+ );
+}
+
+#[test]
+fn ident_ref_mut_pat() {
+ check_empty(
+ r#"
+fn quux() {
+ let ref mut en$0
+}
+"#,
+ expect![[r#""#]],
+ );
+ check_empty(
+ r#"
+fn quux() {
+ let ref mut en$0 @ x
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn ref_pat() {
+ check_empty(
+ r#"
+fn quux() {
+ let &en$0
+}
+"#,
+ expect![[r#"
+ kw mut
+ "#]],
+ );
+ check_empty(
+ r#"
+fn quux() {
+ let &mut en$0
+}
+"#,
+ expect![[r#""#]],
+ );
+ check_empty(
+ r#"
+fn foo() {
+ for &$0 in () {}
+}
+"#,
+ expect![[r#"
+ kw mut
+ "#]],
+ );
+}
+
+#[test]
+fn refutable() {
+ check(
+ r#"
+fn foo() {
+ if let a$0
+}
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ ev TupleV
+ bn Record {…} Record { field$1 }$0
+ bn Tuple(…) Tuple($1)$0
+ bn TupleV(…) TupleV($1)$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn irrefutable() {
+ check(
+ r#"
+enum SingleVariantEnum {
+ Variant
+}
+use SingleVariantEnum::Variant;
+fn foo() {
+ let a$0
+}
+"#,
+ expect![[r#"
+ en SingleVariantEnum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ ev Variant
+ bn Record {…} Record { field$1 }$0
+ bn Tuple(…) Tuple($1)$0
+ bn Variant Variant$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn in_param() {
+ check(
+ r#"
+fn foo(a$0) {
+}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ bn Record {…} Record { field$1 }: Record$0
+ bn Tuple(…) Tuple($1): Tuple$0
+ kw mut
+ kw ref
+ "#]],
+ );
+ check(
+ r#"
+fn foo(a$0: Tuple) {
+}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ bn Record {…} Record { field$1 }$0
+ bn Tuple(…) Tuple($1)$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn only_fn_like_macros() {
+ check_empty(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+
+#[rustc_builtin_macro]
+macro Clone {}
+
+fn foo() {
+ let x$0
+}
+"#,
+ expect![[r#"
+ ma m!(…) macro_rules! m
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn in_simple_macro_call() {
+ check_empty(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+enum E { X }
+
+fn foo() {
+ m!(match E::X { a$0 })
+}
+"#,
+ expect![[r#"
+ en E
+ ma m!(…) macro_rules! m
+ bn E::X E::X$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn omits_private_fields_pat() {
+ check_empty(
+ r#"
+mod foo {
+ pub struct Record { pub field: i32, _field: i32 }
+ pub struct Tuple(pub u32, u32);
+ pub struct Invisible(u32, u32);
+}
+use foo::*;
+
+fn outer() {
+ if let a$0
+}
+"#,
+ expect![[r#"
+ md foo
+ st Invisible
+ st Record
+ st Tuple
+ bn Record {…} Record { field$1, .. }$0
+ bn Tuple(…) Tuple($1, ..)$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_self_pats() {
+ check_empty(
+ r#"
+struct Foo(i32);
+impl Foo {
+ fn foo() {
+ match Foo(0) {
+ a$0
+ }
+ }
+}
+ "#,
+ expect![[r#"
+ sp Self
+ st Foo
+ bn Foo(…) Foo($1)$0
+ bn Self(…) Self($1)$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn enum_qualified() {
+ check(
+ r#"
+impl Enum {
+ type AssocType = ();
+ const ASSOC_CONST: () = ();
+ fn assoc_fn() {}
+}
+fn func() {
+ if let Enum::$0 = unknown {}
+}
+"#,
+ expect![[r#"
+ ct ASSOC_CONST const ASSOC_CONST: ()
+ bn RecordV {…} RecordV { field$1 }$0
+ bn TupleV(…) TupleV($1)$0
+ bn UnitV UnitV$0
+ "#]],
+ );
+}
+
+#[test]
+fn completes_in_record_field_pat() {
+ check_empty(
+ r#"
+struct Foo { bar: Bar }
+struct Bar(u32);
+fn outer(Foo { bar: $0 }: Foo) {}
+"#,
+ expect![[r#"
+ st Bar
+ st Foo
+ bn Bar(…) Bar($1)$0
+ bn Foo {…} Foo { bar$1 }$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn skips_in_record_field_pat_name() {
+ check_empty(
+ r#"
+struct Foo { bar: Bar }
+struct Bar(u32);
+fn outer(Foo { bar$0 }: Foo) {}
+"#,
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_in_fn_param() {
+ check_empty(
+ r#"
+struct Foo { bar: Bar }
+struct Bar(u32);
+fn foo($0) {}
+"#,
+ expect![[r#"
+ st Bar
+ st Foo
+ bn Bar(…) Bar($1): Bar$0
+ bn Foo {…} Foo { bar$1 }: Foo$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_in_closure_param() {
+ check_empty(
+ r#"
+struct Foo { bar: Bar }
+struct Bar(u32);
+fn foo() {
+ |$0| {};
+}
+"#,
+ expect![[r#"
+ st Bar
+ st Foo
+ bn Bar(…) Bar($1)$0
+ bn Foo {…} Foo { bar$1 }$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_no_delims_if_existing() {
+ check_empty(
+ r#"
+struct Bar(u32);
+fn foo() {
+ match Bar(0) {
+ B$0(b) => {}
+ }
+}
+"#,
+ expect![[r#"
+ st Bar
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check_empty(
+ r#"
+struct Foo { bar: u32 }
+fn foo() {
+ match (Foo { bar: 0 }) {
+ F$0 { bar } => {}
+ }
+}
+"#,
+ expect![[r#"
+ st Foo
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check_empty(
+ r#"
+enum Enum {
+ TupleVariant(u32)
+}
+fn foo() {
+ match Enum::TupleVariant(0) {
+ Enum::T$0(b) => {}
+ }
+}
+"#,
+ expect![[r#"
+ bn TupleVariant TupleVariant
+ "#]],
+ );
+ check_empty(
+ r#"
+enum Enum {
+ RecordVariant { field: u32 }
+}
+fn foo() {
+ match (Enum::RecordVariant { field: 0 }) {
+ Enum::RecordV$0 { field } => {}
+ }
+}
+"#,
+ expect![[r#"
+ bn RecordVariant RecordVariant
+ "#]],
+ );
+}
+
+#[test]
+fn completes_enum_variant_pat() {
+ cov_mark::check!(enum_variant_pattern_path);
+ check_edit(
+ "RecordVariant {…}",
+ r#"
+enum Enum {
+ RecordVariant { field: u32 }
+}
+fn foo() {
+ match (Enum::RecordVariant { field: 0 }) {
+ Enum::RecordV$0
+ }
+}
+"#,
+ r#"
+enum Enum {
+ RecordVariant { field: u32 }
+}
+fn foo() {
+ match (Enum::RecordVariant { field: 0 }) {
+ Enum::RecordVariant { field$1 }$0
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn completes_enum_variant_pat_escape() {
+ cov_mark::check!(enum_variant_pattern_path);
+ check_empty(
+ r#"
+enum Enum {
+ A,
+ B { r#type: i32 },
+ r#type,
+ r#struct { r#type: i32 },
+}
+fn foo() {
+ match (Enum::A) {
+ $0
+ }
+}
+"#,
+ expect![[r#"
+ en Enum
+ bn Enum::A Enum::A$0
+ bn Enum::B {…} Enum::B { r#type$1 }$0
+ bn Enum::struct {…} Enum::r#struct { r#type$1 }$0
+ bn Enum::type Enum::r#type$0
+ kw mut
+ kw ref
+ "#]],
+ );
+
+ check_empty(
+ r#"
+enum Enum {
+ A,
+ B { r#type: i32 },
+ r#type,
+ r#struct { r#type: i32 },
+}
+fn foo() {
+ match (Enum::A) {
+ Enum::$0
+ }
+}
+"#,
+ expect![[r#"
+ bn A A$0
+ bn B {…} B { r#type$1 }$0
+ bn struct {…} r#struct { r#type$1 }$0
+ bn type r#type$0
+ "#]],
+ );
+}
+
+#[test]
+fn completes_associated_const() {
+ check_empty(
+ r#"
+#[derive(PartialEq, Eq)]
+struct Ty(u8);
+
+impl Ty {
+ const ABC: Self = Self(0);
+}
+
+fn f(t: Ty) {
+ match t {
+ Ty::$0 => {}
+ _ => {}
+ }
+}
+"#,
+ expect![[r#"
+ ct ABC const ABC: Self
+ "#]],
+ );
+
+ check_empty(
+ r#"
+enum MyEnum {}
+
+impl MyEnum {
+ pub const A: i32 = 123;
+ pub const B: i32 = 456;
+}
+
+fn f(e: MyEnum) {
+ match e {
+ MyEnum::$0 => {}
+ _ => {}
+ }
+}
+"#,
+ expect![[r#"
+ ct A pub const A: i32
+ ct B pub const B: i32
+ "#]],
+ );
+
+ check_empty(
+ r#"
+union U {
+ i: i32,
+ f: f32,
+}
+
+impl U {
+ pub const C: i32 = 123;
+ pub const D: i32 = 456;
+}
+
+fn f(u: U) {
+ match u {
+ U::$0 => {}
+ _ => {}
+ }
+}
+"#,
+ expect![[r#"
+ ct C pub const C: i32
+ ct D pub const D: i32
+ "#]],
+ );
+
+ check_empty(
+ r#"
+#[lang = "u32"]
+impl u32 {
+ pub const MIN: Self = 0;
+}
+
+fn f(v: u32) {
+ match v {
+ u32::$0
+ }
+}
+ "#,
+ expect![[r#"
+ ct MIN pub const MIN: Self
+ "#]],
+ );
+}
+
+#[test]
+fn in_method_param() {
+ check_empty(
+ r#"
+struct Ty(u8);
+
+impl Ty {
+ fn foo($0)
+}
+"#,
+ expect![[r#"
+ sp Self
+ st Ty
+ bn &mut self
+ bn &self
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
+ bn mut self
+ bn self
+ kw mut
+ kw ref
+ "#]],
+ );
+ check_empty(
+ r#"
+struct Ty(u8);
+
+impl Ty {
+ fn foo(s$0)
+}
+"#,
+ expect![[r#"
+ sp Self
+ st Ty
+ bn &mut self
+ bn &self
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
+ bn mut self
+ bn self
+ kw mut
+ kw ref
+ "#]],
+ );
+ check_empty(
+ r#"
+struct Ty(u8);
+
+impl Ty {
+ fn foo(s$0, foo: u8)
+}
+"#,
+ expect![[r#"
+ sp Self
+ st Ty
+ bn &mut self
+ bn &self
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
+ bn mut self
+ bn self
+ kw mut
+ kw ref
+ "#]],
+ );
+ check_empty(
+ r#"
+struct Ty(u8);
+
+impl Ty {
+ fn foo(foo: u8, b$0)
+}
+"#,
+ expect![[r#"
+ sp Self
+ st Ty
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
new file mode 100644
index 000000000..a8676e2f2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
@@ -0,0 +1,131 @@
+//! Completion tests for predicates and bounds.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn predicate_start() {
+ // FIXME: `for` kw
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where $0 {}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn bound_for_type_pred() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where T: $0 {}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ tt Trait
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn bound_for_lifetime_pred() {
+ // FIXME: should only show lifetimes here, that is we shouldn't get any completions here when not typing
+ // a `'`
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where 'lt: $0 {}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ tt Trait
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn bound_for_for_pred() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where for<'a> T: $0 {}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ tt Trait
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn param_list_for_for_pred() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where for<'a> $0 {}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn pred_on_fn_in_impl() {
+ check(
+ r#"
+impl Record {
+ fn method(self) where $0 {}
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ sp Self
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
new file mode 100644
index 000000000..9eae6f849
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
@@ -0,0 +1,133 @@
+//! Completion tests for completions inside proc-macro attributes and inputs.
+use expect_test::{expect, Expect};
+
+use crate::tests::completion_list;
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn complete_dot_in_attr() {
+ check(
+ r#"
+//- proc_macros: identity
+pub struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+#[proc_macros::identity]
+fn main() {
+ Foo.$0
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+}
+
+#[test]
+fn complete_dot_in_attr2() {
+ check(
+ r#"
+//- proc_macros: identity
+pub struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+#[proc_macros::identity]
+fn main() {
+ Foo.f$0
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+}
+
+#[test]
+fn complete_dot_in_attr_input() {
+ check(
+ r#"
+//- proc_macros: input_replace
+pub struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+#[proc_macros::input_replace(
+    fn surprise() {
+ Foo.$0
+ }
+)]
+fn main() {}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+}
+
+#[test]
+fn complete_dot_in_attr_input2() {
+ check(
+ r#"
+//- proc_macros: input_replace
+pub struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+#[proc_macros::input_replace(
+    fn surprise() {
+ Foo.f$0
+ }
+)]
+fn main() {}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
new file mode 100644
index 000000000..f6accc68e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
@@ -0,0 +1,229 @@
+use expect_test::{expect, Expect};
+
+use crate::tests::completion_list;
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn without_default_impl() {
+ check(
+ r#"
+struct Struct { foo: u32, bar: usize }
+
+fn foo() {
+ let other = Struct {
+ foo: 5,
+ $0
+ };
+}
+"#,
+ expect![[r#"
+ fd bar usize
+ "#]],
+ );
+}
+
+#[test]
+fn record_pattern_field() {
+ check(
+ r#"
+struct Struct { foo: u32, bar: u32 }
+
+fn foo(s: Struct) {
+ match s {
+ Struct { foo, $0: 92 } => (),
+ }
+}
+"#,
+ expect![[r#"
+ fd bar u32
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn pattern_enum_variant() {
+ check(
+ r#"
+enum Enum { Variant { foo: u32, bar: u32 } }
+fn foo(e: Enum) {
+ match e {
+ Enum::Variant { foo, $0 } => (),
+ }
+}
+"#,
+ expect![[r#"
+ fd bar u32
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn record_literal_field_in_macro() {
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+struct Struct { field: u32 }
+fn foo() {
+ m!(Struct { fie$0 })
+}
+"#,
+ expect![[r#"
+ fd field u32
+ "#]],
+ );
+}
+
+#[test]
+fn record_pattern_field_in_macro() {
+ check(
+ r"
+macro_rules! m { ($e:expr) => { $e } }
+struct Struct { field: u32 }
+
+fn foo(f: Struct) {
+ m!(match f {
+ Struct { f$0: 92 } => (),
+ })
+}
+",
+ expect![[r#"
+ fd field u32
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn functional_update() {
+ // FIXME: This should filter out all completions that do not have the type `Foo`
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, $0 }
+}
+"#,
+ expect![[r#"
+ fd ..Default::default()
+ fd foo1 u32
+ fd foo2 u32
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, .$0 }
+}
+"#,
+ expect![[r#"
+ fd ..Default::default()
+ sn ..
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, ..$0 }
+}
+"#,
+ expect![[r#"
+ fd ..Default::default()
+ fn main() fn()
+ lc foo Foo
+ lc thing i32
+ md core
+ st Foo
+ st Foo {…} Foo { foo1: u32, foo2: u32 }
+ tt Default
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, ..Default::$0 }
+}
+"#,
+ expect![[r#"
+ fn default() (as Default) fn() -> Self
+ "#]],
+ );
+}
+
+#[test]
+fn empty_union_literal() {
+ check(
+ r#"
+union Union { foo: u32, bar: f32 }
+
+fn foo() {
+ let other = Union {
+ $0
+ };
+}
+ "#,
+ expect![[r#"
+ fd bar f32
+ fd foo u32
+ "#]],
+ )
+}
+
+#[test]
+fn dont_suggest_additional_union_fields() {
+ check(
+ r#"
+union Union { foo: u32, bar: f32 }
+
+fn foo() {
+ let other = Union {
+ foo: 1,
+ $0
+ };
+}
+ "#,
+ expect![[r#""#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
new file mode 100644
index 000000000..033dc99c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -0,0 +1,895 @@
+//! Tests that don't fit into a specific category.
+
+use expect_test::{expect, Expect};
+
+use crate::tests::{check_edit, completion_list_no_kw};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn completes_if_prefix_is_keyword() {
+ check_edit(
+ "wherewolf",
+ r#"
+fn main() {
+ let wherewolf = 92;
+ drop(where$0)
+}
+"#,
+ r#"
+fn main() {
+ let wherewolf = 92;
+ drop(wherewolf)
+}
+"#,
+ )
+}
+
+/// Regression test for issue #6091.
+#[test]
+fn correctly_completes_module_items_prefixed_with_underscore() {
+ check_edit(
+ "_alpha",
+ r#"
+fn main() {
+ _$0
+}
+fn _alpha() {}
+"#,
+ r#"
+fn main() {
+ _alpha()$0
+}
+fn _alpha() {}
+"#,
+ )
+}
+
+#[test]
+fn completes_prelude() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn foo() { let x: $0 }
+
+//- /std/lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Option;
+ }
+}
+"#,
+ expect![[r#"
+ md std
+ st Option
+ bt u32
+ "#]],
+ );
+}
+
+#[test]
+fn completes_prelude_macros() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn f() {$0}
+
+//- /std/lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::concat;
+ }
+}
+
+mod macros {
+ #[rustc_builtin_macro]
+ #[macro_export]
+ macro_rules! concat { }
+}
+"#,
+ expect![[r#"
+ fn f() fn()
+ ma concat!(…) macro_rules! concat
+ md std
+ bt u32
+ "#]],
+ );
+}
+
+#[test]
+fn completes_std_prelude_if_core_is_defined() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core,std
+fn foo() { let x: $0 }
+
+//- /core/lib.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Option;
+ }
+}
+
+//- /std/lib.rs crate:std deps:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct String;
+ }
+}
+"#,
+ expect![[r#"
+ md core
+ md std
+ st String
+ bt u32
+ "#]],
+ );
+}
+
+#[test]
+fn respects_doc_hidden() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:std
+fn f() {
+ format_$0
+}
+
+//- /std.rs crate:std
+#[doc(hidden)]
+#[macro_export]
+macro_rules! format_args_nl {
+ () => {}
+}
+
+pub mod prelude {
+ pub mod rust_2018 {}
+}
+ "#,
+ expect![[r#"
+ fn f() fn()
+ md std
+ bt u32
+ "#]],
+ );
+}
+
+#[test]
+fn respects_doc_hidden_in_assoc_item_list() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:std
+struct S;
+impl S {
+ format_$0
+}
+
+//- /std.rs crate:std
+#[doc(hidden)]
+#[macro_export]
+macro_rules! format_args_nl {
+ () => {}
+}
+
+pub mod prelude {
+ pub mod rust_2018 {}
+}
+ "#,
+ expect![[r#"
+ md std
+ "#]],
+ );
+}
+
+#[test]
+fn associated_item_visibility() {
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub struct S;
+
+impl S {
+ pub fn public_method() { }
+ fn private_method() { }
+ pub type PublicType = u32;
+ type PrivateType = u32;
+ pub const PUBLIC_CONST: u32 = 1;
+ const PRIVATE_CONST: u32 = 1;
+}
+
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo() { let _ = lib::S::$0 }
+"#,
+ expect![[r#"
+ ct PUBLIC_CONST pub const PUBLIC_CONST: u32
+ fn public_method() fn()
+ ta PublicType pub type PublicType = u32
+ "#]],
+ );
+}
+
+#[test]
+fn completes_union_associated_method() {
+ check(
+ r#"
+union U {};
+impl U { fn m() { } }
+
+fn foo() { let _ = U::$0 }
+"#,
+ expect![[r#"
+ fn m() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_trait_associated_method_1() {
+ check(
+ r#"
+trait Trait { fn m(); }
+
+fn foo() { let _ = Trait::$0 }
+"#,
+ expect![[r#"
+ fn m() (as Trait) fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_trait_associated_method_2() {
+ check(
+ r#"
+trait Trait { fn m(); }
+
+struct S;
+impl Trait for S {}
+
+fn foo() { let _ = S::$0 }
+"#,
+ expect![[r#"
+ fn m() (as Trait) fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_trait_associated_method_3() {
+ check(
+ r#"
+trait Trait { fn m(); }
+
+struct S;
+impl Trait for S {}
+
+fn foo() { let _ = <S as Trait>::$0 }
+"#,
+ expect![[r#"
+ fn m() (as Trait) fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_ty_param_assoc_ty() {
+ check(
+ r#"
+trait Super {
+ type Ty;
+ const CONST: u8;
+ fn func() {}
+ fn method(&self) {}
+}
+
+trait Sub: Super {
+ type SubTy;
+ const C2: ();
+ fn subfunc() {}
+ fn submethod(&self) {}
+}
+
+fn foo<T: Sub>() { T::$0 }
+"#,
+ expect![[r#"
+ ct C2 (as Sub) const C2: ()
+ ct CONST (as Super) const CONST: u8
+ fn func() (as Super) fn()
+ fn subfunc() (as Sub) fn()
+ ta SubTy (as Sub) type SubTy
+ ta Ty (as Super) type Ty
+ me method(…) (as Super) fn(&self)
+ me submethod(…) (as Sub) fn(&self)
+ "#]],
+ );
+}
+
+#[test]
+fn completes_self_param_assoc_ty() {
+ check(
+ r#"
+trait Super {
+ type Ty;
+ const CONST: u8 = 0;
+ fn func() {}
+ fn method(&self) {}
+}
+
+trait Sub: Super {
+ type SubTy;
+ const C2: () = ();
+ fn subfunc() {}
+ fn submethod(&self) {}
+}
+
+struct Wrap<T>(T);
+impl<T> Super for Wrap<T> {}
+impl<T> Sub for Wrap<T> {
+ fn subfunc() {
+ // Should be able to assume `Self: Sub + Super`
+ Self::$0
+ }
+}
+"#,
+ expect![[r#"
+ ct C2 (as Sub) const C2: ()
+ ct CONST (as Super) const CONST: u8
+ fn func() (as Super) fn()
+ fn subfunc() (as Sub) fn()
+ ta SubTy (as Sub) type SubTy
+ ta Ty (as Super) type Ty
+ me method(…) (as Super) fn(&self)
+ me submethod(…) (as Sub) fn(&self)
+ "#]],
+ );
+}
+
+#[test]
+fn completes_type_alias() {
+ check(
+ r#"
+struct S;
+impl S { fn foo() {} }
+type T = S;
+impl T { fn bar() {} }
+
+fn main() { T::$0; }
+"#,
+ expect![[r#"
+ fn bar() fn()
+ fn foo() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_qualified_macros() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! foo { () => {} }
+
+fn main() { let _ = crate::$0 }
+"#,
+ expect![[r#"
+ fn main() fn()
+ ma foo!(…) macro_rules! foo
+ "#]],
+ );
+}
+
+#[test]
+fn does_not_complete_non_fn_macros() {
+ check(
+ r#"
+mod m {
+ #[rustc_builtin_macro]
+ pub macro Clone {}
+}
+
+fn f() {m::$0}
+"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+mod m {
+ #[rustc_builtin_macro]
+ pub macro bench {}
+}
+
+fn f() {m::$0}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn completes_reexported_items_under_correct_name() {
+ check(
+ r#"
+fn foo() { self::m::$0 }
+
+mod m {
+ pub use super::p::wrong_fn as right_fn;
+ pub use super::p::WRONG_CONST as RIGHT_CONST;
+ pub use super::p::WrongType as RightType;
+}
+mod p {
+ pub fn wrong_fn() {}
+ pub const WRONG_CONST: u32 = 1;
+ pub struct WrongType {};
+}
+"#,
+ expect![[r#"
+ ct RIGHT_CONST
+ fn right_fn() fn()
+ st RightType
+ "#]],
+ );
+
+ check_edit(
+ "RightType",
+ r#"
+fn foo() { self::m::$0 }
+
+mod m {
+ pub use super::p::wrong_fn as right_fn;
+ pub use super::p::WRONG_CONST as RIGHT_CONST;
+ pub use super::p::WrongType as RightType;
+}
+mod p {
+ pub fn wrong_fn() {}
+ pub const WRONG_CONST: u32 = 1;
+ pub struct WrongType {};
+}
+"#,
+ r#"
+fn foo() { self::m::RightType }
+
+mod m {
+ pub use super::p::wrong_fn as right_fn;
+ pub use super::p::WRONG_CONST as RIGHT_CONST;
+ pub use super::p::WrongType as RightType;
+}
+mod p {
+ pub fn wrong_fn() {}
+ pub const WRONG_CONST: u32 = 1;
+ pub struct WrongType {};
+}
+"#,
+ );
+}
+
+#[test]
+fn completes_in_simple_macro_call() {
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn main() { m!(self::f$0); }
+fn foo() {}
+"#,
+ expect![[r#"
+ fn foo() fn()
+ fn main() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn function_mod_share_name() {
+ check(
+ r#"
+fn foo() { self::m::$0 }
+
+mod m {
+ pub mod z {}
+ pub fn z() {}
+}
+"#,
+ expect![[r#"
+ fn z() fn()
+ md z
+ "#]],
+ );
+}
+
+#[test]
+fn completes_hashmap_new() {
+ check(
+ r#"
+struct RandomState;
+struct HashMap<K, V, S = RandomState> {}
+
+impl<K, V> HashMap<K, V, RandomState> {
+ pub fn new() -> HashMap<K, V, RandomState> { }
+}
+fn foo() {
+ HashMap::$0
+}
+"#,
+ expect![[r#"
+ fn new() fn() -> HashMap<K, V, RandomState>
+ "#]],
+ );
+}
+
+#[test]
+fn completes_variant_through_self() {
+ cov_mark::check!(completes_variant_through_self);
+ check(
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Foo {
+ fn foo(self) {
+ Self::$0
+ }
+}
+"#,
+ expect![[r#"
+ ev Bar Bar
+ ev Baz Baz
+ me foo(…) fn(self)
+ "#]],
+ );
+}
+
+#[test]
+fn completes_non_exhaustive_variant_within_the_defining_crate() {
+ check(
+ r#"
+enum Foo {
+ #[non_exhaustive]
+ Bar,
+ Baz,
+}
+
+fn foo(self) {
+ Foo::$0
+}
+"#,
+ expect![[r#"
+ ev Bar Bar
+ ev Baz Baz
+ "#]],
+ );
+
+ check(
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(self) {
+ e::Foo::$0
+}
+
+//- /e.rs crate:e
+enum Foo {
+ #[non_exhaustive]
+ Bar,
+ Baz,
+}
+"#,
+ expect![[r#"
+ ev Baz Baz
+ "#]],
+ );
+}
+
+#[test]
+fn completes_primitive_assoc_const() {
+ cov_mark::check!(completes_primitive_assoc_const);
+ check(
+ r#"
+//- /lib.rs crate:lib deps:core
+fn f() {
+ u8::$0
+}
+
+//- /core.rs crate:core
+#[lang = "u8"]
+impl u8 {
+ pub const MAX: Self = 255;
+
+ pub fn func(self) {}
+}
+"#,
+ expect![[r#"
+ ct MAX pub const MAX: Self
+ me func(…) fn(self)
+ "#]],
+ );
+}
+
+#[test]
+fn completes_variant_through_alias() {
+ cov_mark::check!(completes_variant_through_alias);
+ check(
+ r#"
+enum Foo {
+ Bar
+}
+type Foo2 = Foo;
+fn main() {
+ Foo2::$0
+}
+"#,
+ expect![[r#"
+ ev Bar Bar
+ "#]],
+ );
+}
+
+#[test]
+fn respects_doc_hidden2() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep
+fn f() {
+ dep::$0
+}
+
+//- /dep.rs crate:dep
+#[doc(hidden)]
+#[macro_export]
+macro_rules! m {
+ () => {}
+}
+
+#[doc(hidden)]
+pub fn f() {}
+
+#[doc(hidden)]
+pub struct S;
+
+#[doc(hidden)]
+pub mod m {}
+ "#,
+ expect![[r#""#]],
+ )
+}
+
+#[test]
+fn type_anchor_empty() {
+ check(
+ r#"
+trait Foo {
+ fn foo() -> Self;
+}
+struct Bar;
+impl Foo for Bar {
+ fn foo() -> {
+ Bar
+ }
+}
+fn bar() -> Bar {
+ <_>::$0
+}
+"#,
+ expect![[r#"
+ fn foo() (as Foo) fn() -> Self
+ "#]],
+ );
+}
+
+#[test]
+fn type_anchor_type() {
+ check(
+ r#"
+trait Foo {
+ fn foo() -> Self;
+}
+struct Bar;
+impl Bar {
+ fn bar() {}
+}
+impl Foo for Bar {
+ fn foo() -> {
+ Bar
+ }
+}
+fn bar() -> Bar {
+ <Bar>::$0
+}
+"#,
+ expect![[r#"
+ fn bar() fn()
+ fn foo() (as Foo) fn() -> Self
+ "#]],
+ );
+}
+
+#[test]
+fn type_anchor_type_trait() {
+ check(
+ r#"
+trait Foo {
+ fn foo() -> Self;
+}
+struct Bar;
+impl Bar {
+ fn bar() {}
+}
+impl Foo for Bar {
+ fn foo() -> {
+ Bar
+ }
+}
+fn bar() -> Bar {
+ <Bar as Foo>::$0
+}
+"#,
+ expect![[r#"
+ fn foo() (as Foo) fn() -> Self
+ "#]],
+ );
+}
+
+#[test]
+fn completes_fn_in_pub_trait_generated_by_macro() {
+ check(
+ r#"
+mod other_mod {
+ macro_rules! make_method {
+ ($name:ident) => {
+ fn $name(&self) {}
+ };
+ }
+
+ pub trait MyTrait {
+ make_method! { by_macro }
+ fn not_by_macro(&self) {}
+ }
+
+ pub struct Foo {}
+
+ impl MyTrait for Foo {}
+}
+
+fn main() {
+ use other_mod::{Foo, MyTrait};
+ let f = Foo {};
+ f.$0
+}
+"#,
+ expect![[r#"
+ me by_macro() (as MyTrait) fn(&self)
+ me not_by_macro() (as MyTrait) fn(&self)
+ "#]],
+ )
+}
+
+#[test]
+fn completes_fn_in_pub_trait_generated_by_recursive_macro() {
+ check(
+ r#"
+mod other_mod {
+ macro_rules! make_method {
+ ($name:ident) => {
+ fn $name(&self) {}
+ };
+ }
+
+ macro_rules! make_trait {
+ () => {
+ pub trait MyTrait {
+ make_method! { by_macro }
+ fn not_by_macro(&self) {}
+ }
+ }
+ }
+
+ make_trait!();
+
+ pub struct Foo {}
+
+ impl MyTrait for Foo {}
+}
+
+fn main() {
+ use other_mod::{Foo, MyTrait};
+ let f = Foo {};
+ f.$0
+}
+"#,
+ expect![[r#"
+ me by_macro() (as MyTrait) fn(&self)
+ me not_by_macro() (as MyTrait) fn(&self)
+ "#]],
+ )
+}
+
+#[test]
+fn completes_const_in_pub_trait_generated_by_macro() {
+ check(
+ r#"
+mod other_mod {
+ macro_rules! make_const {
+ ($name:ident) => {
+ const $name: u8 = 1;
+ };
+ }
+
+ pub trait MyTrait {
+ make_const! { by_macro }
+ }
+
+ pub struct Foo {}
+
+ impl MyTrait for Foo {}
+}
+
+fn main() {
+ use other_mod::{Foo, MyTrait};
+ let f = Foo {};
+ Foo::$0
+}
+"#,
+ expect![[r#"
+ ct by_macro (as MyTrait) pub const by_macro: u8
+ "#]],
+ )
+}
+
+#[test]
+fn completes_locals_from_macros() {
+ check(
+ r#"
+
+macro_rules! x {
+ ($x:ident, $expr:expr) => {
+ let $x = 0;
+ $expr
+ };
+}
+fn main() {
+ x! {
+ foobar, {
+ f$0
+ }
+ };
+}
+"#,
+ expect![[r#"
+ fn main() fn()
+ lc foobar i32
+ ma x!(…) macro_rules! x
+ bt u32
+ "#]],
+ )
+}
+
+#[test]
+fn regression_12644() {
+ check(
+ r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
+}
+macro_rules! vec {
+ ($elem:expr) => {
+ __rust_force_expr!($elem)
+ };
+}
+
+struct Struct;
+impl Struct {
+ fn foo(self) {}
+}
+
+fn f() {
+ vec![Struct].$0;
+}
+"#,
+ expect![[r#"
+ me foo() fn(self)
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
new file mode 100644
index 000000000..f0b7726c5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
@@ -0,0 +1,671 @@
+//! Completion tests for type position.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn record_field_ty() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> {
+ f: $0
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ sp Self
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn tuple_struct_field() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize>(f$0);
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ sp Self
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn fn_return_type() {
+ check(
+ r#"
+fn x<'lt, T, const C: usize>() -> $0
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn fn_return_type_no_local_items() {
+ check(
+ r#"
+fn foo() -> B$0 {
+ struct Bar;
+ enum Baz {}
+ union Bax {
+ i: i32,
+ f: f32
+ }
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it ()
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn inferred_type_const() {
+ check(
+ r#"
+struct Foo<T>(T);
+const FOO: $0 = Foo(2);
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it Foo<i32>
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_closure_param() {
+ check(
+ r#"
+fn f1(f: fn(i32) -> i32) {}
+fn f2() {
+ f1(|x: $0);
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it i32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_closure_return() {
+ check(
+ r#"
+fn f1(f: fn(u64) -> u64) {}
+fn f2() {
+ f1(|x| -> $0 {
+ x + 5
+ });
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it u64
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_fn_return() {
+ check(
+ r#"
+fn f2(x: u64) -> $0 {
+ x + 5
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it u64
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_fn_param() {
+ check(
+ r#"
+fn f1(x: i32) {}
+fn f2(x: $0) {
+ f1(x);
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it i32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_not_in_the_scope() {
+ check(
+ r#"
+mod a {
+ pub struct Foo<T>(T);
+ pub fn x() -> Foo<Foo<i32>> {
+ Foo(Foo(2))
+ }
+}
+fn foo<'lt, T, const C: usize>() {
+ let local = ();
+ let foo: $0 = a::x();
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md a
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ it a::Foo<a::Foo<i32>>
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_let() {
+ check(
+ r#"
+struct Foo<T>(T);
+fn foo<'lt, T, const C: usize>() {
+ let local = ();
+ let foo: $0 = Foo(2);
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ it Foo<i32>
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn body_type_pos() {
+ check(
+ r#"
+fn foo<'lt, T, const C: usize>() {
+ let local = ();
+ let _: $0;
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"
+fn foo<'lt, T, const C: usize>() {
+ let local = ();
+ let _: self::$0;
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ "#]],
+ );
+}
+
+#[test]
+fn completes_types_and_const_in_arg_list() {
+ cov_mark::check!(complete_assoc_type_in_generics_list);
+ check(
+ r#"
+trait Trait1 {
+ type Super;
+}
+trait Trait2: Trait1 {
+ type Foo;
+}
+
+fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
+"#,
+ expect![[r#"
+ ta Foo = (as Trait2) type Foo
+ ta Super = (as Trait1) type Super
+ "#]],
+ );
+ check(
+ r#"
+trait Trait1 {
+ type Super;
+}
+trait Trait2<T>: Trait1 {
+ type Foo;
+}
+
+fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
+"#,
+ expect![[r#"
+ ct CONST
+ cp CONST_PARAM
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tt Trait1
+ tt Trait2
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"
+trait Trait2 {
+ type Foo;
+}
+
+fn foo<'lt, T: Trait2<self::$0>, const CONST_PARAM: usize>(_: T) {}
+ "#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tt Trait2
+ un Union
+ "#]],
+ );
+}
+
+#[test]
+fn no_assoc_completion_outside_type_bounds() {
+ check(
+ r#"
+struct S;
+trait Tr<T> {
+ type Ty;
+}
+
+impl Tr<$0
+ "#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ sp Self
+ st Record
+ st S
+ st Tuple
+ st Unit
+ tt Tr
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn enum_qualified() {
+ check(
+ r#"
+impl Enum {
+ type AssocType = ();
+ const ASSOC_CONST: () = ();
+ fn assoc_fn() {}
+}
+fn func(_: Enum::$0) {}
+"#,
+ expect![[r#"
+ ta AssocType type AssocType = ()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_type_parameter_or_associated_type() {
+ check(
+ r#"
+trait MyTrait<T, U> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u$0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait<T, U> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u8, u$0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait<T, U> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u8, u8, I$0
+"#,
+ expect![[r#"
+ ta Item1 = (as MyTrait) type Item1
+ ta Item2 = (as MyTrait) type Item2
+ "#]],
+ );
+}
+
+#[test]
+fn completes_type_parameter_or_associated_type_with_default_value() {
+ check(
+ r#"
+trait MyTrait<T, U = u8> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u$0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait<T, U = u8> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u8, u$0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ ta Item1 = (as MyTrait) type Item1
+ ta Item2 = (as MyTrait) type Item2
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait<T, U = u8> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u8, u8, I$0
+"#,
+ expect![[r#"
+ ta Item1 = (as MyTrait) type Item1
+ ta Item2 = (as MyTrait) type Item2
+ "#]],
+ );
+}
+
+#[test]
+fn completes_types_after_associated_type() {
+ check(
+ r#"
+trait MyTrait {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<Item1 = $0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<Item1 = u8, Item2 = $0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
new file mode 100644
index 000000000..037d7dce5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
@@ -0,0 +1,384 @@
+//! Completion tests for use trees.
+use expect_test::{expect, Expect};
+
+use crate::tests::completion_list;
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn use_tree_start() {
+ cov_mark::check!(unqualified_path_selected_only);
+ check(
+ r#"
+//- /lib.rs crate:main deps:other_crate
+use f$0
+
+struct Foo;
+enum FooBar {
+ Foo,
+ Bar
+}
+mod foo {}
+//- /other_crate/lib.rs crate:other_crate
+// nothing here
+"#,
+ expect![[r#"
+ en FooBar::
+ md foo
+ md other_crate
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn use_tree_start_abs() {
+ cov_mark::check!(use_tree_crate_roots_only);
+ check(
+ r#"
+//- /lib.rs crate:main deps:other_crate
+use ::f$0
+
+struct Foo;
+mod foo {}
+//- /other_crate/lib.rs crate:other_crate
+// nothing here
+"#,
+ expect![[r#"
+ md other_crate
+ "#]],
+ );
+}
+
+#[test]
+fn dont_complete_current_use() {
+ cov_mark::check!(dont_complete_current_use);
+ check(r#"use self::foo$0;"#, expect![[r#""#]]);
+ check(
+ r#"
+mod foo { pub struct S; }
+use self::{foo::*, bar$0};
+"#,
+ expect![[r#"
+ md foo
+ st S
+ "#]],
+ );
+}
+
+#[test]
+fn nested_use_tree() {
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct FooBar;
+ }
+}
+use foo::{bar::$0}
+"#,
+ expect![[r#"
+ st FooBar
+ "#]],
+ );
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct FooBar;
+ }
+}
+use foo::{$0}
+"#,
+ expect![[r#"
+ md bar
+ kw self
+ "#]],
+ );
+}
+
+#[test]
+fn deeply_nested_use_tree() {
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct FooBarBaz;
+ }
+ }
+}
+use foo::{bar::{baz::$0}}
+"#,
+ expect![[r#"
+ st FooBarBaz
+ "#]],
+ );
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct FooBarBaz;
+ }
+ }
+}
+use foo::{bar::{$0}}
+"#,
+ expect![[r#"
+ md baz
+ kw self
+ "#]],
+ );
+}
+
+#[test]
+fn plain_qualified_use_tree() {
+ check(
+ r#"
+use foo::$0
+
+mod foo {
+ struct Private;
+ pub struct Foo;
+ macro_rules! foo_ { {} => {} }
+ pub use foo_ as foo;
+}
+struct Bar;
+"#,
+ expect![[r#"
+ ma foo macro_rules! foo_
+ st Foo
+ "#]],
+ );
+}
+
+#[test]
+fn enum_plain_qualified_use_tree() {
+ cov_mark::check!(enum_plain_qualified_use_tree);
+ check(
+ r#"
+use Foo::$0
+
+enum Foo {
+ UnitVariant,
+ TupleVariant(),
+ RecordVariant {},
+}
+impl Foo {
+ const CONST: () = ()
+ fn func() {}
+}
+"#,
+ expect![[r#"
+ ev RecordVariant RecordVariant
+ ev TupleVariant TupleVariant
+ ev UnitVariant UnitVariant
+ "#]],
+ );
+}
+
+#[test]
+fn self_qualified_use_tree() {
+ check(
+ r#"
+use self::$0
+
+mod foo {}
+struct Bar;
+"#,
+ expect![[r#"
+ md foo
+ st Bar
+ "#]],
+ );
+}
+
+#[test]
+fn super_qualified_use_tree() {
+ check(
+ r#"
+mod bar {
+ use super::$0
+}
+
+mod foo {}
+struct Bar;
+"#,
+ expect![[r#"
+ md bar
+ md foo
+ st Bar
+ "#]],
+ );
+}
+
+#[test]
+fn super_super_qualified_use_tree() {
+ check(
+ r#"
+mod a {
+ const A: usize = 0;
+ mod b {
+ const B: usize = 0;
+ mod c { use super::super::$0 }
+ }
+}
+"#,
+ expect![[r#"
+ ct A
+ md b
+ kw super::
+ "#]],
+ );
+}
+
+#[test]
+fn crate_qualified_use_tree() {
+ check(
+ r#"
+use crate::$0
+
+mod foo {}
+struct Bar;
+"#,
+ expect![[r#"
+ md foo
+ st Bar
+ "#]],
+ );
+}
+
+#[test]
+fn extern_crate_qualified_use_tree() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:other_crate
+use other_crate::$0
+//- /other_crate/lib.rs crate:other_crate
+pub struct Foo;
+pub mod foo {}
+"#,
+ expect![[r#"
+ md foo
+ st Foo
+ "#]],
+ );
+}
+
+#[test]
+fn pub_use_tree() {
+ check(
+ r#"
+pub struct X;
+pub mod bar {}
+pub use $0;
+"#,
+ expect![[r#"
+ md bar
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn pub_suggest_use_tree_super_acc_to_depth_in_tree() {
+ // https://github.com/rust-lang/rust-analyzer/issues/12439
+ // Check discussion in https://github.com/rust-lang/rust-analyzer/pull/12447
+
+ check(
+ r#"
+mod foo {
+ mod bar {
+ pub use super::$0;
+ }
+}
+"#,
+ expect![[r#"
+ md bar
+ kw super::
+ "#]],
+ );
+
+ // Do not suggest `super` when already at the crate root
+ check(
+ r#"
+mod foo {
+ mod bar {
+ pub use super::super::$0;
+ }
+}
+"#,
+ expect![[r#"
+ md foo
+ "#]],
+ );
+
+ check(
+ r#"
+mod foo {
+ use $0;
+}
+"#,
+ expect![[r#"
+ kw crate::
+ kw self::
+ kw super::
+ "#]],
+ );
+
+ // Do not suggest `super` after a non-`super` segment in the path (here it is `foo1`)
+ check(
+ r#"
+mod foo {
+ mod bar {
+ use super::super::foo1::$0;
+ }
+}
+
+mod foo1 {
+ pub mod bar1 {}
+}
+"#,
+ expect![[r#"
+ md bar1
+ "#]],
+ );
+}
+
+#[test]
+fn use_tree_braces_at_start() {
+ check(
+ r#"
+struct X;
+mod bar {}
+use {$0};
+"#,
+ expect![[r#"
+ md bar
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn impl_prefix_does_not_add_fn_snippet() {
+ // regression test for 7222
+ check(
+ r#"
+mod foo {
+ pub fn bar(x: u32) {}
+}
+use self::foo::impl$0
+"#,
+ expect![[r#"
+ fn bar fn(u32)
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/visibility.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/visibility.rs
new file mode 100644
index 000000000..c18d6e66d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/visibility.rs
@@ -0,0 +1,90 @@
+//! Completion tests for visibility modifiers.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, completion_list_with_trigger_character};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+fn check_with_trigger_character(ra_fixture: &str, trigger_character: char, expect: Expect) {
+ let actual = completion_list_with_trigger_character(ra_fixture, Some(trigger_character));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn empty_pub() {
+ cov_mark::check!(kw_completion_in);
+ check_with_trigger_character(
+ r#"
+pub($0)
+"#,
+ '(',
+ expect![[r#"
+ kw crate
+ kw in
+ kw self
+ "#]],
+ );
+}
+
+#[test]
+fn after_in_kw() {
+ check(
+ r#"
+pub(in $0)
+"#,
+ expect![[r#"
+ kw crate
+ kw self
+ "#]],
+ );
+}
+
+#[test]
+fn qualified() {
+ cov_mark::check!(visibility_qualified);
+ check(
+ r#"
+mod foo {
+ pub(in crate::$0)
+}
+
+mod bar {}
+"#,
+ expect![[r#"
+ md foo
+ "#]],
+ );
+ check(
+ r#"
+mod qux {
+ mod foo {
+ pub(in crate::$0)
+ }
+ mod baz {}
+}
+
+mod bar {}
+"#,
+ expect![[r#"
+ md qux
+ "#]],
+ );
+ check(
+ r#"
+mod qux {
+ mod foo {
+ pub(in crate::qux::$0)
+ }
+ mod baz {}
+}
+
+mod bar {}
+"#,
+ expect![[r#"
+ md foo
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
new file mode 100644
index 000000000..a1b0bd6cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -0,0 +1,39 @@
+[package]
+name = "ide-db"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+tracing = "0.1.35"
+rayon = "1.5.3"
+fst = { version = "0.4.7", default-features = false }
+rustc-hash = "1.1.0"
+once_cell = "1.12.0"
+either = "1.7.0"
+itertools = "0.10.3"
+arrayvec = "0.7.2"
+indexmap = "1.9.1"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+# ide should depend only on the top-level `hir` package. if you need
+# something from some `hir-xxx` subpackage, reexport the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+xshell = "0.2.2"
+expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
new file mode 100644
index 000000000..7303ef8b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
@@ -0,0 +1,78 @@
+//! This module provides functionality for querying callable information about a token.
+
+use either::Either;
+use hir::{Semantics, Type};
+use syntax::{
+ ast::{self, HasArgList, HasName},
+ AstNode, SyntaxToken,
+};
+
+use crate::RootDatabase;
+
+#[derive(Debug)]
+pub struct ActiveParameter {
+ pub ty: Type,
+ pub pat: Either<ast::SelfParam, ast::Pat>,
+}
+
+impl ActiveParameter {
+ /// Returns information about the call argument this token is part of.
+ pub fn at_token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Self> {
+ let (signature, active_parameter) = callable_for_token(sema, token)?;
+
+ let idx = active_parameter?;
+ let mut params = signature.params(sema.db);
+ if !(idx < params.len()) {
+ cov_mark::hit!(too_many_arguments);
+ return None;
+ }
+ let (pat, ty) = params.swap_remove(idx);
+ pat.map(|pat| ActiveParameter { ty, pat })
+ }
+
+ pub fn ident(&self) -> Option<ast::Name> {
+ self.pat.as_ref().right().and_then(|param| match param {
+ ast::Pat::IdentPat(ident) => ident.name(),
+ _ => None,
+ })
+ }
+}
+
+/// Returns a [`hir::Callable`] this token is a part of and its argument index of said callable.
+pub fn callable_for_token(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<(hir::Callable, Option<usize>)> {
+ // Find the calling expression and its NameRef
+ let parent = token.parent()?;
+ let calling_node = parent.ancestors().filter_map(ast::CallableExpr::cast).find(|it| {
+ it.arg_list()
+ .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start()))
+ })?;
+
+ callable_for_node(sema, &calling_node, &token)
+}
+
+pub fn callable_for_node(
+ sema: &Semantics<'_, RootDatabase>,
+ calling_node: &ast::CallableExpr,
+ token: &SyntaxToken,
+) -> Option<(hir::Callable, Option<usize>)> {
+ let callable = match &calling_node {
+ ast::CallableExpr::Call(call) => {
+ let expr = call.expr()?;
+ sema.type_of_expr(&expr)?.adjusted().as_callable(sema.db)
+ }
+ ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),
+ }?;
+ let active_param = if let Some(arg_list) = calling_node.arg_list() {
+ let param = arg_list
+ .args()
+ .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
+ .count();
+ Some(param)
+ } else {
+ None
+ };
+ Some((callable, active_param))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
new file mode 100644
index 000000000..98b0e9c94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -0,0 +1,163 @@
+//! Applies changes to the IDE state transactionally.
+
+use std::sync::Arc;
+
+use base_db::{
+ salsa::{Database, Durability},
+ Change, SourceRootId,
+};
+use profile::{memory_usage, Bytes};
+use rustc_hash::FxHashSet;
+
+use crate::{symbol_index::SymbolsDatabase, RootDatabase};
+
+impl RootDatabase {
+ pub fn request_cancellation(&mut self) {
+ let _p = profile::span("RootDatabase::request_cancellation");
+ self.salsa_runtime_mut().synthetic_write(Durability::LOW);
+ }
+
+ pub fn apply_change(&mut self, change: Change) {
+ let _p = profile::span("RootDatabase::apply_change");
+ self.request_cancellation();
+ tracing::info!("apply_change {:?}", change);
+ if let Some(roots) = &change.roots {
+ let mut local_roots = FxHashSet::default();
+ let mut library_roots = FxHashSet::default();
+ for (idx, root) in roots.iter().enumerate() {
+ let root_id = SourceRootId(idx as u32);
+ if root.is_library {
+ library_roots.insert(root_id);
+ } else {
+ local_roots.insert(root_id);
+ }
+ }
+ self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+ self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH);
+ }
+ change.apply(self);
+ }
+
+ // Feature: Memory Usage
+ //
+ // Clears rust-analyzer's internal database and prints memory usage statistics.
+ //
+ // |===
+ // | Editor | Action Name
+ //
+ // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)**
+ // |===
+ // image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[]
+ pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
+ let mut acc: Vec<(String, Bytes)> = vec![];
+ macro_rules! purge_each_query {
+ ($($q:path)*) => {$(
+ let before = memory_usage().allocated;
+ $q.in_db(self).purge();
+ let after = memory_usage().allocated;
+ let q: $q = Default::default();
+ let name = format!("{:?}", q);
+ acc.push((name, before - after));
+ )*}
+ }
+ purge_each_query![
+ // SourceDatabase
+ base_db::ParseQuery
+ base_db::CrateGraphQuery
+
+ // SourceDatabaseExt
+ base_db::FileTextQuery
+ base_db::FileSourceRootQuery
+ base_db::SourceRootQuery
+ base_db::SourceRootCratesQuery
+
+ // AstDatabase
+ hir::db::AstIdMapQuery
+ hir::db::MacroArgTextQuery
+ hir::db::MacroDefQuery
+ hir::db::ParseMacroExpansionQuery
+ hir::db::MacroExpandQuery
+ hir::db::HygieneFrameQuery
+ hir::db::InternMacroCallQuery
+
+ // DefDatabase
+ hir::db::FileItemTreeQuery
+ hir::db::BlockDefMapQuery
+ hir::db::CrateDefMapQueryQuery
+ hir::db::FieldsAttrsQuery
+ hir::db::VariantsAttrsQuery
+ hir::db::FieldsAttrsSourceMapQuery
+ hir::db::VariantsAttrsSourceMapQuery
+ hir::db::StructDataQuery
+ hir::db::UnionDataQuery
+ hir::db::EnumDataQuery
+ hir::db::ImplDataQuery
+ hir::db::TraitDataQuery
+ hir::db::TypeAliasDataQuery
+ hir::db::FunctionDataQuery
+ hir::db::ConstDataQuery
+ hir::db::StaticDataQuery
+ hir::db::BodyWithSourceMapQuery
+ hir::db::BodyQuery
+ hir::db::ExprScopesQuery
+ hir::db::GenericParamsQuery
+ hir::db::AttrsQuery
+ hir::db::CrateLangItemsQuery
+ hir::db::LangItemQuery
+ hir::db::ImportMapQuery
+
+ // HirDatabase
+ hir::db::InferQueryQuery
+ hir::db::TyQuery
+ hir::db::ValueTyQuery
+ hir::db::ImplSelfTyQuery
+ hir::db::ImplTraitQuery
+ hir::db::FieldTypesQuery
+ hir::db::CallableItemSignatureQuery
+ hir::db::GenericPredicatesForParamQuery
+ hir::db::GenericPredicatesQuery
+ hir::db::GenericDefaultsQuery
+ hir::db::InherentImplsInCrateQuery
+ hir::db::TraitEnvironmentQuery
+ hir::db::TraitImplsInCrateQuery
+ hir::db::TraitImplsInDepsQuery
+ hir::db::AssociatedTyDataQuery
+ hir::db::AssociatedTyDataQuery
+ hir::db::TraitDatumQuery
+ hir::db::StructDatumQuery
+ hir::db::ImplDatumQuery
+ hir::db::FnDefDatumQuery
+ hir::db::ReturnTypeImplTraitsQuery
+ hir::db::InternCallableDefQuery
+ hir::db::InternTypeOrConstParamIdQuery
+ hir::db::InternImplTraitIdQuery
+ hir::db::InternClosureQuery
+ hir::db::AssociatedTyValueQuery
+ hir::db::TraitSolveQueryQuery
+ hir::db::InternTypeOrConstParamIdQuery
+
+ // SymbolsDatabase
+ crate::symbol_index::ModuleSymbolsQuery
+ crate::symbol_index::LibrarySymbolsQuery
+ crate::symbol_index::LocalRootsQuery
+ crate::symbol_index::LibraryRootsQuery
+
+ // LineIndexDatabase
+ crate::LineIndexQuery
+
+ // InternDatabase
+ hir::db::InternFunctionQuery
+ hir::db::InternStructQuery
+ hir::db::InternUnionQuery
+ hir::db::InternEnumQuery
+ hir::db::InternConstQuery
+ hir::db::InternStaticQuery
+ hir::db::InternTraitQuery
+ hir::db::InternTypeAliasQuery
+ hir::db::InternImplQuery
+ ];
+
+ acc.sort_by_key(|it| std::cmp::Reverse(it.1));
+ acc
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/assists.rs b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs
new file mode 100644
index 000000000..da23763dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs
@@ -0,0 +1,137 @@
+//! This module defines the `Assist` data structure. The actual assist live in
+//! the `ide_assists` downstream crate. We want to define the data structures in
+//! this low-level crate though, because `ide_diagnostics` also needs them
+//! (fixits for diagnostics and assists are the same thing under the hood). We
+//! want to compile `ide_assists` and `ide_diagnostics` in parallel though, so
+//! we pull the common definitions upstream, to this crate.
+
+use std::str::FromStr;
+
+use syntax::TextRange;
+
+use crate::{label::Label, source_change::SourceChange};
+
+#[derive(Debug, Clone)]
+pub struct Assist {
+ pub id: AssistId,
+ /// Short description of the assist, as shown in the UI.
+ pub label: Label,
+ pub group: Option<GroupLabel>,
+ /// Target ranges are used to sort assists: the smaller the target range,
+ /// the more specific assist is, and so it should be sorted first.
+ pub target: TextRange,
+ /// Computing source change sometimes is much more costly than computing the
+ /// other fields. Additionally, the actual change is not required to show
+ /// the lightbulb UI, it only is needed when the user tries to apply an
+ /// assist. So, we compute it lazily: the API allows requesting assists with
+ /// or without source change. We could (and in fact, used to) distinguish
+ /// between resolved and unresolved assists at the type level, but this is
+ /// cumbersome, especially if you want to embed an assist into another data
+ /// structure, such as a diagnostic.
+ pub source_change: Option<SourceChange>,
+ pub trigger_signature_help: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum AssistKind {
+ // FIXME: does the None variant make sense? Probably not.
+ None,
+
+ QuickFix,
+ Generate,
+ Refactor,
+ RefactorExtract,
+ RefactorInline,
+ RefactorRewrite,
+}
+
+impl AssistKind {
+ pub fn contains(self, other: AssistKind) -> bool {
+ if self == other {
+ return true;
+ }
+
+ match self {
+ AssistKind::None | AssistKind::Generate => true,
+ AssistKind::Refactor => matches!(
+ other,
+ AssistKind::RefactorExtract
+ | AssistKind::RefactorInline
+ | AssistKind::RefactorRewrite
+ ),
+ _ => false,
+ }
+ }
+
+ pub fn name(&self) -> &str {
+ match self {
+ AssistKind::None => "None",
+ AssistKind::QuickFix => "QuickFix",
+ AssistKind::Generate => "Generate",
+ AssistKind::Refactor => "Refactor",
+ AssistKind::RefactorExtract => "RefactorExtract",
+ AssistKind::RefactorInline => "RefactorInline",
+ AssistKind::RefactorRewrite => "RefactorRewrite",
+ }
+ }
+}
+
+impl FromStr for AssistKind {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "None" => Ok(AssistKind::None),
+ "QuickFix" => Ok(AssistKind::QuickFix),
+ "Generate" => Ok(AssistKind::Generate),
+ "Refactor" => Ok(AssistKind::Refactor),
+ "RefactorExtract" => Ok(AssistKind::RefactorExtract),
+ "RefactorInline" => Ok(AssistKind::RefactorInline),
+ "RefactorRewrite" => Ok(AssistKind::RefactorRewrite),
+ unknown => Err(format!("Unknown AssistKind: '{}'", unknown)),
+ }
+ }
+}
+
+/// Unique identifier of the assist, should not be shown to the user
+/// directly.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct AssistId(pub &'static str, pub AssistKind);
+
+/// A way to control how many assists to resolve during the assist resolution.
+/// When an assist is resolved, its edits are calculated that might be costly to always do by default.
+#[derive(Debug)]
+pub enum AssistResolveStrategy {
+ /// No assists should be resolved.
+ None,
+ /// All assists should be resolved.
+ All,
+ /// Only a certain assist should be resolved.
+ Single(SingleResolve),
+}
+
+/// Hold the [`AssistId`] data of a certain assist to resolve.
+/// The original id object cannot be used due to a `'static` lifetime
+/// and the requirement to construct this struct dynamically during the resolve handling.
+#[derive(Debug)]
+pub struct SingleResolve {
+ /// The id of the assist.
+ pub assist_id: String,
+ /// The kind of the assist.
+ pub assist_kind: AssistKind,
+}
+
+impl AssistResolveStrategy {
+ pub fn should_resolve(&self, id: &AssistId) -> bool {
+ match self {
+ AssistResolveStrategy::None => false,
+ AssistResolveStrategy::All => true,
+ AssistResolveStrategy::Single(single_resolve) => {
+ single_resolve.assist_id == id.0 && single_resolve.assist_kind == id.1
+ }
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct GroupLabel(pub String);
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
new file mode 100644
index 000000000..aeaca00ec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -0,0 +1,545 @@
+//! `NameDefinition` keeps information about the element we want to search references for.
+//! The element is represented by `NameKind`. It's located inside some `container` and
+//! has a `visibility`, which defines a search scope.
+//! Note that the reference search is not possible for all of the classified items.
+
+// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
+
+use arrayvec::ArrayVec;
+use hir::{
+ Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field,
+ Function, GenericParam, HasVisibility, Impl, ItemInNs, Label, Local, Macro, Module, ModuleDef,
+ Name, PathResolution, Semantics, Static, ToolModule, Trait, TypeAlias, Variant, Visibility,
+};
+use stdx::impl_from;
+use syntax::{
+ ast::{self, AstNode},
+ match_ast, SyntaxKind, SyntaxNode, SyntaxToken,
+};
+
+use crate::RootDatabase;
+
+// FIXME: a more precise name would probably be `Symbol`?
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
+pub enum Definition {
+ Macro(Macro),
+ Field(Field),
+ Module(Module),
+ Function(Function),
+ Adt(Adt),
+ Variant(Variant),
+ Const(Const),
+ Static(Static),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ BuiltinType(BuiltinType),
+ SelfType(Impl),
+ Local(Local),
+ GenericParam(GenericParam),
+ Label(Label),
+ DeriveHelper(DeriveHelper),
+ BuiltinAttr(BuiltinAttr),
+ ToolModule(ToolModule),
+}
+
+impl Definition {
+ pub fn canonical_module_path(&self, db: &RootDatabase) -> Option<impl Iterator<Item = Module>> {
+ self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
+ }
+
+ pub fn krate(&self, db: &RootDatabase) -> Option<Crate> {
+ Some(match self {
+ Definition::Module(m) => m.krate(),
+ _ => self.module(db)?.krate(),
+ })
+ }
+
+ pub fn module(&self, db: &RootDatabase) -> Option<Module> {
+ let module = match self {
+ Definition::Macro(it) => it.module(db),
+ Definition::Module(it) => it.parent(db)?,
+ Definition::Field(it) => it.parent_def(db).module(db),
+ Definition::Function(it) => it.module(db),
+ Definition::Adt(it) => it.module(db),
+ Definition::Const(it) => it.module(db),
+ Definition::Static(it) => it.module(db),
+ Definition::Trait(it) => it.module(db),
+ Definition::TypeAlias(it) => it.module(db),
+ Definition::Variant(it) => it.module(db),
+ Definition::SelfType(it) => it.module(db),
+ Definition::Local(it) => it.module(db),
+ Definition::GenericParam(it) => it.module(db),
+ Definition::Label(it) => it.module(db),
+ Definition::DeriveHelper(it) => it.derive().module(db),
+ Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => {
+ return None
+ }
+ };
+ Some(module)
+ }
+
+ pub fn visibility(&self, db: &RootDatabase) -> Option<Visibility> {
+ let vis = match self {
+ Definition::Field(sf) => sf.visibility(db),
+ Definition::Module(it) => it.visibility(db),
+ Definition::Function(it) => it.visibility(db),
+ Definition::Adt(it) => it.visibility(db),
+ Definition::Const(it) => it.visibility(db),
+ Definition::Static(it) => it.visibility(db),
+ Definition::Trait(it) => it.visibility(db),
+ Definition::TypeAlias(it) => it.visibility(db),
+ Definition::Variant(it) => it.visibility(db),
+ Definition::BuiltinType(_) => Visibility::Public,
+ Definition::Macro(_) => return None,
+ Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_)
+ | Definition::SelfType(_)
+ | Definition::Local(_)
+ | Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::DeriveHelper(_) => return None,
+ };
+ Some(vis)
+ }
+
+ pub fn name(&self, db: &RootDatabase) -> Option<Name> {
+ let name = match self {
+ Definition::Macro(it) => it.name(db),
+ Definition::Field(it) => it.name(db),
+ Definition::Module(it) => it.name(db)?,
+ Definition::Function(it) => it.name(db),
+ Definition::Adt(it) => it.name(db),
+ Definition::Variant(it) => it.name(db),
+ Definition::Const(it) => it.name(db)?,
+ Definition::Static(it) => it.name(db),
+ Definition::Trait(it) => it.name(db),
+ Definition::TypeAlias(it) => it.name(db),
+ Definition::BuiltinType(it) => it.name(),
+ Definition::SelfType(_) => return None,
+ Definition::Local(it) => it.name(db),
+ Definition::GenericParam(it) => it.name(db),
+ Definition::Label(it) => it.name(db),
+ Definition::BuiltinAttr(_) => return None, // FIXME
+ Definition::ToolModule(_) => return None, // FIXME
+ Definition::DeriveHelper(it) => it.name(db),
+ };
+ Some(name)
+ }
+}
+
+#[derive(Debug)]
+pub enum IdentClass {
+ NameClass(NameClass),
+ NameRefClass(NameRefClass),
+}
+
+impl IdentClass {
+ pub fn classify_node(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+ ) -> Option<IdentClass> {
+ match_ast! {
+ match node {
+ ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
+ ast::NameRef(name_ref) => NameRefClass::classify(sema, &name_ref).map(IdentClass::NameRefClass),
+ ast::Lifetime(lifetime) => {
+ NameClass::classify_lifetime(sema, &lifetime)
+ .map(IdentClass::NameClass)
+ .or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
+ },
+ _ => None,
+ }
+ }
+ }
+
+ pub fn classify_token(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+ ) -> Option<IdentClass> {
+ let parent = token.parent()?;
+ Self::classify_node(sema, &parent)
+ }
+
+ pub fn classify_lifetime(
+ sema: &Semantics<'_, RootDatabase>,
+ lifetime: &ast::Lifetime,
+ ) -> Option<IdentClass> {
+ NameRefClass::classify_lifetime(sema, lifetime)
+ .map(IdentClass::NameRefClass)
+ .or_else(|| NameClass::classify_lifetime(sema, lifetime).map(IdentClass::NameClass))
+ }
+
+ pub fn definitions(self) -> ArrayVec<Definition, 2> {
+ let mut res = ArrayVec::new();
+ match self {
+ IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
+ res.push(it)
+ }
+ IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
+ res.push(Definition::Local(local_def));
+ res.push(Definition::Field(field_ref));
+ }
+ IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
+ IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
+ res.push(Definition::Local(local_ref));
+ res.push(Definition::Field(field_ref));
+ }
+ }
+ res
+ }
+}
+
+/// On a first blush, a single `ast::Name` defines a single definition at some
+/// scope. That is, that, by just looking at the syntactical category, we can
+/// unambiguously define the semantic category.
+///
+/// Sadly, that's not 100% true, there are special cases. To make sure that
+/// callers handle all the special cases correctly via exhaustive matching, we
+/// add a [`NameClass`] enum which lists all of them!
+///
+/// A model special case is `None` constant in pattern.
+#[derive(Debug)]
+pub enum NameClass {
+ Definition(Definition),
+ /// `None` in `if let None = Some(82) {}`.
+ /// Syntactically, it is a name, but semantically it is a reference.
+ ConstReference(Definition),
+ /// `field` in `if let Foo { field } = foo`. Here, `ast::Name` both introduces
+ /// a definition into a local scope, and refers to an existing definition.
+ PatFieldShorthand {
+ local_def: Local,
+ field_ref: Field,
+ },
+}
+
+impl NameClass {
+ /// `Definition` defined by this name.
+ pub fn defined(self) -> Option<Definition> {
+ let res = match self {
+ NameClass::Definition(it) => it,
+ NameClass::ConstReference(_) => return None,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ };
+ Some(res)
+ }
+
+ pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
+ let _p = profile::span("classify_name");
+
+ let parent = name.syntax().parent()?;
+
+ let definition = match_ast! {
+ match parent {
+ ast::Item(it) => classify_item(sema, it)?,
+ ast::IdentPat(it) => return classify_ident_pat(sema, it),
+ ast::Rename(it) => classify_rename(sema, it)?,
+ ast::SelfParam(it) => Definition::Local(sema.to_def(&it)?),
+ ast::RecordField(it) => Definition::Field(sema.to_def(&it)?),
+ ast::Variant(it) => Definition::Variant(sema.to_def(&it)?),
+ ast::TypeParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()),
+ ast::ConstParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()),
+ _ => return None,
+ }
+ };
+ return Some(NameClass::Definition(definition));
+
+ fn classify_item(
+ sema: &Semantics<'_, RootDatabase>,
+ item: ast::Item,
+ ) -> Option<Definition> {
+ let definition = match item {
+ ast::Item::MacroRules(it) => {
+ Definition::Macro(sema.to_def(&ast::Macro::MacroRules(it))?)
+ }
+ ast::Item::MacroDef(it) => {
+ Definition::Macro(sema.to_def(&ast::Macro::MacroDef(it))?)
+ }
+ ast::Item::Const(it) => Definition::Const(sema.to_def(&it)?),
+ ast::Item::Fn(it) => {
+ let def = sema.to_def(&it)?;
+ def.as_proc_macro(sema.db)
+ .map(Definition::Macro)
+ .unwrap_or(Definition::Function(def))
+ }
+ ast::Item::Module(it) => Definition::Module(sema.to_def(&it)?),
+ ast::Item::Static(it) => Definition::Static(sema.to_def(&it)?),
+ ast::Item::Trait(it) => Definition::Trait(sema.to_def(&it)?),
+ ast::Item::TypeAlias(it) => Definition::TypeAlias(sema.to_def(&it)?),
+ ast::Item::Enum(it) => Definition::Adt(hir::Adt::Enum(sema.to_def(&it)?)),
+ ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)),
+ ast::Item::Union(it) => Definition::Adt(hir::Adt::Union(sema.to_def(&it)?)),
+ _ => return None,
+ };
+ Some(definition)
+ }
+
+ fn classify_ident_pat(
+ sema: &Semantics<'_, RootDatabase>,
+ ident_pat: ast::IdentPat,
+ ) -> Option<NameClass> {
+ if let Some(def) = sema.resolve_bind_pat_to_const(&ident_pat) {
+ return Some(NameClass::ConstReference(Definition::from(def)));
+ }
+
+ let local = sema.to_def(&ident_pat)?;
+ let pat_parent = ident_pat.syntax().parent();
+ if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) {
+ if record_pat_field.name_ref().is_none() {
+ if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) {
+ return Some(NameClass::PatFieldShorthand {
+ local_def: local,
+ field_ref: field,
+ });
+ }
+ }
+ }
+ Some(NameClass::Definition(Definition::Local(local)))
+ }
+
+ fn classify_rename(
+ sema: &Semantics<'_, RootDatabase>,
+ rename: ast::Rename,
+ ) -> Option<Definition> {
+ if let Some(use_tree) = rename.syntax().parent().and_then(ast::UseTree::cast) {
+ let path = use_tree.path()?;
+ sema.resolve_path(&path).map(Definition::from)
+ } else {
+ let extern_crate = rename.syntax().parent().and_then(ast::ExternCrate::cast)?;
+ let krate = sema.resolve_extern_crate(&extern_crate)?;
+ let root_module = krate.root_module(sema.db);
+ Some(Definition::Module(root_module))
+ }
+ }
+ }
+
+ pub fn classify_lifetime(
+ sema: &Semantics<'_, RootDatabase>,
+ lifetime: &ast::Lifetime,
+ ) -> Option<NameClass> {
+ let _p = profile::span("classify_lifetime").detail(|| lifetime.to_string());
+ let parent = lifetime.syntax().parent()?;
+
+ if let Some(it) = ast::LifetimeParam::cast(parent.clone()) {
+ sema.to_def(&it).map(Into::into).map(Definition::GenericParam)
+ } else if let Some(it) = ast::Label::cast(parent) {
+ sema.to_def(&it).map(Definition::Label)
+ } else {
+ None
+ }
+ .map(NameClass::Definition)
+ }
+}
+
+/// This is similar to [`NameClass`], but works for [`ast::NameRef`] rather than
+/// for [`ast::Name`]. Similarly, what looks like a reference in syntax is a
+/// reference most of the time, but there are a couple of annoying exceptions.
+///
+/// A model special case is field shorthand syntax, which uses a single
+/// reference to point to two different defs.
+#[derive(Debug)]
+pub enum NameRefClass {
+ Definition(Definition),
+ FieldShorthand { local_ref: Local, field_ref: Field },
+}
+
impl NameRefClass {
    // Note: we don't have unit-tests for this rather important function.
    // It is primarily exercised via goto definition tests in `ide`.
    /// Resolves what `name_ref` refers to. The checks are ordered: record
    /// literal fields (for shorthand detection), then path segments, then the
    /// remaining syntactic contexts via `match_ast!`.
    pub fn classify(
        sema: &Semantics<'_, RootDatabase>,
        name_ref: &ast::NameRef,
    ) -> Option<NameRefClass> {
        let _p = profile::span("classify_name_ref").detail(|| name_ref.to_string());

        let parent = name_ref.syntax().parent()?;

        // Record-literal field first: `S { x }` may be shorthand, in which
        // case the single token names both the field and a local.
        if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) {
            if let Some((field, local, _)) = sema.resolve_record_field(&record_field) {
                let res = match local {
                    None => NameRefClass::Definition(Definition::Field(field)),
                    Some(local) => {
                        NameRefClass::FieldShorthand { field_ref: field, local_ref: local }
                    }
                };
                return Some(res);
            }
        }

        // A name ref that is a path segment resolves through path resolution.
        if let Some(path) = ast::PathSegment::cast(parent.clone()).map(|it| it.parent_path()) {
            if path.parent_path().is_none() {
                if let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
                    // Only use this to resolve to macro calls for last segments as qualifiers resolve
                    // to modules below.
                    if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
                        return Some(NameRefClass::Definition(Definition::Macro(macro_def)));
                    }
                }
            }
            return sema.resolve_path(&path).map(Into::into).map(NameRefClass::Definition);
        }

        // Non-path contexts: dispatch on the parent node's kind.
        match_ast! {
            match parent {
                ast::MethodCallExpr(method_call) => {
                    sema.resolve_method_call(&method_call)
                        .map(Definition::Function)
                        .map(NameRefClass::Definition)
                },
                ast::FieldExpr(field_expr) => {
                    sema.resolve_field(&field_expr)
                        .map(Definition::Field)
                        .map(NameRefClass::Definition)
                },
                ast::RecordPatField(record_pat_field) => {
                    sema.resolve_record_pat_field(&record_pat_field)
                        .map(Definition::Field)
                        .map(NameRefClass::Definition)
                },
                ast::AssocTypeArg(_) => {
                    // `Trait<Assoc = Ty>`
                    //        ^^^^^
                    // Resolve the enclosing path to a trait, then look for an
                    // associated type (including supertraits) with this name.
                    let containing_path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
                    let resolved = sema.resolve_path(&containing_path)?;
                    if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved {
                        if let Some(ty) = tr
                            .items_with_supertraits(sema.db)
                            .iter()
                            .filter_map(|&assoc| match assoc {
                                hir::AssocItem::TypeAlias(it) => Some(it),
                                _ => None,
                            })
                            .find(|alias| alias.name(sema.db).to_smol_str() == name_ref.text().as_str())
                        {
                            return Some(NameRefClass::Definition(Definition::TypeAlias(ty)));
                        }
                    }
                    None
                },
                ast::ExternCrate(extern_crate) => {
                    // `extern crate foo` references `foo`'s root module.
                    let krate = sema.resolve_extern_crate(&extern_crate)?;
                    let root_module = krate.root_module(sema.db);
                    Some(NameRefClass::Definition(Definition::Module(root_module)))
                },
                _ => None
            }
        }
    }

    /// Like [`NameRefClass::classify`], but for lifetime *references*: `'a` in
    /// use positions and `'label` in `break`/`continue` expressions.
    pub fn classify_lifetime(
        sema: &Semantics<'_, RootDatabase>,
        lifetime: &ast::Lifetime,
    ) -> Option<NameRefClass> {
        let _p = profile::span("classify_lifetime_ref").detail(|| lifetime.to_string());
        let parent = lifetime.syntax().parent()?;
        match parent.kind() {
            SyntaxKind::BREAK_EXPR | SyntaxKind::CONTINUE_EXPR => {
                sema.resolve_label(lifetime).map(Definition::Label).map(NameRefClass::Definition)
            }
            SyntaxKind::LIFETIME_ARG
            | SyntaxKind::SELF_PARAM
            | SyntaxKind::TYPE_BOUND
            | SyntaxKind::WHERE_PRED
            | SyntaxKind::REF_TYPE => sema
                .resolve_lifetime_param(lifetime)
                .map(GenericParam::LifetimeParam)
                .map(Definition::GenericParam)
                .map(NameRefClass::Definition),
            // lifetime bounds, as in the 'b in 'a: 'b aren't wrapped in TypeBound nodes so we gotta check
            // if our lifetime is in a LifetimeParam without being the constrained lifetime
            _ if ast::LifetimeParam::cast(parent).and_then(|param| param.lifetime()).as_ref()
                != Some(lifetime) =>
            {
                sema.resolve_lifetime_param(lifetime)
                    .map(GenericParam::LifetimeParam)
                    .map(Definition::GenericParam)
                    .map(NameRefClass::Definition)
            }
            _ => None,
        }
    }
}
+
// NOTE(review): `impl_from!` presumably expands to `From<T> for Definition`
// impls wrapping each listed type in its same-named variant — confirm against
// the macro's definition in its home crate.
impl_from!(
    Field, Module, Function, Adt, Variant, Const, Static, Trait, TypeAlias, BuiltinType, Local,
    GenericParam, Label, Macro
    for Definition
);
+
+impl From<Impl> for Definition {
+ fn from(impl_: Impl) -> Self {
+ Definition::SelfType(impl_)
+ }
+}
+
+impl AsAssocItem for Definition {
+ fn as_assoc_item(self, db: &dyn hir::db::HirDatabase) -> Option<AssocItem> {
+ match self {
+ Definition::Function(it) => it.as_assoc_item(db),
+ Definition::Const(it) => it.as_assoc_item(db),
+ Definition::TypeAlias(it) => it.as_assoc_item(db),
+ _ => None,
+ }
+ }
+}
+
+impl From<AssocItem> for Definition {
+ fn from(assoc_item: AssocItem) -> Self {
+ match assoc_item {
+ AssocItem::Function(it) => Definition::Function(it),
+ AssocItem::Const(it) => Definition::Const(it),
+ AssocItem::TypeAlias(it) => Definition::TypeAlias(it),
+ }
+ }
+}
+
+impl From<PathResolution> for Definition {
+ fn from(path_resolution: PathResolution) -> Self {
+ match path_resolution {
+ PathResolution::Def(def) => def.into(),
+ PathResolution::Local(local) => Definition::Local(local),
+ PathResolution::TypeParam(par) => Definition::GenericParam(par.into()),
+ PathResolution::ConstParam(par) => Definition::GenericParam(par.into()),
+ PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def),
+ PathResolution::BuiltinAttr(attr) => Definition::BuiltinAttr(attr),
+ PathResolution::ToolModule(tool) => Definition::ToolModule(tool),
+ PathResolution::DeriveHelper(helper) => Definition::DeriveHelper(helper),
+ }
+ }
+}
+
+impl From<ModuleDef> for Definition {
+ fn from(def: ModuleDef) -> Self {
+ match def {
+ ModuleDef::Module(it) => Definition::Module(it),
+ ModuleDef::Function(it) => Definition::Function(it),
+ ModuleDef::Adt(it) => Definition::Adt(it),
+ ModuleDef::Variant(it) => Definition::Variant(it),
+ ModuleDef::Const(it) => Definition::Const(it),
+ ModuleDef::Static(it) => Definition::Static(it),
+ ModuleDef::Trait(it) => Definition::Trait(it),
+ ModuleDef::TypeAlias(it) => Definition::TypeAlias(it),
+ ModuleDef::Macro(it) => Definition::Macro(it),
+ ModuleDef::BuiltinType(it) => Definition::BuiltinType(it),
+ }
+ }
+}
+
+impl From<Definition> for Option<ItemInNs> {
+ fn from(def: Definition) -> Self {
+ let item = match def {
+ Definition::Module(it) => ModuleDef::Module(it),
+ Definition::Function(it) => ModuleDef::Function(it),
+ Definition::Adt(it) => ModuleDef::Adt(it),
+ Definition::Variant(it) => ModuleDef::Variant(it),
+ Definition::Const(it) => ModuleDef::Const(it),
+ Definition::Static(it) => ModuleDef::Static(it),
+ Definition::Trait(it) => ModuleDef::Trait(it),
+ Definition::TypeAlias(it) => ModuleDef::TypeAlias(it),
+ Definition::BuiltinType(it) => ModuleDef::BuiltinType(it),
+ _ => return None,
+ };
+ Some(ItemInNs::from(item))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
new file mode 100644
index 000000000..c8341fed1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
@@ -0,0 +1,185 @@
+//! See [`FamousDefs`].
+
+use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase};
+use hir::{Crate, Enum, Macro, Module, ScopeDef, Semantics, Trait};
+
+use crate::RootDatabase;
+
/// Helps with finding well-known things inside the standard library. This is
/// somewhat similar to the known paths infra inside hir, but it is different; we
/// want to make sure that IDE specific paths don't become interesting inside
/// the compiler itself as well.
///
/// Note that, by default, rust-analyzer tests **do not** include core or std
/// libraries. If you are writing tests for functionality using [`FamousDefs`],
/// you'd want to include minicore (see `test_utils::MiniCore`) declaration at
/// the start of your tests:
///
/// ```
/// //- minicore: iterator, ord, derive
/// ```
pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Crate);
+
// Method names intentionally mirror the item paths they look up
// (e.g. `core_cmp_Ord` -> `core::cmp::Ord`), hence the non-snake-case allow.
#[allow(non_snake_case)]
impl FamousDefs<'_, '_> {
    // --- Lang crates, resolved through the dependency graph of `self.1`. ---

    pub fn std(&self) -> Option<Crate> {
        self.find_lang_crate(LangCrateOrigin::Std)
    }

    pub fn core(&self) -> Option<Crate> {
        self.find_lang_crate(LangCrateOrigin::Core)
    }

    pub fn alloc(&self) -> Option<Crate> {
        self.find_lang_crate(LangCrateOrigin::Alloc)
    }

    pub fn test(&self) -> Option<Crate> {
        self.find_lang_crate(LangCrateOrigin::Test)
    }

    pub fn proc_macro(&self) -> Option<Crate> {
        self.find_lang_crate(LangCrateOrigin::ProcMacro)
    }

    // --- Individual well-known items. Paths use `:`-separated segments and
    // --- must start with a lang-crate name (see `find_def` below).

    pub fn core_cmp_Ord(&self) -> Option<Trait> {
        self.find_trait("core:cmp:Ord")
    }

    pub fn core_convert_From(&self) -> Option<Trait> {
        self.find_trait("core:convert:From")
    }

    pub fn core_convert_Into(&self) -> Option<Trait> {
        self.find_trait("core:convert:Into")
    }

    pub fn core_option_Option(&self) -> Option<Enum> {
        self.find_enum("core:option:Option")
    }

    pub fn core_result_Result(&self) -> Option<Enum> {
        self.find_enum("core:result:Result")
    }

    pub fn core_default_Default(&self) -> Option<Trait> {
        self.find_trait("core:default:Default")
    }

    pub fn core_iter_Iterator(&self) -> Option<Trait> {
        self.find_trait("core:iter:traits:iterator:Iterator")
    }

    pub fn core_iter_IntoIterator(&self) -> Option<Trait> {
        self.find_trait("core:iter:traits:collect:IntoIterator")
    }

    pub fn core_iter(&self) -> Option<Module> {
        self.find_module("core:iter")
    }

    pub fn core_ops_Deref(&self) -> Option<Trait> {
        self.find_trait("core:ops:Deref")
    }

    pub fn core_ops_DerefMut(&self) -> Option<Trait> {
        self.find_trait("core:ops:DerefMut")
    }

    pub fn core_convert_AsRef(&self) -> Option<Trait> {
        self.find_trait("core:convert:AsRef")
    }

    pub fn core_ops_ControlFlow(&self) -> Option<Enum> {
        self.find_enum("core:ops:ControlFlow")
    }

    pub fn core_ops_Drop(&self) -> Option<Trait> {
        self.find_trait("core:ops:Drop")
    }

    pub fn core_marker_Copy(&self) -> Option<Trait> {
        self.find_trait("core:marker:Copy")
    }

    pub fn core_macros_builtin_derive(&self) -> Option<Macro> {
        self.find_macro("core:macros:builtin:derive")
    }

    /// All lang crates visible from `self.1`, in a fixed order, skipping the
    /// ones that aren't dependencies of this crate.
    pub fn builtin_crates(&self) -> impl Iterator<Item = Crate> {
        IntoIterator::into_iter([
            self.std(),
            self.core(),
            self.alloc(),
            self.test(),
            self.proc_macro(),
        ])
        .flatten()
    }

    // --- Typed wrappers around `find_def`, filtering on the def's kind. ---

    fn find_trait(&self, path: &str) -> Option<Trait> {
        match self.find_def(path)? {
            hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it),
            _ => None,
        }
    }

    fn find_macro(&self, path: &str) -> Option<Macro> {
        match self.find_def(path)? {
            hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(it)) => Some(it),
            _ => None,
        }
    }

    fn find_enum(&self, path: &str) -> Option<Enum> {
        match self.find_def(path)? {
            hir::ScopeDef::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(it))) => Some(it),
            _ => None,
        }
    }

    fn find_module(&self, path: &str) -> Option<Module> {
        match self.find_def(path)? {
            hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(it)) => Some(it),
            _ => None,
        }
    }

    /// Finds the direct dependency of `self.1` whose origin matches `origin`.
    fn find_lang_crate(&self, origin: LangCrateOrigin) -> Option<Crate> {
        let krate = self.1;
        let db = self.0.db;
        let crate_graph = self.0.db.crate_graph();
        let res = krate
            .dependencies(db)
            .into_iter()
            .find(|dep| crate_graph[dep.krate.into()].origin == CrateOrigin::Lang(origin))?
            .krate;
        Some(res)
    }

    /// Resolves a `:`-separated path like `core:option:Option`: first segment
    /// picks the lang crate, middle segments walk child modules by name, and
    /// the last segment is looked up in the final module's item scope.
    fn find_def(&self, path: &str) -> Option<ScopeDef> {
        let db = self.0.db;
        let mut path = path.split(':');
        let trait_ = path.next_back()?;
        let lang_crate = path.next()?;
        let lang_crate = match LangCrateOrigin::from(lang_crate) {
            // First segment must name a lang crate, not an arbitrary one.
            LangCrateOrigin::Other => return None,
            lang_crate => lang_crate,
        };
        let std_crate = self.find_lang_crate(lang_crate)?;
        let mut module = std_crate.root_module(db);
        for segment in path {
            module = module.children(db).find_map(|child| {
                let name = child.name(db)?;
                if name.to_smol_str() == segment {
                    Some(child)
                } else {
                    None
                }
            })?;
        }
        let def =
            module.scope(db, None).into_iter().find(|(name, _def)| name.to_smol_str() == trait_)?.1;
        Some(def)
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
new file mode 100644
index 000000000..64dd2bb5f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
@@ -0,0 +1,7682 @@
+//! Generated by `sourcegen_lints`, do not edit by hand.
+
// NOTE(review): this file is generated by `sourcegen_lints`; to keep these
// docs permanently, add them to the generator's template instead.
/// A single rustc/clippy lint: its name and its human-readable description.
#[derive(Clone)]
pub struct Lint {
    pub label: &'static str,
    pub description: &'static str,
}
/// A lint group: the group's own `Lint` entry plus the labels of its members.
pub struct LintGroup {
    pub lint: Lint,
    pub children: &'static [&'static str],
}
+pub const DEFAULT_LINTS: &[Lint] = &[
+ Lint {
+ label: "absolute_paths_not_starting_with_crate",
+ description: r##"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"##,
+ },
+ Lint { label: "ambiguous_associated_items", description: r##"ambiguous associated items"## },
+ Lint { label: "anonymous_parameters", description: r##"detects anonymous parameters"## },
+ Lint { label: "arithmetic_overflow", description: r##"arithmetic operation overflows"## },
+ Lint {
+ label: "array_into_iter",
+ description: r##"detects calling `into_iter` on arrays in Rust 2015 and 2018"##,
+ },
+ Lint {
+ label: "asm_sub_register",
+ description: r##"using only a subset of a register for inline asm inputs"##,
+ },
+ Lint { label: "bad_asm_style", description: r##"incorrect use of inline assembly"## },
+ Lint {
+ label: "bare_trait_objects",
+ description: r##"suggest using `dyn Trait` for trait objects"##,
+ },
+ Lint {
+ label: "bindings_with_variant_name",
+ description: r##"detects pattern bindings with the same name as one of the matched variants"##,
+ },
+ Lint { label: "box_pointers", description: r##"use of owned (Box type) heap memory"## },
+ Lint {
+ label: "break_with_label_and_loop",
+ description: r##"`break` expression with label and unlabeled loop as value expression"##,
+ },
+ Lint {
+ label: "cenum_impl_drop_cast",
+ description: r##"a C-like enum implementing Drop is cast"##,
+ },
+ Lint {
+ label: "clashing_extern_declarations",
+ description: r##"detects when an extern fn has been declared with the same name but different types"##,
+ },
+ Lint {
+ label: "coherence_leak_check",
+ description: r##"distinct impls distinguished only by the leak-check code"##,
+ },
+ Lint {
+ label: "conflicting_repr_hints",
+ description: r##"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"##,
+ },
+ Lint {
+ label: "confusable_idents",
+ description: r##"detects visually confusable pairs between identifiers"##,
+ },
+ Lint {
+ label: "const_err",
+ description: r##"constant evaluation encountered erroneous expression"##,
+ },
+ Lint {
+ label: "const_evaluatable_unchecked",
+ description: r##"detects a generic constant is used in a type without a emitting a warning"##,
+ },
+ Lint {
+ label: "const_item_mutation",
+ description: r##"detects attempts to mutate a `const` item"##,
+ },
+ Lint { label: "dead_code", description: r##"detect unused, unexported items"## },
+ Lint { label: "deprecated", description: r##"detects use of deprecated items"## },
+ Lint {
+ label: "deprecated_in_future",
+ description: r##"detects use of items that will be deprecated in a future version"##,
+ },
+ Lint {
+ label: "deref_into_dyn_supertrait",
+ description: r##"`Deref` implementation usage with a supertrait trait object for output might be shadowed in the future"##,
+ },
+ Lint {
+ label: "deref_nullptr",
+ description: r##"detects when an null pointer is dereferenced"##,
+ },
+ Lint {
+ label: "drop_bounds",
+ description: r##"bounds of the form `T: Drop` are most likely incorrect"##,
+ },
+ Lint {
+ label: "dyn_drop",
+ description: r##"trait objects of the form `dyn Drop` are useless"##,
+ },
+ Lint {
+ label: "elided_lifetimes_in_paths",
+ description: r##"hidden lifetime parameters in types are deprecated"##,
+ },
+ Lint {
+ label: "ellipsis_inclusive_range_patterns",
+ description: r##"`...` range patterns are deprecated"##,
+ },
+ Lint {
+ label: "enum_intrinsics_non_enums",
+ description: r##"detects calls to `core::mem::discriminant` and `core::mem::variant_count` with non-enum types"##,
+ },
+ Lint {
+ label: "explicit_outlives_requirements",
+ description: r##"outlives requirements can be inferred"##,
+ },
+ Lint {
+ label: "exported_private_dependencies",
+ description: r##"public interface leaks type from a private dependency"##,
+ },
+ Lint { label: "forbidden_lint_groups", description: r##"applying forbid to lint-groups"## },
+ Lint {
+ label: "function_item_references",
+ description: r##"suggest casting to a function pointer when attempting to take references to function items"##,
+ },
+ Lint {
+ label: "future_incompatible",
+ description: r##"lint group for: forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, invalid-doc-attributes, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, unsupported-calling-conventions, deref-into-dyn-supertrait"##,
+ },
+ Lint {
+ label: "ill_formed_attribute_input",
+ description: r##"ill-formed attribute inputs that were previously accepted and used in practice"##,
+ },
+ Lint {
+ label: "illegal_floating_point_literal_pattern",
+ description: r##"floating-point literals cannot be used in patterns"##,
+ },
+ Lint {
+ label: "improper_ctypes",
+ description: r##"proper use of libc types in foreign modules"##,
+ },
+ Lint {
+ label: "improper_ctypes_definitions",
+ description: r##"proper use of libc types in foreign item definitions"##,
+ },
+ Lint {
+ label: "incomplete_features",
+ description: r##"incomplete features that may function improperly in some or all cases"##,
+ },
+ Lint { label: "incomplete_include", description: r##"trailing content in included file"## },
+ Lint {
+ label: "indirect_structural_match",
+ description: r##"constant used in pattern contains value of non-structural-match type in a field or a variant"##,
+ },
+ Lint {
+ label: "ineffective_unstable_trait_impl",
+ description: r##"detects `#[unstable]` on stable trait implementations for stable types"##,
+ },
+ Lint {
+ label: "inline_no_sanitize",
+ description: r##"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"##,
+ },
+ Lint {
+ label: "invalid_atomic_ordering",
+ description: r##"usage of invalid atomic ordering in atomic operations and memory fences"##,
+ },
+ Lint {
+ label: "invalid_doc_attributes",
+ description: r##"detects invalid `#[doc(...)]` attributes"##,
+ },
+ Lint {
+ label: "invalid_type_param_default",
+ description: r##"type parameter default erroneously allowed in invalid location"##,
+ },
+ Lint {
+ label: "invalid_value",
+ description: r##"an invalid value is being created (such as a null reference)"##,
+ },
+ Lint {
+ label: "irrefutable_let_patterns",
+ description: r##"detects irrefutable patterns in `if let` and `while let` statements"##,
+ },
+ Lint {
+ label: "keyword_idents",
+ description: r##"detects edition keywords being used as an identifier"##,
+ },
+ Lint { label: "large_assignments", description: r##"detects large moves or copies"## },
+ Lint {
+ label: "late_bound_lifetime_arguments",
+ description: r##"detects generic lifetime arguments in path segments with late bound lifetime parameters"##,
+ },
+ Lint {
+ label: "legacy_derive_helpers",
+ description: r##"detects derive helper attributes that are used before they are introduced"##,
+ },
+ Lint {
+ label: "macro_expanded_macro_exports_accessed_by_absolute_paths",
+ description: r##"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"##,
+ },
+ Lint {
+ label: "macro_use_extern_crate",
+ description: r##"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"##,
+ },
+ Lint {
+ label: "meta_variable_misuse",
+ description: r##"possible meta-variable misuse at macro definition"##,
+ },
+ Lint { label: "missing_abi", description: r##"No declared ABI for extern declaration"## },
+ Lint {
+ label: "missing_copy_implementations",
+ description: r##"detects potentially-forgotten implementations of `Copy`"##,
+ },
+ Lint {
+ label: "missing_debug_implementations",
+ description: r##"detects missing implementations of Debug"##,
+ },
+ Lint {
+ label: "missing_docs",
+ description: r##"detects missing documentation for public members"##,
+ },
+ Lint {
+ label: "missing_fragment_specifier",
+ description: r##"detects missing fragment specifiers in unused `macro_rules!` patterns"##,
+ },
+ Lint {
+ label: "mixed_script_confusables",
+ description: r##"detects Unicode scripts whose mixed script confusables codepoints are solely used"##,
+ },
+ Lint {
+ label: "must_not_suspend",
+ description: r##"use of a `#[must_not_suspend]` value across a yield point"##,
+ },
+ Lint {
+ label: "mutable_borrow_reservation_conflict",
+ description: r##"reservation of a two-phased borrow conflicts with other shared borrows"##,
+ },
+ Lint {
+ label: "mutable_transmutes",
+ description: r##"mutating transmuted &mut T from &T may cause undefined behavior"##,
+ },
+ Lint { label: "named_asm_labels", description: r##"named labels in inline assembly"## },
+ Lint {
+ label: "no_mangle_const_items",
+ description: r##"const items will not have their symbols exported"##,
+ },
+ Lint { label: "no_mangle_generic_items", description: r##"generic items must be mangled"## },
+ Lint { label: "non_ascii_idents", description: r##"detects non-ASCII identifiers"## },
+ Lint {
+ label: "non_camel_case_types",
+ description: r##"types, variants, traits and type parameters should have camel case names"##,
+ },
+ Lint {
+ label: "non_exhaustive_omitted_patterns",
+ description: r##"detect when patterns of types marked `non_exhaustive` are missed"##,
+ },
+ Lint {
+ label: "non_fmt_panics",
+ description: r##"detect single-argument panic!() invocations in which the argument is not a format string"##,
+ },
+ Lint {
+ label: "non_shorthand_field_patterns",
+ description: r##"using `Struct { x: x }` instead of `Struct { x }` in a pattern"##,
+ },
+ Lint {
+ label: "non_snake_case",
+ description: r##"variables, methods, functions, lifetime parameters and modules should have snake case names"##,
+ },
+ Lint {
+ label: "non_upper_case_globals",
+ description: r##"static constants should have uppercase identifiers"##,
+ },
+ Lint {
+ label: "nonstandard_style",
+ description: r##"lint group for: non-camel-case-types, non-snake-case, non-upper-case-globals"##,
+ },
+ Lint {
+ label: "nontrivial_structural_match",
+ description: r##"constant used in pattern of non-structural-match type and the constant's initializer expression contains values of non-structural-match types"##,
+ },
+ Lint {
+ label: "noop_method_call",
+ description: r##"detects the use of well-known noop methods"##,
+ },
+ Lint {
+ label: "order_dependent_trait_objects",
+ description: r##"trait-object types were treated as different depending on marker-trait order"##,
+ },
+ Lint { label: "overflowing_literals", description: r##"literal out of range for its type"## },
+ Lint {
+ label: "overlapping_range_endpoints",
+ description: r##"detects range patterns with overlapping endpoints"##,
+ },
+ Lint { label: "path_statements", description: r##"path statements with no effect"## },
+ Lint {
+ label: "patterns_in_fns_without_body",
+ description: r##"patterns in functions without body were erroneously allowed"##,
+ },
+ Lint {
+ label: "pointer_structural_match",
+ description: r##"pointers are not structural-match"##,
+ },
+ Lint {
+ label: "private_in_public",
+ description: r##"detect private items in public interfaces not caught by the old implementation"##,
+ },
+ Lint {
+ label: "proc_macro_back_compat",
+ description: r##"detects usage of old versions of certain proc-macro crates"##,
+ },
+ Lint {
+ label: "proc_macro_derive_resolution_fallback",
+ description: r##"detects proc macro derives using inaccessible names from parent modules"##,
+ },
+ Lint {
+ label: "pub_use_of_private_extern_crate",
+ description: r##"detect public re-exports of private extern crates"##,
+ },
+ Lint {
+ label: "redundant_semicolons",
+ description: r##"detects unnecessary trailing semicolons"##,
+ },
+ Lint {
+ label: "renamed_and_removed_lints",
+ description: r##"lints that have been renamed or removed"##,
+ },
+ Lint {
+ label: "rust_2018_compatibility",
+ description: r##"lint group for: keyword-idents, anonymous-parameters, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate"##,
+ },
+ Lint {
+ label: "rust_2018_idioms",
+ description: r##"lint group for: bare-trait-objects, unused-extern-crates, ellipsis-inclusive-range-patterns, elided-lifetimes-in-paths, explicit-outlives-requirements"##,
+ },
+ Lint {
+ label: "rust_2021_compatibility",
+ description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prelude-collisions, rust-2021-prefixes-incompatible-syntax, array-into-iter, non-fmt-panics"##,
+ },
+ Lint {
+ label: "rust_2021_incompatible_closure_captures",
+ description: r##"detects closures affected by Rust 2021 changes"##,
+ },
+ Lint {
+ label: "rust_2021_incompatible_or_patterns",
+ description: r##"detects usage of old versions of or-patterns"##,
+ },
+ Lint {
+ label: "rust_2021_prefixes_incompatible_syntax",
+ description: r##"identifiers that will be parsed as a prefix in Rust 2021"##,
+ },
+ Lint {
+ label: "rust_2021_prelude_collisions",
+ description: r##"detects the usage of trait methods which are ambiguous with traits added to the prelude in future editions"##,
+ },
+ Lint {
+ label: "semicolon_in_expressions_from_macros",
+ description: r##"trailing semicolon in macro body used as expression"##,
+ },
+ Lint {
+ label: "single_use_lifetimes",
+ description: r##"detects lifetime parameters that are only used once"##,
+ },
+ Lint {
+ label: "soft_unstable",
+ description: r##"a feature gate that doesn't break dependent crates"##,
+ },
+ Lint {
+ label: "stable_features",
+ description: r##"stable features found in `#[feature]` directive"##,
+ },
+ Lint {
+ label: "temporary_cstring_as_ptr",
+ description: r##"detects getting the inner pointer of a temporary `CString`"##,
+ },
+ Lint {
+ label: "text_direction_codepoint_in_comment",
+ description: r##"invisible directionality-changing codepoints in comment"##,
+ },
+ Lint {
+ label: "text_direction_codepoint_in_literal",
+ description: r##"detect special Unicode codepoints that affect the visual representation of text on screen, changing the direction in which text flows"##,
+ },
+ Lint {
+ label: "trivial_bounds",
+ description: r##"these bounds don't depend on an type parameters"##,
+ },
+ Lint {
+ label: "trivial_casts",
+ description: r##"detects trivial casts which could be removed"##,
+ },
+ Lint {
+ label: "trivial_numeric_casts",
+ description: r##"detects trivial casts of numeric types which could be removed"##,
+ },
+ Lint {
+ label: "type_alias_bounds",
+ description: r##"bounds in type aliases are not enforced"##,
+ },
+ Lint {
+ label: "tyvar_behind_raw_pointer",
+ description: r##"raw pointer to an inference variable"##,
+ },
+ Lint {
+ label: "unaligned_references",
+ description: r##"detects unaligned references to fields of packed structs"##,
+ },
+ Lint {
+ label: "uncommon_codepoints",
+ description: r##"detects uncommon Unicode codepoints in identifiers"##,
+ },
+ Lint {
+ label: "unconditional_panic",
+ description: r##"operation will cause a panic at runtime"##,
+ },
+ Lint {
+ label: "unconditional_recursion",
+ description: r##"functions that cannot return without calling themselves"##,
+ },
+ Lint { label: "uninhabited_static", description: r##"uninhabited static"## },
+ Lint {
+ label: "unknown_crate_types",
+ description: r##"unknown crate type found in `#[crate_type]` directive"##,
+ },
+ Lint { label: "unknown_lints", description: r##"unrecognized lint attribute"## },
+ Lint {
+ label: "unnameable_test_items",
+ description: r##"detects an item that cannot be named being marked as `#[test_case]`"##,
+ },
+ Lint { label: "unreachable_code", description: r##"detects unreachable code paths"## },
+ Lint { label: "unreachable_patterns", description: r##"detects unreachable patterns"## },
+ Lint {
+ label: "unreachable_pub",
+ description: r##"`pub` items not reachable from crate root"##,
+ },
+ Lint { label: "unsafe_code", description: r##"usage of `unsafe` code"## },
+ Lint {
+ label: "unsafe_op_in_unsafe_fn",
+ description: r##"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"##,
+ },
+ Lint {
+ label: "unstable_features",
+ description: r##"enabling unstable features (deprecated. do not use)"##,
+ },
+ Lint {
+ label: "unstable_name_collisions",
+ description: r##"detects name collision with an existing but unstable method"##,
+ },
+ Lint {
+ label: "unsupported_calling_conventions",
+ description: r##"use of unsupported calling convention"##,
+ },
+ Lint {
+ label: "unsupported_naked_functions",
+ description: r##"unsupported naked function definitions"##,
+ },
+ Lint {
+ label: "unused",
+ description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons"##,
+ },
+ Lint {
+ label: "unused_allocation",
+ description: r##"detects unnecessary allocations that can be eliminated"##,
+ },
+ Lint {
+ label: "unused_assignments",
+ description: r##"detect assignments that will never be read"##,
+ },
+ Lint {
+ label: "unused_attributes",
+ description: r##"detects attributes that were not used by the compiler"##,
+ },
+ Lint { label: "unused_braces", description: r##"unnecessary braces around an expression"## },
+ Lint {
+ label: "unused_comparisons",
+ description: r##"comparisons made useless by limits of the types involved"##,
+ },
+ Lint {
+ label: "unused_crate_dependencies",
+ description: r##"crate dependencies that are never used"##,
+ },
+ Lint {
+ label: "unused_doc_comments",
+ description: r##"detects doc comments that aren't used by rustdoc"##,
+ },
+ Lint { label: "unused_extern_crates", description: r##"extern crates that are never used"## },
+ Lint {
+ label: "unused_features",
+ description: r##"unused features found in crate-level `#[feature]` directives"##,
+ },
+ Lint {
+ label: "unused_import_braces",
+ description: r##"unnecessary braces around an imported item"##,
+ },
+ Lint { label: "unused_imports", description: r##"imports that are never used"## },
+ Lint { label: "unused_labels", description: r##"detects labels that are never used"## },
+ Lint {
+ label: "unused_lifetimes",
+ description: r##"detects lifetime parameters that are never used"##,
+ },
+ Lint { label: "unused_macros", description: r##"detects macros that were not used"## },
+ Lint {
+ label: "unused_must_use",
+ description: r##"unused result of a type flagged as `#[must_use]`"##,
+ },
+ Lint {
+ label: "unused_mut",
+ description: r##"detect mut variables which don't need to be mutable"##,
+ },
+ Lint {
+ label: "unused_parens",
+ description: r##"`if`, `match`, `while` and `return` do not need parentheses"##,
+ },
+ Lint {
+ label: "unused_qualifications",
+ description: r##"detects unnecessarily qualified names"##,
+ },
+ Lint {
+ label: "unused_results",
+ description: r##"unused result of an expression in a statement"##,
+ },
+ Lint { label: "unused_unsafe", description: r##"unnecessary use of an `unsafe` block"## },
+ Lint {
+ label: "unused_variables",
+ description: r##"detect variables which are not used in any way"##,
+ },
+ Lint {
+ label: "useless_deprecated",
+ description: r##"detects deprecation attributes with no effect"##,
+ },
+ Lint {
+ label: "variant_size_differences",
+ description: r##"detects enums with widely varying variant sizes"##,
+ },
+ Lint {
+ label: "warnings",
+ description: r##"mass-change the level for lints which produce warnings"##,
+ },
+ Lint {
+ label: "warnings",
+ description: r##"lint group for: all lints that are set to issue warnings"##,
+ },
+ Lint {
+ label: "where_clauses_object_safety",
+ description: r##"checks the object safety of where clauses"##,
+ },
+ Lint {
+ label: "while_true",
+ description: r##"suggest using `loop { }` instead of `while true { }`"##,
+ },
+];
+/// The built-in rustc lint *groups*, each paired with the individual lints it
+/// expands to.
+///
+/// Note the two naming conventions visible in the data: `children` entries use
+/// the underscore form (as written in `#[allow(..)]`/`#[warn(..)]` attributes),
+/// while the `description` text lists the same lints in the dashed form (as
+/// printed by `rustc -W help`). This table appears to be machine-generated
+/// from rustc's lint listing — presumably by a sourcegen step; do not edit by
+/// hand without confirming the generator.
+pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
+ LintGroup {
+ lint: Lint {
+ label: "future_incompatible",
+ description: r##"lint group for: forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, invalid-doc-attributes, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, unsupported-calling-conventions, deref-into-dyn-supertrait"##,
+ },
+ children: &[
+ "forbidden_lint_groups",
+ "illegal_floating_point_literal_pattern",
+ "private_in_public",
+ "pub_use_of_private_extern_crate",
+ "invalid_type_param_default",
+ "const_err",
+ "unaligned_references",
+ "patterns_in_fns_without_body",
+ "missing_fragment_specifier",
+ "late_bound_lifetime_arguments",
+ "order_dependent_trait_objects",
+ "coherence_leak_check",
+ "unstable_name_collisions",
+ "where_clauses_object_safety",
+ "proc_macro_derive_resolution_fallback",
+ "macro_expanded_macro_exports_accessed_by_absolute_paths",
+ "ill_formed_attribute_input",
+ "conflicting_repr_hints",
+ "ambiguous_associated_items",
+ "mutable_borrow_reservation_conflict",
+ "indirect_structural_match",
+ "pointer_structural_match",
+ "nontrivial_structural_match",
+ "soft_unstable",
+ "cenum_impl_drop_cast",
+ "const_evaluatable_unchecked",
+ "uninhabited_static",
+ "unsupported_naked_functions",
+ "invalid_doc_attributes",
+ "semicolon_in_expressions_from_macros",
+ "legacy_derive_helpers",
+ "proc_macro_back_compat",
+ "unsupported_calling_conventions",
+ "deref_into_dyn_supertrait",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "nonstandard_style",
+ description: r##"lint group for: non-camel-case-types, non-snake-case, non-upper-case-globals"##,
+ },
+ children: &["non_camel_case_types", "non_snake_case", "non_upper_case_globals"],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "rust_2018_compatibility",
+ description: r##"lint group for: keyword-idents, anonymous-parameters, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate"##,
+ },
+ children: &[
+ "keyword_idents",
+ "anonymous_parameters",
+ "tyvar_behind_raw_pointer",
+ "absolute_paths_not_starting_with_crate",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "rust_2018_idioms",
+ description: r##"lint group for: bare-trait-objects, unused-extern-crates, ellipsis-inclusive-range-patterns, elided-lifetimes-in-paths, explicit-outlives-requirements"##,
+ },
+ children: &[
+ "bare_trait_objects",
+ "unused_extern_crates",
+ "ellipsis_inclusive_range_patterns",
+ "elided_lifetimes_in_paths",
+ "explicit_outlives_requirements",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "rust_2021_compatibility",
+ description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prelude-collisions, rust-2021-prefixes-incompatible-syntax, array-into-iter, non-fmt-panics"##,
+ },
+ children: &[
+ "ellipsis_inclusive_range_patterns",
+ "bare_trait_objects",
+ "rust_2021_incompatible_closure_captures",
+ "rust_2021_incompatible_or_patterns",
+ "rust_2021_prelude_collisions",
+ "rust_2021_prefixes_incompatible_syntax",
+ "array_into_iter",
+ "non_fmt_panics",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "unused",
+ description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons"##,
+ },
+ children: &[
+ "unused_imports",
+ "unused_variables",
+ "unused_assignments",
+ "dead_code",
+ "unused_mut",
+ "unreachable_code",
+ "unreachable_patterns",
+ "unused_must_use",
+ "unused_unsafe",
+ "path_statements",
+ "unused_attributes",
+ "unused_macros",
+ "unused_allocation",
+ "unused_doc_comments",
+ "unused_extern_crates",
+ "unused_features",
+ "unused_labels",
+ "unused_parens",
+ "unused_braces",
+ ],
+ },
+ // `warnings` is a special pseudo-group: it has no fixed member list
+ // (it covers whatever lints are currently set to warn), hence the empty
+ // `children` slice here.
+ LintGroup {
+ lint: Lint {
+ label: "warnings",
+ description: r##"lint group for: all lints that are set to issue warnings"##,
+ },
+ children: &[],
+ },
+];
+
+/// Lints belonging to rustdoc's `rustdoc::` tool-lint namespace.
+///
+/// The list is flat and alphabetically ordered by label. Note that the
+/// `rustdoc::all` *group* is also included here as a plain entry (its
+/// description enumerates the member lints in dashed form); the structured
+/// group/children relationship lives separately in `RUSTDOC_LINT_GROUPS`.
+pub const RUSTDOC_LINTS: &[Lint] = &[
+ Lint {
+ label: "rustdoc::all",
+ description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::missing-doc-code-examples, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs"##,
+ },
+ Lint { label: "rustdoc::bare_urls", description: r##"detects URLs that are not hyperlinks"## },
+ Lint {
+ label: "rustdoc::broken_intra_doc_links",
+ description: r##"failures in resolving intra-doc link targets"##,
+ },
+ Lint {
+ label: "rustdoc::invalid_codeblock_attributes",
+ description: r##"codeblock attribute looks a lot like a known one"##,
+ },
+ Lint {
+ label: "rustdoc::invalid_html_tags",
+ description: r##"detects invalid HTML tags in doc comments"##,
+ },
+ Lint {
+ label: "rustdoc::invalid_rust_codeblocks",
+ description: r##"codeblock could not be parsed as valid Rust or is empty"##,
+ },
+ Lint {
+ label: "rustdoc::missing_crate_level_docs",
+ description: r##"detects crates with no crate-level documentation"##,
+ },
+ Lint {
+ label: "rustdoc::missing_doc_code_examples",
+ description: r##"detects publicly-exported items without code samples in their documentation"##,
+ },
+ Lint {
+ label: "rustdoc::private_doc_tests",
+ description: r##"detects code samples in docs of private items not documented by rustdoc"##,
+ },
+ Lint {
+ label: "rustdoc::private_intra_doc_links",
+ description: r##"linking from a public item to a private one"##,
+ },
+];
+/// The rustdoc tool-lint groups and their member lints.
+///
+/// Currently a single group, `rustdoc::all`. As with the rustc tables above,
+/// `children` entries use the underscore attribute spelling while the group's
+/// `description` lists the same lints in dashed form.
+pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &[LintGroup {
+ lint: Lint {
+ label: "rustdoc::all",
+ description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::missing-doc-code-examples, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs"##,
+ },
+ children: &[
+ "rustdoc::broken_intra_doc_links",
+ "rustdoc::private_intra_doc_links",
+ "rustdoc::missing_doc_code_examples",
+ "rustdoc::private_doc_tests",
+ "rustdoc::invalid_codeblock_attributes",
+ "rustdoc::invalid_rust_codeblocks",
+ "rustdoc::invalid_html_tags",
+ "rustdoc::bare_urls",
+ "rustdoc::missing_crate_level_docs",
+ ],
+}];
+
+pub const FEATURES: &[Lint] = &[
+ Lint {
+ label: "abi_c_cmse_nonsecure_call",
+ description: r##"# `abi_c_cmse_nonsecure_call`
+
+The tracking issue for this feature is: [#81391]
+
+[#81391]: https://github.com/rust-lang/rust/issues/81391
+
+------------------------
+
+The [TrustZone-M
+feature](https://developer.arm.com/documentation/100690/latest/) is available
+for targets with the Armv8-M architecture profile (`thumbv8m` in their target
+name).
+LLVM, the Rust compiler and the linker are providing
+[support](https://developer.arm.com/documentation/ecm0359818/latest/) for the
+TrustZone-M feature.
+
+One of the things provided, with this unstable feature, is the
+`C-cmse-nonsecure-call` function ABI. This ABI is used on function pointers to
+non-secure code to mark a non-secure function call (see [section
+5.5](https://developer.arm.com/documentation/ecm0359818/latest/) for details).
+
+With this ABI, the compiler will do the following to perform the call:
+* save registers needed after the call to Secure memory
+* clear all registers that might contain confidential information
+* clear the Least Significant Bit of the function address
+* branches using the BLXNS instruction
+
+To avoid using the non-secure stack, the compiler will constrain the number and
+type of parameters/return value.
+
+The `extern "C-cmse-nonsecure-call"` ABI is otherwise equivalent to the
+`extern "C"` ABI.
+
+<!-- NOTE(ignore) this example is specific to thumbv8m targets -->
+
+``` rust,ignore
+#![no_std]
+#![feature(abi_c_cmse_nonsecure_call)]
+
+#[no_mangle]
+pub fn call_nonsecure_function(addr: usize) -> u32 {
+ let non_secure_function =
+ unsafe { core::mem::transmute::<usize, extern "C-cmse-nonsecure-call" fn() -> u32>(addr) };
+ non_secure_function()
+}
+```
+
+``` text
+$ rustc --emit asm --crate-type lib --target thumbv8m.main-none-eabi function.rs
+
+call_nonsecure_function:
+ .fnstart
+ .save {r7, lr}
+ push {r7, lr}
+ .setfp r7, sp
+ mov r7, sp
+ .pad #16
+ sub sp, #16
+ str r0, [sp, #12]
+ ldr r0, [sp, #12]
+ str r0, [sp, #8]
+ b .LBB0_1
+.LBB0_1:
+ ldr r0, [sp, #8]
+ push.w {r4, r5, r6, r7, r8, r9, r10, r11}
+ bic r0, r0, #1
+ mov r1, r0
+ mov r2, r0
+ mov r3, r0
+ mov r4, r0
+ mov r5, r0
+ mov r6, r0
+ mov r7, r0
+ mov r8, r0
+ mov r9, r0
+ mov r10, r0
+ mov r11, r0
+ mov r12, r0
+ msr apsr_nzcvq, r0
+ blxns r0
+ pop.w {r4, r5, r6, r7, r8, r9, r10, r11}
+ str r0, [sp, #4]
+ b .LBB0_2
+.LBB0_2:
+ ldr r0, [sp, #4]
+ add sp, #16
+ pop {r7, pc}
+```
+"##,
+ },
+ Lint {
+ label: "abi_msp430_interrupt",
+ description: r##"# `abi_msp430_interrupt`
+
+The tracking issue for this feature is: [#38487]
+
+[#38487]: https://github.com/rust-lang/rust/issues/38487
+
+------------------------
+
+In the MSP430 architecture, interrupt handlers have a special calling
+convention. You can use the `"msp430-interrupt"` ABI to make the compiler apply
+the right calling convention to the interrupt handlers you define.
+
+<!-- NOTE(ignore) this example is specific to the msp430 target -->
+
+``` rust,ignore
+#![feature(abi_msp430_interrupt)]
+#![no_std]
+
+// Place the interrupt handler at the appropriate memory address
+// (Alternatively, you can use `#[used]` and remove `pub` and `#[no_mangle]`)
+#[link_section = "__interrupt_vector_10"]
+#[no_mangle]
+pub static TIM0_VECTOR: extern "msp430-interrupt" fn() = tim0;
+
+// The interrupt handler
+extern "msp430-interrupt" fn tim0() {
+ // ..
+}
+```
+
+``` text
+$ msp430-elf-objdump -CD ./target/msp430/release/app
+Disassembly of section __interrupt_vector_10:
+
+0000fff2 <TIM0_VECTOR>:
+ fff2: 00 c0 interrupt service routine at 0xc000
+
+Disassembly of section .text:
+
+0000c000 <int::tim0>:
+ c000: 00 13 reti
+```
+"##,
+ },
+ Lint {
+ label: "abi_ptx",
+ description: r##"# `abi_ptx`
+
+The tracking issue for this feature is: [#38788]
+
+[#38788]: https://github.com/rust-lang/rust/issues/38788
+
+------------------------
+
+When emitting PTX code, all vanilla Rust functions (`fn`) get translated to
+"device" functions. These functions are *not* callable from the host via the
+CUDA API so a crate with only device functions is not too useful!
+
+OTOH, "global" functions *can* be called by the host; you can think of them
+as the real public API of your crate. To produce a global function use the
+`"ptx-kernel"` ABI.
+
+<!-- NOTE(ignore) this example is specific to the nvptx targets -->
+
+``` rust,ignore
+#![feature(abi_ptx)]
+#![no_std]
+
+pub unsafe extern "ptx-kernel" fn global_function() {
+ device_function();
+}
+
+pub fn device_function() {
+ // ..
+}
+```
+
+``` text
+$ xargo rustc --target nvptx64-nvidia-cuda --release -- --emit=asm
+
+$ cat $(find -name '*.s')
+//
+// Generated by LLVM NVPTX Back-End
+//
+
+.version 3.2
+.target sm_20
+.address_size 64
+
+ // .globl _ZN6kernel15global_function17h46111ebe6516b382E
+
+.visible .entry _ZN6kernel15global_function17h46111ebe6516b382E()
+{
+
+
+ ret;
+}
+
+ // .globl _ZN6kernel15device_function17hd6a0e4993bbf3f78E
+.visible .func _ZN6kernel15device_function17hd6a0e4993bbf3f78E()
+{
+
+
+ ret;
+}
+```
+"##,
+ },
+ Lint {
+ label: "abi_thiscall",
+ description: r##"# `abi_thiscall`
+
+The tracking issue for this feature is: [#42202]
+
+[#42202]: https://github.com/rust-lang/rust/issues/42202
+
+------------------------
+
+The MSVC ABI on x86 Windows uses the `thiscall` calling convention for C++
+instance methods by default; it is identical to the usual (C) calling
+convention on x86 Windows except that the first parameter of the method,
+the `this` pointer, is passed in the ECX register.
+"##,
+ },
+ Lint {
+ label: "allocator_api",
+ description: r##"# `allocator_api`
+
+The tracking issue for this feature is [#32838]
+
+[#32838]: https://github.com/rust-lang/rust/issues/32838
+
+------------------------
+
+Sometimes you want the memory for one collection to use a different
+allocator than the memory for another collection. In this case,
+replacing the global allocator is not a workable option. Instead,
+you need to pass in an instance of an `AllocRef` to each collection
+for which you want a custom allocator.
+
+TBD
+"##,
+ },
+ Lint {
+ label: "allocator_internals",
+ description: r##"# `allocator_internals`
+
+This feature does not have a tracking issue, it is an unstable implementation
+detail of the `global_allocator` feature not intended for use outside the
+compiler.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "arbitrary_enum_discriminant",
+ description: r##"# `arbitrary_enum_discriminant`
+
+The tracking issue for this feature is: [#60553]
+
+[#60553]: https://github.com/rust-lang/rust/issues/60553
+
+------------------------
+
+The `arbitrary_enum_discriminant` feature permits tuple-like and
+struct-like enum variants with `#[repr(<int-type>)]` to have explicit discriminants.
+
+## Examples
+
+```rust
+#![feature(arbitrary_enum_discriminant)]
+
+#[allow(dead_code)]
+#[repr(u8)]
+enum Enum {
+ Unit = 3,
+ Tuple(u16) = 2,
+ Struct {
+ a: u8,
+ b: u16,
+ } = 1,
+}
+
+impl Enum {
+ fn tag(&self) -> u8 {
+ unsafe { *(self as *const Self as *const u8) }
+ }
+}
+
+assert_eq!(3, Enum::Unit.tag());
+assert_eq!(2, Enum::Tuple(5).tag());
+assert_eq!(1, Enum::Struct{a: 7, b: 11}.tag());
+```
+"##,
+ },
+ Lint {
+ label: "asm_const",
+ description: r##"# `asm_const`
+
+The tracking issue for this feature is: [#72016]
+
+[#72016]: https://github.com/rust-lang/rust/issues/72016
+
+------------------------
+
+This feature adds a `const <expr>` operand type to `asm!` and `global_asm!`.
+- `<expr>` must be an integer constant expression.
+- The value of the expression is formatted as a string and substituted directly into the asm template string.
+"##,
+ },
+ Lint {
+ label: "asm_experimental_arch",
+ description: r##"# `asm_experimental_arch`
+
+The tracking issue for this feature is: [#72016]
+
+[#72016]: https://github.com/rust-lang/rust/issues/72016
+
+------------------------
+
+This feature tracks `asm!` and `global_asm!` support for the following architectures:
+- NVPTX
+- PowerPC
+- Hexagon
+- MIPS32r2 and MIPS64r2
+- wasm32
+- BPF
+- SPIR-V
+- AVR
+
+## Register classes
+
+| Architecture | Register class | Registers | LLVM constraint code |
+| ------------ | -------------- | ---------------------------------- | -------------------- |
+| MIPS | `reg` | `$[2-25]` | `r` |
+| MIPS | `freg` | `$f[0-31]` | `f` |
+| NVPTX | `reg16` | None\* | `h` |
+| NVPTX | `reg32` | None\* | `r` |
+| NVPTX | `reg64` | None\* | `l` |
+| Hexagon | `reg` | `r[0-28]` | `r` |
+| PowerPC | `reg` | `r[0-31]` | `r` |
+| PowerPC | `reg_nonzero` | `r[1-31]` | `b` |
+| PowerPC | `freg` | `f[0-31]` | `f` |
+| PowerPC | `cr` | `cr[0-7]`, `cr` | Only clobbers |
+| PowerPC | `xer` | `xer` | Only clobbers |
+| wasm32 | `local` | None\* | `r` |
+| BPF | `reg` | `r[0-10]` | `r` |
+| BPF | `wreg` | `w[0-10]` | `w` |
+| AVR | `reg` | `r[2-25]`, `XH`, `XL`, `ZH`, `ZL` | `r` |
+| AVR | `reg_upper` | `r[16-25]`, `XH`, `XL`, `ZH`, `ZL` | `d` |
+| AVR | `reg_pair` | `r3r2` .. `r25r24`, `X`, `Z` | `r` |
+| AVR | `reg_iw` | `r25r24`, `X`, `Z` | `w` |
+| AVR | `reg_ptr` | `X`, `Z` | `e` |
+
+> **Notes**:
+> - NVPTX doesn't have a fixed register set, so named registers are not supported.
+>
+> - WebAssembly doesn't have registers, so named registers are not supported.
+
+# Register class supported types
+
+| Architecture | Register class | Target feature | Allowed types |
+| ------------ | ------------------------------- | -------------- | --------------------------------------- |
+| MIPS32 | `reg` | None | `i8`, `i16`, `i32`, `f32` |
+| MIPS32 | `freg` | None | `f32`, `f64` |
+| MIPS64 | `reg` | None | `i8`, `i16`, `i32`, `i64`, `f32`, `f64` |
+| MIPS64 | `freg` | None | `f32`, `f64` |
+| NVPTX | `reg16` | None | `i8`, `i16` |
+| NVPTX | `reg32` | None | `i8`, `i16`, `i32`, `f32` |
+| NVPTX | `reg64` | None | `i8`, `i16`, `i32`, `f32`, `i64`, `f64` |
+| Hexagon | `reg` | None | `i8`, `i16`, `i32`, `f32` |
+| PowerPC | `reg` | None | `i8`, `i16`, `i32` |
+| PowerPC | `reg_nonzero` | None | `i8`, `i16`, `i32` |
+| PowerPC | `freg` | None | `f32`, `f64` |
+| PowerPC | `cr` | N/A | Only clobbers |
+| PowerPC | `xer` | N/A | Only clobbers |
+| wasm32 | `local` | None | `i8` `i16` `i32` `i64` `f32` `f64` |
+| BPF | `reg` | None | `i8` `i16` `i32` `i64` |
+| BPF | `wreg` | `alu32` | `i8` `i16` `i32` |
+| AVR | `reg`, `reg_upper` | None | `i8` |
+| AVR | `reg_pair`, `reg_iw`, `reg_ptr` | None | `i16` |
+
+## Register aliases
+
+| Architecture | Base register | Aliases |
+| ------------ | ------------- | --------- |
+| Hexagon | `r29` | `sp` |
+| Hexagon | `r30` | `fr` |
+| Hexagon | `r31` | `lr` |
+| BPF | `r[0-10]` | `w[0-10]` |
+| AVR | `XH` | `r27` |
+| AVR | `XL` | `r26` |
+| AVR | `ZH` | `r31` |
+| AVR | `ZL` | `r30` |
+
+## Unsupported registers
+
+| Architecture | Unsupported register | Reason |
+| ------------ | --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| All | `sp` | The stack pointer must be restored to its original value at the end of an asm code block. |
+| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR) | The frame pointer cannot be used as an input or output. |
+| All | `r19` (Hexagon) | This is used internally by LLVM as a "base pointer" for functions with complex stack frames. |
+| MIPS | `$0` or `$zero` | This is a constant zero register which can't be modified. |
+| MIPS | `$1` or `$at` | Reserved for assembler. |
+| MIPS | `$26`/`$k0`, `$27`/`$k1` | OS-reserved registers. |
+| MIPS | `$28`/`$gp` | Global pointer cannot be used as inputs or outputs. |
+| MIPS | `$ra` | Return address cannot be used as inputs or outputs. |
+| Hexagon | `lr` | This is the link register which cannot be used as an input or output. |
+| AVR | `r0`, `r1`, `r1r0` | Due to an issue in LLVM, the `r0` and `r1` registers cannot be used as inputs or outputs. If modified, they must be restored to their original values before the end of the block. |
+
+## Template modifiers
+
+| Architecture | Register class | Modifier | Example output | LLVM modifier |
+| ------------ | -------------- | -------- | -------------- | ------------- |
+| MIPS | `reg` | None | `$2` | None |
+| MIPS | `freg` | None | `$f0` | None |
+| NVPTX | `reg16` | None | `rs0` | None |
+| NVPTX | `reg32` | None | `r0` | None |
+| NVPTX | `reg64` | None | `rd0` | None |
+| Hexagon | `reg` | None | `r0` | None |
+| PowerPC | `reg` | None | `0` | None |
+| PowerPC | `reg_nonzero` | None | `3` | `b` |
+| PowerPC | `freg` | None | `0` | None |
+
+# Flags covered by `preserves_flags`
+
+These flags registers must be restored upon exiting the asm block if the `preserves_flags` option is set:
+- AVR
+ - The status register `SREG`.
+"##,
+ },
+ Lint {
+ label: "asm_sym",
+ description: r##"# `asm_sym`
+
+The tracking issue for this feature is: [#72016]
+
+[#72016]: https://github.com/rust-lang/rust/issues/72016
+
+------------------------
+
+This feature adds a `sym <path>` operand type to `asm!` and `global_asm!`.
+- `<path>` must refer to a `fn` or `static`.
+- A mangled symbol name referring to the item is substituted into the asm template string.
+- The substituted string does not include any modifiers (e.g. GOT, PLT, relocations, etc).
+- `<path>` is allowed to point to a `#[thread_local]` static, in which case the asm code can combine the symbol with relocations (e.g. `@plt`, `@TPOFF`) to read from thread-local data.
+"##,
+ },
+ Lint {
+ label: "asm_unwind",
+ description: r##"# `asm_unwind`
+
+The tracking issue for this feature is: [#72016]
+
+[#72016]: https://github.com/rust-lang/rust/issues/72016
+
+------------------------
+
+This feature adds a `may_unwind` option to `asm!` which allows an `asm` block to unwind stack and be part of the stack unwinding process. This option is only supported by the LLVM backend right now.
+"##,
+ },
+ Lint {
+ label: "auto_traits",
+ description: r##"# `auto_traits`
+
+The tracking issue for this feature is [#13231]
+
+[#13231]: https://github.com/rust-lang/rust/issues/13231
+
+----
+
+The `auto_traits` feature gate allows you to define auto traits.
+
+Auto traits, like [`Send`] or [`Sync`] in the standard library, are marker traits
+that are automatically implemented for every type, unless the type, or a type it contains,
+has explicitly opted out via a negative impl. (Negative impls are separately controlled
+by the `negative_impls` feature.)
+
+[`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html
+[`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
+
+```rust,ignore (partial-example)
+impl !Trait for Type {}
+```
+
+Example:
+
+```rust
+#![feature(negative_impls)]
+#![feature(auto_traits)]
+
+auto trait Valid {}
+
+struct True;
+struct False;
+
+impl !Valid for False {}
+
+struct MaybeValid<T>(T);
+
+fn must_be_valid<T: Valid>(_t: T) { }
+
+fn main() {
+ // works
+ must_be_valid( MaybeValid(True) );
+
+ // compiler error - trait bound not satisfied
+ // must_be_valid( MaybeValid(False) );
+}
+```
+
+## Automatic trait implementations
+
+When a type is declared as an `auto trait`, we will automatically
+create impls for every struct/enum/union, unless an explicit impl is
+provided. These automatic impls contain a where clause for each field
+of the form `T: AutoTrait`, where `T` is the type of the field and
+`AutoTrait` is the auto trait in question. As an example, consider the
+struct `List` and the auto trait `Send`:
+
+```rust
+struct List<T> {
+ data: T,
+ next: Option<Box<List<T>>>,
+}
+```
+
+Presuming that there is no explicit impl of `Send` for `List`, the
+compiler will supply an automatic impl of the form:
+
+```rust
+struct List<T> {
+ data: T,
+ next: Option<Box<List<T>>>,
+}
+
+unsafe impl<T> Send for List<T>
+where
+ T: Send, // from the field `data`
+ Option<Box<List<T>>>: Send, // from the field `next`
+{ }
+```
+
+Explicit impls may be either positive or negative. They take the form:
+
+```rust,ignore (partial-example)
+impl<...> AutoTrait for StructName<..> { }
+impl<...> !AutoTrait for StructName<..> { }
+```
+
+## Coinduction: Auto traits permit cyclic matching
+
+Unlike ordinary trait matching, auto traits are **coinductive**. This
+means, in short, that cycles which occur in trait matching are
+considered ok. As an example, consider the recursive struct `List`
+introduced in the previous section. In attempting to determine whether
+`List: Send`, we would wind up in a cycle: to apply the impl, we must
+show that `Option<Box<List>>: Send`, which will in turn require
+`Box<List>: Send` and then finally `List: Send` again. Under ordinary
+trait matching, this cycle would be an error, but for an auto trait it
+is considered a successful match.
+
+## Items
+
+Auto traits cannot have any trait items, such as methods or associated types. This ensures that we can generate default implementations.
+
+## Supertraits
+
+Auto traits cannot have supertraits. This is for soundness reasons, as the interaction of coinduction with implied bounds is difficult to reconcile.
+"##,
+ },
+ Lint {
+ label: "box_patterns",
+ description: r##"# `box_patterns`
+
+The tracking issue for this feature is: [#29641]
+
+[#29641]: https://github.com/rust-lang/rust/issues/29641
+
+See also [`box_syntax`](box-syntax.md)
+
+------------------------
+
+Box patterns let you match on `Box<T>`s:
+
+
+```rust
+#![feature(box_patterns)]
+
+fn main() {
+ let b = Some(Box::new(5));
+ match b {
+ Some(box n) if n < 0 => {
+ println!("Box contains negative number {}", n);
+ },
+ Some(box n) if n >= 0 => {
+ println!("Box contains non-negative number {}", n);
+ },
+ None => {
+ println!("No box");
+ },
+ _ => unreachable!()
+ }
+}
+```
+"##,
+ },
+ Lint {
+ label: "box_syntax",
+ description: r##"# `box_syntax`
+
+The tracking issue for this feature is: [#49733]
+
+[#49733]: https://github.com/rust-lang/rust/issues/49733
+
+See also [`box_patterns`](box-patterns.md)
+
+------------------------
+
+Currently the only stable way to create a `Box` is via the `Box::new` method.
+Also it is not possible in stable Rust to destructure a `Box` in a match
+pattern. The unstable `box` keyword can be used to create a `Box`. An example
+usage would be:
+
+```rust
+#![feature(box_syntax)]
+
+fn main() {
+ let b = box 5;
+}
+```
+"##,
+ },
+ Lint {
+ label: "c_unwind",
+ description: r##"# `c_unwind`
+
+The tracking issue for this feature is: [#74990]
+
+[#74990]: https://github.com/rust-lang/rust/issues/74990
+
+------------------------
+
+Introduces four new ABI strings: "C-unwind", "stdcall-unwind",
+"thiscall-unwind", and "system-unwind". These enable unwinding from other
+languages (such as C++) into Rust frames and from Rust into other languages.
+
+See [RFC 2945] for more information.
+
+[RFC 2945]: https://github.com/rust-lang/rfcs/blob/master/text/2945-c-unwind-abi.md
+"##,
+ },
+ Lint {
+ label: "c_variadic",
+ description: r##"# `c_variadic`
+
+The tracking issue for this feature is: [#44930]
+
+[#44930]: https://github.com/rust-lang/rust/issues/44930
+
+------------------------
+
+The `c_variadic` language feature enables C-variadic functions to be
+defined in Rust. The may be called both from within Rust and via FFI.
+
+## Examples
+
+```rust
+#![feature(c_variadic)]
+
+pub unsafe extern "C" fn add(n: usize, mut args: ...) -> usize {
+ let mut sum = 0;
+ for _ in 0..n {
+ sum += args.arg::<usize>();
+ }
+ sum
+}
+```
+"##,
+ },
+ Lint {
+ label: "c_variadic",
+ description: r##"# `c_variadic`
+
+The tracking issue for this feature is: [#44930]
+
+[#44930]: https://github.com/rust-lang/rust/issues/44930
+
+------------------------
+
+The `c_variadic` library feature exposes the `VaList` structure,
+Rust's analogue of C's `va_list` type.
+
+## Examples
+
+```rust
+#![feature(c_variadic)]
+
+use std::ffi::VaList;
+
+pub unsafe extern "C" fn vadd(n: usize, mut args: VaList) -> usize {
+ let mut sum = 0;
+ for _ in 0..n {
+ sum += args.arg::<usize>();
+ }
+ sum
+}
+```
+"##,
+ },
+ Lint {
+ label: "c_void_variant",
+ description: r##"# `c_void_variant`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_panic",
+ description: r##"# `cfg_panic`
+
+The tracking issue for this feature is: [#77443]
+
+[#77443]: https://github.com/rust-lang/rust/issues/77443
+
+------------------------
+
+The `cfg_panic` feature makes it possible to execute different code
+depending on the panic strategy.
+
+Possible values at the moment are `"unwind"` or `"abort"`, although
+it is possible that new panic strategies may be added to Rust in the
+future.
+
+## Examples
+
+```rust
+#![feature(cfg_panic)]
+
+#[cfg(panic = "unwind")]
+fn a() {
+ // ...
+}
+
+#[cfg(not(panic = "unwind"))]
+fn a() {
+ // ...
+}
+
+fn b() {
+ if cfg!(panic = "abort") {
+ // ...
+ } else {
+ // ...
+ }
+}
+```
+"##,
+ },
+ Lint {
+ label: "cfg_sanitize",
+ description: r##"# `cfg_sanitize`
+
+The tracking issue for this feature is: [#39699]
+
+[#39699]: https://github.com/rust-lang/rust/issues/39699
+
+------------------------
+
+The `cfg_sanitize` feature makes it possible to execute different code
+depending on whether a particular sanitizer is enabled or not.
+
+## Examples
+
+```rust
+#![feature(cfg_sanitize)]
+
+#[cfg(sanitize = "thread")]
+fn a() {
+ // ...
+}
+
+#[cfg(not(sanitize = "thread"))]
+fn a() {
+ // ...
+}
+
+fn b() {
+ if cfg!(sanitize = "leak") {
+ // ...
+ } else {
+ // ...
+ }
+}
+```
+"##,
+ },
+ Lint {
+ label: "cfg_version",
+ description: r##"# `cfg_version`
+
+The tracking issue for this feature is: [#64796]
+
+[#64796]: https://github.com/rust-lang/rust/issues/64796
+
+------------------------
+
+The `cfg_version` feature makes it possible to execute different code
+depending on the compiler version. It will return true if the compiler
+version is greater than or equal to the specified version.
+
+## Examples
+
+```rust
+#![feature(cfg_version)]
+
+#[cfg(version("1.42"))] // 1.42 and above
+fn a() {
+ // ...
+}
+
+#[cfg(not(version("1.42")))] // 1.41 and below
+fn a() {
+ // ...
+}
+
+fn b() {
+ if cfg!(version("1.42")) {
+ // ...
+ } else {
+ // ...
+ }
+}
+```
+"##,
+ },
+ Lint {
+ label: "char_error_internals",
+ description: r##"# `char_error_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "closure_track_caller",
+ description: r##"# `closure_track_caller`
+
+The tracking issue for this feature is: [#87417]
+
+[#87417]: https://github.com/rust-lang/rust/issues/87417
+
+------------------------
+
+Allows using the `#[track_caller]` attribute on closures and generators.
+Calls made to the closure or generator will have caller information
+available through `std::panic::Location::caller()`, just like using
+`#[track_caller]` on a function.
+"##,
+ },
+ Lint {
+ label: "cmse_nonsecure_entry",
+ description: r##"# `cmse_nonsecure_entry`
+
+The tracking issue for this feature is: [#75835]
+
+[#75835]: https://github.com/rust-lang/rust/issues/75835
+
+------------------------
+
+The [TrustZone-M
+feature](https://developer.arm.com/documentation/100690/latest/) is available
+for targets with the Armv8-M architecture profile (`thumbv8m` in their target
+name).
+LLVM, the Rust compiler and the linker are providing
+[support](https://developer.arm.com/documentation/ecm0359818/latest/) for the
+TrustZone-M feature.
+
+One of the things provided, with this unstable feature, is the
+`cmse_nonsecure_entry` attribute. This attribute marks a Secure function as an
+entry function (see [section
+5.4](https://developer.arm.com/documentation/ecm0359818/latest/) for details).
+With this attribute, the compiler will do the following:
+* add a special symbol on the function which is the `__acle_se_` prefix and the
+ standard function name
+* constrain the number of parameters to avoid using the Non-Secure stack
+* before returning from the function, clear registers that might contain Secure
+ information
+* use the `BXNS` instruction to return
+
+Because the stack can not be used to pass parameters, there will be compilation
+errors if:
+* the total size of all parameters is too big (for example more than four 32
+ bits integers)
+* the entry function is not using a C ABI
+
+The special symbol `__acle_se_` will be used by the linker to generate a secure
+gateway veneer.
+
+<!-- NOTE(ignore) this example is specific to thumbv8m targets -->
+
+``` rust,ignore
+#![feature(cmse_nonsecure_entry)]
+
+#[no_mangle]
+#[cmse_nonsecure_entry]
+pub extern "C" fn entry_function(input: u32) -> u32 {
+ input + 6
+}
+```
+
+``` text
+$ rustc --emit obj --crate-type lib --target thumbv8m.main-none-eabi function.rs
+$ arm-none-eabi-objdump -D function.o
+
+00000000 <entry_function>:
+ 0: b580 push {r7, lr}
+ 2: 466f mov r7, sp
+ 4: b082 sub sp, #8
+ 6: 9001 str r0, [sp, #4]
+ 8: 1d81 adds r1, r0, #6
+ a: 460a mov r2, r1
+ c: 4281 cmp r1, r0
+ e: 9200 str r2, [sp, #0]
+ 10: d30b bcc.n 2a <entry_function+0x2a>
+ 12: e7ff b.n 14 <entry_function+0x14>
+ 14: 9800 ldr r0, [sp, #0]
+ 16: b002 add sp, #8
+ 18: e8bd 4080 ldmia.w sp!, {r7, lr}
+ 1c: 4671 mov r1, lr
+ 1e: 4672 mov r2, lr
+ 20: 4673 mov r3, lr
+ 22: 46f4 mov ip, lr
+ 24: f38e 8800 msr CPSR_f, lr
+ 28: 4774 bxns lr
+ 2a: f240 0000 movw r0, #0
+ 2e: f2c0 0000 movt r0, #0
+ 32: f240 0200 movw r2, #0
+ 36: f2c0 0200 movt r2, #0
+ 3a: 211c movs r1, #28
+ 3c: f7ff fffe bl 0 <_ZN4core9panicking5panic17h5c028258ca2fb3f5E>
+ 40: defe udf #254 ; 0xfe
+```
+"##,
+ },
+ Lint {
+ label: "compiler_builtins",
+ description: r##"# `compiler_builtins`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "concat_idents",
+ description: r##"# `concat_idents`
+
+The tracking issue for this feature is: [#29599]
+
+[#29599]: https://github.com/rust-lang/rust/issues/29599
+
+------------------------
+
+The `concat_idents` feature adds a macro for concatenating multiple identifiers
+into one identifier.
+
+## Examples
+
+```rust
+#![feature(concat_idents)]
+
+fn main() {
+ fn foobar() -> u32 { 23 }
+ let f = concat_idents!(foo, bar);
+ assert_eq!(f(), 23);
+}
+```
+"##,
+ },
+ Lint {
+ label: "const_eval_limit",
+ description: r##"# `const_eval_limit`
+
+The tracking issue for this feature is: [#67217]
+
+[#67217]: https://github.com/rust-lang/rust/issues/67217
+
+The `const_eval_limit` allows someone to limit the evaluation steps the CTFE undertakes to evaluate a `const fn`.
+"##,
+ },
+ Lint {
+ label: "core_intrinsics",
+ description: r##"# `core_intrinsics`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "core_panic",
+ description: r##"# `core_panic`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "core_private_bignum",
+ description: r##"# `core_private_bignum`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "core_private_diy_float",
+ description: r##"# `core_private_diy_float`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "crate_visibility_modifier",
+ description: r##"# `crate_visibility_modifier`
+
+The tracking issue for this feature is: [#53120]
+
+[#53120]: https://github.com/rust-lang/rust/issues/53120
+
+-----
+
+The `crate_visibility_modifier` feature allows the `crate` keyword to be used
+as a visibility modifier synonymous to `pub(crate)`, indicating that a type
+(function, _&c._) is to be visible to the entire enclosing crate, but not to
+other crates.
+
+```rust
+#![feature(crate_visibility_modifier)]
+
+crate struct Foo {
+ bar: usize,
+}
+```
+"##,
+ },
+ Lint {
+ label: "custom_test_frameworks",
+ description: r##"# `custom_test_frameworks`
+
+The tracking issue for this feature is: [#50297]
+
+[#50297]: https://github.com/rust-lang/rust/issues/50297
+
+------------------------
+
+The `custom_test_frameworks` feature allows the use of `#[test_case]` and `#![test_runner]`.
+Any function, const, or static can be annotated with `#[test_case]` causing it to be aggregated (like `#[test]`)
+and be passed to the test runner determined by the `#![test_runner]` crate attribute.
+
+```rust
+#![feature(custom_test_frameworks)]
+#![test_runner(my_runner)]
+
+fn my_runner(tests: &[&i32]) {
+ for t in tests {
+ if **t == 0 {
+ println!("PASSED");
+ } else {
+ println!("FAILED");
+ }
+ }
+}
+
+#[test_case]
+const WILL_PASS: i32 = 0;
+
+#[test_case]
+const WILL_FAIL: i32 = 4;
+```
+"##,
+ },
+ Lint {
+ label: "dec2flt",
+ description: r##"# `dec2flt`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "default_free_fn",
+ description: r##"# `default_free_fn`
+
+The tracking issue for this feature is: [#73014]
+
+[#73014]: https://github.com/rust-lang/rust/issues/73014
+
+------------------------
+
+Adds a free `default()` function to the `std::default` module. This function
+just forwards to [`Default::default()`], but may remove repetition of the word
+"default" from the call site.
+
+[`Default::default()`]: https://doc.rust-lang.org/nightly/std/default/trait.Default.html#tymethod.default
+
+Here is an example:
+
+```rust
+#![feature(default_free_fn)]
+use std::default::default;
+
+#[derive(Default)]
+struct AppConfig {
+ foo: FooConfig,
+ bar: BarConfig,
+}
+
+#[derive(Default)]
+struct FooConfig {
+ foo: i32,
+}
+
+#[derive(Default)]
+struct BarConfig {
+ bar: f32,
+ baz: u8,
+}
+
+fn main() {
+ let options = AppConfig {
+ foo: default(),
+ bar: BarConfig {
+ bar: 10.1,
+ ..default()
+ },
+ };
+}
+```
+"##,
+ },
+ Lint {
+ label: "derive_clone_copy",
+ description: r##"# `derive_clone_copy`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "derive_eq",
+ description: r##"# `derive_eq`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "doc_cfg",
+ description: r##"# `doc_cfg`
+
+The tracking issue for this feature is: [#43781]
+
+------
+
+The `doc_cfg` feature allows an API to be documented as only available on some specific platforms.
+This attribute has two effects:
+
+1. In the annotated item's documentation, there will be a message saying "This is supported on
+ (platform) only".
+
+2. The item's doc-tests will only run on the specific platform.
+
+In addition to allowing the use of the `#[doc(cfg)]` attribute, this feature enables the use of a
+special conditional compilation flag, `#[cfg(doc)]`, set whenever building documentation on your
+crate.
+
+This feature was introduced as part of PR [#43348] to allow the platform-specific parts of the
+standard library to be documented.
+
+```rust
+#![feature(doc_cfg)]
+
+#[cfg(any(windows, doc))]
+#[doc(cfg(windows))]
+/// The application's icon in the notification area (a.k.a. system tray).
+///
+/// # Examples
+///
+/// ```no_run
+/// extern crate my_awesome_ui_library;
+/// use my_awesome_ui_library::current_app;
+/// use my_awesome_ui_library::windows::notification;
+///
+/// let icon = current_app().get::<notification::Icon>();
+/// icon.show();
+/// icon.show_message("Hello");
+/// ```
+pub struct Icon {
+ // ...
+}
+```
+
+[#43781]: https://github.com/rust-lang/rust/issues/43781
+[#43348]: https://github.com/rust-lang/rust/issues/43348
+"##,
+ },
+ Lint {
+ label: "doc_masked",
+ description: r##"# `doc_masked`
+
+The tracking issue for this feature is: [#44027]
+
+-----
+
+The `doc_masked` feature allows a crate to exclude types from a given crate from appearing in lists
+of trait implementations. The specifics of the feature are as follows:
+
+1. When rustdoc encounters an `extern crate` statement annotated with a `#[doc(masked)]` attribute,
+ it marks the crate as being masked.
+
+2. When listing traits a given type implements, rustdoc ensures that traits from masked crates are
+ not emitted into the documentation.
+
+3. When listing types that implement a given trait, rustdoc ensures that types from masked crates
+ are not emitted into the documentation.
+
+This feature was introduced in PR [#44026] to ensure that compiler-internal and
+implementation-specific types and traits were not included in the standard library's documentation.
+Such types would introduce broken links into the documentation.
+
+[#44026]: https://github.com/rust-lang/rust/pull/44026
+[#44027]: https://github.com/rust-lang/rust/pull/44027
+"##,
+ },
+ Lint {
+ label: "doc_notable_trait",
+ description: r##"# `doc_notable_trait`
+
+The tracking issue for this feature is: [#45040]
+
+The `doc_notable_trait` feature allows the use of the `#[doc(notable_trait)]`
+attribute, which will display the trait in a "Notable traits" dialog for
+functions returning types that implement the trait. For example, this attribute
+is applied to the `Iterator`, `Future`, `io::Read`, and `io::Write` traits in
+the standard library.
+
+You can do this on your own traits like so:
+
+```
+#![feature(doc_notable_trait)]
+
+#[doc(notable_trait)]
+pub trait MyTrait {}
+
+pub struct MyStruct;
+impl MyTrait for MyStruct {}
+
+/// The docs for this function will have a button that displays a dialog about
+/// `MyStruct` implementing `MyTrait`.
+pub fn my_fn() -> MyStruct { MyStruct }
+```
+
+This feature was originally implemented in PR [#45039].
+
+See also its documentation in [the rustdoc book][rustdoc-book-notable_trait].
+
+[#45040]: https://github.com/rust-lang/rust/issues/45040
+[#45039]: https://github.com/rust-lang/rust/pull/45039
+[rustdoc-book-notable_trait]: ../../rustdoc/unstable-features.html#adding-your-trait-to-the-notable-traits-dialog
+"##,
+ },
+ Lint {
+ label: "exclusive_range_pattern",
+ description: r##"# `exclusive_range_pattern`
+
+The tracking issue for this feature is: [#37854].
+
+
+[#67264]: https://github.com/rust-lang/rust/issues/67264
+[#37854]: https://github.com/rust-lang/rust/issues/37854
+-----
+
+The `exclusive_range_pattern` feature allows non-inclusive range
+patterns (`0..10`) to be used in appropriate pattern matching
+contexts. It also can be combined with `#![feature(half_open_range_patterns)]`
+to be able to use RangeTo patterns (`..10`).
+
+It also enabled RangeFrom patterns but that has since been
+stabilized.
+
+```rust
+#![feature(exclusive_range_pattern)]
+ let x = 5;
+ match x {
+ 0..10 => println!("single digit"),
+ 10 => println!("ten isn't part of the above range"),
+ _ => println!("nor is everything else.")
+ }
+```
+"##,
+ },
+ Lint {
+ label: "explicit_generic_args_with_impl_trait",
+ description: r##"# `explicit_generic_args_with_impl_trait`
+
+The tracking issue for this feature is: [#83701]
+
+[#83701]: https://github.com/rust-lang/rust/issues/83701
+
+------------------------
+
+The `explicit_generic_args_with_impl_trait` feature gate lets you specify generic arguments even
+when `impl Trait` is used in argument position.
+
+A simple example is:
+
+```rust
+#![feature(explicit_generic_args_with_impl_trait)]
+
+fn foo<T: ?Sized>(_f: impl AsRef<T>) {}
+
+fn main() {
+ foo::<str>("".to_string());
+}
+```
+
+This is currently rejected:
+
+```text
+error[E0632]: cannot provide explicit generic arguments when `impl Trait` is used in argument position
+ --> src/main.rs:6:11
+ |
+6 | foo::<str>("".to_string());
+ | ^^^ explicit generic argument not allowed
+
+```
+
+However it would compile if `explicit_generic_args_with_impl_trait` is enabled.
+
+Note that the synthetic type parameters from `impl Trait` are still implicit and you
+cannot explicitly specify these:
+
+```rust,compile_fail
+#![feature(explicit_generic_args_with_impl_trait)]
+
+fn foo<T: ?Sized>(_f: impl AsRef<T>) {}
+fn bar<T: ?Sized, F: AsRef<T>>(_f: F) {}
+
+fn main() {
+ bar::<str, _>("".to_string()); // Okay
+ bar::<str, String>("".to_string()); // Okay
+
+ foo::<str>("".to_string()); // Okay
+ foo::<str, String>("".to_string()); // Error, you cannot specify `impl Trait` explicitly
+}
+```
+"##,
+ },
+ Lint {
+ label: "fd",
+ description: r##"# `fd`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fd_read",
+ description: r##"# `fd_read`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ffi_const",
+ description: r##"# `ffi_const`
+
+The tracking issue for this feature is: [#58328]
+
+------
+
+The `#[ffi_const]` attribute applies clang's `const` attribute to foreign
+functions declarations.
+
+That is, `#[ffi_const]` functions shall have no effects except for their return
+value, which can only depend on the values of the function parameters, and is
+not affected by changes to the observable state of the program.
+
+Applying the `#[ffi_const]` attribute to a function that violates these
+requirements is undefined behaviour.
+
+This attribute enables Rust to perform common optimizations, like sub-expression
+elimination, and it can avoid emitting some calls in repeated invocations of the
+function with the same argument values regardless of other operations being
+performed in between these functions calls (as opposed to `#[ffi_pure]`
+functions).
+
+## Pitfalls
+
+A `#[ffi_const]` function can only read global memory that would not affect
+its return value for the whole execution of the program (e.g. immutable global
+memory). `#[ffi_const]` functions are referentially-transparent and therefore
+more strict than `#[ffi_pure]` functions.
+
+A common pitfall involves applying the `#[ffi_const]` attribute to a
+function that reads memory through pointer arguments which do not necessarily
+point to immutable global memory.
+
+A `#[ffi_const]` function that returns unit has no effect on the abstract
+machine's state, and a `#[ffi_const]` function cannot be `#[ffi_pure]`.
+
+A `#[ffi_const]` function must not diverge, neither via a side effect (e.g. a
+call to `abort`) nor by infinite loops.
+
+When translating C headers to Rust FFI, it is worth verifying for which targets
+the `const` attribute is enabled in those headers, and using the appropriate
+`cfg` macros in the Rust side to match those definitions. While the semantics of
+`const` are implemented identically by many C and C++ compilers, e.g., clang,
+[GCC], [ARM C/C++ compiler], [IBM ILE C/C++], etc. they are not necessarily
+implemented in this way on all of them. It is therefore also worth verifying
+that the semantics of the C toolchain used to compile the binary being linked
+against are compatible with those of the `#[ffi_const]`.
+
+[#58328]: https://github.com/rust-lang/rust/issues/58328
+[ARM C/C++ compiler]: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0491c/Cacgigch.html
+[GCC]: https://gcc.gnu.org/onlinedocs/gcc/Common-Function-Attributes.html#index-const-function-attribute
+[IBM ILE C/C++]: https://www.ibm.com/support/knowledgecenter/fr/ssw_ibm_i_71/rzarg/fn_attrib_const.htm
+"##,
+ },
+ Lint {
+ label: "ffi_pure",
+ description: r##"# `ffi_pure`
+
+The tracking issue for this feature is: [#58329]
+
+------
+
+The `#[ffi_pure]` attribute applies clang's `pure` attribute to foreign
+functions declarations.
+
+That is, `#[ffi_pure]` functions shall have no effects except for their return
+value, which shall not change across two consecutive function calls with
+the same parameters.
+
+Applying the `#[ffi_pure]` attribute to a function that violates these
+requirements is undefined behavior.
+
+This attribute enables Rust to perform common optimizations, like sub-expression
+elimination and loop optimizations. Some common examples of pure functions are
+`strlen` or `memcmp`.
+
+These optimizations are only applicable when the compiler can prove that no
+program state observable by the `#[ffi_pure]` function has changed between calls
+of the function, which could alter the result. See also the `#[ffi_const]`
+attribute, which provides stronger guarantees regarding the allowable behavior
+of a function, enabling further optimization.
+
+## Pitfalls
+
+A `#[ffi_pure]` function can read global memory through the function
+parameters (e.g. pointers), globals, etc. `#[ffi_pure]` functions are not
+referentially-transparent, and are therefore more relaxed than `#[ffi_const]`
+functions.
+
+However, accessing global memory through volatile or atomic reads can violate the
+requirement that two consecutive function calls shall return the same value.
+
+A `pure` function that returns unit has no effect on the abstract machine's
+state.
+
+A `#[ffi_pure]` function must not diverge, neither via a side effect (e.g. a
+call to `abort`) nor by infinite loops.
+
+When translating C headers to Rust FFI, it is worth verifying for which targets
+the `pure` attribute is enabled in those headers, and using the appropriate
+`cfg` macros in the Rust side to match those definitions. While the semantics of
+`pure` are implemented identically by many C and C++ compilers, e.g., clang,
+[GCC], [ARM C/C++ compiler], [IBM ILE C/C++], etc. they are not necessarily
+implemented in this way on all of them. It is therefore also worth verifying
+that the semantics of the C toolchain used to compile the binary being linked
+against are compatible with those of the `#[ffi_pure]`.
+
+
+[#58329]: https://github.com/rust-lang/rust/issues/58329
+[ARM C/C++ compiler]: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0491c/Cacigdac.html
+[GCC]: https://gcc.gnu.org/onlinedocs/gcc/Common-Function-Attributes.html#index-pure-function-attribute
+[IBM ILE C/C++]: https://www.ibm.com/support/knowledgecenter/fr/ssw_ibm_i_71/rzarg/fn_attrib_pure.htm
+"##,
+ },
+ Lint {
+ label: "flt2dec",
+ description: r##"# `flt2dec`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fmt_internals",
+ description: r##"# `fmt_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fn_traits",
+ description: r##"# `fn_traits`
+
+The tracking issue for this feature is [#29625]
+
+See Also: [`unboxed_closures`](../language-features/unboxed-closures.md)
+
+[#29625]: https://github.com/rust-lang/rust/issues/29625
+
+----
+
+The `fn_traits` feature allows for implementation of the [`Fn*`] traits
+for creating custom closure-like types.
+
+[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+
+```rust
+#![feature(unboxed_closures)]
+#![feature(fn_traits)]
+
+struct Adder {
+ a: u32
+}
+
+impl FnOnce<(u32, )> for Adder {
+ type Output = u32;
+ extern "rust-call" fn call_once(self, b: (u32, )) -> Self::Output {
+ self.a + b.0
+ }
+}
+
+fn main() {
+ let adder = Adder { a: 3 };
+ assert_eq!(adder(2), 5);
+}
+```
+"##,
+ },
+ Lint {
+ label: "generators",
+ description: r##"# `generators`
+
+The tracking issue for this feature is: [#43122]
+
+[#43122]: https://github.com/rust-lang/rust/issues/43122
+
+------------------------
+
+The `generators` feature gate in Rust allows you to define generator or
+coroutine literals. A generator is a "resumable function" that syntactically
+resembles a closure but compiles to much different semantics in the compiler
+itself. The primary feature of a generator is that it can be suspended during
+execution to be resumed at a later date. Generators use the `yield` keyword to
+"return", and then the caller can `resume` a generator to resume execution just
+after the `yield` keyword.
+
+Generators are an extra-unstable feature in the compiler right now. Added in
+[RFC 2033] they're mostly intended right now as an information/constraint
+gathering phase. The intent is that experimentation can happen on the nightly
+compiler before actual stabilization. A further RFC will be required to
+stabilize generators/coroutines and will likely contain at least a few small
+tweaks to the overall design.
+
+[RFC 2033]: https://github.com/rust-lang/rfcs/pull/2033
+
+A syntactical example of a generator is:
+
+```rust
+#![feature(generators, generator_trait)]
+
+use std::ops::{Generator, GeneratorState};
+use std::pin::Pin;
+
+fn main() {
+ let mut generator = || {
+ yield 1;
+ return "foo"
+ };
+
+ match Pin::new(&mut generator).resume(()) {
+ GeneratorState::Yielded(1) => {}
+ _ => panic!("unexpected value from resume"),
+ }
+ match Pin::new(&mut generator).resume(()) {
+ GeneratorState::Complete("foo") => {}
+ _ => panic!("unexpected value from resume"),
+ }
+}
+```
+
+Generators are closure-like literals which can contain a `yield` statement. The
+`yield` statement takes an optional expression of a value to yield out of the
+generator. All generator literals implement the `Generator` trait in the
+`std::ops` module. The `Generator` trait has one main method, `resume`, which
+resumes execution of the generator at the previous suspension point.
+
+An example of the control flow of generators is that the following example
+prints all numbers in order:
+
+```rust
+#![feature(generators, generator_trait)]
+
+use std::ops::Generator;
+use std::pin::Pin;
+
+fn main() {
+ let mut generator = || {
+ println!("2");
+ yield;
+ println!("4");
+ };
+
+ println!("1");
+ Pin::new(&mut generator).resume(());
+ println!("3");
+ Pin::new(&mut generator).resume(());
+ println!("5");
+}
+```
+
+At this time the main intended use case of generators is an implementation
+primitive for async/await syntax, but generators will likely be extended to
+ergonomic implementations of iterators and other primitives in the future.
+Feedback on the design and usage is always appreciated!
+
+### The `Generator` trait
+
+The `Generator` trait in `std::ops` currently looks like:
+
+```rust
+# #![feature(arbitrary_self_types, generator_trait)]
+# use std::ops::GeneratorState;
+# use std::pin::Pin;
+
+pub trait Generator<R = ()> {
+ type Yield;
+ type Return;
+ fn resume(self: Pin<&mut Self>, resume: R) -> GeneratorState<Self::Yield, Self::Return>;
+}
+```
+
+The `Generator::Yield` type is the type of values that can be yielded with the
+`yield` statement. The `Generator::Return` type is the returned type of the
+generator. This is typically the last expression in a generator's definition or
+any value passed to `return` in a generator. The `resume` function is the entry
+point for executing the `Generator` itself.
+
+The return value of `resume`, `GeneratorState`, looks like:
+
+```rust
+pub enum GeneratorState<Y, R> {
+ Yielded(Y),
+ Complete(R),
+}
+```
+
+The `Yielded` variant indicates that the generator can later be resumed. This
+corresponds to a `yield` point in a generator. The `Complete` variant indicates
+that the generator is complete and cannot be resumed again. Calling `resume`
+after a generator has returned `Complete` will likely result in a panic of the
+program.
+
+### Closure-like semantics
+
+The closure-like syntax for generators alludes to the fact that they also have
+closure-like semantics. Namely:
+
+* When created, a generator executes no code. A closure literal does not
+ actually execute any of the closure's code on construction, and similarly a
+ generator literal does not execute any code inside the generator when
+ constructed.
+
+* Generators can capture outer variables by reference or by move, and this can
+ be tweaked with the `move` keyword at the beginning of the closure. Like
+ closures all generators will have an implicit environment which is inferred by
+ the compiler. Outer variables can be moved into a generator for use as the
+ generator progresses.
+
+* Generator literals produce a value with a unique type which implements the
+ `std::ops::Generator` trait. This allows actual execution of the generator
+ through the `Generator::resume` method as well as also naming it in return
+ types and such.
+
+* Traits like `Send` and `Sync` are automatically implemented for a `Generator`
+ depending on the captured variables of the environment. Unlike closures,
+ generators also depend on variables live across suspension points. This means
+ that although the ambient environment may be `Send` or `Sync`, the generator
+ itself may not be due to internal variables live across `yield` points being
+ not-`Send` or not-`Sync`. Note that generators do
+ not implement traits like `Copy` or `Clone` automatically.
+
+* Whenever a generator is dropped it will drop all captured environment
+ variables.
+
+### Generators as state machines
+
+In the compiler, generators are currently compiled as state machines. Each
+`yield` expression will correspond to a different state that stores all live
+variables over that suspension point. Resumption of a generator will dispatch on
+the current state and then execute internally until a `yield` is reached, at
+which point all state is saved off in the generator and a value is returned.
+
+Let's take a look at an example to see what's going on here:
+
+```rust
+#![feature(generators, generator_trait)]
+
+use std::ops::Generator;
+use std::pin::Pin;
+
+fn main() {
+ let ret = "foo";
+ let mut generator = move || {
+ yield 1;
+ return ret
+ };
+
+ Pin::new(&mut generator).resume(());
+ Pin::new(&mut generator).resume(());
+}
+```
+
+This generator literal will compile down to something similar to:
+
+```rust
+#![feature(arbitrary_self_types, generators, generator_trait)]
+
+use std::ops::{Generator, GeneratorState};
+use std::pin::Pin;
+
+fn main() {
+ let ret = "foo";
+ let mut generator = {
+ enum __Generator {
+ Start(&'static str),
+ Yield1(&'static str),
+ Done,
+ }
+
+ impl Generator for __Generator {
+ type Yield = i32;
+ type Return = &'static str;
+
+ fn resume(mut self: Pin<&mut Self>, resume: ()) -> GeneratorState<i32, &'static str> {
+ use std::mem;
+ match mem::replace(&mut *self, __Generator::Done) {
+ __Generator::Start(s) => {
+ *self = __Generator::Yield1(s);
+ GeneratorState::Yielded(1)
+ }
+
+ __Generator::Yield1(s) => {
+ *self = __Generator::Done;
+ GeneratorState::Complete(s)
+ }
+
+ __Generator::Done => {
+ panic!("generator resumed after completion")
+ }
+ }
+ }
+ }
+
+ __Generator::Start(ret)
+ };
+
+ Pin::new(&mut generator).resume(());
+ Pin::new(&mut generator).resume(());
+}
+```
+
+Notably here we can see that the compiler is generating a fresh type,
+`__Generator` in this case. This type has a number of states (represented here
+as an `enum`) corresponding to each of the conceptual states of the generator.
+At the beginning we're closing over our outer variable `ret` and then that
+variable is also live over the `yield` point, so it's stored in both states.
+
+When the generator starts it'll immediately yield 1, but it saves off its state
+just before it does so indicating that it has reached the yield point. Upon
+resuming again we'll execute the `return ret` which returns the `Complete`
+state.
+
+Here we can also note that the `Done` state, if resumed, panics immediately as
+it's invalid to resume a completed generator. It's also worth noting that this
+is just a rough desugaring, not a normative specification for what the compiler
+does.
+"##,
+ },
+ Lint {
+ label: "half_open_range_patterns",
+ description: r##"# `half_open_range_patterns`
+
+The tracking issue for this feature is: [#67264]
+It is part of the `exclusive_range_pattern` feature,
+tracked at [#37854].
+
+[#67264]: https://github.com/rust-lang/rust/issues/67264
+[#37854]: https://github.com/rust-lang/rust/issues/37854
+-----
+
+The `half_open_range_patterns` feature allows RangeTo patterns
+(`..10`) to be used in appropriate pattern matching contexts.
+This requires also enabling the `exclusive_range_pattern` feature.
+
+It also enabled RangeFrom patterns but that has since been
+stabilized.
+
+```rust
+#![feature(half_open_range_patterns)]
+#![feature(exclusive_range_pattern)]
+ let x = 5;
+ match x {
+ ..0 => println!("negative!"), // "RangeTo" pattern. Unstable.
+ 0 => println!("zero!"),
+ 1.. => println!("positive!"), // "RangeFrom" pattern. Stable.
+ }
+```
+"##,
+ },
+ Lint {
+ label: "infer_static_outlives_requirements",
+ description: r##"# `infer_static_outlives_requirements`
+
+The tracking issue for this feature is: [#54185]
+
+[#54185]: https://github.com/rust-lang/rust/issues/54185
+
+------------------------
+The `infer_static_outlives_requirements` feature indicates that certain
+`'static` outlives requirements can be inferred by the compiler rather than
+stating them explicitly.
+
+Note: It is an accompanying feature to `infer_outlives_requirements`,
+which must be enabled to infer outlives requirements.
+
+For example, currently generic struct definitions that contain
+references, require where-clauses of the form T: 'static. By using
+this feature the outlives predicates will be inferred, although
+they may still be written explicitly.
+
+```rust,ignore (pseudo-Rust)
+struct Foo<U> where U: 'static { // <-- currently required
+ bar: Bar<U>
+}
+struct Bar<T: 'static> {
+ x: T,
+}
+```
+
+
+## Examples:
+
+```rust,ignore (pseudo-Rust)
+#![feature(infer_outlives_requirements)]
+#![feature(infer_static_outlives_requirements)]
+
+#[rustc_outlives]
+// Implicitly infer U: 'static
+struct Foo<U> {
+ bar: Bar<U>
+}
+struct Bar<T: 'static> {
+ x: T,
+}
+```
+"##,
+ },
+ Lint {
+ label: "inline_const",
+ description: r##"# `inline_const`
+
+The tracking issue for this feature is: [#76001]
+
+See also [`inline_const_pat`](inline-const-pat.md)
+
+------
+
+This feature allows you to use inline constant expressions. For example, you can
+turn this code:
+
+```rust
+# fn add_one(x: i32) -> i32 { x + 1 }
+const MY_COMPUTATION: i32 = 1 + 2 * 3 / 4;
+
+fn main() {
+ let x = add_one(MY_COMPUTATION);
+}
+```
+
+into this code:
+
+```rust
+#![feature(inline_const)]
+
+# fn add_one(x: i32) -> i32 { x + 1 }
+fn main() {
+ let x = add_one(const { 1 + 2 * 3 / 4 });
+}
+```
+
+[#76001]: https://github.com/rust-lang/rust/issues/76001
+"##,
+ },
+ Lint {
+ label: "inline_const_pat",
+ description: r##"# `inline_const_pat`
+
+The tracking issue for this feature is: [#76001]
+
+See also [`inline_const`](inline-const.md)
+
+------
+
+This feature allows you to use inline constant expressions in pattern position:
+
+```rust
+#![feature(inline_const_pat)]
+
+const fn one() -> i32 { 1 }
+
+let some_int = 3;
+match some_int {
+ const { 1 + 2 } => println!("Matched 1 + 2"),
+ const { one() } => println!("Matched const fn returning 1"),
+ _ => println!("Didn't match anything :("),
+}
+```
+
+[#76001]: https://github.com/rust-lang/rust/issues/76001
+"##,
+ },
+ Lint {
+ label: "int_error_internals",
+ description: r##"# `int_error_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "internal_output_capture",
+ description: r##"# `internal_output_capture`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "intra_doc_pointers",
+ description: r##"# `intra-doc-pointers`
+
+The tracking issue for this feature is: [#80896]
+
+[#80896]: https://github.com/rust-lang/rust/issues/80896
+
+------------------------
+
+Rustdoc does not currently allow disambiguating between `*const` and `*mut`, and
+raw pointers in intra-doc links are unstable until it does.
+
+```rust
+#![feature(intra_doc_pointers)]
+//! [pointer::add]
+```
+"##,
+ },
+ Lint {
+ label: "intrinsics",
+ description: r##"# `intrinsics`
+
+The tracking issue for this feature is: None.
+
+Intrinsics are never intended to be stable directly, but intrinsics are often
+exported in some sort of stable manner. Prefer using the stable interfaces to
+the intrinsic directly when you can.
+
+------------------------
+
+
+These are imported as if they were FFI functions, with the special
+`rust-intrinsic` ABI. For example, if one was in a freestanding
+context, but wished to be able to `transmute` between types, and
+perform efficient pointer arithmetic, one would import those functions
+via a declaration like
+
+```rust
+#![feature(intrinsics)]
+# fn main() {}
+
+extern "rust-intrinsic" {
+ fn transmute<T, U>(x: T) -> U;
+
+ fn offset<T>(dst: *const T, offset: isize) -> *const T;
+}
+```
+
+As with any other FFI functions, these are always `unsafe` to call.
+"##,
+ },
+ Lint {
+ label: "is_sorted",
+ description: r##"# `is_sorted`
+
+The tracking issue for this feature is: [#53485]
+
+[#53485]: https://github.com/rust-lang/rust/issues/53485
+
+------------------------
+
+Add the methods `is_sorted`, `is_sorted_by` and `is_sorted_by_key` to `[T]`;
+add the methods `is_sorted`, `is_sorted_by` and `is_sorted_by_key` to
+`Iterator`.
+"##,
+ },
+ Lint {
+ label: "lang_items",
+ description: r##"# `lang_items`
+
+The tracking issue for this feature is: None.
+
+------------------------
+
+The `rustc` compiler has certain pluggable operations, that is,
+functionality that isn't hard-coded into the language, but is
+implemented in libraries, with a special marker to tell the compiler
+it exists. The marker is the attribute `#[lang = "..."]` and there are
+various different values of `...`, i.e. various different 'lang
+items'.
+
+For example, `Box` pointers require two lang items, one for allocation
+and one for deallocation. A freestanding program that uses the `Box`
+sugar for dynamic allocations via `malloc` and `free`:
+
+```rust,ignore (libc-is-finicky)
+#![feature(lang_items, box_syntax, start, libc, core_intrinsics, rustc_private)]
+#![no_std]
+use core::intrinsics;
+use core::panic::PanicInfo;
+
+extern crate libc;
+
+#[lang = "owned_box"]
+pub struct Box<T>(*mut T);
+
+#[lang = "exchange_malloc"]
+unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {
+ let p = libc::malloc(size as libc::size_t) as *mut u8;
+
+ // Check if `malloc` failed:
+ if p as usize == 0 {
+ intrinsics::abort();
+ }
+
+ p
+}
+
+#[lang = "box_free"]
+unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
+ libc::free(ptr as *mut libc::c_void)
+}
+
+#[start]
+fn main(_argc: isize, _argv: *const *const u8) -> isize {
+ let _x = box 1;
+
+ 0
+}
+
+#[lang = "eh_personality"] extern fn rust_eh_personality() {}
+#[lang = "panic_impl"] extern fn rust_begin_panic(info: &PanicInfo) -> ! { unsafe { intrinsics::abort() } }
+#[no_mangle] pub extern fn rust_eh_register_frames () {}
+#[no_mangle] pub extern fn rust_eh_unregister_frames () {}
+```
+
+Note the use of `abort`: the `exchange_malloc` lang item is assumed to
+return a valid pointer, and so needs to do the check internally.
+
+Other features provided by lang items include:
+
+- overloadable operators via traits: the traits corresponding to the
+ `==`, `<`, dereferencing (`*`) and `+` (etc.) operators are all
+ marked with lang items; those specific four are `eq`, `ord`,
+ `deref`, and `add` respectively.
+- stack unwinding and general failure; the `eh_personality`,
+ `panic` and `panic_bounds_check` lang items.
+- the traits in `std::marker` used to indicate types of
+ various kinds; lang items `send`, `sync` and `copy`.
+- the marker types and variance indicators found in
+ `std::marker`; lang items `covariant_type`,
+ `contravariant_lifetime`, etc.
+
+Lang items are loaded lazily by the compiler; e.g. if one never uses
+`Box` then there is no need to define functions for `exchange_malloc`
+and `box_free`. `rustc` will emit an error when an item is needed
+but not found in the current crate or any that it depends on.
+
+Most lang items are defined by `libcore`, but if you're trying to build
+an executable without the standard library, you'll run into the need
+for lang items. The rest of this page focuses on this use-case, even though
+lang items are a bit broader than that.
+
+### Using libc
+
+In order to build a `#[no_std]` executable we will need libc as a dependency.
+We can specify this using our `Cargo.toml` file:
+
+```toml
+[dependencies]
+libc = { version = "0.2.14", default-features = false }
+```
+
+Note that the default features have been disabled. This is a critical step -
+**the default features of libc include the standard library and so must be
+disabled.**
+
+### Writing an executable without stdlib
+
+Controlling the entry point is possible in two ways: the `#[start]` attribute,
+or overriding the default shim for the C `main` function with your own.
+
+The function marked `#[start]` is passed the command line parameters
+in the same format as C:
+
+```rust,ignore (libc-is-finicky)
+#![feature(lang_items, core_intrinsics, rustc_private)]
+#![feature(start)]
+#![no_std]
+use core::intrinsics;
+use core::panic::PanicInfo;
+
+// Pull in the system libc library for what crt0.o likely requires.
+extern crate libc;
+
+// Entry point for this program.
+#[start]
+fn start(_argc: isize, _argv: *const *const u8) -> isize {
+ 0
+}
+
+// These functions are used by the compiler, but not
+// for a bare-bones hello world. These are normally
+// provided by libstd.
+#[lang = "eh_personality"]
+#[no_mangle]
+pub extern fn rust_eh_personality() {
+}
+
+#[lang = "panic_impl"]
+#[no_mangle]
+pub extern fn rust_begin_panic(info: &PanicInfo) -> ! {
+ unsafe { intrinsics::abort() }
+}
+```
+
+To override the compiler-inserted `main` shim, one has to disable it
+with `#![no_main]` and then create the appropriate symbol with the
+correct ABI and the correct name, which requires overriding the
+compiler's name mangling too:
+
+```rust,ignore (libc-is-finicky)
+#![feature(lang_items, core_intrinsics, rustc_private)]
+#![feature(start)]
+#![no_std]
+#![no_main]
+use core::intrinsics;
+use core::panic::PanicInfo;
+
+// Pull in the system libc library for what crt0.o likely requires.
+extern crate libc;
+
+// Entry point for this program.
+#[no_mangle] // ensure that this symbol is called `main` in the output
+pub extern fn main(_argc: i32, _argv: *const *const u8) -> i32 {
+ 0
+}
+
+// These functions are used by the compiler, but not
+// for a bare-bones hello world. These are normally
+// provided by libstd.
+#[lang = "eh_personality"]
+#[no_mangle]
+pub extern fn rust_eh_personality() {
+}
+
+#[lang = "panic_impl"]
+#[no_mangle]
+pub extern fn rust_begin_panic(info: &PanicInfo) -> ! {
+ unsafe { intrinsics::abort() }
+}
+```
+
+In many cases, you may need to manually link to the `compiler_builtins` crate
+when building a `no_std` binary. You may observe this via linker error messages
+such as "```undefined reference to `__rust_probestack'```".
+
+## More about the language items
+
+The compiler currently makes a few assumptions about symbols which are
+available in the executable to call. Normally these functions are provided by
+the standard library, but without it you must define your own. These symbols
+are called "language items", and they each have an internal name, and then a
+signature that an implementation must conform to.
+
+The first of these functions, `rust_eh_personality`, is used by the failure
+mechanisms of the compiler. This is often mapped to GCC's personality function
+(see the [libstd implementation][unwind] for more information), but crates
+which do not trigger a panic can be assured that this function is never
+called. The language item's name is `eh_personality`.
+
+[unwind]: https://github.com/rust-lang/rust/blob/master/library/panic_unwind/src/gcc.rs
+
+The second function, `rust_begin_panic`, is also used by the failure mechanisms of the
+compiler. When a panic happens, this controls the message that's displayed on
+the screen. While the language item's name is `panic_impl`, the symbol name is
+`rust_begin_panic`.
+
+Finally, a `eh_catch_typeinfo` static is needed for certain targets which
+implement Rust panics on top of C++ exceptions.
+
+## List of all language items
+
+This is a list of all language items in Rust along with where they are located in
+the source code.
+
+- Primitives
+ - `i8`: `libcore/num/mod.rs`
+ - `i16`: `libcore/num/mod.rs`
+ - `i32`: `libcore/num/mod.rs`
+ - `i64`: `libcore/num/mod.rs`
+ - `i128`: `libcore/num/mod.rs`
+ - `isize`: `libcore/num/mod.rs`
+ - `u8`: `libcore/num/mod.rs`
+ - `u16`: `libcore/num/mod.rs`
+ - `u32`: `libcore/num/mod.rs`
+ - `u64`: `libcore/num/mod.rs`
+ - `u128`: `libcore/num/mod.rs`
+ - `usize`: `libcore/num/mod.rs`
+ - `f32`: `libstd/f32.rs`
+ - `f64`: `libstd/f64.rs`
+ - `char`: `libcore/char.rs`
+ - `slice`: `liballoc/slice.rs`
+ - `str`: `liballoc/str.rs`
+ - `const_ptr`: `libcore/ptr.rs`
+ - `mut_ptr`: `libcore/ptr.rs`
+ - `unsafe_cell`: `libcore/cell.rs`
+- Runtime
+ - `start`: `libstd/rt.rs`
+ - `eh_personality`: `libpanic_unwind/emcc.rs` (EMCC)
+ - `eh_personality`: `libpanic_unwind/gcc.rs` (GNU)
+ - `eh_personality`: `libpanic_unwind/seh.rs` (SEH)
+ - `eh_catch_typeinfo`: `libpanic_unwind/emcc.rs` (EMCC)
+ - `panic`: `libcore/panicking.rs`
+ - `panic_bounds_check`: `libcore/panicking.rs`
+ - `panic_impl`: `libcore/panicking.rs`
+ - `panic_impl`: `libstd/panicking.rs`
+- Allocations
+ - `owned_box`: `liballoc/boxed.rs`
+ - `exchange_malloc`: `liballoc/heap.rs`
+ - `box_free`: `liballoc/heap.rs`
+- Operands
+ - `not`: `libcore/ops/bit.rs`
+ - `bitand`: `libcore/ops/bit.rs`
+ - `bitor`: `libcore/ops/bit.rs`
+ - `bitxor`: `libcore/ops/bit.rs`
+ - `shl`: `libcore/ops/bit.rs`
+ - `shr`: `libcore/ops/bit.rs`
+ - `bitand_assign`: `libcore/ops/bit.rs`
+ - `bitor_assign`: `libcore/ops/bit.rs`
+ - `bitxor_assign`: `libcore/ops/bit.rs`
+ - `shl_assign`: `libcore/ops/bit.rs`
+ - `shr_assign`: `libcore/ops/bit.rs`
+ - `deref`: `libcore/ops/deref.rs`
+ - `deref_mut`: `libcore/ops/deref.rs`
+ - `index`: `libcore/ops/index.rs`
+ - `index_mut`: `libcore/ops/index.rs`
+ - `add`: `libcore/ops/arith.rs`
+ - `sub`: `libcore/ops/arith.rs`
+ - `mul`: `libcore/ops/arith.rs`
+ - `div`: `libcore/ops/arith.rs`
+ - `rem`: `libcore/ops/arith.rs`
+ - `neg`: `libcore/ops/arith.rs`
+ - `add_assign`: `libcore/ops/arith.rs`
+ - `sub_assign`: `libcore/ops/arith.rs`
+ - `mul_assign`: `libcore/ops/arith.rs`
+ - `div_assign`: `libcore/ops/arith.rs`
+ - `rem_assign`: `libcore/ops/arith.rs`
+ - `eq`: `libcore/cmp.rs`
+ - `ord`: `libcore/cmp.rs`
+- Functions
+ - `fn`: `libcore/ops/function.rs`
+ - `fn_mut`: `libcore/ops/function.rs`
+ - `fn_once`: `libcore/ops/function.rs`
+ - `generator_state`: `libcore/ops/generator.rs`
+ - `generator`: `libcore/ops/generator.rs`
+- Other
+ - `coerce_unsized`: `libcore/ops/unsize.rs`
+ - `drop`: `libcore/ops/drop.rs`
+ - `drop_in_place`: `libcore/ptr.rs`
+ - `clone`: `libcore/clone.rs`
+ - `copy`: `libcore/marker.rs`
+ - `send`: `libcore/marker.rs`
+ - `sized`: `libcore/marker.rs`
+ - `unsize`: `libcore/marker.rs`
+ - `sync`: `libcore/marker.rs`
+ - `phantom_data`: `libcore/marker.rs`
+ - `discriminant_kind`: `libcore/marker.rs`
+ - `freeze`: `libcore/marker.rs`
+ - `debug_trait`: `libcore/fmt/mod.rs`
+ - `non_zero`: `libcore/nonzero.rs`
+ - `arc`: `liballoc/sync.rs`
+ - `rc`: `liballoc/rc.rs`
+"##,
+ },
+ Lint {
+ label: "libstd_sys_internals",
+ description: r##"# `libstd_sys_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "libstd_thread_internals",
+ description: r##"# `libstd_thread_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "link_cfg",
+ description: r##"# `link_cfg`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "llvm_asm",
+ description: r##"# `llvm_asm`
+
+The tracking issue for this feature is: [#70173]
+
+[#70173]: https://github.com/rust-lang/rust/issues/70173
+
+------------------------
+
+For extremely low-level manipulations and performance reasons, one
+might wish to control the CPU directly. Rust supports using inline
+assembly to do this via the `llvm_asm!` macro.
+
+```rust,ignore (pseudo-code)
+llvm_asm!(assembly template
+ : output operands
+ : input operands
+ : clobbers
+ : options
+ );
+```
+
+Any use of `llvm_asm` is feature gated (requires `#![feature(llvm_asm)]` on the
+crate to allow) and of course requires an `unsafe` block.
+
+> **Note**: the examples here are given in x86/x86-64 assembly, but
+> all platforms are supported.
+
+## Assembly template
+
+The `assembly template` is the only required parameter and must be a
+literal string (i.e. `""`)
+
+```rust
+#![feature(llvm_asm)]
+
+#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+fn foo() {
+ unsafe {
+ llvm_asm!("NOP");
+ }
+}
+
+// Other platforms:
+#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+fn foo() { /* ... */ }
+
+fn main() {
+ // ...
+ foo();
+ // ...
+}
+```
+
+(The `feature(llvm_asm)` and `#[cfg]`s are omitted from now on.)
+
+Output operands, input operands, clobbers and options are all optional
+but you must add the right number of `:` if you skip them:
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# fn main() { unsafe {
+llvm_asm!("xor %eax, %eax"
+ :
+ :
+ : "eax"
+ );
+# } }
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn main() {}
+```
+
+Whitespace also doesn't matter:
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# fn main() { unsafe {
+llvm_asm!("xor %eax, %eax" ::: "eax");
+# } }
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn main() {}
+```
+
+## Operands
+
+Input and output operands follow the same format: `:
+"constraints1"(expr1), "constraints2"(expr2), ..."`. Output operand
+expressions must be mutable places, or not yet assigned:
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+fn add(a: i32, b: i32) -> i32 {
+ let c: i32;
+ unsafe {
+ llvm_asm!("add $2, $0"
+ : "=r"(c)
+ : "0"(a), "r"(b)
+ );
+ }
+ c
+}
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn add(a: i32, b: i32) -> i32 { a + b }
+
+fn main() {
+ assert_eq!(add(3, 14159), 14162)
+}
+```
+
+If you would like to use real operands in this position, however,
+you are required to put curly braces `{}` around the register that
+you want, and you are required to put the specific size of the
+operand. This is useful for very low level programming, where
+which register you use is important:
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# unsafe fn read_byte_in(port: u16) -> u8 {
+let result: u8;
+llvm_asm!("in %dx, %al" : "={al}"(result) : "{dx}"(port));
+result
+# }
+```
+
+## Clobbers
+
+Some instructions modify registers which might otherwise have held
+different values so we use the clobbers list to indicate to the
+compiler not to assume any values loaded into those registers will
+stay valid.
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# fn main() { unsafe {
+// Put the value 0x200 in eax:
+llvm_asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "eax");
+# } }
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn main() {}
+```
+
+Input and output registers need not be listed since that information
+is already communicated by the given constraints. Otherwise, any other
+registers used either implicitly or explicitly should be listed.
+
+If the assembly changes the condition code register `cc` should be
+specified as one of the clobbers. Similarly, if the assembly modifies
+memory, `memory` should also be specified.
+
+## Options
+
+The last section, `options` is specific to Rust. The format is comma
+separated literal strings (i.e. `:"foo", "bar", "baz"`). It's used to
+specify some extra info about the inline assembly:
+
+Current valid options are:
+
+1. `volatile` - specifying this is analogous to
+ `__asm__ __volatile__ (...)` in gcc/clang.
+2. `alignstack` - certain instructions expect the stack to be
+ aligned a certain way (i.e. SSE) and specifying this indicates to
+ the compiler to insert its usual stack alignment code
+3. `intel` - use Intel syntax instead of the default AT&T.
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# fn main() {
+let result: i32;
+unsafe {
+ llvm_asm!("mov eax, 2" : "={eax}"(result) : : : "intel")
+}
+println!("eax is currently {}", result);
+# }
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn main() {}
+```
+
+## More Information
+
+The current implementation of the `llvm_asm!` macro is a direct binding to [LLVM's
+inline assembler expressions][llvm-docs], so be sure to check out [their
+documentation as well][llvm-docs] for more information about clobbers,
+constraints, etc.
+
+[llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions
+"##,
+ },
+ Lint {
+ label: "marker_trait_attr",
+ description: r##"# `marker_trait_attr`
+
+The tracking issue for this feature is: [#29864]
+
+[#29864]: https://github.com/rust-lang/rust/issues/29864
+
+------------------------
+
+Normally, Rust keeps you from adding trait implementations that could
+overlap with each other, as it would be ambiguous which to use. This
+feature, however, carves out an exception to that rule: a trait can
+opt-in to having overlapping implementations, at the cost that those
+implementations are not allowed to override anything (and thus the
+trait itself cannot have any associated items, as they're pointless
+when they'd need to do the same thing for every type anyway).
+
+```rust
+#![feature(marker_trait_attr)]
+
+#[marker] trait CheapToClone: Clone {}
+
+impl<T: Copy> CheapToClone for T {}
+
+// These could potentially overlap with the blanket implementation above,
+// so are only allowed because CheapToClone is a marker trait.
+impl<T: CheapToClone, U: CheapToClone> CheapToClone for (T, U) {}
+impl<T: CheapToClone> CheapToClone for std::ops::Range<T> {}
+
+fn cheap_clone<T: CheapToClone>(t: T) -> T {
+ t.clone()
+}
+```
+
+This is expected to replace the unstable `overlapping_marker_traits`
+feature, which applied to all empty traits (without needing an opt-in).
+"##,
+ },
+ Lint {
+ label: "more_qualified_paths",
+ description: r##"# `more_qualified_paths`
+
+The `more_qualified_paths` feature can be used in order to enable the
+use of qualified paths in patterns.
+
+## Example
+
+```rust
+#![feature(more_qualified_paths)]
+
+fn main() {
+ // destructure through a qualified path
+ let <Foo as A>::Assoc { br } = StructStruct { br: 2 };
+}
+
+struct StructStruct {
+ br: i8,
+}
+
+struct Foo;
+
+trait A {
+ type Assoc;
+}
+
+impl A for Foo {
+ type Assoc = StructStruct;
+}
+```
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers",
+ description: r##"# `native_link_modifiers`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers` feature allows you to use the `modifiers` syntax with the `#[link(..)]` attribute.
+
+Modifiers are specified as a comma-delimited string with each modifier prefixed with either a `+` or `-` to indicate that the modifier is enabled or disabled, respectively. The last boolean value specified for a given modifier wins.
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers_as_needed",
+ description: r##"# `native_link_modifiers_as_needed`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers_as_needed` feature allows you to use the `as-needed` modifier.
+
+`as-needed` is only compatible with the `dynamic` and `framework` linking kinds. Using any other kind will result in a compiler error.
+
+`+as-needed` means that the library will be actually linked only if it satisfies some undefined symbols at the point at which it is specified on the command line, making it similar to static libraries in this regard.
+
+This modifier translates to `--as-needed` for ld-like linkers, and to `-dead_strip_dylibs` / `-needed_library` / `-needed_framework` for ld64.
+The modifier does nothing for linkers that don't support it (e.g. `link.exe`).
+
+The default for this modifier is unclear, some targets currently specify it as `+as-needed`, some do not. We may want to try making `+as-needed` a default for all targets.
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers_bundle",
+ description: r##"# `native_link_modifiers_bundle`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers_bundle` feature allows you to use the `bundle` modifier.
+
+Only compatible with the `static` linking kind. Using any other kind will result in a compiler error.
+
+`+bundle` means objects from the static library are bundled into the produced crate (a rlib, for example) and are used from this crate later during linking of the final binary.
+
+`-bundle` means the static library is included into the produced rlib "by name" and object files from it are included only during linking of the final binary, the file search by that name is also performed during final linking.
+
+This modifier is supposed to supersede the `static-nobundle` linking kind defined by [RFC 1717](https://github.com/rust-lang/rfcs/pull/1717).
+
+The default for this modifier is currently `+bundle`, but it could be changed later on some future edition boundary.
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers_verbatim",
+ description: r##"# `native_link_modifiers_verbatim`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers_verbatim` feature allows you to use the `verbatim` modifier.
+
+`+verbatim` means that rustc itself won't add any target-specified library prefixes or suffixes (like `lib` or `.a`) to the library name, and will try its best to ask for the same thing from the linker.
+
+For `ld`-like linkers rustc will use the `-l:filename` syntax (note the colon) when passing the library, so the linker won't add any prefixes or suffixes as well.
+See [`-l namespec`](https://sourceware.org/binutils/docs/ld/Options.html) in ld documentation for more details.
+For linkers not supporting any verbatim modifiers (e.g. `link.exe` or `ld64`) the library name will be passed as is.
+
+The default for this modifier is `-verbatim`.
+
+This RFC changes the behavior of `raw-dylib` linking kind specified by [RFC 2627](https://github.com/rust-lang/rfcs/pull/2627). The `.dll` suffix (or other target-specified suffixes for other targets) is now added automatically.
+If your DLL doesn't have the `.dll` suffix, it can be specified with `+verbatim`.
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers_whole_archive",
+ description: r##"# `native_link_modifiers_whole_archive`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers_whole_archive` feature allows you to use the `whole-archive` modifier.
+
+Only compatible with the `static` linking kind. Using any other kind will result in a compiler error.
+
+`+whole-archive` means that the static library is linked as a whole archive without throwing any object files away.
+
+This modifier translates to `--whole-archive` for `ld`-like linkers, to `/WHOLEARCHIVE` for `link.exe`, and to `-force_load` for `ld64`.
+The modifier does nothing for linkers that don't support it.
+
+The default for this modifier is `-whole-archive`.
+"##,
+ },
+ Lint {
+ label: "negative_impls",
+ description: r##"# `negative_impls`
+
+The tracking issue for this feature is [#68318].
+
+[#68318]: https://github.com/rust-lang/rust/issues/68318
+
+----
+
+With the feature gate `negative_impls`, you can write negative impls as well as positive ones:
+
+```rust
+#![feature(negative_impls)]
+trait DerefMut { }
+impl<T: ?Sized> !DerefMut for &T { }
+```
+
+Negative impls indicate a semver guarantee that the given trait will not be implemented for the given types. Negative impls play an additional purpose for auto traits, described below.
+
+Negative impls have the following characteristics:
+
+* They do not have any items.
+* They must obey the orphan rules as if they were a positive impl.
+* They cannot "overlap" with any positive impls.
+
+## Semver interaction
+
+It is a breaking change to remove a negative impl. Negative impls are a commitment not to implement the given trait for the named types.
+
+## Orphan and overlap rules
+
+Negative impls must obey the same orphan rules as a positive impl. This implies you cannot add a negative impl for types defined in upstream crates and so forth.
+
+Similarly, negative impls cannot overlap with positive impls, again using the same "overlap" check that we ordinarily use to determine if two impls overlap. (Note that positive impls typically cannot overlap with one another either, except as permitted by specialization.)
+
+## Interaction with auto traits
+
+Declaring a negative impl `impl !SomeAutoTrait for SomeType` for an
+auto-trait serves two purposes:
+
+* as with any trait, it declares that `SomeType` will never implement `SomeAutoTrait`;
+* it disables the automatic `SomeType: SomeAutoTrait` impl that would otherwise have been generated.
+
+Note that, at present, there is no way to indicate that a given type
+does not implement an auto trait *but that it may do so in the
+future*. For ordinary types, this is done by simply not declaring any
+impl at all, but that is not an option for auto traits. A workaround
+is that one could embed a marker type as one of the fields, where the
+marker type is `!AutoTrait`.
+
+## Immediate uses
+
+Negative impls are used to declare that `&T: !DerefMut` and `&mut T: !Clone`, as required to fix the soundness of `Pin` described in [#66544](https://github.com/rust-lang/rust/issues/66544).
+
+This serves two purposes:
+
+* For proving the correctness of unsafe code, we can use that impl as evidence that no `DerefMut` or `Clone` impl exists.
+* It prevents downstream crates from creating such impls.
+"##,
+ },
+ Lint {
+ label: "no_coverage",
+ description: r##"# `no_coverage`
+
+The tracking issue for this feature is: [#84605]
+
+[#84605]: https://github.com/rust-lang/rust/issues/84605
+
+---
+
+The `no_coverage` attribute can be used to selectively disable coverage
+instrumentation in an annotated function. This might be useful to:
+
+- Avoid instrumentation overhead in a performance critical function
+- Avoid generating coverage for a function that is not meant to be executed,
+ but still target 100% coverage for the rest of the program.
+
+## Example
+
+```rust
+#![feature(no_coverage)]
+
+// `foo()` will get coverage instrumentation (by default)
+fn foo() {
+ // ...
+}
+
+#[no_coverage]
+fn bar() {
+ // ...
+}
+```
+"##,
+ },
+ Lint {
+ label: "no_sanitize",
+ description: r##"# `no_sanitize`
+
+The tracking issue for this feature is: [#39699]
+
+[#39699]: https://github.com/rust-lang/rust/issues/39699
+
+------------------------
+
+The `no_sanitize` attribute can be used to selectively disable sanitizer
+instrumentation in an annotated function. This might be useful to: avoid
+instrumentation overhead in a performance critical function, or avoid
+instrumenting code that contains constructs unsupported by given sanitizer.
+
+The precise effect of this annotation depends on particular sanitizer in use.
+For example, with `no_sanitize(thread)`, the thread sanitizer will no longer
+instrument non-atomic store / load operations, but it will instrument atomic
+operations to avoid reporting false positives and provide meaningful stack
+traces.
+
+## Examples
+
+``` rust
+#![feature(no_sanitize)]
+
+#[no_sanitize(address)]
+fn foo() {
+ // ...
+}
+```
+"##,
+ },
+ Lint {
+ label: "plugin",
+ description: r##"# `plugin`
+
+The tracking issue for this feature is: [#29597]
+
+[#29597]: https://github.com/rust-lang/rust/issues/29597
+
+
+This feature is part of "compiler plugins." It will often be used with the
+`rustc_private` feature.
+
+------------------------
+
+`rustc` can load compiler plugins, which are user-provided libraries that
+extend the compiler's behavior with new lint checks, etc.
+
+A plugin is a dynamic library crate with a designated *registrar* function that
+registers extensions with `rustc`. Other crates can load these extensions using
+the crate attribute `#![plugin(...)]`. See the
+`rustc_driver::plugin` documentation for more about the
+mechanics of defining and loading a plugin.
+
+In the vast majority of cases, a plugin should *only* be used through
+`#![plugin]` and not through an `extern crate` item. Linking a plugin would
+pull in all of librustc_ast and librustc as dependencies of your crate. This is
+generally unwanted unless you are building another plugin.
+
+The usual practice is to put compiler plugins in their own crate, separate from
+any `macro_rules!` macros or ordinary Rust code meant to be used by consumers
+of a library.
+
+# Lint plugins
+
+Plugins can extend [Rust's lint
+infrastructure](../../reference/attributes/diagnostics.md#lint-check-attributes) with
+additional checks for code style, safety, etc. Now let's write a plugin
+[`lint-plugin-test.rs`](https://github.com/rust-lang/rust/blob/master/src/test/ui-fulldeps/auxiliary/lint-plugin-test.rs)
+that warns about any item named `lintme`.
+
+```rust,ignore (requires-stage-2)
+#![feature(box_syntax, rustc_private)]
+
+extern crate rustc_ast;
+
+// Load rustc as a plugin to get macros
+extern crate rustc_driver;
+#[macro_use]
+extern crate rustc_lint;
+#[macro_use]
+extern crate rustc_session;
+
+use rustc_driver::plugin::Registry;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintArray, LintContext, LintPass};
+use rustc_ast::ast;
+declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'");
+
+declare_lint_pass!(Pass => [TEST_LINT]);
+
+impl EarlyLintPass for Pass {
+ fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
+ if it.ident.name.as_str() == "lintme" {
+ cx.lint(TEST_LINT, |lint| {
+ lint.build("item is named 'lintme'").set_span(it.span).emit()
+ });
+ }
+ }
+}
+
+#[no_mangle]
+fn __rustc_plugin_registrar(reg: &mut Registry) {
+ reg.lint_store.register_lints(&[&TEST_LINT]);
+ reg.lint_store.register_early_pass(|| box Pass);
+}
+```
+
+Then code like
+
+```rust,ignore (requires-plugin)
+#![feature(plugin)]
+#![plugin(lint_plugin_test)]
+
+fn lintme() { }
+```
+
+will produce a compiler warning:
+
+```txt
+foo.rs:4:1: 4:16 warning: item is named 'lintme', #[warn(test_lint)] on by default
+foo.rs:4 fn lintme() { }
+ ^~~~~~~~~~~~~~~
+```
+
+The components of a lint plugin are:
+
+* one or more `declare_lint!` invocations, which define static `Lint` structs;
+
+* a struct holding any state needed by the lint pass (here, none);
+
+* a `LintPass`
+ implementation defining how to check each syntax element. A single
+ `LintPass` may call `span_lint` for several different `Lint`s, but should
+ register them all through the `get_lints` method.
+
+Lint passes are syntax traversals, but they run at a late stage of compilation
+where type information is available. `rustc`'s [built-in
+lints](https://github.com/rust-lang/rust/blob/master/src/librustc_session/lint/builtin.rs)
+mostly use the same infrastructure as lint plugins, and provide examples of how
+to access type information.
+
+Lints defined by plugins are controlled by the usual [attributes and compiler
+flags](../../reference/attributes/diagnostics.md#lint-check-attributes), e.g.
+`#[allow(test_lint)]` or `-A test-lint`. These identifiers are derived from the
+first argument to `declare_lint!`, with appropriate case and punctuation
+conversion.
+
+You can run `rustc -W help foo.rs` to see a list of lints known to `rustc`,
+including those provided by plugins loaded by `foo.rs`.
+"##,
+ },
+ Lint {
+ label: "print_internals",
+ description: r##"# `print_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "profiler_runtime",
+ description: r##"# `profiler_runtime`
+
+The tracking issue for this feature is: [#42524](https://github.com/rust-lang/rust/issues/42524).
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "profiler_runtime_lib",
+ description: r##"# `profiler_runtime_lib`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_dylib",
+ description: r##"# `raw_dylib`
+
+The tracking issue for this feature is: [#58713]
+
+[#58713]: https://github.com/rust-lang/rust/issues/58713
+
+------------------------
+
+The `raw_dylib` feature allows you to link against the implementations of functions in an `extern`
+block without, on Windows, linking against an import library.
+
+```rust,ignore (partial-example)
+#![feature(raw_dylib)]
+
+#[link(name="library", kind="raw-dylib")]
+extern {
+ fn extern_function(x: i32);
+}
+
+fn main() {
+ unsafe {
+ extern_function(14);
+ }
+}
+```
+
+## Limitations
+
+Currently, this feature is only supported on `-windows-msvc` targets. Non-Windows platforms don't have import
+libraries, and an incompatibility between LLVM and the BFD linker means that it is not currently supported on
+`-windows-gnu` targets.
+
+On the `i686-pc-windows-msvc` target, this feature supports only the `cdecl`, `stdcall`, `system`, and `fastcall`
+calling conventions.
+"##,
+ },
+ Lint {
+ label: "repr128",
+ description: r##"# `repr128`
+
+The tracking issue for this feature is: [#56071]
+
+[#56071]: https://github.com/rust-lang/rust/issues/56071
+
+------------------------
+
+The `repr128` feature adds support for `#[repr(u128)]` on `enum`s.
+
+```rust
+#![feature(repr128)]
+
+#[repr(u128)]
+enum Foo {
+ Bar(u64),
+}
+```
+"##,
+ },
+ Lint {
+ label: "rt",
+ description: r##"# `rt`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rustc_attrs",
+ description: r##"# `rustc_attrs`
+
+This feature has no tracking issue, and is therefore internal to
+the compiler, not being intended for general use.
+
+Note: `rustc_attrs` enables many rustc-internal attributes and this page
+only discusses a few of them.
+
+------------------------
+
+The `rustc_attrs` feature allows debugging rustc type layouts by using
+`#[rustc_layout(...)]` to debug layout at compile time (it even works
+with `cargo check`) as an alternative to `rustc -Z print-type-sizes`
+that is way more verbose.
+
+Options provided by `#[rustc_layout(...)]` are `debug`, `size`, `align`,
+`abi`. Note that it only works on sized types without generics.
+
+## Examples
+
+```rust,compile_fail
+#![feature(rustc_attrs)]
+
+#[rustc_layout(abi, size)]
+pub enum X {
+ Y(u8, u8, u8),
+ Z(isize),
+}
+```
+
+When that is compiled, the compiler will error with something like
+
+```text
+error: abi: Aggregate { sized: true }
+ --> src/lib.rs:4:1
+ |
+4 | / pub enum T {
+5 | | Y(u8, u8, u8),
+6 | | Z(isize),
+7 | | }
+ | |_^
+
+error: size: Size { raw: 16 }
+ --> src/lib.rs:4:1
+ |
+4 | / pub enum T {
+5 | | Y(u8, u8, u8),
+6 | | Z(isize),
+7 | | }
+ | |_^
+
+error: aborting due to 2 previous errors
+```
+"##,
+ },
+ Lint {
+ label: "sort_internals",
+ description: r##"# `sort_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "str_internals",
+ description: r##"# `str_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "test",
+ description: r##"# `test`
+
+The tracking issue for this feature is: None.
+
+------------------------
+
+The internals of the `test` crate are unstable, behind the `test` flag. The
+most widely used part of the `test` crate are benchmark tests, which can test
+the performance of your code. Let's make our `src/lib.rs` look like this
+(comments elided):
+
+```rust,no_run
+#![feature(test)]
+
+extern crate test;
+
+pub fn add_two(a: i32) -> i32 {
+ a + 2
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use test::Bencher;
+
+ #[test]
+ fn it_works() {
+ assert_eq!(4, add_two(2));
+ }
+
+ #[bench]
+ fn bench_add_two(b: &mut Bencher) {
+ b.iter(|| add_two(2));
+ }
+}
+```
+
+Note the `test` feature gate, which enables this unstable feature.
+
+We've imported the `test` crate, which contains our benchmarking support.
+We have a new function as well, with the `bench` attribute. Unlike regular
+tests, which take no arguments, benchmark tests take a `&mut Bencher`. This
+`Bencher` provides an `iter` method, which takes a closure. This closure
+contains the code we'd like to benchmark.
+
+We can run benchmark tests with `cargo bench`:
+
+```bash
+$ cargo bench
+ Compiling adder v0.0.1 (file:///home/steve/tmp/adder)
+ Running target/release/adder-91b3e234d4ed382a
+
+running 2 tests
+test tests::it_works ... ignored
+test tests::bench_add_two ... bench: 1 ns/iter (+/- 0)
+
+test result: ok. 0 passed; 0 failed; 1 ignored; 1 measured
+```
+
+Our non-benchmark test was ignored. You may have noticed that `cargo bench`
+takes a bit longer than `cargo test`. This is because Rust runs our benchmark
+a number of times, and then takes the average. Because we're doing so little
+work in this example, we have a `1 ns/iter (+/- 0)`, but this would show
+the variance if there was one.
+
+Advice on writing benchmarks:
+
+
+* Move setup code outside the `iter` loop; only put the part you want to measure inside
+* Make the code do "the same thing" on each iteration; do not accumulate or change state
+* Make the outer function idempotent too; the benchmark runner is likely to run
+ it many times
+* Make the inner `iter` loop short and fast so benchmark runs are fast and the
+ calibrator can adjust the run-length at fine resolution
+* Make the code in the `iter` loop do something simple, to assist in pinpointing
+ performance improvements (or regressions)
+
+## Gotcha: optimizations
+
+There's another tricky part to writing benchmarks: benchmarks compiled with
+optimizations activated can be dramatically changed by the optimizer so that
+the benchmark is no longer benchmarking what one expects. For example, the
+compiler might recognize that some calculation has no external effects and
+remove it entirely.
+
+```rust,no_run
+#![feature(test)]
+
+extern crate test;
+use test::Bencher;
+
+#[bench]
+fn bench_xor_1000_ints(b: &mut Bencher) {
+ b.iter(|| {
+ (0..1000).fold(0, |old, new| old ^ new);
+ });
+}
+```
+
+gives the following results
+
+```text
+running 1 test
+test bench_xor_1000_ints ... bench: 0 ns/iter (+/- 0)
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
+```
+
+The benchmarking runner offers two ways to avoid this. Either, the closure that
+the `iter` method receives can return an arbitrary value which forces the
+optimizer to consider the result used and ensures it cannot remove the
+computation entirely. This could be done for the example above by adjusting the
+`b.iter` call to
+
+```rust
+# struct X;
+# impl X { fn iter<T, F>(&self, _: F) where F: FnMut() -> T {} } let b = X;
+b.iter(|| {
+ // Note lack of `;` (could also use an explicit `return`).
+ (0..1000).fold(0, |old, new| old ^ new)
+});
+```
+
+Or, the other option is to call the generic `test::black_box` function, which
+is an opaque "black box" to the optimizer and so forces it to consider any
+argument as used.
+
+```rust
+#![feature(test)]
+
+extern crate test;
+
+# fn main() {
+# struct X;
+# impl X { fn iter<T, F>(&self, _: F) where F: FnMut() -> T {} } let b = X;
+b.iter(|| {
+ let n = test::black_box(1000);
+
+ (0..n).fold(0, |a, b| a ^ b)
+})
+# }
+```
+
+Neither of these read or modify the value, and are very cheap for small values.
+Larger values can be passed indirectly to reduce overhead (e.g.
+`black_box(&huge_struct)`).
+
+Performing either of the above changes gives the following benchmarking results
+
+```text
+running 1 test
+test bench_xor_1000_ints ... bench: 131 ns/iter (+/- 3)
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
+```
+
+However, the optimizer can still modify a testcase in an undesirable manner
+even when using either of the above.
+"##,
+ },
+ Lint {
+ label: "thread_local_internals",
+ description: r##"# `thread_local_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trace_macros",
+ description: r##"# `trace_macros`
+
+The tracking issue for this feature is [#29598].
+
+[#29598]: https://github.com/rust-lang/rust/issues/29598
+
+------------------------
+
+With `trace_macros` you can trace the expansion of macros in your code.
+
+## Examples
+
+```rust
+#![feature(trace_macros)]
+
+fn main() {
+ trace_macros!(true);
+ println!("Hello, Rust!");
+ trace_macros!(false);
+}
+```
+
+The `cargo build` output:
+
+```txt
+note: trace_macro
+ --> src/main.rs:5:5
+ |
+5 | println!("Hello, Rust!");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: expanding `println! { "Hello, Rust!" }`
+ = note: to `print ! ( concat ! ( "Hello, Rust!" , "\n" ) )`
+ = note: expanding `print! { concat ! ( "Hello, Rust!" , "\n" ) }`
+ = note: to `$crate :: io :: _print ( format_args ! ( concat ! ( "Hello, Rust!" , "\n" ) )
+ )`
+
+ Finished dev [unoptimized + debuginfo] target(s) in 0.60 secs
+```
+"##,
+ },
+ Lint {
+ label: "trait_alias",
+ description: r##"# `trait_alias`
+
+The tracking issue for this feature is: [#41517]
+
+[#41517]: https://github.com/rust-lang/rust/issues/41517
+
+------------------------
+
+The `trait_alias` feature adds support for trait aliases. These allow aliases
+to be created for one or more traits (currently just a single regular trait plus
+any number of auto-traits), and used wherever traits would normally be used as
+either bounds or trait objects.
+
+```rust
+#![feature(trait_alias)]
+
+trait Foo = std::fmt::Debug + Send;
+trait Bar = Foo + Sync;
+
+// Use trait alias as bound on type parameter.
+fn foo<T: Foo>(v: &T) {
+ println!("{:?}", v);
+}
+
+pub fn main() {
+ foo(&1);
+
+ // Use trait alias for trait objects.
+ let a: &Bar = &123;
+ println!("{:?}", a);
+ let b = Box::new(456) as Box<dyn Foo>;
+ println!("{:?}", b);
+}
+```
+"##,
+ },
+ Lint {
+ label: "trait_upcasting",
+ description: r##"# `trait_upcasting`
+
+The tracking issue for this feature is: [#65991]
+
+[#65991]: https://github.com/rust-lang/rust/issues/65991
+
+------------------------
+
+The `trait_upcasting` feature adds support for trait upcasting coercion. This allows a
+trait object of type `dyn Bar` to be cast to a trait object of type `dyn Foo`
+so long as `Bar: Foo`.
+
+```rust,edition2018
+#![feature(trait_upcasting)]
+#![allow(incomplete_features)]
+
+trait Foo {}
+
+trait Bar: Foo {}
+
+impl Foo for i32 {}
+
+impl<T: Foo + ?Sized> Bar for T {}
+
+let bar: &dyn Bar = &123;
+let foo: &dyn Foo = bar;
+```
+"##,
+ },
+ Lint {
+ label: "transparent_unions",
+ description: r##"# `transparent_unions`
+
+The tracking issue for this feature is [#60405]
+
+[#60405]: https://github.com/rust-lang/rust/issues/60405
+
+----
+
+The `transparent_unions` feature allows you to mark `union`s as
+`#[repr(transparent)]`. A `union` may be `#[repr(transparent)]` in exactly the
+same conditions in which a `struct` may be `#[repr(transparent)]` (generally,
+this means the `union` must have exactly one non-zero-sized field). Some
+concrete illustrations follow.
+
+```rust
+#![feature(transparent_unions)]
+
+// This union has the same representation as `f32`.
+#[repr(transparent)]
+union SingleFieldUnion {
+ field: f32,
+}
+
+// This union has the same representation as `usize`.
+#[repr(transparent)]
+union MultiFieldUnion {
+ field: usize,
+ nothing: (),
+}
+```
+
+For consistency with transparent `struct`s, `union`s must have exactly one
+non-zero-sized field. If all fields are zero-sized, the `union` must not be
+`#[repr(transparent)]`:
+
+```rust
+#![feature(transparent_unions)]
+
+// This (non-transparent) union is already valid in stable Rust:
+pub union GoodUnion {
+ pub nothing: (),
+}
+
+// Error: transparent union needs exactly one non-zero-sized field, but has 0
+// #[repr(transparent)]
+// pub union BadUnion {
+// pub nothing: (),
+// }
+```
+
+The one exception is if the `union` is generic over `T` and has a field of type
+`T`, it may be `#[repr(transparent)]` even if `T` is a zero-sized type:
+
+```rust
+#![feature(transparent_unions)]
+
+// This union has the same representation as `T`.
+#[repr(transparent)]
+pub union GenericUnion<T: Copy> { // Unions with non-`Copy` fields are unstable.
+ pub field: T,
+ pub nothing: (),
+}
+
+// This is okay even though `()` is a zero-sized type.
+pub const THIS_IS_OKAY: GenericUnion<()> = GenericUnion { field: () };
+```
+
+Like transparent `struct`s, a transparent `union` of type `U` has the same
+layout, size, and ABI as its single non-ZST field. If it is generic over a type
+`T`, and all its fields are ZSTs except for exactly one field of type `T`, then
+it has the same layout and ABI as `T` (even if `T` is a ZST when monomorphized).
+
+Like transparent `struct`s, transparent `union`s are FFI-safe if and only if
+their underlying representation type is also FFI-safe.
+
+A `union` may not be eligible for the same nonnull-style optimizations that a
+`struct` or `enum` (with the same fields) are eligible for. Adding
+`#[repr(transparent)]` to `union` does not change this. To give a more concrete
+example, it is unspecified whether `size_of::<T>()` is equal to
+`size_of::<Option<T>>()`, where `T` is a `union` (regardless of whether or not
+it is transparent). The Rust compiler is free to perform this optimization if
+possible, but is not required to, and different compiler versions may differ in
+their application of these optimizations.
+"##,
+ },
+ Lint {
+ label: "try_blocks",
+ description: r##"# `try_blocks`
+
+The tracking issue for this feature is: [#31436]
+
+[#31436]: https://github.com/rust-lang/rust/issues/31436
+
+------------------------
+
+The `try_blocks` feature adds support for `try` blocks. A `try`
+block creates a new scope one can use the `?` operator in.
+
+```rust,edition2018
+#![feature(try_blocks)]
+
+use std::num::ParseIntError;
+
+let result: Result<i32, ParseIntError> = try {
+ "1".parse::<i32>()?
+ + "2".parse::<i32>()?
+ + "3".parse::<i32>()?
+};
+assert_eq!(result, Ok(6));
+
+let result: Result<i32, ParseIntError> = try {
+ "1".parse::<i32>()?
+ + "foo".parse::<i32>()?
+ + "3".parse::<i32>()?
+};
+assert!(result.is_err());
+```
+"##,
+ },
+ Lint {
+ label: "type_changing_struct_update",
+ description: r##"# `type_changing_struct_update`
+
+The tracking issue for this feature is: [#86555]
+
+[#86555]: https://github.com/rust-lang/rust/issues/86555
+
+------------------------
+
+This implements [RFC2528]. When turned on, you can create instances of the same struct
+that have different generic type or lifetime parameters.
+
+[RFC2528]: https://github.com/rust-lang/rfcs/blob/master/text/2528-type-changing-struct-update-syntax.md
+
+```rust
+#![allow(unused_variables, dead_code)]
+#![feature(type_changing_struct_update)]
+
+fn main () {
+ struct Foo<T, U> {
+ field1: T,
+ field2: U,
+ }
+
+ let base: Foo<String, i32> = Foo {
+ field1: String::from("hello"),
+ field2: 1234,
+ };
+ let updated: Foo<f64, i32> = Foo {
+ field1: 3.14,
+ ..base
+ };
+}
+```
+"##,
+ },
+ Lint {
+ label: "unboxed_closures",
+ description: r##"# `unboxed_closures`
+
+The tracking issue for this feature is [#29625]
+
+See Also: [`fn_traits`](../library-features/fn-traits.md)
+
+[#29625]: https://github.com/rust-lang/rust/issues/29625
+
+----
+
+The `unboxed_closures` feature allows you to write functions using the `"rust-call"` ABI,
+required for implementing the [`Fn*`] family of traits. `"rust-call"` functions must have
+exactly one (non self) argument, a tuple representing the argument list.
+
+[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+
+```rust
+#![feature(unboxed_closures)]
+
+extern "rust-call" fn add_args(args: (u32, u32)) -> u32 {
+ args.0 + args.1
+}
+
+fn main() {}
+```
+"##,
+ },
+ Lint {
+ label: "unsized_locals",
+ description: r##"# `unsized_locals`
+
+The tracking issue for this feature is: [#48055]
+
+[#48055]: https://github.com/rust-lang/rust/issues/48055
+
+------------------------
+
+This implements [RFC1909]. When turned on, you can have unsized arguments and locals:
+
+[RFC1909]: https://github.com/rust-lang/rfcs/blob/master/text/1909-unsized-rvalues.md
+
+```rust
+#![allow(incomplete_features)]
+#![feature(unsized_locals, unsized_fn_params)]
+
+use std::any::Any;
+
+fn main() {
+ let x: Box<dyn Any> = Box::new(42);
+ let x: dyn Any = *x;
+ // ^ unsized local variable
+ // ^^ unsized temporary
+ foo(x);
+}
+
+fn foo(_: dyn Any) {}
+// ^^^^^^ unsized argument
+```
+
+The RFC still forbids the following unsized expressions:
+
+```rust,compile_fail
+#![feature(unsized_locals)]
+
+use std::any::Any;
+
+struct MyStruct<T: ?Sized> {
+ content: T,
+}
+
+struct MyTupleStruct<T: ?Sized>(T);
+
+fn answer() -> Box<dyn Any> {
+ Box::new(42)
+}
+
+fn main() {
+ // You CANNOT have unsized statics.
+ static X: dyn Any = *answer(); // ERROR
+ const Y: dyn Any = *answer(); // ERROR
+
+ // You CANNOT have struct initialized unsized.
+ MyStruct { content: *answer() }; // ERROR
+ MyTupleStruct(*answer()); // ERROR
+ (42, *answer()); // ERROR
+
+ // You CANNOT have unsized return types.
+ fn my_function() -> dyn Any { *answer() } // ERROR
+
+ // You CAN have unsized local variables...
+ let mut x: dyn Any = *answer(); // OK
+ // ...but you CANNOT reassign to them.
+ x = *answer(); // ERROR
+
+ // You CANNOT even initialize them separately.
+ let y: dyn Any; // OK
+ y = *answer(); // ERROR
+
+ // Not mentioned in the RFC, but by-move captured variables are also Sized.
+ let x: dyn Any = *answer();
+ (move || { // ERROR
+ let y = x;
+ })();
+
+ // You CAN create a closure with unsized arguments,
+ // but you CANNOT call it.
+ // This is an implementation detail and may be changed in the future.
+ let f = |x: dyn Any| {};
+ f(*answer()); // ERROR
+}
+```
+
+## By-value trait objects
+
+With this feature, you can have by-value `self` arguments without `Self: Sized` bounds.
+
+```rust
+#![feature(unsized_fn_params)]
+
+trait Foo {
+ fn foo(self) {}
+}
+
+impl<T: ?Sized> Foo for T {}
+
+fn main() {
+ let slice: Box<[i32]> = Box::new([1, 2, 3]);
+ <[i32] as Foo>::foo(*slice);
+}
+```
+
+And `Foo` will also be object-safe.
+
+```rust
+#![feature(unsized_fn_params)]
+
+trait Foo {
+ fn foo(self) {}
+}
+
+impl<T: ?Sized> Foo for T {}
+
+fn main () {
+ let slice: Box<dyn Foo> = Box::new([1, 2, 3]);
+ // doesn't compile yet
+ <dyn Foo as Foo>::foo(*slice);
+}
+```
+
+One of the objectives of this feature is to allow `Box<dyn FnOnce>`.
+
+## Variable length arrays
+
+The RFC also describes an extension to the array literal syntax: `[e; dyn n]`. In the syntax, `n` isn't necessarily a constant expression. The array is dynamically allocated on the stack and has the type of `[T]`, instead of `[T; n]`.
+
+```rust,ignore (not-yet-implemented)
+#![feature(unsized_locals)]
+
+fn mergesort<T: Ord>(a: &mut [T]) {
+ let mut tmp = [T; dyn a.len()];
+ // ...
+}
+
+fn main() {
+ let mut a = [3, 1, 5, 6];
+ mergesort(&mut a);
+ assert_eq!(a, [1, 3, 5, 6]);
+}
+```
+
+VLAs are not implemented yet. The syntax isn't final, either. We may need an alternative syntax for Rust 2015 because, in Rust 2015, expressions like `[e; dyn(1)]` would be ambiguous. One possible alternative proposed in the RFC is `[e; n]`: if `n` captures one or more local variables, then it is considered as `[e; dyn n]`.
+
+## Advisory on stack usage
+
+It's advised not to casually use the `#![feature(unsized_locals)]` feature. Typical use-cases are:
+
+- When you need by-value trait objects.
+- When you really need a fast allocation of small temporary arrays.
+
+Another pitfall is repetitive allocation and temporaries. Currently the compiler simply extends the stack frame every time it encounters an unsized assignment. So for example, the code
+
+```rust
+#![feature(unsized_locals)]
+
+fn main() {
+ let x: Box<[i32]> = Box::new([1, 2, 3, 4, 5]);
+ let _x = {{{{{{{{{{*x}}}}}}}}}};
+}
+```
+
+and the code
+
+```rust
+#![feature(unsized_locals)]
+
+fn main() {
+ for _ in 0..10 {
+ let x: Box<[i32]> = Box::new([1, 2, 3, 4, 5]);
+ let _x = *x;
+ }
+}
+```
+
+will unnecessarily extend the stack frame.
+"##,
+ },
+ Lint {
+ label: "unsized_tuple_coercion",
+ description: r##"# `unsized_tuple_coercion`
+
+The tracking issue for this feature is: [#42877]
+
+[#42877]: https://github.com/rust-lang/rust/issues/42877
+
+------------------------
+
+This is a part of [RFC0401]. According to the RFC, there should be an implementation like this:
+
+```rust,ignore (partial-example)
+impl<..., T, U: ?Sized> Unsized<(..., U)> for (..., T) where T: Unsized<U> {}
+```
+
+This implementation is currently gated behind `#[feature(unsized_tuple_coercion)]` to avoid insta-stability. Therefore you can use it like this:
+
+```rust
+#![feature(unsized_tuple_coercion)]
+
+fn main() {
+ let x : ([i32; 3], [i32; 3]) = ([1, 2, 3], [4, 5, 6]);
+ let y : &([i32; 3], [i32]) = &x;
+ assert_eq!(y.1[0], 4);
+}
+```
+
+[RFC0401]: https://github.com/rust-lang/rfcs/blob/master/text/0401-coercions.md
+"##,
+ },
+ Lint {
+ label: "update_panic_count",
+ description: r##"# `update_panic_count`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_c",
+ description: r##"# `windows_c`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_handle",
+ description: r##"# `windows_handle`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_net",
+ description: r##"# `windows_net`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_stdio",
+ description: r##"# `windows_stdio`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+];
+
+pub const CLIPPY_LINTS: &[Lint] = &[
+ Lint {
+ label: "clippy::absurd_extreme_comparisons",
+ description: r##"Checks for comparisons where one side of the relation is
+either the minimum or maximum value for its type and warns if it involves a
+case that is always true or always false. Only integer and boolean types are
+checked."##,
+ },
+ Lint {
+ label: "clippy::almost_swapped",
+ description: r##"Checks for `foo = bar; bar = foo` sequences."##,
+ },
+ Lint {
+ label: "clippy::approx_constant",
+ description: r##"Checks for floating point literals that approximate
+constants which are defined in
+[`std::f32::consts`](https://doc.rust-lang.org/stable/std/f32/consts/#constants)
+or
+[`std::f64::consts`](https://doc.rust-lang.org/stable/std/f64/consts/#constants),
+respectively, suggesting to use the predefined constant."##,
+ },
+ Lint {
+ label: "clippy::as_conversions",
+ description: r##"Checks for usage of `as` conversions.
+
+Note that this lint is specialized in linting *every single* use of `as`
+regardless of whether good alternatives exist or not.
+If you want more precise lints for `as`, please consider using these separate lints:
+`unnecessary_cast`, `cast_lossless/possible_truncation/possible_wrap/precision_loss/sign_loss`,
+`fn_to_numeric_cast(_with_truncation)`, `char_lit_as_u8`, `ref_to_mut` and `ptr_as_ptr`.
+There is a good explanation of the reason why this lint should work in this way and how it is useful
+[in this issue](https://github.com/rust-lang/rust-clippy/issues/5122)."##,
+ },
+ Lint {
+ label: "clippy::assertions_on_constants",
+ description: r##"Checks for `assert!(true)` and `assert!(false)` calls."##,
+ },
+ Lint {
+ label: "clippy::assign_op_pattern",
+ description: r##"Checks for `a = a op b` or `a = b commutative_op a`
+patterns."##,
+ },
+ Lint {
+ label: "clippy::assign_ops",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::async_yields_async",
+ description: r##"Checks for async blocks that yield values of types
+that can themselves be awaited."##,
+ },
+ Lint {
+ label: "clippy::await_holding_lock",
+ description: r##"Checks for calls to await while holding a
+non-async-aware MutexGuard."##,
+ },
+ Lint {
+ label: "clippy::await_holding_refcell_ref",
+ description: r##"Checks for calls to await while holding a
+`RefCell` `Ref` or `RefMut`."##,
+ },
+ Lint {
+ label: "clippy::bad_bit_mask",
+ description: r##"Checks for incompatible bit masks in comparisons.
+
+The formula for detecting if an expression of the type `_ <bit_op> m
+<cmp_op> c` (where `<bit_op>` is one of {`&`, `|`} and `<cmp_op>` is one of
+{`==`, `!=`, `<`, `>=`, `>`, `<=`}) can be determined from the following
+table:
+
+|Comparison |Bit Op|Example |is always|Formula |
+|------------|------|------------|---------|----------------------|
+|`==` or `!=`| `&` |`x & 2 == 3`|`false` |`c & m != c` |
+|`<` or `>=`| `&` |`x & 2 < 3` |`true` |`m < c` |
+|`>` or `<=`| `&` |`x & 1 > 1` |`false` |`m <= c` |
+|`==` or `!=`| `|` |`x | 1 == 0`|`false` |`c | m != c` |
+|`<` or `>=`| `|` |`x | 1 < 1` |`false` |`m >= c` |
+|`<=` or `>` | `|` |`x | 1 > 0` |`true` |`m > c` |"##,
+ },
+ Lint {
+ label: "clippy::bind_instead_of_map",
+ description: r##"Checks for usage of `_.and_then(|x| Some(y))`, `_.and_then(|x| Ok(y))` or
+`_.or_else(|x| Err(y))`."##,
+ },
+ Lint {
+ label: "clippy::blacklisted_name",
+ description: r##"Checks for usage of blacklisted names for variables, such
+as `foo`."##,
+ },
+ Lint {
+ label: "clippy::blanket_clippy_restriction_lints",
+ description: r##"Checks for `warn`/`deny`/`forbid` attributes targeting the whole clippy::restriction category."##,
+ },
+ Lint {
+ label: "clippy::blocks_in_if_conditions",
+ description: r##"Checks for `if` conditions that use blocks containing an
+expression, statements or conditions that use closures with blocks."##,
+ },
+ Lint {
+ label: "clippy::bool_assert_comparison",
+ description: r##"This lint warns about boolean comparisons in assert-like macros."##,
+ },
+ Lint {
+ label: "clippy::bool_comparison",
+ description: r##"Checks for expressions of the form `x == true`,
+`x != true` and order comparisons such as `x < true` (or vice versa) and
+suggest using the variable directly."##,
+ },
+ Lint {
+ label: "clippy::borrow_interior_mutable_const",
+ description: r##"Checks if `const` items which is interior mutable (e.g.,
+contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.) has been borrowed directly."##,
+ },
+ Lint {
+ label: "clippy::borrowed_box",
+ description: r##"Checks for use of `&Box<T>` anywhere in the code.
+Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
+ },
+ Lint {
+ label: "clippy::box_collection",
+ description: r##"Checks for use of `Box<T>` where T is a collection such as Vec anywhere in the code.
+Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
+ },
+ Lint {
+ label: "clippy::boxed_local",
+ description: r##"Checks for usage of `Box<T>` where an unboxed `T` would
+work fine."##,
+ },
+ Lint {
+ label: "clippy::branches_sharing_code",
+ description: r##"Checks if the `if` and `else` block contain shared code that can be
+moved out of the blocks."##,
+ },
+ Lint {
+ label: "clippy::builtin_type_shadow",
+ description: r##"Warns if a generic shadows a built-in type."##,
+ },
+ Lint {
+ label: "clippy::bytes_nth",
+ description: r##"Checks for the use of `.bytes().nth()`."##,
+ },
+ Lint {
+ label: "clippy::cargo_common_metadata",
+ description: r##"Checks to see if all common metadata is defined in
+`Cargo.toml`. See: https://rust-lang-nursery.github.io/api-guidelines/documentation.html#cargotoml-includes-all-common-metadata-c-metadata"##,
+ },
+ Lint {
+ label: "clippy::case_sensitive_file_extension_comparisons",
+ description: r##"Checks for calls to `ends_with` with possible file extensions
+and suggests to use a case-insensitive approach instead."##,
+ },
+ Lint {
+ label: "clippy::cast_lossless",
+ description: r##"Checks for casts between numerical types that may
+be replaced by safe conversion functions."##,
+ },
+ Lint {
+ label: "clippy::cast_possible_truncation",
+ description: r##"Checks for casts between numerical types that may
+truncate large values. This is expected behavior, so the cast is `Allow` by
+default."##,
+ },
+ Lint {
+ label: "clippy::cast_possible_wrap",
+ description: r##"Checks for casts from an unsigned type to a signed type of
+the same size. Performing such a cast is a 'no-op' for the compiler,
+i.e., nothing is changed at the bit level, and the binary representation of
+the value is reinterpreted. This can cause wrapping if the value is too big
+for the target signed type. However, the cast works as defined, so this lint
+is `Allow` by default."##,
+ },
+ Lint {
+ label: "clippy::cast_precision_loss",
+ description: r##"Checks for casts from any numerical to a float type where
+the receiving type cannot store all values from the original type without
+rounding errors. This possible rounding is to be expected, so this lint is
+`Allow` by default.
+
+Basically, this warns on casting any integer with 32 or more bits to `f32`
+or any 64-bit integer to `f64`."##,
+ },
+ Lint {
+ label: "clippy::cast_ptr_alignment",
+ description: r##"Checks for casts, using `as` or `pointer::cast`,
+from a less-strictly-aligned pointer to a more-strictly-aligned pointer"##,
+ },
+ Lint {
+ label: "clippy::cast_ref_to_mut",
+ description: r##"Checks for casts of `&T` to `&mut T` anywhere in the code."##,
+ },
+ Lint {
+ label: "clippy::cast_sign_loss",
+ description: r##"Checks for casts from a signed to an unsigned numerical
+type. In this case, negative values wrap around to large positive values,
+which can be quite surprising in practice. However, as the cast works as
+defined, this lint is `Allow` by default."##,
+ },
+ Lint {
+ label: "clippy::char_lit_as_u8",
+ description: r##"Checks for expressions where a character literal is cast
+to `u8` and suggests using a byte literal instead."##,
+ },
+ Lint {
+ label: "clippy::chars_last_cmp",
+ description: r##"Checks for usage of `_.chars().last()` or
+`_.chars().next_back()` on a `str` to check if it ends with a given char."##,
+ },
+ Lint {
+ label: "clippy::chars_next_cmp",
+ description: r##"Checks for usage of `.chars().next()` on a `str` to check
+if it starts with a given char."##,
+ },
+ Lint {
+ label: "clippy::checked_conversions",
+ description: r##"Checks for explicit bounds checking when casting."##,
+ },
+ Lint {
+ label: "clippy::clone_double_ref",
+ description: r##"Checks for usage of `.clone()` on an `&&T`."##,
+ },
+ Lint {
+ label: "clippy::clone_on_copy",
+ description: r##"Checks for usage of `.clone()` on a `Copy` type."##,
+ },
+ Lint {
+ label: "clippy::clone_on_ref_ptr",
+ description: r##"Checks for usage of `.clone()` on a ref-counted pointer,
+(`Rc`, `Arc`, `rc::Weak`, or `sync::Weak`), and suggests calling Clone via unified
+function syntax instead (e.g., `Rc::clone(foo)`)."##,
+ },
+ Lint {
+ label: "clippy::cloned_instead_of_copied",
+ description: r##"Checks for usages of `cloned()` on an `Iterator` or `Option` where
+`copied()` could be used instead."##,
+ },
+ Lint { label: "clippy::cmp_nan", description: r##"Checks for comparisons to NaN."## },
+ Lint {
+ label: "clippy::cmp_null",
+ description: r##"This lint checks for equality comparisons with `ptr::null`"##,
+ },
+ Lint {
+ label: "clippy::cmp_owned",
+ description: r##"Checks for conversions to owned values just for the sake
+of a comparison."##,
+ },
+ Lint {
+ label: "clippy::cognitive_complexity",
+ description: r##"Checks for methods with high cognitive complexity."##,
+ },
+ Lint {
+ label: "clippy::collapsible_else_if",
+ description: r##"Checks for collapsible `else { if ... }` expressions
+that can be collapsed to `else if ...`."##,
+ },
+ Lint {
+ label: "clippy::collapsible_if",
+ description: r##"Checks for nested `if` statements which can be collapsed
+by `&&`-combining their conditions."##,
+ },
+ Lint {
+ label: "clippy::collapsible_match",
+ description: r##"Finds nested `match` or `if let` expressions where the patterns may be collapsed together
+without adding any branches.
+
+Note that this lint is not intended to find _all_ cases where nested match patterns can be merged, but only
+cases where merging would most likely make the code more readable."##,
+ },
+ Lint {
+ label: "clippy::comparison_chain",
+ description: r##"Checks comparison chains written with `if` that can be
+rewritten with `match` and `cmp`."##,
+ },
+ Lint {
+ label: "clippy::comparison_to_empty",
+ description: r##"Checks for comparing to an empty slice such as `` or `[]`,
+and suggests using `.is_empty()` where applicable."##,
+ },
+ Lint {
+ label: "clippy::copy_iterator",
+ description: r##"Checks for types that implement `Copy` as well as
+`Iterator`."##,
+ },
+ Lint {
+ label: "clippy::create_dir",
+ description: r##"Checks usage of `std::fs::create_dir` and suggest using `std::fs::create_dir_all` instead."##,
+ },
+ Lint {
+ label: "clippy::crosspointer_transmute",
+ description: r##"Checks for transmutes between a type `T` and `*T`."##,
+ },
+ Lint { label: "clippy::dbg_macro", description: r##"Checks for usage of dbg!() macro."## },
+ Lint {
+ label: "clippy::debug_assert_with_mut_call",
+ description: r##"Checks for function/method calls with a mutable
+parameter in `debug_assert!`, `debug_assert_eq!` and `debug_assert_ne!` macros."##,
+ },
+ Lint {
+ label: "clippy::decimal_literal_representation",
+ description: r##"Warns if there is a better representation for a numeric literal."##,
+ },
+ Lint {
+ label: "clippy::declare_interior_mutable_const",
+ description: r##"Checks for declaration of `const` items which is interior
+mutable (e.g., contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.)."##,
+ },
+ Lint {
+ label: "clippy::default_numeric_fallback",
+ description: r##"Checks for usage of unconstrained numeric literals which may cause default numeric fallback in type
+inference.
+
+Default numeric fallback means that if numeric types have not yet been bound to concrete
+types at the end of type inference, then integer type is bound to `i32`, and similarly
+floating type is bound to `f64`.
+
+See [RFC0212](https://github.com/rust-lang/rfcs/blob/master/text/0212-restore-int-fallback.md) for more information about the fallback."##,
+ },
+ Lint {
+ label: "clippy::default_trait_access",
+ description: r##"Checks for literal calls to `Default::default()`."##,
+ },
+ Lint {
+ label: "clippy::deprecated_cfg_attr",
+ description: r##"Checks for `#[cfg_attr(rustfmt, rustfmt_skip)]` and suggests to replace it
+with `#[rustfmt::skip]`."##,
+ },
+ Lint {
+ label: "clippy::deprecated_semver",
+ description: r##"Checks for `#[deprecated]` annotations with a `since`
+field that is not a valid semantic version."##,
+ },
+ Lint {
+ label: "clippy::deref_addrof",
+ description: r##"Checks for usage of `*&` and `*&mut` in expressions."##,
+ },
+ Lint {
+ label: "clippy::derivable_impls",
+ description: r##"Detects manual `std::default::Default` implementations that are identical to a derived implementation."##,
+ },
+ Lint {
+ label: "clippy::derive_hash_xor_eq",
+ description: r##"Checks for deriving `Hash` but implementing `PartialEq`
+explicitly or vice versa."##,
+ },
+ Lint {
+ label: "clippy::derive_ord_xor_partial_ord",
+ description: r##"Checks for deriving `Ord` but implementing `PartialOrd`
+explicitly or vice versa."##,
+ },
+ Lint {
+ label: "clippy::disallowed_methods",
+ description: r##"Denies the configured methods and functions in clippy.toml"##,
+ },
+ Lint {
+ label: "clippy::disallowed_script_idents",
+ description: r##"Checks for usage of unicode scripts other than those explicitly allowed
+by the lint config.
+
+This lint doesn't take into account non-text scripts such as `Unknown` and `Linear_A`.
+It also ignores the `Common` script type.
+While configuring, be sure to use official script name [aliases] from
+[the list of supported scripts][supported_scripts].
+
+See also: [`non_ascii_idents`].
+
+[aliases]: http://www.unicode.org/reports/tr24/tr24-31.html#Script_Value_Aliases
+[supported_scripts]: https://www.unicode.org/iso15924/iso15924-codes.html"##,
+ },
+ Lint {
+ label: "clippy::disallowed_types",
+ description: r##"Denies the configured types in clippy.toml."##,
+ },
+ Lint {
+ label: "clippy::diverging_sub_expression",
+ description: r##"Checks for diverging calls that are not match arms or
+statements."##,
+ },
+ Lint {
+ label: "clippy::doc_markdown",
+ description: r##"Checks for the presence of `_`, `::` or camel-case words
+outside ticks in documentation."##,
+ },
+ Lint {
+ label: "clippy::double_comparisons",
+ description: r##"Checks for double comparisons that could be simplified to a single expression."##,
+ },
+ Lint {
+ label: "clippy::double_must_use",
+ description: r##"Checks for a `#[must_use]` attribute without
+further information on functions and methods that return a type already
+marked as `#[must_use]`."##,
+ },
+ Lint {
+ label: "clippy::double_neg",
+ description: r##"Detects expressions of the form `--x`."##,
+ },
+ Lint {
+ label: "clippy::double_parens",
+ description: r##"Checks for unnecessary double parentheses."##,
+ },
+ Lint {
+ label: "clippy::drop_copy",
+ description: r##"Checks for calls to `std::mem::drop` with a value
+that derives the Copy trait"##,
+ },
+ Lint {
+ label: "clippy::drop_ref",
+ description: r##"Checks for calls to `std::mem::drop` with a reference
+instead of an owned value."##,
+ },
+ Lint {
+ label: "clippy::duplicate_underscore_argument",
+ description: r##"Checks for function arguments having the similar names
+differing by an underscore."##,
+ },
+ Lint {
+ label: "clippy::duration_subsec",
+ description: r##"Checks for calculation of subsecond microseconds or milliseconds
+from other `Duration` methods."##,
+ },
+ Lint {
+ label: "clippy::else_if_without_else",
+ description: r##"Checks for usage of if expressions with an `else if` branch,
+but without a final `else` branch."##,
+ },
+ Lint {
+ label: "clippy::empty_enum",
+ description: r##"Checks for `enum`s with no variants.
+
+As of this writing, the `never_type` is still a
+nightly-only experimental API. Therefore, this lint is only triggered
+if the `never_type` is enabled."##,
+ },
+ Lint {
+ label: "clippy::empty_line_after_outer_attr",
+ description: r##"Checks for empty lines after outer attributes"##,
+ },
+ Lint { label: "clippy::empty_loop", description: r##"Checks for empty `loop` expressions."## },
+ Lint {
+ label: "clippy::enum_clike_unportable_variant",
+ description: r##"Checks for C-like enumerations that are
+`repr(isize/usize)` and have values that don't fit into an `i32`."##,
+ },
+ Lint { label: "clippy::enum_glob_use", description: r##"Checks for `use Enum::*`."## },
+ Lint {
+ label: "clippy::enum_variant_names",
+ description: r##"Detects enumeration variants that are prefixed or suffixed
+by the same characters."##,
+ },
+ Lint {
+ label: "clippy::eq_op",
+ description: r##"Checks for equal operands to comparison, logical and
+bitwise, difference and division binary operators (`==`, `>`, etc., `&&`,
+`||`, `&`, `|`, `^`, `-` and `/`)."##,
+ },
+ Lint {
+ label: "clippy::equatable_if_let",
+ description: r##"Checks for pattern matchings that can be expressed using equality."##,
+ },
+ Lint {
+ label: "clippy::erasing_op",
+ description: r##"Checks for erasing operations, e.g., `x * 0`."##,
+ },
+ Lint {
+ label: "clippy::eval_order_dependence",
+ description: r##"Checks for a read and a write to the same variable where
+whether the read occurs before or after the write depends on the evaluation
+order of sub-expressions."##,
+ },
+ Lint {
+ label: "clippy::excessive_precision",
+ description: r##"Checks for float literals with a precision greater
+than that supported by the underlying type."##,
+ },
+ Lint {
+ label: "clippy::exhaustive_enums",
+ description: r##"Warns on any exported `enum`s that are not tagged `#[non_exhaustive]`"##,
+ },
+ Lint {
+ label: "clippy::exhaustive_structs",
+ description: r##"Warns on any exported `structs`s that are not tagged `#[non_exhaustive]`"##,
+ },
+ Lint {
+ label: "clippy::exit",
+ description: r##"`exit()` terminates the program and doesn't provide a
+stack trace."##,
+ },
+ Lint {
+ label: "clippy::expect_fun_call",
+ description: r##"Checks for calls to `.expect(&format!(...))`, `.expect(foo(..))`,
+etc., and suggests to use `unwrap_or_else` instead"##,
+ },
+ Lint {
+ label: "clippy::expect_used",
+ description: r##"Checks for `.expect()` calls on `Option`s and `Result`s."##,
+ },
+ Lint {
+ label: "clippy::expl_impl_clone_on_copy",
+ description: r##"Checks for explicit `Clone` implementations for `Copy`
+types."##,
+ },
+ Lint {
+ label: "clippy::explicit_counter_loop",
+ description: r##"Checks `for` loops over slices with an explicit counter
+and suggests the use of `.enumerate()`."##,
+ },
+ Lint {
+ label: "clippy::explicit_deref_methods",
+ description: r##"Checks for explicit `deref()` or `deref_mut()` method calls."##,
+ },
+ Lint {
+ label: "clippy::explicit_into_iter_loop",
+ description: r##"Checks for loops on `y.into_iter()` where `y` will do, and
+suggests the latter."##,
+ },
+ Lint {
+ label: "clippy::explicit_iter_loop",
+ description: r##"Checks for loops on `x.iter()` where `&x` will do, and
+suggests the latter."##,
+ },
+ Lint {
+ label: "clippy::explicit_write",
+ description: r##"Checks for usage of `write!()` / `writeln()!` which can be
+replaced with `(e)print!()` / `(e)println!()`"##,
+ },
+ Lint {
+ label: "clippy::extend_from_slice",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::extend_with_drain",
+ description: r##"Checks for occurrences where one vector gets extended instead of append"##,
+ },
+ Lint {
+ label: "clippy::extra_unused_lifetimes",
+ description: r##"Checks for lifetimes in generics that are never used
+anywhere else."##,
+ },
+ Lint {
+ label: "clippy::fallible_impl_from",
+ description: r##"Checks for impls of `From<..>` that contain `panic!()` or `unwrap()`"##,
+ },
+ Lint {
+ label: "clippy::field_reassign_with_default",
+ description: r##"Checks for immediate reassignment of fields initialized
+with Default::default()."##,
+ },
+ Lint {
+ label: "clippy::filetype_is_file",
+ description: r##"Checks for `FileType::is_file()`."##,
+ },
+ Lint {
+ label: "clippy::filter_map",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::filter_map_identity",
+ description: r##"Checks for usage of `filter_map(|x| x)`."##,
+ },
+ Lint {
+ label: "clippy::filter_map_next",
+ description: r##"Checks for usage of `_.filter_map(_).next()`."##,
+ },
+ Lint {
+ label: "clippy::filter_next",
+ description: r##"Checks for usage of `_.filter(_).next()`."##,
+ },
+ Lint { label: "clippy::find_map", description: r##"Nothing. This lint has been deprecated."## },
+ Lint {
+ label: "clippy::flat_map_identity",
+ description: r##"Checks for usage of `flat_map(|x| x)`."##,
+ },
+ Lint {
+ label: "clippy::flat_map_option",
+ description: r##"Checks for usages of `Iterator::flat_map()` where `filter_map()` could be
+used instead."##,
+ },
+ Lint { label: "clippy::float_arithmetic", description: r##"Checks for float arithmetic."## },
+ Lint {
+ label: "clippy::float_cmp",
+ description: r##"Checks for (in-)equality comparisons on floating-point
+values (apart from zero), except in functions called `*eq*` (which probably
+implement equality for a type involving floats)."##,
+ },
+ Lint {
+ label: "clippy::float_cmp_const",
+ description: r##"Checks for (in-)equality comparisons on floating-point
+value and constant, except in functions called `*eq*` (which probably
+implement equality for a type involving floats)."##,
+ },
+ Lint {
+ label: "clippy::float_equality_without_abs",
+ description: r##"Checks for statements of the form `(a - b) < f32::EPSILON` or
+`(a - b) < f64::EPSILON`. Notes the missing `.abs()`."##,
+ },
+ Lint {
+ label: "clippy::fn_address_comparisons",
+ description: r##"Checks for comparisons with an address of a function item."##,
+ },
+ Lint {
+ label: "clippy::fn_params_excessive_bools",
+ description: r##"Checks for excessive use of
+bools in function definitions."##,
+ },
+ Lint {
+ label: "clippy::fn_to_numeric_cast",
+ description: r##"Checks for casts of function pointers to something other than usize"##,
+ },
+ Lint {
+ label: "clippy::fn_to_numeric_cast_any",
+ description: r##"Checks for casts of a function pointer to any integer type."##,
+ },
+ Lint {
+ label: "clippy::fn_to_numeric_cast_with_truncation",
+ description: r##"Checks for casts of a function pointer to a numeric type not wide enough to
+store address."##,
+ },
+ Lint {
+ label: "clippy::for_kv_map",
+ description: r##"Checks for iterating a map (`HashMap` or `BTreeMap`) and
+ignoring either the keys or values."##,
+ },
+ Lint {
+ label: "clippy::for_loops_over_fallibles",
+ description: r##"Checks for `for` loops over `Option` or `Result` values."##,
+ },
+ Lint {
+ label: "clippy::forget_copy",
+ description: r##"Checks for calls to `std::mem::forget` with a value that
+derives the Copy trait"##,
+ },
+ Lint {
+ label: "clippy::forget_ref",
+ description: r##"Checks for calls to `std::mem::forget` with a reference
+instead of an owned value."##,
+ },
+ Lint {
+ label: "clippy::format_in_format_args",
+ description: r##"Detects `format!` within the arguments of another macro that does
+formatting such as `format!` itself, `write!` or `println!`. Suggests
+inlining the `format!` call."##,
+ },
+ Lint {
+ label: "clippy::from_iter_instead_of_collect",
+ description: r##"Checks for `from_iter()` function calls on types that implement the `FromIterator`
+trait."##,
+ },
+ Lint {
+ label: "clippy::from_over_into",
+ description: r##"Searches for implementations of the `Into<..>` trait and suggests to implement `From<..>` instead."##,
+ },
+ Lint {
+ label: "clippy::from_str_radix_10",
+ description: r##"Checks for function invocations of the form `primitive::from_str_radix(s, 10)`"##,
+ },
+ Lint {
+ label: "clippy::future_not_send",
+ description: r##"This lint requires Future implementations returned from
+functions and methods to implement the `Send` marker trait. It is mostly
+used by library authors (public and internal) that target an audience where
+multithreaded executors are likely to be used for running these Futures."##,
+ },
+ Lint {
+ label: "clippy::get_last_with_len",
+ description: r##"Checks for using `x.get(x.len() - 1)` instead of
+`x.last()`."##,
+ },
+ Lint {
+ label: "clippy::get_unwrap",
+ description: r##"Checks for use of `.get().unwrap()` (or
+`.get_mut().unwrap`) on a standard library type which implements `Index`"##,
+ },
+ Lint {
+ label: "clippy::identity_op",
+ description: r##"Checks for identity operations, e.g., `x + 0`."##,
+ },
+ Lint {
+ label: "clippy::if_let_mutex",
+ description: r##"Checks for `Mutex::lock` calls in `if let` expression
+with lock calls in any of the else blocks."##,
+ },
+ Lint {
+ label: "clippy::if_let_redundant_pattern_matching",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::if_not_else",
+ description: r##"Checks for usage of `!` or `!=` in an if condition with an
+else branch."##,
+ },
+ Lint {
+ label: "clippy::if_same_then_else",
+ description: r##"Checks for `if/else` with the same body as the *then* part
+and the *else* part."##,
+ },
+ Lint {
+ label: "clippy::if_then_some_else_none",
+ description: r##"Checks for if-else that could be written to `bool::then`."##,
+ },
+ Lint {
+ label: "clippy::ifs_same_cond",
+ description: r##"Checks for consecutive `if`s with the same condition."##,
+ },
+ Lint {
+ label: "clippy::implicit_clone",
+ description: r##"Checks for the usage of `_.to_owned()`, `vec.to_vec()`, or similar when calling `_.clone()` would be clearer."##,
+ },
+ Lint {
+ label: "clippy::implicit_hasher",
+ description: r##"Checks for public `impl` or `fn` missing generalization
+over different hashers and implicitly defaulting to the default hashing
+algorithm (`SipHash`)."##,
+ },
+ Lint {
+ label: "clippy::implicit_return",
+ description: r##"Checks for missing return statements at the end of a block."##,
+ },
+ Lint {
+ label: "clippy::implicit_saturating_sub",
+ description: r##"Checks for implicit saturating subtraction."##,
+ },
+ Lint {
+ label: "clippy::imprecise_flops",
+ description: r##"Looks for floating-point expressions that
+can be expressed using built-in methods to improve accuracy
+at the cost of performance."##,
+ },
+ Lint {
+ label: "clippy::inconsistent_digit_grouping",
+ description: r##"Warns if an integral or floating-point constant is
+grouped inconsistently with underscores."##,
+ },
+ Lint {
+ label: "clippy::inconsistent_struct_constructor",
+ description: r##"Checks for struct constructors where all fields are shorthand and
+the order of the field init shorthand in the constructor is inconsistent
+with the order in the struct definition."##,
+ },
+ Lint {
+ label: "clippy::index_refutable_slice",
+ description: r##"The lint checks for slice bindings in patterns that are only used to
+access individual slice values."##,
+ },
+ Lint {
+ label: "clippy::indexing_slicing",
+ description: r##"Checks for usage of indexing or slicing. Arrays are special cases, this lint
+does report on arrays if we can tell that slicing operations are in bounds and does not
+lint on constant `usize` indexing on arrays because that is handled by rustc's `const_err` lint."##,
+ },
+ Lint {
+ label: "clippy::ineffective_bit_mask",
+ description: r##"Checks for bit masks in comparisons which can be removed
+without changing the outcome. The basic structure can be seen in the
+following table:
+
+|Comparison| Bit Op |Example |equals |
+|----------|---------|-----------|-------|
+|`>` / `<=`|`|` / `^`|`x | 2 > 3`|`x > 3`|
+|`<` / `>=`|`|` / `^`|`x ^ 1 < 4`|`x < 4`|"##,
+ },
+ Lint {
+ label: "clippy::inefficient_to_string",
+ description: r##"Checks for usage of `.to_string()` on an `&&T` where
+`T` implements `ToString` directly (like `&&str` or `&&String`)."##,
+ },
+ Lint {
+ label: "clippy::infallible_destructuring_match",
+ description: r##"Checks for matches being used to destructure a single-variant enum
+or tuple struct where a `let` will suffice."##,
+ },
+ Lint {
+ label: "clippy::infinite_iter",
+ description: r##"Checks for iteration that is guaranteed to be infinite."##,
+ },
+ Lint {
+ label: "clippy::inherent_to_string",
+ description: r##"Checks for the definition of inherent methods with a signature of `to_string(&self) -> String`."##,
+ },
+ Lint {
+ label: "clippy::inherent_to_string_shadow_display",
+ description: r##"Checks for the definition of inherent methods with a signature of `to_string(&self) -> String` and if the type implementing this method also implements the `Display` trait."##,
+ },
+ Lint {
+ label: "clippy::inline_always",
+ description: r##"Checks for items annotated with `#[inline(always)]`,
+unless the annotated function is empty or simply panics."##,
+ },
+ Lint {
+ label: "clippy::inline_asm_x86_att_syntax",
+ description: r##"Checks for usage of AT&T x86 assembly syntax."##,
+ },
+ Lint {
+ label: "clippy::inline_asm_x86_intel_syntax",
+ description: r##"Checks for usage of Intel x86 assembly syntax."##,
+ },
+ Lint {
+ label: "clippy::inline_fn_without_body",
+ description: r##"Checks for `#[inline]` on trait methods without bodies"##,
+ },
+ Lint {
+ label: "clippy::inspect_for_each",
+ description: r##"Checks for usage of `inspect().for_each()`."##,
+ },
+ Lint {
+ label: "clippy::int_plus_one",
+ description: r##"Checks for usage of `x >= y + 1` or `x - 1 >= y` (and `<=`) in a block"##,
+ },
+ Lint {
+ label: "clippy::integer_arithmetic",
+ description: r##"Checks for integer arithmetic operations which could overflow or panic.
+
+Specifically, checks for any operators (`+`, `-`, `*`, `<<`, etc) which are capable
+of overflowing according to the [Rust
+Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
+or which can panic (`/`, `%`). No bounds analysis or sophisticated reasoning is
+attempted."##,
+ },
+ Lint { label: "clippy::integer_division", description: r##"Checks for division of integers"## },
+ Lint {
+ label: "clippy::into_iter_on_ref",
+ description: r##"Checks for `into_iter` calls on references which should be replaced by `iter`
+or `iter_mut`."##,
+ },
+ Lint {
+ label: "clippy::invalid_null_ptr_usage",
+ description: r##"This lint checks for invalid usages of `ptr::null`."##,
+ },
+ Lint {
+ label: "clippy::invalid_regex",
+ description: r##"Checks [regex](https://crates.io/crates/regex) creation
+(with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`) for correct
+regex syntax."##,
+ },
+ Lint {
+ label: "clippy::invalid_upcast_comparisons",
+ description: r##"Checks for comparisons where the relation is always either
+true or false, but where one side has been upcast so that the comparison is
+necessary. Only integer types are checked."##,
+ },
+ Lint {
+ label: "clippy::invisible_characters",
+ description: r##"Checks for invisible Unicode characters in the code."##,
+ },
+ Lint {
+ label: "clippy::items_after_statements",
+ description: r##"Checks for items declared after some statement in a block."##,
+ },
+ Lint {
+ label: "clippy::iter_cloned_collect",
+ description: r##"Checks for the use of `.cloned().collect()` on slice to
+create a `Vec`."##,
+ },
+ Lint {
+ label: "clippy::iter_count",
+ description: r##"Checks for the use of `.iter().count()`."##,
+ },
+ Lint { label: "clippy::iter_next_loop", description: r##"Checks for loops on `x.next()`."## },
+ Lint {
+ label: "clippy::iter_next_slice",
+ description: r##"Checks for usage of `iter().next()` on a Slice or an Array"##,
+ },
+ Lint {
+ label: "clippy::iter_not_returning_iterator",
+ description: r##"Detects methods named `iter` or `iter_mut` that do not have a return type that implements `Iterator`."##,
+ },
+ Lint {
+ label: "clippy::iter_nth",
+ description: r##"Checks for use of `.iter().nth()` (and the related
+`.iter_mut().nth()`) on standard library types with *O*(1) element access."##,
+ },
+ Lint {
+ label: "clippy::iter_nth_zero",
+ description: r##"Checks for the use of `iter.nth(0)`."##,
+ },
+ Lint {
+ label: "clippy::iter_skip_next",
+ description: r##"Checks for use of `.skip(x).next()` on iterators."##,
+ },
+ Lint {
+ label: "clippy::iterator_step_by_zero",
+ description: r##"Checks for calling `.step_by(0)` on iterators which panics."##,
+ },
+ Lint {
+ label: "clippy::just_underscores_and_digits",
+ description: r##"Checks if you have variables whose name consists of just
+underscores and digits."##,
+ },
+ Lint {
+ label: "clippy::large_const_arrays",
+ description: r##"Checks for large `const` arrays that should
+be defined as `static` instead."##,
+ },
+ Lint {
+ label: "clippy::large_digit_groups",
+ description: r##"Warns if the digits of an integral or floating-point
+constant are grouped into groups that
+are too large."##,
+ },
+ Lint {
+ label: "clippy::large_enum_variant",
+ description: r##"Checks for large size differences between variants on
+`enum`s."##,
+ },
+ Lint {
+ label: "clippy::large_stack_arrays",
+ description: r##"Checks for local arrays that may be too large."##,
+ },
+ Lint {
+ label: "clippy::large_types_passed_by_value",
+ description: r##"Checks for functions taking arguments by value, where
+the argument type is `Copy` and large enough to be worth considering
+passing by reference. Does not trigger if the function is being exported,
+because that might induce API breakage, if the parameter is declared as mutable,
+or if the argument is a `self`."##,
+ },
+ Lint {
+ label: "clippy::len_without_is_empty",
+ description: r##"Checks for items that implement `.len()` but not
+`.is_empty()`."##,
+ },
+ Lint {
+ label: "clippy::len_zero",
+ description: r##"Checks for getting the length of something via `.len()`
+just to compare to zero, and suggests using `.is_empty()` where applicable."##,
+ },
+ Lint {
+ label: "clippy::let_and_return",
+ description: r##"Checks for `let`-bindings, which are subsequently
+returned."##,
+ },
+ Lint {
+ label: "clippy::let_underscore_drop",
+ description: r##"Checks for `let _ = <expr>`
+where expr has a type that implements `Drop`"##,
+ },
+ Lint {
+ label: "clippy::let_underscore_lock",
+ description: r##"Checks for `let _ = sync_lock`.
+This supports `mutex` and `rwlock` in `std::sync` and `parking_lot`."##,
+ },
+ Lint {
+ label: "clippy::let_underscore_must_use",
+ description: r##"Checks for `let _ = <expr>` where expr is `#[must_use]`"##,
+ },
+ Lint { label: "clippy::let_unit_value", description: r##"Checks for binding a unit value."## },
+ Lint {
+ label: "clippy::linkedlist",
+ description: r##"Checks for usage of any `LinkedList`, suggesting to use a
+`Vec` or a `VecDeque` (formerly called `RingBuf`)."##,
+ },
+ Lint {
+ label: "clippy::logic_bug",
+ description: r##"Checks for boolean expressions that contain terminals that
+can be eliminated."##,
+ },
+ Lint {
+ label: "clippy::lossy_float_literal",
+ description: r##"Checks for whole number float literals that
+cannot be represented as the underlying type without loss."##,
+ },
+ Lint {
+ label: "clippy::macro_use_imports",
+ description: r##"Checks for `#[macro_use] use...`."##,
+ },
+ Lint {
+ label: "clippy::main_recursion",
+ description: r##"Checks for recursion using the entrypoint."##,
+ },
+ Lint {
+ label: "clippy::manual_assert",
+ description: r##"Detects `if`-then-`panic!` that can be replaced with `assert!`."##,
+ },
+ Lint {
+ label: "clippy::manual_async_fn",
+ description: r##"It checks for manual implementations of `async` functions."##,
+ },
+ Lint {
+ label: "clippy::manual_filter_map",
+ description: r##"Checks for usage of `_.filter(_).map(_)` that can be written more simply
+as `filter_map(_)`."##,
+ },
+ Lint {
+ label: "clippy::manual_find_map",
+ description: r##"Checks for usage of `_.find(_).map(_)` that can be written more simply
+as `find_map(_)`."##,
+ },
+ Lint {
+ label: "clippy::manual_flatten",
+ description: r##"Check for unnecessary `if let` usage in a for loop
+where only the `Some` or `Ok` variant of the iterator element is used."##,
+ },
+ Lint {
+ label: "clippy::manual_map",
+ description: r##"Checks for usages of `match` which could be implemented using `map`"##,
+ },
+ Lint {
+ label: "clippy::manual_memcpy",
+ description: r##"Checks for for-loops that manually copy items between
+slices that could be optimized by having a memcpy."##,
+ },
+ Lint {
+ label: "clippy::manual_non_exhaustive",
+ description: r##"Checks for manual implementations of the non-exhaustive pattern."##,
+ },
+ Lint {
+ label: "clippy::manual_ok_or",
+ description: r##"Finds patterns that reimplement `Option::ok_or`."##,
+ },
+ Lint {
+ label: "clippy::manual_range_contains",
+ description: r##"Checks for expressions like `x >= 3 && x < 8` that could
+be more readably expressed as `(3..8).contains(x)`."##,
+ },
+ Lint {
+ label: "clippy::manual_saturating_arithmetic",
+ description: r##"Checks for `.checked_add/sub(x).unwrap_or(MAX/MIN)`."##,
+ },
+ Lint {
+ label: "clippy::manual_split_once",
+ description: r##"Checks for usages of `str::splitn(2, _)`"##,
+ },
+ Lint {
+ label: "clippy::manual_str_repeat",
+ description: r##"Checks for manual implementations of `str::repeat`"##,
+ },
+ Lint {
+ label: "clippy::manual_strip",
+ description: r##"Suggests using `strip_{prefix,suffix}` over `str::{starts,ends}_with` and slicing using
+the pattern's length."##,
+ },
+ Lint { label: "clippy::manual_swap", description: r##"Checks for manual swapping."## },
+ Lint {
+ label: "clippy::manual_unwrap_or",
+ description: r##"Finds patterns that reimplement `Option::unwrap_or` or `Result::unwrap_or`."##,
+ },
+ Lint {
+ label: "clippy::many_single_char_names",
+ description: r##"Checks for too many variables whose name consists of a
+single character."##,
+ },
+ Lint {
+ label: "clippy::map_clone",
+ description: r##"Checks for usage of `map(|x| x.clone())` or
+dereferencing closures for `Copy` types, on `Iterator` or `Option`,
+and suggests `cloned()` or `copied()` instead"##,
+ },
+ Lint {
+ label: "clippy::map_collect_result_unit",
+ description: r##"Checks for usage of `_.map(_).collect::<Result<(), _>()`."##,
+ },
+ Lint {
+ label: "clippy::map_entry",
+ description: r##"Checks for uses of `contains_key` + `insert` on `HashMap`
+or `BTreeMap`."##,
+ },
+ Lint {
+ label: "clippy::map_err_ignore",
+ description: r##"Checks for instances of `map_err(|_| Some::Enum)`"##,
+ },
+ Lint {
+ label: "clippy::map_flatten",
+ description: r##"Checks for usage of `_.map(_).flatten(_)` on `Iterator` and `Option`"##,
+ },
+ Lint {
+ label: "clippy::map_identity",
+ description: r##"Checks for instances of `map(f)` where `f` is the identity function."##,
+ },
+ Lint {
+ label: "clippy::map_unwrap_or",
+ description: r##"Checks for usage of `option.map(_).unwrap_or(_)` or `option.map(_).unwrap_or_else(_)` or
+`result.map(_).unwrap_or_else(_)`."##,
+ },
+ Lint {
+ label: "clippy::match_as_ref",
+ description: r##"Checks for match which is used to add a reference to an
+`Option` value."##,
+ },
+ Lint {
+ label: "clippy::match_bool",
+ description: r##"Checks for matches where match expression is a `bool`. It
+suggests to replace the expression with an `if...else` block."##,
+ },
+ Lint {
+ label: "clippy::match_like_matches_macro",
+ description: r##"Checks for `match` or `if let` expressions producing a
+`bool` that could be written using `matches!`"##,
+ },
+ Lint {
+ label: "clippy::match_on_vec_items",
+ description: r##"Checks for `match vec[idx]` or `match vec[n..m]`."##,
+ },
+ Lint {
+ label: "clippy::match_overlapping_arm",
+ description: r##"Checks for overlapping match arms."##,
+ },
+ Lint {
+ label: "clippy::match_ref_pats",
+ description: r##"Checks for matches where all arms match a reference,
+suggesting to remove the reference and deref the matched expression
+instead. It also checks for `if let &foo = bar` blocks."##,
+ },
+ Lint {
+ label: "clippy::match_result_ok",
+ description: r##"Checks for unnecessary `ok()` in `while let`."##,
+ },
+ Lint {
+ label: "clippy::match_same_arms",
+ description: r##"Checks for `match` with identical arm bodies."##,
+ },
+ Lint {
+ label: "clippy::match_single_binding",
+ description: r##"Checks for useless match that binds to only one value."##,
+ },
+ Lint {
+ label: "clippy::match_str_case_mismatch",
+ description: r##"Checks for `match` expressions modifying the case of a string with non-compliant arms"##,
+ },
+ Lint {
+ label: "clippy::match_wild_err_arm",
+ description: r##"Checks for arm which matches all errors with `Err(_)`
+and take drastic actions like `panic!`."##,
+ },
+ Lint {
+ label: "clippy::match_wildcard_for_single_variants",
+ description: r##"Checks for wildcard enum matches for a single variant."##,
+ },
+ Lint {
+ label: "clippy::maybe_infinite_iter",
+ description: r##"Checks for iteration that may be infinite."##,
+ },
+ Lint {
+ label: "clippy::mem_forget",
+ description: r##"Checks for usage of `std::mem::forget(t)` where `t` is
+`Drop`."##,
+ },
+ Lint {
+ label: "clippy::mem_replace_option_with_none",
+ description: r##"Checks for `mem::replace()` on an `Option` with
+`None`."##,
+ },
+ Lint {
+ label: "clippy::mem_replace_with_default",
+ description: r##"Checks for `std::mem::replace` on a value of type
+`T` with `T::default()`."##,
+ },
+ Lint {
+ label: "clippy::mem_replace_with_uninit",
+ description: r##"Checks for `mem::replace(&mut _, mem::uninitialized())`
+and `mem::replace(&mut _, mem::zeroed())`."##,
+ },
+ Lint {
+ label: "clippy::min_max",
+ description: r##"Checks for expressions where `std::cmp::min` and `max` are
+used to clamp values, but switched so that the result is constant."##,
+ },
+ Lint {
+ label: "clippy::misaligned_transmute",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::mismatched_target_os",
+ description: r##"Checks for cfg attributes having operating systems used in target family position."##,
+ },
+ Lint {
+ label: "clippy::misrefactored_assign_op",
+ description: r##"Checks for `a op= a op b` or `a op= b op a` patterns."##,
+ },
+ Lint {
+ label: "clippy::missing_const_for_fn",
+ description: r##"Suggests the use of `const` in functions and methods where possible."##,
+ },
+ Lint {
+ label: "clippy::missing_docs_in_private_items",
+ description: r##"Warns if there is missing doc for any documentable item
+(public or private)."##,
+ },
+ Lint {
+ label: "clippy::missing_enforced_import_renames",
+ description: r##"Checks for imports that do not rename the item as specified
+in the `enforce-import-renames` config option."##,
+ },
+ Lint {
+ label: "clippy::missing_errors_doc",
+ description: r##"Checks the doc comments of publicly visible functions that
+return a `Result` type and warns if there is no `# Errors` section."##,
+ },
+ Lint {
+ label: "clippy::missing_inline_in_public_items",
+ description: r##"It lints if an exported function, method, trait method with default impl,
+or trait method impl is not `#[inline]`."##,
+ },
+ Lint {
+ label: "clippy::missing_panics_doc",
+ description: r##"Checks the doc comments of publicly visible functions that
+may panic and warns if there is no `# Panics` section."##,
+ },
+ Lint {
+ label: "clippy::missing_safety_doc",
+ description: r##"Checks for the doc comments of publicly visible
+unsafe functions and warns if there is no `# Safety` section."##,
+ },
+ Lint {
+ label: "clippy::mistyped_literal_suffixes",
+ description: r##"Warns for mistyped suffix in literals"##,
+ },
+ Lint {
+ label: "clippy::mixed_case_hex_literals",
+ description: r##"Warns on hexadecimal literals with mixed-case letter
+digits."##,
+ },
+ Lint {
+ label: "clippy::mod_module_files",
+ description: r##"Checks that module layout uses only self named module files, bans mod.rs files."##,
+ },
+ Lint {
+ label: "clippy::module_inception",
+ description: r##"Checks for modules that have the same name as their
+parent module"##,
+ },
+ Lint {
+ label: "clippy::module_name_repetitions",
+ description: r##"Detects type names that are prefixed or suffixed by the
+containing module's name."##,
+ },
+ Lint { label: "clippy::modulo_arithmetic", description: r##"Checks for modulo arithmetic."## },
+ Lint {
+ label: "clippy::modulo_one",
+ description: r##"Checks for getting the remainder of a division by one or minus
+one."##,
+ },
+ Lint {
+ label: "clippy::multiple_crate_versions",
+ description: r##"Checks to see if multiple versions of a crate are being
+used."##,
+ },
+ Lint {
+ label: "clippy::multiple_inherent_impl",
+ description: r##"Checks for multiple inherent implementations of a struct"##,
+ },
+ Lint {
+ label: "clippy::must_use_candidate",
+ description: r##"Checks for public functions that have no
+`#[must_use]` attribute, but return something not already marked
+must-use, have no mutable arg and mutate no statics."##,
+ },
+ Lint {
+ label: "clippy::must_use_unit",
+ description: r##"Checks for a `#[must_use]` attribute on
+unit-returning functions and methods."##,
+ },
+ Lint {
+ label: "clippy::mut_from_ref",
+ description: r##"This lint checks for functions that take immutable
+references and return mutable ones."##,
+ },
+ Lint {
+ label: "clippy::mut_mut",
+ description: r##"Checks for instances of `mut mut` references."##,
+ },
+ Lint {
+ label: "clippy::mut_mutex_lock",
+ description: r##"Checks for `&mut Mutex::lock` calls"##,
+ },
+ Lint {
+ label: "clippy::mut_range_bound",
+ description: r##"Checks for loops which have a range bound that is a mutable variable"##,
+ },
+ Lint {
+ label: "clippy::mutable_key_type",
+ description: r##"Checks for sets/maps with mutable key types."##,
+ },
+ Lint {
+ label: "clippy::mutex_atomic",
+ description: r##"Checks for usages of `Mutex<X>` where an atomic will do."##,
+ },
+ Lint {
+ label: "clippy::mutex_integer",
+ description: r##"Checks for usages of `Mutex<X>` where `X` is an integral
+type."##,
+ },
+ Lint { label: "clippy::naive_bytecount", description: r##"Checks for naive byte counts"## },
+ Lint {
+ label: "clippy::needless_arbitrary_self_type",
+ description: r##"The lint checks for `self` in fn parameters that
+specify the `Self`-type explicitly"##,
+ },
+ Lint {
+ label: "clippy::needless_bitwise_bool",
+ description: r##"Checks for uses of bitwise and/or operators between booleans, where performance may be improved by using
+a lazy and."##,
+ },
+ Lint {
+ label: "clippy::needless_bool",
+ description: r##"Checks for expressions of the form `if c { true } else {
+false }` (or vice versa) and suggests using the condition directly."##,
+ },
+ Lint {
+ label: "clippy::needless_borrow",
+ description: r##"Checks for address of operations (`&`) that are going to
+be dereferenced immediately by the compiler."##,
+ },
+ Lint {
+ label: "clippy::needless_borrowed_reference",
+ description: r##"Checks for bindings that destructure a reference and borrow the inner
+value with `&ref`."##,
+ },
+ Lint {
+ label: "clippy::needless_collect",
+ description: r##"Checks for functions collecting an iterator when collect
+is not needed."##,
+ },
+ Lint {
+ label: "clippy::needless_continue",
+ description: r##"The lint checks for `if`-statements appearing in loops
+that contain a `continue` statement in either their main blocks or their
+`else`-blocks, when omitting the `else`-block possibly with some
+rearrangement of code can make the code easier to understand."##,
+ },
+ Lint {
+ label: "clippy::needless_doctest_main",
+ description: r##"Checks for `fn main() { .. }` in doctests"##,
+ },
+ Lint {
+ label: "clippy::needless_for_each",
+ description: r##"Checks for usage of `for_each` that would be more simply written as a
+`for` loop."##,
+ },
+ Lint {
+ label: "clippy::needless_late_init",
+ description: r##"Checks for late initializations that can be replaced by a `let` statement
+with an initializer."##,
+ },
+ Lint {
+ label: "clippy::needless_lifetimes",
+ description: r##"Checks for lifetime annotations which can be removed by
+relying on lifetime elision."##,
+ },
+ Lint {
+ label: "clippy::needless_option_as_deref",
+ description: r##"Checks for no-op uses of Option::{as_deref,as_deref_mut},
+for example, `Option<&T>::as_deref()` returns the same type."##,
+ },
+ Lint {
+ label: "clippy::needless_pass_by_value",
+ description: r##"Checks for functions taking arguments by value, but not
+consuming them in its
+body."##,
+ },
+ Lint {
+ label: "clippy::needless_question_mark",
+ description: r##"Suggests alternatives for useless applications of `?` in terminating expressions"##,
+ },
+ Lint {
+ label: "clippy::needless_range_loop",
+ description: r##"Checks for looping over the range of `0..len` of some
+collection just to get the values by index."##,
+ },
+ Lint {
+ label: "clippy::needless_return",
+ description: r##"Checks for return statements at the end of a block."##,
+ },
+ Lint {
+ label: "clippy::needless_splitn",
+ description: r##"Checks for usages of `str::splitn` (or `str::rsplitn`) where using `str::split` would be the same."##,
+ },
+ Lint {
+ label: "clippy::needless_update",
+ description: r##"Checks for needlessly including a base struct on update
+when all fields are changed anyway.
+
+This lint is not applied to structs marked with
+[non_exhaustive](https://doc.rust-lang.org/reference/attributes/type_system.html)."##,
+ },
+ Lint {
+ label: "clippy::neg_cmp_op_on_partial_ord",
+ description: r##"Checks for the usage of negated comparison operators on types which only implement
+`PartialOrd` (e.g., `f64`)."##,
+ },
+ Lint {
+ label: "clippy::neg_multiply",
+ description: r##"Checks for multiplication by -1 as a form of negation."##,
+ },
+ Lint {
+ label: "clippy::negative_feature_names",
+ description: r##"Checks for negative feature names with prefix `no-` or `not-`"##,
+ },
+ Lint {
+ label: "clippy::never_loop",
+ description: r##"Checks for loops that will always `break`, `return` or
+`continue` an outer loop."##,
+ },
+ Lint {
+ label: "clippy::new_ret_no_self",
+ description: r##"Checks for `new` not returning a type that contains `Self`."##,
+ },
+ Lint {
+ label: "clippy::new_without_default",
+ description: r##"Checks for types with a `fn new() -> Self` method and no
+implementation of
+[`Default`](https://doc.rust-lang.org/std/default/trait.Default.html)."##,
+ },
+ Lint {
+ label: "clippy::no_effect",
+ description: r##"Checks for statements which have no effect."##,
+ },
+ Lint {
+ label: "clippy::no_effect_underscore_binding",
+ description: r##"Checks for binding to underscore prefixed variable without side-effects."##,
+ },
+ Lint {
+ label: "clippy::non_ascii_literal",
+ description: r##"Checks for non-ASCII characters in string and char literals."##,
+ },
+ Lint {
+ label: "clippy::non_octal_unix_permissions",
+ description: r##"Checks for non-octal values used to set Unix file permissions."##,
+ },
+ Lint {
+ label: "clippy::non_send_fields_in_send_ty",
+ description: r##"This lint warns about a `Send` implementation for a type that
+contains fields that are not safe to be sent across threads.
+It tries to detect fields that can cause a soundness issue
+when sent to another thread (e.g., `Rc`) while allowing `!Send` fields
+that are expected to exist in a `Send` type, such as raw pointers."##,
+ },
+ Lint {
+ label: "clippy::nonminimal_bool",
+ description: r##"Checks for boolean expressions that can be written more
+concisely."##,
+ },
+ Lint {
+ label: "clippy::nonsensical_open_options",
+ description: r##"Checks for duplicate open options as well as combinations
+that make no sense."##,
+ },
+ Lint {
+ label: "clippy::nonstandard_macro_braces",
+ description: r##"Checks that common macros are used with consistent bracing."##,
+ },
+ Lint {
+ label: "clippy::not_unsafe_ptr_arg_deref",
+ description: r##"Checks for public functions that dereference raw pointer
+arguments but are not marked `unsafe`."##,
+ },
+ Lint {
+ label: "clippy::octal_escapes",
+ description: r##"Checks for `\\0` escapes in string and byte literals that look like octal
+character escapes in C."##,
+ },
+ Lint { label: "clippy::ok_expect", description: r##"Checks for usage of `ok().expect(..)`."## },
+ Lint {
+ label: "clippy::op_ref",
+ description: r##"Checks for arguments to `==` which have their address
+taken to satisfy a bound
+and suggests to dereference the other argument instead"##,
+ },
+ Lint {
+ label: "clippy::option_as_ref_deref",
+ description: r##"Checks for usage of `_.as_ref().map(Deref::deref)` or it's aliases (such as String::as_str)."##,
+ },
+ Lint {
+ label: "clippy::option_env_unwrap",
+ description: r##"Checks for usage of `option_env!(...).unwrap()` and
+suggests usage of the `env!` macro."##,
+ },
+ Lint {
+ label: "clippy::option_filter_map",
+ description: r##"Checks for indirect collection of populated `Option`"##,
+ },
+ Lint {
+ label: "clippy::option_if_let_else",
+ description: r##"Lints usage of `if let Some(v) = ... { y } else { x }` which is more
+idiomatically done with `Option::map_or` (if the else bit is a pure
+expression) or `Option::map_or_else` (if the else bit is an impure
+expression)."##,
+ },
+ Lint {
+ label: "clippy::option_map_or_none",
+ description: r##"Checks for usage of `_.map_or(None, _)`."##,
+ },
+ Lint {
+ label: "clippy::option_map_unit_fn",
+ description: r##"Checks for usage of `option.map(f)` where f is a function
+or closure that returns the unit type `()`."##,
+ },
+ Lint {
+ label: "clippy::option_option",
+ description: r##"Checks for use of `Option<Option<_>>` in function signatures and type
+definitions"##,
+ },
+ Lint {
+ label: "clippy::or_fun_call",
+ description: r##"Checks for calls to `.or(foo(..))`, `.unwrap_or(foo(..))`,
+etc., and suggests to use `or_else`, `unwrap_or_else`, etc., or
+`unwrap_or_default` instead."##,
+ },
+ Lint {
+ label: "clippy::out_of_bounds_indexing",
+ description: r##"Checks for out of bounds array indexing with a constant
+index."##,
+ },
+ Lint {
+ label: "clippy::overflow_check_conditional",
+ description: r##"Detects classic underflow/overflow checks."##,
+ },
+ Lint { label: "clippy::panic", description: r##"Checks for usage of `panic!`."## },
+ Lint {
+ label: "clippy::panic_in_result_fn",
+ description: r##"Checks for usage of `panic!`, `unimplemented!`, `todo!`, `unreachable!` or assertions in a function of type result."##,
+ },
+ Lint {
+ label: "clippy::panicking_unwrap",
+ description: r##"Checks for calls of `unwrap[_err]()` that will always fail."##,
+ },
+ Lint {
+ label: "clippy::partialeq_ne_impl",
+ description: r##"Checks for manual re-implementations of `PartialEq::ne`."##,
+ },
+ Lint {
+ label: "clippy::path_buf_push_overwrite",
+ description: r##"* Checks for [push](https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.push)
+calls on `PathBuf` that can cause overwrites."##,
+ },
+ Lint {
+ label: "clippy::pattern_type_mismatch",
+ description: r##"Checks for patterns that aren't exact representations of the types
+they are applied to.
+
+To satisfy this lint, you will have to adjust either the expression that is matched
+against or the pattern itself, as well as the bindings that are introduced by the
+adjusted patterns. For matching you will have to either dereference the expression
+with the `*` operator, or amend the patterns to explicitly match against `&<pattern>`
+or `&mut <pattern>` depending on the reference mutability. For the bindings you need
+to use the inverse. You can leave them as plain bindings if you wish for the value
+to be copied, but you must use `ref mut <variable>` or `ref <variable>` to construct
+a reference into the matched structure.
+
+If you are looking for a way to learn about ownership semantics in more detail, it
+is recommended to look at IDE options available to you to highlight types, lifetimes
+and reference semantics in your code. The available tooling would expose these things
+in a general way even outside of the various pattern matching mechanics. Of course
+this lint can still be used to highlight areas of interest and ensure a good understanding
+of ownership semantics."##,
+ },
+ Lint {
+ label: "clippy::possible_missing_comma",
+ description: r##"Checks for possible missing comma in an array. It lints if
+an array element is a binary operator expression and it lies on two lines."##,
+ },
+ Lint {
+ label: "clippy::precedence",
+ description: r##"Checks for operations where precedence may be unclear
+and suggests to add parentheses. Currently it catches the following:
+* mixed usage of arithmetic and bit shifting/combining operators without
+parentheses
+* a negative numeric literal (which is really a unary `-` followed by a
+numeric literal)
+ followed by a method call"##,
+ },
+ Lint {
+ label: "clippy::print_literal",
+ description: r##"This lint warns about the use of literals as `print!`/`println!` args."##,
+ },
+ Lint {
+ label: "clippy::print_stderr",
+ description: r##"Checks for printing on *stderr*. The purpose of this lint
+is to catch debugging remnants."##,
+ },
+ Lint {
+ label: "clippy::print_stdout",
+ description: r##"Checks for printing on *stdout*. The purpose of this lint
+is to catch debugging remnants."##,
+ },
+ Lint {
+ label: "clippy::print_with_newline",
+ description: r##"This lint warns when you use `print!()` with a format
+string that ends in a newline."##,
+ },
+ Lint {
+ label: "clippy::println_empty_string",
+ description: r##"This lint warns when you use `println!()` to
+print a newline."##,
+ },
+ Lint {
+ label: "clippy::ptr_arg",
+ description: r##"This lint checks for function arguments of type `&String`
+or `&Vec` unless the references are mutable. It will also suggest you
+replace `.clone()` calls with the appropriate `.to_owned()`/`to_string()`
+calls."##,
+ },
+ Lint {
+ label: "clippy::ptr_as_ptr",
+ description: r##"Checks for `as` casts between raw pointers without changing its mutability,
+namely `*const T` to `*const U` and `*mut T` to `*mut U`."##,
+ },
+ Lint { label: "clippy::ptr_eq", description: r##"Use `std::ptr::eq` when applicable"## },
+ Lint {
+ label: "clippy::ptr_offset_with_cast",
+ description: r##"Checks for usage of the `offset` pointer method with a `usize` casted to an
+`isize`."##,
+ },
+ Lint {
+ label: "clippy::pub_enum_variant_names",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::question_mark",
+ description: r##"Checks for expressions that could be replaced by the question mark operator."##,
+ },
+ Lint {
+ label: "clippy::range_minus_one",
+ description: r##"Checks for inclusive ranges where 1 is subtracted from
+the upper bound, e.g., `x..=(y-1)`."##,
+ },
+ Lint {
+ label: "clippy::range_plus_one",
+ description: r##"Checks for exclusive ranges where 1 is added to the
+upper bound, e.g., `x..(y+1)`."##,
+ },
+ Lint {
+ label: "clippy::range_step_by_zero",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::range_zip_with_len",
+ description: r##"Checks for zipping a collection with the range of
+`0.._.len()`."##,
+ },
+ Lint {
+ label: "clippy::rc_buffer",
+ description: r##"Checks for `Rc<T>` and `Arc<T>` when `T` is a mutable buffer type such as `String` or `Vec`."##,
+ },
+ Lint { label: "clippy::rc_mutex", description: r##"Checks for `Rc<Mutex<T>>`."## },
+ Lint {
+ label: "clippy::redundant_allocation",
+ description: r##"Checks for use of redundant allocations anywhere in the code."##,
+ },
+ Lint {
+ label: "clippy::redundant_clone",
+ description: r##"Checks for a redundant `clone()` (and its relatives) which clones an owned
+value that is going to be dropped without further use."##,
+ },
+ Lint {
+ label: "clippy::redundant_closure",
+ description: r##"Checks for closures which just call another function where
+the function can be called directly. `unsafe` functions or calls where types
+get adjusted are ignored."##,
+ },
+ Lint {
+ label: "clippy::redundant_closure_call",
+ description: r##"Detects closures called in the same expression where they
+are defined."##,
+ },
+ Lint {
+ label: "clippy::redundant_closure_for_method_calls",
+ description: r##"Checks for closures which only invoke a method on the closure
+argument and can be replaced by referencing the method directly."##,
+ },
+ Lint {
+ label: "clippy::redundant_else",
+ description: r##"Checks for `else` blocks that can be removed without changing semantics."##,
+ },
+ Lint {
+ label: "clippy::redundant_feature_names",
+ description: r##"Checks for feature names with prefix `use-`, `with-` or suffix `-support`"##,
+ },
+ Lint {
+ label: "clippy::redundant_field_names",
+ description: r##"Checks for fields in struct literals where shorthands
+could be used."##,
+ },
+ Lint {
+ label: "clippy::redundant_pattern",
+ description: r##"Checks for patterns in the form `name @ _`."##,
+ },
+ Lint {
+ label: "clippy::redundant_pattern_matching",
+ description: r##"Lint for redundant pattern matching over `Result`, `Option`,
+`std::task::Poll` or `std::net::IpAddr`"##,
+ },
+ Lint {
+ label: "clippy::redundant_pub_crate",
+ description: r##"Checks for items declared `pub(crate)` that are not crate visible because they
+are inside a private module."##,
+ },
+ Lint {
+ label: "clippy::redundant_slicing",
+ description: r##"Checks for redundant slicing expressions which use the full range, and
+do not change the type."##,
+ },
+ Lint {
+ label: "clippy::redundant_static_lifetimes",
+ description: r##"Checks for constants and statics with an explicit `'static` lifetime."##,
+ },
+ Lint {
+ label: "clippy::ref_binding_to_reference",
+ description: r##"Checks for `ref` bindings which create a reference to a reference."##,
+ },
+ Lint {
+ label: "clippy::ref_in_deref",
+ description: r##"Checks for references in expressions that use
+auto dereference."##,
+ },
+ Lint {
+ label: "clippy::ref_option_ref",
+ description: r##"Checks for usage of `&Option<&T>`."##,
+ },
+ Lint {
+ label: "clippy::regex_macro",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::repeat_once",
+ description: r##"Checks for usage of `.repeat(1)` and suggest the following method for each types.
+- `.to_string()` for `str`
+- `.clone()` for `String`
+- `.to_vec()` for `slice`
+
+The lint will evaluate constant expressions and values as arguments of `.repeat(..)` and emit a message if
+they are equivalent to `1`. (Related discussion in [rust-clippy#7306](https://github.com/rust-lang/rust-clippy/issues/7306))"##,
+ },
+ Lint {
+ label: "clippy::replace_consts",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::rest_pat_in_fully_bound_structs",
+ description: r##"Checks for unnecessary '..' pattern binding on struct when all fields are explicitly matched."##,
+ },
+ Lint {
+ label: "clippy::result_map_or_into_option",
+ description: r##"Checks for usage of `_.map_or(None, Some)`."##,
+ },
+ Lint {
+ label: "clippy::result_map_unit_fn",
+ description: r##"Checks for usage of `result.map(f)` where f is a function
+or closure that returns the unit type `()`."##,
+ },
+ Lint {
+ label: "clippy::result_unit_err",
+ description: r##"Checks for public functions that return a `Result`
+with an `Err` type of `()`. It suggests using a custom type that
+implements `std::error::Error`."##,
+ },
+ Lint {
+ label: "clippy::return_self_not_must_use",
+ description: r##"This lint warns when a method returning `Self` doesn't have the `#[must_use]` attribute."##,
+ },
+ Lint {
+ label: "clippy::reversed_empty_ranges",
+ description: r##"Checks for range expressions `x..y` where both `x` and `y`
+are constant and `x` is greater or equal to `y`."##,
+ },
+ Lint {
+ label: "clippy::same_functions_in_if_condition",
+ description: r##"Checks for consecutive `if`s with the same function call."##,
+ },
+ Lint {
+ label: "clippy::same_item_push",
+ description: r##"Checks whether a for loop is being used to push a constant
+value into a Vec."##,
+ },
+ Lint {
+ label: "clippy::same_name_method",
+ description: r##"It lints if a struct has two methods with the same name:
+one from a trait, another not from trait."##,
+ },
+ Lint {
+ label: "clippy::search_is_some",
+ description: r##"Checks for an iterator or string search (such as `find()`,
+`position()`, or `rposition()`) followed by a call to `is_some()` or `is_none()`."##,
+ },
+ Lint {
+ label: "clippy::self_assignment",
+ description: r##"Checks for explicit self-assignments."##,
+ },
+ Lint {
+ label: "clippy::self_named_constructors",
+ description: r##"Warns when constructors have the same name as their types."##,
+ },
+ Lint {
+ label: "clippy::self_named_module_files",
+ description: r##"Checks that module layout uses only mod.rs files."##,
+ },
+ Lint {
+ label: "clippy::semicolon_if_nothing_returned",
+ description: r##"Looks for blocks of expressions and fires if the last expression returns
+`()` but is not followed by a semicolon."##,
+ },
+ Lint {
+ label: "clippy::separated_literal_suffix",
+ description: r##"Warns if literal suffixes are separated by an underscore.
+To enforce separated literal suffix style,
+see the `unseparated_literal_suffix` lint."##,
+ },
+ Lint {
+ label: "clippy::serde_api_misuse",
+ description: r##"Checks for mis-uses of the serde API."##,
+ },
+ Lint {
+ label: "clippy::shadow_reuse",
+ description: r##"Checks for bindings that shadow other bindings already in
+scope, while reusing the original value."##,
+ },
+ Lint {
+ label: "clippy::shadow_same",
+ description: r##"Checks for bindings that shadow other bindings already in
+scope, while just changing reference level or mutability."##,
+ },
+ Lint {
+ label: "clippy::shadow_unrelated",
+ description: r##"Checks for bindings that shadow other bindings already in
+scope, either without an initialization or with one that does not even use
+the original value."##,
+ },
+ Lint {
+ label: "clippy::short_circuit_statement",
+ description: r##"Checks for the use of short circuit boolean conditions as
+a
+statement."##,
+ },
+ Lint {
+ label: "clippy::should_assert_eq",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::should_implement_trait",
+ description: r##"Checks for methods that should live in a trait
+implementation of a `std` trait (see [llogiq's blog
+post](http://llogiq.github.io/2015/07/30/traits.html) for further
+information) instead of an inherent implementation."##,
+ },
+ Lint {
+ label: "clippy::similar_names",
+ description: r##"Checks for names that are very similar and thus confusing."##,
+ },
+ Lint {
+ label: "clippy::single_char_add_str",
+ description: r##"Warns when using `push_str`/`insert_str` with a single-character string literal
+where `push`/`insert` with a `char` would work fine."##,
+ },
+ Lint {
+ label: "clippy::single_char_pattern",
+ description: r##"Checks for string methods that receive a single-character
+`str` as an argument, e.g., `_.split(x)`."##,
+ },
+ Lint {
+ label: "clippy::single_component_path_imports",
+ description: r##"Checking for imports with single component use path."##,
+ },
+ Lint {
+ label: "clippy::single_element_loop",
+ description: r##"Checks whether a for loop has a single element."##,
+ },
+ Lint {
+ label: "clippy::single_match",
+ description: r##"Checks for matches with a single arm where an `if let`
+will usually suffice."##,
+ },
+ Lint {
+ label: "clippy::single_match_else",
+ description: r##"Checks for matches with two arms where an `if let else` will
+usually suffice."##,
+ },
+ Lint {
+ label: "clippy::size_of_in_element_count",
+ description: r##"Detects expressions where
+`size_of::<T>` or `size_of_val::<T>` is used as a
+count of elements of type `T`"##,
+ },
+ Lint {
+ label: "clippy::skip_while_next",
+ description: r##"Checks for usage of `_.skip_while(condition).next()`."##,
+ },
+ Lint {
+ label: "clippy::slow_vector_initialization",
+ description: r##"Checks slow zero-filled vector initialization"##,
+ },
+ Lint {
+ label: "clippy::stable_sort_primitive",
+ description: r##"When sorting primitive values (integers, bools, chars, as well
+as arrays, slices, and tuples of such items), it is better to
+use an unstable sort than a stable sort."##,
+ },
+ Lint {
+ label: "clippy::str_to_string",
+ description: r##"This lint checks for `.to_string()` method calls on values of type `&str`."##,
+ },
+ Lint {
+ label: "clippy::string_add",
+ description: r##"Checks for all instances of `x + _` where `x` is of type
+`String`, but only if [`string_add_assign`](#string_add_assign) does *not*
+match."##,
+ },
+ Lint {
+ label: "clippy::string_add_assign",
+ description: r##"Checks for string appends of the form `x = x + y` (without
+`let`!)."##,
+ },
+ Lint {
+ label: "clippy::string_extend_chars",
+ description: r##"Checks for the use of `.extend(s.chars())` where s is a
+`&str` or `String`."##,
+ },
+ Lint {
+ label: "clippy::string_from_utf8_as_bytes",
+ description: r##"Check if the string is transformed to byte array and casted back to string."##,
+ },
+ Lint {
+ label: "clippy::string_lit_as_bytes",
+ description: r##"Checks for the `as_bytes` method called on string literals
+that contain only ASCII characters."##,
+ },
+ Lint {
+ label: "clippy::string_slice",
+ description: r##"Checks for slice operations on strings"##,
+ },
+ Lint {
+ label: "clippy::string_to_string",
+ description: r##"This lint checks for `.to_string()` method calls on values of type `String`."##,
+ },
+ Lint {
+ label: "clippy::strlen_on_c_strings",
+ description: r##"Checks for usage of `libc::strlen` on a `CString` or `CStr` value,
+and suggest calling `as_bytes().len()` or `to_bytes().len()` respectively instead."##,
+ },
+ Lint {
+ label: "clippy::struct_excessive_bools",
+ description: r##"Checks for excessive
+use of bools in structs."##,
+ },
+ Lint {
+ label: "clippy::suboptimal_flops",
+ description: r##"Looks for floating-point expressions that
+can be expressed using built-in methods to improve both
+accuracy and performance."##,
+ },
+ Lint {
+ label: "clippy::suspicious_arithmetic_impl",
+ description: r##"Lints for suspicious operations in impls of arithmetic operators, e.g.
+subtracting elements in an Add impl."##,
+ },
+ Lint {
+ label: "clippy::suspicious_assignment_formatting",
+ description: r##"Checks for use of the non-existent `=*`, `=!` and `=-`
+operators."##,
+ },
+ Lint {
+ label: "clippy::suspicious_else_formatting",
+ description: r##"Checks for formatting of `else`. It lints if the `else`
+is followed immediately by a newline or the `else` seems to be missing."##,
+ },
+ Lint {
+ label: "clippy::suspicious_map",
+ description: r##"Checks for calls to `map` followed by a `count`."##,
+ },
+ Lint {
+ label: "clippy::suspicious_op_assign_impl",
+ description: r##"Lints for suspicious operations in impls of OpAssign, e.g.
+subtracting elements in an AddAssign impl."##,
+ },
+ Lint {
+ label: "clippy::suspicious_operation_groupings",
+ description: r##"Checks for unlikely usages of binary operators that are almost
+certainly typos and/or copy/paste errors, given the other usages
+of binary operators nearby."##,
+ },
+ Lint {
+ label: "clippy::suspicious_splitn",
+ description: r##"Checks for calls to [`splitn`]
+(https://doc.rust-lang.org/std/primitive.str.html#method.splitn) and
+related functions with either zero or one splits."##,
+ },
+ Lint {
+ label: "clippy::suspicious_unary_op_formatting",
+ description: r##"Checks the formatting of a unary operator on the right hand side
+of a binary operator. It lints if there is no space between the binary and unary operators,
+but there is a space between the unary and its operand."##,
+ },
+ Lint {
+ label: "clippy::tabs_in_doc_comments",
+ description: r##"Checks doc comments for usage of tab characters."##,
+ },
+ Lint {
+ label: "clippy::temporary_assignment",
+ description: r##"Checks for construction of a structure or tuple just to
+assign a value in it."##,
+ },
+ Lint {
+ label: "clippy::to_digit_is_some",
+ description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##,
+ },
+ Lint {
+ label: "clippy::to_string_in_display",
+ description: r##"Checks for uses of `to_string()` in `Display` traits."##,
+ },
+ Lint {
+ label: "clippy::to_string_in_format_args",
+ description: r##"Checks for [`ToString::to_string`](https://doc.rust-lang.org/std/string/trait.ToString.html#tymethod.to_string)
+applied to a type that implements [`Display`](https://doc.rust-lang.org/std/fmt/trait.Display.html)
+in a macro that does formatting."##,
+ },
+ Lint { label: "clippy::todo", description: r##"Checks for usage of `todo!`."## },
+ Lint {
+ label: "clippy::too_many_arguments",
+ description: r##"Checks for functions with too many parameters."##,
+ },
+ Lint {
+ label: "clippy::too_many_lines",
+ description: r##"Checks for functions with a large amount of lines."##,
+ },
+ Lint {
+ label: "clippy::toplevel_ref_arg",
+ description: r##"Checks for function arguments and let bindings denoted as
+`ref`."##,
+ },
+ Lint {
+ label: "clippy::trailing_empty_array",
+ description: r##"Displays a warning when a struct with a trailing zero-sized array is declared without a `repr` attribute."##,
+ },
+ Lint {
+ label: "clippy::trait_duplication_in_bounds",
+ description: r##"Checks for cases where generics are being used and multiple
+syntax specifications for trait bounds are used simultaneously."##,
+ },
+ Lint {
+ label: "clippy::transmute_bytes_to_str",
+ description: r##"Checks for transmutes from a `&[u8]` to a `&str`."##,
+ },
+ Lint {
+ label: "clippy::transmute_float_to_int",
+ description: r##"Checks for transmutes from a float to an integer."##,
+ },
+ Lint {
+ label: "clippy::transmute_int_to_bool",
+ description: r##"Checks for transmutes from an integer to a `bool`."##,
+ },
+ Lint {
+ label: "clippy::transmute_int_to_char",
+ description: r##"Checks for transmutes from an integer to a `char`."##,
+ },
+ Lint {
+ label: "clippy::transmute_int_to_float",
+ description: r##"Checks for transmutes from an integer to a float."##,
+ },
+ Lint {
+ label: "clippy::transmute_num_to_bytes",
+ description: r##"Checks for transmutes from a number to an array of `u8`"##,
+ },
+ Lint {
+ label: "clippy::transmute_ptr_to_ptr",
+ description: r##"Checks for transmutes from a pointer to a pointer, or
+from a reference to a reference."##,
+ },
+ Lint {
+ label: "clippy::transmute_ptr_to_ref",
+ description: r##"Checks for transmutes from a pointer to a reference."##,
+ },
+ Lint {
+ label: "clippy::transmutes_expressible_as_ptr_casts",
+ description: r##"Checks for transmutes that could be a pointer cast."##,
+ },
+ Lint {
+ label: "clippy::transmuting_null",
+ description: r##"Checks for transmute calls which would receive a null pointer."##,
+ },
+ Lint {
+ label: "clippy::trivial_regex",
+ description: r##"Checks for trivial [regex](https://crates.io/crates/regex)
+creation (with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`)."##,
+ },
+ Lint {
+ label: "clippy::trivially_copy_pass_by_ref",
+ description: r##"Checks for functions taking arguments by reference, where
+the argument type is `Copy` and small enough to be more efficient to always
+pass by value."##,
+ },
+ Lint { label: "clippy::try_err", description: r##"Checks for usages of `Err(x)?`."## },
+ Lint {
+ label: "clippy::type_complexity",
+ description: r##"Checks for types used in structs, parameters and `let`
+declarations above a certain complexity threshold."##,
+ },
+ Lint {
+ label: "clippy::type_repetition_in_bounds",
+ description: r##"This lint warns about unnecessary type repetitions in trait bounds"##,
+ },
+ Lint {
+ label: "clippy::undocumented_unsafe_blocks",
+ description: r##"Checks for `unsafe` blocks without a `// Safety: ` comment
+explaining why the unsafe operations performed inside
+the block are safe."##,
+ },
+ Lint {
+ label: "clippy::undropped_manually_drops",
+ description: r##"Prevents the safe `std::mem::drop` function from being called on `std::mem::ManuallyDrop`."##,
+ },
+ Lint {
+ label: "clippy::unicode_not_nfc",
+ description: r##"Checks for string literals that contain Unicode in a form
+that is not equal to its
+[NFC-recomposition](http://www.unicode.org/reports/tr15/#Norm_Forms)."##,
+ },
+ Lint {
+ label: "clippy::unimplemented",
+ description: r##"Checks for usage of `unimplemented!`."##,
+ },
+ Lint {
+ label: "clippy::uninit_assumed_init",
+ description: r##"Checks for `MaybeUninit::uninit().assume_init()`."##,
+ },
+ Lint {
+ label: "clippy::uninit_vec",
+ description: r##"Checks for `set_len()` call that creates `Vec` with uninitialized elements.
+This is commonly caused by calling `set_len()` right after allocating or
+reserving a buffer with `new()`, `default()`, `with_capacity()`, or `reserve()`."##,
+ },
+ Lint {
+ label: "clippy::unit_arg",
+ description: r##"Checks for passing a unit value as an argument to a function without using a
+unit literal (`()`)."##,
+ },
+ Lint {
+ label: "clippy::unit_cmp",
+ description: r##"Checks for comparisons to unit. This includes all binary
+comparisons (like `==` and `<`) and asserts."##,
+ },
+ Lint { label: "clippy::unit_hash", description: r##"Detects `().hash(_)`."## },
+ Lint {
+ label: "clippy::unit_return_expecting_ord",
+ description: r##"Checks for functions that expect closures of type
+Fn(...) -> Ord where the implemented closure returns the unit type.
+The lint also suggests to remove the semi-colon at the end of the statement if present."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_cast",
+ description: r##"Checks for casts to the same type, casts of int literals to integer types
+and casts of float literals to float types."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_filter_map",
+ description: r##"Checks for `filter_map` calls which could be replaced by `filter` or `map`.
+More specifically it checks if the closure provided is only performing one of the
+filter or map operations and suggests the appropriate option."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_fold",
+ description: r##"Checks for using `fold` when a more succinct alternative exists.
+Specifically, this checks for `fold`s which could be replaced by `any`, `all`,
+`sum` or `product`."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_lazy_evaluations",
+ description: r##"As the counterpart to `or_fun_call`, this lint looks for unnecessary
+lazily evaluated closures on `Option` and `Result`.
+
+This lint suggests changing the following functions, when eager evaluation results in
+simpler code:
+ - `unwrap_or_else` to `unwrap_or`
+ - `and_then` to `and`
+ - `or_else` to `or`
+ - `get_or_insert_with` to `get_or_insert`
+ - `ok_or_else` to `ok_or`"##,
+ },
+ Lint {
+ label: "clippy::unnecessary_mut_passed",
+ description: r##"Detects passing a mutable reference to a function that only
+requires an immutable reference."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_operation",
+ description: r##"Checks for expression statements that can be reduced to a
+sub-expression."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_self_imports",
+ description: r##"Checks for imports ending in `::{self}`."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_sort_by",
+ description: r##"Detects uses of `Vec::sort_by` passing in a closure
+which compares the two arguments, either directly or indirectly."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_to_owned",
+ description: r##"Checks for unnecessary calls to [`ToOwned::to_owned`](https://doc.rust-lang.org/std/borrow/trait.ToOwned.html#tymethod.to_owned)
+and other `to_owned`-like functions."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_unwrap",
+ description: r##"Checks for calls of `unwrap[_err]()` that cannot fail."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_wraps",
+ description: r##"Checks for private functions that only return `Ok` or `Some`."##,
+ },
+ Lint {
+ label: "clippy::unneeded_field_pattern",
+ description: r##"Checks for structure field patterns bound to wildcards."##,
+ },
+ Lint {
+ label: "clippy::unneeded_wildcard_pattern",
+ description: r##"Checks for tuple patterns with a wildcard
+pattern (`_`) is next to a rest pattern (`..`).
+
+_NOTE_: While `_, ..` means there is at least one element left, `..`
+means there are 0 or more elements left. This can make a difference
+when refactoring, but shouldn't result in errors in the refactored code,
+since the wildcard pattern isn't used anyway."##,
+ },
+ Lint {
+ label: "clippy::unnested_or_patterns",
+ description: r##"Checks for unnested or-patterns, e.g., `Some(0) | Some(2)` and
+suggests replacing the pattern with a nested one, `Some(0 | 2)`.
+
+Another way to think of this is that it rewrites patterns in
+*disjunctive normal form (DNF)* into *conjunctive normal form (CNF)*."##,
+ },
+ Lint { label: "clippy::unreachable", description: r##"Checks for usage of `unreachable!`."## },
+ Lint {
+ label: "clippy::unreadable_literal",
+ description: r##"Warns if a long integral or floating-point constant does
+not contain underscores."##,
+ },
+ Lint {
+ label: "clippy::unsafe_derive_deserialize",
+ description: r##"Checks for deriving `serde::Deserialize` on a type that
+has methods using `unsafe`."##,
+ },
+ Lint {
+ label: "clippy::unsafe_removed_from_name",
+ description: r##"Checks for imports that remove unsafe from an item's
+name."##,
+ },
+ Lint {
+ label: "clippy::unsafe_vector_initialization",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::unseparated_literal_suffix",
+ description: r##"Warns if literal suffixes are not separated by an
+underscore.
+To enforce unseparated literal suffix style,
+see the `separated_literal_suffix` lint."##,
+ },
+ Lint {
+ label: "clippy::unsound_collection_transmute",
+ description: r##"Checks for transmutes between collections whose
+types have different ABI, size or alignment."##,
+ },
+ Lint {
+ label: "clippy::unstable_as_mut_slice",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::unstable_as_slice",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::unused_async",
+ description: r##"Checks for functions that are declared `async` but have no `.await`s inside of them."##,
+ },
+ Lint {
+ label: "clippy::unused_collect",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::unused_io_amount",
+ description: r##"Checks for unused written/read amount."##,
+ },
+ Lint {
+ label: "clippy::unused_self",
+ description: r##"Checks methods that contain a `self` argument but don't use it"##,
+ },
+ Lint {
+ label: "clippy::unused_unit",
+ description: r##"Checks for unit (`()`) expressions that can be removed."##,
+ },
+ Lint {
+ label: "clippy::unusual_byte_groupings",
+ description: r##"Warns if hexadecimal or binary literals are not grouped
+by nibble or byte."##,
+ },
+ Lint {
+ label: "clippy::unwrap_in_result",
+ description: r##"Checks for functions of type `Result` that contain `expect()` or `unwrap()`"##,
+ },
+ Lint {
+ label: "clippy::unwrap_or_else_default",
+ description: r##"Checks for usages of `_.unwrap_or_else(Default::default)` on `Option` and
+`Result` values."##,
+ },
+ Lint {
+ label: "clippy::unwrap_used",
+ description: r##"Checks for `.unwrap()` calls on `Option`s and on `Result`s."##,
+ },
+ Lint {
+ label: "clippy::upper_case_acronyms",
+ description: r##"Checks for fully capitalized names and optionally names containing a capitalized acronym."##,
+ },
+ Lint {
+ label: "clippy::use_debug",
+ description: r##"Checks for use of `Debug` formatting. The purpose of this
+lint is to catch debugging remnants."##,
+ },
+ Lint {
+ label: "clippy::use_self",
+ description: r##"Checks for unnecessary repetition of structure name when a
+replacement with `Self` is applicable."##,
+ },
+ Lint {
+ label: "clippy::used_underscore_binding",
+ description: r##"Checks for the use of bindings with a single leading
+underscore."##,
+ },
+ Lint {
+ label: "clippy::useless_asref",
+ description: r##"Checks for usage of `.as_ref()` or `.as_mut()` where the
+types before and after the call are the same."##,
+ },
+ Lint {
+ label: "clippy::useless_attribute",
+ description: r##"Checks for `extern crate` and `use` items annotated with
+lint attributes.
+
+This lint permits `#[allow(unused_imports)]`, `#[allow(deprecated)]`,
+`#[allow(unreachable_pub)]`, `#[allow(clippy::wildcard_imports)]` and
+`#[allow(clippy::enum_glob_use)]` on `use` items and `#[allow(unused_imports)]` on
+`extern crate` items with a `#[macro_use]` attribute."##,
+ },
+ Lint {
+ label: "clippy::useless_conversion",
+ description: r##"Checks for `Into`, `TryInto`, `From`, `TryFrom`, or `IntoIter` calls
+which uselessly convert to the same type."##,
+ },
+ Lint {
+ label: "clippy::useless_format",
+ description: r##"Checks for the use of `format!(string literal with no
+argument)` and `format!({}, foo)` where `foo` is a string."##,
+ },
+ Lint {
+ label: "clippy::useless_let_if_seq",
+ description: r##"Checks for variable declarations immediately followed by a
+conditional affectation."##,
+ },
+ Lint {
+ label: "clippy::useless_transmute",
+ description: r##"Checks for transmutes to the original type of the object
+and transmutes that could be a cast."##,
+ },
+ Lint {
+ label: "clippy::useless_vec",
+ description: r##"Checks for usage of `&vec![..]` when using `&[..]` would
+be possible."##,
+ },
+ Lint {
+ label: "clippy::vec_box",
+ description: r##"Checks for use of `Vec<Box<T>>` where T: Sized anywhere in the code.
+Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
+ },
+ Lint {
+ label: "clippy::vec_init_then_push",
+ description: r##"Checks for calls to `push` immediately after creating a new `Vec`."##,
+ },
+ Lint {
+ label: "clippy::vec_resize_to_zero",
+ description: r##"Finds occurrences of `Vec::resize(0, an_int)`"##,
+ },
+ Lint {
+ label: "clippy::verbose_bit_mask",
+ description: r##"Checks for bit masks that can be replaced by a call
+to `trailing_zeros`"##,
+ },
+ Lint {
+ label: "clippy::verbose_file_reads",
+ description: r##"Checks for use of File::read_to_end and File::read_to_string."##,
+ },
+ Lint {
+ label: "clippy::vtable_address_comparisons",
+ description: r##"Checks for comparisons with an address of a trait vtable."##,
+ },
+ Lint {
+ label: "clippy::while_immutable_condition",
+ description: r##"Checks whether variables used within while loop condition
+can be (and are) mutated in the body."##,
+ },
+ Lint {
+ label: "clippy::while_let_loop",
+ description: r##"Detects `loop + match` combinations that are easier
+written as a `while let` loop."##,
+ },
+ Lint {
+ label: "clippy::while_let_on_iterator",
+ description: r##"Checks for `while let` expressions on iterators."##,
+ },
+ Lint {
+ label: "clippy::wildcard_dependencies",
+ description: r##"Checks for wildcard dependencies in the `Cargo.toml`."##,
+ },
+ Lint {
+ label: "clippy::wildcard_enum_match_arm",
+ description: r##"Checks for wildcard enum matches using `_`."##,
+ },
+ Lint {
+ label: "clippy::wildcard_imports",
+ description: r##"Checks for wildcard imports `use _::*`."##,
+ },
+ Lint {
+ label: "clippy::wildcard_in_or_patterns",
+ description: r##"Checks for wildcard pattern used with others patterns in same match arm."##,
+ },
+ Lint {
+ label: "clippy::write_literal",
+ description: r##"This lint warns about the use of literals as `write!`/`writeln!` args."##,
+ },
+ Lint {
+ label: "clippy::write_with_newline",
+ description: r##"This lint warns when you use `write!()` with a format
+string that
+ends in a newline."##,
+ },
+ Lint {
+ label: "clippy::writeln_empty_string",
+ description: r##"This lint warns when you use `writeln!(buf, )` to
+print a newline."##,
+ },
+ Lint {
+ label: "clippy::wrong_pub_self_convention",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::wrong_self_convention",
+ description: r##"Checks for methods with certain name prefixes and which
+doesn't match how self is taken. The actual rules are:
+
+|Prefix |Postfix |`self` taken | `self` type |
+|-------|------------|-----------------------|--------------|
+|`as_` | none |`&self` or `&mut self` | any |
+|`from_`| none | none | any |
+|`into_`| none |`self` | any |
+|`is_` | none |`&self` or none | any |
+|`to_` | `_mut` |`&mut self` | any |
+|`to_` | not `_mut` |`self` | `Copy` |
+|`to_` | not `_mut` |`&self` | not `Copy` |
+
+Note: Clippy doesn't trigger methods with `to_` prefix in:
+- Traits definition.
+Clippy can not tell if a type that implements a trait is `Copy` or not.
+- Traits implementation, when `&self` is taken.
+The method signature is controlled by the trait and often `&self` is required for all types that implement the trait
+(see e.g. the `std::string::ToString` trait).
+
+Clippy allows `Pin<&Self>` and `Pin<&mut Self>` if `&self` and `&mut self` is required.
+
+Please find more info here:
+https://rust-lang.github.io/api-guidelines/naming.html#ad-hoc-conversions-follow-as_-to_-into_-conventions-c-conv"##,
+ },
+ Lint {
+ label: "clippy::wrong_transmute",
+ description: r##"Checks for transmutes that can't ever be correct on any
+architecture."##,
+ },
+ Lint { label: "clippy::zero_divided_by_zero", description: r##"Checks for `0.0 / 0.0`."## },
+ Lint {
+ label: "clippy::zero_prefixed_literal",
+ description: r##"Warns if an integral constant literal starts with `0`."##,
+ },
+ Lint {
+ label: "clippy::zero_ptr",
+ description: r##"Catch casts from `0` to some pointer type"##,
+ },
+ Lint {
+ label: "clippy::zero_sized_map_values",
+ description: r##"Checks for maps with zero-sized value types anywhere in the code."##,
+ },
+ Lint {
+ label: "clippy::zst_offset",
+ description: r##"Checks for `offset(_)`, `wrapping_`{`add`, `sub`}, etc. on raw pointers to
+zero-sized types"##,
+ },
+];
+pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
+ LintGroup {
+ lint: Lint {
+ label: "clippy::cargo",
+ description: r##"lint group for: clippy::cargo_common_metadata, clippy::multiple_crate_versions, clippy::negative_feature_names, clippy::redundant_feature_names, clippy::wildcard_dependencies"##,
+ },
+ children: &[
+ "clippy::cargo_common_metadata",
+ "clippy::multiple_crate_versions",
+ "clippy::negative_feature_names",
+ "clippy::redundant_feature_names",
+ "clippy::wildcard_dependencies",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::complexity",
+ description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrowed_box, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::manual_filter_map, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_borrowed_reference, clippy::needless_lifetimes, clippy::needless_option_as_deref, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_closure_call, clippy::redundant_slicing, clippy::ref_in_deref, clippy::repeat_once, clippy::result_map_unit_fn, clippy::search_is_some, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, 
clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##,
+ },
+ children: &[
+ "clippy::bind_instead_of_map",
+ "clippy::bool_comparison",
+ "clippy::borrowed_box",
+ "clippy::char_lit_as_u8",
+ "clippy::clone_on_copy",
+ "clippy::crosspointer_transmute",
+ "clippy::deprecated_cfg_attr",
+ "clippy::deref_addrof",
+ "clippy::derivable_impls",
+ "clippy::diverging_sub_expression",
+ "clippy::double_comparisons",
+ "clippy::double_parens",
+ "clippy::duration_subsec",
+ "clippy::explicit_counter_loop",
+ "clippy::explicit_write",
+ "clippy::extra_unused_lifetimes",
+ "clippy::filter_map_identity",
+ "clippy::filter_next",
+ "clippy::flat_map_identity",
+ "clippy::get_last_with_len",
+ "clippy::identity_op",
+ "clippy::inspect_for_each",
+ "clippy::int_plus_one",
+ "clippy::iter_count",
+ "clippy::manual_filter_map",
+ "clippy::manual_find_map",
+ "clippy::manual_flatten",
+ "clippy::manual_split_once",
+ "clippy::manual_strip",
+ "clippy::manual_swap",
+ "clippy::manual_unwrap_or",
+ "clippy::map_flatten",
+ "clippy::map_identity",
+ "clippy::match_as_ref",
+ "clippy::match_single_binding",
+ "clippy::needless_arbitrary_self_type",
+ "clippy::needless_bool",
+ "clippy::needless_borrowed_reference",
+ "clippy::needless_lifetimes",
+ "clippy::needless_option_as_deref",
+ "clippy::needless_question_mark",
+ "clippy::needless_splitn",
+ "clippy::needless_update",
+ "clippy::neg_cmp_op_on_partial_ord",
+ "clippy::no_effect",
+ "clippy::nonminimal_bool",
+ "clippy::option_as_ref_deref",
+ "clippy::option_filter_map",
+ "clippy::option_map_unit_fn",
+ "clippy::overflow_check_conditional",
+ "clippy::partialeq_ne_impl",
+ "clippy::precedence",
+ "clippy::ptr_offset_with_cast",
+ "clippy::range_zip_with_len",
+ "clippy::redundant_closure_call",
+ "clippy::redundant_slicing",
+ "clippy::ref_in_deref",
+ "clippy::repeat_once",
+ "clippy::result_map_unit_fn",
+ "clippy::search_is_some",
+ "clippy::short_circuit_statement",
+ "clippy::single_element_loop",
+ "clippy::skip_while_next",
+ "clippy::string_from_utf8_as_bytes",
+ "clippy::strlen_on_c_strings",
+ "clippy::temporary_assignment",
+ "clippy::too_many_arguments",
+ "clippy::transmute_bytes_to_str",
+ "clippy::transmute_float_to_int",
+ "clippy::transmute_int_to_bool",
+ "clippy::transmute_int_to_char",
+ "clippy::transmute_int_to_float",
+ "clippy::transmute_num_to_bytes",
+ "clippy::transmute_ptr_to_ref",
+ "clippy::transmutes_expressible_as_ptr_casts",
+ "clippy::type_complexity",
+ "clippy::unit_arg",
+ "clippy::unnecessary_cast",
+ "clippy::unnecessary_filter_map",
+ "clippy::unnecessary_operation",
+ "clippy::unnecessary_sort_by",
+ "clippy::unnecessary_unwrap",
+ "clippy::unneeded_wildcard_pattern",
+ "clippy::useless_asref",
+ "clippy::useless_conversion",
+ "clippy::useless_format",
+ "clippy::vec_box",
+ "clippy::while_let_loop",
+ "clippy::wildcard_in_or_patterns",
+ "clippy::zero_divided_by_zero",
+ "clippy::zero_prefixed_literal",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::correctness",
+ description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_ref_to_mut, clippy::clone_double_ref, clippy::cmp_nan, clippy::deprecated_semver, clippy::derive_hash_xor_eq, clippy::derive_ord_xor_partial_ord, clippy::drop_copy, clippy::drop_ref, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::forget_copy, clippy::forget_ref, clippy::if_let_mutex, clippy::if_same_then_else, clippy::ifs_same_cond, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::logic_bug, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::to_string_in_display, clippy::transmuting_null, clippy::undropped_manually_drops, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::vtable_address_comparisons, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##,
+ },
+ children: &[
+ "clippy::absurd_extreme_comparisons",
+ "clippy::almost_swapped",
+ "clippy::approx_constant",
+ "clippy::async_yields_async",
+ "clippy::bad_bit_mask",
+ "clippy::cast_ref_to_mut",
+ "clippy::clone_double_ref",
+ "clippy::cmp_nan",
+ "clippy::deprecated_semver",
+ "clippy::derive_hash_xor_eq",
+ "clippy::derive_ord_xor_partial_ord",
+ "clippy::drop_copy",
+ "clippy::drop_ref",
+ "clippy::enum_clike_unportable_variant",
+ "clippy::eq_op",
+ "clippy::erasing_op",
+ "clippy::fn_address_comparisons",
+ "clippy::forget_copy",
+ "clippy::forget_ref",
+ "clippy::if_let_mutex",
+ "clippy::if_same_then_else",
+ "clippy::ifs_same_cond",
+ "clippy::ineffective_bit_mask",
+ "clippy::infinite_iter",
+ "clippy::inherent_to_string_shadow_display",
+ "clippy::inline_fn_without_body",
+ "clippy::invalid_null_ptr_usage",
+ "clippy::invalid_regex",
+ "clippy::invisible_characters",
+ "clippy::iter_next_loop",
+ "clippy::iterator_step_by_zero",
+ "clippy::let_underscore_lock",
+ "clippy::logic_bug",
+ "clippy::match_str_case_mismatch",
+ "clippy::mem_replace_with_uninit",
+ "clippy::min_max",
+ "clippy::mismatched_target_os",
+ "clippy::mistyped_literal_suffixes",
+ "clippy::modulo_one",
+ "clippy::mut_from_ref",
+ "clippy::never_loop",
+ "clippy::non_octal_unix_permissions",
+ "clippy::nonsensical_open_options",
+ "clippy::not_unsafe_ptr_arg_deref",
+ "clippy::option_env_unwrap",
+ "clippy::out_of_bounds_indexing",
+ "clippy::panicking_unwrap",
+ "clippy::possible_missing_comma",
+ "clippy::reversed_empty_ranges",
+ "clippy::self_assignment",
+ "clippy::serde_api_misuse",
+ "clippy::size_of_in_element_count",
+ "clippy::suspicious_splitn",
+ "clippy::to_string_in_display",
+ "clippy::transmuting_null",
+ "clippy::undropped_manually_drops",
+ "clippy::uninit_assumed_init",
+ "clippy::uninit_vec",
+ "clippy::unit_cmp",
+ "clippy::unit_hash",
+ "clippy::unit_return_expecting_ord",
+ "clippy::unsound_collection_transmute",
+ "clippy::unused_io_amount",
+ "clippy::useless_attribute",
+ "clippy::vec_resize_to_zero",
+ "clippy::vtable_address_comparisons",
+ "clippy::while_immutable_condition",
+ "clippy::wrong_transmute",
+ "clippy::zst_offset",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::deprecated",
+ description: r##"lint group for: clippy::assign_ops, clippy::extend_from_slice, clippy::filter_map, clippy::find_map, clippy::if_let_redundant_pattern_matching, clippy::misaligned_transmute, clippy::pub_enum_variant_names, clippy::range_step_by_zero, clippy::regex_macro, clippy::replace_consts, clippy::should_assert_eq, clippy::unsafe_vector_initialization, clippy::unstable_as_mut_slice, clippy::unstable_as_slice, clippy::unused_collect, clippy::wrong_pub_self_convention"##,
+ },
+ children: &[
+ "clippy::assign_ops",
+ "clippy::extend_from_slice",
+ "clippy::filter_map",
+ "clippy::find_map",
+ "clippy::if_let_redundant_pattern_matching",
+ "clippy::misaligned_transmute",
+ "clippy::pub_enum_variant_names",
+ "clippy::range_step_by_zero",
+ "clippy::regex_macro",
+ "clippy::replace_consts",
+ "clippy::should_assert_eq",
+ "clippy::unsafe_vector_initialization",
+ "clippy::unstable_as_mut_slice",
+ "clippy::unstable_as_slice",
+ "clippy::unused_collect",
+ "clippy::wrong_pub_self_convention",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::nursery",
+ description: r##"lint group for: clippy::branches_sharing_code, clippy::cognitive_complexity, clippy::debug_assert_with_mut_call, clippy::disallowed_methods, clippy::disallowed_types, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::imprecise_flops, clippy::index_refutable_slice, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::path_buf_push_overwrite, clippy::redundant_pub_crate, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trivial_regex, clippy::use_self, clippy::useless_let_if_seq, clippy::useless_transmute"##,
+ },
+ children: &[
+ "clippy::branches_sharing_code",
+ "clippy::cognitive_complexity",
+ "clippy::debug_assert_with_mut_call",
+ "clippy::disallowed_methods",
+ "clippy::disallowed_types",
+ "clippy::empty_line_after_outer_attr",
+ "clippy::equatable_if_let",
+ "clippy::fallible_impl_from",
+ "clippy::future_not_send",
+ "clippy::imprecise_flops",
+ "clippy::index_refutable_slice",
+ "clippy::missing_const_for_fn",
+ "clippy::mutex_integer",
+ "clippy::non_send_fields_in_send_ty",
+ "clippy::nonstandard_macro_braces",
+ "clippy::option_if_let_else",
+ "clippy::path_buf_push_overwrite",
+ "clippy::redundant_pub_crate",
+ "clippy::string_lit_as_bytes",
+ "clippy::suboptimal_flops",
+ "clippy::suspicious_operation_groupings",
+ "clippy::trailing_empty_array",
+ "clippy::trivial_regex",
+ "clippy::use_self",
+ "clippy::useless_let_if_seq",
+ "clippy::useless_transmute",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::pedantic",
+ description: r##"lint group for: clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::implicit_clone, clippy::implicit_hasher, clippy::implicit_saturating_sub, clippy::inconsistent_struct_constructor, clippy::inefficient_to_string, clippy::inline_always, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_not_returning_iterator, clippy::large_digit_groups, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::let_underscore_drop, clippy::let_unit_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_ok_or, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::missing_errors_doc, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::no_effect_underscore_binding, clippy::option_option, clippy::ptr_as_ptr, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, 
clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::similar_names, clippy::single_match_else, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::too_many_lines, clippy::trait_duplication_in_bounds, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::type_repetition_in_bounds, clippy::unicode_not_nfc, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##,
+ },
+ children: &[
+ "clippy::await_holding_lock",
+ "clippy::await_holding_refcell_ref",
+ "clippy::case_sensitive_file_extension_comparisons",
+ "clippy::cast_lossless",
+ "clippy::cast_possible_truncation",
+ "clippy::cast_possible_wrap",
+ "clippy::cast_precision_loss",
+ "clippy::cast_ptr_alignment",
+ "clippy::cast_sign_loss",
+ "clippy::checked_conversions",
+ "clippy::cloned_instead_of_copied",
+ "clippy::copy_iterator",
+ "clippy::default_trait_access",
+ "clippy::doc_markdown",
+ "clippy::empty_enum",
+ "clippy::enum_glob_use",
+ "clippy::expl_impl_clone_on_copy",
+ "clippy::explicit_deref_methods",
+ "clippy::explicit_into_iter_loop",
+ "clippy::explicit_iter_loop",
+ "clippy::filter_map_next",
+ "clippy::flat_map_option",
+ "clippy::float_cmp",
+ "clippy::fn_params_excessive_bools",
+ "clippy::from_iter_instead_of_collect",
+ "clippy::if_not_else",
+ "clippy::implicit_clone",
+ "clippy::implicit_hasher",
+ "clippy::implicit_saturating_sub",
+ "clippy::inconsistent_struct_constructor",
+ "clippy::inefficient_to_string",
+ "clippy::inline_always",
+ "clippy::invalid_upcast_comparisons",
+ "clippy::items_after_statements",
+ "clippy::iter_not_returning_iterator",
+ "clippy::large_digit_groups",
+ "clippy::large_stack_arrays",
+ "clippy::large_types_passed_by_value",
+ "clippy::let_underscore_drop",
+ "clippy::let_unit_value",
+ "clippy::linkedlist",
+ "clippy::macro_use_imports",
+ "clippy::manual_assert",
+ "clippy::manual_ok_or",
+ "clippy::many_single_char_names",
+ "clippy::map_unwrap_or",
+ "clippy::match_bool",
+ "clippy::match_on_vec_items",
+ "clippy::match_same_arms",
+ "clippy::match_wild_err_arm",
+ "clippy::match_wildcard_for_single_variants",
+ "clippy::maybe_infinite_iter",
+ "clippy::missing_errors_doc",
+ "clippy::missing_panics_doc",
+ "clippy::module_name_repetitions",
+ "clippy::must_use_candidate",
+ "clippy::mut_mut",
+ "clippy::naive_bytecount",
+ "clippy::needless_bitwise_bool",
+ "clippy::needless_continue",
+ "clippy::needless_for_each",
+ "clippy::needless_pass_by_value",
+ "clippy::no_effect_underscore_binding",
+ "clippy::option_option",
+ "clippy::ptr_as_ptr",
+ "clippy::range_minus_one",
+ "clippy::range_plus_one",
+ "clippy::redundant_closure_for_method_calls",
+ "clippy::redundant_else",
+ "clippy::ref_binding_to_reference",
+ "clippy::ref_option_ref",
+ "clippy::same_functions_in_if_condition",
+ "clippy::semicolon_if_nothing_returned",
+ "clippy::similar_names",
+ "clippy::single_match_else",
+ "clippy::string_add_assign",
+ "clippy::struct_excessive_bools",
+ "clippy::too_many_lines",
+ "clippy::trait_duplication_in_bounds",
+ "clippy::transmute_ptr_to_ptr",
+ "clippy::trivially_copy_pass_by_ref",
+ "clippy::type_repetition_in_bounds",
+ "clippy::unicode_not_nfc",
+ "clippy::unnecessary_wraps",
+ "clippy::unnested_or_patterns",
+ "clippy::unreadable_literal",
+ "clippy::unsafe_derive_deserialize",
+ "clippy::unused_async",
+ "clippy::unused_self",
+ "clippy::used_underscore_binding",
+ "clippy::verbose_bit_mask",
+ "clippy::wildcard_imports",
+ "clippy::zero_sized_map_values",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::perf",
+ description: r##"lint group for: clippy::box_collection, clippy::boxed_local, clippy::cmp_owned, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_in_format_args, clippy::iter_nth, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_str_repeat, clippy::map_entry, clippy::mutex_atomic, clippy::needless_collect, clippy::or_fun_call, clippy::redundant_allocation, clippy::redundant_clone, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::stable_sort_primitive, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push"##,
+ },
+ children: &[
+ "clippy::box_collection",
+ "clippy::boxed_local",
+ "clippy::cmp_owned",
+ "clippy::expect_fun_call",
+ "clippy::extend_with_drain",
+ "clippy::format_in_format_args",
+ "clippy::iter_nth",
+ "clippy::large_const_arrays",
+ "clippy::large_enum_variant",
+ "clippy::manual_memcpy",
+ "clippy::manual_str_repeat",
+ "clippy::map_entry",
+ "clippy::mutex_atomic",
+ "clippy::needless_collect",
+ "clippy::or_fun_call",
+ "clippy::redundant_allocation",
+ "clippy::redundant_clone",
+ "clippy::single_char_pattern",
+ "clippy::slow_vector_initialization",
+ "clippy::stable_sort_primitive",
+ "clippy::to_string_in_format_args",
+ "clippy::unnecessary_to_owned",
+ "clippy::useless_vec",
+ "clippy::vec_init_then_push",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::restriction",
+ description: r##"lint group for: clippy::as_conversions, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::get_unwrap, clippy::if_then_some_else_none, clippy::implicit_return, clippy::indexing_slicing, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_arithmetic, clippy::integer_division, clippy::let_underscore_must_use, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::missing_docs_in_private_items, clippy::missing_enforced_import_renames, clippy::missing_inline_in_public_items, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::rc_buffer, clippy::rc_mutex, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::str_to_string, clippy::string_add, clippy::string_slice, clippy::string_to_string, clippy::todo, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##,
+ },
+ children: &[
+ "clippy::as_conversions",
+ "clippy::clone_on_ref_ptr",
+ "clippy::create_dir",
+ "clippy::dbg_macro",
+ "clippy::decimal_literal_representation",
+ "clippy::default_numeric_fallback",
+ "clippy::disallowed_script_idents",
+ "clippy::else_if_without_else",
+ "clippy::exhaustive_enums",
+ "clippy::exhaustive_structs",
+ "clippy::exit",
+ "clippy::expect_used",
+ "clippy::filetype_is_file",
+ "clippy::float_arithmetic",
+ "clippy::float_cmp_const",
+ "clippy::fn_to_numeric_cast_any",
+ "clippy::get_unwrap",
+ "clippy::if_then_some_else_none",
+ "clippy::implicit_return",
+ "clippy::indexing_slicing",
+ "clippy::inline_asm_x86_att_syntax",
+ "clippy::inline_asm_x86_intel_syntax",
+ "clippy::integer_arithmetic",
+ "clippy::integer_division",
+ "clippy::let_underscore_must_use",
+ "clippy::lossy_float_literal",
+ "clippy::map_err_ignore",
+ "clippy::mem_forget",
+ "clippy::missing_docs_in_private_items",
+ "clippy::missing_enforced_import_renames",
+ "clippy::missing_inline_in_public_items",
+ "clippy::mod_module_files",
+ "clippy::modulo_arithmetic",
+ "clippy::multiple_inherent_impl",
+ "clippy::non_ascii_literal",
+ "clippy::panic",
+ "clippy::panic_in_result_fn",
+ "clippy::pattern_type_mismatch",
+ "clippy::print_stderr",
+ "clippy::print_stdout",
+ "clippy::rc_buffer",
+ "clippy::rc_mutex",
+ "clippy::rest_pat_in_fully_bound_structs",
+ "clippy::same_name_method",
+ "clippy::self_named_module_files",
+ "clippy::separated_literal_suffix",
+ "clippy::shadow_reuse",
+ "clippy::shadow_same",
+ "clippy::shadow_unrelated",
+ "clippy::str_to_string",
+ "clippy::string_add",
+ "clippy::string_slice",
+ "clippy::string_to_string",
+ "clippy::todo",
+ "clippy::undocumented_unsafe_blocks",
+ "clippy::unimplemented",
+ "clippy::unnecessary_self_imports",
+ "clippy::unneeded_field_pattern",
+ "clippy::unreachable",
+ "clippy::unseparated_literal_suffix",
+ "clippy::unwrap_in_result",
+ "clippy::unwrap_used",
+ "clippy::use_debug",
+ "clippy::verbose_file_reads",
+ "clippy::wildcard_enum_match_arm",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::style",
+ description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blacklisted_name, clippy::blocks_in_if_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::into_iter_on_ref, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_map, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_doctest_main, clippy::needless_late_init, clippy::needless_range_loop, clippy::needless_return, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_none, 
clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::toplevel_ref_arg, clippy::try_err, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unsafe_removed_from_name, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_else_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##,
+ },
+ children: &[
+ "clippy::assertions_on_constants",
+ "clippy::assign_op_pattern",
+ "clippy::blacklisted_name",
+ "clippy::blocks_in_if_conditions",
+ "clippy::bool_assert_comparison",
+ "clippy::borrow_interior_mutable_const",
+ "clippy::builtin_type_shadow",
+ "clippy::bytes_nth",
+ "clippy::chars_last_cmp",
+ "clippy::chars_next_cmp",
+ "clippy::cmp_null",
+ "clippy::collapsible_else_if",
+ "clippy::collapsible_if",
+ "clippy::collapsible_match",
+ "clippy::comparison_chain",
+ "clippy::comparison_to_empty",
+ "clippy::declare_interior_mutable_const",
+ "clippy::double_must_use",
+ "clippy::double_neg",
+ "clippy::duplicate_underscore_argument",
+ "clippy::enum_variant_names",
+ "clippy::excessive_precision",
+ "clippy::field_reassign_with_default",
+ "clippy::fn_to_numeric_cast",
+ "clippy::fn_to_numeric_cast_with_truncation",
+ "clippy::for_kv_map",
+ "clippy::from_over_into",
+ "clippy::from_str_radix_10",
+ "clippy::inconsistent_digit_grouping",
+ "clippy::infallible_destructuring_match",
+ "clippy::inherent_to_string",
+ "clippy::into_iter_on_ref",
+ "clippy::iter_cloned_collect",
+ "clippy::iter_next_slice",
+ "clippy::iter_nth_zero",
+ "clippy::iter_skip_next",
+ "clippy::just_underscores_and_digits",
+ "clippy::len_without_is_empty",
+ "clippy::len_zero",
+ "clippy::let_and_return",
+ "clippy::main_recursion",
+ "clippy::manual_async_fn",
+ "clippy::manual_map",
+ "clippy::manual_non_exhaustive",
+ "clippy::manual_range_contains",
+ "clippy::manual_saturating_arithmetic",
+ "clippy::map_clone",
+ "clippy::map_collect_result_unit",
+ "clippy::match_like_matches_macro",
+ "clippy::match_overlapping_arm",
+ "clippy::match_ref_pats",
+ "clippy::match_result_ok",
+ "clippy::mem_replace_option_with_none",
+ "clippy::mem_replace_with_default",
+ "clippy::missing_safety_doc",
+ "clippy::mixed_case_hex_literals",
+ "clippy::module_inception",
+ "clippy::must_use_unit",
+ "clippy::mut_mutex_lock",
+ "clippy::needless_borrow",
+ "clippy::needless_doctest_main",
+ "clippy::needless_late_init",
+ "clippy::needless_range_loop",
+ "clippy::needless_return",
+ "clippy::neg_multiply",
+ "clippy::new_ret_no_self",
+ "clippy::new_without_default",
+ "clippy::ok_expect",
+ "clippy::op_ref",
+ "clippy::option_map_or_none",
+ "clippy::print_literal",
+ "clippy::print_with_newline",
+ "clippy::println_empty_string",
+ "clippy::ptr_arg",
+ "clippy::ptr_eq",
+ "clippy::question_mark",
+ "clippy::redundant_closure",
+ "clippy::redundant_field_names",
+ "clippy::redundant_pattern",
+ "clippy::redundant_pattern_matching",
+ "clippy::redundant_static_lifetimes",
+ "clippy::result_map_or_into_option",
+ "clippy::result_unit_err",
+ "clippy::same_item_push",
+ "clippy::self_named_constructors",
+ "clippy::should_implement_trait",
+ "clippy::single_char_add_str",
+ "clippy::single_component_path_imports",
+ "clippy::single_match",
+ "clippy::string_extend_chars",
+ "clippy::tabs_in_doc_comments",
+ "clippy::to_digit_is_some",
+ "clippy::toplevel_ref_arg",
+ "clippy::try_err",
+ "clippy::unnecessary_fold",
+ "clippy::unnecessary_lazy_evaluations",
+ "clippy::unnecessary_mut_passed",
+ "clippy::unsafe_removed_from_name",
+ "clippy::unused_unit",
+ "clippy::unusual_byte_groupings",
+ "clippy::unwrap_or_else_default",
+ "clippy::upper_case_acronyms",
+ "clippy::while_let_on_iterator",
+ "clippy::write_literal",
+ "clippy::write_with_newline",
+ "clippy::writeln_empty_string",
+ "clippy::wrong_self_convention",
+ "clippy::zero_ptr",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::suspicious",
+ description: r##"lint group for: clippy::blanket_clippy_restriction_lints, clippy::empty_loop, clippy::eval_order_dependence, clippy::float_equality_without_abs, clippy::for_loops_over_fallibles, clippy::misrefactored_assign_op, clippy::mut_range_bound, clippy::mutable_key_type, clippy::octal_escapes, clippy::return_self_not_must_use, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_unary_op_formatting"##,
+ },
+ children: &[
+ "clippy::blanket_clippy_restriction_lints",
+ "clippy::empty_loop",
+ "clippy::eval_order_dependence",
+ "clippy::float_equality_without_abs",
+ "clippy::for_loops_over_fallibles",
+ "clippy::misrefactored_assign_op",
+ "clippy::mut_range_bound",
+ "clippy::mutable_key_type",
+ "clippy::octal_escapes",
+ "clippy::return_self_not_must_use",
+ "clippy::suspicious_arithmetic_impl",
+ "clippy::suspicious_assignment_formatting",
+ "clippy::suspicious_else_formatting",
+ "clippy::suspicious_map",
+ "clippy::suspicious_op_assign_impl",
+ "clippy::suspicious_unary_op_formatting",
+ ],
+ },
+];
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
new file mode 100644
index 000000000..6e56efe34
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -0,0 +1,105 @@
+//! Random assortment of ide helpers for high-level ide features that don't fit in any other module.
+
+use std::collections::VecDeque;
+
+use base_db::FileId;
+use hir::{ItemInNs, ModuleDef, Name, Semantics};
+use syntax::{
+ ast::{self, make},
+ AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
+};
+
+use crate::{defs::Definition, generated, RootDatabase};
+
+pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option<Name> {
+ match item {
+ ItemInNs::Types(module_def_id) => module_def_id.name(db),
+ ItemInNs::Values(module_def_id) => module_def_id.name(db),
+ ItemInNs::Macros(macro_def_id) => Some(macro_def_id.name(db)),
+ }
+}
+
+/// Picks the token with the highest rank returned by the passed in function.
+pub fn pick_best_token(
+ tokens: TokenAtOffset<SyntaxToken>,
+ f: impl Fn(SyntaxKind) -> usize,
+) -> Option<SyntaxToken> {
+ tokens.max_by_key(move |t| f(t.kind()))
+}
+pub fn pick_token<T: AstToken>(mut tokens: TokenAtOffset<SyntaxToken>) -> Option<T> {
+ tokens.find_map(T::cast)
+}
+
+/// Converts the mod path struct into its ast representation.
+pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
+ let _p = profile::span("mod_path_to_ast");
+
+ let mut segments = Vec::new();
+ let mut is_abs = false;
+ match path.kind {
+ hir::PathKind::Plain => {}
+ hir::PathKind::Super(0) => segments.push(make::path_segment_self()),
+ hir::PathKind::Super(n) => segments.extend((0..n).map(|_| make::path_segment_super())),
+ hir::PathKind::DollarCrate(_) | hir::PathKind::Crate => {
+ segments.push(make::path_segment_crate())
+ }
+ hir::PathKind::Abs => is_abs = true,
+ }
+
+ segments.extend(
+ path.segments()
+ .iter()
+ .map(|segment| make::path_segment(make::name_ref(&segment.to_smol_str()))),
+ );
+ make::path_from_segments(segments, is_abs)
+}
+
+/// Iterates all `ModuleDef`s and `Impl` blocks of the given file.
+pub fn visit_file_defs(
+ sema: &Semantics<'_, RootDatabase>,
+ file_id: FileId,
+ cb: &mut dyn FnMut(Definition),
+) {
+ let db = sema.db;
+ let module = match sema.to_module_def(file_id) {
+ Some(it) => it,
+ None => return,
+ };
+ let mut defs: VecDeque<_> = module.declarations(db).into();
+ while let Some(def) = defs.pop_front() {
+ if let ModuleDef::Module(submodule) = def {
+ if let hir::ModuleSource::Module(_) = submodule.definition_source(db).value {
+ defs.extend(submodule.declarations(db));
+ submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
+ }
+ }
+ cb(def.into());
+ }
+ module.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
+
+ let is_root = module.is_crate_root(db);
+ module
+ .legacy_macros(db)
+ .into_iter()
+ // don't show legacy macros declared in the crate-root that were already covered in declarations earlier
+ .filter(|it| !(is_root && it.is_macro_export(db)))
+ .for_each(|mac| cb(mac.into()));
+}
+
+/// Checks if the given lint is equal or is contained by the other lint which may or may not be a group.
+pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
+ if lint == lint_is {
+ return true;
+ }
+
+ if let Some(group) = generated::lints::DEFAULT_LINT_GROUPS
+ .iter()
+ .chain(generated::lints::CLIPPY_LINT_GROUPS.iter())
+ .chain(generated::lints::RUSTDOC_LINT_GROUPS.iter())
+ .find(|&check| check.lint.label == lint_is)
+ {
+ group.children.contains(&lint)
+ } else {
+ false
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
new file mode 100644
index 000000000..26ef86155
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -0,0 +1,674 @@
+//! Look up accessible paths for items.
+use hir::{
+ AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef,
+ PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type,
+};
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+use syntax::{
+ ast::{self, HasName},
+ utils::path_to_string_stripping_turbo_fish,
+ AstNode, SyntaxNode,
+};
+
+use crate::{
+ helpers::item_name,
+ items_locator::{self, AssocItemSearch, DEFAULT_QUERY_SEARCH_LIMIT},
+ RootDatabase,
+};
+
+/// A candidate for import, derived during various IDE activities:
+/// * completion with imports on the fly proposals
+/// * completion edit resolve requests
+/// * assists
+/// * etc.
+#[derive(Debug)]
+pub enum ImportCandidate {
+ /// A path, qualified (`std::collections::HashMap`) or not (`HashMap`).
+ Path(PathImportCandidate),
+ /// A trait associated function (with no self parameter) or an associated constant.
+ /// For 'test_mod::TestEnum::test_function', `ty` is the `test_mod::TestEnum` expression type
+ /// and `name` is the `test_function`
+ TraitAssocItem(TraitImportCandidate),
+ /// A trait method with self parameter.
+ /// For 'test_enum.test_method()', `ty` is the `test_enum` expression type
+ /// and `name` is the `test_method`
+ TraitMethod(TraitImportCandidate),
+}
+
+/// A trait import needed for a given associated item access.
+/// For `some::path::SomeStruct::ASSOC_`, contains the
+/// type of `some::path::SomeStruct` and `ASSOC_` as the item name.
+#[derive(Debug)]
+pub struct TraitImportCandidate {
+ /// A type of the item that has the associated item accessed at.
+ pub receiver_ty: Type,
+ /// The associated item name that the trait to import should contain.
+ pub assoc_item_name: NameToImport,
+}
+
+/// Path import for a given name, qualified or not.
+#[derive(Debug)]
+pub struct PathImportCandidate {
+ /// Optional qualifier before name.
+ pub qualifier: Option<FirstSegmentUnresolved>,
+ /// The name the item (struct, trait, enum, etc.) should have.
+ pub name: NameToImport,
+}
+
+/// A qualifier that has a first segment and it's unresolved.
+#[derive(Debug)]
+pub struct FirstSegmentUnresolved {
+ fist_segment: ast::NameRef,
+ full_qualifier: ast::Path,
+}
+
+/// A name that will be used during item lookups.
+#[derive(Debug, Clone)]
+pub enum NameToImport {
+ /// Requires items with names that exactly match the given string, bool indicates case-sensitivity.
+ Exact(String, bool),
+ /// Requires items with names that case-insensitively contain all letters from the string,
+ /// in the same order, but not necessary adjacent.
+ Fuzzy(String),
+}
+
+impl NameToImport {
+ pub fn exact_case_sensitive(s: String) -> NameToImport {
+ NameToImport::Exact(s, true)
+ }
+}
+
+impl NameToImport {
+ pub fn text(&self) -> &str {
+ match self {
+ NameToImport::Exact(text, _) => text.as_str(),
+ NameToImport::Fuzzy(text) => text.as_str(),
+ }
+ }
+}
+
+/// A struct to find imports in the project, given a certain name (or its part) and the context.
+#[derive(Debug)]
+pub struct ImportAssets {
+ import_candidate: ImportCandidate,
+ candidate_node: SyntaxNode,
+ module_with_candidate: Module,
+}
+
+impl ImportAssets {
+ pub fn for_method_call(
+ method_call: &ast::MethodCallExpr,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Option<Self> {
+ let candidate_node = method_call.syntax().clone();
+ Some(Self {
+ import_candidate: ImportCandidate::for_method_call(sema, method_call)?,
+ module_with_candidate: sema.scope(&candidate_node)?.module(),
+ candidate_node,
+ })
+ }
+
+ pub fn for_exact_path(
+ fully_qualified_path: &ast::Path,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Option<Self> {
+ let candidate_node = fully_qualified_path.syntax().clone();
+ if let Some(use_tree) = candidate_node.ancestors().find_map(ast::UseTree::cast) {
+ // Path is inside a use tree, then only continue if it is the first segment of a use statement.
+ if use_tree.syntax().parent().and_then(ast::Use::cast).is_none()
+ || fully_qualified_path.qualifier().is_some()
+ {
+ return None;
+ }
+ }
+ Some(Self {
+ import_candidate: ImportCandidate::for_regular_path(sema, fully_qualified_path)?,
+ module_with_candidate: sema.scope(&candidate_node)?.module(),
+ candidate_node,
+ })
+ }
+
+ pub fn for_ident_pat(sema: &Semantics<'_, RootDatabase>, pat: &ast::IdentPat) -> Option<Self> {
+ if !pat.is_simple_ident() {
+ return None;
+ }
+ let name = pat.name()?;
+ let candidate_node = pat.syntax().clone();
+ Some(Self {
+ import_candidate: ImportCandidate::for_name(sema, &name)?,
+ module_with_candidate: sema.scope(&candidate_node)?.module(),
+ candidate_node,
+ })
+ }
+
+ pub fn for_fuzzy_path(
+ module_with_candidate: Module,
+ qualifier: Option<ast::Path>,
+ fuzzy_name: String,
+ sema: &Semantics<'_, RootDatabase>,
+ candidate_node: SyntaxNode,
+ ) -> Option<Self> {
+ Some(Self {
+ import_candidate: ImportCandidate::for_fuzzy_path(qualifier, fuzzy_name, sema)?,
+ module_with_candidate,
+ candidate_node,
+ })
+ }
+
+ pub fn for_fuzzy_method_call(
+ module_with_method_call: Module,
+ receiver_ty: Type,
+ fuzzy_method_name: String,
+ candidate_node: SyntaxNode,
+ ) -> Option<Self> {
+ Some(Self {
+ import_candidate: ImportCandidate::TraitMethod(TraitImportCandidate {
+ receiver_ty,
+ assoc_item_name: NameToImport::Fuzzy(fuzzy_method_name),
+ }),
+ module_with_candidate: module_with_method_call,
+ candidate_node,
+ })
+ }
+}
+
+/// An import (not necessary the only one) that corresponds a certain given [`PathImportCandidate`].
+/// (the structure is not entirely correct, since there can be situations requiring two imports, see FIXME below for the details)
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LocatedImport {
+ /// The path to use in the `use` statement for a given candidate to be imported.
+ pub import_path: ModPath,
+ /// An item that will be imported with the import path given.
+ pub item_to_import: ItemInNs,
+ /// The path import candidate, resolved.
+ ///
+ /// Not necessary matches the import:
+ /// For any associated constant from the trait, we try to access as `some::path::SomeStruct::ASSOC_`
+ /// the original item is the associated constant, but the import has to be a trait that
+ /// defines this constant.
+ pub original_item: ItemInNs,
+ /// A path of the original item.
+ pub original_path: Option<ModPath>,
+}
+
+impl LocatedImport {
+ pub fn new(
+ import_path: ModPath,
+ item_to_import: ItemInNs,
+ original_item: ItemInNs,
+ original_path: Option<ModPath>,
+ ) -> Self {
+ Self { import_path, item_to_import, original_item, original_path }
+ }
+}
+
+impl ImportAssets {
+ pub fn import_candidate(&self) -> &ImportCandidate {
+ &self.import_candidate
+ }
+
+ pub fn search_for_imports(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ prefix_kind: PrefixKind,
+ ) -> Vec<LocatedImport> {
+ let _p = profile::span("import_assets::search_for_imports");
+ self.search_for(sema, Some(prefix_kind))
+ }
+
+ /// This may return non-absolute paths if a part of the returned path is already imported into scope.
+ pub fn search_for_relative_paths(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Vec<LocatedImport> {
+ let _p = profile::span("import_assets::search_for_relative_paths");
+ self.search_for(sema, None)
+ }
+
+ pub fn path_fuzzy_name_to_exact(&mut self, case_sensitive: bool) {
+ if let ImportCandidate::Path(PathImportCandidate { name: to_import, .. }) =
+ &mut self.import_candidate
+ {
+ let name = match to_import {
+ NameToImport::Fuzzy(name) => std::mem::take(name),
+ _ => return,
+ };
+ *to_import = NameToImport::Exact(name, case_sensitive);
+ }
+ }
+
+ fn search_for(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ prefixed: Option<PrefixKind>,
+ ) -> Vec<LocatedImport> {
+ let _p = profile::span("import_assets::search_for");
+
+ let scope_definitions = self.scope_definitions(sema);
+ let mod_path = |item| {
+ get_mod_path(
+ sema.db,
+ item_for_path_search(sema.db, item)?,
+ &self.module_with_candidate,
+ prefixed,
+ )
+ };
+
+ let krate = self.module_with_candidate.krate();
+ let scope = match sema.scope(&self.candidate_node) {
+ Some(it) => it,
+ None => return Vec::new(),
+ };
+
+ match &self.import_candidate {
+ ImportCandidate::Path(path_candidate) => {
+ path_applicable_imports(sema, krate, path_candidate, mod_path)
+ }
+ ImportCandidate::TraitAssocItem(trait_candidate) => {
+ trait_applicable_items(sema, krate, &scope, trait_candidate, true, mod_path)
+ }
+ ImportCandidate::TraitMethod(trait_candidate) => {
+ trait_applicable_items(sema, krate, &scope, trait_candidate, false, mod_path)
+ }
+ }
+ .into_iter()
+ .filter(|import| import.import_path.len() > 1)
+ .filter(|import| !scope_definitions.contains(&ScopeDef::from(import.item_to_import)))
+ .sorted_by(|a, b| a.import_path.cmp(&b.import_path))
+ .collect()
+ }
+
+ fn scope_definitions(&self, sema: &Semantics<'_, RootDatabase>) -> FxHashSet<ScopeDef> {
+ let _p = profile::span("import_assets::scope_definitions");
+ let mut scope_definitions = FxHashSet::default();
+ if let Some(scope) = sema.scope(&self.candidate_node) {
+ scope.process_all_names(&mut |_, scope_def| {
+ scope_definitions.insert(scope_def);
+ });
+ }
+ scope_definitions
+ }
+}
+
+fn path_applicable_imports(
+ sema: &Semantics<'_, RootDatabase>,
+ current_crate: Crate,
+ path_candidate: &PathImportCandidate,
+ mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy,
+) -> FxHashSet<LocatedImport> {
+ let _p = profile::span("import_assets::path_applicable_imports");
+
+ match &path_candidate.qualifier {
+ None => {
+ items_locator::items_with_name(
+ sema,
+ current_crate,
+ path_candidate.name.clone(),
+ // FIXME: we could look up assoc items by the input and propose those in completion,
+ // but that requires more preparation first:
+ // * store non-trait assoc items in import_map to fully enable this lookup
+ // * ensure that does not degrade the performance (benchmark it)
+ // * write more logic to check for corresponding trait presence requirement (we're unable to flyimport multiple item right now)
+ // * improve the associated completion item matching and/or scoring to ensure no noisy completions appear
+ //
+ // see also an ignored test under FIXME comment in the qualify_path.rs module
+ AssocItemSearch::Exclude,
+ Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| {
+ let mod_path = mod_path(item)?;
+ Some(LocatedImport::new(mod_path.clone(), item, item, Some(mod_path)))
+ })
+ .collect()
+ }
+ Some(first_segment_unresolved) => {
+ let unresolved_qualifier =
+ path_to_string_stripping_turbo_fish(&first_segment_unresolved.full_qualifier);
+ let unresolved_first_segment = first_segment_unresolved.fist_segment.text();
+ items_locator::items_with_name(
+ sema,
+ current_crate,
+ path_candidate.name.clone(),
+ AssocItemSearch::Include,
+ Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| {
+ import_for_item(
+ sema.db,
+ mod_path,
+ &unresolved_first_segment,
+ &unresolved_qualifier,
+ item,
+ )
+ })
+ .collect()
+ }
+ }
+}
+
+/// Builds a `LocatedImport` for `original_item` if importing something can
+/// make the candidate's unresolved qualified path resolve to it.
+///
+/// The import path computed for the item must contain
+/// `unresolved_first_segment` and end with the unresolved qualifier (plus the
+/// item's own name for non-associated items); otherwise no import is proposed.
+fn import_for_item(
+    db: &RootDatabase,
+    mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
+    unresolved_first_segment: &str,
+    unresolved_qualifier: &str,
+    original_item: ItemInNs,
+) -> Option<LocatedImport> {
+    let _p = profile::span("import_assets::import_for_item");
+
+    let original_item_candidate = item_for_path_search(db, original_item)?;
+    let import_path_candidate = mod_path(original_item_candidate)?;
+    let import_path_string = import_path_candidate.to_string();
+
+    // For associated items the qualifier already ends in the container's name,
+    // so the item name itself is not part of the expected path suffix.
+    let expected_import_end = if item_as_assoc(db, original_item).is_some() {
+        unresolved_qualifier.to_string()
+    } else {
+        format!("{}::{}", unresolved_qualifier, item_name(db, original_item)?)
+    };
+    if !import_path_string.contains(unresolved_first_segment)
+        || !import_path_string.ends_with(&expected_import_end)
+    {
+        return None;
+    }
+
+    // The item whose import would bring `unresolved_first_segment` into scope.
+    let segment_import =
+        find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
+    let trait_item_to_import = item_as_assoc(db, original_item)
+        .and_then(|assoc| assoc.containing_trait(db))
+        .map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
+    Some(match (segment_import == original_item_candidate, trait_item_to_import) {
+        (true, Some(_)) => {
+            // FIXME we should be able to import both the trait and the segment,
+            // but it's unclear what to do with overlapping edits (merge imports?)
+            // especially in case of lazy completion edit resolutions.
+            return None;
+        }
+        (false, Some(trait_to_import)) => LocatedImport::new(
+            mod_path(trait_to_import)?,
+            trait_to_import,
+            original_item,
+            mod_path(original_item),
+        ),
+        (true, None) => LocatedImport::new(
+            import_path_candidate,
+            original_item_candidate,
+            original_item,
+            mod_path(original_item),
+        ),
+        (false, None) => LocatedImport::new(
+            mod_path(segment_import)?,
+            segment_import,
+            original_item,
+            mod_path(original_item),
+        ),
+    })
+}
+
+/// Returns the item whose import path should be searched for `item`:
+/// trait associated items map to their trait, impl associated items map to the
+/// impl's self-type ADT (`None` for non-ADT self types), everything else maps
+/// to itself.
+pub fn item_for_path_search(db: &RootDatabase, item: ItemInNs) -> Option<ItemInNs> {
+    Some(match item {
+        ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) {
+            Some(assoc_item) => match assoc_item.container(db) {
+                AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+                AssocItemContainer::Impl(impl_) => {
+                    ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
+                }
+            },
+            None => item,
+        },
+        ItemInNs::Macros(_) => item,
+    })
+}
+
+fn find_import_for_segment(
+ db: &RootDatabase,
+ original_item: ItemInNs,
+ unresolved_first_segment: &str,
+) -> Option<ItemInNs> {
+ let segment_is_name = item_name(db, original_item)
+ .map(|name| name.to_smol_str() == unresolved_first_segment)
+ .unwrap_or(false);
+
+ Some(if segment_is_name {
+ original_item
+ } else {
+ let matching_module =
+ module_with_segment_name(db, unresolved_first_segment, original_item)?;
+ ItemInNs::from(ModuleDef::from(matching_module))
+ })
+}
+
+/// Walks `candidate`'s module chain upwards, starting at its defining module,
+/// and returns the first module whose name equals `segment_name`.
+fn module_with_segment_name(
+    db: &RootDatabase,
+    segment_name: &str,
+    candidate: ItemInNs,
+) -> Option<Module> {
+    let defining_module = match candidate {
+        ItemInNs::Types(module_def_id) | ItemInNs::Values(module_def_id) => {
+            module_def_id.module(db)
+        }
+        ItemInNs::Macros(macro_def_id) => ModuleDef::from(macro_def_id).module(db),
+    };
+    std::iter::successors(defining_module, |module| module.parent(db)).find(|module| {
+        module.name(db).map_or(false, |module_name| module_name.to_smol_str() == segment_name)
+    })
+}
+
+/// Collects imports of traits that provide an associated item named
+/// `trait_candidate.assoc_item_name` applicable to the candidate's receiver
+/// type. Traits already reachable on the receiver (inherent or environment
+/// traits) are skipped.
+///
+/// When `trait_assoc_item` is true, path candidates are considered (consts,
+/// type aliases, and functions without a `self` parameter); otherwise only
+/// method candidates are.
+fn trait_applicable_items(
+    sema: &Semantics<'_, RootDatabase>,
+    current_crate: Crate,
+    scope: &SemanticsScope<'_>,
+    trait_candidate: &TraitImportCandidate,
+    trait_assoc_item: bool,
+    mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
+) -> FxHashSet<LocatedImport> {
+    let _p = profile::span("import_assets::trait_applicable_items");
+
+    let db = sema.db;
+
+    let inherent_traits = trait_candidate.receiver_ty.applicable_inherent_traits(db);
+    let env_traits = trait_candidate.receiver_ty.env_traits(db);
+    let related_traits = inherent_traits.chain(env_traits).collect::<FxHashSet<_>>();
+
+    // Side effect of the filter below: collects the matching assoc items of
+    // the not-yet-in-scope traits while `trait_candidates` is built.
+    let mut required_assoc_items = FxHashSet::default();
+    let trait_candidates = items_locator::items_with_name(
+        sema,
+        current_crate,
+        trait_candidate.assoc_item_name.clone(),
+        AssocItemSearch::AssocItemsOnly,
+        Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+    )
+    .filter_map(|input| item_as_assoc(db, input))
+    .filter_map(|assoc| {
+        let assoc_item_trait = assoc.containing_trait(db)?;
+        if related_traits.contains(&assoc_item_trait) {
+            None
+        } else {
+            required_assoc_items.insert(assoc);
+            Some(assoc_item_trait.into())
+        }
+    })
+    .collect();
+
+    let mut located_imports = FxHashSet::default();
+
+    if trait_assoc_item {
+        trait_candidate.receiver_ty.iterate_path_candidates(
+            db,
+            scope,
+            &trait_candidates,
+            None,
+            None,
+            |assoc| {
+                if required_assoc_items.contains(&assoc) {
+                    // Functions taking `self` cannot be called through a plain
+                    // path, so they don't qualify as path candidates.
+                    if let AssocItem::Function(f) = assoc {
+                        if f.self_param(db).is_some() {
+                            return None;
+                        }
+                    }
+                    let located_trait = assoc.containing_trait(db)?;
+                    let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
+                    let original_item = assoc_to_item(assoc);
+                    located_imports.insert(LocatedImport::new(
+                        mod_path(trait_item)?,
+                        trait_item,
+                        original_item,
+                        mod_path(original_item),
+                    ));
+                }
+                // Always `None`: iterate all candidates rather than stopping
+                // at the first hit.
+                None::<()>
+            },
+        )
+    } else {
+        trait_candidate.receiver_ty.iterate_method_candidates(
+            db,
+            scope,
+            &trait_candidates,
+            None,
+            None,
+            |function| {
+                let assoc = function.as_assoc_item(db)?;
+                if required_assoc_items.contains(&assoc) {
+                    let located_trait = assoc.containing_trait(db)?;
+                    let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
+                    let original_item = assoc_to_item(assoc);
+                    located_imports.insert(LocatedImport::new(
+                        mod_path(trait_item)?,
+                        trait_item,
+                        original_item,
+                        mod_path(original_item),
+                    ));
+                }
+                None::<()>
+            },
+        )
+    };
+
+    located_imports
+}
+
+/// Converts an associated item into the corresponding namespaced item.
+fn assoc_to_item(assoc: AssocItem) -> ItemInNs {
+    let module_def = match assoc {
+        AssocItem::Function(f) => ModuleDef::from(f),
+        AssocItem::Const(c) => ModuleDef::from(c),
+        AssocItem::TypeAlias(t) => ModuleDef::from(t),
+    };
+    ItemInNs::from(module_def)
+}
+
+/// Computes the use-path for `item_to_search` as seen from
+/// `module_with_candidate`, honoring the requested prefix kind if any.
+fn get_mod_path(
+    db: &RootDatabase,
+    item_to_search: ItemInNs,
+    module_with_candidate: &Module,
+    prefixed: Option<PrefixKind>,
+) -> Option<ModPath> {
+    match prefixed {
+        Some(prefix_kind) => {
+            module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind)
+        }
+        None => module_with_candidate.find_use_path(db, item_to_search),
+    }
+}
+
+impl ImportCandidate {
+    /// Candidate for an unresolved method call: `None` when the call already
+    /// resolves, otherwise a trait-method candidate keyed by the receiver's
+    /// adjusted type and the method name.
+    fn for_method_call(
+        sema: &Semantics<'_, RootDatabase>,
+        method_call: &ast::MethodCallExpr,
+    ) -> Option<Self> {
+        match sema.resolve_method_call(method_call) {
+            Some(_) => None,
+            None => Some(Self::TraitMethod(TraitImportCandidate {
+                receiver_ty: sema.type_of_expr(&method_call.receiver()?)?.adjusted(),
+                assoc_item_name: NameToImport::exact_case_sensitive(
+                    method_call.name_ref()?.to_string(),
+                ),
+            })),
+        }
+    }
+
+    /// Candidate for an unresolved path, matched case-sensitively on its last
+    /// segment.
+    fn for_regular_path(sema: &Semantics<'_, RootDatabase>, path: &ast::Path) -> Option<Self> {
+        if sema.resolve_path(path).is_some() {
+            return None;
+        }
+        path_import_candidate(
+            sema,
+            path.qualifier(),
+            NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string()),
+        )
+    }
+
+    /// Candidate for a lone identifier; `None` when the name already resolves
+    /// in its scope.
+    fn for_name(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<Self> {
+        if sema
+            .scope(name.syntax())?
+            .speculative_resolve(&ast::make::ext::ident_path(&name.text()))
+            .is_some()
+        {
+            return None;
+        }
+        Some(ImportCandidate::Path(PathImportCandidate {
+            qualifier: None,
+            name: NameToImport::exact_case_sensitive(name.to_string()),
+        }))
+    }
+
+    /// Candidate built from a fuzzily-matched name with an optional qualifier.
+    fn for_fuzzy_path(
+        qualifier: Option<ast::Path>,
+        fuzzy_name: String,
+        sema: &Semantics<'_, RootDatabase>,
+    ) -> Option<Self> {
+        path_import_candidate(sema, qualifier, NameToImport::Fuzzy(fuzzy_name))
+    }
+}
+
+/// Builds an `ImportCandidate` for an unresolved `name` behind an optional
+/// `qualifier`:
+///
+/// * no qualifier: a plain path candidate;
+/// * a qualifier whose first segment does not resolve: a path candidate
+///   carrying the unresolved qualifier;
+/// * a qualifier resolving to an ADT, directly or through a type alias: a
+///   trait-assoc-item candidate on that type;
+/// * anything else: no candidate.
+fn path_import_candidate(
+    sema: &Semantics<'_, RootDatabase>,
+    qualifier: Option<ast::Path>,
+    name: NameToImport,
+) -> Option<ImportCandidate> {
+    Some(match qualifier {
+        Some(qualifier) => match sema.resolve_path(&qualifier) {
+            None => {
+                let qualifier_start =
+                    qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
+                let qualifier_start_path =
+                    qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
+                if sema.resolve_path(&qualifier_start_path).is_none() {
+                    ImportCandidate::Path(PathImportCandidate {
+                        qualifier: Some(FirstSegmentUnresolved {
+                            fist_segment: qualifier_start,
+                            full_qualifier: qualifier,
+                        }),
+                        name,
+                    })
+                } else {
+                    // The first segment resolves, so the failure is further in
+                    // and importing anything wouldn't help.
+                    return None;
+                }
+            }
+            Some(PathResolution::Def(ModuleDef::Adt(assoc_item_path))) => {
+                ImportCandidate::TraitAssocItem(TraitImportCandidate {
+                    receiver_ty: assoc_item_path.ty(sema.db),
+                    assoc_item_name: name,
+                })
+            }
+            Some(PathResolution::Def(ModuleDef::TypeAlias(alias))) => {
+                let ty = alias.ty(sema.db);
+                if ty.as_adt().is_some() {
+                    ImportCandidate::TraitAssocItem(TraitImportCandidate {
+                        receiver_ty: ty,
+                        assoc_item_name: name,
+                    })
+                } else {
+                    return None;
+                }
+            }
+            Some(_) => return None,
+        },
+        None => ImportCandidate::Path(PathImportCandidate { qualifier: None, name }),
+    })
+}
+
+/// Interprets `item` as an associated item, if it is one.
+fn item_as_assoc(db: &RootDatabase, item: ItemInNs) -> Option<AssocItem> {
+    match item.as_module_def() {
+        Some(module_def) => module_def.as_assoc_item(db),
+        None => None,
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
new file mode 100644
index 000000000..c14182279
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
@@ -0,0 +1,446 @@
+//! Handle syntactic aspects of inserting a new `use` item.
+#[cfg(test)]
+mod tests;
+
+use std::cmp::Ordering;
+
+use hir::Semantics;
+use syntax::{
+ algo,
+ ast::{self, make, AstNode, HasAttrs, HasModuleItem, HasVisibility, PathSegmentKind},
+ ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode,
+};
+
+use crate::{
+ imports::merge_imports::{
+ common_prefix, eq_attrs, eq_visibility, try_merge_imports, use_tree_path_cmp, MergeBehavior,
+ },
+ RootDatabase,
+};
+
+pub use hir::PrefixKind;
+
+/// How imports should be grouped into use statements.
+///
+/// `insert_use` maps this to a merge behavior; `Preserve` and `Item` disable
+/// merging entirely.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ImportGranularity {
+    /// Do not change the granularity of any imports and preserve the original structure written by the developer.
+    Preserve,
+    /// Merge imports from the same crate into a single use statement.
+    Crate,
+    /// Merge imports from the same module into a single use statement.
+    Module,
+    /// Flatten imports so that each has its own use statement.
+    Item,
+}
+
+/// Configuration for `insert_use`.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct InsertUseConfig {
+    /// Desired granularity of use statements (see `ImportGranularity`).
+    pub granularity: ImportGranularity,
+    /// When false, the granularity guessed from the file's existing imports
+    /// may override `granularity`.
+    pub enforce_granularity: bool,
+    /// Path prefix style for inserted imports.
+    pub prefix_kind: PrefixKind,
+    /// Whether imports are kept in groups separated by blank lines.
+    pub group: bool,
+    /// When true, never merge a new import into a lone glob import.
+    pub skip_glob_imports: bool,
+}
+
+/// A place `use` items can be inserted into: a whole source file, a module's
+/// item list, or a block's statement list.
+#[derive(Debug, Clone)]
+pub enum ImportScope {
+    File(ast::SourceFile),
+    Module(ast::ItemList),
+    Block(ast::StmtList),
+}
+
+impl ImportScope {
+    // FIXME: Remove this?
+    // Test-only constructor that derives the scope purely syntactically,
+    // mirroring the cfg-gated-item handling of `find_insert_use_container`.
+    #[cfg(test)]
+    fn from(syntax: SyntaxNode) -> Option<Self> {
+        use syntax::match_ast;
+        fn contains_cfg_attr(attrs: &dyn HasAttrs) -> bool {
+            attrs
+                .attrs()
+                .any(|attr| attr.as_simple_call().map_or(false, |(ident, _)| ident == "cfg"))
+        }
+        match_ast! {
+            match syntax {
+                ast::Module(module) => module.item_list().map(ImportScope::Module),
+                ast::SourceFile(file) => Some(ImportScope::File(file)),
+                ast::Fn(func) => contains_cfg_attr(&func).then(|| func.body().and_then(|it| it.stmt_list().map(ImportScope::Block))).flatten(),
+                ast::Const(konst) => contains_cfg_attr(&konst).then(|| match konst.body()? {
+                    ast::Expr::BlockExpr(block) => Some(block),
+                    _ => None,
+                }).flatten().and_then(|it| it.stmt_list().map(ImportScope::Block)),
+                ast::Static(statik) => contains_cfg_attr(&statik).then(|| match statik.body()? {
+                    ast::Expr::BlockExpr(block) => Some(block),
+                    _ => None,
+                }).flatten().and_then(|it| it.stmt_list().map(ImportScope::Block)),
+                _ => None,
+
+            }
+        }
+    }
+
+    /// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
+    /// Returns the original source node inside attributes.
+    pub fn find_insert_use_container(
+        position: &SyntaxNode,
+        sema: &Semantics<'_, RootDatabase>,
+    ) -> Option<Self> {
+        fn contains_cfg_attr(attrs: &dyn HasAttrs) -> bool {
+            attrs
+                .attrs()
+                .any(|attr| attr.as_simple_call().map_or(false, |(ident, _)| ident == "cfg"))
+        }
+
+        // Walk up the ancestor tree searching for a suitable node to do insertions on
+        // with special handling on cfg-gated items, in which case we want to insert imports locally
+        // or FIXME: annotate inserted imports with the same cfg
+        for syntax in sema.ancestors_with_macros(position.clone()) {
+            if let Some(file) = ast::SourceFile::cast(syntax.clone()) {
+                return Some(ImportScope::File(file));
+            } else if let Some(item) = ast::Item::cast(syntax) {
+                return match item {
+                    ast::Item::Const(konst) if contains_cfg_attr(&konst) => {
+                        // FIXME: Instead of bailing out with None, we should note down that
+                        // this import needs an attribute added
+                        match sema.original_ast_node(konst)?.body()? {
+                            ast::Expr::BlockExpr(block) => block,
+                            _ => return None,
+                        }
+                        .stmt_list()
+                        .map(ImportScope::Block)
+                    }
+                    ast::Item::Fn(func) if contains_cfg_attr(&func) => {
+                        // FIXME: Instead of bailing out with None, we should note down that
+                        // this import needs an attribute added
+                        sema.original_ast_node(func)?.body()?.stmt_list().map(ImportScope::Block)
+                    }
+                    ast::Item::Static(statik) if contains_cfg_attr(&statik) => {
+                        // FIXME: Instead of bailing out with None, we should note down that
+                        // this import needs an attribute added
+                        match sema.original_ast_node(statik)?.body()? {
+                            ast::Expr::BlockExpr(block) => block,
+                            _ => return None,
+                        }
+                        .stmt_list()
+                        .map(ImportScope::Block)
+                    }
+                    ast::Item::Module(module) => {
+                        // early return is important here, if we can't find the original module
+                        // in the input there is no way for us to insert an import anywhere.
+                        sema.original_ast_node(module)?.item_list().map(ImportScope::Module)
+                    }
+                    _ => continue,
+                };
+            }
+        }
+        None
+    }
+
+    /// The syntax node imports get inserted into.
+    pub fn as_syntax_node(&self) -> &SyntaxNode {
+        match self {
+            ImportScope::File(file) => file.syntax(),
+            ImportScope::Module(item_list) => item_list.syntax(),
+            ImportScope::Block(block) => block.syntax(),
+        }
+    }
+
+    /// Deep-copies the scope's node so it can be mutated via `ted`.
+    pub fn clone_for_update(&self) -> Self {
+        match self {
+            ImportScope::File(file) => ImportScope::File(file.clone_for_update()),
+            ImportScope::Module(item_list) => ImportScope::Module(item_list.clone_for_update()),
+            ImportScope::Block(block) => ImportScope::Block(block.clone_for_update()),
+        }
+    }
+}
+
+/// Insert an import path into the given file/node.
+///
+/// Merging into existing imports is governed by `cfg.granularity`
+/// (`Item`/`Preserve` disable merging); unless `cfg.enforce_granularity` is
+/// set, the granularity guessed from the file's existing imports overrides it.
+pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
+    let _p = profile::span("insert_use");
+    let mut mb = match cfg.granularity {
+        ImportGranularity::Crate => Some(MergeBehavior::Crate),
+        ImportGranularity::Module => Some(MergeBehavior::Module),
+        ImportGranularity::Item | ImportGranularity::Preserve => None,
+    };
+    if !cfg.enforce_granularity {
+        let file_granularity = guess_granularity_from_scope(scope);
+        mb = match file_granularity {
+            ImportGranularityGuess::Unknown => mb,
+            ImportGranularityGuess::Item => None,
+            ImportGranularityGuess::Module => Some(MergeBehavior::Module),
+            // Ambiguous guesses only narrow the configured behavior.
+            ImportGranularityGuess::ModuleOrItem => mb.and(Some(MergeBehavior::Module)),
+            ImportGranularityGuess::Crate => Some(MergeBehavior::Crate),
+            ImportGranularityGuess::CrateOrModule => mb.or(Some(MergeBehavior::Crate)),
+        };
+    }
+
+    let use_item =
+        make::use_(None, make::use_tree(path.clone(), None, None, false)).clone_for_update();
+    // merge into existing imports if possible
+    if let Some(mb) = mb {
+        let filter = |it: &_| !(cfg.skip_glob_imports && ast::Use::is_simple_glob(it));
+        for existing_use in
+            scope.as_syntax_node().children().filter_map(ast::Use::cast).filter(filter)
+        {
+            if let Some(merged) = try_merge_imports(&existing_use, &use_item, mb) {
+                ted::replace(existing_use.syntax(), merged.syntax());
+                return;
+            }
+        }
+    }
+
+    // either we weren't allowed to merge or there is no import that fits the merge conditions
+    // so look for the place we have to insert to
+    insert_use_(scope, &path, cfg.group, use_item);
+}
+
+/// Removes `path` from its enclosing `use` item, if it constitutes one:
+/// the whole `use` is removed when the tree is just this path, otherwise only
+/// the sub-tree is. Glob/list trees and non-toplevel paths are left untouched.
+pub fn remove_path_if_in_use_stmt(path: &ast::Path) {
+    // FIXME: improve this
+    if path.parent_path().is_some() {
+        return;
+    }
+    if let Some(use_tree) = path.syntax().parent().and_then(ast::UseTree::cast) {
+        if use_tree.use_tree_list().is_some() || use_tree.star_token().is_some() {
+            return;
+        }
+        if let Some(use_) = use_tree.syntax().parent().and_then(ast::Use::cast) {
+            use_.remove();
+            return;
+        }
+        use_tree.remove();
+    }
+}
+
+/// Coarse category of an import path, used to keep `use` items in groups.
+#[derive(Eq, PartialEq, PartialOrd, Ord)]
+enum ImportGroup {
+    // the order here defines the order of new group inserts
+    Std,
+    ExternCrate,
+    ThisCrate,
+    ThisModule,
+    SuperModule,
+}
+
+impl ImportGroup {
+    /// Classifies `path` by its first segment; paths without a first segment
+    /// fall back to `ExternCrate`.
+    fn new(path: &ast::Path) -> ImportGroup {
+        let first_segment = match path.first_segment() {
+            Some(segment) => segment,
+            None => return ImportGroup::ExternCrate,
+        };
+
+        // A segment without a kind is treated like `self`.
+        match first_segment.kind().unwrap_or(PathSegmentKind::SelfKw) {
+            PathSegmentKind::SelfKw => ImportGroup::ThisModule,
+            PathSegmentKind::SuperKw => ImportGroup::SuperModule,
+            PathSegmentKind::CrateKw => ImportGroup::ThisCrate,
+            PathSegmentKind::Name(name) if matches!(name.text().as_str(), "std" | "core") => {
+                ImportGroup::Std
+            }
+            PathSegmentKind::Name(_) => ImportGroup::ExternCrate,
+            // these aren't valid use paths, so fall back to something random
+            PathSegmentKind::SelfTypeKw | PathSegmentKind::Type { .. } => ImportGroup::ExternCrate,
+        }
+    }
+}
+
+/// Result of `guess_granularity_from_scope`; the `*Or*` variants mean the
+/// evidence seen so far is compatible with either style.
+#[derive(PartialEq, PartialOrd, Debug, Clone, Copy)]
+enum ImportGranularityGuess {
+    Unknown,
+    Item,
+    Module,
+    ModuleOrItem,
+    Crate,
+    CrateOrModule,
+}
+
+/// Inspects the existing `use` items in `scope` and guesses which
+/// `ImportGranularity` the file already follows.
+fn guess_granularity_from_scope(scope: &ImportScope) -> ImportGranularityGuess {
+    // The idea is simple, just check each import as well as the import and its precedent together for
+    // whether they fulfill a granularity criteria.
+    let use_stmt = |item| match item {
+        ast::Item::Use(use_) => {
+            let use_tree = use_.use_tree()?;
+            Some((use_tree, use_.visibility(), use_.attrs()))
+        }
+        _ => None,
+    };
+    let mut use_stmts = match scope {
+        ImportScope::File(f) => f.items(),
+        ImportScope::Module(m) => m.items(),
+        ImportScope::Block(b) => b.items(),
+    }
+    .filter_map(use_stmt);
+    let mut res = ImportGranularityGuess::Unknown;
+    let (mut prev, mut prev_vis, mut prev_attrs) = match use_stmts.next() {
+        Some(it) => it,
+        None => return res,
+    };
+    loop {
+        if let Some(use_tree_list) = prev.use_tree_list() {
+            if use_tree_list.use_trees().any(|tree| tree.use_tree_list().is_some()) {
+                // Nested tree lists can only occur in crate style, or with no proper style being enforced in the file.
+                break ImportGranularityGuess::Crate;
+            } else {
+                // Could still be crate-style so continue looking.
+                res = ImportGranularityGuess::CrateOrModule;
+            }
+        }
+
+        let (curr, curr_vis, curr_attrs) = match use_stmts.next() {
+            Some(it) => it,
+            None => break res,
+        };
+        // Only compare neighbors that could have been merged in the first
+        // place, i.e. with matching visibility and attributes.
+        if eq_visibility(prev_vis, curr_vis.clone()) && eq_attrs(prev_attrs, curr_attrs.clone()) {
+            if let Some((prev_path, curr_path)) = prev.path().zip(curr.path()) {
+                if let Some((prev_prefix, _)) = common_prefix(&prev_path, &curr_path) {
+                    if prev.use_tree_list().is_none() && curr.use_tree_list().is_none() {
+                        let prefix_c = prev_prefix.qualifiers().count();
+                        let curr_c = curr_path.qualifiers().count() - prefix_c;
+                        let prev_c = prev_path.qualifiers().count() - prefix_c;
+                        if curr_c == 1 && prev_c == 1 {
+                            // Same prefix, only differing in the last segment and no use tree lists so this has to be of item style.
+                            break ImportGranularityGuess::Item;
+                        } else {
+                            // Same prefix and no use tree list but differs in more than one segment at the end. This might be module style still.
+                            res = ImportGranularityGuess::ModuleOrItem;
+                        }
+                    } else {
+                        // Same prefix with item tree lists, has to be module style as it
+                        // can't be crate style since the trees wouldn't share a prefix then.
+                        break ImportGranularityGuess::Module;
+                    }
+                }
+            }
+        }
+        prev = curr;
+        prev_vis = curr_vis;
+        prev_attrs = curr_attrs;
+    }
+}
+
+/// Places `use_item` (importing `insert_path`) at the right position inside
+/// `scope`: within its import group when `group_imports` is set, otherwise
+/// after the last existing import, and at the very top (after inner attributes
+/// and header comments) when there are no imports at all.
+fn insert_use_(
+    scope: &ImportScope,
+    insert_path: &ast::Path,
+    group_imports: bool,
+    use_item: ast::Use,
+) {
+    let scope_syntax = scope.as_syntax_node();
+    let group = ImportGroup::new(insert_path);
+    // All existing `use` items of the scope as (path, has-tree-list, node).
+    let path_node_iter = scope_syntax
+        .children()
+        .filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node)))
+        .flat_map(|(use_, node)| {
+            let tree = use_.use_tree()?;
+            let path = tree.path()?;
+            let has_tl = tree.use_tree_list().is_some();
+            Some((path, has_tl, node))
+        });
+
+    if group_imports {
+        // Iterator that discards anything that's not in the required grouping
+        // This implementation allows the user to rearrange their import groups as this only takes the first group that fits
+        let group_iter = path_node_iter
+            .clone()
+            .skip_while(|(path, ..)| ImportGroup::new(path) != group)
+            .take_while(|(path, ..)| ImportGroup::new(path) == group);
+
+        // track the last element we iterated over, if this is still None after the iteration then that means we never iterated in the first place
+        let mut last = None;
+        // find the element that would come directly after our new import
+        let post_insert: Option<(_, _, SyntaxNode)> = group_iter
+            .inspect(|(.., node)| last = Some(node.clone()))
+            .find(|&(ref path, has_tl, _)| {
+                use_tree_path_cmp(insert_path, false, path, has_tl) != Ordering::Greater
+            });
+
+        if let Some((.., node)) = post_insert {
+            cov_mark::hit!(insert_group);
+            // insert our import before that element
+            return ted::insert(ted::Position::before(node), use_item.syntax());
+        }
+        if let Some(node) = last {
+            cov_mark::hit!(insert_group_last);
+            // there is no element after our new import, so append it to the end of the group
+            return ted::insert(ted::Position::after(node), use_item.syntax());
+        }
+
+        // the group we were looking for actually doesn't exist, so insert a
+        // new group at the position dictated by the group ordering
+
+        let mut last = None;
+        // find the group that comes after where we want to insert
+        let post_group = path_node_iter
+            .inspect(|(.., node)| last = Some(node.clone()))
+            .find(|(p, ..)| ImportGroup::new(p) > group);
+        if let Some((.., node)) = post_group {
+            cov_mark::hit!(insert_group_new_group);
+            ted::insert(ted::Position::before(&node), use_item.syntax());
+            if let Some(node) = algo::non_trivia_sibling(node.into(), Direction::Prev) {
+                ted::insert(ted::Position::after(node), make::tokens::single_newline());
+            }
+            return;
+        }
+        // there is no such group, so append after the last one
+        if let Some(node) = last {
+            cov_mark::hit!(insert_group_no_group);
+            ted::insert(ted::Position::after(&node), use_item.syntax());
+            ted::insert(ted::Position::after(node), make::tokens::single_newline());
+            return;
+        }
+    } else {
+        // Grouping disabled: append after the last existing import, if any
+        if let Some((_, _, node)) = path_node_iter.last() {
+            cov_mark::hit!(insert_no_grouping_last);
+            ted::insert(ted::Position::after(node), use_item.syntax());
+            return;
+        }
+    }
+
+    let l_curly = match scope {
+        ImportScope::File(_) => None,
+        // don't insert the imports before the item list/block expr's opening curly brace
+        ImportScope::Module(item_list) => item_list.l_curly_token(),
+        // don't insert the imports before the item list's opening curly brace
+        ImportScope::Block(block) => block.l_curly_token(),
+    };
+    // there are no imports in this file at all
+    // so put the import after all inner module attributes and possible license header comments
+    if let Some(last_inner_element) = scope_syntax
+        .children_with_tokens()
+        // skip the curly brace
+        .skip(l_curly.is_some() as usize)
+        .take_while(|child| match child {
+            NodeOrToken::Node(node) => is_inner_attribute(node.clone()),
+            NodeOrToken::Token(token) => {
+                [SyntaxKind::WHITESPACE, SyntaxKind::COMMENT, SyntaxKind::SHEBANG]
+                    .contains(&token.kind())
+            }
+        })
+        .filter(|child| child.as_token().map_or(true, |t| t.kind() != SyntaxKind::WHITESPACE))
+        .last()
+    {
+        cov_mark::hit!(insert_empty_inner_attr);
+        ted::insert(ted::Position::after(&last_inner_element), use_item.syntax());
+        ted::insert(ted::Position::after(last_inner_element), make::tokens::single_newline());
+    } else {
+        match l_curly {
+            Some(b) => {
+                cov_mark::hit!(insert_empty_module);
+                ted::insert(ted::Position::after(&b), make::tokens::single_newline());
+                ted::insert(ted::Position::after(&b), use_item.syntax());
+            }
+            None => {
+                cov_mark::hit!(insert_empty_file);
+                ted::insert(
+                    ted::Position::first_child_of(scope_syntax),
+                    make::tokens::blank_line(),
+                );
+                ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax());
+            }
+        }
+    }
+}
+
+/// Whether `node` is an inner attribute (`#![...]`).
+fn is_inner_attribute(node: SyntaxNode) -> bool {
+    matches!(ast::Attr::cast(node), Some(attr) if attr.kind() == ast::AttrKind::Inner)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
new file mode 100644
index 000000000..59673af32
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
@@ -0,0 +1,1084 @@
+use base_db::fixture::WithFixture;
+use hir::PrefixKind;
+use stdx::trim_indent;
+use test_utils::{assert_eq_text, CURSOR_MARKER};
+
+use super::*;
+
+#[test]
+fn trailing_comment_in_empty_file() {
+    // The import lands before the items even when the file ends in a comment.
+    check(
+        "foo::bar",
+        r#"
+struct Struct;
+// 0 = 1
+"#,
+        r#"
+use foo::bar;
+
+struct Struct;
+// 0 = 1
+"#,
+        ImportGranularity::Crate,
+    );
+}
+
+#[test]
+fn respects_cfg_attr_fn() {
+    // Inside a `#[cfg]`-gated fn, the import is inserted into the fn body.
+    check(
+        r"bar::Bar",
+        r#"
+#[cfg(test)]
+fn foo() {$0}
+"#,
+        r#"
+#[cfg(test)]
+fn foo() {
+    use bar::Bar;
+}
+"#,
+        ImportGranularity::Crate,
+    );
+}
+
+#[test]
+fn respects_cfg_attr_const() {
+    // Inside a `#[cfg]`-gated const, the import is inserted into its body block.
+    check(
+        r"bar::Bar",
+        r#"
+#[cfg(test)]
+const FOO: Bar = {$0};
+"#,
+        r#"
+#[cfg(test)]
+const FOO: Bar = {
+    use bar::Bar;
+};
+"#,
+        ImportGranularity::Crate,
+    );
+}
+
+#[test]
+fn insert_skips_lone_glob_imports() {
+    // A new import is not merged into an existing lone glob import.
+    check(
+        "use foo::baz::A",
+        r"
+use foo::bar::*;
+",
+        r"
+use foo::bar::*;
+use foo::baz::A;
+",
+        ImportGranularity::Crate,
+    );
+}
+
+#[test]
+fn insert_not_group() {
+    // With `group: false` the import is appended after all existing imports.
+    cov_mark::check!(insert_no_grouping_last);
+    check_with_config(
+        "use external_crate2::bar::A",
+        r"
+use std::bar::B;
+use external_crate::bar::A;
+use crate::bar::A;
+use self::bar::A;
+use super::bar::A;",
+        r"
+use std::bar::B;
+use external_crate::bar::A;
+use crate::bar::A;
+use self::bar::A;
+use super::bar::A;
+use external_crate2::bar::A;",
+        &InsertUseConfig {
+            granularity: ImportGranularity::Item,
+            enforce_granularity: true,
+            prefix_kind: PrefixKind::Plain,
+            group: false,
+            skip_glob_imports: true,
+        },
+    );
+}
+
+#[test]
+fn insert_existing() {
+    // Inserting an already-present import leaves the file unchanged.
+    check_crate("std::fs", "use std::fs;", "use std::fs;")
+}
+
+#[test]
+fn insert_start() {
+    // An alphabetically-first import lands at the top of its group.
+    check_none(
+        "std::bar::AA",
+        r"
+use std::bar::B;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;",
+        r"
+use std::bar::AA;
+use std::bar::B;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;",
+    )
+}
+
+#[test]
+fn insert_start_indent() {
+    // Same as insert_start, preserving the surrounding indentation.
+    check_none(
+        "std::bar::AA",
+        r"
+    use std::bar::B;
+    use std::bar::C;",
+        r"
+    use std::bar::AA;
+    use std::bar::B;
+    use std::bar::C;",
+    );
+}
+
+#[test]
+fn insert_middle() {
+    // The import is placed in sorted position inside its group.
+    cov_mark::check!(insert_group);
+    check_none(
+        "std::bar::EE",
+        r"
+use std::bar::A;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;",
+        r"
+use std::bar::A;
+use std::bar::D;
+use std::bar::EE;
+use std::bar::F;
+use std::bar::G;",
+    )
+}
+
+#[test]
+fn insert_middle_indent() {
+    // Sorted placement inside the group with indentation preserved.
+    check_none(
+        "std::bar::EE",
+        r"
+    use std::bar::A;
+    use std::bar::D;
+    use std::bar::F;
+    use std::bar::G;",
+        r"
+    use std::bar::A;
+    use std::bar::D;
+    use std::bar::EE;
+    use std::bar::F;
+    use std::bar::G;",
+    )
+}
+
+#[test]
+fn insert_end() {
+    // An alphabetically-last import is appended at the end of its group.
+    cov_mark::check!(insert_group_last);
+    check_none(
+        "std::bar::ZZ",
+        r"
+use std::bar::A;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;",
+        r"
+use std::bar::A;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;
+use std::bar::ZZ;",
+    )
+}
+
+#[test]
+fn insert_end_indent() {
+    // Appending at group end with indentation preserved.
+    check_none(
+        "std::bar::ZZ",
+        r"
+    use std::bar::A;
+    use std::bar::D;
+    use std::bar::F;
+    use std::bar::G;",
+        r"
+    use std::bar::A;
+    use std::bar::D;
+    use std::bar::F;
+    use std::bar::G;
+    use std::bar::ZZ;",
+    )
+}
+
+#[test]
+fn insert_middle_nested() {
+    // Sorted placement also works around imports with nested use-tree lists.
+    check_none(
+        "std::bar::EE",
+        r"
+use std::bar::A;
+use std::bar::{D, Z}; // example of weird imports due to user
+use std::bar::F;
+use std::bar::G;",
+        r"
+use std::bar::A;
+use std::bar::EE;
+use std::bar::{D, Z}; // example of weird imports due to user
+use std::bar::F;
+use std::bar::G;",
+    )
+}
+
+#[test]
+fn insert_middle_groups() {
+    // The import goes into the matching (second) group, not the first one.
+    check_none(
+        "foo::bar::GG",
+        r"
+    use std::bar::A;
+    use std::bar::D;
+
+    use foo::bar::F;
+    use foo::bar::H;",
+        r"
+    use std::bar::A;
+    use std::bar::D;
+
+    use foo::bar::F;
+    use foo::bar::GG;
+    use foo::bar::H;",
+    )
+}
+
+#[test]
+fn insert_first_matching_group() {
+    // The first group matching the path's category wins, even when another
+    // matching group exists later in the file.
+    check_none(
+        "foo::bar::GG",
+        r"
+    use foo::bar::A;
+    use foo::bar::D;
+
+    use std;
+
+    use foo::bar::F;
+    use foo::bar::H;",
+        r"
+    use foo::bar::A;
+    use foo::bar::D;
+    use foo::bar::GG;
+
+    use std;
+
+    use foo::bar::F;
+    use foo::bar::H;",
+    )
+}
+
+#[test]
+fn insert_missing_group_std() {
+    // A missing `std` group is created before the existing groups.
+    cov_mark::check!(insert_group_new_group);
+    check_none(
+        "std::fmt",
+        r"
+    use foo::bar::A;
+    use foo::bar::D;",
+        r"
+    use std::fmt;
+
+    use foo::bar::A;
+    use foo::bar::D;",
+    )
+}
+
+#[test]
+fn insert_missing_group_self() {
+    // A missing `self` group is created after all existing groups.
+    cov_mark::check!(insert_group_no_group);
+    check_none(
+        "self::fmt",
+        r"
+use foo::bar::A;
+use foo::bar::D;",
+        r"
+use foo::bar::A;
+use foo::bar::D;
+
+use self::fmt;",
+    )
+}
+
+#[test]
+fn insert_no_imports() {
+    // The first import of a file goes above the existing items.
+    check_crate(
+        "foo::bar",
+        "fn main() {}",
+        r"use foo::bar;
+
+fn main() {}",
+    )
+}
+
+#[test]
+fn insert_empty_file() {
+    // Inserting into an empty file works with and without grouping.
+    cov_mark::check_count!(insert_empty_file, 2);
+
+    // Default configuration
+    // empty files will get two trailing newlines
+    // this is due to the test case insert_no_imports above
+    check_crate(
+        "foo::bar",
+        "",
+        r"use foo::bar;
+
+",
+    );
+
+    // "not group" configuration
+    check_with_config(
+        "use external_crate2::bar::A",
+        r"",
+        r"use external_crate2::bar::A;
+
+",
+        &InsertUseConfig {
+            granularity: ImportGranularity::Item,
+            enforce_granularity: true,
+            prefix_kind: PrefixKind::Plain,
+            group: false,
+            skip_glob_imports: true,
+        },
+    );
+}
+
+#[test]
+fn insert_empty_module() {
+    // Inserting into an empty inline module body, grouped and ungrouped.
+    cov_mark::check_count!(insert_empty_module, 2);
+
+    // Default configuration
+    check(
+        "foo::bar",
+        r"
+mod x {$0}
+",
+        r"
+mod x {
+    use foo::bar;
+}
+",
+        ImportGranularity::Item,
+    );
+
+    // "not group" configuration
+    check_with_config(
+        "foo::bar",
+        r"mod x {$0}",
+        r"mod x {
+    use foo::bar;
+}",
+        &InsertUseConfig {
+            granularity: ImportGranularity::Item,
+            enforce_granularity: true,
+            prefix_kind: PrefixKind::Plain,
+            group: false,
+            skip_glob_imports: true,
+        },
+    );
+}
+
+#[test]
+fn insert_after_inner_attr() {
+    // The import goes after an inner attribute, grouped and ungrouped.
+    cov_mark::check_count!(insert_empty_inner_attr, 2);
+
+    // Default configuration
+    check_crate(
+        "foo::bar",
+        r"#![allow(unused_imports)]",
+        r"#![allow(unused_imports)]
+
+use foo::bar;",
+    );
+
+    // "not group" configuration
+    check_with_config(
+        "foo::bar",
+        r"#![allow(unused_imports)]",
+        r"#![allow(unused_imports)]
+
+use foo::bar;",
+        &InsertUseConfig {
+            granularity: ImportGranularity::Item,
+            enforce_granularity: true,
+            prefix_kind: PrefixKind::Plain,
+            group: false,
+            skip_glob_imports: true,
+        },
+    );
+}
+
+#[test]
+fn insert_after_inner_attr2() {
+    // The import goes after the last of several inner attributes, before items.
+    check_crate(
+        "foo::bar",
+        r"#![allow(unused_imports)]
+
+#![no_std]
+fn main() {}",
+        r"#![allow(unused_imports)]
+
+#![no_std]
+
+use foo::bar;
+fn main() {}",
+    );
+}
+
+#[test]
+fn inserts_after_single_line_inner_comments() {
+    // The import goes after `//!` inner comments, at file and module scope.
+    check_none(
+        "foo::bar::Baz",
+        "//! Single line inner comments do not allow any code before them.",
+        r#"//! Single line inner comments do not allow any code before them.
+
+use foo::bar::Baz;"#,
+    );
+    check_none(
+        "foo::bar::Baz",
+        r"mod foo {
+    //! Single line inner comments do not allow any code before them.
+$0
+}",
+        r"mod foo {
+    //! Single line inner comments do not allow any code before them.
+
+    use foo::bar::Baz;
+
+}",
+    );
+}
+
+#[test]
+fn inserts_after_single_line_comments() {
+ check_none(
+ "foo::bar::Baz",
+ "// Represents a possible license header and/or general module comments",
+ r#"// Represents a possible license header and/or general module comments
+
+use foo::bar::Baz;"#,
+ );
+}
+
+#[test]
+fn inserts_after_shebang() {
+ check_none(
+ "foo::bar::Baz",
+ "#!/usr/bin/env rust",
+ r#"#!/usr/bin/env rust
+
+use foo::bar::Baz;"#,
+ );
+}
+
+#[test]
+fn inserts_after_multiple_single_line_comments() {
+ check_none(
+ "foo::bar::Baz",
+ "// Represents a possible license header and/or general module comments
+// Second single-line comment
+// Third single-line comment",
+ r#"// Represents a possible license header and/or general module comments
+// Second single-line comment
+// Third single-line comment
+
+use foo::bar::Baz;"#,
+ );
+}
+
+#[test]
+fn inserts_before_single_line_item_comments() {
+ check_none(
+ "foo::bar::Baz",
+ r#"// Represents a comment about a function
+fn foo() {}"#,
+ r#"use foo::bar::Baz;
+
+// Represents a comment about a function
+fn foo() {}"#,
+ );
+}
+
+#[test]
+fn inserts_after_single_line_header_comments_and_before_item() {
+ check_none(
+ "foo::bar::Baz",
+ r#"// Represents a possible license header
+// Line two of possible license header
+
+fn foo() {}"#,
+ r#"// Represents a possible license header
+// Line two of possible license header
+
+use foo::bar::Baz;
+
+fn foo() {}"#,
+ );
+}
+
+#[test]
+fn inserts_after_multiline_inner_comments() {
+ check_none(
+ "foo::bar::Baz",
+ r#"/*! Multiline inner comments do not allow any code before them. */
+
+/*! Still an inner comment, cannot place any code before. */
+fn main() {}"#,
+ r#"/*! Multiline inner comments do not allow any code before them. */
+
+/*! Still an inner comment, cannot place any code before. */
+
+use foo::bar::Baz;
+fn main() {}"#,
+ )
+}
+
+#[test]
+fn inserts_after_all_inner_items() {
+ check_none(
+ "foo::bar::Baz",
+ r#"#![allow(unused_imports)]
+/*! Multiline line comment 2 */
+
+
+//! Single line comment 1
+#![no_std]
+//! Single line comment 2
+fn main() {}"#,
+ r#"#![allow(unused_imports)]
+/*! Multiline line comment 2 */
+
+
+//! Single line comment 1
+#![no_std]
+//! Single line comment 2
+
+use foo::bar::Baz;
+fn main() {}"#,
+ )
+}
+
+#[test]
+fn merge_groups() {
+ check_module("std::io", r"use std::fmt;", r"use std::{fmt, io};")
+}
+
+#[test]
+fn merge_groups_last() {
+ check_module(
+ "std::io",
+ r"use std::fmt::{Result, Display};",
+ r"use std::fmt::{Result, Display};
+use std::io;",
+ )
+}
+
+#[test]
+fn merge_last_into_self() {
+ check_module("foo::bar::baz", r"use foo::bar;", r"use foo::bar::{self, baz};");
+}
+
+#[test]
+fn merge_groups_full() {
+ check_crate(
+ "std::io",
+ r"use std::fmt::{Result, Display};",
+ r"use std::{fmt::{Result, Display}, io};",
+ )
+}
+
+#[test]
+fn merge_groups_long_full() {
+ check_crate("std::foo::bar::Baz", r"use std::foo::bar::Qux;", r"use std::foo::bar::{Qux, Baz};")
+}
+
+#[test]
+fn merge_groups_long_last() {
+ check_module(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::Qux;",
+ r"use std::foo::bar::{Qux, Baz};",
+ )
+}
+
+#[test]
+fn merge_groups_long_full_list() {
+ check_crate(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::{Qux, Quux};",
+ r"use std::foo::bar::{Qux, Quux, Baz};",
+ )
+}
+
+#[test]
+fn merge_groups_long_last_list() {
+ check_module(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::{Qux, Quux};",
+ r"use std::foo::bar::{Qux, Quux, Baz};",
+ )
+}
+
+#[test]
+fn merge_groups_long_full_nested() {
+ check_crate(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz}, Baz};",
+ )
+}
+
+#[test]
+fn merge_groups_long_last_nested() {
+ check_module(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
+ r"use std::foo::bar::Baz;
+use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
+ )
+}
+
+#[test]
+fn merge_groups_full_nested_deep() {
+ check_crate(
+ "std::foo::bar::quux::Baz",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz, Baz}};",
+ )
+}
+
+#[test]
+fn merge_groups_full_nested_long() {
+ check_crate(
+ "std::foo::bar::Baz",
+ r"use std::{foo::bar::Qux};",
+ r"use std::{foo::bar::{Qux, Baz}};",
+ );
+}
+
+#[test]
+fn merge_groups_last_nested_long() {
+ check_crate(
+ "std::foo::bar::Baz",
+ r"use std::{foo::bar::Qux};",
+ r"use std::{foo::bar::{Qux, Baz}};",
+ );
+}
+
+#[test]
+fn merge_groups_skip_pub() {
+ check_crate(
+ "std::io",
+ r"pub use std::fmt::{Result, Display};",
+ r"pub use std::fmt::{Result, Display};
+use std::io;",
+ )
+}
+
+#[test]
+fn merge_groups_skip_pub_crate() {
+ check_crate(
+ "std::io",
+ r"pub(crate) use std::fmt::{Result, Display};",
+ r"pub(crate) use std::fmt::{Result, Display};
+use std::io;",
+ )
+}
+
+#[test]
+fn merge_groups_skip_attributed() {
+ check_crate(
+ "std::io",
+ r#"
+#[cfg(feature = "gated")] use std::fmt::{Result, Display};
+"#,
+ r#"
+#[cfg(feature = "gated")] use std::fmt::{Result, Display};
+use std::io;
+"#,
+ )
+}
+
+#[test]
+fn split_out_merge() {
+ // FIXME: This is suboptimal, we want to get `use std::fmt::{self, Result}`
+ // instead.
+ check_module(
+ "std::fmt::Result",
+ r"use std::{fmt, io};",
+ r"use std::fmt::Result;
+use std::{fmt, io};",
+ )
+}
+
+#[test]
+fn merge_into_module_import() {
+ check_crate("std::fmt::Result", r"use std::{fmt, io};", r"use std::{fmt::{self, Result}, io};")
+}
+
+#[test]
+fn merge_groups_self() {
+ check_crate("std::fmt::Debug", r"use std::fmt;", r"use std::fmt::{self, Debug};")
+}
+
+#[test]
+fn merge_mod_into_glob() {
+ check_with_config(
+ "token::TokenKind",
+ r"use token::TokenKind::*;",
+ r"use token::TokenKind::{*, self};",
+ &InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: false,
+ },
+ )
+ // FIXME: have it emit `use token::TokenKind::{self, *}`?
+}
+
+#[test]
+fn merge_self_glob() {
+ check_with_config(
+ "self",
+ r"use self::*;",
+ r"use self::{*, self};",
+ &InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: false,
+ },
+ )
+ // FIXME: have it emit `use {self, *}`?
+}
+
+#[test]
+fn merge_glob() {
+ check_crate(
+ "syntax::SyntaxKind",
+ r"
+use syntax::{SyntaxKind::*};",
+ r"
+use syntax::{SyntaxKind::{*, self}};",
+ )
+}
+
+#[test]
+fn merge_glob_nested() {
+ check_crate(
+ "foo::bar::quux::Fez",
+ r"use foo::bar::{Baz, quux::*};",
+ r"use foo::bar::{Baz, quux::{*, Fez}};",
+ )
+}
+
+#[test]
+fn merge_nested_considers_first_segments() {
+ check_crate(
+ "hir_ty::display::write_bounds_like_dyn_trait",
+ r"use hir_ty::{autoderef, display::{HirDisplayError, HirFormatter}, method_resolution};",
+ r"use hir_ty::{autoderef, display::{HirDisplayError, HirFormatter, write_bounds_like_dyn_trait}, method_resolution};",
+ );
+}
+
+#[test]
+fn skip_merge_last_too_long() {
+ check_module(
+ "foo::bar",
+ r"use foo::bar::baz::Qux;",
+ r"use foo::bar;
+use foo::bar::baz::Qux;",
+ );
+}
+
+#[test]
+fn skip_merge_last_too_long2() {
+ check_module(
+ "foo::bar::baz::Qux",
+ r"use foo::bar;",
+ r"use foo::bar;
+use foo::bar::baz::Qux;",
+ );
+}
+
+#[test]
+fn insert_short_before_long() {
+ check_none(
+ "foo::bar",
+ r"use foo::bar::baz::Qux;",
+ r"use foo::bar;
+use foo::bar::baz::Qux;",
+ );
+}
+
+#[test]
+fn merge_last_fail() {
+ check_merge_only_fail(
+ r"use foo::bar::{baz::{Qux, Fez}};",
+ r"use foo::bar::{baaz::{Quux, Feez}};",
+ MergeBehavior::Module,
+ );
+}
+
+#[test]
+fn merge_last_fail1() {
+ check_merge_only_fail(
+ r"use foo::bar::{baz::{Qux, Fez}};",
+ r"use foo::bar::baaz::{Quux, Feez};",
+ MergeBehavior::Module,
+ );
+}
+
+#[test]
+fn merge_last_fail2() {
+ check_merge_only_fail(
+ r"use foo::bar::baz::{Qux, Fez};",
+ r"use foo::bar::{baaz::{Quux, Feez}};",
+ MergeBehavior::Module,
+ );
+}
+
+#[test]
+fn merge_last_fail3() {
+ check_merge_only_fail(
+ r"use foo::bar::baz::{Qux, Fez};",
+ r"use foo::bar::baaz::{Quux, Feez};",
+ MergeBehavior::Module,
+ );
+}
+
+#[test]
+fn guess_empty() {
+ check_guess("", ImportGranularityGuess::Unknown);
+}
+
+#[test]
+fn guess_single() {
+ check_guess(r"use foo::{baz::{qux, quux}, bar};", ImportGranularityGuess::Crate);
+ check_guess(r"use foo::bar;", ImportGranularityGuess::Unknown);
+ check_guess(r"use foo::bar::{baz, qux};", ImportGranularityGuess::CrateOrModule);
+}
+
+#[test]
+fn guess_unknown() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use oof::rab::xuq;
+",
+ ImportGranularityGuess::Unknown,
+ );
+}
+
+#[test]
+fn guess_item() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use foo::bar::qux;
+",
+ ImportGranularityGuess::Item,
+ );
+}
+
+#[test]
+fn guess_module_or_item() {
+ check_guess(
+ r"
+use foo::bar::Bar;
+use foo::qux;
+",
+ ImportGranularityGuess::ModuleOrItem,
+ );
+ check_guess(
+ r"
+use foo::bar::Bar;
+use foo::bar;
+",
+ ImportGranularityGuess::ModuleOrItem,
+ );
+}
+
+#[test]
+fn guess_module() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use foo::bar::{qux, quux};
+",
+ ImportGranularityGuess::Module,
+ );
+ // this is a rather odd case, technically this file isn't following any style properly.
+ check_guess(
+ r"
+use foo::bar::baz;
+use foo::{baz::{qux, quux}, bar};
+",
+ ImportGranularityGuess::Module,
+ );
+ check_guess(
+ r"
+use foo::bar::Bar;
+use foo::baz::Baz;
+use foo::{Foo, Qux};
+",
+ ImportGranularityGuess::Module,
+ );
+}
+
+#[test]
+fn guess_crate_or_module() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use oof::bar::{qux, quux};
+",
+ ImportGranularityGuess::CrateOrModule,
+ );
+}
+
+#[test]
+fn guess_crate() {
+ check_guess(
+ r"
+use frob::bar::baz;
+use foo::{baz::{qux, quux}, bar};
+",
+ ImportGranularityGuess::Crate,
+ );
+}
+
+#[test]
+fn guess_skips_differing_vis() {
+ check_guess(
+ r"
+use foo::bar::baz;
+pub use foo::bar::qux;
+",
+ ImportGranularityGuess::Unknown,
+ );
+}
+
+#[test]
+fn guess_skips_differing_attrs() {
+ check_guess(
+ r"
+pub use foo::bar::baz;
+#[doc(hidden)]
+pub use foo::bar::qux;
+",
+ ImportGranularityGuess::Unknown,
+ );
+}
+
+#[test]
+fn guess_grouping_matters() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use oof::bar::baz;
+use foo::bar::qux;
+",
+ ImportGranularityGuess::Unknown,
+ );
+}
+
+fn check_with_config(
+ path: &str,
+ ra_fixture_before: &str,
+ ra_fixture_after: &str,
+ config: &InsertUseConfig,
+) {
+ let (db, file_id, pos) = if ra_fixture_before.contains(CURSOR_MARKER) {
+ let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture_before);
+ (db, file_id, Some(range_or_offset))
+ } else {
+ let (db, file_id) = RootDatabase::with_single_file(ra_fixture_before);
+ (db, file_id, None)
+ };
+ let sema = &Semantics::new(&db);
+ let source_file = sema.parse(file_id);
+ let syntax = source_file.syntax().clone_for_update();
+ let file = pos
+ .and_then(|pos| syntax.token_at_offset(pos.expect_offset()).next()?.parent())
+ .and_then(|it| ImportScope::find_insert_use_container(&it, sema))
+ .or_else(|| ImportScope::from(syntax))
+ .unwrap();
+ let path = ast::SourceFile::parse(&format!("use {};", path))
+ .tree()
+ .syntax()
+ .descendants()
+ .find_map(ast::Path::cast)
+ .unwrap();
+
+ insert_use(&file, path, config);
+ let result = file.as_syntax_node().ancestors().last().unwrap().to_string();
+ assert_eq_text!(&trim_indent(ra_fixture_after), &result);
+}
+
+fn check(
+ path: &str,
+ ra_fixture_before: &str,
+ ra_fixture_after: &str,
+ granularity: ImportGranularity,
+) {
+ check_with_config(
+ path,
+ ra_fixture_before,
+ ra_fixture_after,
+ &InsertUseConfig {
+ granularity,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: true,
+ skip_glob_imports: true,
+ },
+ )
+}
+
+fn check_crate(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ check(path, ra_fixture_before, ra_fixture_after, ImportGranularity::Crate)
+}
+
+fn check_module(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ check(path, ra_fixture_before, ra_fixture_after, ImportGranularity::Module)
+}
+
+fn check_none(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ check(path, ra_fixture_before, ra_fixture_after, ImportGranularity::Item)
+}
+
+fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior) {
+ let use0 = ast::SourceFile::parse(ra_fixture0)
+ .tree()
+ .syntax()
+ .descendants()
+ .find_map(ast::Use::cast)
+ .unwrap();
+
+ let use1 = ast::SourceFile::parse(ra_fixture1)
+ .tree()
+ .syntax()
+ .descendants()
+ .find_map(ast::Use::cast)
+ .unwrap();
+
+ let result = try_merge_imports(&use0, &use1, mb);
+ assert_eq!(result.map(|u| u.to_string()), None);
+}
+
+fn check_guess(ra_fixture: &str, expected: ImportGranularityGuess) {
+ let syntax = ast::SourceFile::parse(ra_fixture).tree().syntax().clone();
+ let file = ImportScope::from(syntax).unwrap();
+ assert_eq!(super::guess_granularity_from_scope(&file), expected);
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
new file mode 100644
index 000000000..7fb4b90e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
@@ -0,0 +1,295 @@
+//! Handle syntactic aspects of merging UseTrees.
+use std::cmp::Ordering;
+
+use itertools::{EitherOrBoth, Itertools};
+use syntax::{
+ ast::{self, AstNode, HasAttrs, HasVisibility, PathSegmentKind},
+ ted,
+};
+
+use crate::syntax_helpers::node_ext::vis_eq;
+
+/// What type of merges are allowed.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum MergeBehavior {
+ /// Merge imports from the same crate into a single use statement.
+ Crate,
+ /// Merge imports from the same module into a single use statement.
+ Module,
+}
+
+impl MergeBehavior {
+ fn is_tree_allowed(&self, tree: &ast::UseTree) -> bool {
+ match self {
+ MergeBehavior::Crate => true,
+ // only simple single segment paths are allowed
+ MergeBehavior::Module => {
+ tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
+ }
+ }
+ }
+}
+
+/// Merge `rhs` into `lhs` keeping both intact.
+/// Returned AST is mutable.
+pub fn try_merge_imports(
+ lhs: &ast::Use,
+ rhs: &ast::Use,
+ merge_behavior: MergeBehavior,
+) -> Option<ast::Use> {
+ // don't merge imports with different visibilities
+ if !eq_visibility(lhs.visibility(), rhs.visibility()) {
+ return None;
+ }
+ if !eq_attrs(lhs.attrs(), rhs.attrs()) {
+ return None;
+ }
+
+ let lhs = lhs.clone_subtree().clone_for_update();
+ let rhs = rhs.clone_subtree().clone_for_update();
+ let lhs_tree = lhs.use_tree()?;
+ let rhs_tree = rhs.use_tree()?;
+ try_merge_trees_mut(&lhs_tree, &rhs_tree, merge_behavior)?;
+ Some(lhs)
+}
+
+/// Merge `rhs` into `lhs` keeping both intact.
+/// Returned AST is mutable.
+pub fn try_merge_trees(
+ lhs: &ast::UseTree,
+ rhs: &ast::UseTree,
+ merge: MergeBehavior,
+) -> Option<ast::UseTree> {
+ let lhs = lhs.clone_subtree().clone_for_update();
+ let rhs = rhs.clone_subtree().clone_for_update();
+ try_merge_trees_mut(&lhs, &rhs, merge)?;
+ Some(lhs)
+}
+
+fn try_merge_trees_mut(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) -> Option<()> {
+ let lhs_path = lhs.path()?;
+ let rhs_path = rhs.path()?;
+
+ let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?;
+ if !(lhs.is_simple_path()
+ && rhs.is_simple_path()
+ && lhs_path == lhs_prefix
+ && rhs_path == rhs_prefix)
+ {
+ lhs.split_prefix(&lhs_prefix);
+ rhs.split_prefix(&rhs_prefix);
+ }
+ recursive_merge(lhs, rhs, merge)
+}
+
+/// Recursively merges rhs to lhs
+#[must_use]
+fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) -> Option<()> {
+ let mut use_trees: Vec<ast::UseTree> = lhs
+ .use_tree_list()
+ .into_iter()
+ .flat_map(|list| list.use_trees())
+ // We use Option here to early return from this function (this is not the
+ // same as a `filter` op).
+ .map(|tree| merge.is_tree_allowed(&tree).then(|| tree))
+ .collect::<Option<_>>()?;
+ use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path()));
+ for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) {
+ if !merge.is_tree_allowed(&rhs_t) {
+ return None;
+ }
+ let rhs_path = rhs_t.path();
+
+ match use_trees
+ .binary_search_by(|lhs_t| path_cmp_bin_search(lhs_t.path(), rhs_path.as_ref()))
+ {
+ Ok(idx) => {
+ let lhs_t = &mut use_trees[idx];
+ let lhs_path = lhs_t.path()?;
+ let rhs_path = rhs_path?;
+ let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?;
+ if lhs_prefix == lhs_path && rhs_prefix == rhs_path {
+ let tree_is_self = |tree: &ast::UseTree| {
+ tree.path().as_ref().map(path_is_self).unwrap_or(false)
+ };
+ // Check if only one of the two trees has a tree list, and
+ // whether that then contains `self` or not. If this is the
+ // case we can skip this iteration since the path without
+ // the list is already included in the other one via `self`.
+ let tree_contains_self = |tree: &ast::UseTree| {
+ tree.use_tree_list()
+ .map(|tree_list| tree_list.use_trees().any(|it| tree_is_self(&it)))
+ // Glob imports aren't part of the use-tree lists,
+ // so they need to be handled explicitly
+ .or_else(|| tree.star_token().map(|_| false))
+ };
+ match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
+ (Some(true), None) => continue,
+ (None, Some(true)) => {
+ ted::replace(lhs_t.syntax(), rhs_t.syntax());
+ *lhs_t = rhs_t;
+ continue;
+ }
+ _ => (),
+ }
+
+ if lhs_t.is_simple_path() && rhs_t.is_simple_path() {
+ continue;
+ }
+ }
+ lhs_t.split_prefix(&lhs_prefix);
+ rhs_t.split_prefix(&rhs_prefix);
+ recursive_merge(lhs_t, &rhs_t, merge)?;
+ }
+ Err(_)
+ if merge == MergeBehavior::Module
+ && !use_trees.is_empty()
+ && rhs_t.use_tree_list().is_some() =>
+ {
+ return None
+ }
+ Err(idx) => {
+ use_trees.insert(idx, rhs_t.clone());
+ lhs.get_or_create_use_tree_list().add_use_tree(rhs_t);
+ }
+ }
+ }
+ Some(())
+}
+
+/// Traverses both paths until they differ, returning the common prefix of both.
+pub fn common_prefix(lhs: &ast::Path, rhs: &ast::Path) -> Option<(ast::Path, ast::Path)> {
+ let mut res = None;
+ let mut lhs_curr = lhs.first_qualifier_or_self();
+ let mut rhs_curr = rhs.first_qualifier_or_self();
+ loop {
+ match (lhs_curr.segment(), rhs_curr.segment()) {
+ (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (),
+ _ => break res,
+ }
+ res = Some((lhs_curr.clone(), rhs_curr.clone()));
+
+ match lhs_curr.parent_path().zip(rhs_curr.parent_path()) {
+ Some((lhs, rhs)) => {
+ lhs_curr = lhs;
+ rhs_curr = rhs;
+ }
+ _ => break res,
+ }
+ }
+}
+
+/// Orders paths in the following way:
+/// the sole self token comes first, after that come uppercase identifiers, then lowercase identifiers
+// FIXME: rustfmt sorts lowercase idents before uppercase, in general we want to have the same ordering rustfmt has
+// which is `self` and `super` first, then identifier imports with lowercase ones first, then glob imports and at last list imports.
+// Example foo::{self, foo, baz, Baz, Qux, *, {Bar}}
+fn path_cmp_for_sort(a: Option<ast::Path>, b: Option<ast::Path>) -> Ordering {
+ match (a, b) {
+ (None, None) => Ordering::Equal,
+ (None, Some(_)) => Ordering::Less,
+ (Some(_), None) => Ordering::Greater,
+ (Some(ref a), Some(ref b)) => match (path_is_self(a), path_is_self(b)) {
+ (true, true) => Ordering::Equal,
+ (true, false) => Ordering::Less,
+ (false, true) => Ordering::Greater,
+ (false, false) => path_cmp_short(a, b),
+ },
+ }
+}
+
+/// Path comparison func for binary searching for merging.
+fn path_cmp_bin_search(lhs: Option<ast::Path>, rhs: Option<&ast::Path>) -> Ordering {
+ match (lhs.as_ref().and_then(ast::Path::first_segment), rhs.and_then(ast::Path::first_segment))
+ {
+ (None, None) => Ordering::Equal,
+ (None, Some(_)) => Ordering::Less,
+ (Some(_), None) => Ordering::Greater,
+ (Some(ref a), Some(ref b)) => path_segment_cmp(a, b),
+ }
+}
+
+/// Short circuiting comparison, if both paths are equal until one of them ends they are considered
+/// equal
+fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering {
+ let a = a.segments();
+ let b = b.segments();
+ // cmp_by would be useful for us here but that is currently unstable
+ // cmp doesn't work due to the lifetimes on text's return type
+ a.zip(b)
+ .find_map(|(a, b)| match path_segment_cmp(&a, &b) {
+ Ordering::Equal => None,
+ ord => Some(ord),
+ })
+ .unwrap_or(Ordering::Equal)
+}
+
+/// Compares two paths, if one ends earlier than the other the has_tl parameters decide which is
+/// greater as a path that has a tree list should be greater, while one that just ends without
+/// a tree list should be considered less.
+pub(super) fn use_tree_path_cmp(
+ a: &ast::Path,
+ a_has_tl: bool,
+ b: &ast::Path,
+ b_has_tl: bool,
+) -> Ordering {
+ let a_segments = a.segments();
+ let b_segments = b.segments();
+ // cmp_by would be useful for us here but that is currently unstable
+ // cmp doesn't work due to the lifetimes on text's return type
+ a_segments
+ .zip_longest(b_segments)
+ .find_map(|zipped| match zipped {
+ EitherOrBoth::Both(ref a, ref b) => match path_segment_cmp(a, b) {
+ Ordering::Equal => None,
+ ord => Some(ord),
+ },
+ EitherOrBoth::Left(_) if !b_has_tl => Some(Ordering::Greater),
+ EitherOrBoth::Left(_) => Some(Ordering::Less),
+ EitherOrBoth::Right(_) if !a_has_tl => Some(Ordering::Less),
+ EitherOrBoth::Right(_) => Some(Ordering::Greater),
+ })
+ .unwrap_or(Ordering::Equal)
+}
+
+fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {
+ let a = a.kind().and_then(|kind| match kind {
+ PathSegmentKind::Name(name_ref) => Some(name_ref),
+ _ => None,
+ });
+ let b = b.kind().and_then(|kind| match kind {
+ PathSegmentKind::Name(name_ref) => Some(name_ref),
+ _ => None,
+ });
+ a.as_ref().map(ast::NameRef::text).cmp(&b.as_ref().map(ast::NameRef::text))
+}
+
+pub fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) -> bool {
+ match (vis0, vis1) {
+ (None, None) => true,
+ (Some(vis0), Some(vis1)) => vis_eq(&vis0, &vis1),
+ _ => false,
+ }
+}
+
+pub fn eq_attrs(
+ attrs0: impl Iterator<Item = ast::Attr>,
+ attrs1: impl Iterator<Item = ast::Attr>,
+) -> bool {
+ // FIXME order of attributes should not matter
+ let attrs0 = attrs0
+ .flat_map(|attr| attr.syntax().descendants_with_tokens())
+ .flat_map(|it| it.into_token());
+ let attrs1 = attrs1
+ .flat_map(|attr| attr.syntax().descendants_with_tokens())
+ .flat_map(|it| it.into_token());
+ stdx::iter_eq_by(attrs0, attrs1, |tok, tok2| tok.text() == tok2.text())
+}
+
+fn path_is_self(path: &ast::Path) -> bool {
+ path.segment().and_then(|seg| seg.self_token()).is_some() && path.qualifier().is_none()
+}
+
+fn path_len(path: ast::Path) -> usize {
+ path.segments().count()
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
new file mode 100644
index 000000000..07a57c883
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
@@ -0,0 +1,151 @@
+//! This module has the functionality to search the project and its dependencies for a certain item,
+//! by its name and a few criteria.
+//! The main reason for this module to exist is the fact that project's items and dependencies' items
+//! are located in different caches, with different APIs.
+use either::Either;
+use hir::{
+ import_map::{self, ImportKind},
+ symbols::FileSymbol,
+ AsAssocItem, Crate, ItemInNs, Semantics,
+};
+use limit::Limit;
+use syntax::{ast, AstNode, SyntaxKind::NAME};
+
+use crate::{
+ defs::{Definition, NameClass},
+ imports::import_assets::NameToImport,
+ symbol_index, RootDatabase,
+};
+
+/// A value to use, when uncertain which limit to pick.
+pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
+
+/// Three possible ways to search for the name in associated and/or other items.
+#[derive(Debug, Clone, Copy)]
+pub enum AssocItemSearch {
+ /// Search for the name in both associated and other items.
+ Include,
+ /// Search for the name in other items only.
+ Exclude,
+ /// Search for the name in the associated items only.
+ AssocItemsOnly,
+}
+
+/// Searches for importable items with the given name in the crate and its dependencies.
+pub fn items_with_name<'a>(
+ sema: &'a Semantics<'_, RootDatabase>,
+ krate: Crate,
+ name: NameToImport,
+ assoc_item_search: AssocItemSearch,
+ limit: Option<usize>,
+) -> impl Iterator<Item = ItemInNs> + 'a {
+ let _p = profile::span("items_with_name").detail(|| {
+ format!(
+ "Name: {}, crate: {:?}, assoc items: {:?}, limit: {:?}",
+ name.text(),
+ assoc_item_search,
+ krate.display_name(sema.db).map(|name| name.to_string()),
+ limit,
+ )
+ });
+
+ let (mut local_query, mut external_query) = match name {
+ NameToImport::Exact(exact_name, case_sensitive) => {
+ let mut local_query = symbol_index::Query::new(exact_name.clone());
+ local_query.exact();
+
+ let external_query = import_map::Query::new(exact_name)
+ .name_only()
+ .search_mode(import_map::SearchMode::Equals);
+
+ (
+ local_query,
+ if case_sensitive { external_query.case_sensitive() } else { external_query },
+ )
+ }
+ NameToImport::Fuzzy(fuzzy_search_string) => {
+ let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
+
+ let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
+ .search_mode(import_map::SearchMode::Fuzzy)
+ .name_only();
+ match assoc_item_search {
+ AssocItemSearch::Include => {}
+ AssocItemSearch::Exclude => {
+ external_query = external_query.exclude_import_kind(ImportKind::AssociatedItem);
+ }
+ AssocItemSearch::AssocItemsOnly => {
+ external_query = external_query.assoc_items_only();
+ }
+ }
+
+ if fuzzy_search_string.to_lowercase() != fuzzy_search_string {
+ local_query.case_sensitive();
+ external_query = external_query.case_sensitive();
+ }
+
+ (local_query, external_query)
+ }
+ };
+
+ if let Some(limit) = limit {
+ external_query = external_query.limit(limit);
+ local_query.limit(limit);
+ }
+
+ find_items(sema, krate, assoc_item_search, local_query, external_query)
+}
+
+fn find_items<'a>(
+ sema: &'a Semantics<'_, RootDatabase>,
+ krate: Crate,
+ assoc_item_search: AssocItemSearch,
+ local_query: symbol_index::Query,
+ external_query: import_map::Query,
+) -> impl Iterator<Item = ItemInNs> + 'a {
+ let _p = profile::span("find_items");
+ let db = sema.db;
+
+ let external_importables =
+ krate.query_external_importables(db, external_query).map(|external_importable| {
+ match external_importable {
+ Either::Left(module_def) => ItemInNs::from(module_def),
+ Either::Right(macro_def) => ItemInNs::from(macro_def),
+ }
+ });
+
+ // Query the local crate using the symbol index.
+ let local_results = symbol_index::crate_symbols(db, krate, local_query)
+ .into_iter()
+ .filter_map(move |local_candidate| get_name_definition(sema, &local_candidate))
+ .filter_map(|name_definition_to_import| match name_definition_to_import {
+ Definition::Macro(macro_def) => Some(ItemInNs::from(macro_def)),
+ def => <Option<_>>::from(def),
+ });
+
+ external_importables.chain(local_results).filter(move |&item| match assoc_item_search {
+ AssocItemSearch::Include => true,
+ AssocItemSearch::Exclude => !is_assoc_item(item, sema.db),
+ AssocItemSearch::AssocItemsOnly => is_assoc_item(item, sema.db),
+ })
+}
+
+fn get_name_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ import_candidate: &FileSymbol,
+) -> Option<Definition> {
+ let _p = profile::span("get_name_definition");
+
+ let candidate_node = import_candidate.loc.syntax(sema)?;
+ let candidate_name_node = if candidate_node.kind() != NAME {
+ candidate_node.children().find(|it| it.kind() == NAME)?
+ } else {
+ candidate_node
+ };
+ let name = ast::Name::cast(candidate_name_node)?;
+ NameClass::classify(sema, &name)?.defined()
+}
+
+fn is_assoc_item(item: ItemInNs, db: &RootDatabase) -> bool {
+ item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db)).is_some()
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/label.rs b/src/tools/rust-analyzer/crates/ide-db/src/label.rs
new file mode 100644
index 000000000..4b6d54b5e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/label.rs
@@ -0,0 +1,48 @@
+//! See [`Label`]
+use std::fmt;
+
+/// A type to specify UI label, like an entry in the list of assists. Enforces
+/// proper casing:
+///
+/// Frobnicate bar
+///
+/// Note the upper-case first letter and the absence of `.` at the end.
+#[derive(Clone)]
+pub struct Label(String);
+
+impl PartialEq<str> for Label {
+ fn eq(&self, other: &str) -> bool {
+ self.0 == other
+ }
+}
+
+impl PartialEq<&'_ str> for Label {
+ fn eq(&self, other: &&str) -> bool {
+ self == *other
+ }
+}
+
+impl From<Label> for String {
+ fn from(label: Label) -> String {
+ label.0
+ }
+}
+
+impl Label {
+ pub fn new(label: String) -> Label {
+ assert!(label.starts_with(char::is_uppercase) && !label.ends_with('.'));
+ Label(label)
+ }
+}
+
+impl fmt::Display for Label {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0, f)
+ }
+}
+
+impl fmt::Debug for Label {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.0, f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
new file mode 100644
index 000000000..966bba616
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -0,0 +1,246 @@
+//! This crate defines the core datastructure representing IDE state -- `RootDatabase`.
+//!
+//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod apply_change;
+
+pub mod active_parameter;
+pub mod assists;
+pub mod defs;
+pub mod famous_defs;
+pub mod helpers;
+pub mod items_locator;
+pub mod label;
+pub mod line_index;
+pub mod path_transform;
+pub mod rename;
+pub mod rust_doc;
+pub mod search;
+pub mod source_change;
+pub mod symbol_index;
+pub mod traits;
+pub mod ty_filter;
+pub mod use_trivial_contructor;
+
+pub mod imports {
+ pub mod import_assets;
+ pub mod insert_use;
+ pub mod merge_imports;
+}
+
+pub mod generated {
+ pub mod lints;
+}
+
+pub mod syntax_helpers {
+ pub mod node_ext;
+ pub mod insert_whitespace_into_node;
+ pub mod format_string;
+
+ pub use parser::LexedStr;
+}
+
+use std::{fmt, mem::ManuallyDrop, sync::Arc};
+
+use base_db::{
+ salsa::{self, Durability},
+ AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+};
+use hir::{
+ db::{AstDatabase, DefDatabase, HirDatabase},
+ symbols::FileSymbolKind,
+};
+
+use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
+pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
+
+/// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience.
+pub use base_db;
+
+pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
+pub type FxIndexMap<K, V> =
+ indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
+
+/// The core IDE database; aggregates every salsa query-group storage listed
+/// in the `#[salsa::database]` attribute.
+#[salsa::database(
+    base_db::SourceDatabaseExtStorage,
+    base_db::SourceDatabaseStorage,
+    hir::db::AstDatabaseStorage,
+    hir::db::DefDatabaseStorage,
+    hir::db::HirDatabaseStorage,
+    hir::db::InternDatabaseStorage,
+    LineIndexDatabaseStorage,
+    symbol_index::SymbolsDatabaseStorage
+)]
+pub struct RootDatabase {
+    // We use `ManuallyDrop` here because every codegen unit that contains a
+    // `&RootDatabase -> &dyn OtherDatabase` cast will instantiate its drop glue in the vtable,
+    // which duplicates `Weak::drop` and `Arc::drop` tens of thousands of times, which makes
+    // compile times of all `ide_*` and downstream crates suffer greatly.
+    storage: ManuallyDrop<salsa::Storage<RootDatabase>>,
+}
+
+impl Drop for RootDatabase {
+    fn drop(&mut self) {
+        // SAFETY: `storage` is dropped exactly once, here, and is never
+        // accessed afterwards, which is `ManuallyDrop::drop`'s contract.
+        unsafe { ManuallyDrop::drop(&mut self.storage) };
+    }
+}
+
+impl fmt::Debug for RootDatabase {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // `storage` is intentionally omitted from the output.
+        f.debug_struct("RootDatabase").finish()
+    }
+}
+
+// The `Upcast` impls let a `&RootDatabase` be handed out wherever one of the
+// narrower database trait objects is expected.
+impl Upcast<dyn AstDatabase> for RootDatabase {
+    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+        &*self
+    }
+}
+
+impl Upcast<dyn DefDatabase> for RootDatabase {
+    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
+        &*self
+    }
+}
+
+impl Upcast<dyn HirDatabase> for RootDatabase {
+    fn upcast(&self) -> &(dyn HirDatabase + 'static) {
+        &*self
+    }
+}
+
+// All file-loading queries are delegated to `FileLoaderDelegate`.
+impl FileLoader for RootDatabase {
+    fn file_text(&self, file_id: FileId) -> Arc<String> {
+        FileLoaderDelegate(self).file_text(file_id)
+    }
+    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+        FileLoaderDelegate(self).resolve_path(path)
+    }
+    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+        FileLoaderDelegate(self).relevant_crates(file_id)
+    }
+}
+
+impl salsa::Database for RootDatabase {}
+
+impl Default for RootDatabase {
+    fn default() -> RootDatabase {
+        RootDatabase::new(None)
+    }
+}
+
+impl RootDatabase {
+    /// Creates an empty database: the crate graph and source roots start out
+    /// empty (set with `HIGH` durability), proc-macro attribute expansion is
+    /// disabled, and the parse/macro-expansion queries get an LRU cache of
+    /// `lru_capacity` entries (`base_db::DEFAULT_LRU_CAP` when `None`).
+    pub fn new(lru_capacity: Option<usize>) -> RootDatabase {
+        let mut db = RootDatabase { storage: ManuallyDrop::new(salsa::Storage::default()) };
+        db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
+        db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
+        db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
+        db.set_enable_proc_attr_macros(false);
+        db.update_lru_capacity(lru_capacity);
+        db
+    }
+
+    /// Resizes the LRU caches of the parse and macro-expansion queries.
+    pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
+        let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP);
+        base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+        hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+        hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+    }
+}
+
+// Snapshot support, so queries can be run from other threads; see
+// [`salsa::ParallelDatabase`].
+impl salsa::ParallelDatabase for RootDatabase {
+    fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
+        salsa::Snapshot::new(RootDatabase { storage: ManuallyDrop::new(self.storage.snapshot()) })
+    }
+}
+
+#[salsa::query_group(LineIndexDatabaseStorage)]
+pub trait LineIndexDatabase: base_db::SourceDatabase {
+    /// Returns the (memoized) line index for `file_id`.
+    fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
+}
+
+/// Query function backing [`LineIndexDatabase::line_index`]: builds a fresh
+/// [`LineIndex`] from the file's current text.
+fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
+    let text = db.file_text(file_id);
+    Arc::new(LineIndex::new(&*text))
+}
+
+/// Classifies what kind of language item a symbol refers to.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum SymbolKind {
+    Attribute,
+    BuiltinAttr,
+    Const,
+    ConstParam,
+    Derive,
+    DeriveHelper,
+    Enum,
+    Field,
+    Function,
+    Impl,
+    Label,
+    LifetimeParam,
+    Local,
+    Macro,
+    Module,
+    SelfParam,
+    SelfType,
+    Static,
+    Struct,
+    ToolModule,
+    Trait,
+    TypeAlias,
+    TypeParam,
+    Union,
+    Variant,
+}
+
+impl From<hir::MacroKind> for SymbolKind {
+    fn from(it: hir::MacroKind) -> Self {
+        match it {
+            // All function-like macros collapse into a single `Macro` kind;
+            // only derive and attribute macros get distinct kinds.
+            hir::MacroKind::Declarative | hir::MacroKind::BuiltIn | hir::MacroKind::ProcMacro => {
+                SymbolKind::Macro
+            }
+            hir::MacroKind::Derive => SymbolKind::Derive,
+            hir::MacroKind::Attr => SymbolKind::Attribute,
+        }
+    }
+}
+
+impl From<FileSymbolKind> for SymbolKind {
+    fn from(it: FileSymbolKind) -> Self {
+        match it {
+            FileSymbolKind::Const => SymbolKind::Const,
+            FileSymbolKind::Enum => SymbolKind::Enum,
+            FileSymbolKind::Function => SymbolKind::Function,
+            FileSymbolKind::Macro => SymbolKind::Macro,
+            FileSymbolKind::Module => SymbolKind::Module,
+            FileSymbolKind::Static => SymbolKind::Static,
+            FileSymbolKind::Struct => SymbolKind::Struct,
+            FileSymbolKind::Trait => SymbolKind::Trait,
+            FileSymbolKind::TypeAlias => SymbolKind::TypeAlias,
+            FileSymbolKind::Union => SymbolKind::Union,
+        }
+    }
+}
+
+/// Capability token proving that the client supports snippet edits.
+/// Can only be obtained via [`SnippetCap::new`], and only when snippets are
+/// actually allowed, so holding a `SnippetCap` is proof of support.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct SnippetCap {
+    _private: (),
+}
+
+impl SnippetCap {
+    /// Returns `Some(SnippetCap)` iff `allow_snippets` is `true`.
+    pub const fn new(allow_snippets: bool) -> Option<SnippetCap> {
+        if allow_snippets {
+            Some(SnippetCap { _private: () })
+        } else {
+            None
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    // NOTE(review): presumably a sourcegen test that (re)generates the
+    // `generated::lints` module — confirm against the module itself.
+    mod sourcegen_lints;
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
new file mode 100644
index 000000000..68ad07ee8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
@@ -0,0 +1,300 @@
+//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
+//! representation.
+use std::{iter, mem};
+
+use rustc_hash::FxHashMap;
+use syntax::{TextRange, TextSize};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct LineIndex {
+    /// Offset to the beginning of each line, zero-based
+    pub(crate) newlines: Vec<TextSize>,
+    /// List of non-ASCII characters on each line
+    pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
+}
+
+/// A `(line, column)` position with the column counted in UTF-16 code units.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LineColUtf16 {
+    /// Zero-based
+    pub line: u32,
+    /// Zero-based
+    pub col: u32,
+}
+
+/// A `(line, column)` position with the column counted in UTF-8 bytes.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LineCol {
+    /// Zero-based
+    pub line: u32,
+    /// Zero-based utf8 offset
+    pub col: u32,
+}
+
+/// A single non-ASCII character, recorded so that UTF-8 columns can be
+/// converted to/from UTF-16 columns.
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub(crate) struct Utf16Char {
+    /// Start offset of a character inside a line, zero-based
+    pub(crate) start: TextSize,
+    /// End offset of a character inside a line, zero-based
+    pub(crate) end: TextSize,
+}
+
+impl Utf16Char {
+    /// Returns the length in 8-bit UTF-8 code units.
+    fn len(&self) -> TextSize {
+        self.end - self.start
+    }
+
+    /// Returns the length in 16-bit UTF-16 code units.
+    fn len_utf16(&self) -> usize {
+        // A 4-byte UTF-8 sequence encodes a code point outside the Basic
+        // Multilingual Plane, which takes a surrogate pair (2 units) in
+        // UTF-16; every shorter sequence fits in a single UTF-16 unit.
+        if self.len() == TextSize::from(4) {
+            2
+        } else {
+            1
+        }
+    }
+}
+
+impl LineIndex {
+    /// Builds the index by scanning `text` once, recording the offset of each
+    /// line start and every non-ASCII character per line.
+    pub fn new(text: &str) -> LineIndex {
+        let mut utf16_lines = FxHashMap::default();
+        let mut utf16_chars = Vec::new();
+
+        let mut newlines = vec![0.into()];
+        // `@` pattern: binds both counters to 0. `curr_row` is the absolute
+        // offset into `text`; `curr_col` is the offset within the current line.
+        let mut curr_row @ mut curr_col = 0.into();
+        let mut line = 0;
+        for c in text.chars() {
+            let c_len = TextSize::of(c);
+            curr_row += c_len;
+            if c == '\n' {
+                newlines.push(curr_row);
+
+                // Save any utf-16 characters seen in the previous line
+                if !utf16_chars.is_empty() {
+                    utf16_lines.insert(line, mem::take(&mut utf16_chars));
+                }
+
+                // Prepare for processing the next line
+                curr_col = 0.into();
+                line += 1;
+                continue;
+            }
+
+            if !c.is_ascii() {
+                utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len });
+            }
+
+            curr_col += c_len;
+        }
+
+        // Save any utf-16 characters seen in the last line
+        if !utf16_chars.is_empty() {
+            utf16_lines.insert(line, utf16_chars);
+        }
+
+        LineIndex { newlines, utf16_lines }
+    }
+
+    /// Maps a flat byte `offset` to a `(line, utf-8 column)` pair. The line is
+    +    /// found by binary search over the recorded line-start offsets.
+    pub fn line_col(&self, offset: TextSize) -> LineCol {
+        let line = self.newlines.partition_point(|&it| it <= offset) - 1;
+        let line_start_offset = self.newlines[line];
+        let col = offset - line_start_offset;
+        LineCol { line: line as u32, col: col.into() }
+    }
+
+    /// Maps a `(line, utf-8 column)` pair back to a flat byte offset.
+    /// Returns `None` when the line is out of range; note that the column is
+    /// not validated against the line's length.
+    pub fn offset(&self, line_col: LineCol) -> Option<TextSize> {
+        self.newlines
+            .get(line_col.line as usize)
+            .map(|offset| offset + TextSize::from(line_col.col))
+    }
+
+    /// Converts a UTF-8 column position to its UTF-16 equivalent.
+    pub fn to_utf16(&self, line_col: LineCol) -> LineColUtf16 {
+        let col = self.utf8_to_utf16_col(line_col.line, line_col.col.into());
+        LineColUtf16 { line: line_col.line, col: col as u32 }
+    }
+
+    /// Converts a UTF-16 column position to its UTF-8 equivalent.
+    pub fn to_utf8(&self, line_col: LineColUtf16) -> LineCol {
+        let col = self.utf16_to_utf8_col(line_col.line, line_col.col);
+        LineCol { line: line_col.line, col: col.into() }
+    }
+
+    /// Splits `range` at line boundaries, yielding one non-empty sub-range per
+    /// (partially) covered line.
+    pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
+        let lo = self.newlines.partition_point(|&it| it < range.start());
+        let hi = self.newlines.partition_point(|&it| it <= range.end());
+        let all = iter::once(range.start())
+            .chain(self.newlines[lo..hi].iter().copied())
+            .chain(iter::once(range.end()));
+
+        all.clone()
+            .zip(all.skip(1))
+            .map(|(lo, hi)| TextRange::new(lo, hi))
+            .filter(|it| !it.is_empty())
+    }
+
+    /// Shrinks a UTF-8 column to UTF-16 by subtracting, for every non-ASCII
+    /// character before `col`, the difference between its UTF-8 and UTF-16
+    /// lengths.
+    fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize {
+        let mut res: usize = col.into();
+        if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+            for c in utf16_chars {
+                if c.end <= col {
+                    res -= usize::from(c.len()) - c.len_utf16();
+                } else {
+                    // From here on, all utf16 characters come *after* the character we are mapping,
+                    // so we don't need to take them into account
+                    break;
+                }
+            }
+        }
+        res
+    }
+
+    /// Grows a UTF-16 column to UTF-8 by adding back the length difference of
+    /// every non-ASCII character that starts before the (moving) column.
+    fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
+        if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+            for c in utf16_chars {
+                if col > u32::from(c.start) {
+                    col += u32::from(c.len()) - c.len_utf16() as u32;
+                } else {
+                    // From here on, all utf16 characters come *after* the character we are mapping,
+                    // so we don't need to take them into account
+                    break;
+                }
+            }
+        }
+
+        col.into()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_line_index() {
+        let text = "hello\nworld";
+        // Each entry is (byte offset, expected line, expected col).
+        let table = [
+            (00, 0, 0),
+            (01, 0, 1),
+            (05, 0, 5),
+            (06, 1, 0),
+            (07, 1, 1),
+            (08, 1, 2),
+            (10, 1, 4),
+            (11, 1, 5),
+            (12, 1, 6),
+        ];
+
+        let index = LineIndex::new(text);
+        for &(offset, line, col) in &table {
+            assert_eq!(index.line_col(offset.into()), LineCol { line, col });
+        }
+
+        let text = "\nhello\nworld";
+        let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
+        let index = LineIndex::new(text);
+        for &(offset, line, col) in &table {
+            assert_eq!(index.line_col(offset.into()), LineCol { line, col });
+        }
+    }
+
+    // Sanity check of the assumption behind `Utf16Char::len_utf16`:
+    // a 3-byte UTF-8 character is a single UTF-16 unit.
+    #[test]
+    fn test_char_len() {
+        assert_eq!('メ'.len_utf8(), 3);
+        assert_eq!('メ'.len_utf16(), 1);
+    }
+
+    #[test]
+    fn test_empty_index() {
+        let col_index = LineIndex::new(
+            "
+const C: char = 'x';
+",
+        );
+        // ASCII-only text records no utf16 lines at all.
+        assert_eq!(col_index.utf16_lines.len(), 0);
+    }
+
+    #[test]
+    fn test_single_char() {
+        let col_index = LineIndex::new(
+            "
+const C: char = 'メ';
+",
+        );
+
+        assert_eq!(col_index.utf16_lines.len(), 1);
+        assert_eq!(col_index.utf16_lines[&1].len(), 1);
+        assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
+
+        // UTF-8 to UTF-16, no changes
+        assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
+
+        // UTF-8 to UTF-16
+        assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
+
+        // UTF-16 to UTF-8, no changes
+        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
+
+        // UTF-16 to UTF-8
+        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
+
+        // A 4-byte character ('𐐏') takes two UTF-16 units.
+        let col_index = LineIndex::new("a𐐏b");
+        assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
+    }
+
+    #[test]
+    fn test_string() {
+        let col_index = LineIndex::new(
+            "
+const C: char = \"メ メ\";
+",
+        );
+
+        assert_eq!(col_index.utf16_lines.len(), 1);
+        assert_eq!(col_index.utf16_lines[&1].len(), 2);
+        assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
+        assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });
+
+        // UTF-8 to UTF-16
+        assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
+
+        assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
+        assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);
+
+        // Lines without recorded utf16 characters convert unchanged.
+        assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);
+
+        // UTF-16 to UTF-8
+        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
+
+        // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
+        assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
+        assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
+        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24
+
+        assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
+    }
+
+    #[test]
+    fn test_splitlines() {
+        fn r(lo: u32, hi: u32) -> TextRange {
+            TextRange::new(lo.into(), hi.into())
+        }
+
+        let text = "a\nbb\nccc\n";
+        let line_index = LineIndex::new(text);
+
+        let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
+        let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
+        assert_eq!(actual, expected);
+
+        let text = "";
+        let line_index = LineIndex::new(text);
+
+        // An empty range yields no lines.
+        let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
+        let expected = vec![];
+        assert_eq!(actual, expected);
+
+        let text = "\n";
+        let line_index = LineIndex::new(text);
+
+        let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
+        let expected = vec![r(0, 1)];
+        assert_eq!(actual, expected)
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
new file mode 100644
index 000000000..40af9e6fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -0,0 +1,287 @@
+//! See [`PathTransform`].
+
+use crate::helpers::mod_path_to_ast;
+use either::Either;
+use hir::{AsAssocItem, HirDisplay, SemanticsScope};
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{self, AstNode},
+ ted, SyntaxNode,
+};
+
+/// `PathTransform` substitutes path in SyntaxNodes in bulk.
+///
+/// This is mostly useful for IDE code generation. If you paste some existing
+/// code into a new context (for example, to add method overrides to an `impl`
+/// block), you generally want to appropriately qualify the names, and sometimes
+/// you might want to substitute generic parameters as well:
+///
+/// ```
+/// mod x {
+/// pub struct A<V>;
+/// pub trait T<U> { fn foo(&self, _: U) -> A<U>; }
+/// }
+///
+/// mod y {
+/// use x::T;
+///
+/// impl T<()> for () {
+/// // If we invoke **Add Missing Members** here, we want to copy-paste `foo`.
+/// // But we want a slightly-modified version of it:
+/// fn foo(&self, _: ()) -> x::A<()> {}
+/// }
+/// }
+/// ```
+pub struct PathTransform<'a> {
+    /// The generic item (trait or function) whose type parameters are being
+    /// substituted.
+    generic_def: hir::GenericDef,
+    /// Syntactic type arguments, matched positionally against
+    /// `generic_def`'s type parameters.
+    substs: Vec<ast::Type>,
+    /// Scope the transformed code will be inserted into; paths are
+    /// (re)qualified so they resolve there.
+    target_scope: &'a SemanticsScope<'a>,
+    /// Scope the original code comes from; paths are resolved against it.
+    source_scope: &'a SemanticsScope<'a>,
+}
+
+impl<'a> PathTransform<'a> {
+    /// Transform for copying items from `trait_`'s definition into `impl_`,
+    /// substituting the impl's syntactic type arguments.
+    pub fn trait_impl(
+        target_scope: &'a SemanticsScope<'a>,
+        source_scope: &'a SemanticsScope<'a>,
+        trait_: hir::Trait,
+        impl_: ast::Impl,
+    ) -> PathTransform<'a> {
+        PathTransform {
+            source_scope,
+            target_scope,
+            generic_def: trait_.into(),
+            substs: get_syntactic_substs(impl_).unwrap_or_default(),
+        }
+    }
+
+    /// Transform for copying code out of `function`, substituting the type
+    /// arguments of a concrete call (`generic_arg_list`).
+    pub fn function_call(
+        target_scope: &'a SemanticsScope<'a>,
+        source_scope: &'a SemanticsScope<'a>,
+        function: hir::Function,
+        generic_arg_list: ast::GenericArgList,
+    ) -> PathTransform<'a> {
+        PathTransform {
+            source_scope,
+            target_scope,
+            generic_def: function.into(),
+            substs: get_type_args_from_arg_list(generic_arg_list).unwrap_or_default(),
+        }
+    }
+
+    /// Rewrites all paths inside `syntax` in place (via `ted`).
+    pub fn apply(&self, syntax: &SyntaxNode) {
+        self.build_ctx().apply(syntax)
+    }
+
+    /// Builds the substitution map from `generic_def`'s type parameters to
+    /// concrete `ast::Type`s, falling back to declared defaults where the
+    /// syntactic argument list is shorter than the parameter list.
+    fn build_ctx(&self) -> Ctx<'a> {
+        let db = self.source_scope.db;
+        let target_module = self.target_scope.module();
+        let source_module = self.source_scope.module();
+        let skip = match self.generic_def {
+            // this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky
+            hir::GenericDef::Trait(_) => 1,
+            _ => 0,
+        };
+        let substs_by_param: FxHashMap<_, _> = self
+            .generic_def
+            .type_params(db)
+            .into_iter()
+            .skip(skip)
+            // The actual list of trait type parameters may be longer than the one
+            // used in the `impl` block due to trailing default type parameters.
+            // For that case we extend the `substs` with an empty iterator so we
+            // can still hit those trailing values and check if they actually have
+            // a default type. If they do, go for that type from `hir` to `ast` so
+            // the resulting change can be applied correctly.
+            .zip(self.substs.iter().map(Some).chain(std::iter::repeat(None)))
+            .filter_map(|(k, v)| match k.split(db) {
+                Either::Left(_) => None,
+                Either::Right(t) => match v {
+                    Some(v) => Some((k, v.clone())),
+                    None => {
+                        let default = t.default(db)?;
+                        Some((
+                            k,
+                            ast::make::ty(
+                                &default.display_source_code(db, source_module.into()).ok()?,
+                            ),
+                        ))
+                    }
+                },
+            })
+            .collect();
+        Ctx { substs: substs_by_param, target_module, source_scope: self.source_scope }
+    }
+}
+
+/// The prepared state for one transformation pass: the parameter
+/// substitution map plus the scopes needed to resolve and re-qualify paths.
+struct Ctx<'a> {
+    substs: FxHashMap<hir::TypeOrConstParam, ast::Type>,
+    target_module: hir::Module,
+    source_scope: &'a SemanticsScope<'a>,
+}
+
+impl<'a> Ctx<'a> {
+    fn apply(&self, item: &SyntaxNode) {
+        // `transform_path` may update a node's parent and that would break the
+        // tree traversal. Thus all paths in the tree are collected into a vec
+        // so that such operation is safe.
+        let paths = item
+            .preorder()
+            .filter_map(|event| match event {
+                syntax::WalkEvent::Enter(_) => None,
+                syntax::WalkEvent::Leave(node) => Some(node),
+            })
+            .filter_map(ast::Path::cast)
+            .collect::<Vec<_>>();
+
+        for path in paths {
+            self.transform_path(path);
+        }
+    }
+    /// Rewrites a single unqualified path in place. Returns `None` when the
+    /// path is skipped (qualified paths, `Fn(..)` sugar, sole `self`, or
+    /// unresolvable paths).
+    fn transform_path(&self, path: ast::Path) -> Option<()> {
+        if path.qualifier().is_some() {
+            return None;
+        }
+        if path.segment().map_or(false, |s| {
+            s.param_list().is_some() || (s.self_token().is_some() && path.parent_path().is_none())
+        }) {
+            // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway
+            // don't try to qualify sole `self` either, they are usually locals, but are returned as modules due to namespace clashing
+            return None;
+        }
+
+        let resolution = self.source_scope.speculative_resolve(&path)?;
+
+        match resolution {
+            // A type parameter with a substitution: replace it with the
+            // substituted type, qualifying `<T as Trait>::Assoc` forms.
+            hir::PathResolution::TypeParam(tp) => {
+                if let Some(subst) = self.substs.get(&tp.merge()) {
+                    let parent = path.syntax().parent()?;
+                    if let Some(parent) = ast::Path::cast(parent.clone()) {
+                        // Path inside path means that there is an associated
+                        // type/constant on the type parameter. It is necessary
+                        // to fully qualify the type with `as Trait`. Even
+                        // though it might be unnecessary if `subst` is generic
+                        // type, always fully qualifying the path is safer
+                        // because of potential clash of associated types from
+                        // multiple traits
+
+                        let trait_ref = find_trait_for_assoc_item(
+                            self.source_scope,
+                            tp,
+                            parent.segment()?.name_ref()?,
+                        )
+                        .and_then(|trait_ref| {
+                            let found_path = self.target_module.find_use_path(
+                                self.source_scope.db.upcast(),
+                                hir::ModuleDef::Trait(trait_ref),
+                            )?;
+                            match ast::make::ty_path(mod_path_to_ast(&found_path)) {
+                                ast::Type::PathType(path_ty) => Some(path_ty),
+                                _ => None,
+                            }
+                        });
+
+                        let segment = ast::make::path_segment_ty(subst.clone(), trait_ref);
+                        let qualified =
+                            ast::make::path_from_segments(std::iter::once(segment), false);
+                        ted::replace(path.syntax(), qualified.clone_for_update().syntax());
+                    } else if let Some(path_ty) = ast::PathType::cast(parent) {
+                        ted::replace(
+                            path_ty.syntax(),
+                            subst.clone_subtree().clone_for_update().syntax(),
+                        );
+                    } else {
+                        ted::replace(
+                            path.syntax(),
+                            subst.clone_subtree().clone_for_update().syntax(),
+                        );
+                    }
+                }
+            }
+            // Any other (non-associated) definition: re-qualify the path so
+            // it resolves from `target_module`, keeping explicit generic args.
+            hir::PathResolution::Def(def) if def.as_assoc_item(self.source_scope.db).is_none() => {
+                if let hir::ModuleDef::Trait(_) = def {
+                    if matches!(path.segment()?.kind()?, ast::PathSegmentKind::Type { .. }) {
+                        // `speculative_resolve` resolves segments like `<T as
+                        // Trait>` into `Trait`, but just the trait name should
+                        // not be used as the replacement of the original
+                        // segment.
+                        return None;
+                    }
+                }
+
+                let found_path =
+                    self.target_module.find_use_path(self.source_scope.db.upcast(), def)?;
+                let res = mod_path_to_ast(&found_path).clone_for_update();
+                if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
+                    if let Some(segment) = res.segment() {
+                        let old = segment.get_or_create_generic_arg_list();
+                        ted::replace(old.syntax(), args.clone_subtree().syntax().clone_for_update())
+                    }
+                }
+                ted::replace(path.syntax(), res.syntax())
+            }
+            // Everything else is left untouched.
+            hir::PathResolution::Local(_)
+            | hir::PathResolution::ConstParam(_)
+            | hir::PathResolution::SelfType(_)
+            | hir::PathResolution::Def(_)
+            | hir::PathResolution::BuiltinAttr(_)
+            | hir::PathResolution::ToolModule(_)
+            | hir::PathResolution::DeriveHelper(_) => (),
+        }
+        Some(())
+    }
+}
+
+// FIXME: It would probably be nicer if we could get this via HIR (i.e. get the
+// trait ref, and then go from the types in the substs back to the syntax).
+/// Extracts the syntactic type arguments of the trait ref in `impl_def`
+/// (e.g. the `A, B` in `impl Trait<A, B> for S`).
+fn get_syntactic_substs(impl_def: ast::Impl) -> Option<Vec<ast::Type>> {
+    let target_trait = impl_def.trait_()?;
+    let path_type = match target_trait {
+        ast::Type::PathType(path) => path,
+        _ => return None,
+    };
+    let generic_arg_list = path_type.path()?.segment()?.generic_arg_list()?;
+
+    get_type_args_from_arg_list(generic_arg_list)
+}
+
+/// Collects the type arguments from a generic-arg list, ignoring
+/// lifetime/const arguments. Returns `None` if a type argument is missing
+/// its type.
+fn get_type_args_from_arg_list(generic_arg_list: ast::GenericArgList) -> Option<Vec<ast::Type>> {
+    let mut result = Vec::new();
+    for generic_arg in generic_arg_list.generic_args() {
+        if let ast::GenericArg::TypeArg(type_arg) = generic_arg {
+            result.push(type_arg.ty()?)
+        }
+    }
+
+    Some(result)
+}
+
+/// Finds, among `type_param`'s trait bounds, the trait declaring an
+/// associated type or const named `assoc_item`. Associated functions are
+/// deliberately not considered.
+fn find_trait_for_assoc_item(
+    scope: &SemanticsScope<'_>,
+    type_param: hir::TypeParam,
+    assoc_item: ast::NameRef,
+) -> Option<hir::Trait> {
+    let db = scope.db;
+    let trait_bounds = type_param.trait_bounds(db);
+
+    let assoc_item_name = assoc_item.text();
+
+    for trait_ in trait_bounds {
+        let names = trait_.items(db).into_iter().filter_map(|item| match item {
+            hir::AssocItem::TypeAlias(ta) => Some(ta.name(db)),
+            hir::AssocItem::Const(cst) => cst.name(db),
+            _ => None,
+        });
+
+        for name in names {
+            if assoc_item_name.as_str() == name.as_text()?.as_str() {
+                // It is fine to return the first match because in case of
+                // multiple possibilities, the exact trait must be disambiguated
+                // in the definition of trait being implemented, so this search
+                // should not be needed.
+                return Some(trait_);
+            }
+        }
+    }
+
+    None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
new file mode 100644
index 000000000..517fe3f24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -0,0 +1,540 @@
+//! Rename infrastructure for rust-analyzer. It is used primarily for the
+//! literal "rename" in the ide (look for tests there), but it is also available
+//! as a general-purpose service. For example, it is used by the fix for the
+//! "incorrect case" diagnostic.
+//!
+//! It leverages the [`crate::search`] functionality to find what needs to be
+//! renamed. The actual renames are tricky -- field shorthands need special
+//! attention, and, when renaming modules, you also want to rename files on the
+//! file system.
+//!
+//! Another can of worms are macros:
+//!
+//! ```ignore
+//! macro_rules! m { () => { fn f() {} } }
+//! m!();
+//! fn main() {
+//! f() // <- rename me
+//! }
+//! ```
+//!
+//! The correct behavior in such cases is probably to show a dialog to the user.
+//! Our current behavior is ¯\_(ツ)_/¯.
+use std::fmt;
+
+use base_db::{AnchoredPathBuf, FileId, FileRange};
+use either::Either;
+use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics};
+use stdx::never;
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SyntaxKind, TextRange, T,
+};
+use text_edit::{TextEdit, TextEditBuilder};
+
+use crate::{
+ defs::Definition,
+ search::FileReference,
+ source_change::{FileSystemEdit, SourceChange},
+ syntax_helpers::node_ext::expr_as_name_ref,
+ traits::convert_to_def_in_trait,
+ RootDatabase,
+};
+
+/// Local `Result` alias defaulting the error type to [`RenameError`].
+pub type Result<T, E = RenameError> = std::result::Result<T, E>;
+
+/// A human-readable error explaining why a rename cannot be performed.
+#[derive(Debug)]
+pub struct RenameError(pub String);
+
+impl fmt::Display for RenameError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&self.0, f)
+    }
+}
+
+// The macros are declared with a leading underscore and re-exported under
+// their public names below: `#[macro_export]` always places a macro at the
+// crate root, and this pattern lets callers import `format_err`/`bail` from
+// this module path instead.
+#[macro_export]
+macro_rules! _format_err {
+    ($fmt:expr) => { RenameError(format!($fmt)) };
+    ($fmt:expr, $($arg:tt)+) => { RenameError(format!($fmt, $($arg)+)) }
+}
+pub use _format_err as format_err;
+
+/// Early-returns a [`RenameError`] built with [`format_err!`].
+#[macro_export]
+macro_rules! _bail {
+    ($($tokens:tt)*) => { return Err(format_err!($($tokens)*)) }
+}
+pub use _bail as bail;
+
+impl Definition {
+    /// Renames this definition to `new_name`, returning the `SourceChange`
+    /// with all required text edits. Modules are special-cased (their rename
+    /// may also move files on disk); builtin types and `Self` cannot be
+    /// renamed.
+    pub fn rename(
+        &self,
+        sema: &Semantics<'_, RootDatabase>,
+        new_name: &str,
+    ) -> Result<SourceChange> {
+        match *self {
+            Definition::Module(module) => rename_mod(sema, module, new_name),
+            Definition::BuiltinType(_) => {
+                bail!("Cannot rename builtin type")
+            }
+            Definition::SelfType(_) => bail!("Cannot rename `Self`"),
+            def => rename_reference(sema, def, new_name),
+        }
+    }
+
+    /// Textual range of the identifier which will change when renaming this
+    /// `Definition`. Note that some definitions, like builtin types, can't be
+    /// renamed.
+    pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
+        let res = match self {
+            Definition::Macro(mac) => {
+                let src = mac.source(sema.db)?;
+                let name = match &src.value {
+                    Either::Left(it) => it.name()?,
+                    Either::Right(it) => it.name()?,
+                };
+                src.with_value(name.syntax()).original_file_range_opt(sema.db)
+            }
+            Definition::Field(field) => {
+                let src = field.source(sema.db)?;
+                match &src.value {
+                    FieldSource::Named(record_field) => {
+                        let name = record_field.name()?;
+                        src.with_value(name.syntax()).original_file_range_opt(sema.db)
+                    }
+                    // Tuple-struct fields have no name token to rename.
+                    FieldSource::Pos(_) => None,
+                }
+            }
+            Definition::Module(module) => {
+                let src = module.declaration_source(sema.db)?;
+                let name = src.value.name()?;
+                src.with_value(name.syntax()).original_file_range_opt(sema.db)
+            }
+            Definition::Function(it) => name_range(it, sema),
+            Definition::Adt(adt) => match adt {
+                hir::Adt::Struct(it) => name_range(it, sema),
+                hir::Adt::Union(it) => name_range(it, sema),
+                hir::Adt::Enum(it) => name_range(it, sema),
+            },
+            Definition::Variant(it) => name_range(it, sema),
+            Definition::Const(it) => name_range(it, sema),
+            Definition::Static(it) => name_range(it, sema),
+            Definition::Trait(it) => name_range(it, sema),
+            Definition::TypeAlias(it) => name_range(it, sema),
+            Definition::Local(local) => {
+                let src = local.source(sema.db);
+                let name = match &src.value {
+                    Either::Left(bind_pat) => bind_pat.name()?,
+                    Either::Right(_) => return None,
+                };
+                src.with_value(name.syntax()).original_file_range_opt(sema.db)
+            }
+            Definition::GenericParam(generic_param) => match generic_param {
+                hir::GenericParam::LifetimeParam(lifetime_param) => {
+                    let src = lifetime_param.source(sema.db)?;
+                    src.with_value(src.value.lifetime()?.syntax()).original_file_range_opt(sema.db)
+                }
+                _ => {
+                    let x = match generic_param {
+                        hir::GenericParam::TypeParam(it) => it.merge(),
+                        hir::GenericParam::ConstParam(it) => it.merge(),
+                        hir::GenericParam::LifetimeParam(_) => return None,
+                    };
+                    let src = x.source(sema.db)?;
+                    let name = match &src.value {
+                        Either::Left(x) => x.name()?,
+                        Either::Right(_) => return None,
+                    };
+                    src.with_value(name.syntax()).original_file_range_opt(sema.db)
+                }
+            },
+            Definition::Label(label) => {
+                let src = label.source(sema.db);
+                let lifetime = src.value.lifetime()?;
+                src.with_value(lifetime.syntax()).original_file_range_opt(sema.db)
+            }
+            Definition::BuiltinType(_) => return None,
+            Definition::SelfType(_) => return None,
+            Definition::BuiltinAttr(_) => return None,
+            Definition::ToolModule(_) => return None,
+            // FIXME: This should be doable in theory
+            Definition::DeriveHelper(_) => return None,
+        };
+        return res;
+
+        // Shared helper: the range of the `name` token of any `HasSource`
+        // definition whose AST node has a name.
+        fn name_range<D>(def: D, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange>
+        where
+            D: HasSource,
+            D::Ast: ast::HasName,
+        {
+            let src = def.source(sema.db)?;
+            let name = src.value.name()?;
+            src.with_value(name.syntax()).original_file_range_opt(sema.db)
+        }
+    }
+}
+
+/// Computes the `SourceChange` for renaming `module` to `new_name`: renames
+/// the module's file/directory on disk where applicable, and edits the `mod`
+/// declaration plus every reference. `new_name` must be a plain identifier.
+fn rename_mod(
+    sema: &Semantics<'_, RootDatabase>,
+    module: hir::Module,
+    new_name: &str,
+) -> Result<SourceChange> {
+    if IdentifierKind::classify(new_name)? != IdentifierKind::Ident {
+        bail!("Invalid name `{0}`: cannot rename module to {0}", new_name);
+    }
+
+    let mut source_change = SourceChange::default();
+
+    // The crate root has no declaration and no movable file: nothing to do.
+    if module.is_crate_root(sema.db) {
+        return Ok(source_change);
+    }
+
+    let InFile { file_id, value: def_source } = module.definition_source(sema.db);
+    if let ModuleSource::SourceFile(..) = def_source {
+        let anchor = file_id.original_file(sema.db);
+
+        let is_mod_rs = module.is_mod_rs(sema.db);
+        let has_detached_child = module.children(sema.db).any(|child| !child.is_inline(sema.db));
+
+        // Module exists in a named file
+        if !is_mod_rs {
+            let path = format!("{}.rs", new_name);
+            let dst = AnchoredPathBuf { anchor, path };
+            source_change.push_file_system_edit(FileSystemEdit::MoveFile { src: anchor, dst })
+        }
+
+        // Rename the dir if:
+        //  - Module source is in mod.rs
+        //  - Module has submodules defined in separate files
+        let dir_paths = match (is_mod_rs, has_detached_child, module.name(sema.db)) {
+            // Go up one level since the anchor is inside the dir we're trying to rename
+            (true, _, Some(mod_name)) => {
+                Some((format!("../{}", mod_name), format!("../{}", new_name)))
+            }
+            // The anchor is on the same level as target dir
+            (false, true, Some(mod_name)) => Some((mod_name.to_string(), new_name.to_string())),
+            _ => None,
+        };
+
+        if let Some((src, dst)) = dir_paths {
+            let src = AnchoredPathBuf { anchor, path: src };
+            let dst = AnchoredPathBuf { anchor, path: dst };
+            source_change.push_file_system_edit(FileSystemEdit::MoveDir {
+                src,
+                src_id: anchor,
+                dst,
+            })
+        }
+    }
+
+    // Edit the `mod foo;` declaration itself, if one exists.
+    if let Some(src) = module.declaration_source(sema.db) {
+        let file_id = src.file_id.original_file(sema.db);
+        match src.value.name() {
+            Some(name) => {
+                if let Some(file_range) =
+                    src.with_value(name.syntax()).original_file_range_opt(sema.db)
+                {
+                    source_change.insert_source_edit(
+                        file_id,
+                        TextEdit::replace(file_range.range, new_name.to_string()),
+                    )
+                };
+            }
+            _ => never!("Module source node is missing a name"),
+        }
+    }
+
+    // Finally, rewrite all usages of the module.
+    let def = Definition::Module(module);
+    let usages = def.usages(sema).all();
+    let ref_edits = usages.iter().map(|(&file_id, references)| {
+        (file_id, source_edit_from_references(references, def, new_name))
+    });
+    source_change.extend(ref_edits);
+
+    Ok(source_change)
+}
+
/// Renames a non-module definition: validates that `new_name` is the right
/// kind of identifier for `def`, then produces edits for every usage and for
/// the definition site itself.
fn rename_reference(
    sema: &Semantics<'_, RootDatabase>,
    def: Definition,
    new_name: &str,
) -> Result<SourceChange> {
    let ident_kind = IdentifierKind::classify(new_name)?;

    if matches!(
        def,
        Definition::GenericParam(hir::GenericParam::LifetimeParam(_)) | Definition::Label(_)
    ) {
        // Lifetimes and labels may only be renamed to lifetime identifiers.
        match ident_kind {
            IdentifierKind::Ident | IdentifierKind::Underscore => {
                cov_mark::hit!(rename_not_a_lifetime_ident_ref);
                bail!("Invalid name `{}`: not a lifetime identifier", new_name);
            }
            IdentifierKind::Lifetime => cov_mark::hit!(rename_lifetime),
        }
    } else {
        // Everything else may only be renamed to a plain identifier or `_`.
        match ident_kind {
            IdentifierKind::Lifetime => {
                cov_mark::hit!(rename_not_an_ident_ref);
                bail!("Invalid name `{}`: not an identifier", new_name);
            }
            IdentifierKind::Ident => cov_mark::hit!(rename_non_local),
            IdentifierKind::Underscore => (),
        }
    }

    // Resolve trait-impl assoc items to their trait-definition counterpart so
    // the rename covers the trait item and all implementations consistently.
    let def = convert_to_def_in_trait(sema.db, def);
    let usages = def.usages(sema).all();

    // Renaming to `_` is only permitted when nothing references the definition.
    if !usages.is_empty() && ident_kind == IdentifierKind::Underscore {
        cov_mark::hit!(rename_underscore_multiple);
        bail!("Cannot rename reference to `_` as it is being referenced multiple times");
    }
    let mut source_change = SourceChange::default();
    source_change.extend(usages.iter().map(|(&file_id, references)| {
        (file_id, source_edit_from_references(references, def, new_name))
    }));

    let mut insert_def_edit = |def| {
        let (file_id, edit) = source_edit_from_def(sema, def, new_name)?;
        source_change.insert_source_edit(file_id, edit);
        Ok(())
    };
    match def {
        // A local can have multiple binding sites (`associated_locals`);
        // emit a definition edit for each of them.
        Definition::Local(l) => l
            .associated_locals(sema.db)
            .iter()
            .try_for_each(|&local| insert_def_edit(Definition::Local(local))),
        def => insert_def_edit(def),
    }?;
    Ok(source_change)
}
+
/// Builds a single [`TextEdit`] that replaces all `references` within one file
/// with `new_name`, applying shorthand-aware rewrites where possible and
/// falling back to plain text replacement otherwise.
pub fn source_edit_from_references(
    references: &[FileReference],
    def: Definition,
    new_name: &str,
) -> TextEdit {
    let mut edit = TextEdit::builder();
    // macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
    let mut edited_ranges = Vec::new();
    for &FileReference { range, ref name, .. } in references {
        let name_range = name.syntax().text_range();
        if name_range.len() != range.len() {
            // This usage comes from a different token kind that was downmapped to a NameLike in a macro
            // Renaming this will most likely break things syntax-wise
            continue;
        }
        let has_emitted_edit = match name {
            // if the ranges differ then the node is inside a macro call, we can't really attempt
            // to make special rewrites like shorthand syntax and such, so just rename the node in
            // the macro input
            ast::NameLike::NameRef(name_ref) if name_range == range => {
                source_edit_from_name_ref(&mut edit, name_ref, new_name, def)
            }
            ast::NameLike::Name(name) if name_range == range => {
                source_edit_from_name(&mut edit, name, new_name)
            }
            _ => false,
        };
        if !has_emitted_edit {
            // Plain replacement, deduplicated by range start so the same span
            // is never edited twice.
            if !edited_ranges.contains(&range.start()) {
                edit.replace(range, new_name.to_string());
                edited_ranges.push(range.start());
            }
        }
    }

    edit.finish()
}
+
+fn source_edit_from_name(edit: &mut TextEditBuilder, name: &ast::Name, new_name: &str) -> bool {
+ if ast::RecordPatField::for_field_name(name).is_some() {
+ if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
+ cov_mark::hit!(rename_record_pat_field_name_split);
+ // Foo { ref mut field } -> Foo { new_name: ref mut field }
+ // ^ insert `new_name: `
+
+ // FIXME: instead of splitting the shorthand, recursively trigger a rename of the
+ // other name https://github.com/rust-lang/rust-analyzer/issues/6547
+ edit.insert(ident_pat.syntax().text_range().start(), format!("{}: ", new_name));
+ return true;
+ }
+ }
+
+ false
+}
+
/// Emits shorthand-aware edits for renaming an `ast::NameRef`. Returns `true`
/// if an edit was emitted (or the reference must not be text-replaced),
/// `false` if the caller should fall back to plain text replacement.
fn source_edit_from_name_ref(
    edit: &mut TextEditBuilder,
    name_ref: &ast::NameRef,
    new_name: &str,
    def: Definition,
) -> bool {
    // A `super` path segment cannot be renamed; report it as handled so that
    // no fallback text replacement is attempted.
    if name_ref.super_token().is_some() {
        return true;
    }

    if let Some(record_field) = ast::RecordExprField::for_name_ref(name_ref) {
        let rcf_name_ref = record_field.name_ref();
        let rcf_expr = record_field.expr();
        match &(rcf_name_ref, rcf_expr.and_then(|it| expr_as_name_ref(&it))) {
            // field: init-expr, check if we can use a field init shorthand
            (Some(field_name), Some(init)) => {
                if field_name == name_ref {
                    if init.text() == new_name {
                        cov_mark::hit!(test_rename_field_put_init_shorthand);
                        // Foo { field: local } -> Foo { local }
                        //       ^^^^^^^ delete this

                        // same names, we can use a shorthand here instead.
                        // we do not want to erase attributes hence this range start
                        let s = field_name.syntax().text_range().start();
                        let e = init.syntax().text_range().start();
                        edit.delete(TextRange::new(s, e));
                        return true;
                    }
                } else if init == name_ref {
                    if field_name.text() == new_name {
                        cov_mark::hit!(test_rename_local_put_init_shorthand);
                        // Foo { field: local } -> Foo { field }
                        //            ^^^^^^^ delete this

                        // same names, we can use a shorthand here instead.
                        // we do not want to erase attributes hence this range start
                        let s = field_name.syntax().text_range().end();
                        let e = init.syntax().text_range().end();
                        edit.delete(TextRange::new(s, e));
                        return true;
                    }
                }
            }
            // init shorthand
            (None, Some(_)) if matches!(def, Definition::Field(_)) => {
                cov_mark::hit!(test_rename_field_in_field_shorthand);
                // Foo { field } -> Foo { new_name: field }
                //       ^ insert `new_name: `
                let offset = name_ref.syntax().text_range().start();
                edit.insert(offset, format!("{}: ", new_name));
                return true;
            }
            (None, Some(_)) if matches!(def, Definition::Local(_)) => {
                cov_mark::hit!(test_rename_local_in_field_shorthand);
                // Foo { field } -> Foo { field: new_name }
                //            ^ insert `: new_name`
                let offset = name_ref.syntax().text_range().end();
                edit.insert(offset, format!(": {}", new_name));
                return true;
            }
            _ => (),
        }
    } else if let Some(record_field) = ast::RecordPatField::for_field_name_ref(name_ref) {
        let rcf_name_ref = record_field.name_ref();
        let rcf_pat = record_field.pat();
        match (rcf_name_ref, rcf_pat) {
            // field: rename
            (Some(field_name), Some(ast::Pat::IdentPat(pat)))
                if field_name == *name_ref && pat.at_token().is_none() =>
            {
                // field name is being renamed
                if let Some(name) = pat.name() {
                    if name.text() == new_name {
                        cov_mark::hit!(test_rename_field_put_init_shorthand_pat);
                        // Foo { field: ref mut local } -> Foo { ref mut field }
                        //       ^^^^^^^ delete this
                        //                      ^^^^^ replace this with `field`

                        // same names, we can use a shorthand here instead/
                        // we do not want to erase attributes hence this range start
                        let s = field_name.syntax().text_range().start();
                        let e = pat.syntax().text_range().start();
                        edit.delete(TextRange::new(s, e));
                        edit.replace(name.syntax().text_range(), new_name.to_string());
                        return true;
                    }
                }
            }
            _ => (),
        }
    }
    false
}
+
/// Builds the rename edit for the definition site itself, handling the record
/// pattern shorthand special cases for locals.
fn source_edit_from_def(
    sema: &Semantics<'_, RootDatabase>,
    def: Definition,
    new_name: &str,
) -> Result<(FileId, TextEdit)> {
    let FileRange { file_id, range } = def
        .range_for_rename(sema)
        .ok_or_else(|| format_err!("No identifier available to rename"))?;

    let mut edit = TextEdit::builder();
    if let Definition::Local(local) = def {
        if let Either::Left(pat) = local.source(sema.db).value {
            // special cases required for renaming fields/locals in Record patterns
            if let Some(pat_field) = pat.syntax().parent().and_then(ast::RecordPatField::cast) {
                let name_range = pat.name().unwrap().syntax().text_range();
                if let Some(name_ref) = pat_field.name_ref() {
                    if new_name == name_ref.text() && pat.at_token().is_none() {
                        // Foo { field: ref mut local } -> Foo { ref mut field }
                        //       ^^^^^^ delete this
                        //                      ^^^^^ replace this with `field`
                        cov_mark::hit!(test_rename_local_put_init_shorthand_pat);
                        edit.delete(
                            name_ref
                                .syntax()
                                .text_range()
                                .cover_offset(pat.syntax().text_range().start()),
                        );
                        edit.replace(name_range, name_ref.text().to_string());
                    } else {
                        // Foo { field: ref mut local @ local 2} -> Foo { field: ref mut new_name @ local2 }
                        // Foo { field: ref mut local } -> Foo { field: ref mut new_name }
                        //                      ^^^^^ replace this with `new_name`
                        edit.replace(name_range, new_name.to_string());
                    }
                } else {
                    // Foo { ref mut field } -> Foo { field: ref mut new_name }
                    //       ^ insert `field: `
                    //               ^^^^^ replace this with `new_name`
                    edit.insert(
                        pat.syntax().text_range().start(),
                        format!("{}: ", pat_field.field_name().unwrap()),
                    );
                    edit.replace(name_range, new_name.to_string());
                }
            }
        }
    }
    // No special case applied: plain replacement of the definition identifier.
    if edit.is_empty() {
        edit.replace(range, new_name.to_string());
    }
    Ok((file_id, edit.finish()))
}
+
/// The syntactic category of a candidate rename target name.
///
/// Equality on this enum is total, so `Eq` is derived alongside `PartialEq`
/// (fixes clippy's `derive_partial_eq_without_eq`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum IdentifierKind {
    /// A plain identifier, e.g. `foo`.
    Ident,
    /// A lifetime identifier, e.g. `'foo`.
    Lifetime,
    /// The wildcard `_`.
    Underscore,
}
+
impl IdentifierKind {
    /// Classifies `new_name` by lexing it as a single token.
    ///
    /// Returns an error unless the string is exactly one identifier, `_`, or
    /// a renameable lifetime; `'static` and `'_` are explicitly rejected.
    pub fn classify(new_name: &str) -> Result<IdentifierKind> {
        match parser::LexedStr::single_token(new_name) {
            Some(res) => match res {
                (SyntaxKind::IDENT, _) => Ok(IdentifierKind::Ident),
                (T![_], _) => Ok(IdentifierKind::Underscore),
                (SyntaxKind::LIFETIME_IDENT, _) if new_name != "'static" && new_name != "'_" => {
                    Ok(IdentifierKind::Lifetime)
                }
                (SyntaxKind::LIFETIME_IDENT, _) => {
                    bail!("Invalid name `{}`: not a lifetime identifier", new_name)
                }
                // Lexed, but with an error attached to the token.
                (_, Some(syntax_error)) => bail!("Invalid name `{}`: {}", new_name, syntax_error),
                (_, None) => bail!("Invalid name `{}`: not an identifier", new_name),
            },
            // Did not lex as a single token at all.
            None => bail!("Invalid name `{}`: not an identifier", new_name),
        }
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
new file mode 100644
index 000000000..e27e23867
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
@@ -0,0 +1,34 @@
+//! Rustdoc specific doc comment handling
+
+// stripped down version of https://github.com/rust-lang/rust/blob/392ba2ba1a7d6c542d2459fb8133bebf62a4a423/src/librustdoc/html/markdown.rs#L810-L933
/// Decides whether a markdown code-fence info string denotes a Rust code
/// block, mirroring rustdoc's tag handling (stripped down).
pub fn is_rust_fence(s: &str) -> bool {
    let mut rust = false;
    let mut other = false;

    let tokens = s
        .trim()
        .split(|c: char| matches!(c, ',' | ' ' | '\t'))
        .map(str::trim)
        .filter(|t| !t.is_empty());

    for tok in tokens {
        if tok == "rust" {
            rust = true;
        } else if matches!(tok, "should_panic" | "no_run" | "ignore" | "allow_fail") {
            rust = !other;
        } else if matches!(tok, "test_harness" | "compile_fail") {
            rust = rust || !other;
        } else if tok.starts_with("edition") {
            // Edition specifiers never affect the classification.
        } else if tok.len() == 5 && tok.starts_with('E') && tok[1..].parse::<u32>().is_ok() {
            // A well-formed error code (`E0123`) counts as a Rust tag.
            rust = rust || !other;
        } else {
            other = true;
        }
    }

    rust || !other
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
new file mode 100644
index 000000000..bd038cdaa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -0,0 +1,785 @@
+//! Implementation of find-usages functionality.
+//!
+//! It is based on the standard ide trick: first, we run a fast text search to
+//! get a super-set of matches. Then, we we confirm each match using precise
+//! name resolution.
+
+use std::{convert::TryInto, mem, sync::Arc};
+
+use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
+use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
+use once_cell::unsync::Lazy;
+use rustc_hash::FxHashMap;
+use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
+
+use crate::{
+ defs::{Definition, NameClass, NameRefClass},
+ traits::{as_trait_assoc_def, convert_to_def_in_trait},
+ RootDatabase,
+};
+
/// The result of a usage search: all found references, grouped by file.
#[derive(Debug, Default, Clone)]
pub struct UsageSearchResult {
    pub references: FxHashMap<FileId, Vec<FileReference>>,
}
+
+impl UsageSearchResult {
+ pub fn is_empty(&self) -> bool {
+ self.references.is_empty()
+ }
+
+ pub fn len(&self) -> usize {
+ self.references.len()
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = (&FileId, &[FileReference])> + '_ {
+ self.references.iter().map(|(file_id, refs)| (file_id, &**refs))
+ }
+
+ pub fn file_ranges(&self) -> impl Iterator<Item = FileRange> + '_ {
+ self.references.iter().flat_map(|(&file_id, refs)| {
+ refs.iter().map(move |&FileReference { range, .. }| FileRange { file_id, range })
+ })
+ }
+}
+
impl IntoIterator for UsageSearchResult {
    type Item = (FileId, Vec<FileReference>);
    type IntoIter = <FxHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;

    /// Consumes the result, yielding `(file, references)` pairs.
    fn into_iter(self) -> Self::IntoIter {
        self.references.into_iter()
    }
}
+
/// A single reference found by a usage search.
#[derive(Debug, Clone)]
pub struct FileReference {
    /// The range of the reference in the original file
    pub range: TextRange,
    /// The node of the reference in the (macro-)file
    pub name: ast::NameLike,
    /// How the reference accesses the definition (read/write), if known.
    pub category: Option<ReferenceCategory>,
}
+
/// Classifies how a reference uses the referenced definition.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ReferenceCategory {
    // FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
    // Create
    /// The reference writes to the definition (e.g. sits on the end of an
    /// assignment's left-hand side).
    Write,
    /// The reference reads the definition.
    Read,
    // FIXME: Some day should be able to search in doc comments. Would probably
    // need to switch from enum to bitflags then?
    // DocComment
}
+
/// Generally, `search_scope` returns files that might contain references for the element.
/// For `pub(crate)` things it's a crate, for `pub` things it's a crate and dependant crates.
/// In some cases, the location of the references is known to within a `TextRange`,
/// e.g. for things like local variables.
#[derive(Clone, Debug)]
pub struct SearchScope {
    // `None` means "search the whole file"; `Some(range)` restricts the
    // search to that range within the file.
    entries: FxHashMap<FileId, Option<TextRange>>,
}
+
impl SearchScope {
    /// Builds a scope directly from the given `(file, restriction)` entries.
    fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope {
        SearchScope { entries }
    }

    /// Build a search scope spanning the entire crate graph of files.
    fn crate_graph(db: &RootDatabase) -> SearchScope {
        let mut entries = FxHashMap::default();

        let graph = db.crate_graph();
        for krate in graph.iter() {
            let root_file = graph[krate].root_file_id;
            let source_root_id = db.file_source_root(root_file);
            let source_root = db.source_root(source_root_id);
            entries.extend(source_root.iter().map(|id| (id, None)));
        }
        SearchScope { entries }
    }

    /// Build a search scope spanning all the reverse dependencies of the given crate.
    fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
        let mut entries = FxHashMap::default();
        for rev_dep in of.transitive_reverse_dependencies(db) {
            let root_file = rev_dep.root_file(db);
            let source_root_id = db.file_source_root(root_file);
            let source_root = db.source_root(source_root_id);
            entries.extend(source_root.iter().map(|id| (id, None)));
        }
        SearchScope { entries }
    }

    /// Build a search scope spanning the given crate.
    fn krate(db: &RootDatabase, of: hir::Crate) -> SearchScope {
        let root_file = of.root_file(db);
        let source_root_id = db.file_source_root(root_file);
        let source_root = db.source_root(source_root_id);
        SearchScope {
            entries: source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>(),
        }
    }

    /// Build a search scope spanning the given module and all its submodules.
    fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
        let mut entries = FxHashMap::default();

        let (file_id, range) = {
            let InFile { file_id, value } = module.definition_source(db);
            // NOTE(review): `original_call_node` returning `Some` presumably
            // means the module was emitted by a macro — restrict to the call
            // site in that case; confirm against `hir` docs.
            if let Some((file_id, call_source)) = file_id.original_call_node(db) {
                (file_id, Some(call_source.text_range()))
            } else {
                (
                    file_id.original_file(db),
                    match value {
                        ModuleSource::SourceFile(_) => None,
                        ModuleSource::Module(it) => Some(it.syntax().text_range()),
                        ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()),
                    },
                )
            }
        };
        entries.insert(file_id, range);

        // Walk all transitive children, adding the whole file of each
        // out-of-line (file-backed) submodule.
        let mut to_visit: Vec<_> = module.children(db).collect();
        while let Some(module) = to_visit.pop() {
            if let InFile { file_id, value: ModuleSource::SourceFile(_) } =
                module.definition_source(db)
            {
                entries.insert(file_id.original_file(db), None);
            }
            to_visit.extend(module.children(db));
        }
        SearchScope { entries }
    }

    /// Build an empty search scope.
    pub fn empty() -> SearchScope {
        SearchScope::new(FxHashMap::default())
    }

    /// Build a empty search scope spanning the given file.
    pub fn single_file(file: FileId) -> SearchScope {
        SearchScope::new(std::iter::once((file, None)).collect())
    }

    /// Build a empty search scope spanning the text range of the given file.
    pub fn file_range(range: FileRange) -> SearchScope {
        SearchScope::new(std::iter::once((range.file_id, Some(range.range))).collect())
    }

    /// Build a empty search scope spanning the given files.
    pub fn files(files: &[FileId]) -> SearchScope {
        SearchScope::new(files.iter().map(|f| (*f, None)).collect())
    }

    /// Intersects two scopes file-by-file. A `None` range acts as "whole
    /// file", i.e. the identity for range intersection.
    pub fn intersection(&self, other: &SearchScope) -> SearchScope {
        // Iterate the smaller map, probing the larger one.
        let (mut small, mut large) = (&self.entries, &other.entries);
        if small.len() > large.len() {
            mem::swap(&mut small, &mut large)
        }

        let intersect_ranges =
            |r1: Option<TextRange>, r2: Option<TextRange>| -> Option<Option<TextRange>> {
                match (r1, r2) {
                    (None, r) | (r, None) => Some(r),
                    (Some(r1), Some(r2)) => r1.intersect(r2).map(Some),
                }
            };
        let res = small
            .iter()
            .filter_map(|(&file_id, &r1)| {
                let &r2 = large.get(&file_id)?;
                let r = intersect_ranges(r1, r2)?;
                Some((file_id, r))
            })
            .collect();

        SearchScope::new(res)
    }
}
+
impl IntoIterator for SearchScope {
    type Item = (FileId, Option<TextRange>);
    type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>;

    /// Consumes the scope, yielding its `(file, restriction)` entries.
    fn into_iter(self) -> Self::IntoIter {
        self.entries.into_iter()
    }
}
+
impl Definition {
    /// Picks the smallest set of files (and ranges within them) that can
    /// possibly contain references to `self`, based on its kind and
    /// visibility.
    fn search_scope(&self, db: &RootDatabase) -> SearchScope {
        let _p = profile::span("search_scope");

        // Built-in types are nameable from anywhere.
        if let Definition::BuiltinType(_) = self {
            return SearchScope::crate_graph(db);
        }

        // def is crate root
        // FIXME: We don't do searches for crates currently, as a crate does not actually have a single name
        if let &Definition::Module(module) = self {
            if module.is_crate_root(db) {
                return SearchScope::reverse_dependencies(db, module.krate());
            }
        }

        let module = match self.module(db) {
            Some(it) => it,
            None => return SearchScope::empty(),
        };
        let InFile { file_id, value: module_source } = module.definition_source(db);
        let file_id = file_id.original_file(db);

        // Locals are only visible within their enclosing body.
        if let Definition::Local(var) = self {
            let def = match var.parent(db) {
                DefWithBody::Function(f) => f.source(db).map(|src| src.syntax().cloned()),
                DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
                DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
            };
            return match def {
                Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
                None => SearchScope::single_file(file_id),
            };
        }

        // `Self` types are only visible within their impl block.
        if let Definition::SelfType(impl_) = self {
            return match impl_.source(db).map(|src| src.syntax().cloned()) {
                Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
                None => SearchScope::single_file(file_id),
            };
        }

        // Lifetime params are only visible within their generic-def owner.
        if let Definition::GenericParam(hir::GenericParam::LifetimeParam(param)) = self {
            let def = match param.parent(db) {
                hir::GenericDef::Function(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Adt(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Trait(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Variant(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()),
            };
            return match def {
                Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
                None => SearchScope::single_file(file_id),
            };
        }

        // Macros follow their own visibility rules (`macro_export`, built-in,
        // proc-macro) rather than item visibility.
        if let Definition::Macro(macro_def) = self {
            return match macro_def.kind(db) {
                hir::MacroKind::Declarative => {
                    if macro_def.attrs(db).by_key("macro_export").exists() {
                        SearchScope::reverse_dependencies(db, module.krate())
                    } else {
                        SearchScope::krate(db, module.krate())
                    }
                }
                hir::MacroKind::BuiltIn => SearchScope::crate_graph(db),
                hir::MacroKind::Derive | hir::MacroKind::Attr | hir::MacroKind::ProcMacro => {
                    SearchScope::reverse_dependencies(db, module.krate())
                }
            };
        }

        if let Definition::DeriveHelper(_) = self {
            return SearchScope::reverse_dependencies(db, module.krate());
        }

        // Everything else: derive the scope from the item's visibility.
        let vis = self.visibility(db);
        if let Some(Visibility::Public) = vis {
            return SearchScope::reverse_dependencies(db, module.krate());
        }
        if let Some(Visibility::Module(module)) = vis {
            return SearchScope::module_and_children(db, module.into());
        }

        // Default: restrict to the defining module's own text range.
        let range = match module_source {
            ModuleSource::Module(m) => Some(m.syntax().text_range()),
            ModuleSource::BlockExpr(b) => Some(b.syntax().text_range()),
            ModuleSource::SourceFile(_) => None,
        };
        match range {
            Some(range) => SearchScope::file_range(FileRange { file_id, range }),
            None => SearchScope::single_file(file_id),
        }
    }

    /// Creates a [`FindUsages`] search builder for this definition.
    pub fn usages<'a>(self, sema: &'a Semantics<'_, RootDatabase>) -> FindUsages<'a> {
        FindUsages {
            local_repr: match self {
                Definition::Local(local) => Some(local.representative(sema.db)),
                _ => None,
            },
            def: self,
            trait_assoc_def: as_trait_assoc_def(sema.db, self),
            sema,
            scope: None,
            include_self_kw_refs: None,
            search_self_mod: false,
        }
    }
}
+
/// A configurable usage search, created via [`Definition::usages`].
#[derive(Clone)]
pub struct FindUsages<'a> {
    def: Definition,
    /// If def is an assoc item from a trait or trait impl, this is the corresponding item of the trait definition
    trait_assoc_def: Option<Definition>,
    sema: &'a Semantics<'a, RootDatabase>,
    /// Optional user-supplied restriction of the search scope.
    scope: Option<SearchScope>,
    /// When set, `Self` references resolving to this type are reported too.
    include_self_kw_refs: Option<hir::Type>,
    /// For locals: the representative binding, used to match all binding sites.
    local_repr: Option<hir::Local>,
    /// Whether to also search for `self` module references.
    search_self_mod: bool,
}
+
impl<'a> FindUsages<'a> {
    /// Enable searching for `Self` when the definition is a type or `self` for modules.
    pub fn include_self_refs(mut self) -> FindUsages<'a> {
        self.include_self_kw_refs = def_to_ty(self.sema, &self.def);
        self.search_self_mod = true;
        self
    }

    /// Limit the search to a given [`SearchScope`].
    pub fn in_scope(self, scope: SearchScope) -> FindUsages<'a> {
        self.set_scope(Some(scope))
    }

    /// Limit the search to a given [`SearchScope`].
    pub fn set_scope(mut self, scope: Option<SearchScope>) -> FindUsages<'a> {
        // A scope may only be set once.
        assert!(self.scope.is_none());
        self.scope = scope;
        self
    }

    /// Returns `true` if at least one usage exists; stops at the first hit.
    pub fn at_least_one(&self) -> bool {
        let mut found = false;
        self.search(&mut |_, _| {
            found = true;
            // Returning `true` from the sink aborts the search.
            true
        });
        found
    }

    /// Collects every usage into a [`UsageSearchResult`].
    pub fn all(self) -> UsageSearchResult {
        let mut res = UsageSearchResult::default();
        self.search(&mut |file_id, reference| {
            res.references.entry(file_id).or_default().push(reference);
            false
        });
        res
    }

    /// Core search loop: text-scan candidate files for the definition's name,
    /// then confirm each hit via name resolution. `sink` receives each
    /// confirmed reference and returns `true` to abort the search.
    fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
        let _p = profile::span("FindUsages:search");
        let sema = self.sema;

        let search_scope = {
            let base = self.trait_assoc_def.unwrap_or(self.def).search_scope(sema.db);
            match &self.scope {
                None => base,
                Some(scope) => base.intersection(scope),
            }
        };

        let name = match self.def {
            // special case crate modules as these do not have a proper name
            Definition::Module(module) if module.is_crate_root(self.sema.db) => {
                // FIXME: This assumes the crate name is always equal to its display name when it really isn't
                module
                    .krate()
                    .display_name(self.sema.db)
                    .map(|crate_name| crate_name.crate_name().as_smol_str().clone())
            }
            _ => {
                let self_kw_refs = || {
                    self.include_self_kw_refs.as_ref().and_then(|ty| {
                        ty.as_adt()
                            .map(|adt| adt.name(self.sema.db))
                            .or_else(|| ty.as_builtin().map(|builtin| builtin.name()))
                    })
                };
                self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.to_smol_str())
            }
        };
        let name = match &name {
            Some(s) => s.as_str(),
            // Nothing to text-search for.
            None => return,
        };

        // these can't be closures because rust infers the lifetimes wrong ...
        fn match_indices<'a>(
            text: &'a str,
            name: &'a str,
            search_range: TextRange,
        ) -> impl Iterator<Item = TextSize> + 'a {
            text.match_indices(name).filter_map(move |(idx, _)| {
                let offset: TextSize = idx.try_into().unwrap();
                if !search_range.contains_inclusive(offset) {
                    return None;
                }
                Some(offset)
            })
        }

        // Yields `(text, file, range-to-search)` for every file in the scope,
        // defaulting the range to the whole file.
        fn scope_files<'a>(
            sema: &'a Semantics<'_, RootDatabase>,
            scope: &'a SearchScope,
        ) -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a {
            scope.entries.iter().map(|(&file_id, &search_range)| {
                let text = sema.db.file_text(file_id);
                let search_range =
                    search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));

                (text, file_id, search_range)
            })
        }

        // FIXME: There should be optimization potential here
        // Currently we try to descend everything we find which
        // means we call `Semantics::descend_into_macros` on
        // every textual hit. That function is notoriously
        // expensive even for things that do not get down mapped
        // into macros.
        for (text, file_id, search_range) in scope_files(sema, &search_scope) {
            // Parse lazily: only hit files with textual matches pay for it.
            let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());

            // Search for occurrences of the items name
            for offset in match_indices(&text, name, search_range) {
                for name in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                    if match name {
                        ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
                        ast::NameLike::Name(name) => self.found_name(&name, sink),
                        ast::NameLike::Lifetime(lifetime) => self.found_lifetime(&lifetime, sink),
                    } {
                        return;
                    }
                }
            }
            // Search for occurrences of the `Self` referring to our type
            if let Some(self_ty) = &self.include_self_kw_refs {
                for offset in match_indices(&text, "Self", search_range) {
                    for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                        if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                            return;
                        }
                    }
                }
            }
        }

        // Search for `super` and `crate` resolving to our module
        match self.def {
            Definition::Module(module) => {
                let scope = search_scope
                    .intersection(&SearchScope::module_and_children(self.sema.db, module));

                let is_crate_root = module.is_crate_root(self.sema.db);

                for (text, file_id, search_range) in scope_files(sema, &scope) {
                    let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());

                    for offset in match_indices(&text, "super", search_range) {
                        for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                            if self.found_name_ref(&name_ref, sink) {
                                return;
                            }
                        }
                    }
                    // `crate` paths only resolve to the crate root module.
                    if is_crate_root {
                        for offset in match_indices(&text, "crate", search_range) {
                            for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                                if self.found_name_ref(&name_ref, sink) {
                                    return;
                                }
                            }
                        }
                    }
                }
            }
            _ => (),
        }

        // search for module `self` references in our module's definition source
        match self.def {
            Definition::Module(module) if self.search_self_mod => {
                let src = module.definition_source(sema.db);
                let file_id = src.file_id.original_file(sema.db);
                let (file_id, search_range) = match src.value {
                    ModuleSource::Module(m) => (file_id, Some(m.syntax().text_range())),
                    ModuleSource::BlockExpr(b) => (file_id, Some(b.syntax().text_range())),
                    ModuleSource::SourceFile(_) => (file_id, None),
                };

                // Clamp the module's range by the scope restriction, bailing
                // out if the two do not overlap.
                let search_range = if let Some(&range) = search_scope.entries.get(&file_id) {
                    match (range, search_range) {
                        (None, range) | (range, None) => range,
                        (Some(range), Some(search_range)) => match range.intersect(search_range) {
                            Some(range) => Some(range),
                            None => return,
                        },
                    }
                } else {
                    return;
                };

                let text = sema.db.file_text(file_id);
                let search_range =
                    search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));

                let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());

                for offset in match_indices(&text, "self", search_range) {
                    for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                        if self.found_self_module_name_ref(&name_ref, sink) {
                            return;
                        }
                    }
                }
            }
            _ => {}
        }
    }

    /// Confirms a textual `Self` hit: reports it when it resolves to an impl
    /// whose self type equals `self_ty`.
    fn found_self_ty_name_ref(
        &self,
        self_ty: &hir::Type,
        name_ref: &ast::NameRef,
        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
    ) -> bool {
        match NameRefClass::classify(self.sema, name_ref) {
            Some(NameRefClass::Definition(Definition::SelfType(impl_)))
                if impl_.self_ty(self.sema.db) == *self_ty =>
            {
                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                let reference = FileReference {
                    range,
                    name: ast::NameLike::NameRef(name_ref.clone()),
                    category: None,
                };
                sink(file_id, reference)
            }
            _ => false,
        }
    }

    /// Confirms a textual `self` hit that resolves to our module.
    fn found_self_module_name_ref(
        &self,
        name_ref: &ast::NameRef,
        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
    ) -> bool {
        match NameRefClass::classify(self.sema, name_ref) {
            Some(NameRefClass::Definition(def @ Definition::Module(_))) if def == self.def => {
                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                let reference = FileReference {
                    range,
                    name: ast::NameLike::NameRef(name_ref.clone()),
                    category: None,
                };
                sink(file_id, reference)
            }
            _ => false,
        }
    }

    /// Confirms a textual hit on a lifetime reference.
    fn found_lifetime(
        &self,
        lifetime: &ast::Lifetime,
        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
    ) -> bool {
        match NameRefClass::classify_lifetime(self.sema, lifetime) {
            Some(NameRefClass::Definition(def)) if def == self.def => {
                let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax());
                let reference = FileReference {
                    range,
                    name: ast::NameLike::Lifetime(lifetime.clone()),
                    category: None,
                };
                sink(file_id, reference)
            }
            _ => false,
        }
    }

    /// Confirms a textual hit on a `NameRef`, covering locals (via their
    /// representative), trait assoc items, `Self`-typed refs, and field
    /// shorthands.
    fn found_name_ref(
        &self,
        name_ref: &ast::NameRef,
        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
    ) -> bool {
        match NameRefClass::classify(self.sema, name_ref) {
            Some(NameRefClass::Definition(def @ Definition::Local(local)))
                if matches!(
                    self.local_repr, Some(repr) if repr == local.representative(self.sema.db)
                ) =>
            {
                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                let reference = FileReference {
                    range,
                    name: ast::NameLike::NameRef(name_ref.clone()),
                    category: ReferenceCategory::new(&def, name_ref),
                };
                sink(file_id, reference)
            }
            Some(NameRefClass::Definition(def))
                if match self.trait_assoc_def {
                    Some(trait_assoc_def) => {
                        // we have a trait assoc item, so force resolve all assoc items to their trait version
                        convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def
                    }
                    None => self.def == def,
                } =>
            {
                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                let reference = FileReference {
                    range,
                    name: ast::NameLike::NameRef(name_ref.clone()),
                    category: ReferenceCategory::new(&def, name_ref),
                };
                sink(file_id, reference)
            }
            Some(NameRefClass::Definition(def)) if self.include_self_kw_refs.is_some() => {
                if self.include_self_kw_refs == def_to_ty(self.sema, &def) {
                    let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                    let reference = FileReference {
                        range,
                        name: ast::NameLike::NameRef(name_ref.clone()),
                        category: ReferenceCategory::new(&def, name_ref),
                    };
                    sink(file_id, reference)
                } else {
                    false
                }
            }
            Some(NameRefClass::FieldShorthand { local_ref: local, field_ref: field }) => {
                // A shorthand references both the field and the local; report
                // the hit only for whichever one we are searching for.
                let field = Definition::Field(field);
                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                let access = match self.def {
                    Definition::Field(_) if field == self.def => {
                        ReferenceCategory::new(&field, name_ref)
                    }
                    Definition::Local(_) if matches!(self.local_repr, Some(repr) if repr == local.representative(self.sema.db)) => {
                        ReferenceCategory::new(&Definition::Local(local), name_ref)
                    }
                    _ => return false,
                };
                let reference = FileReference {
                    range,
                    name: ast::NameLike::NameRef(name_ref.clone()),
                    category: access,
                };
                sink(file_id, reference)
            }
            _ => false,
        }
    }

    /// Confirms a textual hit on a `Name` (a definition-position identifier),
    /// covering pattern shorthands, const-pattern references, extra local
    /// binding sites, and trait-impl assoc item definitions.
    fn found_name(
        &self,
        name: &ast::Name,
        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
    ) -> bool {
        match NameClass::classify(self.sema, name) {
            Some(NameClass::PatFieldShorthand { local_def: _, field_ref })
                if matches!(
                    self.def, Definition::Field(_) if Definition::Field(field_ref) == self.def
                ) =>
            {
                let FileRange { file_id, range } = self.sema.original_range(name.syntax());
                let reference = FileReference {
                    range,
                    name: ast::NameLike::Name(name.clone()),
                    // FIXME: mutable patterns should have `Write` access
                    category: Some(ReferenceCategory::Read),
                };
                sink(file_id, reference)
            }
            Some(NameClass::ConstReference(def)) if self.def == def => {
                let FileRange { file_id, range } = self.sema.original_range(name.syntax());
                let reference = FileReference {
                    range,
                    name: ast::NameLike::Name(name.clone()),
                    category: None,
                };
                sink(file_id, reference)
            }
            Some(NameClass::Definition(def @ Definition::Local(local))) if def != self.def => {
                // Another binding site of the same local (shared representative).
                if matches!(
                    self.local_repr,
                    Some(repr) if local.representative(self.sema.db) == repr
                ) {
                    let FileRange { file_id, range } = self.sema.original_range(name.syntax());
                    let reference = FileReference {
                        range,
                        name: ast::NameLike::Name(name.clone()),
                        category: None,
                    };
                    return sink(file_id, reference);
                }
                false
            }
            Some(NameClass::Definition(def)) if def != self.def => {
                // if the def we are looking for is a trait (impl) assoc item, we'll have to resolve the items to trait definition assoc item
                if !matches!(
                    self.trait_assoc_def,
                    Some(trait_assoc_def)
                        if convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def
                ) {
                    return false;
                }
                let FileRange { file_id, range } = self.sema.original_range(name.syntax());
                let reference = FileReference {
                    range,
                    name: ast::NameLike::Name(name.clone()),
                    category: None,
                };
                sink(file_id, reference)
            }
            _ => false,
        }
    }
}
+
+fn def_to_ty(sema: &Semantics<'_, RootDatabase>, def: &Definition) -> Option<hir::Type> {
+ match def {
+ Definition::Adt(adt) => Some(adt.ty(sema.db)),
+ Definition::TypeAlias(it) => Some(it.ty(sema.db)),
+ Definition::BuiltinType(it) => Some(it.ty(sema.db)),
+ Definition::SelfType(it) => Some(it.self_ty(sema.db)),
+ _ => None,
+ }
+}
+
impl ReferenceCategory {
    /// Determines whether `r` reads or writes `def`. Returns `None` for
    /// definition kinds that have no notion of access.
    fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
        // Only Locals and Fields have accesses for now.
        if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
            return None;
        }

        let mode = r.syntax().ancestors().find_map(|node| {
            match_ast! {
                match node {
                    ast::BinExpr(expr) => {
                        if matches!(expr.op_kind()?, ast::BinaryOp::Assignment { .. }) {
                            // If the variable or field ends on the LHS's end then it's a Write (covers fields and locals).
                            // FIXME: This is not terribly accurate.
                            if let Some(lhs) = expr.lhs() {
                                if lhs.syntax().text_range().end() == r.syntax().text_range().end() {
                                    return Some(ReferenceCategory::Write);
                                }
                            }
                        }
                        Some(ReferenceCategory::Read)
                    },
                    _ => None
                }
            }
        });

        // Default Locals and Fields to read
        mode.or(Some(ReferenceCategory::Read))
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
new file mode 100644
index 000000000..8132c73ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -0,0 +1,99 @@
+//! This module defines types to represent changes to the source code, that flow
+//! from the server to the client.
+//!
+//! It can be viewed as a dual for `Change`.
+
+use std::{collections::hash_map::Entry, iter};
+
+use base_db::{AnchoredPathBuf, FileId};
+use rustc_hash::FxHashMap;
+use stdx::never;
+use text_edit::TextEdit;
+
+#[derive(Default, Debug, Clone)]
+pub struct SourceChange {
+ pub source_file_edits: FxHashMap<FileId, TextEdit>,
+ pub file_system_edits: Vec<FileSystemEdit>,
+ pub is_snippet: bool,
+}
+
+impl SourceChange {
+ /// Creates a new SourceChange with the given label
+ /// from the edits.
+ pub fn from_edits(
+ source_file_edits: FxHashMap<FileId, TextEdit>,
+ file_system_edits: Vec<FileSystemEdit>,
+ ) -> Self {
+ SourceChange { source_file_edits, file_system_edits, is_snippet: false }
+ }
+
+ pub fn from_text_edit(file_id: FileId, edit: TextEdit) -> Self {
+ SourceChange {
+ source_file_edits: iter::once((file_id, edit)).collect(),
+ ..Default::default()
+ }
+ }
+
+ /// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
+ /// edits for a file if some already exist.
+ pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
+ match self.source_file_edits.entry(file_id) {
+ Entry::Occupied(mut entry) => {
+ never!(entry.get_mut().union(edit).is_err(), "overlapping edits for same file");
+ }
+ Entry::Vacant(entry) => {
+ entry.insert(edit);
+ }
+ }
+ }
+
+ pub fn push_file_system_edit(&mut self, edit: FileSystemEdit) {
+ self.file_system_edits.push(edit);
+ }
+
+ pub fn get_source_edit(&self, file_id: FileId) -> Option<&TextEdit> {
+ self.source_file_edits.get(&file_id)
+ }
+
+ pub fn merge(mut self, other: SourceChange) -> SourceChange {
+ self.extend(other.source_file_edits);
+ self.extend(other.file_system_edits);
+ self.is_snippet |= other.is_snippet;
+ self
+ }
+}
+
+impl Extend<(FileId, TextEdit)> for SourceChange {
+ fn extend<T: IntoIterator<Item = (FileId, TextEdit)>>(&mut self, iter: T) {
+ iter.into_iter().for_each(|(file_id, edit)| self.insert_source_edit(file_id, edit));
+ }
+}
+
+impl Extend<FileSystemEdit> for SourceChange {
+ fn extend<T: IntoIterator<Item = FileSystemEdit>>(&mut self, iter: T) {
+ iter.into_iter().for_each(|edit| self.push_file_system_edit(edit));
+ }
+}
+
+impl From<FxHashMap<FileId, TextEdit>> for SourceChange {
+ fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange {
+ SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum FileSystemEdit {
+ CreateFile { dst: AnchoredPathBuf, initial_contents: String },
+ MoveFile { src: FileId, dst: AnchoredPathBuf },
+ MoveDir { src: AnchoredPathBuf, src_id: FileId, dst: AnchoredPathBuf },
+}
+
+impl From<FileSystemEdit> for SourceChange {
+ fn from(edit: FileSystemEdit) -> SourceChange {
+ SourceChange {
+ source_file_edits: Default::default(),
+ file_system_edits: vec![edit],
+ is_snippet: false,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
new file mode 100644
index 000000000..bfb003127
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -0,0 +1,429 @@
+//! This module handles fuzzy-searching of functions, structs and other symbols
+//! by name across the whole workspace and dependencies.
+//!
+//! It works by building an incrementally-updated text-search index of all
+//! symbols. The backbone of the index is the **awesome** `fst` crate by
+//! @BurntSushi.
+//!
+//! In a nutshell, you give a set of strings to `fst`, and it builds a
+//! finite state machine describing this set of strings. The strings which
+//! could fuzzy-match a pattern can also be described by a finite state machine.
+//! What is freaking cool is that you can now traverse both state machines in
+//! lock-step to enumerate the strings which are both in the input set and
+//! fuzzy-match the query. Or, more formally, given two languages described by
+//! FSTs, one can build a product FST which describes the intersection of the
+//! languages.
+//!
+//! `fst` does not support cheap updating of the index, but it supports unioning
+//! of state machines. So, to account for changing source code, we build an FST
+//! for each library (which is assumed to never change) and an FST for each Rust
+//! file in the current workspace, and run a query against the union of all
+//! those FSTs.
+
+use std::{
+ cmp::Ordering,
+ fmt,
+ hash::{Hash, Hasher},
+ mem,
+ sync::Arc,
+};
+
+use base_db::{
+ salsa::{self, ParallelDatabase},
+ SourceDatabaseExt, SourceRootId, Upcast,
+};
+use fst::{self, Streamer};
+use hir::{
+ db::HirDatabase,
+ symbols::{FileSymbol, SymbolCollector},
+ Crate, Module,
+};
+use rayon::prelude::*;
+use rustc_hash::FxHashSet;
+
+use crate::RootDatabase;
+
+#[derive(Debug)]
+pub struct Query {
+ query: String,
+ lowercased: String,
+ only_types: bool,
+ libs: bool,
+ exact: bool,
+ case_sensitive: bool,
+ limit: usize,
+}
+
+impl Query {
+ pub fn new(query: String) -> Query {
+ let lowercased = query.to_lowercase();
+ Query {
+ query,
+ lowercased,
+ only_types: false,
+ libs: false,
+ exact: false,
+ case_sensitive: false,
+ limit: usize::max_value(),
+ }
+ }
+
+ pub fn only_types(&mut self) {
+ self.only_types = true;
+ }
+
+ pub fn libs(&mut self) {
+ self.libs = true;
+ }
+
+ pub fn exact(&mut self) {
+ self.exact = true;
+ }
+
+ pub fn case_sensitive(&mut self) {
+ self.case_sensitive = true;
+ }
+
+ pub fn limit(&mut self, limit: usize) {
+ self.limit = limit
+ }
+}
+
+#[salsa::query_group(SymbolsDatabaseStorage)]
+pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatabase> {
+ /// The symbol index for a given module. These modules should only be in source roots that
+ /// are inside local_roots.
+ fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;
+
+ /// The symbol index for a given source root within library_roots.
+ fn library_symbols(&self, source_root_id: SourceRootId) -> Arc<SymbolIndex>;
+
+ /// The set of "local" (that is, from the current workspace) roots.
+ /// Files in local roots are assumed to change frequently.
+ #[salsa::input]
+ fn local_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
+
+ /// The set of roots for crates.io libraries.
+ /// Files in libraries are assumed to never change.
+ #[salsa::input]
+ fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
+}
+
+fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc<SymbolIndex> {
+ let _p = profile::span("library_symbols");
+
+    // TODO: this could be parallelized, once I figure out how to do that...
+ let symbols = db
+ .source_root_crates(source_root_id)
+ .iter()
+ .flat_map(|&krate| Crate::from(krate).modules(db.upcast()))
+        // we specifically avoid calling SymbolsDatabase::module_symbols here, even though they do the same thing,
+ // as the index for a library is not going to really ever change, and we do not want to store each
+ // module's index in salsa.
+ .flat_map(|module| SymbolCollector::collect(db.upcast(), module))
+ .collect();
+
+ Arc::new(SymbolIndex::new(symbols))
+}
+
+fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
+ let _p = profile::span("module_symbols");
+ let symbols = SymbolCollector::collect(db.upcast(), module);
+ Arc::new(SymbolIndex::new(symbols))
+}
+
+/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
+struct Snap<DB>(DB);
+impl<DB: ParallelDatabase> Snap<salsa::Snapshot<DB>> {
+ fn new(db: &DB) -> Self {
+ Self(db.snapshot())
+ }
+}
+impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
+ fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
+ Snap(self.0.snapshot())
+ }
+}
+impl<DB> std::ops::Deref for Snap<DB> {
+ type Target = DB;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+// Feature: Workspace Symbol
+//
+// Uses fuzzy-search to find types, modules and functions by name across your
+// project and dependencies. This is **the** most useful feature, which improves code
+// navigation tremendously. It mostly works on top of the built-in LSP
+// functionality, however `#` and `*` symbols can be used to narrow down the
+// search. Specifically,
+//
+// - `Foo` searches for `Foo` type in the current workspace
+// - `foo#` searches for `foo` function in the current workspace
+// - `Foo*` searches for `Foo` type among dependencies, including `stdlib`
+// - `foo#*` searches for `foo` function among dependencies
+//
+// That is, `#` switches from "types" to all symbols, `*` switches from the current
+// workspace to dependencies.
+//
+// Note that filtering does not currently work in VSCode due to the editor never
+// sending the special symbols to the language server. Instead, you can configure
+// the filtering via the `rust-analyzer.workspace.symbol.search.scope` and
+// `rust-analyzer.workspace.symbol.search.kind` settings.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Ctrl+T]
+// |===
+pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
+ let _p = profile::span("world_symbols").detail(|| query.query.clone());
+
+ let indices: Vec<_> = if query.libs {
+ db.library_roots()
+ .par_iter()
+ .map_with(Snap::new(db), |snap, &root| snap.library_symbols(root))
+ .collect()
+ } else {
+ let mut modules = Vec::new();
+
+ for &root in db.local_roots().iter() {
+ let crates = db.source_root_crates(root);
+ for &krate in crates.iter() {
+ modules.extend(Crate::from(krate).modules(db));
+ }
+ }
+
+ modules
+ .par_iter()
+ .map_with(Snap::new(db), |snap, &module| snap.module_symbols(module))
+ .collect()
+ };
+
+ query.search(&indices)
+}
+
+pub fn crate_symbols(db: &RootDatabase, krate: Crate, query: Query) -> Vec<FileSymbol> {
+ let _p = profile::span("crate_symbols").detail(|| format!("{:?}", query));
+
+ let modules = krate.modules(db);
+ let indices: Vec<_> = modules
+ .par_iter()
+ .map_with(Snap::new(db), |snap, &module| snap.module_symbols(module))
+ .collect();
+
+ query.search(&indices)
+}
+
+#[derive(Default)]
+pub struct SymbolIndex {
+ symbols: Vec<FileSymbol>,
+ map: fst::Map<Vec<u8>>,
+}
+
+impl fmt::Debug for SymbolIndex {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish()
+ }
+}
+
+impl PartialEq for SymbolIndex {
+ fn eq(&self, other: &SymbolIndex) -> bool {
+ self.symbols == other.symbols
+ }
+}
+
+impl Eq for SymbolIndex {}
+
+impl Hash for SymbolIndex {
+ fn hash<H: Hasher>(&self, hasher: &mut H) {
+ self.symbols.hash(hasher)
+ }
+}
+
+impl SymbolIndex {
+ fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex {
+ fn cmp(lhs: &FileSymbol, rhs: &FileSymbol) -> Ordering {
+ let lhs_chars = lhs.name.chars().map(|c| c.to_ascii_lowercase());
+ let rhs_chars = rhs.name.chars().map(|c| c.to_ascii_lowercase());
+ lhs_chars.cmp(rhs_chars)
+ }
+
+ symbols.par_sort_by(cmp);
+
+ let mut builder = fst::MapBuilder::memory();
+
+ let mut last_batch_start = 0;
+
+ for idx in 0..symbols.len() {
+ if let Some(next_symbol) = symbols.get(idx + 1) {
+ if cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal {
+ continue;
+ }
+ }
+
+ let start = last_batch_start;
+ let end = idx + 1;
+ last_batch_start = end;
+
+ let key = symbols[start].name.as_str().to_ascii_lowercase();
+ let value = SymbolIndex::range_to_map_value(start, end);
+
+ builder.insert(key, value).unwrap();
+ }
+
+ let map = fst::Map::new(builder.into_inner().unwrap()).unwrap();
+ SymbolIndex { symbols, map }
+ }
+
+ pub fn len(&self) -> usize {
+ self.symbols.len()
+ }
+
+ pub fn memory_size(&self) -> usize {
+ self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>()
+ }
+
+ fn range_to_map_value(start: usize, end: usize) -> u64 {
+ debug_assert![start <= (std::u32::MAX as usize)];
+ debug_assert![end <= (std::u32::MAX as usize)];
+
+ ((start as u64) << 32) | end as u64
+ }
+
+ fn map_value_to_range(value: u64) -> (usize, usize) {
+ let end = value as u32 as usize;
+ let start = (value >> 32) as usize;
+ (start, end)
+ }
+}
+
+impl Query {
+ pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> {
+ let _p = profile::span("symbol_index::Query::search");
+ let mut op = fst::map::OpBuilder::new();
+ for file_symbols in indices.iter() {
+ let automaton = fst::automaton::Subsequence::new(&self.lowercased);
+ op = op.add(file_symbols.map.search(automaton))
+ }
+ let mut stream = op.union();
+ let mut res = Vec::new();
+ while let Some((_, indexed_values)) = stream.next() {
+ for indexed_value in indexed_values {
+ let symbol_index = &indices[indexed_value.index];
+ let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value);
+
+ for symbol in &symbol_index.symbols[start..end] {
+ if self.only_types && !symbol.kind.is_type() {
+ continue;
+ }
+ if self.exact {
+ if symbol.name != self.query {
+ continue;
+ }
+ } else if self.case_sensitive {
+ if self.query.chars().any(|c| !symbol.name.contains(c)) {
+ continue;
+ }
+ }
+
+ res.push(symbol.clone());
+ if res.len() >= self.limit {
+ return res;
+ }
+ }
+ }
+ }
+ res
+ }
+}
+
+#[cfg(test)]
+mod tests {
+
+ use base_db::fixture::WithFixture;
+ use expect_test::expect_file;
+ use hir::symbols::SymbolCollector;
+
+ use super::*;
+
+ #[test]
+ fn test_symbol_index_collection() {
+ let (db, _) = RootDatabase::with_many_files(
+ r#"
+//- /main.rs
+
+macro_rules! macro_rules_macro {
+ () => {}
+};
+
+macro_rules! define_struct {
+ () => {
+ struct StructFromMacro;
+ }
+};
+
+define_struct!();
+
+macro Macro { }
+
+struct Struct;
+enum Enum {
+ A, B
+}
+union Union {}
+
+impl Struct {
+ fn impl_fn() {}
+}
+
+trait Trait {
+ fn trait_fn(&self);
+}
+
+fn main() {
+ struct StructInFn;
+}
+
+const CONST: u32 = 1;
+static STATIC: &'static str = "2";
+type Alias = Struct;
+
+mod a_mod {
+ struct StructInModA;
+}
+
+const _: () = {
+ struct StructInUnnamedConst;
+
+ ()
+};
+
+const CONST_WITH_INNER: () = {
+ struct StructInNamedConst;
+
+ ()
+};
+
+mod b_mod;
+
+//- /b_mod.rs
+struct StructInModB;
+ "#,
+ );
+
+ let symbols: Vec<_> = Crate::from(db.test_crate())
+ .modules(&db)
+ .into_iter()
+ .map(|module_id| {
+ let mut symbols = SymbolCollector::collect(&db, module_id);
+ symbols.sort_by_key(|it| it.name.clone());
+ (module_id, symbols)
+ })
+ .collect();
+
+ expect_file!["./test_data/test_symbol_index_collection.txt"].assert_debug_eq(&symbols);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
new file mode 100644
index 000000000..f48a57008
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
@@ -0,0 +1,308 @@
+//! Tools to work with format string literals for the `format_args!` family of macros.
+use syntax::{
+ ast::{self, IsString},
+ AstNode, AstToken, TextRange, TextSize,
+};
+
+pub fn is_format_string(string: &ast::String) -> bool {
+ // Check if `string` is a format string argument of a macro invocation.
+ // `string` is a string literal, mapped down into the innermost macro expansion.
+ // Since `format_args!` etc. remove the format string when expanding, but place all arguments
+ // in the expanded output, we know that the string token is (part of) the format string if it
+ // appears in `format_args!` (otherwise it would have been mapped down further).
+ //
+ // This setup lets us correctly highlight the components of `concat!("{}", "bla")` format
+ // strings. It still fails for `concat!("{", "}")`, but that is rare.
+ (|| {
+ let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?;
+ let name = macro_call.path()?.segment()?.name_ref()?;
+
+ if !matches!(
+ name.text().as_str(),
+ "format_args" | "format_args_nl" | "const_format_args" | "panic_2015" | "panic_2021"
+ ) {
+ return None;
+ }
+
+ // NB: we match against `panic_2015`/`panic_2021` here because they have a special-cased arm for
+ // `"{}"`, which otherwise wouldn't get highlighted.
+
+ Some(())
+ })()
+ .is_some()
+}
+
+#[derive(Debug)]
+pub enum FormatSpecifier {
+ Open,
+ Close,
+ Integer,
+ Identifier,
+ Colon,
+ Fill,
+ Align,
+ Sign,
+ NumberSign,
+ Zero,
+ DollarSign,
+ Dot,
+ Asterisk,
+ QuestionMark,
+ Escape,
+}
+
+pub fn lex_format_specifiers(
+ string: &ast::String,
+ mut callback: &mut dyn FnMut(TextRange, FormatSpecifier),
+) {
+ let mut char_ranges = Vec::new();
+ string.escaped_char_ranges(&mut |range, res| char_ranges.push((range, res)));
+ let mut chars = char_ranges
+ .iter()
+ .filter_map(|(range, res)| Some((*range, *res.as_ref().ok()?)))
+ .peekable();
+
+ while let Some((range, first_char)) = chars.next() {
+ if let '{' = first_char {
+ // Format specifier, see syntax at https://doc.rust-lang.org/std/fmt/index.html#syntax
+ if let Some((_, '{')) = chars.peek() {
+ // Escaped format specifier, `{{`
+ read_escaped_format_specifier(&mut chars, &mut callback);
+ continue;
+ }
+
+ callback(range, FormatSpecifier::Open);
+
+ // check for integer/identifier
+ let (_, int_char) = chars.peek().copied().unwrap_or_default();
+ match int_char {
+ // integer
+ '0'..='9' => read_integer(&mut chars, &mut callback),
+ // identifier
+ c if c == '_' || c.is_alphabetic() => read_identifier(&mut chars, &mut callback),
+ _ => {}
+ }
+
+ if let Some((_, ':')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Colon, &mut callback);
+
+ // check for fill/align
+ let mut cloned = chars.clone().take(2);
+ let (_, first) = cloned.next().unwrap_or_default();
+ let (_, second) = cloned.next().unwrap_or_default();
+ match second {
+ '<' | '^' | '>' => {
+                        // alignment specifier, first char specifies the fill character
+ skip_char_and_emit(&mut chars, FormatSpecifier::Fill, &mut callback);
+ skip_char_and_emit(&mut chars, FormatSpecifier::Align, &mut callback);
+ }
+ _ => {
+ if let '<' | '^' | '>' = first {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Align, &mut callback);
+ }
+ }
+ }
+
+ // check for sign
+ match chars.peek().copied().unwrap_or_default().1 {
+ '+' | '-' => {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Sign, &mut callback);
+ }
+ _ => {}
+ }
+
+ // check for `#`
+ if let Some((_, '#')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::NumberSign, &mut callback);
+ }
+
+ // check for `0`
+ let mut cloned = chars.clone().take(2);
+ let first = cloned.next().map(|next| next.1);
+ let second = cloned.next().map(|next| next.1);
+
+ if first == Some('0') && second != Some('$') {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Zero, &mut callback);
+ }
+
+ // width
+ match chars.peek().copied().unwrap_or_default().1 {
+ '0'..='9' => {
+ read_integer(&mut chars, &mut callback);
+ if let Some((_, '$')) = chars.peek() {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+
+ if chars.peek().map(|&(_, c)| c) == Some('?') {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+
+ // can be either width (indicated by dollar sign, or type in which case
+                        // the next character has to be `}`)
+ let next = chars.peek().map(|&(_, c)| c);
+
+ match next {
+ Some('$') => skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ ),
+ Some('}') => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::Close,
+ &mut callback,
+ );
+ continue;
+ }
+ _ => continue,
+ };
+ }
+ _ => {}
+ }
+
+ // precision
+ if let Some((_, '.')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Dot, &mut callback);
+
+ match chars.peek().copied().unwrap_or_default().1 {
+ '*' => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::Asterisk,
+ &mut callback,
+ );
+ }
+ '0'..='9' => {
+ read_integer(&mut chars, &mut callback);
+ if let Some((_, '$')) = chars.peek() {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+ if chars.peek().map(|&(_, c)| c) != Some('$') {
+ continue;
+ }
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ _ => {
+ continue;
+ }
+ }
+ }
+
+ // type
+ match chars.peek().copied().unwrap_or_default().1 {
+ '?' => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+
+ if chars.peek().map(|&(_, c)| c) == Some('?') {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+ }
+ _ => {}
+ }
+ }
+
+ if let Some((_, '}')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Close, &mut callback);
+ }
+ continue;
+ } else if let '}' = first_char {
+ if let Some((_, '}')) = chars.peek() {
+ // Escaped format specifier, `}}`
+ read_escaped_format_specifier(&mut chars, &mut callback);
+ }
+ }
+ }
+
+ fn skip_char_and_emit<I, F>(
+ chars: &mut std::iter::Peekable<I>,
+ emit: FormatSpecifier,
+ callback: &mut F,
+ ) where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (range, _) = chars.next().unwrap();
+ callback(range, emit);
+ }
+
+ fn read_integer<I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+ where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (mut range, c) = chars.next().unwrap();
+ assert!(c.is_ascii_digit());
+ while let Some(&(r, next_char)) = chars.peek() {
+ if next_char.is_ascii_digit() {
+ chars.next();
+ range = range.cover(r);
+ } else {
+ break;
+ }
+ }
+ callback(range, FormatSpecifier::Integer);
+ }
+
+ fn read_identifier<I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+ where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (mut range, c) = chars.next().unwrap();
+ assert!(c.is_alphabetic() || c == '_');
+ while let Some(&(r, next_char)) = chars.peek() {
+ if next_char == '_' || next_char.is_ascii_digit() || next_char.is_alphabetic() {
+ chars.next();
+ range = range.cover(r);
+ } else {
+ break;
+ }
+ }
+ callback(range, FormatSpecifier::Identifier);
+ }
+
+ fn read_escaped_format_specifier<I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+ where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (range, _) = chars.peek().unwrap();
+ let offset = TextSize::from(1);
+ callback(TextRange::new(range.start() - offset, range.end()), FormatSpecifier::Escape);
+ chars.next();
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
new file mode 100644
index 000000000..f54ae6c92
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
@@ -0,0 +1,136 @@
+//! Utilities for formatting macro expanded nodes until we get a proper formatter.
+use syntax::{
+ ast::make,
+ ted::{self, Position},
+ NodeOrToken,
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, WalkEvent, T,
+};
+
+// FIXME: It would also be cool to share logic here and in the mbe tests,
+// which are pretty unreadable at the moment.
+/// Renders a [`SyntaxNode`] with whitespace inserted between tokens that require them.
+pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
+ let mut indent = 0;
+ let mut last: Option<SyntaxKind> = None;
+ let mut mods = Vec::new();
+ let syn = syn.clone_subtree().clone_for_update();
+
+ let before = Position::before;
+ let after = Position::after;
+
+ let do_indent = |pos: fn(_) -> Position, token: &SyntaxToken, indent| {
+ (pos(token.clone()), make::tokens::whitespace(&" ".repeat(2 * indent)))
+ };
+ let do_ws = |pos: fn(_) -> Position, token: &SyntaxToken| {
+ (pos(token.clone()), make::tokens::single_space())
+ };
+ let do_nl = |pos: fn(_) -> Position, token: &SyntaxToken| {
+ (pos(token.clone()), make::tokens::single_newline())
+ };
+
+ for event in syn.preorder_with_tokens() {
+ let token = match event {
+ WalkEvent::Enter(NodeOrToken::Token(token)) => token,
+ WalkEvent::Leave(NodeOrToken::Node(node))
+ if matches!(
+ node.kind(),
+ ATTR | MATCH_ARM | STRUCT | ENUM | UNION | FN | IMPL | MACRO_RULES
+ ) =>
+ {
+ if indent > 0 {
+ mods.push((
+ Position::after(node.clone()),
+ make::tokens::whitespace(&" ".repeat(2 * indent)),
+ ));
+ }
+ if node.parent().is_some() {
+ mods.push((Position::after(node), make::tokens::single_newline()));
+ }
+ continue;
+ }
+ _ => continue,
+ };
+ let tok = &token;
+
+ let is_next = |f: fn(SyntaxKind) -> bool, default| -> bool {
+ tok.next_token().map(|it| f(it.kind())).unwrap_or(default)
+ };
+ let is_last =
+ |f: fn(SyntaxKind) -> bool, default| -> bool { last.map(f).unwrap_or(default) };
+
+ match tok.kind() {
+ k if is_text(k) && is_next(|it| !it.is_punct() || it == UNDERSCORE, false) => {
+ mods.push(do_ws(after, tok));
+ }
+ L_CURLY if is_next(|it| it != R_CURLY, true) => {
+ indent += 1;
+ if is_last(is_text, false) {
+ mods.push(do_ws(before, tok));
+ }
+
+ mods.push(do_indent(after, tok, indent));
+ mods.push(do_nl(after, tok));
+ }
+ R_CURLY if is_last(|it| it != L_CURLY, true) => {
+ indent = indent.saturating_sub(1);
+
+ if indent > 0 {
+ mods.push(do_indent(before, tok, indent));
+ }
+ mods.push(do_nl(before, tok));
+ }
+ R_CURLY => {
+ if indent > 0 {
+ mods.push(do_indent(after, tok, indent));
+ }
+ mods.push(do_nl(after, tok));
+ }
+ LIFETIME_IDENT if is_next(is_text, true) => {
+ mods.push(do_ws(after, tok));
+ }
+ MUT_KW if is_next(|it| it == SELF_KW, false) => {
+ mods.push(do_ws(after, tok));
+ }
+ AS_KW | DYN_KW | IMPL_KW | CONST_KW => {
+ mods.push(do_ws(after, tok));
+ }
+ T![;] => {
+ if indent > 0 {
+ mods.push(do_indent(after, tok, indent));
+ }
+ mods.push(do_nl(after, tok));
+ }
+ T![=] if is_next(|it| it == T![>], false) => {
+ // FIXME: this branch is for `=>` in macro_rules!, which is currently parsed as
+ // two separate symbols.
+ mods.push(do_ws(before, tok));
+ mods.push(do_ws(after, &tok.next_token().unwrap()));
+ }
+ T![->] | T![=] | T![=>] => {
+ mods.push(do_ws(before, tok));
+ mods.push(do_ws(after, tok));
+ }
+ T![!] if is_last(|it| it == MACRO_RULES_KW, false) && is_next(is_text, false) => {
+ mods.push(do_ws(after, tok));
+ }
+ _ => (),
+ }
+
+ last = Some(tok.kind());
+ }
+
+ for (pos, insert) in mods {
+ ted::insert(pos, insert);
+ }
+
+ if let Some(it) = syn.last_token().filter(|it| it.kind() == SyntaxKind::WHITESPACE) {
+ ted::remove(it);
+ }
+
+ syn
+}
+
+fn is_text(k: SyntaxKind) -> bool {
+ k.is_keyword() || k.is_literal() || k == IDENT || k == UNDERSCORE
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
new file mode 100644
index 000000000..84bde4d44
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -0,0 +1,460 @@
+//! Various helper functions to work with SyntaxNodes.
+use itertools::Itertools;
+use parser::T;
+use syntax::{
+ ast::{self, HasLoopBody, PathSegmentKind, VisibilityKind},
+ AstNode, Preorder, RustLanguage, WalkEvent,
+};
+
+/// If `expr` is a plain single-segment path expression (e.g. `foo`), returns
+/// the `NameRef` of that segment; otherwise returns `None`.
+pub fn expr_as_name_ref(expr: &ast::Expr) -> Option<ast::NameRef> {
+    match expr {
+        ast::Expr::PathExpr(path_expr) => path_expr.path()?.as_single_name_ref(),
+        _ => None,
+    }
+}
+
+/// Walks up from `name_ref` to the outermost `ast::Path` containing it.
+///
+/// Returns `None` when the `name_ref`'s parent is not a `PathSegment`
+/// (i.e. it is not part of a path at all).
+pub fn full_path_of_name_ref(name_ref: &ast::NameRef) -> Option<ast::Path> {
+    let mut ancestors = name_ref.syntax().ancestors();
+    let _ = ancestors.next()?; // skip the name_ref node itself
+    let _ = ancestors.next().filter(|it| ast::PathSegment::can_cast(it.kind()))?; // skip the enclosing segment
+    // Qualified paths nest (`a::b::c` is Path(Path(Path))), so keep the last
+    // (outermost) ancestor that is still a path.
+    ancestors.take_while(|it| ast::Path::can_cast(it.kind())).last().and_then(ast::Path::cast)
+}
+
+/// Returns the block's tail expression, but only when the block contains no
+/// other statements.
+pub fn block_as_lone_tail(block: &ast::BlockExpr) -> Option<ast::Expr> {
+    if block.statements().next().is_none() {
+        block.tail_expr()
+    } else {
+        None
+    }
+}
+
+/// Preorder walk all the expression's child expressions, invoking `cb` on
+/// each one (enter events only).
+pub fn walk_expr(expr: &ast::Expr, cb: &mut dyn FnMut(ast::Expr)) {
+    preorder_expr(expr, &mut |event| {
+        match event {
+            WalkEvent::Enter(it) => cb(it),
+            WalkEvent::Leave(_) => (),
+        }
+        // Never prune: visit every subtree the preorder offers.
+        false
+    })
+}
+
+/// Preorder walk all the expression's child expressions preserving events.
+/// If the callback returns true on an [`WalkEvent::Enter`], the subtree of the expression will be skipped.
+/// Note that the subtree may already be skipped due to the context analysis this function does.
+pub fn preorder_expr(start: &ast::Expr, cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool) {
+    let mut preorder = start.syntax().preorder();
+    while let Some(event) = preorder.next() {
+        let node = match event {
+            WalkEvent::Enter(node) => node,
+            // Leave events are forwarded to the callback (for expressions) but
+            // never trigger any skipping — only Enter can prune.
+            WalkEvent::Leave(node) => {
+                if let Some(expr) = ast::Expr::cast(node) {
+                    cb(WalkEvent::Leave(expr));
+                }
+                continue;
+            }
+        };
+        if let Some(let_stmt) = node.parent().and_then(ast::LetStmt::cast) {
+            // Only the initializer of a `let` is walked; anything else under
+            // the statement (the pattern side) is skipped.
+            if Some(node.clone()) != let_stmt.initializer().map(|it| it.syntax().clone()) {
+                // skipping potential const pat expressions in let statements
+                preorder.skip_subtree();
+                continue;
+            }
+        }
+
+        match ast::Stmt::cast(node.clone()) {
+            // Don't skip subtree since we want to process the expression child next
+            Some(ast::Stmt::ExprStmt(_)) | Some(ast::Stmt::LetStmt(_)) => (),
+            // skip inner items which might have their own expressions
+            Some(ast::Stmt::Item(_)) => preorder.skip_subtree(),
+            None => {
+                // skip const args, those expressions are a different context
+                if ast::GenericArg::can_cast(node.kind()) {
+                    preorder.skip_subtree();
+                } else if let Some(expr) = ast::Expr::cast(node) {
+                    // async/try/const blocks and closures form their own body
+                    // context; they are reported but not descended into —
+                    // unless the walk *started* at that expression.
+                    let is_different_context = match &expr {
+                        ast::Expr::BlockExpr(block_expr) => {
+                            matches!(
+                                block_expr.modifier(),
+                                Some(
+                                    ast::BlockModifier::Async(_)
+                                        | ast::BlockModifier::Try(_)
+                                        | ast::BlockModifier::Const(_)
+                                )
+                            )
+                        }
+                        ast::Expr::ClosureExpr(_) => true,
+                        _ => false,
+                    } && expr.syntax() != start.syntax();
+                    // The callback still sees the Enter event even when the
+                    // subtree is about to be skipped for context reasons.
+                    let skip = cb(WalkEvent::Enter(expr));
+                    if skip || is_different_context {
+                        preorder.skip_subtree();
+                    }
+                }
+            }
+        }
+    }
+}
+
+/// Preorder walk all the expression's child patterns.
+///
+/// `let` statements contribute their pattern (and the patterns of their
+/// initializer); inner items, const generic arguments, and expressions that
+/// form a different body context (async/try/const blocks, closures) are not
+/// descended into — mirroring `preorder_expr`.
+pub fn walk_patterns_in_expr(start: &ast::Expr, cb: &mut dyn FnMut(ast::Pat)) {
+    let mut preorder = start.syntax().preorder();
+    while let Some(event) = preorder.next() {
+        let node = match event {
+            WalkEvent::Enter(node) => node,
+            WalkEvent::Leave(_) => continue,
+        };
+        match ast::Stmt::cast(node.clone()) {
+            Some(ast::Stmt::LetStmt(l)) => {
+                // Report the let's own pattern, then recurse into the
+                // initializer for any patterns nested in sub-expressions.
+                if let Some(pat) = l.pat() {
+                    walk_pat(&pat, cb);
+                }
+                if let Some(expr) = l.initializer() {
+                    walk_patterns_in_expr(&expr, cb);
+                }
+                preorder.skip_subtree();
+            }
+            // Don't skip subtree since we want to process the expression child next
+            Some(ast::Stmt::ExprStmt(_)) => (),
+            // skip inner items which might have their own patterns
+            Some(ast::Stmt::Item(_)) => preorder.skip_subtree(),
+            None => {
+                // skip const args, those are a different context
+                if ast::GenericArg::can_cast(node.kind()) {
+                    preorder.skip_subtree();
+                } else if let Some(expr) = ast::Expr::cast(node.clone()) {
+                    let is_different_context = match &expr {
+                        ast::Expr::BlockExpr(block_expr) => {
+                            matches!(
+                                block_expr.modifier(),
+                                Some(
+                                    ast::BlockModifier::Async(_)
+                                        | ast::BlockModifier::Try(_)
+                                        | ast::BlockModifier::Const(_)
+                                )
+                            )
+                        }
+                        ast::Expr::ClosureExpr(_) => true,
+                        _ => false,
+                    } && expr.syntax() != start.syntax();
+                    if is_different_context {
+                        preorder.skip_subtree();
+                    }
+                } else if let Some(pat) = ast::Pat::cast(node) {
+                    // Hand the whole pattern to `walk_pat`; don't re-visit its
+                    // children in this loop.
+                    preorder.skip_subtree();
+                    walk_pat(&pat, cb);
+                }
+            }
+        }
+    }
+}
+
+/// Preorder walk all the pattern's sub patterns, invoking `cb` on each.
+pub fn walk_pat(pat: &ast::Pat, cb: &mut dyn FnMut(ast::Pat)) {
+    let mut preorder = pat.syntax().preorder();
+    while let Some(event) = preorder.next() {
+        let node = match event {
+            WalkEvent::Enter(it) => it,
+            WalkEvent::Leave(_) => continue,
+        };
+        let kind = node.kind();
+        if let Some(sub_pat) = ast::Pat::cast(node) {
+            // Const block patterns are opaque: report them but don't descend.
+            if matches!(sub_pat, ast::Pat::ConstBlockPat(_)) {
+                preorder.skip_subtree();
+            }
+            cb(sub_pat);
+        } else if ast::GenericArg::can_cast(kind) {
+            // Const generic arguments are a different context, skip entirely.
+            preorder.skip_subtree();
+        }
+    }
+}
+
+/// Preorder walk all the type's sub types, invoking `cb` on each.
+pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type)) {
+    let mut preorder = ty.syntax().preorder();
+    while let Some(event) = preorder.next() {
+        let node = match event {
+            WalkEvent::Enter(it) => it,
+            WalkEvent::Leave(_) => continue,
+        };
+        let kind = node.kind();
+        if let Some(sub_ty) = ast::Type::cast(node) {
+            // Macro types are opaque: report them but don't descend.
+            if matches!(sub_ty, ast::Type::MacroType(_)) {
+                preorder.skip_subtree();
+            }
+            cb(sub_ty);
+        } else if ast::ConstArg::can_cast(kind) {
+            // Const arguments are a different context, skip entirely.
+            preorder.skip_subtree();
+        }
+    }
+}
+
+/// Syntactic equality of two `Visibility` nodes.
+///
+/// `pub(in path)` visibilities compare their paths segment-wise
+/// (`crate`/`self`/`super` keywords by kind, names by text); the keyword
+/// forms (`pub`, `pub(crate)`, `pub(self)`, `pub(super)`) only equal the
+/// same form.
+pub fn vis_eq(this: &ast::Visibility, other: &ast::Visibility) -> bool {
+    match (this.kind(), other.kind()) {
+        (VisibilityKind::In(this), VisibilityKind::In(other)) => {
+            stdx::iter_eq_by(this.segments(), other.segments(), |lhs, rhs| {
+                // A segment with no resolvable kind makes the comparison fail.
+                lhs.kind().zip(rhs.kind()).map_or(false, |it| match it {
+                    (PathSegmentKind::CrateKw, PathSegmentKind::CrateKw)
+                    | (PathSegmentKind::SelfKw, PathSegmentKind::SelfKw)
+                    | (PathSegmentKind::SuperKw, PathSegmentKind::SuperKw) => true,
+                    (PathSegmentKind::Name(lhs), PathSegmentKind::Name(rhs)) => {
+                        lhs.text() == rhs.text()
+                    }
+                    _ => false,
+                })
+            })
+        }
+        (VisibilityKind::PubSelf, VisibilityKind::PubSelf)
+        | (VisibilityKind::PubSuper, VisibilityKind::PubSuper)
+        | (VisibilityKind::PubCrate, VisibilityKind::PubCrate)
+        | (VisibilityKind::Pub, VisibilityKind::Pub) => true,
+        _ => false,
+    }
+}
+
+/// Returns the `let` only if there is exactly one (that is, `let pat = expr`
+/// or `((let pat = expr))`, but not `let pat = expr && expr` or `non_let_expr`).
+pub fn single_let(expr: ast::Expr) -> Option<ast::LetExpr> {
+    match expr {
+        ast::Expr::LetExpr(let_expr) => Some(let_expr),
+        // Peel off any number of surrounding parentheses and retry.
+        ast::Expr::ParenExpr(paren) => single_let(paren.expr()?),
+        _ => None,
+    }
+}
+
+/// Returns whether `expr` is a condition containing a `let` pattern — a bare
+/// `let`, a parenthesized one, or the head of an `&&` chain.
+pub fn is_pattern_cond(expr: ast::Expr) -> bool {
+    match expr {
+        ast::Expr::BinExpr(expr)
+            if expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) =>
+        {
+            // NOTE(review): `or_else` only fires when `lhs()` returns `None`,
+            // so once a lhs exists the rhs is never inspected (e.g.
+            // `a && let p = e` yields `is_pattern_cond(a)`). Presumably
+            // intentional — confirm against let-chain handling in callers.
+            expr.lhs()
+                .map(is_pattern_cond)
+                .or_else(|| expr.rhs().map(is_pattern_cond))
+                .unwrap_or(false)
+        }
+        ast::Expr::ParenExpr(expr) => expr.expr().map_or(false, is_pattern_cond),
+        ast::Expr::LetExpr(_) => true,
+        _ => false,
+    }
+}
+
+/// Calls `cb` on each expression inside `expr` that is at "tail position".
+/// Does not walk into `break` or `return` expressions.
+/// Note that modifying the tree while iterating it will cause undefined iteration which might
+/// potentially result in an out of bounds panic.
+pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
+    match expr {
+        ast::Expr::BlockExpr(b) => {
+            match b.modifier() {
+                // async/try/const blocks are their own value: the block itself
+                // is the tail expression.
+                Some(
+                    ast::BlockModifier::Async(_)
+                    | ast::BlockModifier::Try(_)
+                    | ast::BlockModifier::Const(_),
+                ) => return cb(expr),
+
+                // Labeled blocks additionally "return" via matching breaks.
+                Some(ast::BlockModifier::Label(label)) => {
+                    for_each_break_expr(Some(label), b.stmt_list(), &mut |b| {
+                        cb(&ast::Expr::BreakExpr(b))
+                    });
+                }
+                Some(ast::BlockModifier::Unsafe(_)) => (),
+                None => (),
+            }
+            if let Some(stmt_list) = b.stmt_list() {
+                if let Some(e) = stmt_list.tail_expr() {
+                    for_each_tail_expr(&e, cb);
+                }
+            }
+        }
+        ast::Expr::IfExpr(if_) => {
+            // Iterate the else-if chain instead of recursing on each `if`.
+            let mut if_ = if_.clone();
+            loop {
+                if let Some(block) = if_.then_branch() {
+                    for_each_tail_expr(&ast::Expr::BlockExpr(block), cb);
+                }
+                match if_.else_branch() {
+                    Some(ast::ElseBranch::IfExpr(it)) => if_ = it,
+                    Some(ast::ElseBranch::Block(block)) => {
+                        for_each_tail_expr(&ast::Expr::BlockExpr(block), cb);
+                        break;
+                    }
+                    None => break,
+                }
+            }
+        }
+        // A loop's "tails" are the breaks that target it.
+        ast::Expr::LoopExpr(l) => {
+            for_each_break_expr(l.label(), l.loop_body().and_then(|it| it.stmt_list()), &mut |b| {
+                cb(&ast::Expr::BreakExpr(b))
+            })
+        }
+        ast::Expr::MatchExpr(m) => {
+            if let Some(arms) = m.match_arm_list() {
+                arms.arms().filter_map(|arm| arm.expr()).for_each(|e| for_each_tail_expr(&e, cb));
+            }
+        }
+        // Every other expression kind is itself the tail value.
+        ast::Expr::ArrayExpr(_)
+        | ast::Expr::AwaitExpr(_)
+        | ast::Expr::BinExpr(_)
+        | ast::Expr::BoxExpr(_)
+        | ast::Expr::BreakExpr(_)
+        | ast::Expr::CallExpr(_)
+        | ast::Expr::CastExpr(_)
+        | ast::Expr::ClosureExpr(_)
+        | ast::Expr::ContinueExpr(_)
+        | ast::Expr::FieldExpr(_)
+        | ast::Expr::ForExpr(_)
+        | ast::Expr::IndexExpr(_)
+        | ast::Expr::Literal(_)
+        | ast::Expr::MacroExpr(_)
+        | ast::Expr::MacroStmts(_)
+        | ast::Expr::MethodCallExpr(_)
+        | ast::Expr::ParenExpr(_)
+        | ast::Expr::PathExpr(_)
+        | ast::Expr::PrefixExpr(_)
+        | ast::Expr::RangeExpr(_)
+        | ast::Expr::RecordExpr(_)
+        | ast::Expr::RefExpr(_)
+        | ast::Expr::ReturnExpr(_)
+        | ast::Expr::TryExpr(_)
+        | ast::Expr::TupleExpr(_)
+        | ast::Expr::WhileExpr(_)
+        | ast::Expr::LetExpr(_)
+        | ast::Expr::UnderscoreExpr(_)
+        | ast::Expr::YieldExpr(_) => cb(expr),
+    }
+}
+
+/// Calls `cb` on every `break` and `continue` inside `body` that targets the
+/// construct identified by `label` — either an unlabelled break/continue at
+/// nesting depth 0, or one whose lifetime text matches the label's.
+pub fn for_each_break_and_continue_expr(
+    label: Option<ast::Label>,
+    body: Option<ast::StmtList>,
+    cb: &mut dyn FnMut(ast::Expr),
+) {
+    // Labels are compared by lifetime text, not node identity.
+    let label = label.and_then(|lbl| lbl.lifetime());
+    if let Some(b) = body {
+        let tree_depth_iterator = TreeWithDepthIterator::new(b);
+        for (expr, depth) in tree_depth_iterator {
+            match expr {
+                ast::Expr::BreakExpr(b)
+                    if (depth == 0 && b.lifetime().is_none())
+                        || eq_label_lt(&label, &b.lifetime()) =>
+                {
+                    cb(ast::Expr::BreakExpr(b));
+                }
+                ast::Expr::ContinueExpr(c)
+                    if (depth == 0 && c.lifetime().is_none())
+                        || eq_label_lt(&label, &c.lifetime()) =>
+                {
+                    cb(ast::Expr::ContinueExpr(c));
+                }
+                _ => (),
+            }
+        }
+    }
+}
+
+/// Calls `cb` on every `break` inside `body` that targets the construct
+/// identified by `label` (or an unlabelled break at nesting depth 0).
+fn for_each_break_expr(
+    label: Option<ast::Label>,
+    body: Option<ast::StmtList>,
+    cb: &mut dyn FnMut(ast::BreakExpr),
+) {
+    // Compare labels by lifetime text, not node identity.
+    let label = label.and_then(|lbl| lbl.lifetime());
+    let body = match body {
+        Some(it) => it,
+        None => return,
+    };
+    for (expr, depth) in TreeWithDepthIterator::new(body) {
+        if let ast::Expr::BreakExpr(brk) = expr {
+            let targets_us = (depth == 0 && brk.lifetime().is_none())
+                || eq_label_lt(&label, &brk.lifetime());
+            if targets_us {
+                cb(brk);
+            }
+        }
+    }
+}
+
+/// Textual equality of two optional lifetimes; `false` when either is absent.
+fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool {
+    match (lt1, lt2) {
+        (Some(a), Some(b)) => a.text() == b.text(),
+        _ => false,
+    }
+}
+
+/// Iterator over every `ast::Expr` in a `StmtList`, paired with its nesting
+/// depth counted in loops (`loop`/`while`/`for`) and labeled blocks.
+struct TreeWithDepthIterator {
+    preorder: Preorder<RustLanguage>,
+    // Current nesting depth; incremented on Enter and decremented on Leave of
+    // depth-relevant expressions.
+    depth: u32,
+}
+
+impl TreeWithDepthIterator {
+    fn new(body: ast::StmtList) -> Self {
+        let preorder = body.syntax().preorder();
+        Self { preorder, depth: 0 }
+    }
+}
+
+impl Iterator for TreeWithDepthIterator {
+    type Item = (ast::Expr, u32);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        // Advance to the next Enter/Leave event that is an expression.
+        while let Some(event) = self.preorder.find_map(|ev| match ev {
+            WalkEvent::Enter(it) => ast::Expr::cast(it).map(WalkEvent::Enter),
+            WalkEvent::Leave(it) => ast::Expr::cast(it).map(WalkEvent::Leave),
+        }) {
+            match event {
+                // Loop constructs increase the depth for everything inside.
+                WalkEvent::Enter(
+                    ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
+                ) => {
+                    self.depth += 1;
+                }
+                WalkEvent::Leave(
+                    ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
+                ) => {
+                    self.depth -= 1;
+                }
+                // Labeled blocks count as a nesting level too (`break 'label`).
+                WalkEvent::Enter(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
+                    self.depth += 1;
+                }
+                WalkEvent::Leave(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
+                    self.depth -= 1;
+                }
+                // Note: depth-relevant expressions themselves are never
+                // yielded; only other expressions are, with the current depth.
+                WalkEvent::Enter(expr) => return Some((expr, self.depth)),
+                _ => (),
+            }
+        }
+        None
+    }
+}
+
+/// Parses the input token tree as comma separated plain paths.
+///
+/// Stops early (silently dropping the rest) when a keyword token is seen, and
+/// excludes the token tree's closing `)` if present. Groups that fail to
+/// re-parse as a path expression are dropped from the result.
+pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
+    let r_paren = input.r_paren_token();
+    // Skip the opening delimiter, then take tokens until the parse must stop.
+    let tokens =
+        input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
+            // seeing a keyword means the attribute is unclosed so stop parsing here
+            Some(tok) if tok.kind().is_keyword() => None,
+            // don't include the right token tree parenthesis if it exists
+            tok @ Some(_) if tok == r_paren => None,
+            // only nodes that we can find are other TokenTrees, those are unexpected in this parse though
+            None => None,
+            Some(tok) => Some(tok),
+        });
+    // Split on `,` and re-parse each group's concatenated text as a path.
+    let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
+    let paths = input_expressions
+        .into_iter()
+        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|mut tokens| {
+            syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
+                ast::Expr::PathExpr(it) => it.path(),
+                _ => None,
+            })
+        })
+        .collect();
+    Some(paths)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
new file mode 100644
index 000000000..2f531ca0c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -0,0 +1,533 @@
+[
+ (
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(0),
+ },
+ },
+ [
+ FileSymbol {
+ name: "Alias",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: TYPE_ALIAS,
+ range: 397..417,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 402..407,
+ },
+ },
+ kind: TypeAlias,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "CONST",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: CONST,
+ range: 340..361,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 346..351,
+ },
+ },
+ kind: Const,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "CONST_WITH_INNER",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: CONST,
+ range: 520..592,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 526..542,
+ },
+ },
+ kind: Const,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Enum",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: ENUM,
+ range: 185..207,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 190..194,
+ },
+ },
+ kind: Enum,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Macro",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MACRO_DEF,
+ range: 153..168,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 159..164,
+ },
+ },
+ kind: Macro,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "STATIC",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STATIC,
+ range: 362..396,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 369..375,
+ },
+ },
+ kind: Static,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Struct",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 170..184,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 177..183,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "StructFromMacro",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ MacroFile(
+ MacroFile {
+ macro_call_id: MacroCallId(
+ 0,
+ ),
+ },
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 0..22,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 6..21,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "StructInFn",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 318..336,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 325..335,
+ },
+ },
+ kind: Struct,
+ container_name: Some(
+ "main",
+ ),
+ },
+ FileSymbol {
+ name: "StructInNamedConst",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 555..581,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 562..580,
+ },
+ },
+ kind: Struct,
+ container_name: Some(
+ "CONST_WITH_INNER",
+ ),
+ },
+ FileSymbol {
+ name: "StructInUnnamedConst",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 479..507,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 486..506,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Trait",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: TRAIT,
+ range: 261..300,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 267..272,
+ },
+ },
+ kind: Trait,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Union",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: UNION,
+ range: 208..222,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 214..219,
+ },
+ },
+ kind: Union,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "a_mod",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MODULE,
+ range: 419..457,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 423..428,
+ },
+ },
+ kind: Module,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "b_mod",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MODULE,
+ range: 594..604,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 598..603,
+ },
+ },
+ kind: Module,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "define_struct",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MACRO_RULES,
+ range: 51..131,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 64..77,
+ },
+ },
+ kind: Macro,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "impl_fn",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: FN,
+ range: 242..257,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 245..252,
+ },
+ },
+ kind: Function,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "macro_rules_macro",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MACRO_RULES,
+ range: 1..48,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 14..31,
+ },
+ },
+ kind: Macro,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "main",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: FN,
+ range: 302..338,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 305..309,
+ },
+ },
+ kind: Function,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "trait_fn",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: FN,
+ range: 279..298,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 282..290,
+ },
+ },
+ kind: Function,
+ container_name: Some(
+ "Trait",
+ ),
+ },
+ ],
+ ),
+ (
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(1),
+ },
+ },
+ [
+ FileSymbol {
+ name: "StructInModA",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 435..455,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 442..454,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ ],
+ ),
+ (
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(2),
+ },
+ },
+ [
+ FileSymbol {
+ name: "StructInModB",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 1,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 0..20,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 7..19,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ ],
+ ),
+]
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
new file mode 100644
index 000000000..5042f6d81
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
@@ -0,0 +1,284 @@
+//! Generates descriptor structures for unstable features from the Unstable Book.
+use std::{borrow::Cow, fs, path::Path};
+
+use itertools::Itertools;
+use stdx::format_to;
+use test_utils::project_root;
+use xshell::{cmd, Shell};
+
+/// This clones the rustc repo, and so is not worth keeping up-to-date
+/// automatically. We update manually by un-ignoring the test from time to time.
+#[test]
+#[ignore]
+fn sourcegen_lint_completions() {
+    let sh = &Shell::new().unwrap();
+
+    // Shallow-clone rust-lang/rust once; reused on subsequent runs.
+    let rust_repo = project_root().join("./target/rust");
+    if !rust_repo.exists() {
+        cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust {rust_repo}")
+            .run()
+            .unwrap();
+    }
+
+    // The generated file starts with the type definitions the tables below use.
+    let mut contents = String::from(
+        r"
+#[derive(Clone)]
+pub struct Lint {
+    pub label: &'static str,
+    pub description: &'static str,
+}
+pub struct LintGroup {
+    pub lint: Lint,
+    pub children: &'static [&'static str],
+}
+",
+    );
+
+    generate_lint_descriptor(sh, &mut contents);
+    contents.push('\n');
+
+    generate_feature_descriptor(&mut contents, &rust_repo.join("src/doc/unstable-book/src"));
+    contents.push('\n');
+
+    // Clippy lints come from the published lints.json, not from the repo.
+    let lints_json = project_root().join("./target/clippy_lints.json");
+    cmd!(
+        sh,
+        "curl https://rust-lang.github.io/rust-clippy/master/lints.json --output {lints_json}"
+    )
+    .run()
+    .unwrap();
+    generate_descriptor_clippy(&mut contents, &lints_json);
+
+    let contents = sourcegen::add_preamble("sourcegen_lints", sourcegen::reformat(contents));
+
+    // Fails the test (rather than silently diverging) when the checked-in
+    // file is out of date.
+    let destination = project_root().join("crates/ide_db/src/generated/lints.rs");
+    sourcegen::ensure_file_contents(destination.as_path(), &contents);
+}
+
+/// Runs `rustdoc -W help` and scrapes its table output into the
+/// `DEFAULT_LINTS`, `DEFAULT_LINT_GROUPS`, `RUSTDOC_LINTS` and
+/// `RUSTDOC_LINT_GROUPS` tables appended to `buf`.
+fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
+    // FIXME: rustdoc currently requires an input file for -Whelp cc https://github.com/rust-lang/rust/pull/88831
+    let file = project_root().join(file!());
+    let stdout = cmd!(sh, "rustdoc -W help {file}").read().unwrap();
+    // Locate the four sections of the help output by their header markers.
+    let start_lints = stdout.find("---- ------- -------").unwrap();
+    let start_lint_groups = stdout.find("---- ---------").unwrap();
+    let start_lints_rustdoc =
+        stdout.find("Lint checks provided by plugins loaded by this crate:").unwrap();
+    let start_lint_groups_rustdoc =
+        stdout.find("Lint groups provided by plugins loaded by this crate:").unwrap();
+
+    buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
+    buf.push('\n');
+
+    // Each lint row is "name  level  description"; the level column is dropped.
+    let lints = stdout[start_lints..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
+        let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
+        let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
+        (name.trim(), Cow::Borrowed(description.trim()), vec![])
+    });
+    // Each group row is "name  member, member, ..."; members become children.
+    let lint_groups =
+        stdout[start_lint_groups..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
+            let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+            (
+                name.trim(),
+                format!("lint group for: {}", lints.trim()).into(),
+                lints
+                    .split_ascii_whitespace()
+                    .map(|s| s.trim().trim_matches(',').replace('-', "_"))
+                    .collect(),
+            )
+        });
+
+    let lints = lints
+        .chain(lint_groups)
+        .sorted_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2))
+        .collect::<Vec<_>>();
+    for (name, description, ..) in &lints {
+        push_lint_completion(buf, &name.replace('-', "_"), description);
+    }
+    buf.push_str("];\n");
+    buf.push_str(r#"pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &["#);
+    for (name, description, children) in &lints {
+        if !children.is_empty() {
+            // HACK: warnings is emitted with a general description, not with its members
+            if name == &"warnings" {
+                push_lint_group(buf, name, description, &Vec::new());
+                continue;
+            }
+            push_lint_group(buf, &name.replace('-', "_"), description, children);
+        }
+    }
+    buf.push('\n');
+    buf.push_str("];\n");
+
+    // rustdoc
+
+    buf.push('\n');
+    buf.push_str(r#"pub const RUSTDOC_LINTS: &[Lint] = &["#);
+    buf.push('\n');
+
+    // Same scraping as above, but the rustdoc sections have two header lines.
+    let lints_rustdoc =
+        stdout[start_lints_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(|line| {
+            let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
+            let (_default_level, description) =
+                rest.trim().split_once(char::is_whitespace).unwrap();
+            (name.trim(), Cow::Borrowed(description.trim()), vec![])
+        });
+    let lint_groups_rustdoc =
+        stdout[start_lint_groups_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(
+            |line| {
+                let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+                (
+                    name.trim(),
+                    format!("lint group for: {}", lints.trim()).into(),
+                    lints
+                        .split_ascii_whitespace()
+                        .map(|s| s.trim().trim_matches(',').replace('-', "_"))
+                        .collect(),
+                )
+            },
+        );
+
+    let lints_rustdoc = lints_rustdoc
+        .chain(lint_groups_rustdoc)
+        .sorted_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2))
+        .collect::<Vec<_>>();
+
+    for (name, description, ..) in &lints_rustdoc {
+        push_lint_completion(buf, &name.replace('-', "_"), description)
+    }
+    buf.push_str("];\n");
+
+    buf.push_str(r#"pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &["#);
+    for (name, description, children) in &lints_rustdoc {
+        if !children.is_empty() {
+            push_lint_group(buf, &name.replace('-', "_"), description, children);
+        }
+    }
+    buf.push('\n');
+    buf.push_str("];\n");
+}
+
+/// Appends a `FEATURES` table built from every `.md` page of the Unstable
+/// Book's language-features and library-features directories.
+fn generate_feature_descriptor(buf: &mut String, src_dir: &Path) {
+    // Collect (feature ident, full page text) pairs from both feature books.
+    let mut features: Vec<(String, String)> = ["language-features", "library-features"]
+        .into_iter()
+        .flat_map(|dir| sourcegen::list_files(&src_dir.join(dir)))
+        // Get all `.md` files
+        .filter(|path| path.extension().map_or(false, |ext| ext == "md"))
+        .map(|path| {
+            let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace('-', "_");
+            let doc = fs::read_to_string(path).unwrap();
+            (feature_ident, doc)
+        })
+        .collect();
+    features.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
+
+    buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
+    for (feature_ident, doc) in features {
+        push_lint_completion(buf, &feature_ident, &doc);
+    }
+    buf.push('\n');
+    buf.push_str("];\n");
+}
+
+/// One clippy lint as scraped from `lints.json`.
+#[derive(Default)]
+struct ClippyLint {
+    /// The "What it does" section of the lint's docs (unescaped, trimmed).
+    help: String,
+    /// The lint name without the `clippy::` prefix.
+    id: String,
+}
+
+/// Undoes the escaping in clippy's `lints.json` docs fields: `\"` sequences
+/// are dropped, `\n` becomes a real newline, and `\r` is removed.
+fn unescape(s: &str) -> String {
+    let no_quotes = s.replace(r#"\""#, "");
+    let with_newlines = no_quotes.replace(r#"\n"#, "\n");
+    with_newlines.replace(r#"\r"#, "")
+}
+
+/// Scrapes clippy's `lints.json` line-by-line (no real JSON parsing) and
+/// appends the `CLIPPY_LINTS` and `CLIPPY_LINT_GROUPS` tables to `buf`.
+fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
+    let file_content = std::fs::read_to_string(path).unwrap();
+    let mut clippy_lints: Vec<ClippyLint> = Vec::new();
+    let mut clippy_groups: std::collections::BTreeMap<String, Vec<String>> = Default::default();
+
+    // Relies on the json field order: "id" first, then "group"/"docs" lines
+    // which attach to the most recently pushed lint.
+    for line in file_content.lines().map(|line| line.trim()) {
+        if let Some(line) = line.strip_prefix(r#""id": ""#) {
+            let clippy_lint = ClippyLint {
+                id: line.strip_suffix(r#"","#).expect("should be suffixed by comma").into(),
+                help: String::new(),
+            };
+            clippy_lints.push(clippy_lint)
+        } else if let Some(line) = line.strip_prefix(r#""group": ""#) {
+            if let Some(group) = line.strip_suffix("\",") {
+                clippy_groups
+                    .entry(group.to_owned())
+                    .or_default()
+                    .push(clippy_lints.last().unwrap().id.clone());
+            }
+        } else if let Some(line) = line.strip_prefix(r#""docs": ""#) {
+            let prefix_to_strip = r#" ### What it does"#;
+            let line = match line.strip_prefix(prefix_to_strip) {
+                Some(line) => line,
+                None => {
+                    eprintln!("unexpected clippy prefix for {}", clippy_lints.last().unwrap().id);
+                    continue;
+                }
+            };
+            // Only take the description, any more than this is a lot of additional data we would embed into the exe
+            // which seems unnecessary
+            let up_to = line.find(r#"###"#).expect("no second section found?");
+            let line = &line[..up_to];
+
+            let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist");
+            clippy_lint.help = unescape(line).trim().to_string();
+        }
+    }
+    clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
+
+    buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
+    buf.push('\n');
+    for clippy_lint in clippy_lints.into_iter() {
+        let lint_ident = format!("clippy::{}", clippy_lint.id);
+        let doc = clippy_lint.help;
+        push_lint_completion(buf, &lint_ident, &doc);
+    }
+    buf.push_str("];\n");
+
+    // Groups are a BTreeMap, so this output is already sorted by group name.
+    buf.push_str(r#"pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &["#);
+    for (id, children) in clippy_groups {
+        let children = children.iter().map(|id| format!("clippy::{}", id)).collect::<Vec<_>>();
+        if !children.is_empty() {
+            let lint_ident = format!("clippy::{}", id);
+            let description = format!("lint group for: {}", children.iter().join(", "));
+            push_lint_group(buf, &lint_ident, &description, &children);
+        }
+    }
+    buf.push('\n');
+    buf.push_str("];\n");
+}
+
+/// Appends a `Lint { .. }` literal for `label`/`description` to `buf`.
+///
+/// The description is embedded as an `r##"…"##` raw string so quotes and
+/// backslashes in lint docs survive verbatim; final layout is normalized
+/// later by `sourcegen::reformat`.
+fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
+    format_to!(
+        buf,
+        r###"    Lint {{
+        label: "{}",
+        description: r##"{}"##,
+    }},"###,
+        label,
+        description,
+    );
+}
+
+/// Appends a `LintGroup { .. }` literal: the group's own lint (via
+/// `push_lint_completion`) followed by its quoted member list.
+fn push_lint_group(buf: &mut String, label: &str, description: &str, children: &[String]) {
+    buf.push_str(
+        r###"    LintGroup {
+        lint:
+        "###,
+    );
+
+    push_lint_completion(buf, label, description);
+
+    // Render members as `&["a", "b", …]`.
+    let children = format!("&[{}]", children.iter().map(|it| format!("\"{}\"", it)).join(", "));
+    format_to!(
+        buf,
+        r###"
+        children: {},
+    }},"###,
+        children,
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
new file mode 100644
index 000000000..6a7ea7c19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
@@ -0,0 +1,273 @@
+//! Functionality for obtaining data related to traits from the DB.
+
+use crate::{defs::Definition, RootDatabase};
+use hir::{db::HirDatabase, AsAssocItem, Semantics};
+use rustc_hash::FxHashSet;
+use syntax::{ast, AstNode};
+
+/// Given the `impl` block, attempts to find the trait this `impl` corresponds to.
+pub fn resolve_target_trait(
+ sema: &Semantics<'_, RootDatabase>,
+ impl_def: &ast::Impl,
+) -> Option<hir::Trait> {
+ let ast_path =
+ impl_def.trait_().map(|it| it.syntax().clone()).and_then(ast::PathType::cast)?.path()?;
+
+ match sema.resolve_path(&ast_path) {
+ Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def),
+ _ => None,
+ }
+}
+
+/// Given the `impl` block, returns the list of associated items (e.g. functions or types) that are
+/// missing in this `impl` block.
+pub fn get_missing_assoc_items(
+ sema: &Semantics<'_, RootDatabase>,
+ impl_def: &ast::Impl,
+) -> Vec<hir::AssocItem> {
+ let imp = match sema.to_def(impl_def) {
+ Some(it) => it,
+ None => return vec![],
+ };
+
+ // Names must be unique between constants and functions. However, type aliases
+ // may share the same name as a function or constant.
+ let mut impl_fns_consts = FxHashSet::default();
+ let mut impl_type = FxHashSet::default();
+
+ for item in imp.items(sema.db) {
+ match item {
+ hir::AssocItem::Function(it) => {
+ impl_fns_consts.insert(it.name(sema.db).to_string());
+ }
+ hir::AssocItem::Const(it) => {
+ if let Some(name) = it.name(sema.db) {
+ impl_fns_consts.insert(name.to_string());
+ }
+ }
+ hir::AssocItem::TypeAlias(it) => {
+ impl_type.insert(it.name(sema.db).to_string());
+ }
+ }
+ }
+
+ resolve_target_trait(sema, impl_def).map_or(vec![], |target_trait| {
+ target_trait
+ .items(sema.db)
+ .into_iter()
+ .filter(|i| match i {
+ hir::AssocItem::Function(f) => {
+ !impl_fns_consts.contains(&f.name(sema.db).to_string())
+ }
+ hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(sema.db).to_string()),
+ hir::AssocItem::Const(c) => c
+ .name(sema.db)
+ .map(|n| !impl_fns_consts.contains(&n.to_string()))
+ .unwrap_or_default(),
+ })
+ .collect()
+ })
+}
+
+/// Converts associated trait impl items to their trait definition counterpart
+pub(crate) fn convert_to_def_in_trait(db: &dyn HirDatabase, def: Definition) -> Definition {
+ (|| {
+ let assoc = def.as_assoc_item(db)?;
+ let trait_ = assoc.containing_trait_impl(db)?;
+ assoc_item_of_trait(db, assoc, trait_)
+ })()
+ .unwrap_or(def)
+}
+
+/// If this is a trait (impl) assoc item, returns the assoc item of the corresponding trait definition.
+pub(crate) fn as_trait_assoc_def(db: &dyn HirDatabase, def: Definition) -> Option<Definition> {
+ let assoc = def.as_assoc_item(db)?;
+ let trait_ = match assoc.container(db) {
+ hir::AssocItemContainer::Trait(_) => return Some(def),
+ hir::AssocItemContainer::Impl(i) => i.trait_(db),
+ }?;
+ assoc_item_of_trait(db, assoc, trait_)
+}
+
+fn assoc_item_of_trait(
+ db: &dyn HirDatabase,
+ assoc: hir::AssocItem,
+ trait_: hir::Trait,
+) -> Option<Definition> {
+ use hir::AssocItem::*;
+ let name = match assoc {
+ Function(it) => it.name(db),
+ Const(it) => it.name(db)?,
+ TypeAlias(it) => it.name(db),
+ };
+ let item = trait_.items(db).into_iter().find(|it| match (it, assoc) {
+ (Function(trait_func), Function(_)) => trait_func.name(db) == name,
+ (Const(trait_konst), Const(_)) => trait_konst.name(db).map_or(false, |it| it == name),
+ (TypeAlias(trait_type_alias), TypeAlias(_)) => trait_type_alias.name(db) == name,
+ _ => false,
+ })?;
+ Some(Definition::from(item))
+}
+
+#[cfg(test)]
+mod tests {
+ use base_db::{fixture::ChangeFixture, FilePosition};
+ use expect_test::{expect, Expect};
+ use hir::Semantics;
+ use syntax::ast::{self, AstNode};
+
+ use crate::RootDatabase;
+
+ /// Creates analysis from a multi-file fixture, returns positions marked with $0.
+ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ let mut database = RootDatabase::default();
+ database.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) =
+ change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+ (database, FilePosition { file_id, offset })
+ }
+
+ fn check_trait(ra_fixture: &str, expect: Expect) {
+ let (db, position) = position(ra_fixture);
+ let sema = Semantics::new(&db);
+ let file = sema.parse(position.file_id);
+ let impl_block: ast::Impl =
+ sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
+ let trait_ = crate::traits::resolve_target_trait(&sema, &impl_block);
+ let actual = match trait_ {
+ Some(trait_) => trait_.name(&db).to_string(),
+ None => String::new(),
+ };
+ expect.assert_eq(&actual);
+ }
+
+ fn check_missing_assoc(ra_fixture: &str, expect: Expect) {
+ let (db, position) = position(ra_fixture);
+ let sema = Semantics::new(&db);
+ let file = sema.parse(position.file_id);
+ let impl_block: ast::Impl =
+ sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
+ let items = crate::traits::get_missing_assoc_items(&sema, &impl_block);
+ let actual = items
+ .into_iter()
+ .map(|item| item.name(&db).unwrap().to_string())
+ .collect::<Vec<_>>()
+ .join("\n");
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn resolve_trait() {
+ check_trait(
+ r#"
+pub trait Foo {
+ fn bar();
+}
+impl Foo for u8 {
+ $0
+}
+ "#,
+ expect![["Foo"]],
+ );
+ check_trait(
+ r#"
+pub trait Foo {
+ fn bar();
+}
+impl Foo for u8 {
+ fn bar() {
+ fn baz() {
+ $0
+ }
+ baz();
+ }
+}
+ "#,
+ expect![["Foo"]],
+ );
+ check_trait(
+ r#"
+pub trait Foo {
+ fn bar();
+}
+pub struct Bar;
+impl Bar {
+ $0
+}
+ "#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn missing_assoc_items() {
+ check_missing_assoc(
+ r#"
+pub trait Foo {
+ const FOO: u8;
+ fn bar();
+}
+impl Foo for u8 {
+ $0
+}"#,
+ expect![[r#"
+ FOO
+ bar"#]],
+ );
+
+ check_missing_assoc(
+ r#"
+pub trait Foo {
+ const FOO: u8;
+ fn bar();
+}
+impl Foo for u8 {
+ const FOO: u8 = 10;
+ $0
+}"#,
+ expect![[r#"
+ bar"#]],
+ );
+
+ check_missing_assoc(
+ r#"
+pub trait Foo {
+ const FOO: u8;
+ fn bar();
+}
+impl Foo for u8 {
+ const FOO: u8 = 10;
+ fn bar() {$0}
+}"#,
+ expect![[r#""#]],
+ );
+
+ check_missing_assoc(
+ r#"
+pub struct Foo;
+impl Foo {
+ fn bar() {$0}
+}"#,
+ expect![[r#""#]],
+ );
+
+ check_missing_assoc(
+ r#"
+trait Tr {
+ fn required();
+}
+macro_rules! m {
+ () => { fn required() {} };
+}
+impl Tr for () {
+ m!();
+ $0
+}
+
+ "#,
+ expect![[r#""#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs
new file mode 100644
index 000000000..46f47f258
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs
@@ -0,0 +1,86 @@
+//! This module contains structures for filtering the expected types.
+//! A use case for the structures in this module is, for example, a situation where you need to process
+//! only certain `Enum`s.
+
+use std::iter;
+
+use hir::Semantics;
+use syntax::ast::{self, make, Pat};
+
+use crate::RootDatabase;
+
+/// Enum types that implement `std::ops::Try` trait.
+#[derive(Clone, Copy)]
+pub enum TryEnum {
+ Result,
+ Option,
+}
+
+impl TryEnum {
+ const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result];
+
+ /// Returns `Some(..)` if the provided type is an enum that implements `std::ops::Try`.
+ pub fn from_ty(sema: &Semantics<'_, RootDatabase>, ty: &hir::Type) -> Option<TryEnum> {
+ let enum_ = match ty.as_adt() {
+ Some(hir::Adt::Enum(it)) => it,
+ _ => return None,
+ };
+ TryEnum::ALL.iter().find_map(|&var| {
+ if enum_.name(sema.db).to_smol_str() == var.type_name() {
+ return Some(var);
+ }
+ None
+ })
+ }
+
+ pub fn happy_case(self) -> &'static str {
+ match self {
+ TryEnum::Result => "Ok",
+ TryEnum::Option => "Some",
+ }
+ }
+
+ pub fn sad_pattern(self) -> ast::Pat {
+ match self {
+ TryEnum::Result => make::tuple_struct_pat(
+ make::ext::ident_path("Err"),
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
+ TryEnum::Option => make::ext::simple_ident_pat(make::name("None")).into(),
+ }
+ }
+
+ pub fn happy_pattern(self, pat: Pat) -> ast::Pat {
+ match self {
+ TryEnum::Result => {
+ make::tuple_struct_pat(make::ext::ident_path("Ok"), iter::once(pat)).into()
+ }
+ TryEnum::Option => {
+ make::tuple_struct_pat(make::ext::ident_path("Some"), iter::once(pat)).into()
+ }
+ }
+ }
+
+ pub fn happy_pattern_wildcard(self) -> ast::Pat {
+ match self {
+ TryEnum::Result => make::tuple_struct_pat(
+ make::ext::ident_path("Ok"),
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
+ TryEnum::Option => make::tuple_struct_pat(
+ make::ext::ident_path("Some"),
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
+ }
+ }
+
+ fn type_name(self) -> &'static str {
+ match self {
+ TryEnum::Result => "Result",
+ TryEnum::Option => "Option",
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs
new file mode 100644
index 000000000..39431bed3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs
@@ -0,0 +1,34 @@
+//! Functionality for generating trivial constructors
+
+use hir::StructKind;
+use syntax::ast;
+
+/// Given a type, return the trivial constructor (if one exists)
+pub fn use_trivial_constructor(
+ db: &crate::RootDatabase,
+ path: ast::Path,
+ ty: &hir::Type,
+) -> Option<ast::Expr> {
+ match ty.as_adt() {
+ Some(hir::Adt::Enum(x)) => {
+ if let &[variant] = &*x.variants(db) {
+ if variant.kind(db) == hir::StructKind::Unit {
+ let path = ast::make::path_qualified(
+ path,
+ syntax::ast::make::path_segment(ast::make::name_ref(
+ &variant.name(db).to_smol_str(),
+ )),
+ );
+
+ return Some(syntax::ast::make::expr_path(path));
+ }
+ }
+ }
+ Some(hir::Adt::Struct(x)) if x.kind(db) == StructKind::Unit => {
+ return Some(syntax::ast::make::expr_path(path));
+ }
+ _ => {}
+ }
+
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
new file mode 100644
index 000000000..e221425ed
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "ide-diagnostics"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.3"
+
+
+either = "1.7.0"
+
+profile = { path = "../profile", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
new file mode 100644
index 000000000..d12594a4c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
@@ -0,0 +1,30 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: break-outside-of-loop
+//
+// This diagnostic is triggered if the `break` keyword is used outside of a loop.
+pub(crate) fn break_outside_of_loop(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::BreakOutsideOfLoop,
+) -> Diagnostic {
+ Diagnostic::new(
+ "break-outside-of-loop",
+ "break outside of loop",
+ ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn break_outside_of_loop() {
+ check_diagnostics(
+ r#"
+fn foo() { break; }
+ //^^^^^ error: break outside of loop
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
new file mode 100644
index 000000000..2b7105362
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
@@ -0,0 +1,203 @@
+//! Suggests shortening `Foo { field: field }` to `Foo { field }` in both
+//! expressions and patterns.
+
+use ide_db::{base_db::FileId, source_change::SourceChange};
+use syntax::{ast, match_ast, AstNode, SyntaxNode};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, Severity};
+
+pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node: &SyntaxNode) {
+ match_ast! {
+ match node {
+ ast::RecordExpr(it) => check_expr_field_shorthand(acc, file_id, it),
+ ast::RecordPat(it) => check_pat_field_shorthand(acc, file_id, it),
+ _ => ()
+ }
+ };
+}
+
+fn check_expr_field_shorthand(
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+ record_expr: ast::RecordExpr,
+) {
+ let record_field_list = match record_expr.record_expr_field_list() {
+ Some(it) => it,
+ None => return,
+ };
+ for record_field in record_field_list.fields() {
+ let (name_ref, expr) = match record_field.name_ref().zip(record_field.expr()) {
+ Some(it) => it,
+ None => continue,
+ };
+
+ let field_name = name_ref.syntax().text().to_string();
+ let field_expr = expr.syntax().text().to_string();
+ let field_name_is_tup_index = name_ref.as_tuple_field().is_some();
+ if field_name != field_expr || field_name_is_tup_index {
+ continue;
+ }
+
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(record_field.syntax().text_range());
+ edit_builder.insert(record_field.syntax().text_range().start(), field_name);
+ let edit = edit_builder.finish();
+
+ let field_range = record_field.syntax().text_range();
+ acc.push(
+ Diagnostic::new("use-field-shorthand", "Shorthand struct initialization", field_range)
+ .severity(Severity::WeakWarning)
+ .with_fixes(Some(vec![fix(
+ "use_expr_field_shorthand",
+ "Use struct shorthand initialization",
+ SourceChange::from_text_edit(file_id, edit),
+ field_range,
+ )])),
+ );
+ }
+}
+
+fn check_pat_field_shorthand(
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+ record_pat: ast::RecordPat,
+) {
+ let record_pat_field_list = match record_pat.record_pat_field_list() {
+ Some(it) => it,
+ None => return,
+ };
+ for record_pat_field in record_pat_field_list.fields() {
+ let (name_ref, pat) = match record_pat_field.name_ref().zip(record_pat_field.pat()) {
+ Some(it) => it,
+ None => continue,
+ };
+
+ let field_name = name_ref.syntax().text().to_string();
+ let field_pat = pat.syntax().text().to_string();
+ let field_name_is_tup_index = name_ref.as_tuple_field().is_some();
+ if field_name != field_pat || field_name_is_tup_index {
+ continue;
+ }
+
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(record_pat_field.syntax().text_range());
+ edit_builder.insert(record_pat_field.syntax().text_range().start(), field_name);
+ let edit = edit_builder.finish();
+
+ let field_range = record_pat_field.syntax().text_range();
+ acc.push(
+ Diagnostic::new("use-field-shorthand", "Shorthand struct pattern", field_range)
+ .severity(Severity::WeakWarning)
+ .with_fixes(Some(vec![fix(
+ "use_pat_field_shorthand",
+ "Use struct field shorthand",
+ SourceChange::from_text_edit(file_id, edit),
+ field_range,
+ )])),
+ );
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn test_check_expr_field_shorthand() {
+ check_diagnostics(
+ r#"
+struct A { a: &'static str }
+fn main() { A { a: "hello" }; }
+"#,
+ );
+ check_diagnostics(
+ r#"
+struct A(usize);
+fn main() { A { 0: 0 }; }
+"#,
+ );
+
+ check_fix(
+ r#"
+struct A { a: &'static str }
+fn main() {
+ let a = "haha";
+ A { a$0: a };
+}
+"#,
+ r#"
+struct A { a: &'static str }
+fn main() {
+ let a = "haha";
+ A { a };
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+struct A { a: &'static str, b: &'static str }
+fn main() {
+ let a = "haha";
+ let b = "bb";
+ A { a$0: a, b };
+}
+"#,
+ r#"
+struct A { a: &'static str, b: &'static str }
+fn main() {
+ let a = "haha";
+ let b = "bb";
+ A { a, b };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_check_pat_field_shorthand() {
+ check_diagnostics(
+ r#"
+struct A { a: &'static str }
+fn f(a: A) { let A { a: hello } = a; }
+"#,
+ );
+ check_diagnostics(
+ r#"
+struct A(usize);
+fn f(a: A) { let A { 0: 0 } = a; }
+"#,
+ );
+
+ check_fix(
+ r#"
+struct A { a: &'static str }
+fn f(a: A) {
+ let A { a$0: a } = a;
+}
+"#,
+ r#"
+struct A { a: &'static str }
+fn f(a: A) {
+ let A { a } = a;
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+struct A { a: &'static str, b: &'static str }
+fn f(a: A) {
+ let A { a$0: a, b } = a;
+}
+"#,
+ r#"
+struct A { a: &'static str, b: &'static str }
+fn f(a: A) {
+ let A { a, b } = a;
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
new file mode 100644
index 000000000..97ea5c456
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -0,0 +1,144 @@
+use cfg::DnfExpr;
+use stdx::format_to;
+
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: inactive-code
+//
+// This diagnostic is shown for code with inactive `#[cfg]` attributes.
+pub(crate) fn inactive_code(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::InactiveCode,
+) -> Option<Diagnostic> {
+ // If there's inactive code somewhere in a macro, don't propagate to the call-site.
+ if d.node.file_id.is_macro() {
+ return None;
+ }
+
+ let inactive = DnfExpr::new(d.cfg.clone()).why_inactive(&d.opts);
+ let mut message = "code is inactive due to #[cfg] directives".to_string();
+
+ if let Some(inactive) = inactive {
+ let inactive_reasons = inactive.to_string();
+
+ if inactive_reasons.is_empty() {
+ format_to!(message);
+ } else {
+ format_to!(message, ": {}", inactive);
+ }
+ }
+
+ let res = Diagnostic::new(
+ "inactive-code",
+ message,
+ ctx.sema.diagnostics_display_range(d.node.clone()).range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_unused(true);
+ Some(res)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig};
+
+ pub(crate) fn check(ra_fixture: &str) {
+ let config = DiagnosticsConfig::default();
+ check_diagnostics_with_config(config, ra_fixture)
+ }
+
+ #[test]
+ fn cfg_diagnostics() {
+ check(
+ r#"
+fn f() {
+ // The three g̶e̶n̶d̶e̶r̶s̶ statements:
+
+ #[cfg(a)] fn f() {} // Item statement
+ //^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ #[cfg(a)] {} // Expression statement
+ //^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ #[cfg(a)] let x = 0; // let statement
+ //^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+
+ abc(#[cfg(a)] 0);
+ //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ let x = Struct {
+ #[cfg(a)] f: 0,
+ //^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ };
+ match () {
+ () => (),
+ #[cfg(a)] () => (),
+ //^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ }
+
+ #[cfg(a)] 0 // Trailing expression of block
+ //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn inactive_item() {
+ // Additional tests in `cfg` crate. This only tests disabled cfgs.
+
+ check(
+ r#"
+ #[cfg(no)] pub fn f() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
+
+ #[cfg(no)] #[cfg(no2)] mod m;
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled
+
+ #[cfg(all(not(a), b))] enum E {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled
+
+ #[cfg(feature = "std")] use std;
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: feature = "std" is disabled
+
+ #[cfg(any())] pub fn f() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
+"#,
+ );
+ }
+
+ #[test]
+ fn inactive_assoc_item() {
+ // FIXME these currently don't work, hence the *
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ #[cfg(any())] pub fn f() {}
+ //*************************** weak: code is inactive due to #[cfg] directives
+}
+
+trait Bar {
+ #[cfg(any())] pub fn f() {}
+ //*************************** weak: code is inactive due to #[cfg] directives
+}
+"#,
+ );
+ }
+
+    /// Tests that `cfg` attributes behind `cfg_attr` are handled properly.
+ #[test]
+ fn inactive_via_cfg_attr() {
+ cov_mark::check!(cfg_attr_active);
+ check(
+ r#"
+ #[cfg_attr(not(never), cfg(no))] fn f() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
+
+ #[cfg_attr(not(never), cfg(not(no)))] fn f() {}
+
+ #[cfg_attr(never, cfg(no))] fn g() {}
+
+ #[cfg_attr(not(never), inline, cfg(no))] fn h() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
new file mode 100644
index 000000000..6a78c08d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -0,0 +1,486 @@
+use hir::{db::AstDatabase, InFile};
+use ide_db::{assists::Assist, defs::NameClass};
+use syntax::AstNode;
+
+use crate::{
+ // references::rename::rename_with_semantics,
+ unresolved_fix,
+ Diagnostic,
+ DiagnosticsContext,
+ Severity,
+};
+
+// Diagnostic: incorrect-ident-case
+//
+// This diagnostic is triggered if an item name doesn't follow https://doc.rust-lang.org/1.0.0/style/style/naming/README.html[Rust naming convention].
+pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Diagnostic {
+ Diagnostic::new(
+ "incorrect-ident-case",
+ format!(
+ "{} `{}` should have {} name, e.g. `{}`",
+ d.ident_type, d.ident_text, d.expected_case, d.suggested_text
+ ),
+ ctx.sema.diagnostics_display_range(InFile::new(d.file, d.ident.clone().into())).range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.file)?;
+ let name_node = d.ident.to_node(&root);
+ let def = NameClass::classify(&ctx.sema, &name_node)?.defined()?;
+
+ let name_node = InFile::new(d.file, name_node.syntax());
+ let frange = name_node.original_file_range(ctx.sema.db);
+
+ let label = format!("Rename to {}", d.suggested_text);
+ let mut res = unresolved_fix("change_case", &label, frange.range);
+ if ctx.resolve.should_resolve(&res.id) {
+ let source_change = def.rename(&ctx.sema, &d.suggested_text);
+ res.source_change = Some(source_change.ok().unwrap_or_default());
+ }
+
+ Some(vec![res])
+}
+
+#[cfg(test)]
+mod change_case {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn test_rename_incorrect_case() {
+ check_fix(
+ r#"
+pub struct test_struct$0 { one: i32 }
+
+pub fn some_fn(val: test_struct) -> test_struct {
+ test_struct { one: val.one + 1 }
+}
+"#,
+ r#"
+pub struct TestStruct { one: i32 }
+
+pub fn some_fn(val: TestStruct) -> TestStruct {
+ TestStruct { one: val.one + 1 }
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+pub fn some_fn(NonSnakeCase$0: u8) -> u8 {
+ NonSnakeCase
+}
+"#,
+ r#"
+pub fn some_fn(non_snake_case: u8) -> u8 {
+ non_snake_case
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+pub fn SomeFn$0(val: u8) -> u8 {
+ if val != 0 { SomeFn(val - 1) } else { val }
+}
+"#,
+ r#"
+pub fn some_fn(val: u8) -> u8 {
+ if val != 0 { some_fn(val - 1) } else { val }
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+fn some_fn() {
+ let whatAWeird_Formatting$0 = 10;
+ another_func(whatAWeird_Formatting);
+}
+"#,
+ r#"
+fn some_fn() {
+ let what_aweird_formatting = 10;
+ another_func(what_aweird_formatting);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_uppercase_const_no_diagnostics() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ const ANOTHER_ITEM: &str = "some_item";
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_incorrect_case_struct_method() {
+ check_fix(
+ r#"
+pub struct TestStruct;
+
+impl TestStruct {
+ pub fn SomeFn$0() -> TestStruct {
+ TestStruct
+ }
+}
+"#,
+ r#"
+pub struct TestStruct;
+
+impl TestStruct {
+ pub fn some_fn() -> TestStruct {
+ TestStruct
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_single_incorrect_case_diagnostic_in_function_name_issue_6970() {
+ check_diagnostics(
+ r#"
+fn FOO() {}
+// ^^^ 💡 weak: Function `FOO` should have snake_case name, e.g. `foo`
+"#,
+ );
+ check_fix(r#"fn FOO$0() {}"#, r#"fn foo() {}"#);
+ }
+
+ #[test]
+ fn incorrect_function_name() {
+ check_diagnostics(
+ r#"
+fn NonSnakeCaseName() {}
+// ^^^^^^^^^^^^^^^^ 💡 weak: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_function_params() {
+ check_diagnostics(
+ r#"
+fn foo(SomeParam: u8) {}
+ // ^^^^^^^^^ 💡 weak: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
+
+fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
+ // ^^^^^^^^^^ 💡 weak: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_variable_names() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ let SOME_VALUE = 10;
+ // ^^^^^^^^^^ 💡 weak: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
+ let AnotherValue = 20;
+ // ^^^^^^^^^^^^ 💡 weak: Variable `AnotherValue` should have snake_case name, e.g. `another_value`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_struct_names() {
+ check_diagnostics(
+ r#"
+struct non_camel_case_name {}
+ // ^^^^^^^^^^^^^^^^^^^ 💡 weak: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
+
+struct SCREAMING_CASE {}
+ // ^^^^^^^^^^^^^^ 💡 weak: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
+"#,
+ );
+ }
+
+ #[test]
+ fn no_diagnostic_for_camel_cased_acronyms_in_struct_name() {
+ check_diagnostics(
+ r#"
+struct AABB {}
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_struct_field() {
+ check_diagnostics(
+ r#"
+struct SomeStruct { SomeField: u8 }
+ // ^^^^^^^^^ 💡 weak: Field `SomeField` should have snake_case name, e.g. `some_field`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_enum_names() {
+ check_diagnostics(
+ r#"
+enum some_enum { Val(u8) }
+ // ^^^^^^^^^ 💡 weak: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
+
+enum SOME_ENUM {}
+ // ^^^^^^^^^ 💡 weak: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
+"#,
+ );
+ }
+
+ #[test]
+ fn no_diagnostic_for_camel_cased_acronyms_in_enum_name() {
+ check_diagnostics(
+ r#"
+enum AABB {}
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_enum_variant_name() {
+ check_diagnostics(
+ r#"
+enum SomeEnum { SOME_VARIANT(u8) }
+ // ^^^^^^^^^^^^ 💡 weak: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_const_name() {
+ check_diagnostics(
+ r#"
+const some_weird_const: u8 = 10;
+ // ^^^^^^^^^^^^^^^^ 💡 weak: Constant `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_static_name() {
+ check_diagnostics(
+ r#"
+static some_weird_const: u8 = 10;
+ // ^^^^^^^^^^^^^^^^ 💡 weak: Static variable `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
+"#,
+ );
+ }
+
+ #[test]
+ fn fn_inside_impl_struct() {
+ check_diagnostics(
+ r#"
+struct someStruct;
+ // ^^^^^^^^^^ 💡 weak: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
+
+impl someStruct {
+ fn SomeFunc(&self) {
+ // ^^^^^^^^ 💡 weak: Function `SomeFunc` should have snake_case name, e.g. `some_func`
+ let WHY_VAR_IS_CAPS = 10;
+ // ^^^^^^^^^^^^^^^ 💡 weak: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_diagnostic_for_enum_varinats() {
+ check_diagnostics(
+ r#"
+enum Option { Some, None }
+
+fn main() {
+ match Option::None {
+ None => (),
+ Some => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn non_let_bind() {
+ check_diagnostics(
+ r#"
+enum Option { Some, None }
+
+fn main() {
+ match Option::None {
+ SOME_VAR @ None => (),
+ // ^^^^^^^^ 💡 weak: Variable `SOME_VAR` should have snake_case name, e.g. `some_var`
+ Some => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn allow_attributes_crate_attr() {
+ check_diagnostics(
+ r#"
+#![allow(non_snake_case)]
+#![allow(non_camel_case_types)]
+
+struct S {
+ fooBar: bool,
+}
+
+enum E {
+ fooBar,
+}
+
+mod F {
+ fn CheckItWorksWithCrateAttr(BAD_NAME_HI: u8) {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn complex_ignore() {
+ // FIXME: this should trigger errors for the second case.
+ check_diagnostics(
+ r#"
+trait T { fn a(); }
+struct U {}
+impl T for U {
+ fn a() {
+ #[allow(non_snake_case)]
+ trait __BitFlagsOk {
+ const HiImAlsoBad: u8 = 2;
+ fn Dirty(&self) -> bool { false }
+ }
+
+ trait __BitFlagsBad {
+ const HiImAlsoBad: u8 = 2;
+ fn Dirty(&self) -> bool { false }
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn infinite_loop_inner_items() {
+ check_diagnostics(
+ r#"
+fn qualify() {
+ mod foo {
+ use super::*;
+ }
+}
+ "#,
+ )
+ }
+
+ #[test] // Issue #8809.
+ fn parenthesized_parameter() {
+ check_diagnostics(r#"fn f((O): _) {}"#)
+ }
+
+ #[test]
+ fn ignores_extern_items() {
+ cov_mark::check!(extern_func_incorrect_case_ignored);
+ cov_mark::check!(extern_static_incorrect_case_ignored);
+ check_diagnostics(
+ r#"
+extern {
+ fn NonSnakeCaseName(SOME_VAR: u8) -> u8;
+ pub static SomeStatic: u8 = 10;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn ignores_extern_items_from_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ () => {
+ fn NonSnakeCaseName(SOME_VAR: u8) -> u8;
+ pub static SomeStatic: u8 = 10;
+ }
+}
+
+extern {
+ m!();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn bug_traits_arent_checked() {
+ // FIXME: Traits and functions in traits aren't currently checked by
+ // r-a, even though rustc will complain about them.
+ check_diagnostics(
+ r#"
+trait BAD_TRAIT {
+ fn BAD_FUNCTION();
+ fn BadFunction();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn allow_attributes() {
+ check_diagnostics(
+ r#"
+#[allow(non_snake_case)]
+fn NonSnakeCaseName(SOME_VAR: u8) -> u8{
+ // cov_flags generated output from elsewhere in this file
+ extern "C" {
+ #[no_mangle]
+ static lower_case: u8;
+ }
+
+ let OtherVar = SOME_VAR + 1;
+ OtherVar
+}
+
+#[allow(nonstandard_style)]
+mod CheckNonstandardStyle {
+ fn HiImABadFnName() {}
+}
+
+#[allow(bad_style)]
+mod CheckBadStyle {
+ fn HiImABadFnName() {}
+}
+
+mod F {
+ #![allow(non_snake_case)]
+ fn CheckItWorksWithModAttr(BAD_NAME_HI: u8) {}
+}
+
+#[allow(non_snake_case, non_camel_case_types)]
+pub struct some_type {
+ SOME_FIELD: u8,
+ SomeField: u16,
+}
+
+#[allow(non_upper_case_globals)]
+pub const some_const: u8 = 10;
+
+#[allow(non_upper_case_globals)]
+pub static SomeStatic: u8 = 10;
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
new file mode 100644
index 000000000..c779266bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -0,0 +1,38 @@
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: invalid-derive-target
+//
+// This diagnostic is shown when the derive attribute is used on an item other than a `struct`,
+// `enum` or `union`.
+pub(crate) fn invalid_derive_target(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::InvalidDeriveTarget,
+) -> Diagnostic {
+ let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
+
+ Diagnostic::new(
+ "invalid-derive-target",
+ "`derive` may only be applied to `struct`s, `enum`s and `union`s",
+ display_range,
+ )
+ .severity(Severity::Error)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn fails_on_function() {
+ check_diagnostics(
+ r#"
+//- minicore:derive
+mod __ {
+ #[derive()]
+ //^^^^^^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
+ fn main() {}
+}
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
new file mode 100644
index 000000000..d6a66dc15
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -0,0 +1,218 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: macro-error
+//
+// This diagnostic is shown for macro expansion errors.
+pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
+ // Use more accurate position if available.
+ let display_range = d
+ .precise_location
+ .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.node.clone()).range);
+
+ Diagnostic::new("macro-error", d.message.clone(), display_range).experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config},
+ DiagnosticsConfig,
+ };
+
+ #[test]
+ fn builtin_macro_fails_expansion() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+#[rustc_builtin_macro]
+macro_rules! compile_error { () => {} }
+
+ include!("doesntexist");
+//^^^^^^^ error: failed to load file `doesntexist`
+
+ compile_error!("compile_error macro works");
+//^^^^^^^^^^^^^ error: compile_error macro works
+ "#,
+ );
+ }
+
+ #[test]
+ fn eager_macro_concat() {
+ // FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic.
+ // See: https://github.com/rust-lang/rust-analyzer/issues/10300
+
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::{panic, concat};
+
+mod private {
+ pub use core::concat;
+}
+
+macro_rules! m {
+ () => {
+ panic!(concat!($crate::private::concat!("")));
+ };
+}
+
+fn f() {
+ m!();
+ //^^^^ error: unresolved macro `$crate::private::concat!`
+}
+
+//- /core.rs crate:core
+#[macro_export]
+#[rustc_builtin_macro]
+macro_rules! concat { () => {} }
+
+pub macro panic {
+ ($msg:expr) => (
+ $crate::panicking::panic_str($msg)
+ ),
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn include_macro_should_allow_empty_content() {
+ let mut config = DiagnosticsConfig::default();
+
+ // FIXME: This is a false-positive, the file is actually linked in via
+ // `include!` macro
+ config.disabled.insert("unlinked-file".to_string());
+
+ check_diagnostics_with_config(
+ config,
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+include!("foo/bar.rs");
+//- /foo/bar.rs
+// empty
+"#,
+ );
+ }
+
+ #[test]
+ fn good_out_dir_diagnostic() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+#[rustc_builtin_macro]
+macro_rules! env { () => {} }
+#[rustc_builtin_macro]
+macro_rules! concat { () => {} }
+
+ include!(concat!(env!("OUT_DIR"), "/out.rs"));
+//^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+"#,
+ );
+ }
+
+ #[test]
+ fn register_attr_and_tool() {
+ cov_mark::check!(register_attr);
+ cov_mark::check!(register_tool);
+ check_diagnostics(
+ r#"
+#![register_tool(tool)]
+#![register_attr(attr)]
+
+#[tool::path]
+#[attr]
+struct S;
+"#,
+ );
+ // NB: we don't currently emit diagnostics here
+ }
+
+ #[test]
+ fn macro_diag_builtin() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! env {}
+
+#[rustc_builtin_macro]
+macro_rules! include {}
+
+#[rustc_builtin_macro]
+macro_rules! compile_error {}
+
+#[rustc_builtin_macro]
+macro_rules! format_args { () => {} }
+
+fn main() {
+ // Test a handful of built-in (eager) macros:
+
+ include!(invalid);
+ //^^^^^^^ error: could not convert tokens
+ include!("does not exist");
+ //^^^^^^^ error: failed to load file `does not exist`
+
+ env!(invalid);
+ //^^^ error: could not convert tokens
+
+ env!("OUT_DIR");
+ //^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+
+ compile_error!("compile_error works");
+ //^^^^^^^^^^^^^ error: compile_error works
+
+ // Lazy:
+
+ format_args!();
+ //^^^^^^^^^^^ error: no rule matches input tokens
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn macro_rules_diag() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ () => {};
+}
+fn f() {
+ m!();
+
+ m!(hi);
+ //^ error: leftover tokens
+}
+ "#,
+ );
+ }
+ #[test]
+ fn dollar_crate_in_builtin_macro() {
+ check_diagnostics(
+ r#"
+#[macro_export]
+#[rustc_builtin_macro]
+macro_rules! format_args {}
+
+#[macro_export]
+macro_rules! arg { () => {} }
+
+#[macro_export]
+macro_rules! outer {
+ () => {
+ $crate::format_args!( "", $crate::arg!(1) )
+ };
+}
+
+fn f() {
+ outer!();
+} //^^^^^^^^ error: leftover tokens
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
new file mode 100644
index 000000000..cd48bdba0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -0,0 +1,37 @@
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: malformed-derive
+//
+// This diagnostic is shown when the derive attribute has invalid input.
+pub(crate) fn malformed_derive(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MalformedDerive,
+) -> Diagnostic {
+ let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
+
+ Diagnostic::new(
+ "malformed-derive",
+ "malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`",
+ display_range,
+ )
+ .severity(Severity::Error)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn invalid_input() {
+ check_diagnostics(
+ r#"
+//- minicore:derive
+mod __ {
+ #[derive = "aaaa"]
+ //^^^^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`
+ struct Foo;
+}
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
new file mode 100644
index 000000000..5f8b3e543
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -0,0 +1,334 @@
+use syntax::{
+ ast::{self, HasArgList},
+ AstNode, TextRange,
+};
+
+use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: mismatched-arg-count
+//
+// This diagnostic is triggered if a function is invoked with an incorrect amount of arguments.
+pub(crate) fn mismatched_arg_count(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MismatchedArgCount,
+) -> Diagnostic {
+ let s = if d.expected == 1 { "" } else { "s" };
+ let message = format!("expected {} argument{}, found {}", d.expected, s, d.found);
+ Diagnostic::new("mismatched-arg-count", message, invalid_args_range(ctx, d))
+}
+
+fn invalid_args_range(ctx: &DiagnosticsContext<'_>, d: &hir::MismatchedArgCount) -> TextRange {
+ adjusted_display_range::<ast::Expr>(ctx, d.call_expr.clone().map(|it| it.into()), &|expr| {
+ let arg_list = match expr {
+ ast::Expr::CallExpr(call) => call.arg_list()?,
+ ast::Expr::MethodCallExpr(call) => call.arg_list()?,
+ _ => return None,
+ };
+ if d.found < d.expected {
+ if d.found == 0 {
+ return Some(arg_list.syntax().text_range());
+ }
+ if let Some(r_paren) = arg_list.r_paren_token() {
+ return Some(r_paren.text_range());
+ }
+ }
+ if d.expected < d.found {
+ if d.expected == 0 {
+ return Some(arg_list.syntax().text_range());
+ }
+ let zip = arg_list.args().nth(d.expected).zip(arg_list.r_paren_token());
+ if let Some((arg, r_paren)) = zip {
+ return Some(arg.syntax().text_range().cover(r_paren.text_range()));
+ }
+ }
+
+ None
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn simple_free_fn_zero() {
+ check_diagnostics(
+ r#"
+fn zero() {}
+fn f() { zero(1); }
+ //^^^ error: expected 0 arguments, found 1
+"#,
+ );
+
+ check_diagnostics(
+ r#"
+fn zero() {}
+fn f() { zero(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_free_fn_one() {
+ check_diagnostics(
+ r#"
+fn one(arg: u8) {}
+fn f() { one(); }
+ //^^ error: expected 1 argument, found 0
+"#,
+ );
+
+ check_diagnostics(
+ r#"
+fn one(arg: u8) {}
+fn f() { one(1); }
+"#,
+ );
+ }
+
+ #[test]
+ fn method_as_fn() {
+ check_diagnostics(
+ r#"
+struct S;
+impl S { fn method(&self) {} }
+
+fn f() {
+ S::method();
+} //^^ error: expected 1 argument, found 0
+"#,
+ );
+
+ check_diagnostics(
+ r#"
+struct S;
+impl S { fn method(&self) {} }
+
+fn f() {
+ S::method(&S);
+ S.method();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_with_arg() {
+ check_diagnostics(
+ r#"
+struct S;
+impl S { fn method(&self, arg: u8) {} }
+
+ fn f() {
+ S.method();
+ } //^^ error: expected 1 argument, found 0
+ "#,
+ );
+
+ check_diagnostics(
+ r#"
+struct S;
+impl S { fn method(&self, arg: u8) {} }
+
+fn f() {
+ S::method(&S, 0);
+ S.method(1);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_unknown_receiver() {
+ // note: this is incorrect code, so there might be errors on this in the
+ // future, but we shouldn't emit an argument count diagnostic here
+ check_diagnostics(
+ r#"
+trait Foo { fn method(&self, arg: usize) {} }
+
+fn f() {
+ let x;
+ x.method();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_struct() {
+ check_diagnostics(
+ r#"
+struct Tup(u8, u16);
+fn f() {
+ Tup(0);
+} //^ error: expected 2 arguments, found 1
+"#,
+ )
+ }
+
+ #[test]
+ fn enum_variant() {
+ check_diagnostics(
+ r#"
+enum En { Variant(u8, u16), }
+fn f() {
+ En::Variant(0);
+} //^ error: expected 2 arguments, found 1
+"#,
+ )
+ }
+
+ #[test]
+ fn enum_variant_type_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! Type {
+ () => { u32 };
+}
+enum Foo {
+ Bar(Type![])
+}
+impl Foo {
+ fn new() {
+ Foo::Bar(0);
+ Foo::Bar(0, 1);
+ //^^ error: expected 1 argument, found 2
+ Foo::Bar();
+ //^^ error: expected 1 argument, found 0
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn varargs() {
+ check_diagnostics(
+ r#"
+extern "C" {
+ fn fixed(fixed: u8);
+ fn varargs(fixed: u8, ...);
+ fn varargs2(...);
+}
+
+fn f() {
+ unsafe {
+ fixed(0);
+ fixed(0, 1);
+ //^^ error: expected 1 argument, found 2
+ varargs(0);
+ varargs(0, 1);
+ varargs2();
+ varargs2(0);
+ varargs2(0, 1);
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn arg_count_lambda() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let f = |()| ();
+ f();
+ //^^ error: expected 1 argument, found 0
+ f(());
+ f((), ());
+ //^^^ error: expected 1 argument, found 2
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn cfgd_out_call_arguments() {
+ check_diagnostics(
+ r#"
+struct C(#[cfg(FALSE)] ());
+impl C {
+ fn new() -> Self {
+ Self(
+ #[cfg(FALSE)]
+ (),
+ )
+ }
+
+ fn method(&self) {}
+}
+
+fn main() {
+ C::new().method(#[cfg(FALSE)] 0);
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn cfgd_out_fn_params() {
+ check_diagnostics(
+ r#"
+fn foo(#[cfg(NEVER)] x: ()) {}
+
+struct S;
+
+impl S {
+ fn method(#[cfg(NEVER)] self) {}
+ fn method2(#[cfg(NEVER)] self, arg: u8) {}
+ fn method3(self, #[cfg(NEVER)] arg: u8) {}
+}
+
+extern "C" {
+ fn fixed(fixed: u8, #[cfg(NEVER)] ...);
+ fn varargs(#[cfg(not(NEVER))] ...);
+}
+
+fn main() {
+ foo();
+ S::method();
+ S::method2(0);
+ S::method3(S);
+ S.method3();
+ unsafe {
+ fixed(0);
+ varargs(1, 2, 3);
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn legacy_const_generics() {
+ check_diagnostics(
+ r#"
+#[rustc_legacy_const_generics(1, 3)]
+fn mixed<const N1: &'static str, const N2: bool>(
+ a: u8,
+ b: i8,
+) {}
+
+fn f() {
+ mixed(0, "", -1, true);
+ mixed::<"", true>(0, -1);
+}
+
+#[rustc_legacy_const_generics(1, 3)]
+fn b<const N1: u8, const N2: u8>(
+ a: u8,
+ b: u8,
+) {}
+
+fn g() {
+ b(0, 1, 2, 3);
+ b::<1, 3>(0, 2);
+
+ b(0, 1, 2);
+ //^ error: expected 4 arguments, found 3
+}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
new file mode 100644
index 000000000..edb1fc091
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -0,0 +1,837 @@
+use either::Either;
+use hir::{
+ db::{AstDatabase, HirDatabase},
+ known, AssocItem, HirDisplay, InFile, Type,
+};
+use ide_db::{
+ assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,
+ source_change::SourceChange, use_trivial_contructor::use_trivial_constructor, FxHashMap,
+};
+use stdx::format_to;
+use syntax::{
+ algo,
+ ast::{self, make},
+ AstNode, SyntaxNode, SyntaxNodePtr,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: missing-fields
+//
+// This diagnostic is triggered if record lacks some fields that exist in the corresponding structure.
+//
+// Example:
+//
+// ```rust
+// struct A { a: u8, b: u8 }
+//
+// let a = A { a: 10 };
+// ```
+pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Diagnostic {
+ let mut message = String::from("missing structure fields:\n");
+ for field in &d.missed_fields {
+ format_to!(message, "- {}\n", field);
+ }
+
+ let ptr = InFile::new(
+ d.file,
+ d.field_list_parent_path
+ .clone()
+ .map(SyntaxNodePtr::from)
+ .unwrap_or_else(|| d.field_list_parent.clone().either(|it| it.into(), |it| it.into())),
+ );
+
+ Diagnostic::new("missing-fields", message, ctx.sema.diagnostics_display_range(ptr).range)
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Assist>> {
+ // Note that although we could add a diagnostics to
+ // fill the missing tuple field, e.g :
+ // `struct A(usize);`
+ // `let a = A { 0: () }`
+ // but it is uncommon usage and it should not be encouraged.
+ if d.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) {
+ return None;
+ }
+
+ let root = ctx.sema.db.parse_or_expand(d.file)?;
+
+ let current_module = match &d.field_list_parent {
+ Either::Left(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
+ Either::Right(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
+ };
+
+ let build_text_edit = |parent_syntax, new_syntax: &SyntaxNode, old_syntax| {
+ let edit = {
+ let mut builder = TextEdit::builder();
+ if d.file.is_macro() {
+ // we can't map the diff up into the macro input unfortunately, as the macro loses all
+ // whitespace information so the diff wouldn't be applicable no matter what
+ // This has the downside that the cursor will be moved in macros by doing it without a diff
+ // but that is a trade off we can make.
+ // FIXME: this also currently discards a lot of whitespace in the input... we really need a formatter here
+ let range = ctx.sema.original_range_opt(old_syntax)?;
+ builder.replace(range.range, new_syntax.to_string());
+ } else {
+ algo::diff(old_syntax, new_syntax).into_text_edit(&mut builder);
+ }
+ builder.finish()
+ };
+ Some(vec![fix(
+ "fill_missing_fields",
+ "Fill struct fields",
+ SourceChange::from_text_edit(d.file.original_file(ctx.sema.db), edit),
+ ctx.sema.original_range(parent_syntax).range,
+ )])
+ };
+
+ match &d.field_list_parent {
+ Either::Left(record_expr) => {
+ let field_list_parent = record_expr.to_node(&root);
+ let missing_fields = ctx.sema.record_literal_missing_fields(&field_list_parent);
+
+ let mut locals = FxHashMap::default();
+ ctx.sema.scope(field_list_parent.syntax())?.process_all_names(&mut |name, def| {
+ if let hir::ScopeDef::Local(local) = def {
+ locals.insert(name, local);
+ }
+ });
+
+ let generate_fill_expr = |ty: &Type| match ctx.config.expr_fill_default {
+ crate::ExprFillDefaultMode::Todo => make::ext::expr_todo(),
+ crate::ExprFillDefaultMode::Default => {
+ get_default_constructor(ctx, d, ty).unwrap_or_else(|| make::ext::expr_todo())
+ }
+ };
+
+ let old_field_list = field_list_parent.record_expr_field_list()?;
+ let new_field_list = old_field_list.clone_for_update();
+ for (f, ty) in missing_fields.iter() {
+ let field_expr = if let Some(local_candidate) = locals.get(&f.name(ctx.sema.db)) {
+ cov_mark::hit!(field_shorthand);
+ let candidate_ty = local_candidate.ty(ctx.sema.db);
+ if ty.could_unify_with(ctx.sema.db, &candidate_ty) {
+ None
+ } else {
+ Some(generate_fill_expr(ty))
+ }
+ } else {
+ let expr = (|| -> Option<ast::Expr> {
+ let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
+
+ let type_path = current_module?.find_use_path(
+ ctx.sema.db,
+ item_for_path_search(ctx.sema.db, item_in_ns)?,
+ )?;
+
+ use_trivial_constructor(
+ &ctx.sema.db,
+ ide_db::helpers::mod_path_to_ast(&type_path),
+ &ty,
+ )
+ })();
+
+ if expr.is_some() {
+ expr
+ } else {
+ Some(generate_fill_expr(ty))
+ }
+ };
+ let field = make::record_expr_field(
+ make::name_ref(&f.name(ctx.sema.db).to_smol_str()),
+ field_expr,
+ );
+ new_field_list.add_field(field.clone_for_update());
+ }
+ build_text_edit(
+ field_list_parent.syntax(),
+ new_field_list.syntax(),
+ old_field_list.syntax(),
+ )
+ }
+ Either::Right(record_pat) => {
+ let field_list_parent = record_pat.to_node(&root);
+ let missing_fields = ctx.sema.record_pattern_missing_fields(&field_list_parent);
+
+ let old_field_list = field_list_parent.record_pat_field_list()?;
+ let new_field_list = old_field_list.clone_for_update();
+ for (f, _) in missing_fields.iter() {
+ let field = make::record_pat_field_shorthand(make::name_ref(
+ &f.name(ctx.sema.db).to_smol_str(),
+ ));
+ new_field_list.add_field(field.clone_for_update());
+ }
+ build_text_edit(
+ field_list_parent.syntax(),
+ new_field_list.syntax(),
+ old_field_list.syntax(),
+ )
+ }
+ }
+}
+
+fn make_ty(ty: &hir::Type, db: &dyn HirDatabase, module: hir::Module) -> ast::Type {
+ let ty_str = match ty.as_adt() {
+ Some(adt) => adt.name(db).to_string(),
+ None => ty.display_source_code(db, module.into()).ok().unwrap_or_else(|| "_".to_string()),
+ };
+
+ make::ty(&ty_str)
+}
+
+fn get_default_constructor(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MissingFields,
+ ty: &Type,
+) -> Option<ast::Expr> {
+ if let Some(builtin_ty) = ty.as_builtin() {
+ if builtin_ty.is_int() || builtin_ty.is_uint() {
+ return Some(make::ext::zero_number());
+ }
+ if builtin_ty.is_float() {
+ return Some(make::ext::zero_float());
+ }
+ if builtin_ty.is_char() {
+ return Some(make::ext::empty_char());
+ }
+ if builtin_ty.is_str() {
+ return Some(make::ext::empty_str());
+ }
+ if builtin_ty.is_bool() {
+ return Some(make::ext::default_bool());
+ }
+ }
+
+ let krate = ctx.sema.to_module_def(d.file.original_file(ctx.sema.db))?.krate();
+ let module = krate.root_module(ctx.sema.db);
+
+ // Look for a ::new() associated function
+ let has_new_func = ty
+ .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| {
+ if let AssocItem::Function(func) = assoc_item {
+ if func.name(ctx.sema.db) == known::new
+ && func.assoc_fn_params(ctx.sema.db).is_empty()
+ {
+ return Some(());
+ }
+ }
+
+ None
+ })
+ .is_some();
+
+ let famous_defs = FamousDefs(&ctx.sema, krate);
+ if has_new_func {
+ Some(make::ext::expr_ty_new(&make_ty(ty, ctx.sema.db, module)))
+ } else if ty.as_adt() == famous_defs.core_option_Option()?.ty(ctx.sema.db).as_adt() {
+ Some(make::ext::option_none())
+ } else if !ty.is_array()
+ && ty.impls_trait(ctx.sema.db, famous_defs.core_default_Default()?, &[])
+ {
+ Some(make::ext::expr_ty_default(&make_ty(ty, ctx.sema.db, module)))
+ } else {
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn missing_record_pat_field_diagnostic() {
+ check_diagnostics(
+ r#"
+struct S { foo: i32, bar: () }
+fn baz(s: S) {
+ let S { foo: _ } = s;
+ //^ 💡 error: missing structure fields:
+ //| - bar
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() {
+ check_diagnostics(
+ r"
+struct S { foo: i32, bar: () }
+fn baz(s: S) -> i32 {
+ match s {
+ S { foo, .. } => foo,
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn missing_record_pat_field_box() {
+ check_diagnostics(
+ r"
+struct S { s: Box<u32> }
+fn x(a: S) {
+ let S { box s } = a;
+}
+",
+ )
+ }
+
+ #[test]
+ fn missing_record_pat_field_ref() {
+ check_diagnostics(
+ r"
+struct S { s: u32 }
+fn x(a: S) {
+ let S { ref s } = a;
+}
+",
+ )
+ }
+
+ #[test]
+ fn missing_record_expr_in_assignee_expr() {
+ check_diagnostics(
+ r"
+struct S { s: usize, t: usize }
+struct S2 { s: S, t: () }
+struct T(S);
+fn regular(a: S) {
+ let s;
+ S { s, .. } = a;
+}
+fn nested(a: S2) {
+ let s;
+ S2 { s: S { s, .. }, .. } = a;
+}
+fn in_tuple(a: (S,)) {
+ let s;
+ (S { s, .. },) = a;
+}
+fn in_array(a: [S;1]) {
+ let s;
+ [S { s, .. },] = a;
+}
+fn in_tuple_struct(a: T) {
+ let s;
+ T(S { s, .. }) = a;
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn range_mapping_out_of_macros() {
+ check_fix(
+ r#"
+fn some() {}
+fn items() {}
+fn here() {}
+
+macro_rules! id { ($($tt:tt)*) => { $($tt)*}; }
+
+fn main() {
+ let _x = id![Foo { a: $042 }];
+}
+
+pub struct Foo { pub a: i32, pub b: i32 }
+"#,
+ r#"
+fn some() {}
+fn items() {}
+fn here() {}
+
+macro_rules! id { ($($tt:tt)*) => { $($tt)*}; }
+
+fn main() {
+ let _x = id![Foo {a:42, b: 0 }];
+}
+
+pub struct Foo { pub a: i32, pub b: i32 }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_empty() {
+ check_fix(
+ r#"
+//- minicore: option
+struct TestStruct { one: i32, two: i64, three: Option<i32>, four: bool }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+struct TestStruct { one: i32, two: i64, three: Option<i32>, four: bool }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: 0, three: None, four: false };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_zst_fields() {
+ check_fix(
+ r#"
+struct Empty;
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+struct Empty;
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: Empty };
+}
+"#,
+ );
+ check_fix(
+ r#"
+enum Empty { Foo };
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+enum Empty { Foo };
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: Empty::Foo };
+}
+"#,
+ );
+
+ // make sure the assist doesn't fill non Unit variants
+ check_fix(
+ r#"
+struct Empty {};
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+struct Empty {};
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: todo!() };
+}
+"#,
+ );
+ check_fix(
+ r#"
+enum Empty { Foo {} };
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+enum Empty { Foo {} };
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: todo!() };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_self() {
+ check_fix(
+ r#"
+struct TestStruct { one: i32 }
+
+impl TestStruct {
+ fn test_fn() { let s = Self {$0}; }
+}
+"#,
+ r#"
+struct TestStruct { one: i32 }
+
+impl TestStruct {
+ fn test_fn() { let s = Self { one: 0 }; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_enum() {
+ check_fix(
+ r#"
+enum Expr {
+ Bin { lhs: Box<Expr>, rhs: Box<Expr> }
+}
+
+impl Expr {
+ fn new_bin(lhs: Box<Expr>, rhs: Box<Expr>) -> Expr {
+ Expr::Bin {$0 }
+ }
+}
+"#,
+ r#"
+enum Expr {
+ Bin { lhs: Box<Expr>, rhs: Box<Expr> }
+}
+
+impl Expr {
+ fn new_bin(lhs: Box<Expr>, rhs: Box<Expr>) -> Expr {
+ Expr::Bin { lhs, rhs }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_partial() {
+ check_fix(
+ r#"
+struct TestStruct { one: i32, two: i64 }
+
+fn test_fn() {
+ let s = TestStruct{ two: 2$0 };
+}
+"#,
+ r"
+struct TestStruct { one: i32, two: i64 }
+
+fn test_fn() {
+ let s = TestStruct{ two: 2, one: 0 };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_new() {
+ check_fix(
+ r#"
+struct TestWithNew(usize);
+impl TestWithNew {
+ pub fn new() -> Self {
+ Self(0)
+ }
+}
+struct TestStruct { one: i32, two: TestWithNew }
+
+fn test_fn() {
+ let s = TestStruct{ $0 };
+}
+"#,
+ r"
+struct TestWithNew(usize);
+impl TestWithNew {
+ pub fn new() -> Self {
+ Self(0)
+ }
+}
+struct TestStruct { one: i32, two: TestWithNew }
+
+fn test_fn() {
+ let s = TestStruct{ one: 0, two: TestWithNew::new() };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_default() {
+ check_fix(
+ r#"
+//- minicore: default, option
+struct TestWithDefault(usize);
+impl Default for TestWithDefault {
+ pub fn default() -> Self {
+ Self(0)
+ }
+}
+struct TestStruct { one: i32, two: TestWithDefault }
+
+fn test_fn() {
+ let s = TestStruct{ $0 };
+}
+"#,
+ r"
+struct TestWithDefault(usize);
+impl Default for TestWithDefault {
+ pub fn default() -> Self {
+ Self(0)
+ }
+}
+struct TestStruct { one: i32, two: TestWithDefault }
+
+fn test_fn() {
+ let s = TestStruct{ one: 0, two: TestWithDefault::default() };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_raw_ident() {
+ check_fix(
+ r#"
+struct TestStruct { r#type: u8 }
+
+fn test_fn() {
+ TestStruct { $0 };
+}
+"#,
+ r"
+struct TestStruct { r#type: u8 }
+
+fn test_fn() {
+ TestStruct { r#type: 0 };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_no_diagnostic() {
+ check_diagnostics(
+ r#"
+struct TestStruct { one: i32, two: i64 }
+
+fn test_fn() {
+ let one = 1;
+ let s = TestStruct{ one, two: 2 };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_no_diagnostic_on_spread() {
+ check_diagnostics(
+ r#"
+struct TestStruct { one: i32, two: i64 }
+
+fn test_fn() {
+ let one = 1;
+ let s = TestStruct{ ..a };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_blank_line() {
+ check_fix(
+ r#"
+struct S { a: (), b: () }
+
+fn f() {
+ S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S { a: (), b: () }
+
+fn f() {
+ S {
+ a: todo!(),
+ b: todo!(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_shorthand() {
+ cov_mark::check!(field_shorthand);
+ check_fix(
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let a = "hello";
+ let b = 1i32;
+ S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let a = "hello";
+ let b = 1i32;
+ S {
+ a,
+ b,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_shorthand_ty_mismatch() {
+ check_fix(
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let a = "hello";
+ let b = 1usize;
+ S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let a = "hello";
+ let b = 1usize;
+ S {
+ a,
+ b: 0,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_shorthand_unifies() {
+ check_fix(
+ r#"
+struct S<T> { a: &'static str, b: T }
+
+fn f() {
+ let a = "hello";
+ let b = 1i32;
+ S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S<T> { a: &'static str, b: T }
+
+fn f() {
+ let a = "hello";
+ let b = 1i32;
+ S {
+ a,
+ b,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_pat_fields() {
+ check_fix(
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let S {
+ a,
+ b,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_pat_fields_partial() {
+ check_fix(
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let S {
+ a,$0
+ };
+}
+"#,
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let S {
+ a,
+ b,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn import_extern_crate_clash_with_inner_item() {
+ // This is more of a resolver test, but doesn't really work with the hir_def testsuite.
+
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:lib deps:jwt
+mod permissions;
+
+use permissions::jwt;
+
+fn f() {
+ fn inner() {}
+ jwt::Claims {}; // should resolve to the local one with 0 fields, and not get a diagnostic
+}
+
+//- /permissions.rs
+pub mod jwt {
+ pub struct Claims {}
+}
+
+//- /jwt/lib.rs crate:jwt
+pub struct Claims {
+ field: u8,
+}
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
new file mode 100644
index 000000000..9e66fbfb7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -0,0 +1,1012 @@
+use hir::InFile;
+
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: missing-match-arm
+//
+// This diagnostic is triggered if `match` block is missing one or more match arms.
+pub(crate) fn missing_match_arms(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MissingMatchArms,
+) -> Diagnostic {
+ Diagnostic::new(
+ "missing-match-arm",
+ format!("missing match arm: {}", d.uncovered_patterns),
+ ctx.sema.diagnostics_display_range(InFile::new(d.file, d.match_expr.clone().into())).range,
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ fn check_diagnostics_no_bails(ra_fixture: &str) {
+ cov_mark::check_count!(validate_match_bailed_out, 0);
+ crate::tests::check_diagnostics(ra_fixture)
+ }
+
+ #[test]
+ fn empty_tuple() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match () { }
+ //^^ error: missing match arm: type `()` is non-empty
+ match (()) { }
+ //^^^^ error: missing match arm: type `()` is non-empty
+
+ match () { _ => (), }
+ match () { () => (), }
+ match (()) { (()) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_two_empty_tuple() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match ((), ()) { }
+ //^^^^^^^^ error: missing match arm: type `((), ())` is non-empty
+
+ match ((), ()) { ((), ()) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn boolean() {
+ check_diagnostics_no_bails(
+ r#"
+fn test_main() {
+ match false { }
+ //^^^^^ error: missing match arm: type `bool` is non-empty
+ match false { true => (), }
+ //^^^^^ error: missing match arm: `false` not covered
+ match (false, true) {}
+ //^^^^^^^^^^^^^ error: missing match arm: type `(bool, bool)` is non-empty
+ match (false, true) { (true, true) => (), }
+ //^^^^^^^^^^^^^ error: missing match arm: `(false, _)` not covered
+ match (false, true) {
+ //^^^^^^^^^^^^^ error: missing match arm: `(true, true)` not covered
+ (false, true) => (),
+ (false, false) => (),
+ (true, false) => (),
+ }
+ match (false, true) { (true, _x) => (), }
+ //^^^^^^^^^^^^^ error: missing match arm: `(false, _)` not covered
+
+ match false { true => (), false => (), }
+ match (false, true) {
+ (false, _) => (),
+ (true, false) => (),
+ (_, true) => (),
+ }
+ match (false, true) {
+ (true, true) => (),
+ (true, false) => (),
+ (false, true) => (),
+ (false, false) => (),
+ }
+ match (false, true) {
+ (true, _x) => (),
+ (false, true) => (),
+ (false, false) => (),
+ }
+ match (false, true, false) {
+ (false, ..) => (),
+ (true, ..) => (),
+ }
+ match (false, true, false) {
+ (.., false) => (),
+ (.., true) => (),
+ }
+ match (false, true, false) { (..) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_tuple_and_bools() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match (false, ((), false)) {}
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: type `(bool, ((), bool))` is non-empty
+ match (false, ((), false)) { (true, ((), true)) => (), }
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(false, _)` not covered
+ match (false, ((), false)) { (true, _) => (), }
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(false, _)` not covered
+
+ match (false, ((), false)) {
+ (true, ((), true)) => (),
+ (true, ((), false)) => (),
+ (false, ((), true)) => (),
+ (false, ((), false)) => (),
+ }
+ match (false, ((), false)) {
+ (true, ((), true)) => (),
+ (true, ((), false)) => (),
+ (false, _) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enums() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A, B, }
+
+fn main() {
+ match Either::A { }
+ //^^^^^^^^^ error: missing match arm: `A` and `B` not covered
+ match Either::B { Either::A => (), }
+ //^^^^^^^^^ error: missing match arm: `B` not covered
+
+ match &Either::B {
+ //^^^^^^^^^^ error: missing match arm: `&B` not covered
+ Either::A => (),
+ }
+
+ match Either::B {
+ Either::A => (), Either::B => (),
+ }
+ match &Either::B {
+ Either::A => (), Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_containing_bool() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A(bool), B }
+
+fn main() {
+ match Either::B { }
+ //^^^^^^^^^ error: missing match arm: `A(_)` and `B` not covered
+ match Either::B {
+ //^^^^^^^^^ error: missing match arm: `A(false)` not covered
+ Either::A(true) => (), Either::B => ()
+ }
+
+ match Either::B {
+ Either::A(true) => (),
+ Either::A(false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ Either::B => (),
+ _ => (),
+ }
+ match Either::B {
+ Either::A(_) => (),
+ Either::B => (),
+ }
+
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn enum_different_sizes() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A(bool), B(bool, bool) }
+
+fn main() {
+ match Either::A(false) {
+ //^^^^^^^^^^^^^^^^ error: missing match arm: `B(true, _)` not covered
+ Either::A(_) => (),
+ Either::B(false, _) => (),
+ }
+
+ match Either::A(false) {
+ Either::A(_) => (),
+ Either::B(true, _) => (),
+ Either::B(false, _) => (),
+ }
+ match Either::A(false) {
+ Either::A(true) | Either::A(false) => (),
+ Either::B(true, _) => (),
+ Either::B(false, _) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_enum_no_diagnostic() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A(bool), B(bool, bool) }
+enum Either2 { C, D }
+
+fn main() {
+ match (Either::A(false), Either2::C) {
+ (Either::A(true), _) | (Either::A(false), _) => (),
+ (Either::B(true, _), Either2::C) => (),
+ (Either::B(false, _), Either2::C) => (),
+ (Either::B(_, _), Either2::D) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn or_pattern_no_diagnostic() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either {A, B}
+
+fn main() {
+ match (Either::A, Either::B) {
+ (Either::A | Either::B, _) => (),
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn mismatched_types() {
+ cov_mark::check_count!(validate_match_bailed_out, 4);
+ // Match statements with arms that don't match the
+ // expression pattern do not fire this diagnostic.
+ check_diagnostics(
+ r#"
+enum Either { A, B }
+enum Either2 { C, D }
+
+fn main() {
+ match Either::A {
+ Either2::C => (),
+ Either2::D => (),
+ }
+ match (true, false) {
+ (true, false, true) => (),
+ (true) => (),
+ // ^^^^ error: expected (bool, bool), found bool
+ }
+ match (true, false) { (true,) => {} }
+ match (0) { () => () }
+ match Unresolved::Bar { Unresolved::Baz => () }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn mismatched_types_in_or_patterns() {
+ cov_mark::check_count!(validate_match_bailed_out, 2);
+ check_diagnostics(
+ r#"
+fn main() {
+ match false { true | () => {} }
+ match (false,) { (true | (),) => {} }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn malformed_match_arm_tuple_enum_missing_pattern() {
+ // We are testing to be sure we don't panic here when the match
+ // arm `Either::B` is missing its pattern.
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A, B(u32) }
+
+fn main() {
+ match Either::A {
+ Either::A => (),
+ Either::B() => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn malformed_match_arm_extra_fields() {
+ cov_mark::check_count!(validate_match_bailed_out, 2);
+ check_diagnostics(
+ r#"
+enum A { B(isize, isize), C }
+fn main() {
+ match A::B(1, 2) {
+ A::B(_, _, _) => (),
+ }
+ match A::B(1, 2) {
+ A::C(_) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn expr_diverges() {
+ cov_mark::check_count!(validate_match_bailed_out, 2);
+ check_diagnostics(
+ r#"
+enum Either { A, B }
+
+fn main() {
+ match loop {} {
+ Either::A => (),
+ Either::B => (),
+ }
+ match loop {} {
+ Either::A => (),
+ }
+ match loop { break Foo::A } {
+ //^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `B` not covered
+ Either::A => (),
+ }
+ match loop { break Foo::A } {
+ Either::A => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn expr_partially_diverges() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either<T> { A(T), B }
+
+fn foo() -> Either<!> { Either::B }
+fn main() -> u32 {
+ match foo() {
+ Either::A(val) => val,
+ Either::B => 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A { foo: bool }, B }
+
+fn main() {
+ let a = Either::A { foo: true };
+ match a { }
+ //^ error: missing match arm: `A { .. }` and `B` not covered
+ match a { Either::A { foo: true } => () }
+ //^ error: missing match arm: `B` not covered
+ match a {
+ Either::A { } => (),
+ //^^^^^^^^^ 💡 error: missing structure fields:
+ // | - foo
+ Either::B => (),
+ }
+ match a {
+ //^ error: missing match arm: `B` not covered
+ Either::A { } => (),
+ } //^^^^^^^^^ 💡 error: missing structure fields:
+ // | - foo
+
+ match a {
+ Either::A { foo: true } => (),
+ Either::A { foo: false } => (),
+ Either::B => (),
+ }
+ match a {
+ Either::A { foo: _ } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record_fields_out_of_order() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either {
+ A { foo: bool, bar: () },
+ B,
+}
+
+fn main() {
+ let a = Either::A { foo: true, bar: () };
+ match a {
+ //^ error: missing match arm: `B` not covered
+ Either::A { bar: (), foo: false } => (),
+ Either::A { foo: true, bar: () } => (),
+ }
+
+ match a {
+ Either::A { bar: (), foo: false } => (),
+ Either::A { foo: true, bar: () } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record_ellipsis() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either {
+ A { foo: bool, bar: bool },
+ B,
+}
+
+fn main() {
+ let a = Either::B;
+ match a {
+ //^ error: missing match arm: `A { foo: false, .. }` not covered
+ Either::A { foo: true, .. } => (),
+ Either::B => (),
+ }
+ match a {
+ //^ error: missing match arm: `B` not covered
+ Either::A { .. } => (),
+ }
+
+ match a {
+ Either::A { foo: true, .. } => (),
+ Either::A { foo: false, .. } => (),
+ Either::B => (),
+ }
+
+ match a {
+ Either::A { .. } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_tuple_partial_ellipsis() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either {
+ A(bool, bool, bool, bool),
+ B,
+}
+
+fn main() {
+ match Either::B {
+ //^^^^^^^^^ error: missing match arm: `A(false, _, _, true)` not covered
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(false, .., false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ //^^^^^^^^^ error: missing match arm: `A(false, _, _, false)` not covered
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(.., true) => (),
+ Either::B => (),
+ }
+
+ match Either::B {
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(false, .., true) => (),
+ Either::A(false, .., false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(.., true) => (),
+ Either::A(.., false) => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn never() {
+ check_diagnostics_no_bails(
+ r#"
+enum Never {}
+
+fn enum_(never: Never) {
+ match never {}
+}
+fn enum_ref(never: &Never) {
+ match never {}
+ //^^^^^ error: missing match arm: type `&Never` is non-empty
+}
+fn bang(never: !) {
+ match never {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unknown_type() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ check_diagnostics(
+ r#"
+enum Option<T> { Some(T), None }
+
+fn main() {
+ // `Never` is deliberately not defined so that it's an uninferred type.
+ match Option::<Never>::None {
+ None => (),
+ Some(never) => match never {},
+ }
+ match Option::<Never>::None {
+ //^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `None` not covered
+ Option::Some(_never) => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_bools_with_ellipsis_at_end_missing_arm() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match (false, true, false) {
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(true, _, _)` not covered
+ (false, ..) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match (false, true, false) {
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(_, _, true)` not covered
+ (.., false) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_bools_with_ellipsis_in_middle_missing_arm() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match (false, true, false) {
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(false, _, _)` not covered
+ (true, .., false) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn record_struct() {
+ check_diagnostics_no_bails(
+ r#"struct Foo { a: bool }
+fn main(f: Foo) {
+ match f {}
+ //^ error: missing match arm: type `Foo` is non-empty
+ match f { Foo { a: true } => () }
+ //^ error: missing match arm: `Foo { a: false }` not covered
+ match &f { Foo { a: true } => () }
+ //^^ error: missing match arm: `&Foo { a: false }` not covered
+ match f { Foo { a: _ } => () }
+ match f {
+ Foo { a: true } => (),
+ Foo { a: false } => (),
+ }
+ match &f {
+ Foo { a: true } => (),
+ Foo { a: false } => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_struct() {
+ check_diagnostics_no_bails(
+ r#"struct Foo(bool);
+fn main(f: Foo) {
+ match f {}
+ //^ error: missing match arm: type `Foo` is non-empty
+ match f { Foo(true) => () }
+ //^ error: missing match arm: `Foo(false)` not covered
+ match f {
+ Foo(true) => (),
+ Foo(false) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unit_struct() {
+ check_diagnostics_no_bails(
+ r#"struct Foo;
+fn main(f: Foo) {
+ match f {}
+ //^ error: missing match arm: type `Foo` is non-empty
+ match f { Foo => () }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn record_struct_ellipsis() {
+ check_diagnostics_no_bails(
+ r#"struct Foo { foo: bool, bar: bool }
+fn main(f: Foo) {
+ match f { Foo { foo: true, .. } => () }
+ //^ error: missing match arm: `Foo { foo: false, .. }` not covered
+ match f {
+ //^ error: missing match arm: `Foo { foo: false, bar: true }` not covered
+ Foo { foo: true, .. } => (),
+ Foo { bar: false, .. } => ()
+ }
+ match f { Foo { .. } => () }
+ match f {
+ Foo { foo: true, .. } => (),
+ Foo { foo: false, .. } => ()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn internal_or() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ enum Either { A(bool), B }
+ match Either::B {
+ //^^^^^^^^^ error: missing match arm: `B` not covered
+ Either::A(true | false) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_panic_at_unimplemented_subpattern_type() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ check_diagnostics(
+ r#"
+struct S { a: char}
+fn main(v: S) {
+ match v { S{ a } => {} }
+ match v { S{ a: _x } => {} }
+ match v { S{ a: 'a' } => {} }
+ match v { S{..} => {} }
+ match v { _ => {} }
+ match v { }
+ //^ error: missing match arm: type `S` is non-empty
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn binding() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match true {
+ _x @ true => {}
+ false => {}
+ }
+ match true { _x @ true => {} }
+ //^^^^ error: missing match arm: `false` not covered
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn binding_ref_has_correct_type() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ // Asserts `PatKind::Binding(ref _x): bool`, not &bool.
+ // If that's not true match checking will panic with "incompatible constructors"
+ // FIXME: make facilities to test this directly like `tests::check_infer(..)`
+ check_diagnostics(
+ r#"
+enum Foo { A }
+fn main() {
+ // FIXME: this should not bail out but current behavior is such as the old algorithm.
+    // ExprValidator::validate_match(..) checks types of top level patterns incorrectly.
+ match Foo::A {
+ ref _x => {}
+ Foo::A => {}
+ }
+ match (true,) {
+ (ref _x,) => {}
+ (true,) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_non_exhaustive() {
+ check_diagnostics_no_bails(
+ r#"
+//- /lib.rs crate:lib
+#[non_exhaustive]
+pub enum E { A, B }
+fn _local() {
+ match E::A { _ => {} }
+ match E::A {
+ E::A => {}
+ E::B => {}
+ }
+ match E::A {
+ E::A | E::B => {}
+ }
+}
+
+//- /main.rs crate:main deps:lib
+use lib::E;
+fn main() {
+ match E::A { _ => {} }
+ match E::A {
+ //^^^^ error: missing match arm: `_` not covered
+ E::A => {}
+ E::B => {}
+ }
+ match E::A {
+ //^^^^ error: missing match arm: `_` not covered
+ E::A | E::B => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn match_guard() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match true {
+ true if false => {}
+ true => {}
+ false => {}
+ }
+ match true {
+ //^^^^ error: missing match arm: `true` not covered
+ true if false => {}
+ false => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn pattern_type_is_of_substitution() {
+ check_diagnostics_no_bails(
+ r#"
+struct Foo<T>(T);
+struct Bar;
+fn main() {
+ match Foo(Bar) {
+ _ | Foo(Bar) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn record_struct_no_such_field() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ check_diagnostics(
+ r#"
+struct Foo { }
+fn main(f: Foo) {
+ match f { Foo { bar } => () }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn match_ergonomics_issue_9095() {
+ check_diagnostics_no_bails(
+ r#"
+enum Foo<T> { A(T) }
+fn main() {
+ match &Foo::A(true) {
+ _ => {}
+ Foo::A(_) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn normalize_field_ty() {
+ check_diagnostics_no_bails(
+ r"
+trait Trait { type Projection; }
+enum E {Foo, Bar}
+struct A;
+impl Trait for A { type Projection = E; }
+struct Next<T: Trait>(T::Projection);
+static __: () = {
+ let n: Next<A> = Next(E::Foo);
+ match n { Next(E::Foo) => {} }
+ // ^ error: missing match arm: `Next(Bar)` not covered
+ match n { Next(E::Foo | E::Bar) => {} }
+ match n { Next(E::Foo | _ ) => {} }
+ match n { Next(_ | E::Bar) => {} }
+ match n { _ | Next(E::Bar) => {} }
+ match &n { Next(E::Foo | E::Bar) => {} }
+ match &n { _ | Next(E::Bar) => {} }
+};",
+ );
+ }
+
+ #[test]
+ fn binding_mode_by_ref() {
+ check_diagnostics_no_bails(
+ r"
+enum E{ A, B }
+fn foo() {
+ match &E::A {
+ E::A => {}
+ x => {}
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn macro_or_pat() {
+ check_diagnostics_no_bails(
+ r#"
+macro_rules! m {
+ () => {
+ Enum::Type1 | Enum::Type2
+ };
+}
+
+enum Enum {
+ Type1,
+ Type2,
+ Type3,
+}
+
+fn f(ty: Enum) {
+ match ty {
+ //^^ error: missing match arm: `Type3` not covered
+ m!() => (),
+ }
+
+ match ty {
+ m!() | Enum::Type3 => ()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unexpected_ty_fndef() {
+ cov_mark::check!(validate_match_bailed_out);
+ check_diagnostics(
+ r"
+enum Exp {
+ Tuple(()),
+}
+fn f() {
+ match __unknown {
+ Exp::Tuple => {}
+ }
+}",
+ );
+ }
+
+ mod false_negatives {
+ //! The implementation of match checking here is a work in progress. As we roll this out, we
+ //! prefer false negatives to false positives (ideally there would be no false positives). This
+ //! test module should document known false negatives. Eventually we will have a complete
+ //! implementation of match checking and this module will be empty.
+ //!
+ //! The reasons for documenting known false negatives:
+ //!
+ //! 1. It acts as a backlog of work that can be done to improve the behavior of the system.
+ //! 2. It ensures the code doesn't panic when handling these cases.
+ use super::*;
+
+ #[test]
+ fn integers() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ // We don't currently check integer exhaustiveness.
+ check_diagnostics(
+ r#"
+fn main() {
+ match 5 {
+ 10 => (),
+ 11..20 => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn reference_patterns_at_top_level() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ check_diagnostics(
+ r#"
+fn main() {
+ match &false {
+ &true => {}
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn reference_patterns_in_fields() {
+ cov_mark::check_count!(validate_match_bailed_out, 2);
+
+ check_diagnostics(
+ r#"
+fn main() {
+ match (&false,) {
+ (true,) => {}
+ }
+ match (&false,) {
+ (&true,) => {}
+ }
+}
+ "#,
+ );
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
new file mode 100644
index 000000000..7acd9228a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -0,0 +1,101 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: missing-unsafe
+//
+// This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block.
+pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Diagnostic {
+ Diagnostic::new(
+ "missing-unsafe",
+ "this operation is unsafe and requires an unsafe function or block",
+ ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_raw_ptr() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let x = &5 as *const usize;
+ unsafe { let y = *x; }
+ let z = *x;
+} //^^ error: this operation is unsafe and requires an unsafe function or block
+"#,
+ )
+ }
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_unsafe_call() {
+ check_diagnostics(
+ r#"
+struct HasUnsafe;
+
+impl HasUnsafe {
+ unsafe fn unsafe_fn(&self) {
+ let x = &5 as *const usize;
+ let y = *x;
+ }
+}
+
+unsafe fn unsafe_fn() {
+ let x = &5 as *const usize;
+ let y = *x;
+}
+
+fn main() {
+ unsafe_fn();
+ //^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ HasUnsafe.unsafe_fn();
+ //^^^^^^^^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ unsafe {
+ unsafe_fn();
+ HasUnsafe.unsafe_fn();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_static_mut() {
+ check_diagnostics(
+ r#"
+struct Ty {
+ a: u8,
+}
+
+static mut STATIC_MUT: Ty = Ty { a: 0 };
+
+fn main() {
+ let x = STATIC_MUT.a;
+ //^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ unsafe {
+ let x = STATIC_MUT.a;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_missing_unsafe_diagnostic_with_safe_intrinsic() {
+ check_diagnostics(
+ r#"
+extern "rust-intrinsic" {
+ pub fn bitreverse(x: u32) -> u32; // Safe intrinsic
+ pub fn floorf32(x: f32) -> f32; // Unsafe intrinsic
+}
+
+fn main() {
+ let _ = bitreverse(12);
+ let _ = floorf32(12.0);
+ //^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
new file mode 100644
index 000000000..e032c578f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -0,0 +1,283 @@
+use hir::{db::AstDatabase, HasSource, HirDisplay, Semantics};
+use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
+use syntax::{
+ ast::{self, edit::IndentLevel, make},
+ AstNode,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Assist, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: no-such-field
+//
+// This diagnostic is triggered if created structure does not have field provided in record.
+pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
+ Diagnostic::new(
+ "no-such-field",
+ "no such field",
+ ctx.sema.diagnostics_display_range(d.field.clone().map(|it| it.into())).range,
+ )
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.field.file_id)?;
+ missing_record_expr_field_fixes(
+ &ctx.sema,
+ d.field.file_id.original_file(ctx.sema.db),
+ &d.field.value.to_node(&root),
+ )
+}
+
+fn missing_record_expr_field_fixes(
+ sema: &Semantics<'_, RootDatabase>,
+ usage_file_id: FileId,
+ record_expr_field: &ast::RecordExprField,
+) -> Option<Vec<Assist>> {
+ let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?;
+ let def_id = sema.resolve_variant(record_lit)?;
+ let module;
+ let def_file_id;
+ let record_fields = match def_id {
+ hir::VariantDef::Struct(s) => {
+ module = s.module(sema.db);
+ let source = s.source(sema.db)?;
+ def_file_id = source.file_id;
+ let fields = source.value.field_list()?;
+ record_field_list(fields)?
+ }
+ hir::VariantDef::Union(u) => {
+ module = u.module(sema.db);
+ let source = u.source(sema.db)?;
+ def_file_id = source.file_id;
+ source.value.record_field_list()?
+ }
+ hir::VariantDef::Variant(e) => {
+ module = e.module(sema.db);
+ let source = e.source(sema.db)?;
+ def_file_id = source.file_id;
+ let fields = source.value.field_list()?;
+ record_field_list(fields)?
+ }
+ };
+ let def_file_id = def_file_id.original_file(sema.db);
+
+ let new_field_type = sema.type_of_expr(&record_expr_field.expr()?)?.adjusted();
+ if new_field_type.is_unknown() {
+ return None;
+ }
+ let new_field = make::record_field(
+ None,
+ make::name(&record_expr_field.field_name()?.text()),
+ make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
+ );
+
+ let last_field = record_fields.fields().last()?;
+ let last_field_syntax = last_field.syntax();
+ let indent = IndentLevel::from_node(last_field_syntax);
+
+ let mut new_field = new_field.to_string();
+ if usage_file_id != def_file_id {
+ new_field = format!("pub(crate) {}", new_field);
+ }
+ new_field = format!("\n{}{}", indent, new_field);
+
+ let needs_comma = !last_field_syntax.to_string().ends_with(',');
+ if needs_comma {
+ new_field = format!(",{}", new_field);
+ }
+
+ let source_change = SourceChange::from_text_edit(
+ def_file_id,
+ TextEdit::insert(last_field_syntax.text_range().end(), new_field),
+ );
+
+ return Some(vec![fix(
+ "create_field",
+ "Create field",
+ source_change,
+ record_expr_field.syntax().text_range(),
+ )]);
+
+ fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
+ match field_def_list {
+ ast::FieldList::RecordFieldList(it) => Some(it),
+ ast::FieldList::TupleFieldList(_) => None,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn no_such_field_diagnostics() {
+ check_diagnostics(
+ r#"
+struct S { foo: i32, bar: () }
+impl S {
+ fn new() -> S {
+ S {
+ //^ 💡 error: missing structure fields:
+ //| - bar
+ foo: 92,
+ baz: 62,
+ //^^^^^^^ 💡 error: no such field
+ }
+ }
+}
+"#,
+ );
+ }
+ #[test]
+ fn no_such_field_with_feature_flag_diagnostics() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct MyStruct {
+ my_val: usize,
+ #[cfg(feature = "foo")]
+ bar: bool,
+}
+
+impl MyStruct {
+ #[cfg(feature = "foo")]
+ pub(crate) fn new(my_val: usize, bar: bool) -> Self {
+ Self { my_val, bar }
+ }
+ #[cfg(not(feature = "foo"))]
+ pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
+ Self { my_val }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_enum_with_feature_flag_diagnostics() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+enum Foo {
+ #[cfg(not(feature = "foo"))]
+ Buz,
+ #[cfg(feature = "foo")]
+ Bar,
+ Baz
+}
+
+fn test_fn(f: Foo) {
+ match f {
+ Foo::Bar => {},
+ Foo::Baz => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct S {
+ #[cfg(feature = "foo")]
+ foo: u32,
+ #[cfg(not(feature = "foo"))]
+ bar: u32,
+}
+
+impl S {
+ #[cfg(feature = "foo")]
+ fn new(foo: u32) -> Self {
+ Self { foo }
+ }
+ #[cfg(not(feature = "foo"))]
+ fn new(bar: u32) -> Self {
+ Self { bar }
+ }
+ fn new2(bar: u32) -> Self {
+ #[cfg(feature = "foo")]
+ { Self { foo: bar } }
+ #[cfg(not(feature = "foo"))]
+ { Self { bar } }
+ }
+ fn new2(val: u32) -> Self {
+ Self {
+ #[cfg(feature = "foo")]
+ foo: val,
+ #[cfg(not(feature = "foo"))]
+ bar: val,
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_with_type_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! Type { () => { u32 }; }
+struct Foo { bar: Type![] }
+
+impl Foo {
+ fn new() -> Self {
+ Foo { bar: 0 }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_field_from_usage() {
+ check_fix(
+ r"
+fn main() {
+ Foo { bar: 3, baz$0: false};
+}
+struct Foo {
+ bar: i32
+}
+",
+ r"
+fn main() {
+ Foo { bar: 3, baz: false};
+}
+struct Foo {
+ bar: i32,
+ baz: bool
+}
+",
+ )
+ }
+
+ #[test]
+ fn test_add_field_in_other_file_from_usage() {
+ check_fix(
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ foo::Foo { bar: 3, $0baz: false};
+}
+//- /foo.rs
+struct Foo {
+ bar: i32
+}
+"#,
+ r#"
+struct Foo {
+ bar: i32,
+ pub(crate) baz: bool
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
new file mode 100644
index 000000000..9826e1c70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -0,0 +1,131 @@
+use hir::{db::AstDatabase, InFile};
+use ide_db::source_change::SourceChange;
+use syntax::{
+ ast::{self, HasArgList},
+ AstNode, TextRange,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: replace-filter-map-next-with-find-map
+//
+// This diagnostic is triggered when `.filter_map(..).next()` is used, rather than the more concise `.find_map(..)`.
+pub(crate) fn replace_filter_map_next_with_find_map(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::ReplaceFilterMapNextWithFindMap,
+) -> Diagnostic {
+ Diagnostic::new(
+ "replace-filter-map-next-with-find-map",
+ "replace filter_map(..).next() with find_map(..)",
+ ctx.sema.diagnostics_display_range(InFile::new(d.file, d.next_expr.clone().into())).range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::ReplaceFilterMapNextWithFindMap,
+) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.file)?;
+ let next_expr = d.next_expr.to_node(&root);
+ let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?;
+
+ let filter_map_call = ast::MethodCallExpr::cast(next_call.receiver()?.syntax().clone())?;
+ let filter_map_name_range = filter_map_call.name_ref()?.ident_token()?.text_range();
+ let filter_map_args = filter_map_call.arg_list()?;
+
+ let range_to_replace =
+ TextRange::new(filter_map_name_range.start(), next_expr.syntax().text_range().end());
+ let replacement = format!("find_map{}", filter_map_args.syntax().text());
+ let trigger_range = next_expr.syntax().text_range();
+
+ let edit = TextEdit::replace(range_to_replace, replacement);
+
+ let source_change = SourceChange::from_text_edit(d.file.original_file(ctx.sema.db), edit);
+
+ Some(vec![fix(
+ "replace_with_find_map",
+ "Replace filter_map(..).next() with find_map()",
+ source_change,
+ trigger_range,
+ )])
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn replace_filter_map_next_with_find_map2() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_filter_map_next_with_find_map_no_diagnostic_without_next() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(())
+ .filter_map(|()| Some(92))
+ .count();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_filter_map_next_with_find_map_no_diagnostic_with_intervening_methods() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(())
+ .filter_map(|()| Some(92))
+ .map(|x| x + 2)
+ .next();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_filter_map_next_with_find_map_no_diagnostic_if_not_in_chain() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(())
+ .filter_map(|()| Some(92));
+ let n = m.next();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_with_find_map() {
+ check_fix(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(()).$0filter_map(|()| Some(92)).next();
+}
+"#,
+ r#"
+fn foo() {
+ let m = core::iter::repeat(()).find_map(|()| Some(92));
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
new file mode 100644
index 000000000..6bf90e645
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -0,0 +1,573 @@
+use hir::{db::AstDatabase, HirDisplay, Type};
+use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
+use syntax::{
+ ast::{self, BlockExpr, ExprStmt},
+ AstNode,
+};
+use text_edit::TextEdit;
+
+use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticsContext};
+
// Diagnostic: type-mismatch
//
// This diagnostic is triggered when the type of an expression does not match
// the expected type.
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
    // For block expressions, narrow the reported range to the closing `}` so
    // the whole (possibly multi-line) block isn't underlined.
    let display_range = adjusted_display_range::<ast::BlockExpr>(
        ctx,
        d.expr.clone().map(|it| it.into()),
        &|block| {
            let r_curly_range = block.stmt_list()?.r_curly_token()?.text_range();
            cov_mark::hit!(type_mismatch_on_block);
            Some(r_curly_range)
        },
    );

    let mut diag = Diagnostic::new(
        "type-mismatch",
        format!(
            "expected {}, found {}",
            d.expected.display(ctx.sema.db),
            d.actual.display(ctx.sema.db)
        ),
        display_range,
    )
    .with_fixes(fixes(ctx, d));
    // Mismatches we cannot offer any fix for are more likely to be false
    // positives, so they are flagged as experimental.
    if diag.fixes.is_none() {
        diag.experimental = true;
    }
    diag
}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
+ let mut fixes = Vec::new();
+
+ add_reference(ctx, d, &mut fixes);
+ add_missing_ok_or_some(ctx, d, &mut fixes);
+ remove_semicolon(ctx, d, &mut fixes);
+ str_ref_to_owned(ctx, d, &mut fixes);
+
+ if fixes.is_empty() {
+ None
+ } else {
+ Some(fixes)
+ }
+}
+
+fn add_reference(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TypeMismatch,
+ acc: &mut Vec<Assist>,
+) -> Option<()> {
+ let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+ let expr_node = d.expr.value.to_node(&root);
+
+ let range = ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range;
+
+ let (_, mutability) = d.expected.as_reference()?;
+ let actual_with_ref = Type::reference(&d.actual, mutability);
+ if !actual_with_ref.could_coerce_to(ctx.sema.db, &d.expected) {
+ return None;
+ }
+
+ let ampersands = format!("&{}", mutability.as_keyword_for_ref());
+
+ let edit = TextEdit::insert(expr_node.syntax().text_range().start(), ampersands);
+ let source_change =
+ SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+ acc.push(fix("add_reference_here", "Add reference here", source_change, range));
+ Some(())
+}
+
// Quick fix: wrap the expression in `Ok(..)`/`Some(..)` when the expected
// type is `core`'s `Result<T, _>`/`Option<T>` and the wrapped actual type
// unifies with it.
fn add_missing_ok_or_some(
    ctx: &DiagnosticsContext<'_>,
    d: &hir::TypeMismatch,
    acc: &mut Vec<Assist>,
) -> Option<()> {
    let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
    let expr = d.expr.value.to_node(&root);
    let expr_range = expr.syntax().text_range();
    let scope = ctx.sema.scope(expr.syntax())?;

    let expected_adt = d.expected.as_adt()?;
    let expected_enum = expected_adt.as_enum()?;

    // Only the well-known `core` enums qualify; user-defined enums with a
    // similar shape must not get this fix.
    let famous_defs = FamousDefs(&ctx.sema, scope.krate());
    let core_result = famous_defs.core_result_Result();
    let core_option = famous_defs.core_option_Option();

    if Some(expected_enum) != core_result && Some(expected_enum) != core_option {
        return None;
    }

    let variant_name = if Some(expected_enum) == core_result { "Ok" } else { "Some" };

    // Verify that wrapping actually yields the expected type, i.e. the
    // actual type matches the enum's payload type parameter.
    let wrapped_actual_ty = expected_adt.ty_with_args(ctx.sema.db, &[d.actual.clone()]);

    if !d.expected.could_unify_with(ctx.sema.db, &wrapped_actual_ty) {
        return None;
    }

    // Surround the expression: insert `Ok(`/`Some(` before and `)` after.
    let mut builder = TextEdit::builder();
    builder.insert(expr.syntax().text_range().start(), format!("{}(", variant_name));
    builder.insert(expr.syntax().text_range().end(), ")".to_string());
    let source_change =
        SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), builder.finish());
    let name = format!("Wrap in {}", variant_name);
    acc.push(fix("wrap_in_constructor", &name, source_change, expr_range));
    Some(())
}
+
// Quick fix: when a block evaluates to `()` only because its final statement
// is an expression statement of the expected type, offer to delete the
// trailing semicolon.
fn remove_semicolon(
    ctx: &DiagnosticsContext<'_>,
    d: &hir::TypeMismatch,
    acc: &mut Vec<Assist>,
) -> Option<()> {
    let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
    let expr = d.expr.value.to_node(&root);
    // The fix only makes sense when the block currently evaluates to `()`.
    if !d.actual.is_unit() {
        return None;
    }
    let block = BlockExpr::cast(expr.syntax().clone())?;
    // The last statement must be an expression statement (`expr;`).
    let expr_before_semi =
        block.statements().last().and_then(|s| ExprStmt::cast(s.syntax().clone()))?;
    // Dropping the semicolon must produce a type that coerces to the expected one.
    let type_before_semi = ctx.sema.type_of_expr(&expr_before_semi.expr()?)?.original();
    if !type_before_semi.could_coerce_to(ctx.sema.db, &d.expected) {
        return None;
    }
    let semicolon_range = expr_before_semi.semicolon_token()?.text_range();

    let edit = TextEdit::delete(semicolon_range);
    let source_change =
        SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);

    acc.push(fix("remove_semicolon", "Remove this semicolon", source_change, semicolon_range));
    Some(())
}
+
+fn str_ref_to_owned(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TypeMismatch,
+ acc: &mut Vec<Assist>,
+) -> Option<()> {
+ let expected = d.expected.display(ctx.sema.db);
+ let actual = d.actual.display(ctx.sema.db);
+
+ if expected.to_string() != "String" || actual.to_string() != "&str" {
+ return None;
+ }
+
+ let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+ let expr = d.expr.value.to_node(&root);
+ let expr_range = expr.syntax().text_range();
+
+ let to_owned = format!(".to_owned()");
+
+ let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned);
+ let source_change =
+ SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+ acc.push(fix("str_ref_to_owned", "Add .to_owned() here", source_change, expr_range));
+
+ Some(())
+}
+
#[cfg(test)]
mod tests {
    // Tests cover the diagnostic itself plus each quick-fix strategy:
    // `&`-insertion, `Ok`/`Some` wrapping, semicolon removal and `.to_owned()`.
    use crate::tests::{check_diagnostics, check_fix, check_no_fix};

    #[test]
    fn missing_reference() {
        check_diagnostics(
            r#"
fn main() {
    test(123);
       //^^^ 💡 error: expected &i32, found i32
}
fn test(arg: &i32) {}
"#,
        );
    }

    #[test]
    fn test_add_reference_to_int() {
        check_fix(
            r#"
fn main() {
    test(123$0);
}
fn test(arg: &i32) {}
            "#,
            r#"
fn main() {
    test(&123);
}
fn test(arg: &i32) {}
            "#,
        );
    }

    #[test]
    fn test_add_mutable_reference_to_int() {
        check_fix(
            r#"
fn main() {
    test($0123);
}
fn test(arg: &mut i32) {}
            "#,
            r#"
fn main() {
    test(&mut 123);
}
fn test(arg: &mut i32) {}
            "#,
        );
    }

    #[test]
    fn test_add_reference_to_array() {
        check_fix(
            r#"
//- minicore: coerce_unsized
fn main() {
    test($0[1, 2, 3]);
}
fn test(arg: &[i32]) {}
            "#,
            r#"
fn main() {
    test(&[1, 2, 3]);
}
fn test(arg: &[i32]) {}
            "#,
        );
    }

    // The fix must also trigger when coercion goes through a `Deref` impl.
    #[test]
    fn test_add_reference_with_autoderef() {
        check_fix(
            r#"
//- minicore: coerce_unsized, deref
struct Foo;
struct Bar;
impl core::ops::Deref for Foo {
    type Target = Bar;
}

fn main() {
    test($0Foo);
}
fn test(arg: &Bar) {}
            "#,
            r#"
struct Foo;
struct Bar;
impl core::ops::Deref for Foo {
    type Target = Bar;
}

fn main() {
    test(&Foo);
}
fn test(arg: &Bar) {}
            "#,
        );
    }

    #[test]
    fn test_add_reference_to_method_call() {
        check_fix(
            r#"
fn main() {
    Test.call_by_ref($0123);
}
struct Test;
impl Test {
    fn call_by_ref(&self, arg: &i32) {}
}
            "#,
            r#"
fn main() {
    Test.call_by_ref(&123);
}
struct Test;
impl Test {
    fn call_by_ref(&self, arg: &i32) {}
}
            "#,
        );
    }

    #[test]
    fn test_add_reference_to_let_stmt() {
        check_fix(
            r#"
fn main() {
    let test: &i32 = $0123;
}
            "#,
            r#"
fn main() {
    let test: &i32 = &123;
}
            "#,
        );
    }

    #[test]
    fn test_add_mutable_reference_to_let_stmt() {
        check_fix(
            r#"
fn main() {
    let test: &mut i32 = $0123;
}
            "#,
            r#"
fn main() {
    let test: &mut i32 = &mut 123;
}
            "#,
        );
    }

    #[test]
    fn test_wrap_return_type_option() {
        check_fix(
            r#"
//- minicore: option, result
fn div(x: i32, y: i32) -> Option<i32> {
    if y == 0 {
        return None;
    }
    x / y$0
}
"#,
            r#"
fn div(x: i32, y: i32) -> Option<i32> {
    if y == 0 {
        return None;
    }
    Some(x / y)
}
"#,
        );
    }

    #[test]
    fn const_generic_type_mismatch() {
        check_diagnostics(
            r#"
            pub struct Rate<const N: u32>;
            fn f<const N: u64>() -> Rate<N> { // FIXME: add some error
                loop {}
            }
            fn run(t: Rate<5>) {
            }
            fn main() {
                run(f()) // FIXME: remove this error
                  //^^^ error: expected Rate<5>, found Rate<_>
            }
"#,
        );
    }

    #[test]
    fn const_generic_unknown() {
        check_diagnostics(
            r#"
            pub struct Rate<T, const NOM: u32, const DENOM: u32>(T);
            fn run(t: Rate<u32, 1, 1>) {
            }
            fn main() {
                run(Rate::<_, _, _>(5));
            }
"#,
        );
    }

    // Wrapping applies to every tail position of an `if`/`else if` chain.
    #[test]
    fn test_wrap_return_type_option_tails() {
        check_fix(
            r#"
//- minicore: option, result
fn div(x: i32, y: i32) -> Option<i32> {
    if y == 0 {
        Some(0)
    } else if true {
        100$0
    } else {
        None
    }
}
"#,
            r#"
fn div(x: i32, y: i32) -> Option<i32> {
    if y == 0 {
        Some(0)
    } else if true {
        Some(100)
    } else {
        None
    }
}
"#,
        );
    }

    #[test]
    fn test_wrap_return_type() {
        check_fix(
            r#"
//- minicore: option, result
fn div(x: i32, y: i32) -> Result<i32, ()> {
    if y == 0 {
        return Err(());
    }
    x / y$0
}
"#,
            r#"
fn div(x: i32, y: i32) -> Result<i32, ()> {
    if y == 0 {
        return Err(());
    }
    Ok(x / y)
}
"#,
        );
    }

    #[test]
    fn test_wrap_return_type_handles_generic_functions() {
        check_fix(
            r#"
//- minicore: option, result
fn div<T>(x: T) -> Result<T, i32> {
    if x == 0 {
        return Err(7);
    }
    $0x
}
"#,
            r#"
fn div<T>(x: T) -> Result<T, i32> {
    if x == 0 {
        return Err(7);
    }
    Ok(x)
}
"#,
        );
    }

    // The expected type is resolved through type aliases before matching
    // against `Result`/`Option`.
    #[test]
    fn test_wrap_return_type_handles_type_aliases() {
        check_fix(
            r#"
//- minicore: option, result
type MyResult<T> = Result<T, ()>;

fn div(x: i32, y: i32) -> MyResult<i32> {
    if y == 0 {
        return Err(());
    }
    x $0/ y
}
"#,
            r#"
type MyResult<T> = Result<T, ()>;

fn div(x: i32, y: i32) -> MyResult<i32> {
    if y == 0 {
        return Err(());
    }
    Ok(x / y)
}
"#,
        );
    }

    #[test]
    fn test_in_const_and_static() {
        check_fix(
            r#"
//- minicore: option, result
static A: Option<()> = {($0)};
            "#,
            r#"
static A: Option<()> = {Some(())};
            "#,
        );
        check_fix(
            r#"
//- minicore: option, result
const _: Option<()> = {($0)};
            "#,
            r#"
const _: Option<()> = {Some(())};
            "#,
        );
    }

    #[test]
    fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() {
        check_no_fix(
            r#"
//- minicore: option, result
fn foo() -> Result<(), i32> { 0$0 }
"#,
        );
    }

    #[test]
    fn test_wrap_return_type_not_applicable_when_return_type_is_not_result_or_option() {
        check_no_fix(
            r#"
//- minicore: option, result
enum SomeOtherEnum { Ok(i32), Err(String) }

fn foo() -> SomeOtherEnum { 0$0 }
"#,
        );
    }

    #[test]
    fn remove_semicolon() {
        check_fix(r#"fn f() -> i32 { 92$0; }"#, r#"fn f() -> i32 { 92 }"#);
    }

    // The local `String` shadows the well-known one; the fix keys off the
    // rendered type names.
    #[test]
    fn str_ref_to_owned() {
        check_fix(
            r#"
struct String;

fn test() -> String {
    "a"$0
}
            "#,
            r#"
struct String;

fn test() -> String {
    "a".to_owned()
}
            "#,
        );
    }

    // For block expressions, only the closing `}` is underlined.
    #[test]
    fn type_mismatch_on_block() {
        cov_mark::check!(type_mismatch_on_block);
        check_diagnostics(
            r#"
fn f() -> i32 {
    let x = 1;
    let y = 2;
    let _ = x + y;
  }
//^ error: expected i32, found ()
"#,
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
new file mode 100644
index 000000000..e879de75c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
@@ -0,0 +1,16 @@
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: unimplemented-builtin-macro
+//
+// This diagnostic is shown for builtin macros which are not yet implemented by rust-analyzer
+pub(crate) fn unimplemented_builtin_macro(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnimplementedBuiltinMacro,
+) -> Diagnostic {
+ Diagnostic::new(
+ "unimplemented-builtin-macro",
+ "unimplemented built-in macro".to_string(),
+ ctx.sema.diagnostics_display_range(d.node.clone()).range,
+ )
+ .severity(Severity::WeakWarning)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
new file mode 100644
index 000000000..c626932f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -0,0 +1,336 @@
+//! Diagnostic emitted for files that aren't part of any crate.
+
+use hir::db::DefDatabase;
+use ide_db::{
+ base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt},
+ source_change::SourceChange,
+ RootDatabase,
+};
+use syntax::{
+ ast::{self, HasModuleItem, HasName},
+ AstNode, TextRange, TextSize,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
+
// Diagnostic: unlinked-file
//
// This diagnostic is shown for files that are not included in any crate, or files that are part of
// crates rust-analyzer failed to discover. The file will not have IDE features available.
pub(crate) fn unlinked_file(
    ctx: &DiagnosticsContext<'_>,
    acc: &mut Vec<Diagnostic>,
    file_id: FileId,
) {
    // Limit diagnostic to the first few characters in the file. This matches how VS Code
    // renders it with the full span, but on other editors, and is less invasive.
    let range = ctx.sema.db.parse(file_id).syntax_node().text_range();
    // FIXME: This is wrong if one of the first three characters is not ascii: `//Ы`.
    let range = range.intersect(TextRange::up_to(TextSize::of("..."))).unwrap_or(range);

    // Fixes (inserting `mod`/`pub mod` into a candidate parent file) are
    // attached when a suitable parent module can be found.
    acc.push(
        Diagnostic::new("unlinked-file", "file not included in module tree", range)
            .severity(Severity::WeakWarning)
            .with_fixes(fixes(ctx, file_id)),
    );
}
+
// Find an existing module file that could declare the unlinked file, and
// offer `mod`/`pub mod` insertions into it.
fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
    // If there's an existing module that could add `mod` or `pub mod` items to include the unlinked file,
    // suggest that as a fix.

    let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id));
    let our_path = source_root.path_for_file(&file_id)?;
    let (mut module_name, _) = our_path.name_and_extension()?;

    // Candidates to look for:
    // - `mod.rs`, `main.rs` and `lib.rs` in the same folder
    // - `$dir.rs` in the parent folder, where `$dir` is the directory containing `self.file_id`
    let parent = our_path.parent()?;
    let paths = {
        let parent = if module_name == "mod" {
            // for mod.rs we need to actually look up one higher
            // and take the parent as our to be module name
            let (name, _) = parent.name_and_extension()?;
            module_name = name;
            parent.parent()?
        } else {
            parent
        };
        let mut paths =
            vec![parent.join("mod.rs")?, parent.join("lib.rs")?, parent.join("main.rs")?];

        // `submod/bla.rs` -> `submod.rs`
        let parent_mod = (|| {
            let (name, _) = parent.name_and_extension()?;
            parent.parent()?.join(&format!("{}.rs", name))
        })();
        paths.extend(parent_mod);
        paths
    };

    // Take the first candidate that is the file of some non-inline module in
    // a relevant crate's def map, and build the fixes against it.
    for &parent_id in paths.iter().filter_map(|path| source_root.file_for_path(path)) {
        for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
            let crate_def_map = ctx.sema.db.crate_def_map(krate);
            for (_, module) in crate_def_map.modules() {
                if module.origin.is_inline() {
                    // We don't handle inline `mod parent {}`s, they use different paths.
                    continue;
                }

                if module.origin.file_id() == Some(parent_id) {
                    return make_fixes(ctx.sema.db, parent_id, module_name, file_id);
                }
            }
        }
    }

    None
}
+
// Build the two fixes ("insert `mod <name>;`" and "insert `pub mod <name>;`")
// into `parent_file_id`, choosing an insertion point near existing `mod` items.
fn make_fixes(
    db: &RootDatabase,
    parent_file_id: FileId,
    new_mod_name: &str,
    added_file_id: FileId,
) -> Option<Vec<Assist>> {
    // An "outline" module is a `mod foo;` declaration without a body.
    fn is_outline_mod(item: &ast::Item) -> bool {
        matches!(item, ast::Item::Module(m) if m.item_list().is_none())
    }

    let mod_decl = format!("mod {};", new_mod_name);
    let pub_mod_decl = format!("pub mod {};", new_mod_name);

    let ast: ast::SourceFile = db.parse(parent_file_id).tree();

    let mut mod_decl_builder = TextEdit::builder();
    let mut pub_mod_decl_builder = TextEdit::builder();

    // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's
    // probably `#[cfg]`d out).
    for item in ast.items() {
        if let ast::Item::Module(m) = item {
            if let Some(name) = m.name() {
                if m.item_list().is_none() && name.to_string() == new_mod_name {
                    cov_mark::hit!(unlinked_file_skip_fix_when_mod_already_exists);
                    return None;
                }
            }
        }
    }

    // If there are existing `mod m;` items, append after them (after the first group of them, rather).
    match ast.items().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() {
        Some(last) => {
            cov_mark::hit!(unlinked_file_append_to_existing_mods);
            let offset = last.syntax().text_range().end();
            mod_decl_builder.insert(offset, format!("\n{}", mod_decl));
            pub_mod_decl_builder.insert(offset, format!("\n{}", pub_mod_decl));
        }
        None => {
            // Prepend before the first item in the file.
            match ast.items().next() {
                Some(item) => {
                    cov_mark::hit!(unlinked_file_prepend_before_first_item);
                    let offset = item.syntax().text_range().start();
                    mod_decl_builder.insert(offset, format!("{}\n\n", mod_decl));
                    pub_mod_decl_builder.insert(offset, format!("{}\n\n", pub_mod_decl));
                }
                None => {
                    // No items in the file, so just append at the end.
                    cov_mark::hit!(unlinked_file_empty_file);
                    let offset = ast.syntax().text_range().end();
                    mod_decl_builder.insert(offset, format!("{}\n", mod_decl));
                    pub_mod_decl_builder.insert(offset, format!("{}\n", pub_mod_decl));
                }
            }
        }
    }

    // Both fixes trigger anywhere in the unlinked (added) file.
    let trigger_range = db.parse(added_file_id).tree().syntax().text_range();
    Some(vec![
        fix(
            "add_mod_declaration",
            &format!("Insert `{}`", mod_decl),
            SourceChange::from_text_edit(parent_file_id, mod_decl_builder.finish()),
            trigger_range,
        ),
        fix(
            "add_pub_mod_declaration",
            &format!("Insert `{}`", pub_mod_decl),
            SourceChange::from_text_edit(parent_file_id, pub_mod_decl_builder.finish()),
            trigger_range,
        ),
    ])
}
+
#[cfg(test)]
mod tests {
    // Fixtures declare files via `//- /path` headers; `$0` marks the (empty)
    // unlinked file the diagnostic and its fixes are computed for.
    use crate::tests::{check_diagnostics, check_fix, check_fixes, check_no_fix};

    #[test]
    fn unlinked_file_prepend_first_item() {
        cov_mark::check!(unlinked_file_prepend_before_first_item);
        // Only tests the first one for `pub mod` since the rest are the same
        check_fixes(
            r#"
//- /main.rs
fn f() {}
//- /foo.rs
$0
"#,
            vec![
                r#"
mod foo;

fn f() {}
"#,
                r#"
pub mod foo;

fn f() {}
"#,
            ],
        );
    }

    // New `mod` is appended after the first contiguous group of `mod` items.
    #[test]
    fn unlinked_file_append_mod() {
        cov_mark::check!(unlinked_file_append_to_existing_mods);
        check_fix(
            r#"
//- /main.rs
//! Comment on top

mod preexisting;

mod preexisting2;

struct S;

mod preexisting_bottom;)
//- /foo.rs
$0
"#,
            r#"
//! Comment on top

mod preexisting;

mod preexisting2;
mod foo;

struct S;

mod preexisting_bottom;)
"#,
        );
    }

    #[test]
    fn unlinked_file_insert_in_empty_file() {
        cov_mark::check!(unlinked_file_empty_file);
        check_fix(
            r#"
//- /main.rs
//- /foo.rs
$0
"#,
            r#"
mod foo;
"#,
        );
    }

    // For `foo/mod.rs` the module name is taken from the directory.
    #[test]
    fn unlinked_file_insert_in_empty_file_mod_file() {
        check_fix(
            r#"
//- /main.rs
//- /foo/mod.rs
$0
"#,
            r#"
mod foo;
"#,
        );
        check_fix(
            r#"
//- /main.rs
mod bar;
//- /bar.rs
// bar module
//- /bar/foo/mod.rs
$0
"#,
            r#"
// bar module
mod foo;
"#,
        );
    }

    #[test]
    fn unlinked_file_old_style_modrs() {
        check_fix(
            r#"
//- /main.rs
mod submod;
//- /submod/mod.rs
// in mod.rs
//- /submod/foo.rs
$0
"#,
            r#"
// in mod.rs
mod foo;
"#,
        );
    }

    #[test]
    fn unlinked_file_new_style_mod() {
        check_fix(
            r#"
//- /main.rs
mod submod;
//- /submod.rs
//- /submod/foo.rs
$0
"#,
            r#"
mod foo;
"#,
        );
    }

    // A `#[cfg]`d-out `mod foo;` already exists, so no fix is offered.
    #[test]
    fn unlinked_file_with_cfg_off() {
        cov_mark::check!(unlinked_file_skip_fix_when_mod_already_exists);
        check_no_fix(
            r#"
//- /main.rs
#[cfg(never)]
mod foo;

//- /foo.rs
$0
"#,
        );
    }

    // With the cfg active the file is linked, so no diagnostic at all.
    #[test]
    fn unlinked_file_with_cfg_on() {
        check_diagnostics(
            r#"
//- /main.rs
#[cfg(not(never))]
mod foo;

//- /foo.rs
"#,
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
new file mode 100644
index 000000000..74e4a69c6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
@@ -0,0 +1,49 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-extern-crate
+//
+// This diagnostic is triggered if rust-analyzer is unable to discover referred extern crate.
+pub(crate) fn unresolved_extern_crate(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedExternCrate,
+) -> Diagnostic {
+ Diagnostic::new(
+ "unresolved-extern-crate",
+ "unresolved extern crate",
+ ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ )
+}
+
#[cfg(test)]
mod tests {
    use crate::tests::check_diagnostics;

    #[test]
    fn unresolved_extern_crate() {
        check_diagnostics(
            r#"
//- /main.rs crate:main deps:core
extern crate core;
  extern crate doesnotexist;
//^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
//- /lib.rs crate:core
"#,
        );
    }

    // `extern crate self as foo;` refers to the current crate and must not error.
    #[test]
    fn extern_crate_self_as() {
        cov_mark::check!(extern_crate_self_as);
        check_diagnostics(
            r#"
//- /lib.rs
  extern crate doesnotexist;
//^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
// Should not error.
extern crate self as foo;
struct Foo;
use foo::Foo as Bar;
"#,
        );
    }
}
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
new file mode 100644
index 000000000..e52a88459
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
@@ -0,0 +1,90 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-import
+//
+// This diagnostic is triggered if rust-analyzer is unable to resolve a path in
+// a `use` declaration.
+pub(crate) fn unresolved_import(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedImport,
+) -> Diagnostic {
+ Diagnostic::new(
+ "unresolved-import",
+ "unresolved import",
+ ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ )
+ // This currently results in false positives in the following cases:
+ // - `cfg_if!`-generated code in libstd (we don't load the sysroot correctly)
+ // - `core::arch` (we don't handle `#[path = "../<path>"]` correctly)
+ // - proc macros and/or proc macro generated code
+ .experimental()
+}
+
#[cfg(test)]
mod tests {
    use crate::tests::check_diagnostics;

    #[test]
    fn unresolved_import() {
        check_diagnostics(
            r#"
use does_exist;
use does_not_exist;
  //^^^^^^^^^^^^^^ error: unresolved import

mod does_exist {}
"#,
        );
    }

    #[test]
    fn unresolved_import_in_use_tree() {
        // Only the relevant part of a nested `use` item should be highlighted.
        check_diagnostics(
            r#"
use does_exist::{Exists, DoesntExist};
                       //^^^^^^^^^^^ error: unresolved import

use {does_not_exist::*, does_exist};
   //^^^^^^^^^^^^^^^^^ error: unresolved import

use does_not_exist::{
    a,
  //^ error: unresolved import
    b,
  //^ error: unresolved import
    c,
  //^ error: unresolved import
};

mod does_exist {
    pub struct Exists;
}
"#,
        );
    }

    // Imports from a crate that already failed to resolve should not produce
    // a second, redundant error.
    #[test]
    fn dedup_unresolved_import_from_unresolved_crate() {
        check_diagnostics(
            r#"
//- /main.rs crate:main
mod a {
    extern crate doesnotexist;
  //^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate

    // Should not error, since we already errored for the missing crate.
    use doesnotexist::{self, bla, *};

    use crate::doesnotexist;
      //^^^^^^^^^^^^^^^^^^^ error: unresolved import
}

mod m {
    use super::doesnotexist;
      //^^^^^^^^^^^^^^^^^^^ error: unresolved import
}
"#,
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
new file mode 100644
index 000000000..4b4312475
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -0,0 +1,76 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-macro-call
+//
+// This diagnostic is triggered if rust-analyzer is unable to resolve the path
+// to a macro in a macro invocation.
+pub(crate) fn unresolved_macro_call(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedMacroCall,
+) -> Diagnostic {
+ // Use more accurate position if available.
+ let display_range = d
+ .precise_location
+ .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.macro_call.clone()).range);
+
+ let bang = if d.is_bang { "!" } else { "" };
+ Diagnostic::new(
+ "unresolved-macro-call",
+ format!("unresolved macro `{}{}`", d.path, bang),
+ display_range,
+ )
+ .experimental()
+}
+
#[cfg(test)]
mod tests {
    use crate::tests::check_diagnostics;

    #[test]
    fn unresolved_macro_diag() {
        check_diagnostics(
            r#"
fn f() {
    m!();
} //^ error: unresolved macro `m!`

"#,
        );
    }

    // For path macros only the final segment is highlighted.
    #[test]
    fn test_unresolved_macro_range() {
        check_diagnostics(
            r#"
foo::bar!(92);
   //^^^ error: unresolved macro `foo::bar!`
"#,
        );
    }

    #[test]
    fn unresolved_legacy_scope_macro() {
        check_diagnostics(
            r#"
macro_rules! m { () => {} }

m!(); m2!();
    //^^ error: unresolved macro `m2!`
"#,
        );
    }

    #[test]
    fn unresolved_module_scope_macro() {
        check_diagnostics(
            r#"
mod mac {
#[macro_export]
macro_rules! m { () => {} } }

self::m!(); self::m2!();
                //^^ error: unresolved macro `self::m2!`
"#,
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
new file mode 100644
index 000000000..b8f2a9e94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -0,0 +1,156 @@
+use hir::db::AstDatabase;
+use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
+use itertools::Itertools;
+use syntax::AstNode;
+
+use crate::{fix, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-module
+//
+// This diagnostic is triggered if rust-analyzer is unable to discover referred module.
+pub(crate) fn unresolved_module(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedModule,
+) -> Diagnostic {
+ Diagnostic::new(
+ "unresolved-module",
+ match &*d.candidates {
+ [] => "unresolved module".to_string(),
+ [candidate] => format!("unresolved module, can't find module file: {}", candidate),
+ [candidates @ .., last] => {
+ format!(
+ "unresolved module, can't find module file: {}, or {}",
+ candidates.iter().format(", "),
+ last
+ )
+ }
+ },
+ ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ )
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.decl.file_id)?;
+ let unresolved_module = d.decl.value.to_node(&root);
+ Some(
+ d.candidates
+ .iter()
+ .map(|candidate| {
+ fix(
+ "create_module",
+ &format!("Create module at `{candidate}`"),
+ FileSystemEdit::CreateFile {
+ dst: AnchoredPathBuf {
+ anchor: d.decl.file_id.original_file(ctx.sema.db),
+ path: candidate.clone(),
+ },
+ initial_contents: "".to_string(),
+ }
+ .into(),
+ unresolved_module.syntax().text_range(),
+ )
+ })
+ .collect(),
+ )
+}
+
#[cfg(test)]
mod tests {
    use expect_test::expect;

    use crate::tests::{check_diagnostics, check_expect};

    #[test]
    fn unresolved_module() {
        check_diagnostics(
            r#"
//- /lib.rs
mod foo;
  mod bar;
//^^^^^^^^ 💡 error: unresolved module, can't find module file: bar.rs, or bar/mod.rs
mod baz {}
//- /foo.rs
"#,
        );
    }

    // Snapshot of the full diagnostic structure, including both
    // `create_module` fixes (`foo.rs` and `foo/mod.rs`).
    #[test]
    fn test_unresolved_module_diagnostic() {
        check_expect(
            r#"mod foo;"#,
            expect![[r#"
                [
                    Diagnostic {
                        code: DiagnosticCode(
                            "unresolved-module",
                        ),
                        message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs",
                        range: 0..8,
                        severity: Error,
                        unused: false,
                        experimental: false,
                        fixes: Some(
                            [
                                Assist {
                                    id: AssistId(
                                        "create_module",
                                        QuickFix,
                                    ),
                                    label: "Create module at `foo.rs`",
                                    group: None,
                                    target: 0..8,
                                    source_change: Some(
                                        SourceChange {
                                            source_file_edits: {},
                                            file_system_edits: [
                                                CreateFile {
                                                    dst: AnchoredPathBuf {
                                                        anchor: FileId(
                                                            0,
                                                        ),
                                                        path: "foo.rs",
                                                    },
                                                    initial_contents: "",
                                                },
                                            ],
                                            is_snippet: false,
                                        },
                                    ),
                                    trigger_signature_help: false,
                                },
                                Assist {
                                    id: AssistId(
                                        "create_module",
                                        QuickFix,
                                    ),
                                    label: "Create module at `foo/mod.rs`",
                                    group: None,
                                    target: 0..8,
                                    source_change: Some(
                                        SourceChange {
                                            source_file_edits: {},
                                            file_system_edits: [
                                                CreateFile {
                                                    dst: AnchoredPathBuf {
                                                        anchor: FileId(
                                                            0,
                                                        ),
                                                        path: "foo/mod.rs",
                                                    },
                                                    initial_contents: "",
                                                },
                                            ],
                                            is_snippet: false,
                                        },
                                    ),
                                    trigger_signature_help: false,
                                },
                            ],
                        ),
                    },
                ]
            "#]],
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
new file mode 100644
index 000000000..760f51f90
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
@@ -0,0 +1,62 @@
+use hir::db::DefDatabase;
+use syntax::NodeOrToken;
+
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
// Diagnostic: unresolved-proc-macro
//
// This diagnostic is shown when a procedural macro can not be found. This usually means that
// procedural macro support is simply disabled (and hence is only a weak hint instead of an error),
// but can also indicate project setup problems.
//
// If you are seeing a lot of "proc macro not expanded" warnings, you can add this option to the
// `rust-analyzer.diagnostics.disabled` list to prevent them from showing. Alternatively you can
// enable support for procedural macros (see `rust-analyzer.procMacro.attributes.enable`).
pub(crate) fn unresolved_proc_macro(
    ctx: &DiagnosticsContext<'_>,
    d: &hir::UnresolvedProcMacro,
    proc_macros_enabled: bool,
    proc_attr_macros_enabled: bool,
) -> Diagnostic {
    // Use more accurate position if available.
    let display_range = (|| {
        let precise_location = d.precise_location?;
        let root = ctx.sema.parse_or_expand(d.node.file_id)?;
        match root.covering_element(precise_location) {
            NodeOrToken::Node(it) => Some(ctx.sema.original_range(&it)),
            NodeOrToken::Token(it) => d.node.with_value(it).original_file_range_opt(ctx.sema.db),
        }
    })()
    .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.node.clone()))
    .range;

    // Attribute proc macros are additionally gated behind their own setting.
    let config_enabled = match d.kind {
        hir::MacroKind::Attr => proc_macros_enabled && proc_attr_macros_enabled,
        _ => proc_macros_enabled,
    };

    let message = match &d.macro_name {
        Some(name) => format!("proc macro `{}` not expanded", name),
        None => "proc macro not expanded".to_string(),
    };
    // A deliberately disabled expansion is only a weak warning, not an error.
    let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning };
    let def_map = ctx.sema.db.crate_def_map(d.krate);
    // Shadow `message`, appending the reason why expansion did not happen.
    let message = format!(
        "{message}: {}",
        if config_enabled {
            match def_map.proc_macro_loading_error() {
                Some(e) => e,
                None => "proc macro not found in the built dylib",
            }
        } else {
            match d.kind {
                hir::MacroKind::Attr if proc_macros_enabled => {
                    "attribute macro expansion is disabled"
                }
                _ => "proc-macro expansion is disabled",
            }
        },
    );

    Diagnostic::new("unresolved-proc-macro", message, display_range).severity(severity)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
new file mode 100644
index 000000000..8b9330e04
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
@@ -0,0 +1,148 @@
+use ide_db::{base_db::FileId, source_change::SourceChange};
+use itertools::Itertools;
+use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, Severity};
+
+// Diagnostic: unnecessary-braces
+//
+// Diagnostic for unnecessary braces in `use` items.
+pub(crate) fn useless_braces(
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+ node: &SyntaxNode,
+) -> Option<()> {
+ let use_tree_list = ast::UseTreeList::cast(node.clone())?;
+ if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
+ // If there is a comment inside the bracketed `use`,
+ // assume it is a commented out module path and don't show diagnostic.
+ if use_tree_list.has_inner_comment() {
+ return Some(());
+ }
+
+ let use_range = use_tree_list.syntax().text_range();
+ let edit = remove_braces(&single_use_tree).unwrap_or_else(|| {
+ let to_replace = single_use_tree.syntax().text().to_string();
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(use_range);
+ edit_builder.insert(use_range.start(), to_replace);
+ edit_builder.finish()
+ });
+
+ acc.push(
+ Diagnostic::new(
+ "unnecessary-braces",
+ "Unnecessary braces in use statement".to_string(),
+ use_range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_fixes(Some(vec![fix(
+ "remove_braces",
+ "Remove unnecessary braces",
+ SourceChange::from_text_edit(file_id, edit),
+ use_range,
+ )])),
+ );
+ }
+
+ Some(())
+}
+
+fn remove_braces(single_use_tree: &ast::UseTree) -> Option<TextEdit> {
+ let use_tree_list_node = single_use_tree.syntax().parent()?;
+ if single_use_tree.path()?.segment()?.self_token().is_some() {
+ let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
+ let end = use_tree_list_node.text_range().end();
+ return Some(TextEdit::delete(TextRange::new(start, end)));
+ }
+ None
+}
+
#[cfg(test)]
mod tests {
    use crate::tests::{check_diagnostics, check_fix};

    #[test]
    fn test_check_unnecessary_braces_in_use_statement() {
        // Multi-item braces (`{c, d::e}`) are necessary: no diagnostic expected.
        check_diagnostics(
            r#"
use a;
use a::{c, d::e};

mod a {
    mod c {}
    mod d {
        mod e {}
    }
}
"#,
        );
        // Braces containing a comment are treated as a commented-out module
        // path: no diagnostic expected either.
        check_diagnostics(
            r#"
use a;
use a::{
    c,
    // d::e
};

mod a {
    mod c {}
    mod d {
        mod e {}
    }
}
"#,
        );
        // The fix strips braces around a single top-level item, regardless of
        // where the cursor sits inside the braces.
        check_fix(
            r#"
mod b {}
use {$0b};
"#,
            r#"
mod b {}
use b;
"#,
        );
        check_fix(
            r#"
mod b {}
use {b$0};
"#,
            r#"
mod b {}
use b;
"#,
        );
        // Single item behind a path prefix.
        check_fix(
            r#"
mod a { mod c {} }
use a::{c$0};
"#,
            r#"
mod a { mod c {} }
use a::c;
"#,
        );
        // `use a::{self}` collapses to plain `use a;` (the `remove_braces` path).
        check_fix(
            r#"
mod a {}
use a::{self$0};
"#,
            r#"
mod a {}
use a;
"#,
        );
        // Nested braces: only the inner single-item braces are removed.
        check_fix(
            r#"
mod a { mod c {} mod d { mod e {} } }
use a::{c, d::{e$0}};
"#,
            r#"
mod a { mod c {} mod d { mod e {} } }
use a::{c, d::e};
"#,
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
new file mode 100644
index 000000000..41abaa836
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -0,0 +1,260 @@
+//! Diagnostics rendering and fixits.
+//!
+//! Most of the diagnostics originate from the dark depth of the compiler, and
+//! are originally expressed in term of IR. When we emit the diagnostic, we are
+//! usually not in the position to decide how to best "render" it in terms of
+//! user-authored source code. We are especially not in the position to offer
+//! fixits, as the compiler completely lacks the infrastructure to edit the
+//! source code.
+//!
+//! Instead, we "bubble up" raw, structured diagnostics until the `hir` crate,
+//! where we "cook" them so that each diagnostic is formulated in terms of `hir`
+//! types. Well, at least that's the aspiration, the "cooking" is somewhat
+//! ad-hoc at the moment. Anyways, we get a bunch of ide-friendly diagnostic
+//! structs from hir, and we want to render them to unified serializable
+//! representation (span, level, message) here. If we can, we also provide
+//! fixits. By the way, that's why we want to keep diagnostics structured
+//! internally -- so that we have all the info to make fixes.
+//!
+//! We have one "handler" module per diagnostic code. Such a module contains
+//! rendering, optional fixes and tests. It's OK if some low-level compiler
+//! functionality ends up being tested via a diagnostic.
+//!
+//! There are also a couple of ad-hoc diagnostics implemented directly here, we
+//! don't yet have a great pattern for how to do them properly.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod handlers {
+ pub(crate) mod break_outside_of_loop;
+ pub(crate) mod inactive_code;
+ pub(crate) mod incorrect_case;
+ pub(crate) mod invalid_derive_target;
+ pub(crate) mod macro_error;
+ pub(crate) mod malformed_derive;
+ pub(crate) mod mismatched_arg_count;
+ pub(crate) mod missing_fields;
+ pub(crate) mod missing_match_arms;
+ pub(crate) mod missing_unsafe;
+ pub(crate) mod no_such_field;
+ pub(crate) mod replace_filter_map_next_with_find_map;
+ pub(crate) mod type_mismatch;
+ pub(crate) mod unimplemented_builtin_macro;
+ pub(crate) mod unresolved_extern_crate;
+ pub(crate) mod unresolved_import;
+ pub(crate) mod unresolved_macro_call;
+ pub(crate) mod unresolved_module;
+ pub(crate) mod unresolved_proc_macro;
+
+ // The handlers below are unusual, the implement the diagnostics as well.
+ pub(crate) mod field_shorthand;
+ pub(crate) mod useless_braces;
+ pub(crate) mod unlinked_file;
+}
+
+#[cfg(test)]
+mod tests;
+
+use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
+use ide_db::{
+ assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
+ base_db::{FileId, FileRange, SourceDatabase},
+ label::Label,
+ source_change::SourceChange,
+ FxHashSet, RootDatabase,
+};
+use syntax::{algo::find_node_at_range, ast::AstNode, SyntaxNodePtr, TextRange};
+
/// Machine-readable identifier of a diagnostic, e.g. `"unresolved-module"`.
/// Matched against the user's `DiagnosticsConfig::disabled` set.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct DiagnosticCode(pub &'static str);

impl DiagnosticCode {
    /// Returns the code as a plain string slice.
    pub fn as_str(&self) -> &str {
        self.0
    }
}

/// A rendered diagnostic: code, human-readable message, the range it applies
/// to, and optional quick fixes.
#[derive(Debug)]
pub struct Diagnostic {
    pub code: DiagnosticCode,
    pub message: String,
    pub range: TextRange,
    pub severity: Severity,
    // Marks the diagnostic as pointing at unused/inactive code
    // (presumably rendered faded by clients — confirm against the LSP layer).
    pub unused: bool,
    // Experimental diagnostics can be suppressed wholesale via
    // `DiagnosticsConfig::disable_experimental`.
    pub experimental: bool,
    pub fixes: Option<Vec<Assist>>,
}
+
+impl Diagnostic {
+ fn new(code: &'static str, message: impl Into<String>, range: TextRange) -> Diagnostic {
+ let message = message.into();
+ Diagnostic {
+ code: DiagnosticCode(code),
+ message,
+ range,
+ severity: Severity::Error,
+ unused: false,
+ experimental: false,
+ fixes: None,
+ }
+ }
+
+ fn experimental(mut self) -> Diagnostic {
+ self.experimental = true;
+ self
+ }
+
+ fn severity(mut self, severity: Severity) -> Diagnostic {
+ self.severity = severity;
+ self
+ }
+
+ fn with_fixes(mut self, fixes: Option<Vec<Assist>>) -> Diagnostic {
+ self.fixes = fixes;
+ self
+ }
+
+ fn with_unused(mut self, unused: bool) -> Diagnostic {
+ self.unused = unused;
+ self
+ }
+}
+
/// How prominently a diagnostic is surfaced.
#[derive(Debug, Copy, Clone)]
pub enum Severity {
    Error,
    // We don't actually emit this one yet, but we should at some point.
    // Warning,
    WeakWarning,
}

/// How generated fixes fill in missing expressions: a `todo!()` placeholder
/// or a default value (see variant names; used via
/// `DiagnosticsConfig::expr_fill_default`).
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ExprFillDefaultMode {
    Todo,
    Default,
}
impl Default for ExprFillDefaultMode {
    fn default() -> Self {
        Self::Todo
    }
}

/// User-facing configuration that controls which diagnostics are computed
/// and how fixes are generated.
#[derive(Default, Debug, Clone)]
pub struct DiagnosticsConfig {
    /// Whether proc-macro expansion is enabled at all.
    pub proc_macros_enabled: bool,
    /// Whether attribute proc macros specifically may be expanded.
    pub proc_attr_macros_enabled: bool,
    /// Suppress diagnostics marked experimental.
    pub disable_experimental: bool,
    /// Diagnostic codes (`DiagnosticCode` strings) the user has turned off.
    pub disabled: FxHashSet<String>,
    /// How fixes fill in missing expressions.
    pub expr_fill_default: ExprFillDefaultMode,
}

/// Everything a diagnostic handler needs to render a diagnostic:
/// configuration, semantic analysis access, and the assist resolve strategy.
struct DiagnosticsContext<'a> {
    config: &'a DiagnosticsConfig,
    sema: Semantics<'a, RootDatabase>,
    resolve: &'a AssistResolveStrategy,
}
+
/// Computes all diagnostics for `file_id`: syntax errors, purely syntactic
/// lints, and semantic diagnostics bubbled up from `hir`, filtered by the
/// user's configuration.
pub fn diagnostics(
    db: &RootDatabase,
    config: &DiagnosticsConfig,
    resolve: &AssistResolveStrategy,
    file_id: FileId,
) -> Vec<Diagnostic> {
    let _p = profile::span("diagnostics");
    let sema = Semantics::new(db);
    let parse = db.parse(file_id);
    let mut res = Vec::new();

    // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
    res.extend(
        parse.errors().iter().take(128).map(|err| {
            Diagnostic::new("syntax-error", format!("Syntax Error: {}", err), err.range())
        }),
    );

    // Purely syntactic lints: these only need the parse tree.
    for node in parse.tree().syntax().descendants() {
        handlers::useless_braces::useless_braces(&mut res, file_id, &node);
        handlers::field_shorthand::field_shorthand(&mut res, file_id, &node);
    }

    let module = sema.to_module_def(file_id);

    let ctx = DiagnosticsContext { config, sema, resolve };
    // A file that maps to no module is not reachable from the crate graph.
    if module.is_none() {
        handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id);
    }

    let mut diags = Vec::new();
    if let Some(m) = module {
        m.diagnostics(db, &mut diags)
    }

    // Dispatch each structured hir diagnostic to its rendering handler.
    for diag in diags {
        #[rustfmt::skip]
        let d = match diag {
            AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d),
            AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d),
            AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d),
            AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d),
            AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d),
            AnyDiagnostic::MissingFields(d) => handlers::missing_fields::missing_fields(&ctx, &d),
            AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d),
            AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d),
            AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d),
            AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d),
            AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d),
            AnyDiagnostic::UnimplementedBuiltinMacro(d) => handlers::unimplemented_builtin_macro::unimplemented_builtin_macro(&ctx, &d),
            AnyDiagnostic::UnresolvedExternCrate(d) => handlers::unresolved_extern_crate::unresolved_extern_crate(&ctx, &d),
            AnyDiagnostic::UnresolvedImport(d) => handlers::unresolved_import::unresolved_import(&ctx, &d),
            AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d),
            AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d),
            AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled),
            AnyDiagnostic::InvalidDeriveTarget(d) => handlers::invalid_derive_target::invalid_derive_target(&ctx, &d),

            // The only handler that may decline to produce a diagnostic.
            AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) {
                Some(it) => it,
                None => continue,
            }
        };
        res.push(d)
    }

    // Apply user configuration last, so individual handlers don't need to
    // care about it.
    res.retain(|d| {
        !ctx.config.disabled.contains(d.code.as_str())
            && !(ctx.config.disable_experimental && d.experimental)
    });

    res
}
+
+fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist {
+ let mut res = unresolved_fix(id, label, target);
+ res.source_change = Some(source_change);
+ res
+}
+
/// Creates a quick-fix assist stub without a computed edit (`source_change`
/// is filled in later, or by `fix` above).
fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
    // Fix ids must be single tokens without spaces.
    assert!(!id.contains(' '));
    Assist {
        id: AssistId(id, AssistKind::QuickFix),
        label: Label::new(label.to_string()),
        group: None,
        target,
        source_change: None,
        trigger_signature_help: false,
    }
}
+
+fn adjusted_display_range<N: AstNode>(
+ ctx: &DiagnosticsContext<'_>,
+ diag_ptr: InFile<SyntaxNodePtr>,
+ adj: &dyn Fn(N) -> Option<TextRange>,
+) -> TextRange {
+ let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr);
+
+ let source_file = ctx.sema.db.parse(file_id);
+ find_node_at_range::<N>(&source_file.syntax_node(), range)
+ .filter(|it| it.syntax().text_range() == range)
+ .and_then(adj)
+ .unwrap_or(range)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
new file mode 100644
index 000000000..7312bca32
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -0,0 +1,145 @@
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen;
+
+use expect_test::Expect;
+use ide_db::{
+ assists::AssistResolveStrategy,
+ base_db::{fixture::WithFixture, SourceDatabaseExt},
+ RootDatabase,
+};
+use stdx::trim_indent;
+use test_utils::{assert_eq_text, extract_annotations};
+
+use crate::{DiagnosticsConfig, ExprFillDefaultMode, Severity};
+
/// Takes a multi-file input fixture with annotated cursor positions,
/// and checks that:
/// * a diagnostic is produced
/// * the first diagnostic fix trigger range touches the input cursor position
/// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
#[track_caller]
pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
    // Convenience wrapper: always applies the first (0th) fix.
    check_nth_fix(0, ra_fixture_before, ra_fixture_after);
}
+/// Takes a multi-file input fixture with annotated cursor positions,
+/// and checks that:
+/// * a diagnostic is produced
+/// * every diagnostic fixes trigger range touches the input cursor position
+/// * that the contents of the file containing the cursor match `after` after each diagnostic fix is applied
+pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
+ for (i, ra_fixture_after) in ra_fixtures_after.iter().enumerate() {
+ check_nth_fix(i, ra_fixture_before, ra_fixture_after)
+ }
+}
+
/// Applies the `nth` fix of the single expected diagnostic and checks the
/// resulting file contents against `ra_fixture_after`.
#[track_caller]
fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
    let after = trim_indent(ra_fixture_after);

    let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
    let mut conf = DiagnosticsConfig::default();
    conf.expr_fill_default = ExprFillDefaultMode::Default;
    // Take the last diagnostic produced for the file with the cursor.
    let diagnostic =
        super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
            .pop()
            .expect("no diagnostics");
    let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
    let actual = {
        let source_change = fix.source_change.as_ref().unwrap();
        let file_id = *source_change.source_file_edits.keys().next().unwrap();
        let mut actual = db.file_text(file_id).to_string();

        for edit in source_change.source_file_edits.values() {
            edit.apply(&mut actual);
        }
        actual
    };

    // The fix's trigger range must touch the cursor, otherwise a client would
    // never offer the fix at this position.
    assert!(
        fix.target.contains_inclusive(file_position.offset),
        "diagnostic fix range {:?} does not touch cursor position {:?}",
        fix.target,
        file_position.offset
    );
    assert_eq_text!(&after, &actual);
}
+
/// Checks that there's a diagnostic *without* fix at `$0`.
pub(crate) fn check_no_fix(ra_fixture: &str) {
    let (db, file_position) = RootDatabase::with_position(ra_fixture);
    let diagnostic = super::diagnostics(
        &db,
        &DiagnosticsConfig::default(),
        &AssistResolveStrategy::All,
        file_position.file_id,
    )
    .pop()
    .unwrap();
    assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
}

/// Compares the `Debug` rendering of all diagnostics in the single-file
/// fixture against an `expect-test` snapshot.
pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
    let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
    let diagnostics = super::diagnostics(
        &db,
        &DiagnosticsConfig::default(),
        &AssistResolveStrategy::All,
        file_id,
    );
    expect.assert_debug_eq(&diagnostics)
}

/// Checks diagnostics against inline fixture annotations (via
/// `extract_annotations`), with the noisy "inactive-code" diagnostic disabled.
#[track_caller]
pub(crate) fn check_diagnostics(ra_fixture: &str) {
    let mut config = DiagnosticsConfig::default();
    config.disabled.insert("inactive-code".to_string());
    check_diagnostics_with_config(config, ra_fixture)
}
+
+#[track_caller]
+pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
+ let (db, files) = RootDatabase::with_many_files(ra_fixture);
+ for file_id in files {
+ let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+
+ let expected = extract_annotations(&*db.file_text(file_id));
+ let mut actual = diagnostics
+ .into_iter()
+ .map(|d| {
+ let mut annotation = String::new();
+ if let Some(fixes) = &d.fixes {
+ assert!(!fixes.is_empty());
+ annotation.push_str("💡 ")
+ }
+ annotation.push_str(match d.severity {
+ Severity::Error => "error",
+ Severity::WeakWarning => "weak",
+ });
+ annotation.push_str(": ");
+ annotation.push_str(&d.message);
+ (d.range, annotation)
+ })
+ .collect::<Vec<_>>();
+ actual.sort_by_key(|(range, _)| range.start());
+ assert_eq!(expected, actual);
+ }
+}
+
#[test]
fn test_disabled_diagnostics() {
    let mut config = DiagnosticsConfig::default();
    config.disabled.insert("unresolved-module".into());

    let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);

    // With "unresolved-module" disabled the fixture yields no diagnostics...
    let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
    assert!(diagnostics.is_empty());

    // ...while the default configuration still reports the missing module.
    let diagnostics = super::diagnostics(
        &db,
        &DiagnosticsConfig::default(),
        &AssistResolveStrategy::All,
        file_id,
    );
    assert!(!diagnostics.is_empty());
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests/sourcegen.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests/sourcegen.rs
new file mode 100644
index 000000000..ec6558a46
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests/sourcegen.rs
@@ -0,0 +1,73 @@
+//! Generates `assists.md` documentation.
+
+use std::{fmt, fs, io, path::PathBuf};
+
+use sourcegen::project_root;
+
/// Regenerates `docs/user/generated_diagnostic.adoc` from the
/// `// Diagnostic:` comment blocks in the handler sources.
#[test]
fn sourcegen_diagnostic_docs() {
    let diagnostics = Diagnostic::collect().unwrap();
    let contents =
        diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
    let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents);
    let dst = project_root().join("docs/user/generated_diagnostic.adoc");
    fs::write(&dst, &contents).unwrap();
}

/// One documented diagnostic, extracted from a `// Diagnostic:` comment block.
#[derive(Debug)]
struct Diagnostic {
    // The diagnostic code, e.g. "unresolved-module".
    id: String,
    // File and line where the comment block was found.
    location: sourcegen::Location,
    // Free-form documentation text following the header line.
    doc: String,
}
+
impl Diagnostic {
    /// Scans every Rust file in the handlers directory and extracts one
    /// `Diagnostic` per `// Diagnostic: <id>` comment block, sorted by id.
    fn collect() -> io::Result<Vec<Diagnostic>> {
        let handlers_dir = project_root().join("crates/ide-diagnostics/src/handlers");

        let mut res = Vec::new();
        for path in sourcegen::list_rust_files(&handlers_dir) {
            collect_file(&mut res, path)?;
        }
        res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
        return Ok(res);

        // Extracts all diagnostic comment blocks from a single handler file.
        fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> io::Result<()> {
            let text = fs::read_to_string(&path)?;
            let comment_blocks = sourcegen::CommentBlock::extract("Diagnostic", &text);

            for block in comment_blocks {
                let id = block.id;
                // Panic (rather than returning Err) so the offending name is
                // prominent in the test failure output.
                if let Err(msg) = is_valid_diagnostic_name(&id) {
                    panic!("invalid diagnostic name: {:?}:\n {}", id, msg)
                }
                let doc = block.contents.join("\n");
                let location = sourcegen::Location { file: path.clone(), line: block.line };
                acc.push(Diagnostic { id, location, doc })
            }

            Ok(())
        }
    }
}
+
/// Validates a diagnostic id for the generated docs: after trimming, the name
/// must contain no whitespace, no uppercase letters, and only ASCII.
/// Returns a human-readable reason on failure.
fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
    let diagnostic = diagnostic.trim();
    // `contains` with a char predicate replaces the original
    // `find(char::is_whitespace).is_some()` (clippy: `search_is_some`).
    if diagnostic.contains(char::is_whitespace) {
        return Err("Diagnostic names can't contain whitespace symbols".into());
    }
    if diagnostic.chars().any(|c| c.is_ascii_uppercase()) {
        return Err("Diagnostic names can't contain uppercase symbols".into());
    }
    // `str::is_ascii` is equivalent to checking every char individually.
    if !diagnostic.is_ascii() {
        return Err("Diagnostic can't contain non-ASCII symbols".into());
    }

    Ok(())
}
+
impl fmt::Display for Diagnostic {
    /// Renders the diagnostic as an AsciiDoc section: heading, source
    /// location, then the documentation body.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
new file mode 100644
index 000000000..d36dd02d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "ide-ssr"
+version = "0.0.0"
+description = "Structural search and replace of Rust code"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+
+itertools = "0.10.3"
+
+text-edit = { path = "../text-edit", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/errors.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/errors.rs
new file mode 100644
index 000000000..c02bacae6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/errors.rs
@@ -0,0 +1,29 @@
+//! Code relating to errors produced by SSR.
+
/// Constructs an SsrError taking arguments like the format macro.
macro_rules! _error {
    ($fmt:expr) => {$crate::SsrError::new(format!($fmt))};
    ($fmt:expr, $($arg:tt)+) => {$crate::SsrError::new(format!($fmt, $($arg)+))}
}
// Re-export under the name other modules actually use: `crate::errors::error`.
pub(crate) use _error as error;

/// Returns from the current function with an error, supplied by arguments as for format!
macro_rules! _bail {
    ($($tokens:tt)*) => {return Err(crate::errors::error!($($tokens)*))}
}
pub(crate) use _bail as bail;
+
/// An error produced while parsing or applying an SSR rule.
#[derive(Debug, PartialEq)]
pub struct SsrError(pub(crate) String);

impl SsrError {
    /// Wraps `msg` in an `SsrError`.
    pub(crate) fn new(msg: impl Into<String>) -> SsrError {
        Self(msg.into())
    }
}

impl std::fmt::Display for SsrError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "Parse error: {}", self.0)
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs
new file mode 100644
index 000000000..503754afe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs
@@ -0,0 +1,58 @@
+//! When specifying SSR rule, you generally want to map one *kind* of thing to
+//! the same kind of thing: path to path, expression to expression, type to
+//! type.
+//!
+//! The problem is, while this *kind* is generally obvious to the human, the ide
+//! needs to determine it somehow. We do this in a stupid way -- by pasting SSR
+//! rule into different contexts and checking what works.
+
+use syntax::{ast, AstNode, SyntaxNode};
+
/// Parses `s` as a type by pasting it into a type-alias template.
pub(crate) fn ty(s: &str) -> Result<SyntaxNode, ()> {
    fragment::<ast::Type>("type T = {};", s)
}

/// Parses `s` as a free-standing item.
pub(crate) fn item(s: &str) -> Result<SyntaxNode, ()> {
    fragment::<ast::Item>("{}", s)
}

/// Parses `s` as a pattern by pasting it into a `let` binding template.
pub(crate) fn pat(s: &str) -> Result<SyntaxNode, ()> {
    fragment::<ast::Pat>("const _: () = {let {} = ();};", s)
}

/// Parses `s` as an expression by pasting it into a const-initializer template.
pub(crate) fn expr(s: &str) -> Result<SyntaxNode, ()> {
    fragment::<ast::Expr>("const _: () = {};", s)
}

/// Parses `s` as a statement. Unlike the other fragments this cannot reuse
/// `fragment`, because the template appends a `;` that may need stripping.
pub(crate) fn stmt(s: &str) -> Result<SyntaxNode, ()> {
    let template = "const _: () = { {}; };";
    let input = template.replace("{}", s);
    let parse = syntax::SourceFile::parse(&input);
    if !parse.errors().is_empty() {
        return Err(());
    }
    // `.skip(2)` skips past the wrapper nodes introduced by the template so
    // we find the statement itself, not an enclosing construct.
    let mut node =
        parse.tree().syntax().descendants().skip(2).find_map(ast::Stmt::cast).ok_or(())?;
    // If the input had no trailing `;` but the parsed statement does, the `;`
    // came from the template — detach it so the round-trip text matches.
    if !s.ends_with(';') && node.to_string().ends_with(';') {
        node = node.clone_for_update();
        node.syntax().last_token().map(|it| it.detach());
    }
    if node.to_string() != s {
        return Err(());
    }
    Ok(node.syntax().clone_subtree())
}
+
+fn fragment<T: AstNode>(template: &str, s: &str) -> Result<SyntaxNode, ()> {
+ let s = s.trim();
+ let input = template.replace("{}", s);
+ let parse = syntax::SourceFile::parse(&input);
+ if !parse.errors().is_empty() {
+ return Err(());
+ }
+ let node = parse.tree().syntax().descendants().find_map(T::cast).ok_or(())?;
+ if node.syntax().text() != s {
+ return Err(());
+ }
+ Ok(node.syntax().clone_subtree())
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
new file mode 100644
index 000000000..5b6e01625
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
@@ -0,0 +1,35 @@
+//! This module allows building an SSR MatchFinder by parsing the SSR rule
+//! from a comment.
+
+use ide_db::{
+ base_db::{FilePosition, FileRange, SourceDatabase},
+ RootDatabase,
+};
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ TextRange,
+};
+
+use crate::MatchFinder;
+
/// Attempts to build an SSR MatchFinder from a comment at the given file
/// range. If successful, returns the MatchFinder and a TextRange covering
/// comment.
pub fn ssr_from_comment(
    db: &RootDatabase,
    frange: FileRange,
) -> Option<(MatchFinder<'_>, TextRange)> {
    // Find a comment token at the start of the requested range.
    let comment = {
        let file = db.parse(frange.file_id);
        file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
    }?;
    // The rule text is everything after the comment prefix (`//`, `///`, ...);
    // the prefix is guaranteed to be present, hence the `unwrap`.
    let comment_text_without_prefix = comment.text().strip_prefix(comment.prefix()).unwrap();
    let ssr_rule = comment_text_without_prefix.parse().ok()?;

    // Resolve names as if written at the position of the comment itself.
    let lookup_context = FilePosition { file_id: frange.file_id, offset: frange.range.start() };

    let mut match_finder = MatchFinder::in_context(db, lookup_context, vec![]).ok()?;
    match_finder.add_rule(ssr_rule).ok()?;

    Some((match_finder, comment.syntax().text_range()))
}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
new file mode 100644
index 000000000..a5e24daa9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -0,0 +1,358 @@
+//! Structural Search Replace
+//!
+//! Allows searching the AST for code that matches one or more patterns and then replacing that code
+//! based on a template.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+// Feature: Structural Search and Replace
+//
+// Search and replace with named wildcards that will match any expression, type, path, pattern or item.
+// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
+// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
+// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
+//
+// All paths in both the search pattern and the replacement template must resolve in the context
+// in which this command is invoked. Paths in the search pattern will then match the code if they
+// resolve to the same item, even if they're written differently. For example if we invoke the
+// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
+// to `foo::Bar` will match.
+//
+// Paths in the replacement template will be rendered appropriately for the context in which the
+// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
+// code in the `foo` module, we'll insert just `Bar`.
+//
+// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
+// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a
+// placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in
+// the replacement template (e.g. `bar($s)`), then *, & and &mut will be added as needed to mirror
+// whatever autoderef and autoref was happening implicitly in the matched code.
+//
+// The scope of the search / replace will be restricted to the current selection if any, otherwise
+// it will apply to the whole workspace.
+//
+// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
+//
+// Supported constraints:
+//
+// |===
+// | Constraint | Restricts placeholder
+//
+// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
+// | not(a) | Negates the constraint `a`
+// |===
+//
+// Available via the command `rust-analyzer.ssr`.
+//
+// ```rust
+// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
+//
+// // BEFORE
+// String::from(foo(y + 5, z))
+//
+// // AFTER
+// String::from((y + 5).foo(z))
+// ```
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Structural Search Replace**
+// |===
+//
+// Also available as an assist, by writing a comment containing the structural
+// search and replace rule. You will only see the assist if the comment can
+// be parsed as a valid structural search and replace rule.
+//
+// ```rust
+// // Place the cursor on the line below to see the assist 💡.
+// // foo($a, $b) ==>> ($a).foo($b)
+// ```
+
+mod from_comment;
+mod matching;
+mod nester;
+mod parsing;
+mod fragments;
+mod replacing;
+mod resolving;
+mod search;
+#[macro_use]
+mod errors;
+#[cfg(test)]
+mod tests;
+
+pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Match};
+
+use crate::{errors::bail, matching::MatchFailureReason};
+use hir::Semantics;
+use ide_db::{
+ base_db::{FileId, FilePosition, FileRange},
+ FxHashMap,
+};
+use resolving::ResolvedRule;
+use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use text_edit::TextEdit;
+
/// A structured search replace rule. Create by calling `parse` on a str.
#[derive(Debug)]
pub struct SsrRule {
    /// A structured pattern that we're searching for.
    pattern: parsing::RawPattern,
    /// What we'll replace it with.
    template: parsing::RawPattern,
    /// Lowered form of the rule, as produced by the `parsing` module.
    parsed_rules: Vec<parsing::ParsedRule>,
}

/// A search pattern without a replacement template.
#[derive(Debug)]
pub struct SsrPattern {
    parsed_rules: Vec<parsing::ParsedRule>,
}

/// The collected matches produced by a `MatchFinder`.
#[derive(Debug, Default)]
pub struct SsrMatches {
    pub matches: Vec<Match>,
}

/// Searches a crate for pattern matches and possibly replaces them with something else.
pub struct MatchFinder<'db> {
    /// Our source of information about the user's code.
    sema: Semantics<'db, ide_db::RootDatabase>,
    /// Rules to apply; earlier rules take precedence (see `add_rule`).
    rules: Vec<ResolvedRule>,
    /// Name-resolution scope derived from the lookup position.
    resolution_scope: resolving::ResolutionScope<'db>,
    /// When non-empty, matching is restricted to these ranges
    /// (empty ranges are dropped in `in_context`).
    restrict_ranges: Vec<FileRange>,
}
+
+impl<'db> MatchFinder<'db> {
+ /// Constructs a new instance where names will be looked up as if they appeared at
+ /// `lookup_context`.
+ pub fn in_context(
+ db: &'db ide_db::RootDatabase,
+ lookup_context: FilePosition,
+ mut restrict_ranges: Vec<FileRange>,
+ ) -> Result<MatchFinder<'db>, SsrError> {
+ restrict_ranges.retain(|range| !range.range.is_empty());
+ let sema = Semantics::new(db);
+ let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context)
+ .ok_or_else(|| SsrError("no resolution scope for file".into()))?;
+ Ok(MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges })
+ }
+
+ /// Constructs an instance using the start of the first file in `db` as the lookup context.
+ pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
+ use ide_db::base_db::SourceDatabaseExt;
+ use ide_db::symbol_index::SymbolsDatabase;
+ if let Some(first_file_id) =
+ db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
+ {
+ MatchFinder::in_context(
+ db,
+ FilePosition { file_id: first_file_id, offset: 0.into() },
+ vec![],
+ )
+ } else {
+ bail!("No files to search");
+ }
+ }
+
+ /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
+ /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
+ /// match to it.
+ pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
+ for parsed_rule in rule.parsed_rules {
+ self.rules.push(ResolvedRule::new(
+ parsed_rule,
+ &self.resolution_scope,
+ self.rules.len(),
+ )?);
+ }
+ Ok(())
+ }
+
+    /// Finds matches for all added rules and returns edits for all found matches.
+    pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
+        use ide_db::base_db::SourceDatabaseExt;
+        // Group matches by the file they occur in, then render one TextEdit per file.
+        let mut matches_by_file = FxHashMap::default();
+        for m in self.matches().matches {
+            matches_by_file
+                .entry(m.range.file_id)
+                .or_insert_with(SsrMatches::default)
+                .matches
+                .push(m);
+        }
+        matches_by_file
+            .into_iter()
+            .map(|(file_id, matches)| {
+                (
+                    file_id,
+                    replacing::matches_to_edit(
+                        &matches,
+                        &self.sema.db.file_text(file_id),
+                        &self.rules,
+                    ),
+                )
+            })
+            .collect()
+    }
+
+    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
+    /// intend to do replacement, use `add_rule` instead.
+    pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
+        // Mirrors `add_rule`, but the parsed rules carry no replacement template.
+        for parsed_rule in pattern.parsed_rules {
+            self.rules.push(ResolvedRule::new(
+                parsed_rule,
+                &self.resolution_scope,
+                self.rules.len(),
+            )?);
+        }
+        Ok(())
+    }
+
+    /// Returns matches for all added rules.
+    pub fn matches(&self) -> SsrMatches {
+        let mut matches = Vec::new();
+        // The usage cache is shared across rules so repeated definition-usage searches
+        // aren't recomputed per rule.
+        let mut usage_cache = search::UsageCache::default();
+        for rule in &self.rules {
+            self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
+        }
+        // Overlapping/nested raw matches are resolved into a tree; colliding ones are dropped.
+        nester::nest_and_remove_collisions(matches, &self.sema)
+    }
+
+    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
+    /// them, while recording reasons why they don't match. This API is useful for command
+    /// line-based debugging where providing a range is difficult.
+    pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
+        use ide_db::base_db::SourceDatabaseExt;
+        let file = self.sema.parse(file_id);
+        let mut res = Vec::new();
+        let file_text = self.sema.db.file_text(file_id);
+        // Scan the raw file text for every occurrence of `snippet`; `base` tracks the
+        // absolute offset of `remaining_text` within the file.
+        let mut remaining_text = file_text.as_str();
+        let mut base = 0;
+        let len = snippet.len() as u32;
+        while let Some(offset) = remaining_text.find(snippet) {
+            let start = base + offset as u32;
+            let end = start + len;
+            self.output_debug_for_nodes_at_range(
+                file.syntax(),
+                FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
+                &None,
+                &mut res,
+            );
+            remaining_text = &remaining_text[offset + snippet.len()..];
+            base = end;
+        }
+        res
+    }
+
+    /// Recursively walks `node`, collecting debug information for every descendant whose
+    /// original range is exactly `range`. Also descends into macro expansions whose call-site
+    /// token tree covers `range`, restricting subsequent matches to that token tree.
+    fn output_debug_for_nodes_at_range(
+        &self,
+        node: &SyntaxNode,
+        range: FileRange,
+        restrict_range: &Option<FileRange>,
+        out: &mut Vec<MatchDebugInfo>,
+    ) {
+        for node in node.children() {
+            let node_range = self.sema.original_range(&node);
+            // Skip subtrees that can't contain the target range.
+            if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
+            {
+                continue;
+            }
+            if node_range.range == range.range {
+                for rule in &self.rules {
+                    // For now we ignore rules that have a different kind than our node, otherwise
+                    // we get lots of noise. If at some point we add support for restricting rules
+                    // to a particular kind of thing (e.g. only match type references), then we can
+                    // relax this. We special-case expressions, since function calls can match
+                    // method calls.
+                    if rule.pattern.node.kind() != node.kind()
+                        && !(ast::Expr::can_cast(rule.pattern.node.kind())
+                            && ast::Expr::can_cast(node.kind()))
+                    {
+                        continue;
+                    }
+                    out.push(MatchDebugInfo {
+                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
+                            .map_err(|e| MatchFailureReason {
+                                reason: e.reason.unwrap_or_else(|| {
+                                    "Match failed, but no reason was given".to_owned()
+                                }),
+                            }),
+                        pattern: rule.pattern.node.clone(),
+                        node: node.clone(),
+                    });
+                }
+            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
+                if let Some(expanded) = self.sema.expand(&macro_call) {
+                    if let Some(tt) = macro_call.token_tree() {
+                        self.output_debug_for_nodes_at_range(
+                            &expanded,
+                            range,
+                            &Some(self.sema.original_range(tt.syntax())),
+                            out,
+                        );
+                    }
+                }
+            }
+            self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
+        }
+    }
+}
+}
+
+/// Diagnostic record produced by `MatchFinder::debug_where_text_equal`: one candidate node
+/// together with the pattern it was tried against and the outcome.
+pub struct MatchDebugInfo {
+    // The code node that was attempted.
+    node: SyntaxNode,
+    /// Our search pattern parsed as an expression or item, etc
+    pattern: SyntaxNode,
+    // Ok if the node matched, otherwise the recorded reason for failure.
+    matched: Result<Match, MatchFailureReason>,
+}
+
+impl std::fmt::Debug for MatchDebugInfo {
+    // Human-oriented, multi-line report: outcome first, then the AST of the candidate node
+    // and of the pattern, for side-by-side comparison on the command line.
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match &self.matched {
+            Ok(_) => writeln!(f, "Node matched")?,
+            Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?,
+        }
+        writeln!(
+            f,
+            "============ AST ===========\n\
+            {:#?}",
+            self.node
+        )?;
+        writeln!(f, "========= PATTERN ==========")?;
+        writeln!(f, "{:#?}", self.pattern)?;
+        writeln!(f, "============================")?;
+        Ok(())
+    }
+}
+
+impl SsrMatches {
+    /// Returns `self` with any nested matches removed and made into top-level matches.
+    pub fn flattened(self) -> SsrMatches {
+        let mut out = SsrMatches::default();
+        self.flatten_into(&mut out);
+        out
+    }
+
+    // Depth-first: pull each match's inner matches (stored per-placeholder) up into `out`
+    // before pushing the match itself.
+    fn flatten_into(self, out: &mut SsrMatches) {
+        for mut m in self.matches {
+            for p in m.placeholder_values.values_mut() {
+                // `take` leaves an empty SsrMatches behind so `m` stays valid to push.
+                std::mem::take(&mut p.inner_matches).flatten_into(out);
+            }
+            out.matches.push(m);
+        }
+    }
+}
+
+impl Match {
+    /// Returns the source text covered by the matched node.
+    pub fn matched_text(&self) -> String {
+        self.matched_node.text().to_string()
+    }
+}
+
+impl std::error::Error for SsrError {}
+
+#[cfg(test)]
+impl MatchDebugInfo {
+    /// Test helper: the recorded failure reason, or `None` if the node matched.
+    pub(crate) fn match_failure_reason(&self) -> Option<&str> {
+        self.matched.as_ref().err().map(|r| r.reason.as_str())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
new file mode 100644
index 000000000..e3a837ddc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -0,0 +1,803 @@
+//! This module is responsible for matching a search pattern against a node in the AST. In the
+//! process of matching, placeholder values are recorded.
+
+use crate::{
+ parsing::{Constraint, NodeKind, Placeholder, Var},
+ resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
+ SsrMatches,
+};
+use hir::Semantics;
+use ide_db::{base_db::FileRange, FxHashMap};
+use std::{cell::Cell, iter::Peekable};
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken,
+};
+
+// Creates a match error. If we're currently attempting to match some code that we thought we were
+// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
+// Two arms: a single expression (formatted with `{}`), or a format string plus arguments.
+macro_rules! match_error {
+    ($e:expr) => {{
+        MatchFailed {
+            reason: if recording_match_fail_reasons() {
+                Some(format!("{}", $e))
+            } else {
+                None
+            }
+        }
+    }};
+    ($fmt:expr, $($arg:tt)+) => {{
+        MatchFailed {
+            reason: if recording_match_fail_reasons() {
+                Some(format!($fmt, $($arg)+))
+            } else {
+                None
+            }
+        }
+    }};
+}
+
+// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons.
+// Expands to an early `return Err(...)`, so it may only be used inside functions returning
+// `Result<_, MatchFailed>`.
+macro_rules! fail_match {
+    ($($args:tt)*) => {return Err(match_error!($($args)*))};
+}
+
+/// Information about a match that was found.
+#[derive(Debug)]
+pub struct Match {
+    // File and text range of the matched code.
+    pub(crate) range: FileRange,
+    pub(crate) matched_node: SyntaxNode,
+    // What each placeholder in the pattern bound to.
+    pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
+    // Comments inside the matched code that the pattern didn't mention; preserved on replace.
+    pub(crate) ignored_comments: Vec<ast::Comment>,
+    // Index of the rule that produced this match; lower index = higher precedence.
+    pub(crate) rule_index: usize,
+    /// The depth of matched_node.
+    pub(crate) depth: usize,
+    // Each path in the template rendered for the module in which the match was found.
+    pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
+}
+
+/// Information about a placeholder bound in a match.
+#[derive(Debug)]
+pub(crate) struct PlaceholderMatch {
+    // Range of the code the placeholder bound to.
+    pub(crate) range: FileRange,
+    /// More matches, found within `node`.
+    pub(crate) inner_matches: SsrMatches,
+    /// How many times the code that the placeholder matched needed to be dereferenced. Will only be
+    /// non-zero if the placeholder matched to the receiver of a method call.
+    pub(crate) autoderef_count: usize,
+    // Whether the receiver was taken by value, by `&` or by `&mut` — needed when rewriting a
+    // method call as a UFCS call.
+    pub(crate) autoref_kind: ast::SelfParamKind,
+}
+
+/// Human-readable explanation of why a candidate node failed to match.
+#[derive(Debug)]
+pub(crate) struct MatchFailureReason {
+    pub(crate) reason: String,
+}
+
+/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this.
+#[derive(Clone)]
+pub(crate) struct MatchFailed {
+    /// The reason why we failed to match. Only present when debug_active true in call to
+    /// `get_match`.
+    pub(crate) reason: Option<String>,
+}
+
+/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
+/// the match, if it does. Since we only do matching in this module and searching is done by the
+/// parent module, we don't populate nested matches.
+pub(crate) fn get_match(
+    debug_active: bool,
+    rule: &ResolvedRule,
+    code: &SyntaxNode,
+    restrict_range: &Option<FileRange>,
+    sema: &Semantics<'_, ide_db::RootDatabase>,
+) -> Result<Match, MatchFailed> {
+    // `debug_active` toggles the thread-local that makes fail_match! record its reason.
+    record_match_fails_reasons_scope(debug_active, || {
+        Matcher::try_match(rule, code, restrict_range, sema)
+    })
+}
+
+/// Checks if our search pattern matches a particular node of the AST.
+struct Matcher<'db, 'sema> {
+    sema: &'sema Semantics<'db, ide_db::RootDatabase>,
+    /// If any placeholders come from anywhere outside of this range, then the match will be
+    /// rejected.
+    restrict_range: Option<FileRange>,
+    rule: &'sema ResolvedRule,
+}
+
+/// Which phase of matching we're currently performing. We do two phases because most attempted
+/// matches will fail and it means we can defer more expensive checks to the second phase.
+enum Phase<'a> {
+    /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded.
+    First,
+    /// On the second phase, we construct the `Match`. Things like what placeholders bind to is
+    /// recorded.
+    Second(&'a mut Match),
+}
+
+impl<'db, 'sema> Matcher<'db, 'sema> {
+    /// Entry point: runs both matching phases against `code` and, on success, builds the
+    /// completed `Match` (placeholder bindings, depth, rendered template paths).
+    fn try_match(
+        rule: &ResolvedRule,
+        code: &SyntaxNode,
+        restrict_range: &Option<FileRange>,
+        sema: &'sema Semantics<'db, ide_db::RootDatabase>,
+    ) -> Result<Match, MatchFailed> {
+        let match_state = Matcher { sema, restrict_range: *restrict_range, rule };
+        // First pass at matching, where we check that node types and idents match.
+        match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
+        match_state.validate_range(&sema.original_range(code))?;
+        let mut the_match = Match {
+            range: sema.original_range(code),
+            matched_node: code.clone(),
+            placeholder_values: FxHashMap::default(),
+            ignored_comments: Vec::new(),
+            rule_index: rule.index,
+            depth: 0,
+            rendered_template_paths: FxHashMap::default(),
+        };
+        // Second matching pass, where we record placeholder matches, ignored comments and maybe do
+        // any other more expensive checks that we didn't want to do on the first pass.
+        match_state.attempt_match_node(
+            &mut Phase::Second(&mut the_match),
+            &rule.pattern.node,
+            code,
+        )?;
+        the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
+        if let Some(template) = &rule.template {
+            the_match.render_template_paths(template, sema)?;
+        }
+        Ok(the_match)
+    }
+
+    /// Checks that `range` is within the permitted range if any. This is applicable when we're
+    /// processing a macro expansion and we want to fail the match if we're working with a node that
+    /// didn't originate from the token tree of the macro call.
+    fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> {
+        if let Some(restrict_range) = &self.restrict_range {
+            if restrict_range.file_id != range.file_id
+                || !restrict_range.range.contains_range(range.range)
+            {
+                fail_match!("Node originated from a macro");
+            }
+        }
+        Ok(())
+    }
+
+    /// Core recursive step: matches one pattern node against one code node, dispatching to
+    /// placeholder handling, UFCS/method-call bridging, or kind-specific matching.
+    fn attempt_match_node(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern: &SyntaxNode,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        // Handle placeholders.
+        if let Some(placeholder) = self.get_placeholder_for_node(pattern) {
+            for constraint in &placeholder.constraints {
+                self.check_constraint(constraint, code)?;
+            }
+            if let Phase::Second(matches_out) = phase {
+                let original_range = self.sema.original_range(code);
+                // We validated the range for the node when we started the match, so the placeholder
+                // probably can't fail range validation, but just to be safe...
+                self.validate_range(&original_range)?;
+                matches_out.placeholder_values.insert(
+                    placeholder.ident.clone(),
+                    PlaceholderMatch::from_range(original_range),
+                );
+            }
+            return Ok(());
+        }
+        // We allow a UFCS call to match a method call, provided they resolve to the same function.
+        if let Some(pattern_ufcs) = self.rule.pattern.ufcs_function_calls.get(pattern) {
+            if let Some(code) = ast::MethodCallExpr::cast(code.clone()) {
+                return self.attempt_match_ufcs_to_method_call(phase, pattern_ufcs, &code);
+            }
+            if let Some(code) = ast::CallExpr::cast(code.clone()) {
+                return self.attempt_match_ufcs_to_ufcs(phase, pattern_ufcs, &code);
+            }
+        }
+        if pattern.kind() != code.kind() {
+            fail_match!(
+                "Pattern had `{}` ({:?}), code had `{}` ({:?})",
+                pattern.text(),
+                pattern.kind(),
+                code.text(),
+                code.kind()
+            );
+        }
+        // Some kinds of nodes have special handling. For everything else, we fall back to default
+        // matching.
+        match code.kind() {
+            SyntaxKind::RECORD_EXPR_FIELD_LIST => {
+                self.attempt_match_record_field_list(phase, pattern, code)
+            }
+            SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
+            SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
+            _ => self.attempt_match_node_children(phase, pattern, code),
+        }
+    }
+
+    /// Default matching: pair up the children of `pattern` and `code` in order.
+    fn attempt_match_node_children(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern: &SyntaxNode,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        self.attempt_match_sequences(
+            phase,
+            PatternIterator::new(pattern),
+            code.children_with_tokens(),
+        )
+    }
+
+    /// Walks two child sequences in lockstep, failing if either side has leftover
+    /// non-trivia elements.
+    fn attempt_match_sequences(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern_it: PatternIterator,
+        mut code_it: SyntaxElementChildren,
+    ) -> Result<(), MatchFailed> {
+        let mut pattern_it = pattern_it.peekable();
+        loop {
+            match phase.next_non_trivial(&mut code_it) {
+                None => {
+                    if let Some(p) = pattern_it.next() {
+                        fail_match!("Part of the pattern was unmatched: {:?}", p);
+                    }
+                    return Ok(());
+                }
+                Some(SyntaxElement::Token(c)) => {
+                    self.attempt_match_token(phase, &mut pattern_it, &c)?;
+                }
+                Some(SyntaxElement::Node(c)) => match pattern_it.next() {
+                    Some(SyntaxElement::Node(p)) => {
+                        self.attempt_match_node(phase, &p, &c)?;
+                    }
+                    Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()),
+                    None => fail_match!("Pattern reached end, code has {}", c.text()),
+                },
+            }
+        }
+    }
+
+    /// Matches a single code token against the pattern, tolerating trailing-comma
+    /// differences in either direction.
+    fn attempt_match_token(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern: &mut Peekable<PatternIterator>,
+        code: &syntax::SyntaxToken,
+    ) -> Result<(), MatchFailed> {
+        phase.record_ignored_comments(code);
+        // Ignore whitespace and comments.
+        if code.kind().is_trivia() {
+            return Ok(());
+        }
+        if let Some(SyntaxElement::Token(p)) = pattern.peek() {
+            // If the code has a comma and the pattern is about to close something, then accept the
+            // comma without advancing the pattern. i.e. ignore trailing commas.
+            if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) {
+                return Ok(());
+            }
+            // Conversely, if the pattern has a comma and the code doesn't, skip that part of the
+            // pattern and continue to match the code.
+            if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) {
+                pattern.next();
+            }
+        }
+        // Consume an element from the pattern and make sure it matches.
+        match pattern.next() {
+            Some(SyntaxElement::Token(p)) => {
+                if p.kind() != code.kind() || p.text() != code.text() {
+                    fail_match!(
+                        "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})",
+                        p.text(),
+                        p.kind(),
+                        code.text(),
+                        code.kind()
+                    )
+                }
+            }
+            Some(SyntaxElement::Node(p)) => {
+                // Not sure if this is actually reachable.
+                fail_match!(
+                    "Pattern wanted {:?}, but code had token '{}' ({:?})",
+                    p,
+                    code.text(),
+                    code.kind()
+                );
+            }
+            None => {
+                fail_match!("Pattern exhausted, while code remains: `{}`", code.text());
+            }
+        }
+        Ok(())
+    }
+
+    /// Enforces a placeholder constraint (`kind(...)` / `not(...)`) against the matched code.
+    fn check_constraint(
+        &self,
+        constraint: &Constraint,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        match constraint {
+            Constraint::Kind(kind) => {
+                kind.matches(code)?;
+            }
+            Constraint::Not(sub) => {
+                // `not(...)` succeeds exactly when the inner constraint fails.
+                if self.check_constraint(&*sub, code).is_ok() {
+                    fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
+                }
+            }
+        }
+        Ok(())
+    }
+
+    /// Paths are matched based on whether they refer to the same thing, even if they're written
+    /// differently.
+    fn attempt_match_path(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern: &SyntaxNode,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
+            let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
+            let code_path = ast::Path::cast(code.clone()).unwrap();
+            if let (Some(pattern_segment), Some(code_segment)) =
+                (pattern_path.segment(), code_path.segment())
+            {
+                // Match everything within the segment except for the name-ref, which is handled
+                // separately via comparing what the path resolves to below.
+                self.attempt_match_opt(
+                    phase,
+                    pattern_segment.generic_arg_list(),
+                    code_segment.generic_arg_list(),
+                )?;
+                self.attempt_match_opt(
+                    phase,
+                    pattern_segment.param_list(),
+                    code_segment.param_list(),
+                )?;
+            }
+            if matches!(phase, Phase::Second(_)) {
+                // Resolution is comparatively expensive, so it's deferred to the second phase.
+                let resolution = self
+                    .sema
+                    .resolve_path(&code_path)
+                    .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
+                if pattern_resolved.resolution != resolution {
+                    fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
+                }
+            }
+        } else {
+            return self.attempt_match_node_children(phase, pattern, code);
+        }
+        Ok(())
+    }
+
+    /// Matches two optional AST nodes: both absent is a match, both present recurses,
+    /// anything else fails.
+    fn attempt_match_opt<T: AstNode>(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern: Option<T>,
+        code: Option<T>,
+    ) -> Result<(), MatchFailed> {
+        match (pattern, code) {
+            (Some(p), Some(c)) => self.attempt_match_node(phase, p.syntax(), c.syntax()),
+            (None, None) => Ok(()),
+            (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
+            (None, Some(c)) => {
+                fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
+            }
+        }
+    }
+
+    /// We want to allow the records to match in any order, so we have special matching logic for
+    /// them.
+    fn attempt_match_record_field_list(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern: &SyntaxNode,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        // Build a map keyed by field name.
+        let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
+        for child in code.children() {
+            if let Some(record) = ast::RecordExprField::cast(child.clone()) {
+                if let Some(name) = record.field_name() {
+                    fields_by_name.insert(name.text().into(), child.clone());
+                }
+            }
+        }
+        for p in pattern.children_with_tokens() {
+            if let SyntaxElement::Node(p) = p {
+                if let Some(name_element) = p.first_child_or_token() {
+                    if self.get_placeholder(&name_element).is_some() {
+                        // If the pattern is using placeholders for field names then order
+                        // independence doesn't make sense. Fall back to regular ordered
+                        // matching.
+                        return self.attempt_match_node_children(phase, pattern, code);
+                    }
+                    if let Some(ident) = only_ident(name_element) {
+                        // Remove matched fields from the map so leftovers can be reported below.
+                        let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
+                            match_error!(
+                                "Placeholder has record field '{}', but code doesn't",
+                                ident
+                            )
+                        })?;
+                        self.attempt_match_node(phase, &p, &code_record)?;
+                    }
+                }
+            }
+        }
+        if let Some(unmatched_fields) = fields_by_name.keys().next() {
+            fail_match!(
+                "{} field(s) of a record literal failed to match, starting with {}",
+                fields_by_name.len(),
+                unmatched_fields
+            );
+        }
+        Ok(())
+    }
+
+    /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token
+    /// tree it can match a sequence of tokens. Note, that this code will only be used when the
+    /// pattern matches the macro invocation. For matches within the macro call, we'll already have
+    /// expanded the macro.
+    fn attempt_match_token_tree(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern: &SyntaxNode,
+        code: &syntax::SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        let mut pattern = PatternIterator::new(pattern).peekable();
+        let mut children = code.children_with_tokens();
+        while let Some(child) = children.next() {
+            if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
+                pattern.next();
+                // The placeholder greedily consumes tokens until the next literal pattern token.
+                let next_pattern_token = pattern
+                    .peek()
+                    .and_then(|p| match p {
+                        SyntaxElement::Token(t) => Some(t.clone()),
+                        SyntaxElement::Node(n) => n.first_token(),
+                    })
+                    .map(|p| p.text().to_string());
+                let first_matched_token = child.clone();
+                let mut last_matched_token = child;
+                // Read code tokens util we reach one equal to the next token from our pattern
+                // or we reach the end of the token tree.
+                for next in &mut children {
+                    match &next {
+                        SyntaxElement::Token(t) => {
+                            if Some(t.to_string()) == next_pattern_token {
+                                pattern.next();
+                                break;
+                            }
+                        }
+                        SyntaxElement::Node(n) => {
+                            if let Some(first_token) = n.first_token() {
+                                if Some(first_token.text()) == next_pattern_token.as_deref() {
+                                    if let Some(SyntaxElement::Node(p)) = pattern.next() {
+                                        // We have a subtree that starts with the next token in our pattern.
+                                        self.attempt_match_token_tree(phase, &p, n)?;
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                    };
+                    last_matched_token = next;
+                }
+                if let Phase::Second(match_out) = phase {
+                    match_out.placeholder_values.insert(
+                        placeholder.ident.clone(),
+                        PlaceholderMatch::from_range(FileRange {
+                            file_id: self.sema.original_range(code).file_id,
+                            range: first_matched_token
+                                .text_range()
+                                .cover(last_matched_token.text_range()),
+                        }),
+                    );
+                }
+                continue;
+            }
+            // Match literal (non-placeholder) tokens.
+            match child {
+                SyntaxElement::Token(token) => {
+                    self.attempt_match_token(phase, &mut pattern, &token)?;
+                }
+                SyntaxElement::Node(node) => match pattern.next() {
+                    Some(SyntaxElement::Node(p)) => {
+                        self.attempt_match_token_tree(phase, &p, &node)?;
+                    }
+                    Some(SyntaxElement::Token(p)) => fail_match!(
+                        "Pattern has token '{}', code has subtree '{}'",
+                        p.text(),
+                        node.text()
+                    ),
+                    None => fail_match!("Pattern has nothing, code has '{}'", node.text()),
+                },
+            }
+        }
+        if let Some(p) = pattern.next() {
+            fail_match!("Reached end of token tree in code, but pattern still has {:?}", p);
+        }
+        Ok(())
+    }
+
+    /// Matches a UFCS call pattern (`Type::func(recv, args...)`) against a method call
+    /// (`recv.func(args...)`), requiring both to resolve to the same function.
+    fn attempt_match_ufcs_to_method_call(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern_ufcs: &UfcsCallInfo,
+        code: &ast::MethodCallExpr,
+    ) -> Result<(), MatchFailed> {
+        use ast::HasArgList;
+        let code_resolved_function = self
+            .sema
+            .resolve_method_call(code)
+            .ok_or_else(|| match_error!("Failed to resolve method call"))?;
+        if pattern_ufcs.function != code_resolved_function {
+            fail_match!("Method call resolved to a different function");
+        }
+        // Check arguments.
+        let mut pattern_args = pattern_ufcs
+            .call_expr
+            .arg_list()
+            .ok_or_else(|| match_error!("Pattern function call has no args"))?
+            .args();
+        // If the function we're calling takes a self parameter, then we store additional
+        // information on the placeholder match about autoderef and autoref. This allows us to use
+        // the placeholder in a context where autoderef and autoref don't apply.
+        if code_resolved_function.self_param(self.sema.db).is_some() {
+            if let (Some(pattern_type), Some(expr)) =
+                (&pattern_ufcs.qualifier_type, &code.receiver())
+            {
+                let deref_count = self.check_expr_type(pattern_type, expr)?;
+                let pattern_receiver = pattern_args.next();
+                self.attempt_match_opt(phase, pattern_receiver.clone(), code.receiver())?;
+                if let Phase::Second(match_out) = phase {
+                    if let Some(placeholder_value) = pattern_receiver
+                        .and_then(|n| self.get_placeholder_for_node(n.syntax()))
+                        .and_then(|placeholder| {
+                            match_out.placeholder_values.get_mut(&placeholder.ident)
+                        })
+                    {
+                        placeholder_value.autoderef_count = deref_count;
+                        placeholder_value.autoref_kind = self
+                            .sema
+                            .resolve_method_call_as_callable(code)
+                            .and_then(|callable| callable.receiver_param(self.sema.db))
+                            .map(|self_param| self_param.kind())
+                            .unwrap_or(ast::SelfParamKind::Owned);
+                    }
+                }
+            }
+        } else {
+            self.attempt_match_opt(phase, pattern_args.next(), code.receiver())?;
+        }
+        let mut code_args =
+            code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
+        loop {
+            // Pair remaining args; `attempt_match_opt` fails on a length mismatch.
+            match (pattern_args.next(), code_args.next()) {
+                (None, None) => return Ok(()),
+                (p, c) => self.attempt_match_opt(phase, p, c)?,
+            }
+        }
+    }
+
+    /// Matches a UFCS call pattern against another UFCS call in the code.
+    fn attempt_match_ufcs_to_ufcs(
+        &self,
+        phase: &mut Phase<'_>,
+        pattern_ufcs: &UfcsCallInfo,
+        code: &ast::CallExpr,
+    ) -> Result<(), MatchFailed> {
+        use ast::HasArgList;
+        // Check that the first argument is the expected type.
+        if let (Some(pattern_type), Some(expr)) = (
+            &pattern_ufcs.qualifier_type,
+            &code.arg_list().and_then(|code_args| code_args.args().next()),
+        ) {
+            self.check_expr_type(pattern_type, expr)?;
+        }
+        self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax())
+    }
+
+    /// Verifies that `expr` matches `pattern_type`, possibly after dereferencing some number of
+    /// times. Returns the number of times it needed to be dereferenced.
+    fn check_expr_type(
+        &self,
+        pattern_type: &hir::Type,
+        expr: &ast::Expr,
+    ) -> Result<usize, MatchFailed> {
+        use hir::HirDisplay;
+        let code_type = self
+            .sema
+            .type_of_expr(expr)
+            .ok_or_else(|| {
+                match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
+            })?
+            .original;
+        // Temporary needed to make the borrow checker happy.
+        let res = code_type
+            .autoderef(self.sema.db)
+            .enumerate()
+            .find(|(_, deref_code_type)| pattern_type == deref_code_type)
+            .map(|(count, _)| count)
+            .ok_or_else(|| {
+                match_error!(
+                    "Pattern type `{}` didn't match code type `{}`",
+                    pattern_type.display(self.sema.db),
+                    code_type.display(self.sema.db)
+                )
+            });
+        res
+    }
+
+    /// Returns the placeholder that `node` corresponds to, if the node is a placeholder.
+    fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> {
+        self.get_placeholder(&SyntaxElement::Node(node.clone()))
+    }
+
+    /// Returns the placeholder for `element`, which must reduce to a lone identifier.
+    fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
+        only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
+    }
+}
+
+impl Match {
+    /// For each resolved path in the replacement `template`, computes the path to use at this
+    /// match's location (i.e. a path that names the same definition from the match's module)
+    /// and records it in `rendered_template_paths`.
+    fn render_template_paths(
+        &mut self,
+        template: &ResolvedPattern,
+        sema: &Semantics<'_, ide_db::RootDatabase>,
+    ) -> Result<(), MatchFailed> {
+        let module = sema
+            .scope(&self.matched_node)
+            .ok_or_else(|| match_error!("Matched node isn't in a module"))?
+            .module();
+        for (path, resolved_path) in &template.resolved_paths {
+            if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
+                // Fix: pass the failing path as a format argument. The previous form hit the
+                // single-expression arm of `match_error!`, so the `{}` was emitted literally
+                // and the message never named the path that failed to render.
+                let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
+                    match_error!(
+                        "Failed to render template path `{}` at match location",
+                        path.text()
+                    )
+                })?;
+                self.rendered_template_paths.insert(path.clone(), mod_path);
+            }
+        }
+        Ok(())
+    }
+}
+
+impl Phase<'_> {
+    /// Advances `code_it` past whitespace/comments, returning the next significant element.
+    /// Skipped comments are still recorded (second phase only).
+    fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> {
+        loop {
+            let c = code_it.next();
+            if let Some(SyntaxElement::Token(t)) = &c {
+                self.record_ignored_comments(t);
+                if t.kind().is_trivia() {
+                    continue;
+                }
+            }
+            return c;
+        }
+    }
+
+    /// If `token` is a comment and we're in the second phase, remember it so replacement can
+    /// preserve comments that the pattern didn't mention.
+    fn record_ignored_comments(&mut self, token: &SyntaxToken) {
+        if token.kind() == SyntaxKind::COMMENT {
+            if let Phase::Second(match_out) = self {
+                if let Some(comment) = ast::Comment::cast(token.clone()) {
+                    match_out.ignored_comments.push(comment);
+                }
+            }
+        }
+    }
+}
+
+/// True for `)`, `}` and `]` — used when deciding whether to tolerate trailing commas.
+fn is_closing_token(kind: SyntaxKind) -> bool {
+    kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK
+}
+
+/// Runs `f` with failure-reason recording set to `debug_active`, resetting it afterwards.
+/// NOTE(review): the flag is unconditionally reset to `false` rather than restored, so
+/// nested scopes would disable recording for the remainder of the outer scope.
+pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T
+where
+    F: Fn() -> T,
+{
+    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active));
+    let res = f();
+    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false));
+    res
+}
+
+// For performance reasons, we don't want to record the reason why every match fails, only the bit
+// of code that the user indicated they thought would match. We use a thread local to indicate when
+// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
+// of code that can make the decision to not match.
+thread_local! {
+    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
+}
+
+/// Reads the thread-local flag set by `record_match_fails_reasons_scope`.
+fn recording_match_fail_reasons() -> bool {
+    RECORDING_MATCH_FAIL_REASONS.with(|c| c.get())
+}
+
+impl PlaceholderMatch {
+    /// Creates a binding covering `range` with no inner matches and no autoderef/autoref
+    /// adjustments (those are filled in later for method-call receivers).
+    fn from_range(range: FileRange) -> Self {
+        Self {
+            range,
+            inner_matches: SsrMatches::default(),
+            autoderef_count: 0,
+            autoref_kind: ast::SelfParamKind::Owned,
+        }
+    }
+}
+
+impl NodeKind {
+    /// Checks whether `node` satisfies this kind constraint, failing the match otherwise.
+    fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
+        let ok = match self {
+            Self::Literal => {
+                cov_mark::hit!(literal_constraint);
+                ast::Literal::can_cast(node.kind())
+            }
+        };
+        if !ok {
+            fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
+        }
+        Ok(())
+    }
+}
+
+// If `node` contains nothing but an ident then return it, otherwise return None.
+// Recurses through chains of single-child nodes (e.g. PATH -> PATH_SEGMENT -> NAME_REF -> IDENT).
+fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
+    match element {
+        SyntaxElement::Token(t) => {
+            if t.kind() == SyntaxKind::IDENT {
+                return Some(t);
+            }
+        }
+        SyntaxElement::Node(n) => {
+            let mut children = n.children_with_tokens();
+            if let (Some(only_child), None) = (children.next(), children.next()) {
+                return only_ident(only_child);
+            }
+        }
+    }
+    None
+}
+
+/// Iterator over a pattern node's children that transparently skips trivia
+/// (whitespace and comments).
+struct PatternIterator {
+    iter: SyntaxElementChildren,
+}
+
+impl Iterator for PatternIterator {
+    type Item = SyntaxElement;
+
+    /// Yields the next non-trivia child element, or `None` when exhausted.
+    fn next(&mut self) -> Option<SyntaxElement> {
+        for element in &mut self.iter {
+            if !element.kind().is_trivia() {
+                return Some(element);
+            }
+        }
+        None
+    }
+}
+
+impl PatternIterator {
+    /// Creates an iterator over the direct children (nodes and tokens) of `parent`.
+    fn new(parent: &SyntaxNode) -> Self {
+        Self { iter: parent.children_with_tokens() }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{MatchFinder, SsrRule};
+
+    // End-to-end smoke test: parse a rule, find its single match, check the recorded
+    // placeholder, and verify the rendered edit.
+    #[test]
+    fn parse_match_replace() {
+        let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
+        let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
+
+        let (db, position, selections) = crate::tests::single_file(input);
+        let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+        match_finder.add_rule(rule).unwrap();
+        let matches = match_finder.matches();
+        assert_eq!(matches.matches.len(), 1);
+        assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
+        assert_eq!(matches.matches[0].placeholder_values.len(), 1);
+
+        let edits = match_finder.edits();
+        assert_eq!(edits.len(), 1);
+        let edit = &edits[&position.file_id];
+        let mut after = input.to_string();
+        edit.apply(&mut after);
+        assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/nester.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/nester.rs
new file mode 100644
index 000000000..afaaafd1f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/nester.rs
@@ -0,0 +1,99 @@
+//! Converts a flat collection of matches into a nested form suitable for replacement. When there
+//! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested
+//! matches are only permitted if the inner match is contained entirely within a placeholder of an
+//! outer match.
+//!
+//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`,
+//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The
+//! middle match would take the second `foo` from the outer match.
+
+use ide_db::FxHashMap;
+use syntax::SyntaxNode;
+
+use crate::{Match, SsrMatches};
+
+pub(crate) fn nest_and_remove_collisions(
+ mut matches: Vec<Match>,
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+) -> SsrMatches {
+ // We sort the matches by depth then by rule index. Sorting by depth means that by the time we
+ // see a match, any parent matches or conflicting matches will have already been seen. Sorting
+ // by rule_index means that if there are two matches for the same node, the rule added first
+ // will take precedence.
+ matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index)));
+ let mut collector = MatchCollector::default();
+ for m in matches {
+ collector.add_match(m, sema);
+ }
+ collector.into()
+}
+
/// Accumulates non-conflicting matches. Top-level matches are keyed by their matched syntax
/// node; nested matches live inside the placeholder values of their parent match, not here.
#[derive(Default)]
struct MatchCollector {
    matches_by_node: FxHashMap<SyntaxNode, Match>,
}
+
impl MatchCollector {
    /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
    /// it is entirely within a placeholder of an existing match, then it is added as a child
    /// match of the existing match.
    fn add_match(&mut self, m: Match, sema: &hir::Semantics<'_, ide_db::RootDatabase>) {
        let matched_node = m.matched_node.clone();
        // Exact-node collision: an earlier (higher-priority) match already claimed this node.
        if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
            try_add_sub_match(m, existing, sema);
            return;
        }
        // Some ancestor may already be matched; if so, `m` can only survive as a sub-match
        // inside one of that match's placeholders.
        for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
            if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
                try_add_sub_match(m, existing, sema);
                return;
            }
        }
        self.matches_by_node.insert(matched_node, m);
    }
}
+
/// Attempts to add `m` as a sub-match of `existing`. If `m` is not contained within any
/// placeholder of `existing`, it is silently dropped (it conflicts with `existing`).
fn try_add_sub_match(
    m: Match,
    existing: &mut Match,
    sema: &hir::Semantics<'_, ide_db::RootDatabase>,
) {
    for p in existing.placeholder_values.values_mut() {
        // Note, no need to check if p.range.file is equal to m.range.file, since we
        // already know we're within `existing`.
        if p.range.range.contains_range(m.range.range) {
            // Convert the inner matches in `p` into a temporary MatchCollector. When
            // we're done, we then convert it back into an SsrMatches. If we expected
            // lots of inner matches, it might be worthwhile keeping a MatchCollector
            // around for each placeholder match. However we expect most placeholder
            // will have 0 and a few will have 1. More than that should hopefully be
            // exceptional.
            let mut collector = MatchCollector::default();
            for m in std::mem::take(&mut p.inner_matches.matches) {
                collector.matches_by_node.insert(m.matched_node.clone(), m);
            }
            collector.add_match(m, sema);
            p.inner_matches = collector.into();
            break;
        }
    }
}
+
+impl From<MatchCollector> for SsrMatches {
+ fn from(mut match_collector: MatchCollector) -> Self {
+ let mut matches = SsrMatches::default();
+ for (_, m) in match_collector.matches_by_node.drain() {
+ matches.matches.push(m);
+ }
+ matches.matches.sort_by(|a, b| {
+ // Order matches by file_id then by start range. This should be sufficient since ranges
+ // shouldn't be overlapping.
+ a.range
+ .file_id
+ .cmp(&b.range.file_id)
+ .then_with(|| a.range.range.start().cmp(&b.range.range.start()))
+ });
+ matches
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
new file mode 100644
index 000000000..f6220b928
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
@@ -0,0 +1,406 @@
+//! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`.
+//! We first split everything before and after the separator `==>>`. Next, both the search pattern
+//! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for
+//! placeholders, which start with `$`. For replacement templates, this is the final form. For
+//! search patterns, we go further and parse the pattern as each kind of thing that we can match.
+//! e.g. expressions, type references etc.
+use ide_db::{FxHashMap, FxHashSet};
+use std::{fmt::Display, str::FromStr};
+use syntax::{SmolStr, SyntaxKind, SyntaxNode, T};
+
+use crate::errors::bail;
+use crate::{fragments, SsrError, SsrPattern, SsrRule};
+
/// A rule that has been parsed into concrete syntax trees: one `(pattern, template)` pair per
/// fragment kind (expression, type, item, pattern, statement) the raw rule parsed as.
#[derive(Debug)]
pub(crate) struct ParsedRule {
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    pub(crate) pattern: SyntaxNode,
    pub(crate) template: Option<SyntaxNode>,
}

/// A pattern as written by the user, before being parsed as Rust code: a flat sequence of
/// tokens and placeholders.
#[derive(Debug)]
pub(crate) struct RawPattern {
    tokens: Vec<PatternElement>,
}

// Part of a search or replace pattern.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum PatternElement {
    Token(Token),
    Placeholder(Placeholder),
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Placeholder {
    /// The name of this placeholder. e.g. for "$a", this would be "a"
    pub(crate) ident: Var,
    /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
    stand_in_name: String,
    /// Constraints (e.g. `kind(literal)`) restricting what this placeholder may match.
    pub(crate) constraints: Vec<Constraint>,
}

/// Represents a `$var` in an SSR query.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct Var(pub(crate) String);

/// A restriction on what a placeholder is allowed to match.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Constraint {
    Kind(NodeKind),
    Not(Box<Constraint>),
}

/// Node kinds that may appear in a `kind(...)` constraint.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum NodeKind {
    Literal,
}

/// A single lexed token from a raw pattern: its syntax kind and text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Token {
    kind: SyntaxKind,
    pub(crate) text: SmolStr,
}
+
impl ParsedRule {
    /// Parses `pattern` (and `template`, when present) as each kind of syntax fragment it could
    /// be — expression, type, item, pattern, statement — producing one rule per combination
    /// where both sides parsed successfully.
    fn new(
        pattern: &RawPattern,
        template: Option<&RawPattern>,
    ) -> Result<Vec<ParsedRule>, SsrError> {
        let raw_pattern = pattern.as_rust_code();
        let raw_template = template.map(|t| t.as_rust_code());
        let raw_template = raw_template.as_deref();
        let mut builder = RuleBuilder {
            placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
            rules: Vec::new(),
        };

        let raw_template_stmt = raw_template.map(fragments::stmt);
        // Prefer pairing an expression pattern with an expression template; fall back to a
        // statement template only when the template doesn't parse as an expression.
        if let raw_template_expr @ Some(Ok(_)) = raw_template.map(fragments::expr) {
            builder.try_add(fragments::expr(&raw_pattern), raw_template_expr);
        } else {
            builder.try_add(fragments::expr(&raw_pattern), raw_template_stmt.clone());
        }
        builder.try_add(fragments::ty(&raw_pattern), raw_template.map(fragments::ty));
        builder.try_add(fragments::item(&raw_pattern), raw_template.map(fragments::item));
        builder.try_add(fragments::pat(&raw_pattern), raw_template.map(fragments::pat));
        builder.try_add(fragments::stmt(&raw_pattern), raw_template_stmt);
        builder.build()
    }
}
+
/// Accumulates the `(pattern, template)` parse combinations that succeeded.
struct RuleBuilder {
    // Shared placeholder table, cloned into each produced `ParsedRule`.
    placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    rules: Vec<ParsedRule>,
}
+
impl RuleBuilder {
    /// Records a rule for this (pattern, template) pair, but only if the pattern parsed
    /// successfully and the template (when one is required) did too.
    fn try_add(
        &mut self,
        pattern: Result<SyntaxNode, ()>,
        template: Option<Result<SyntaxNode, ()>>,
    ) {
        match (pattern, template) {
            (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
                pattern,
                template: Some(template),
            }),
            (Ok(pattern), None) => self.rules.push(ParsedRule {
                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
                pattern,
                template: None,
            }),
            _ => {}
        }
    }

    /// Finalizes the accumulated rules: fails if nothing parsed, and drops non-path rules when
    /// at least one path-based rule exists (see rationale below).
    fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> {
        if self.rules.is_empty() {
            bail!("Not a valid Rust expression, type, item, path or pattern");
        }
        // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a
        // mix leads to strange semantics, since the path-based rules only match things where the
        // path refers to semantically the same thing, whereas the non-path-based rules could match
        // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the
        // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a
        // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in
        // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd
        // have to use the slow-scan search mechanism.
        if self.rules.iter().any(|rule| contains_path(&rule.pattern)) {
            let old_len = self.rules.len();
            self.rules.retain(|rule| contains_path(&rule.pattern));
            if self.rules.len() < old_len {
                cov_mark::hit!(pattern_is_a_single_segment_path);
            }
        }
        Ok(self.rules)
    }
}
+
+/// Returns whether there are any paths in `node`.
+fn contains_path(node: &SyntaxNode) -> bool {
+ node.kind() == SyntaxKind::PATH
+ || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
+}
+
+impl FromStr for SsrRule {
+ type Err = SsrError;
+
+ fn from_str(query: &str) -> Result<SsrRule, SsrError> {
+ let mut it = query.split("==>>");
+ let pattern = it.next().expect("at least empty string").trim();
+ let template = it
+ .next()
+ .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
+ .trim()
+ .to_string();
+ if it.next().is_some() {
+ return Err(SsrError("More than one delimiter found".into()));
+ }
+ let raw_pattern = pattern.parse()?;
+ let raw_template = template.parse()?;
+ let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
+ let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
+ validate_rule(&rule)?;
+ Ok(rule)
+ }
+}
+
+impl FromStr for RawPattern {
+ type Err = SsrError;
+
+ fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
+ Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
+ }
+}
+
+impl RawPattern {
+ /// Returns this search pattern as Rust source code that we can feed to the Rust parser.
+ fn as_rust_code(&self) -> String {
+ let mut res = String::new();
+ for t in &self.tokens {
+ res.push_str(match t {
+ PatternElement::Token(token) => token.text.as_str(),
+ PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(),
+ });
+ }
+ res
+ }
+
+ pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
+ let mut res = FxHashMap::default();
+ for t in &self.tokens {
+ if let PatternElement::Placeholder(placeholder) = t {
+ res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone());
+ }
+ }
+ res
+ }
+}
+
+impl FromStr for SsrPattern {
+ type Err = SsrError;
+
+ fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
+ let raw_pattern = pattern_str.parse()?;
+ let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
+ Ok(SsrPattern { parsed_rules })
+ }
+}
+
/// Returns `pattern_str`, parsed as a flat sequence of tokens and placeholders. Used for both
/// search patterns and replacement templates. Placeholders are introduced by `$` and each
/// placeholder name may appear at most once.
/// (Note: an earlier revision of this comment mentioned a `remove_whitespace` parameter that
/// no longer exists — whitespace tokens are kept here and filtered elsewhere if needed.)
fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
    let mut res = Vec::new();
    // Tracks placeholder names seen so far so duplicates can be rejected.
    let mut placeholder_names = FxHashSet::default();
    let mut tokens = tokenize(pattern_str)?.into_iter();
    while let Some(token) = tokens.next() {
        if token.kind == T![$] {
            let placeholder = parse_placeholder(&mut tokens)?;
            if !placeholder_names.insert(placeholder.ident.clone()) {
                bail!("Placeholder `{}` repeats more than once", placeholder.ident);
            }
            res.push(PatternElement::Placeholder(placeholder));
        } else {
            res.push(PatternElement::Token(token));
        }
    }
    Ok(res)
}
+
+/// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search
+/// pattern didn't define.
+fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
+ let mut defined_placeholders = FxHashSet::default();
+ for p in &rule.pattern.tokens {
+ if let PatternElement::Placeholder(placeholder) = p {
+ defined_placeholders.insert(&placeholder.ident);
+ }
+ }
+ let mut undefined = Vec::new();
+ for p in &rule.template.tokens {
+ if let PatternElement::Placeholder(placeholder) = p {
+ if !defined_placeholders.contains(&placeholder.ident) {
+ undefined.push(placeholder.ident.to_string());
+ }
+ if !placeholder.constraints.is_empty() {
+ bail!("Replacement placeholders cannot have constraints");
+ }
+ }
+ }
+ if !undefined.is_empty() {
+ bail!("Replacement contains undefined placeholders: {}", undefined.join(", "));
+ }
+ Ok(())
+}
+
+fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
+ let lexed = parser::LexedStr::new(source);
+ if let Some((_, first_error)) = lexed.errors().next() {
+ bail!("Failed to parse pattern: {}", first_error);
+ }
+ let mut tokens: Vec<Token> = Vec::new();
+ for i in 0..lexed.len() {
+ tokens.push(Token { kind: lexed.kind(i), text: lexed.text(i).into() });
+ }
+ Ok(tokens)
+}
+
/// Parses the remainder of a placeholder after its leading `$` has been consumed. Accepts
/// either a bare `$name` or the braced form `${name:constraint...}`.
fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
    let mut name = None;
    let mut constraints = Vec::new();
    if let Some(token) = tokens.next() {
        match token.kind {
            // Bare form: `$name`.
            SyntaxKind::IDENT => {
                name = Some(token.text);
            }
            // Braced form: `${name:constraint1:constraint2}`.
            T!['{'] => {
                let token =
                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
                if token.kind == SyntaxKind::IDENT {
                    name = Some(token.text);
                }
                // Consume constraints (each introduced by `:`) until the closing brace.
                loop {
                    let token = tokens
                        .next()
                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
                    match token.kind {
                        T![:] => {
                            constraints.push(parse_constraint(tokens)?);
                        }
                        T!['}'] => break,
                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
                    }
                }
            }
            _ => {
                bail!("Placeholders should either be $name or ${{name:constraints}}");
            }
        }
    }
    let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
    Ok(Placeholder::new(name, constraints))
}
+
/// Parses a single constraint such as `kind(literal)` or `not(kind(literal))` from the token
/// stream. Called after the `:` that introduces a constraint has been consumed.
fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
    let constraint_type = tokens
        .next()
        .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
        .text
        .to_string();
    match constraint_type.as_str() {
        "kind" => {
            expect_token(tokens, "(")?;
            let t = tokens.next().ok_or_else(|| {
                SsrError::new("Unexpected end of constraint while looking for kind")
            })?;
            if t.kind != SyntaxKind::IDENT {
                bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
            }
            expect_token(tokens, ")")?;
            Ok(Constraint::Kind(NodeKind::from(&t.text)?))
        }
        // `not(...)` wraps any other constraint and inverts it.
        "not" => {
            expect_token(tokens, "(")?;
            let sub = parse_constraint(tokens)?;
            expect_token(tokens, ")")?;
            Ok(Constraint::Not(Box::new(sub)))
        }
        x => bail!("Unsupported constraint type '{}'", x),
    }
}
+
+fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
+ if let Some(t) = tokens.next() {
+ if t.text == expected {
+ return Ok(());
+ }
+ bail!("Expected {} found {}", expected, t.text);
+ }
+ bail!("Expected {} found end of stream", expected);
+}
+
+impl NodeKind {
+ fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
+ Ok(match name.as_str() {
+ "literal" => NodeKind::Literal,
+ _ => bail!("Unknown node kind '{}'", name),
+ })
+ }
+}
+
+impl Placeholder {
+ fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
+ Self {
+ stand_in_name: format!("__placeholder_{}", name),
+ constraints,
+ ident: Var(name.to_string()),
+ }
+ }
+}
+
impl Display for Var {
    /// Renders the variable with its leading `$`, as written in an SSR query.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "${}", self.0)
    }
}
+
#[cfg(test)]
mod tests {
    use super::*;

    /// Verifies that a simple rule splits into the expected token/placeholder sequences on
    /// both the search and replacement sides.
    #[test]
    fn parser_happy_case() {
        fn token(kind: SyntaxKind, text: &str) -> PatternElement {
            PatternElement::Token(Token { kind, text: SmolStr::new(text) })
        }
        fn placeholder(name: &str) -> PatternElement {
            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
        }
        let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
        assert_eq!(
            result.pattern.tokens,
            vec![
                token(SyntaxKind::IDENT, "foo"),
                token(T!['('], "("),
                placeholder("a"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("b"),
                token(T![')'], ")"),
            ]
        );
        assert_eq!(
            result.template.tokens,
            vec![
                token(SyntaxKind::IDENT, "bar"),
                token(T!['('], "("),
                placeholder("b"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("a"),
                token(T![')'], ")"),
            ]
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs
new file mode 100644
index 000000000..e27ef6e35
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs
@@ -0,0 +1,242 @@
+//! Code for applying replacement templates for matches that have previously been found.
+
+use ide_db::{FxHashMap, FxHashSet};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize,
+};
+use text_edit::TextEdit;
+
+use crate::{fragments, resolving::ResolvedRule, Match, SsrMatches};
+
/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
/// template. Placeholders in the template will have been substituted with whatever they matched to
/// in the original code.
pub(crate) fn matches_to_edit(
    matches: &SsrMatches,
    file_src: &str,
    rules: &[ResolvedRule],
) -> TextEdit {
    // Top-level entry: ranges are absolute, so the relative offset is zero.
    matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
}
+
+fn matches_to_edit_at_offset(
+ matches: &SsrMatches,
+ file_src: &str,
+ relative_start: TextSize,
+ rules: &[ResolvedRule],
+) -> TextEdit {
+ let mut edit_builder = TextEdit::builder();
+ for m in &matches.matches {
+ edit_builder.replace(
+ m.range.range.checked_sub(relative_start).unwrap(),
+ render_replace(m, file_src, rules),
+ );
+ }
+ edit_builder.finish()
+}
+
/// State for rendering one match's replacement template into source text.
struct ReplacementRenderer<'a> {
    match_info: &'a Match,
    file_src: &'a str,
    rules: &'a [ResolvedRule],
    rule: &'a ResolvedRule,
    // The rendered output accumulated so far.
    out: String,
    // Map from a range within `out` to a token in `template` that represents a placeholder. This is
    // used to validate that the generated source code doesn't split any placeholder expansions (see
    // below).
    placeholder_tokens_by_range: FxHashMap<TextRange, SyntaxToken>,
    // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out`
    // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1
    // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis.
    placeholder_tokens_requiring_parenthesis: FxHashSet<SyntaxToken>,
}
+
/// Renders the replacement template for `match_info`'s rule, substituting placeholder values
/// and re-appending any comments that were ignored during matching.
fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
    let rule = &rules[match_info.rule_index];
    let template = rule
        .template
        .as_ref()
        .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
    let mut renderer = ReplacementRenderer {
        match_info,
        file_src,
        rules,
        rule,
        out: String::new(),
        placeholder_tokens_requiring_parenthesis: FxHashSet::default(),
        placeholder_tokens_by_range: FxHashMap::default(),
    };
    renderer.render_node(&template.node);
    // Second pass: re-render with parenthesis around any placeholder that got split by
    // precedence when the first render was re-parsed.
    renderer.maybe_rerender_with_extra_parenthesis(&template.node);
    // Comments skipped while matching are preserved by appending them to the output.
    for comment in &match_info.ignored_comments {
        renderer.out.push_str(&comment.syntax().to_string());
    }
    renderer.out
}
+
impl ReplacementRenderer<'_> {
    /// Renders each child (node or token) of `node` in order.
    fn render_node_children(&mut self, node: &SyntaxNode) {
        for node_or_token in node.children_with_tokens() {
            self.render_node_or_token(&node_or_token);
        }
    }

    /// Dispatches rendering to the token or node handler.
    fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
        match node_or_token {
            SyntaxElement::Token(token) => {
                self.render_token(token);
            }
            SyntaxElement::Node(child_node) => {
                self.render_node(child_node);
            }
        }
    }

    /// Renders `node`, substituting any pre-rendered module path recorded for it; otherwise
    /// recurses into its children.
    fn render_node(&mut self, node: &SyntaxNode) {
        if let Some(mod_path) = self.match_info.rendered_template_paths.get(node) {
            self.out.push_str(&mod_path.to_string());
            // Emit everything except for the segment's name-ref, since we already effectively
            // emitted that as part of `mod_path`.
            if let Some(path) = ast::Path::cast(node.clone()) {
                if let Some(segment) = path.segment() {
                    for node_or_token in segment.syntax().children_with_tokens() {
                        if node_or_token.kind() != SyntaxKind::NAME_REF {
                            self.render_node_or_token(&node_or_token);
                        }
                    }
                }
            }
        } else {
            self.render_node_children(node);
        }
    }

    /// Renders a single token. Placeholder tokens are replaced by the source text they matched
    /// (with any nested replacements applied, and `&`/`*` adjustments for autoref/autoderef);
    /// all other tokens are emitted verbatim.
    fn render_token(&mut self, token: &SyntaxToken) {
        if let Some(placeholder) = self.rule.get_placeholder(token) {
            if let Some(placeholder_value) =
                self.match_info.placeholder_values.get(&placeholder.ident)
            {
                let range = &placeholder_value.range.range;
                let mut matched_text =
                    self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
                // If a method call is performed directly on the placeholder, then autoderef and
                // autoref will apply, so we can just substitute whatever the placeholder matched to
                // directly. If we're not applying a method call, then we need to add explicitly
                // deref and ref in order to match whatever was being done implicitly at the match
                // site.
                if !token_is_method_call_receiver(token)
                    && (placeholder_value.autoderef_count > 0
                        || placeholder_value.autoref_kind != ast::SelfParamKind::Owned)
                {
                    cov_mark::hit!(replace_autoref_autoderef_capture);
                    let ref_kind = match placeholder_value.autoref_kind {
                        ast::SelfParamKind::Owned => "",
                        ast::SelfParamKind::Ref => "&",
                        ast::SelfParamKind::MutRef => "&mut ",
                    };
                    matched_text = format!(
                        "{}{}{}",
                        ref_kind,
                        "*".repeat(placeholder_value.autoderef_count),
                        matched_text
                    );
                }
                // Apply nested (inner) matches to the placeholder's own text.
                let edit = matches_to_edit_at_offset(
                    &placeholder_value.inner_matches,
                    self.file_src,
                    range.start(),
                    self.rules,
                );
                let needs_parenthesis =
                    self.placeholder_tokens_requiring_parenthesis.contains(token);
                edit.apply(&mut matched_text);
                if needs_parenthesis {
                    self.out.push('(');
                }
                // Record where this placeholder expansion landed in `out`, so the re-parse
                // validation pass can detect if it got split by operator precedence.
                self.placeholder_tokens_by_range.insert(
                    TextRange::new(
                        TextSize::of(&self.out),
                        TextSize::of(&self.out) + TextSize::of(&matched_text),
                    ),
                    token.clone(),
                );
                self.out.push_str(&matched_text);
                if needs_parenthesis {
                    self.out.push(')');
                }
            } else {
                // We validated that all placeholder references were valid before we
                // started, so this shouldn't happen.
                panic!(
                    "Internal error: replacement referenced unknown placeholder {}",
                    placeholder.ident
                );
            }
        } else {
            self.out.push_str(token.text());
        }
    }

    // Checks if the resulting code, when parsed doesn't split any placeholders due to different
    // order of operations between the search pattern and the replacement template. If any do, then
    // we rerender the template and wrap the problematic placeholders with parenthesis.
    fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) {
        if let Some(node) = parse_as_kind(&self.out, template.kind()) {
            self.remove_node_ranges(node);
            // Any ranges left after removal don't correspond to whole nodes in the re-parse:
            // those placeholders got split and need parenthesis.
            if self.placeholder_tokens_by_range.is_empty() {
                return;
            }
            self.placeholder_tokens_requiring_parenthesis =
                self.placeholder_tokens_by_range.values().cloned().collect();
            self.out.clear();
            self.render_node(template);
        }
    }

    /// Removes from `placeholder_tokens_by_range` every range that exactly covers a node in
    /// the re-parsed output (such placeholders survived parsing intact).
    fn remove_node_ranges(&mut self, node: SyntaxNode) {
        self.placeholder_tokens_by_range.remove(&node.text_range());
        for child in node.children() {
            self.remove_node_ranges(child);
        }
    }
}
+
+/// Returns whether token is the receiver of a method call. Note, being within the receiver of a
+/// method call doesn't count. e.g. if the token is `$a`, then `$a.foo()` will return true, while
+/// `($a + $b).foo()` or `x.foo($a)` will return false.
+fn token_is_method_call_receiver(token: &SyntaxToken) -> bool {
+ // Find the first method call among the ancestors of `token`, then check if the only token
+ // within the receiver is `token`.
+ if let Some(receiver) = token
+ .parent_ancestors()
+ .find_map(ast::MethodCallExpr::cast)
+ .and_then(|call| call.receiver())
+ {
+ let tokens = receiver.syntax().descendants_with_tokens().filter_map(|node_or_token| {
+ match node_or_token {
+ SyntaxElement::Token(t) => Some(t),
+ _ => None,
+ }
+ });
+ if let Some((only_token,)) = tokens.collect_tuple() {
+ return only_token == *token;
+ }
+ }
+ false
+}
+
+fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> {
+ if ast::Expr::can_cast(kind) {
+ if let Ok(expr) = fragments::expr(code) {
+ return Some(expr);
+ }
+ }
+ if ast::Item::can_cast(kind) {
+ if let Ok(item) = fragments::item(code) {
+ return Some(item);
+ }
+ }
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
new file mode 100644
index 000000000..4731f14f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
@@ -0,0 +1,308 @@
+//! This module is responsible for resolving paths within rules.
+
+use hir::AsAssocItem;
+use ide_db::{base_db::FilePosition, FxHashMap};
+use parsing::Placeholder;
+use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
+
+use crate::{errors::error, parsing, SsrError};
+
/// The semantic scope (and the syntax node it was derived from) in which rule paths are
/// resolved.
pub(crate) struct ResolutionScope<'db> {
    scope: hir::SemanticsScope<'db>,
    node: SyntaxNode,
}

/// A parsed rule whose paths have been resolved against a `ResolutionScope`.
pub(crate) struct ResolvedRule {
    pub(crate) pattern: ResolvedPattern,
    pub(crate) template: Option<ResolvedPattern>,
    /// Position of this rule in the order rules were added (earlier rules take precedence).
    pub(crate) index: usize,
}

/// One side (search or replace) of a rule, with path-resolution results attached.
pub(crate) struct ResolvedPattern {
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
    pub(crate) node: SyntaxNode,
    // Paths in `node` that we've resolved.
    pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
    /// Call expressions in the pattern that are UFCS calls to associated functions.
    pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
    /// Whether the pattern mentions `self` anywhere.
    pub(crate) contains_self: bool,
}

pub(crate) struct ResolvedPath {
    pub(crate) resolution: hir::PathResolution,
    /// The depth of the ast::Path that was resolved within the pattern.
    pub(crate) depth: u32,
}

/// Details of a UFCS-style call (`Type::function(...)`) found in a pattern.
pub(crate) struct UfcsCallInfo {
    pub(crate) call_expr: ast::CallExpr,
    pub(crate) function: hir::Function,
    pub(crate) qualifier_type: Option<hir::Type>,
}
+
impl ResolvedRule {
    /// Resolves all paths in `rule`'s pattern (and template, if any) against
    /// `resolution_scope`, recording `index` as the rule's priority position.
    pub(crate) fn new(
        rule: parsing::ParsedRule,
        resolution_scope: &ResolutionScope<'_>,
        index: usize,
    ) -> Result<ResolvedRule, SsrError> {
        let resolver =
            Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
        let resolved_template = match rule.template {
            Some(template) => Some(resolver.resolve_pattern_tree(template)?),
            None => None,
        };
        Ok(ResolvedRule {
            pattern: resolver.resolve_pattern_tree(rule.pattern)?,
            template: resolved_template,
            index,
        })
    }

    /// If `token` is an identifier that stands in for a placeholder of this rule's pattern,
    /// returns that placeholder.
    pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
        if token.kind() != SyntaxKind::IDENT {
            return None;
        }
        self.pattern.placeholders_by_stand_in.get(token.text())
    }
}
+
/// Helper that walks a pattern tree and resolves the paths it contains.
struct Resolver<'a, 'db> {
    resolution_scope: &'a ResolutionScope<'db>,
    // Placeholder table for the rule being resolved; placeholder segments are not resolved.
    placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
}
+
impl Resolver<'_, '_> {
    /// Resolves every resolvable path within `pattern` and derives the pattern's UFCS call
    /// information and `self`-usage flag.
    fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
        use syntax::ast::AstNode;
        use syntax::{SyntaxElement, T};
        let mut resolved_paths = FxHashMap::default();
        self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
        // A resolved path whose grandparent is a call, and which resolves to an associated
        // function, is a UFCS call like `Type::func(...)`.
        let ufcs_function_calls = resolved_paths
            .iter()
            .filter_map(|(path_node, resolved)| {
                if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
                    if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) {
                        if let hir::PathResolution::Def(hir::ModuleDef::Function(function)) =
                            resolved.resolution
                        {
                            if function.as_assoc_item(self.resolution_scope.scope.db).is_some() {
                                let qualifier_type =
                                    self.resolution_scope.qualifier_type(path_node);
                                return Some((
                                    grandparent,
                                    UfcsCallInfo { call_expr, function, qualifier_type },
                                ));
                            }
                        }
                    }
                }
                None
            })
            .collect();
        let contains_self =
            pattern.descendants_with_tokens().any(|node_or_token| match node_or_token {
                SyntaxElement::Token(t) => t.kind() == T![self],
                _ => false,
            });
        Ok(ResolvedPattern {
            node: pattern,
            resolved_paths,
            placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
            ufcs_function_calls,
            contains_self,
        })
    }

    /// Recursively resolves paths in `node`, recording results in `resolved_paths`. `depth`
    /// counts how far below the pattern root the current node is.
    fn resolve(
        &self,
        node: SyntaxNode,
        depth: u32,
        resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
    ) -> Result<(), SsrError> {
        use syntax::ast::AstNode;
        if let Some(path) = ast::Path::cast(node.clone()) {
            if is_self(&path) {
                // Self cannot be resolved like other paths.
                return Ok(());
            }
            // Check if this is an appropriate place in the path to resolve. If the path is
            // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
            // a placeholder. e.g. `a::$b::c` then we want to resolve `a`.
            if !path_contains_type_arguments(path.qualifier())
                && !self.path_contains_placeholder(&path)
            {
                let resolution = self
                    .resolution_scope
                    .resolve_path(&path)
                    .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
                if self.ok_to_use_path_resolution(&resolution) {
                    resolved_paths.insert(node, ResolvedPath { resolution, depth });
                    return Ok(());
                }
            }
        }
        for node in node.children() {
            self.resolve(node, depth + 1, resolved_paths)?;
        }
        Ok(())
    }

    /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
    /// arguments.
    fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
        if let Some(segment) = path.segment() {
            if let Some(name_ref) = segment.name_ref() {
                if self.placeholders_by_stand_in.contains_key(name_ref.text().as_str()) {
                    return true;
                }
            }
        }
        if let Some(qualifier) = path.qualifier() {
            return self.path_contains_placeholder(&qualifier);
        }
        false
    }

    /// Decides whether a resolution is usable for semantic matching. Associated items without
    /// a self parameter are rejected because they can be re-provided by other implementations.
    fn ok_to_use_path_resolution(&self, resolution: &hir::PathResolution) -> bool {
        match resolution {
            hir::PathResolution::Def(hir::ModuleDef::Function(function))
                if function.as_assoc_item(self.resolution_scope.scope.db).is_some() =>
            {
                if function.self_param(self.resolution_scope.scope.db).is_some() {
                    // If we don't use this path resolution, then we won't be able to match method
                    // calls. e.g. `Foo::bar($s)` should match `x.bar()`.
                    true
                } else {
                    cov_mark::hit!(replace_associated_trait_default_function_call);
                    false
                }
            }
            hir::PathResolution::Def(
                def @ (hir::ModuleDef::Const(_) | hir::ModuleDef::TypeAlias(_)),
            ) if def.as_assoc_item(self.resolution_scope.scope.db).is_some() => {
                // Not a function. Could be a constant or an associated type.
                cov_mark::hit!(replace_associated_trait_constant);
                false
            }
            _ => true,
        }
    }
}
+
impl<'db> ResolutionScope<'db> {
    /// Builds a resolution scope at `resolve_context` (cursor position), picking a node where
    /// local-variable resolution is possible.
    pub(crate) fn new(
        sema: &hir::Semantics<'db, ide_db::RootDatabase>,
        resolve_context: FilePosition,
    ) -> Option<ResolutionScope<'db>> {
        use syntax::ast::AstNode;
        let file = sema.parse(resolve_context.file_id);
        // Find a node at the requested position, falling back to the whole file.
        let node = file
            .syntax()
            .token_at_offset(resolve_context.offset)
            .left_biased()
            .and_then(|token| token.parent())
            .unwrap_or_else(|| file.syntax().clone());
        let node = pick_node_for_resolution(node);
        let scope = sema.scope(&node)?;
        Some(ResolutionScope { scope, node })
    }

    /// Returns the function in which SSR was invoked, if any.
    pub(crate) fn current_function(&self) -> Option<SyntaxNode> {
        self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN)
    }

    /// Resolves `path` in this scope, falling back to searching the qualifier type's
    /// associated items when whole-path resolution fails.
    fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
        // First try resolving the whole path. This will work for things like
        // `std::collections::HashMap`, but will fail for things like
        // `std::collections::HashMap::new`.
        if let Some(resolution) = self.scope.speculative_resolve(path) {
            return Some(resolution);
        }
        // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if
        // that succeeds, then iterate through the candidates on the resolved type with the provided
        // name.
        let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?;
        if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
            let name = path.segment()?.name_ref()?;
            let module = self.scope.module();
            adt.ty(self.scope.db).iterate_path_candidates(
                self.scope.db,
                &self.scope,
                &self.scope.visible_traits().0,
                Some(module),
                None,
                |assoc_item| {
                    let item_name = assoc_item.name(self.scope.db)?;
                    if item_name.to_smol_str().as_str() == name.text() {
                        Some(hir::PathResolution::Def(assoc_item.into()))
                    } else {
                        None
                    }
                },
            )
        } else {
            None
        }
    }

    /// If `path` has a qualifier that resolves to an ADT, returns that ADT's type.
    fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
        use syntax::ast::AstNode;
        if let Some(path) = ast::Path::cast(path.clone()) {
            if let Some(qualifier) = path.qualifier() {
                if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) =
                    self.resolve_path(&qualifier)
                {
                    return Some(adt.ty(self.scope.db));
                }
            }
        }
        None
    }
}
+
+fn is_self(path: &ast::Path) -> bool {
+ path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false)
+}
+
/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on
/// a statement node, then we can't resolve local variables that were defined in the current scope
/// (only in parent scopes). So we find another node, ideally a child of the statement where local
/// variable resolution is permitted.
fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
    match node.kind() {
        // Cursor after a `;`: descend into the statement's expression.
        SyntaxKind::EXPR_STMT => {
            if let Some(n) = node.first_child() {
                cov_mark::hit!(cursor_after_semicolon);
                return n;
            }
        }
        // `let` bindings introduce the variable only for *later* siblings, so move on.
        SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => {
            if let Some(next) = node.next_sibling() {
                return pick_node_for_resolution(next);
            }
        }
        // A NAME by itself can't host resolution; climb to its parent.
        SyntaxKind::NAME => {
            if let Some(parent) = node.parent() {
                return pick_node_for_resolution(parent);
            }
        }
        _ => {}
    }
    node
}
+
+/// Returns whether `path` or any of its qualifiers contains type arguments.
+fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
+ if let Some(path) = path {
+ if let Some(segment) = path.segment() {
+ if segment.generic_arg_list().is_some() {
+ cov_mark::hit!(type_arguments_within_path);
+ return true;
+ }
+ }
+ return path_contains_type_arguments(path.qualifier());
+ }
+ false
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
new file mode 100644
index 000000000..0a85569b6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -0,0 +1,289 @@
+//! Searching for matches.
+
+use crate::{
+ matching,
+ resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
+ Match, MatchFinder,
+};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ defs::Definition,
+ search::{SearchScope, UsageSearchResult},
+ FxHashSet,
+};
+use syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
+
+/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
+/// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type
+/// and as a pattern. In each, the usages of `foo::Bar` are the same and we'd like to avoid finding
+/// them more than once.
+#[derive(Default)]
+pub(crate) struct UsageCache {
+ usages: Vec<(Definition, UsageSearchResult)>,
+}
+
+impl<'db> MatchFinder<'db> {
+ /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
+ /// replacement impossible, so further processing is required in order to properly nest matches
+ /// and remove overlapping matches. This is done in the `nesting` module.
+ pub(crate) fn find_matches_for_rule(
+ &self,
+ rule: &ResolvedRule,
+ usage_cache: &mut UsageCache,
+ matches_out: &mut Vec<Match>,
+ ) {
+ if rule.pattern.contains_self {
+ // If the pattern contains `self` we restrict the scope of the search to just the
+ // current method. No other method can reference the same `self`. This makes the
+ // behavior of `self` consistent with other variables.
+ if let Some(current_function) = self.resolution_scope.current_function() {
+ self.slow_scan_node(&current_function, rule, &None, matches_out);
+ }
+ return;
+ }
+ if pick_path_for_usages(&rule.pattern).is_none() {
+ self.slow_scan(rule, matches_out);
+ return;
+ }
+ self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
+ }
+
+ fn find_matches_for_pattern_tree(
+ &self,
+ rule: &ResolvedRule,
+ pattern: &ResolvedPattern,
+ usage_cache: &mut UsageCache,
+ matches_out: &mut Vec<Match>,
+ ) {
+ if let Some(resolved_path) = pick_path_for_usages(pattern) {
+ let definition: Definition = resolved_path.resolution.clone().into();
+ for file_range in self.find_usages(usage_cache, definition).file_ranges() {
+ for node_to_match in self.find_nodes_to_match(resolved_path, file_range) {
+ if !is_search_permitted_ancestors(&node_to_match) {
+ cov_mark::hit!(use_declaration_with_braces);
+ continue;
+ }
+ self.try_add_match(rule, &node_to_match, &None, matches_out);
+ }
+ }
+ }
+ }
+
+ fn find_nodes_to_match(
+ &self,
+ resolved_path: &ResolvedPath,
+ file_range: FileRange,
+ ) -> Vec<SyntaxNode> {
+ let file = self.sema.parse(file_range.file_id);
+ let depth = resolved_path.depth as usize;
+ let offset = file_range.range.start();
+
+ let mut paths = self
+ .sema
+ .find_nodes_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
+ .peekable();
+
+ if paths.peek().is_some() {
+ paths
+ .filter_map(|path| {
+ self.sema.ancestors_with_macros(path.syntax().clone()).nth(depth)
+ })
+ .collect::<Vec<_>>()
+ } else {
+ self.sema
+ .find_nodes_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
+ .filter_map(|path| {
+ // If the pattern contained a path and we found a reference to that path that wasn't
+ // itself a path, but was a method call, then we need to adjust how far up to try
+ // matching by how deep the path was within a CallExpr. The structure would have been
+ // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
+ // path was part of a CallExpr because if it wasn't then all that will happen is we'll
+ // fail to match, which is the desired behavior.
+ const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
+ if depth < PATH_DEPTH_IN_CALL_EXPR {
+ return None;
+ }
+ self.sema
+ .ancestors_with_macros(path.syntax().clone())
+ .nth(depth - PATH_DEPTH_IN_CALL_EXPR)
+ })
+ .collect::<Vec<_>>()
+ }
+ }
+
+ fn find_usages<'a>(
+ &self,
+ usage_cache: &'a mut UsageCache,
+ definition: Definition,
+ ) -> &'a UsageSearchResult {
+ // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
+ // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a
+ // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
+ // lookups in the case of a cache hit.
+ if usage_cache.find(&definition).is_none() {
+ let usages = definition.usages(&self.sema).in_scope(self.search_scope()).all();
+ usage_cache.usages.push((definition, usages));
+ return &usage_cache.usages.last().unwrap().1;
+ }
+ usage_cache.find(&definition).unwrap()
+ }
+
+ /// Returns the scope within which we want to search. We don't want un unrestricted search
+ /// scope, since we don't want to find references in external dependencies.
+ fn search_scope(&self) -> SearchScope {
+ // FIXME: We should ideally have a test that checks that we edit local roots and not library
+ // roots. This probably would require some changes to fixtures, since currently everything
+ // seems to get put into a single source root.
+ let mut files = Vec::new();
+ self.search_files_do(|file_id| {
+ files.push(file_id);
+ });
+ SearchScope::files(&files)
+ }
+
+ fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
+ self.search_files_do(|file_id| {
+ let file = self.sema.parse(file_id);
+ let code = file.syntax();
+ self.slow_scan_node(code, rule, &None, matches_out);
+ })
+ }
+
+ fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
+ if self.restrict_ranges.is_empty() {
+ // Unrestricted search.
+ use ide_db::base_db::SourceDatabaseExt;
+ use ide_db::symbol_index::SymbolsDatabase;
+ for &root in self.sema.db.local_roots().iter() {
+ let sr = self.sema.db.source_root(root);
+ for file_id in sr.iter() {
+ callback(file_id);
+ }
+ }
+ } else {
+ // Search is restricted, deduplicate file IDs (generally only one).
+ let mut files = FxHashSet::default();
+ for range in &self.restrict_ranges {
+ if files.insert(range.file_id) {
+ callback(range.file_id);
+ }
+ }
+ }
+ }
+
+ fn slow_scan_node(
+ &self,
+ code: &SyntaxNode,
+ rule: &ResolvedRule,
+ restrict_range: &Option<FileRange>,
+ matches_out: &mut Vec<Match>,
+ ) {
+ if !is_search_permitted(code) {
+ return;
+ }
+ self.try_add_match(rule, code, restrict_range, matches_out);
+ // If we've got a macro call, we already tried matching it pre-expansion, which is the only
+ // way to match the whole macro, now try expanding it and matching the expansion.
+ if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
+ if let Some(expanded) = self.sema.expand(&macro_call) {
+ if let Some(tt) = macro_call.token_tree() {
+ // When matching within a macro expansion, we only want to allow matches of
+ // nodes that originated entirely from within the token tree of the macro call.
+ // i.e. we don't want to match something that came from the macro itself.
+ self.slow_scan_node(
+ &expanded,
+ rule,
+ &Some(self.sema.original_range(tt.syntax())),
+ matches_out,
+ );
+ }
+ }
+ }
+ for child in code.children() {
+ self.slow_scan_node(&child, rule, restrict_range, matches_out);
+ }
+ }
+
+ fn try_add_match(
+ &self,
+ rule: &ResolvedRule,
+ code: &SyntaxNode,
+ restrict_range: &Option<FileRange>,
+ matches_out: &mut Vec<Match>,
+ ) {
+ if !self.within_range_restrictions(code) {
+ cov_mark::hit!(replace_nonpath_within_selection);
+ return;
+ }
+ if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
+ matches_out.push(m);
+ }
+ }
+
+ /// Returns whether `code` is within one of our range restrictions if we have any. No range
+ /// restrictions is considered unrestricted and always returns true.
+ fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
+ if self.restrict_ranges.is_empty() {
+ // There is no range restriction.
+ return true;
+ }
+ let node_range = self.sema.original_range(code);
+ for range in &self.restrict_ranges {
+ if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
+ return true;
+ }
+ }
+ false
+ }
+}
+
+/// Returns whether we support matching within `node` and all of its ancestors.
+fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool {
+ if let Some(parent) = node.parent() {
+ if !is_search_permitted_ancestors(&parent) {
+ return false;
+ }
+ }
+ is_search_permitted(node)
+}
+
+/// Returns whether we support matching within this kind of node.
+fn is_search_permitted(node: &SyntaxNode) -> bool {
+ // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar`
+ // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
+ // However we'll then replace just the part we matched `bar`. We probably need to instead remove
+ // `bar` and insert a new use declaration.
+ node.kind() != SyntaxKind::USE
+}
+
+impl UsageCache {
+ fn find(&mut self, definition: &Definition) -> Option<&UsageSearchResult> {
+ // We expect a very small number of cache entries (generally 1), so a linear scan should be
+ // fast enough and avoids the need to implement Hash for Definition.
+ for (d, refs) in &self.usages {
+ if d == definition {
+ return Some(refs);
+ }
+ }
+ None
+ }
+}
+
+/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
+/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
+/// longest as this is hopefully more likely to be less common, making it faster to find.
+fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
+ // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
+ // private to the current module, then we definitely would want to pick them over say a path
+ // from std. Possibly we should go further than this and intersect the search scopes for all
+ // resolved paths then search only in that scope.
+ pattern
+ .resolved_paths
+ .iter()
+ .filter(|(_, p)| {
+ !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)))
+ })
+ .map(|(node, resolved)| (node.text().len(), resolved))
+ .max_by(|(a, _), (b, _)| a.cmp(b))
+ .map(|(_, resolved)| resolved)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
new file mode 100644
index 000000000..1ecb7aa9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
@@ -0,0 +1,1397 @@
+use expect_test::{expect, Expect};
+use ide_db::{
+ base_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt},
+ FxHashSet,
+};
+use std::sync::Arc;
+use test_utils::RangeOrOffset;
+
+use crate::{MatchFinder, SsrRule};
+
+fn parse_error_text(query: &str) -> String {
+ format!("{}", query.parse::<SsrRule>().unwrap_err())
+}
+
+#[test]
+fn parser_empty_query() {
+ assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`");
+}
+
+#[test]
+fn parser_no_delimiter() {
+ assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`");
+}
+
+#[test]
+fn parser_two_delimiters() {
+ assert_eq!(
+ parse_error_text("foo() ==>> a ==>> b "),
+ "Parse error: More than one delimiter found"
+ );
+}
+
+#[test]
+fn parser_repeated_name() {
+ assert_eq!(
+ parse_error_text("foo($a, $a) ==>>"),
+ "Parse error: Placeholder `$a` repeats more than once"
+ );
+}
+
+#[test]
+fn parser_invalid_pattern() {
+ assert_eq!(
+ parse_error_text(" ==>> ()"),
+ "Parse error: Not a valid Rust expression, type, item, path or pattern"
+ );
+}
+
+#[test]
+fn parser_invalid_template() {
+ assert_eq!(
+ parse_error_text("() ==>> )"),
+ "Parse error: Not a valid Rust expression, type, item, path or pattern"
+ );
+}
+
+#[test]
+fn parser_undefined_placeholder_in_replacement() {
+ assert_eq!(
+ parse_error_text("42 ==>> $a"),
+ "Parse error: Replacement contains undefined placeholders: $a"
+ );
+}
+
+/// `code` may optionally contain a cursor marker `$0`. If it doesn't, then the position will be
+/// the start of the file. If there's a second cursor marker, then we'll return a single range.
+pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec<FileRange>) {
+ use ide_db::base_db::fixture::WithFixture;
+ use ide_db::symbol_index::SymbolsDatabase;
+ let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) {
+ ide_db::RootDatabase::with_range_or_offset(code)
+ } else {
+ let (db, file_id) = ide_db::RootDatabase::with_single_file(code);
+ (db, file_id, RangeOrOffset::Offset(0.into()))
+ };
+ let selections;
+ let position;
+ match range_or_offset {
+ RangeOrOffset::Range(range) => {
+ position = FilePosition { file_id, offset: range.start() };
+ selections = vec![FileRange { file_id, range }];
+ }
+ RangeOrOffset::Offset(offset) => {
+ position = FilePosition { file_id, offset };
+ selections = vec![];
+ }
+ }
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(ide_db::base_db::fixture::WORKSPACE);
+ db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+ (db, position, selections)
+}
+
+fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
+ assert_ssr_transforms(&[rule], input, expected);
+}
+
+fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
+ let (db, position, selections) = single_file(input);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ for rule in rules {
+ let rule: SsrRule = rule.parse().unwrap();
+ match_finder.add_rule(rule).unwrap();
+ }
+ let edits = match_finder.edits();
+ if edits.is_empty() {
+ panic!("No edits were made");
+ }
+ // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
+ // stuff.
+ let mut actual = db.file_text(position.file_id).to_string();
+ edits[&position.file_id].apply(&mut actual);
+ expected.assert_eq(&actual);
+}
+
+fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snippet: &str) {
+ let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
+ println!(
+ "Match debug info: {} nodes had text exactly equal to '{}'",
+ debug_info.len(),
+ snippet
+ );
+ for (index, d) in debug_info.iter().enumerate() {
+ println!("Node #{}\n{:#?}\n", index, d);
+ }
+}
+
+fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
+ let (db, position, selections) = single_file(code);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+ let matched_strings: Vec<String> =
+ match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
+ if matched_strings != expected && !expected.is_empty() {
+ print_match_debug_info(&match_finder, position.file_id, expected[0]);
+ }
+ assert_eq!(matched_strings, expected);
+}
+
+fn assert_no_match(pattern: &str, code: &str) {
+ let (db, position, selections) = single_file(code);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+ let matches = match_finder.matches().flattened().matches;
+ if !matches.is_empty() {
+ print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
+ panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
+ }
+}
+
+fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
+ let (db, position, selections) = single_file(code);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+ let mut reasons = Vec::new();
+ for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
+ if let Some(reason) = d.match_failure_reason() {
+ reasons.push(reason.to_owned());
+ }
+ }
+ assert_eq!(reasons, vec![expected_reason]);
+}
+
+#[test]
+fn ssr_let_stmt_in_macro_match() {
+ assert_matches(
+ "let a = 0",
+ r#"
+ macro_rules! m1 { ($a:stmt) => {$a}; }
+ fn f() {m1!{ let a = 0 };}"#,
+ // FIXME: Whitespace is not part of the matched block
+ &["leta=0"],
+ );
+}
+
+#[test]
+fn ssr_let_stmt_in_fn_match() {
+ assert_matches("let $a = 10;", "fn main() { let x = 10; x }", &["let x = 10;"]);
+ assert_matches("let $a = $b;", "fn main() { let x = 10; x }", &["let x = 10;"]);
+}
+
+#[test]
+fn ssr_block_expr_match() {
+ assert_matches("{ let $a = $b; }", "fn main() { let x = 10; }", &["{ let x = 10; }"]);
+ assert_matches("{ let $a = $b; $c }", "fn main() { let x = 10; x }", &["{ let x = 10; x }"]);
+}
+
+#[test]
+fn ssr_let_stmt_replace() {
+ // Pattern and template with trailing semicolon
+ assert_ssr_transform(
+ "let $a = $b; ==>> let $a = 11;",
+ "fn main() { let x = 10; x }",
+ expect![["fn main() { let x = 11; x }"]],
+ );
+}
+
+#[test]
+fn ssr_let_stmt_replace_expr() {
+ // Trailing semicolon should be dropped from the new expression
+ assert_ssr_transform(
+ "let $a = $b; ==>> $b",
+ "fn main() { let x = 10; }",
+ expect![["fn main() { 10 }"]],
+ );
+}
+
+#[test]
+fn ssr_blockexpr_replace_stmt_with_stmt() {
+ assert_ssr_transform(
+ "if $a() {$b;} ==>> $b;",
+ "{
+ if foo() {
+ bar();
+ }
+ Ok(())
+}",
+ expect![[r#"{
+ bar();
+ Ok(())
+}"#]],
+ );
+}
+
+#[test]
+fn ssr_blockexpr_match_trailing_expr() {
+ assert_matches(
+ "if $a() {$b;}",
+ "{
+ if foo() {
+ bar();
+ }
+}",
+ &["if foo() {
+ bar();
+ }"],
+ );
+}
+
+#[test]
+fn ssr_blockexpr_replace_trailing_expr_with_stmt() {
+ assert_ssr_transform(
+ "if $a() {$b;} ==>> $b;",
+ "{
+ if foo() {
+ bar();
+ }
+}",
+ expect![["{
+ bar();
+}"]],
+ );
+}
+
+#[test]
+fn ssr_function_to_method() {
+ assert_ssr_transform(
+ "my_function($a, $b) ==>> ($a).my_method($b)",
+ "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
+ expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]],
+ )
+}
+
+#[test]
+fn ssr_nested_function() {
+ assert_ssr_transform(
+ "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
+ r#"
+ //- /lib.rs crate:foo
+ fn foo() {}
+ fn bar() {}
+ fn baz() {}
+ fn main { foo (x + value.method(b), x+y-z, true && false) }
+ "#,
+ expect![[r#"
+ fn foo() {}
+ fn bar() {}
+ fn baz() {}
+ fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }
+ "#]],
+ )
+}
+
+#[test]
+fn ssr_expected_spacing() {
+ assert_ssr_transform(
+ "foo($x) + bar() ==>> bar($x)",
+ "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
+ expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
+ );
+}
+
+#[test]
+fn ssr_with_extra_space() {
+ assert_ssr_transform(
+ "foo($x ) + bar() ==>> bar($x)",
+ "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }",
+ expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
+ );
+}
+
+#[test]
+fn ssr_keeps_nested_comment() {
+ assert_ssr_transform(
+ "foo($x) ==>> bar($x)",
+ "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }",
+ expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]],
+ )
+}
+
+#[test]
+fn ssr_keeps_comment() {
+ assert_ssr_transform(
+ "foo($x) ==>> bar($x)",
+ "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }",
+ expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]],
+ )
+}
+
+#[test]
+fn ssr_struct_lit() {
+ assert_ssr_transform(
+ "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)",
+ r#"
+ struct Foo() {}
+ impl Foo { fn new() {} }
+ fn main() { Foo{b:2, a:1} }
+ "#,
+ expect![[r#"
+ struct Foo() {}
+ impl Foo { fn new() {} }
+ fn main() { Foo::new(1, 2) }
+ "#]],
+ )
+}
+
+#[test]
+fn ssr_struct_def() {
+ assert_ssr_transform(
+ "struct Foo { $f: $t } ==>> struct Foo($t);",
+ r#"struct Foo { field: i32 }"#,
+ expect![[r#"struct Foo(i32);"#]],
+ )
+}
+
+#[test]
+fn ignores_whitespace() {
+ assert_matches("1+2", "fn f() -> i32 {1 + 2}", &["1 + 2"]);
+ assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]);
+}
+
+#[test]
+fn no_match() {
+ assert_no_match("1 + 3", "fn f() -> i32 {1 + 2}");
+}
+
+#[test]
+fn match_fn_definition() {
+ assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]);
+}
+
+#[test]
+fn match_struct_definition() {
+ let code = r#"
+ struct Option<T> {}
+ struct Bar {}
+ struct Foo {name: Option<String>}"#;
+ assert_matches("struct $n {$f: Option<String>}", code, &["struct Foo {name: Option<String>}"]);
+}
+
+#[test]
+fn match_expr() {
+ let code = r#"
+ fn foo() {}
+ fn f() -> i32 {foo(40 + 2, 42)}"#;
+ assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]);
+ assert_no_match("foo($a, $b, $c)", code);
+ assert_no_match("foo($a)", code);
+}
+
+#[test]
+fn match_nested_method_calls() {
+ assert_matches(
+ "$a.z().z().z()",
+ "fn f() {h().i().j().z().z().z().d().e()}",
+ &["h().i().j().z().z().z()"],
+ );
+}
+
+// Make sure that our node matching semantics don't differ within macro calls.
+#[test]
+fn match_nested_method_calls_with_macro_call() {
+ assert_matches(
+ "$a.z().z().z()",
+ r#"
+ macro_rules! m1 { ($a:expr) => {$a}; }
+ fn f() {m1!(h().i().j().z().z().z().d().e())}"#,
+ &["h().i().j().z().z().z()"],
+ );
+}
+
+#[test]
+fn match_complex_expr() {
+ let code = r#"
+ fn foo() {} fn bar() {}
+ fn f() -> i32 {foo(bar(40, 2), 42)}"#;
+ assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]);
+ assert_no_match("foo($a, $b, $c)", code);
+ assert_no_match("foo($a)", code);
+ assert_matches("bar($a, $b)", code, &["bar(40, 2)"]);
+}
+
+// Trailing commas in the code should be ignored.
+#[test]
+fn match_with_trailing_commas() {
+ // Code has comma, pattern doesn't.
+ assert_matches("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", &["foo(1, 2,)"]);
+ assert_matches("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]);
+
+ // Pattern has comma, code doesn't.
+ assert_matches("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", &["foo(1, 2)"]);
+ assert_matches("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", &["Foo{1, 2}"]);
+}
+
+#[test]
+fn match_type() {
+ assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]);
+ assert_matches(
+ "Option<$a>",
+ "struct Option<T> {} fn f() -> Option<i32> {42}",
+ &["Option<i32>"],
+ );
+ assert_no_match(
+ "Option<$a>",
+ "struct Option<T> {} struct Result<T, E> {} fn f() -> Result<i32, ()> {42}",
+ );
+}
+
+#[test]
+fn match_struct_instantiation() {
+ let code = r#"
+ struct Foo {bar: i32, baz: i32}
+ fn f() {Foo {bar: 1, baz: 2}}"#;
+ assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]);
+ // Now with placeholders for all parts of the struct.
+ assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]);
+ assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]);
+}
+
+#[test]
+fn match_path() {
+ let code = r#"
+ mod foo {
+ pub fn bar() {}
+ }
+ fn f() {foo::bar(42)}"#;
+ assert_matches("foo::bar", code, &["foo::bar"]);
+ assert_matches("$a::bar", code, &["foo::bar"]);
+ assert_matches("foo::$b", code, &["foo::bar"]);
+}
+
+#[test]
+fn match_pattern() {
+ assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
+}
+
+// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to
+// a::b::c, then we should match.
+#[test]
+fn match_fully_qualified_fn_path() {
+ let code = r#"
+ mod a {
+ pub mod b {
+ pub fn c(_: i32) {}
+ }
+ }
+ use a::b::c;
+ fn f1() {
+ c(42);
+ }
+ "#;
+ assert_matches("a::b::c($a)", code, &["c(42)"]);
+}
+
+#[test]
+fn match_resolved_type_name() {
+ let code = r#"
+ mod m1 {
+ pub mod m2 {
+ pub trait Foo<T> {}
+ }
+ }
+ mod m3 {
+ trait Foo<T> {}
+ fn f1(f: Option<&dyn Foo<bool>>) {}
+ }
+ mod m4 {
+ use crate::m1::m2::Foo;
+ fn f1(f: Option<&dyn Foo<i32>>) {}
+ }
+ "#;
+ assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
+}
+
+#[test]
+fn type_arguments_within_path() {
+ cov_mark::check!(type_arguments_within_path);
+ let code = r#"
+ mod foo {
+ pub struct Bar<T> {t: T}
+ impl<T> Bar<T> {
+ pub fn baz() {}
+ }
+ }
+ fn f1() {foo::Bar::<i32>::baz();}
+ "#;
+ assert_no_match("foo::Bar::<i64>::baz()", code);
+ assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
+}
+
+#[test]
+fn literal_constraint() {
+ cov_mark::check!(literal_constraint);
+ let code = r#"
+ enum Option<T> { Some(T), None }
+ use Option::Some;
+ fn f1() {
+ let x1 = Some(42);
+ let x2 = Some("foo");
+ let x3 = Some(x1);
+ let x4 = Some(40 + 2);
+ let x5 = Some(true);
+ }
+ "#;
+ assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
+ assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
+}
+
+#[test]
+fn match_reordered_struct_instantiation() {
+ assert_matches(
+ "Foo {aa: 1, b: 2, ccc: 3}",
+ "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}",
+ &["Foo {b: 2, ccc: 3, aa: 1}"],
+ );
+ assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}");
+ assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}");
+ assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}");
+ assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}");
+ assert_no_match("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}");
+ assert_no_match("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}");
+}
+
+#[test]
+fn match_macro_invocation() {
+ assert_matches(
+ "foo!($a)",
+ "macro_rules! foo {() => {}} fn() {foo(foo!(foo()))}",
+ &["foo!(foo())"],
+ );
+ assert_matches(
+ "foo!(41, $a, 43)",
+ "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43)}",
+ &["foo!(41, 42, 43)"],
+ );
+ assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
+ assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
+ assert_matches(
+ "foo!($a())",
+ "macro_rules! foo {() => {}} fn() {foo!(bar())}",
+ &["foo!(bar())"],
+ );
+}
+
+// When matching within a macro expansion, we only allow matches of nodes that originated from
+// the macro call, not from the macro definition.
+#[test]
+fn no_match_expression_from_macro() {
+ assert_no_match(
+ "$a.clone()",
+ r#"
+ macro_rules! m1 {
+ () => {42.clone()}
+ }
+ fn f1() {m1!()}
+ "#,
+ );
+}
+
+// We definitely don't want to allow matching of an expression that part originates from the
+// macro call `42` and part from the macro definition `.clone()`.
+#[test]
+fn no_match_split_expression() {
+ assert_no_match(
+ "$a.clone()",
+ r#"
+ macro_rules! m1 {
+ ($x:expr) => {$x.clone()}
+ }
+ fn f1() {m1!(42)}
+ "#,
+ );
+}
+
+#[test]
+fn replace_function_call() {
+ // This test also makes sure that we ignore empty-ranges.
+ assert_ssr_transform(
+ "foo() ==>> bar()",
+ "fn foo() {$0$0} fn bar() {} fn f1() {foo(); foo();}",
+ expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
+ );
+}
+
+#[test]
+fn replace_function_call_with_placeholders() {
+ assert_ssr_transform(
+ "foo($a, $b) ==>> bar($b, $a)",
+ "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}",
+ expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]],
+ );
+}
+
+#[test]
+fn replace_nested_function_calls() {
+ assert_ssr_transform(
+ "foo($a) ==>> bar($a)",
+ "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}",
+ expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]],
+ );
+}
+
+#[test]
+fn replace_associated_function_call() {
+ assert_ssr_transform(
+ "Foo::new() ==>> Bar::new()",
+ r#"
+ struct Foo {}
+ impl Foo { fn new() {} }
+ struct Bar {}
+ impl Bar { fn new() {} }
+ fn f1() {Foo::new();}
+ "#,
+ expect![[r#"
+ struct Foo {}
+ impl Foo { fn new() {} }
+ struct Bar {}
+ impl Bar { fn new() {} }
+ fn f1() {Bar::new();}
+ "#]],
+ );
+}
+
+#[test]
+fn replace_associated_trait_default_function_call() {
+ cov_mark::check!(replace_associated_trait_default_function_call);
+ assert_ssr_transform(
+ "Bar2::foo() ==>> Bar2::foo2()",
+ r#"
+ trait Foo { fn foo() {} }
+ pub struct Bar {}
+ impl Foo for Bar {}
+ pub struct Bar2 {}
+ impl Foo for Bar2 {}
+ impl Bar2 { fn foo2() {} }
+ fn main() {
+ Bar::foo();
+ Bar2::foo();
+ }
+ "#,
+ expect![[r#"
+ trait Foo { fn foo() {} }
+ pub struct Bar {}
+ impl Foo for Bar {}
+ pub struct Bar2 {}
+ impl Foo for Bar2 {}
+ impl Bar2 { fn foo2() {} }
+ fn main() {
+ Bar::foo();
+ Bar2::foo2();
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_associated_trait_constant() {
+ cov_mark::check!(replace_associated_trait_constant);
+ assert_ssr_transform(
+ "Bar2::VALUE ==>> Bar2::VALUE_2222",
+ r#"
+ trait Foo { const VALUE: i32; const VALUE_2222: i32; }
+ pub struct Bar {}
+ impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
+ pub struct Bar2 {}
+ impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
+ impl Bar2 { fn foo2() {} }
+ fn main() {
+ Bar::VALUE;
+ Bar2::VALUE;
+ }
+ "#,
+ expect![[r#"
+ trait Foo { const VALUE: i32; const VALUE_2222: i32; }
+ pub struct Bar {}
+ impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
+ pub struct Bar2 {}
+ impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
+ impl Bar2 { fn foo2() {} }
+ fn main() {
+ Bar::VALUE;
+ Bar2::VALUE_2222;
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_path_in_different_contexts() {
+ // Note the $0 inside module a::b which marks the point where the rule is interpreted. We
+ // replace foo with bar, but both need different path qualifiers in different contexts. In f4,
+ // foo is unqualified because of a use statement, however the replacement needs to be fully
+ // qualified.
+ assert_ssr_transform(
+ "c::foo() ==>> c::bar()",
+ r#"
+ mod a {
+ pub mod b {$0
+ pub mod c {
+ pub fn foo() {}
+ pub fn bar() {}
+ fn f1() { foo() }
+ }
+ fn f2() { c::foo() }
+ }
+ fn f3() { b::c::foo() }
+ }
+ use a::b::c::foo;
+ fn f4() { foo() }
+ "#,
+ expect![[r#"
+ mod a {
+ pub mod b {
+ pub mod c {
+ pub fn foo() {}
+ pub fn bar() {}
+ fn f1() { bar() }
+ }
+ fn f2() { c::bar() }
+ }
+ fn f3() { b::c::bar() }
+ }
+ use a::b::c::foo;
+ fn f4() { a::b::c::bar() }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_associated_function_with_generics() {
+ assert_ssr_transform(
+ "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
+ r#"
+ mod c {
+ pub struct Foo<T> {v: T}
+ impl<T> Foo<T> { pub fn new() {} }
+ fn f1() {
+ Foo::<i32>::new();
+ }
+ }
+ mod d {
+ pub struct Bar<T> {v: T}
+ impl<T> Bar<T> { pub fn default() {} }
+ fn f1() {
+ super::c::Foo::<i32>::new();
+ }
+ }
+ "#,
+ expect![[r#"
+ mod c {
+ pub struct Foo<T> {v: T}
+ impl<T> Foo<T> { pub fn new() {} }
+ fn f1() {
+ crate::d::Bar::<i32>::default();
+ }
+ }
+ mod d {
+ pub struct Bar<T> {v: T}
+ impl<T> Bar<T> { pub fn default() {} }
+ fn f1() {
+ Bar::<i32>::default();
+ }
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_type() {
+ assert_ssr_transform(
+ "Result<(), $a> ==>> Option<$a>",
+ "struct Result<T, E> {} struct Option<T> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
+ expect![[
+ "struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
+ ]],
+ );
+ assert_ssr_transform(
+ "dyn Trait<$a> ==>> DynTrait<$a>",
+ r#"
+trait Trait<T> {}
+struct DynTrait<T> {}
+fn f1() -> dyn Trait<Vec<Error>> {foo()}
+"#,
+ expect![[r#"
+trait Trait<T> {}
+struct DynTrait<T> {}
+fn f1() -> DynTrait<Vec<Error>> {foo()}
+"#]],
+ );
+}
+
+#[test]
+fn replace_macro_invocations() {
+ assert_ssr_transform(
+ "try!($a) ==>> $a?",
+ "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
+ expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]],
+ );
+ // FIXME: Figure out why this doesn't work anymore
+ // assert_ssr_transform(
+ // "foo!($a($b)) ==>> foo($b, $a)",
+ // "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
+ // expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]],
+ // );
+}
+
+#[test]
+fn replace_binary_op() {
+ assert_ssr_transform(
+ "$a + $b ==>> $b + $a",
+ "fn f() {2 * 3 + 4 * 5}",
+ expect![["fn f() {4 * 5 + 2 * 3}"]],
+ );
+ assert_ssr_transform(
+ "$a + $b ==>> $b + $a",
+ "fn f() {1 + 2 + 3 + 4}",
+ expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]],
+ );
+}
+
+#[test]
+fn match_binary_op() {
+ assert_matches("$a + $b", "fn f() {1 + 2 + 3 + 4}", &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]);
+}
+
+#[test]
+fn multiple_rules() {
+ assert_ssr_transforms(
+ &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"],
+ "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}",
+ expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]],
+ )
+}
+
+#[test]
+fn multiple_rules_with_nested_matches() {
+ assert_ssr_transforms(
+ &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"],
+ r#"
+ fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
+ fn f() {foo1(foo2(foo1(foo2(foo1(42)))))}
+ "#,
+ expect![[r#"
+ fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
+ fn f() {bar1(bar2(bar1(bar2(bar1(42)))))}
+ "#]],
+ )
+}
+
+#[test]
+fn match_within_macro_invocation() {
+ let code = r#"
+ macro_rules! foo {
+ ($a:stmt; $b:expr) => {
+ $b
+ };
+ }
+ struct A {}
+ impl A {
+ fn bar() {}
+ }
+ fn f1() {
+ let aaa = A {};
+ foo!(macro_ignores_this(); aaa.bar());
+ }
+ "#;
+ assert_matches("$a.bar()", code, &["aaa.bar()"]);
+}
+
+#[test]
+fn replace_within_macro_expansion() {
+ assert_ssr_transform(
+ "$a.foo() ==>> bar($a)",
+ r#"
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn bar() {}
+ fn f() {macro1!(5.x().foo().o2())}
+ "#,
+ expect![[r#"
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn bar() {}
+ fn f() {macro1!(bar(5.x()).o2())}
+ "#]],
+ )
+}
+
+#[test]
+fn replace_outside_and_within_macro_expansion() {
+ assert_ssr_transform(
+ "foo($a) ==>> bar($a)",
+ r#"
+ fn foo() {} fn bar() {}
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn f() {foo(foo(macro1!(foo(foo(42)))))}
+ "#,
+ expect![[r#"
+ fn foo() {} fn bar() {}
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn f() {bar(bar(macro1!(bar(bar(42)))))}
+ "#]],
+ )
+}
+
+#[test]
+fn preserves_whitespace_within_macro_expansion() {
+ assert_ssr_transform(
+ "$a + $b ==>> $b - $a",
+ r#"
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn f() {macro1!(1 * 2 + 3 + 4)}
+ "#,
+ expect![[r#"
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn f() {macro1!(4 - (3 - 1 * 2))}
+ "#]],
+ )
+}
+
+#[test]
+fn add_parenthesis_when_necessary() {
+ assert_ssr_transform(
+ "foo($a) ==>> $a.to_string()",
+ r#"
+ fn foo(_: i32) {}
+ fn bar3(v: i32) {
+ foo(1 + 2);
+ foo(-v);
+ }
+ "#,
+ expect![[r#"
+ fn foo(_: i32) {}
+ fn bar3(v: i32) {
+ (1 + 2).to_string();
+ (-v).to_string();
+ }
+ "#]],
+ )
+}
+
+#[test]
+fn match_failure_reasons() {
+ let code = r#"
+ fn bar() {}
+ macro_rules! foo {
+ ($a:expr) => {
+ 1 + $a + 2
+ };
+ }
+ fn f1() {
+ bar(1, 2);
+ foo!(5 + 43.to_string() + 5);
+ }
+ "#;
+ assert_match_failure_reason(
+ "bar($a, 3)",
+ code,
+ "bar(1, 2)",
+ r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#,
+ );
+ assert_match_failure_reason(
+ "42.to_string()",
+ code,
+ "43.to_string()",
+ r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
+ );
+}
+
+#[test]
+fn overlapping_possible_matches() {
+ // There are three possible matches here, however the middle one, `foo(foo(foo(42)))` shouldn't
+ // match because it overlaps with the outer match. The inner match is permitted since it's is
+ // contained entirely within the placeholder of the outer match.
+ assert_matches(
+ "foo(foo($a))",
+ "fn foo() {} fn main() {foo(foo(foo(foo(42))))}",
+ &["foo(foo(42))", "foo(foo(foo(foo(42))))"],
+ );
+}
+
+#[test]
+fn use_declaration_with_braces() {
+ // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up
+ // though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz,
+ // foo2::bar2}`.
+ cov_mark::check!(use_declaration_with_braces);
+ assert_ssr_transform(
+ "foo::bar ==>> foo2::bar2",
+ r#"
+ mod foo { pub fn bar() {} pub fn baz() {} }
+ mod foo2 { pub fn bar2() {} }
+ use foo::{baz, bar};
+ fn main() { bar() }
+ "#,
+ expect![["
+ mod foo { pub fn bar() {} pub fn baz() {} }
+ mod foo2 { pub fn bar2() {} }
+ use foo::{baz, bar};
+ fn main() { foo2::bar2() }
+ "]],
+ )
+}
+
+#[test]
+fn ufcs_matches_method_call() {
+ let code = r#"
+ struct Foo {}
+ impl Foo {
+ fn new(_: i32) -> Foo { Foo {} }
+ fn do_stuff(&self, _: i32) {}
+ }
+ struct Bar {}
+ impl Bar {
+ fn new(_: i32) -> Bar { Bar {} }
+ fn do_stuff(&self, v: i32) {}
+ }
+ fn main() {
+ let b = Bar {};
+ let f = Foo {};
+ b.do_stuff(1);
+ f.do_stuff(2);
+ Foo::new(4).do_stuff(3);
+ // Too many / too few args - should never match
+ f.do_stuff(2, 10);
+ f.do_stuff();
+ }
+ "#;
+ assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]);
+ // The arguments needs special handling in the case of a function call matching a method call
+ // and the first argument is different.
+ assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]);
+ assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]);
+
+ assert_ssr_transform(
+ "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)",
+ code,
+ expect![[r#"
+ struct Foo {}
+ impl Foo {
+ fn new(_: i32) -> Foo { Foo {} }
+ fn do_stuff(&self, _: i32) {}
+ }
+ struct Bar {}
+ impl Bar {
+ fn new(_: i32) -> Bar { Bar {} }
+ fn do_stuff(&self, v: i32) {}
+ }
+ fn main() {
+ let b = Bar {};
+ let f = Foo {};
+ b.do_stuff(1);
+ f.do_stuff(2);
+ Bar::new(3).do_stuff(4);
+ // Too many / too few args - should never match
+ f.do_stuff(2, 10);
+ f.do_stuff();
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn pattern_is_a_single_segment_path() {
+ cov_mark::check!(pattern_is_a_single_segment_path);
+ // The first function should not be altered because the `foo` in scope at the cursor position is
+ // a different `foo`. This case is special because "foo" can be parsed as a pattern (IDENT_PAT ->
+ // NAME -> IDENT), which contains no path. If we're not careful we'll end up matching the `foo`
+ // in `let foo` from the first function. Whether we should match the `let foo` in the second
+ // function is less clear. At the moment, we don't. Doing so sounds like a rename operation,
+ // which isn't really what SSR is for, especially since the replacement `bar` must be able to be
+ // resolved, which means if we rename `foo` we'll get a name collision.
+ assert_ssr_transform(
+ "foo ==>> bar",
+ r#"
+ fn f1() -> i32 {
+ let foo = 1;
+ let bar = 2;
+ foo
+ }
+ fn f1() -> i32 {
+ let foo = 1;
+ let bar = 2;
+ foo$0
+ }
+ "#,
+ expect![[r#"
+ fn f1() -> i32 {
+ let foo = 1;
+ let bar = 2;
+ foo
+ }
+ fn f1() -> i32 {
+ let foo = 1;
+ let bar = 2;
+ bar
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_local_variable_reference() {
+ // The pattern references a local variable `foo` in the block containing the cursor. We should
+ // only replace references to this variable `foo`, not other variables that just happen to have
+ // the same name.
+ cov_mark::check!(cursor_after_semicolon);
+ assert_ssr_transform(
+ "foo + $a ==>> $a - foo",
+ r#"
+ fn bar1() -> i32 {
+ let mut res = 0;
+ let foo = 5;
+ res += foo + 1;
+ let foo = 10;
+ res += foo + 2;$0
+ res += foo + 3;
+ let foo = 15;
+ res += foo + 4;
+ res
+ }
+ "#,
+ expect![[r#"
+ fn bar1() -> i32 {
+ let mut res = 0;
+ let foo = 5;
+ res += foo + 1;
+ let foo = 10;
+ res += 2 - foo;
+ res += 3 - foo;
+ let foo = 15;
+ res += foo + 4;
+ res
+ }
+ "#]],
+ )
+}
+
+#[test]
+fn replace_path_within_selection() {
+ assert_ssr_transform(
+ "foo ==>> bar",
+ r#"
+ fn main() {
+ let foo = 41;
+ let bar = 42;
+ do_stuff(foo);
+ do_stuff(foo);$0
+ do_stuff(foo);
+ do_stuff(foo);$0
+ do_stuff(foo);
+ }"#,
+ expect![[r#"
+ fn main() {
+ let foo = 41;
+ let bar = 42;
+ do_stuff(foo);
+ do_stuff(foo);
+ do_stuff(bar);
+ do_stuff(bar);
+ do_stuff(foo);
+ }"#]],
+ );
+}
+
+#[test]
+fn replace_nonpath_within_selection() {
+ cov_mark::check!(replace_nonpath_within_selection);
+ assert_ssr_transform(
+ "$a + $b ==>> $b * $a",
+ r#"
+ fn main() {
+ let v = 1 + 2;$0
+ let v2 = 3 + 3;
+ let v3 = 4 + 5;$0
+ let v4 = 6 + 7;
+ }"#,
+ expect![[r#"
+ fn main() {
+ let v = 1 + 2;
+ let v2 = 3 * 3;
+ let v3 = 5 * 4;
+ let v4 = 6 + 7;
+ }"#]],
+ );
+}
+
+#[test]
+fn replace_self() {
+ // `foo(self)` occurs twice in the code, however only the first occurrence is the `self` that's
+ // in scope where the rule is invoked.
+ assert_ssr_transform(
+ "foo(self) ==>> bar(self)",
+ r#"
+ struct S1 {}
+ fn foo(_: &S1) {}
+ fn bar(_: &S1) {}
+ impl S1 {
+ fn f1(&self) {
+ foo(self)$0
+ }
+ fn f2(&self) {
+ foo(self)
+ }
+ }
+ "#,
+ expect![[r#"
+ struct S1 {}
+ fn foo(_: &S1) {}
+ fn bar(_: &S1) {}
+ impl S1 {
+ fn f1(&self) {
+ bar(self)
+ }
+ fn f2(&self) {
+ foo(self)
+ }
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn match_trait_method_call() {
+ // `Bar::foo` and `Bar2::foo` resolve to the same function. Make sure we only match if the type
+ // matches what's in the pattern. Also checks that we handle autoderef.
+ let code = r#"
+ pub struct Bar {}
+ pub struct Bar2 {}
+ pub trait Foo {
+ fn foo(&self, _: i32) {}
+ }
+ impl Foo for Bar {}
+ impl Foo for Bar2 {}
+ fn main() {
+ let v1 = Bar {};
+ let v2 = Bar2 {};
+ let v1_ref = &v1;
+ let v2_ref = &v2;
+ v1.foo(1);
+ v2.foo(2);
+ Bar::foo(&v1, 3);
+ Bar2::foo(&v2, 4);
+ v1_ref.foo(5);
+ v2_ref.foo(6);
+ }
+ "#;
+ assert_matches("Bar::foo($a, $b)", code, &["v1.foo(1)", "Bar::foo(&v1, 3)", "v1_ref.foo(5)"]);
+ assert_matches("Bar2::foo($a, $b)", code, &["v2.foo(2)", "Bar2::foo(&v2, 4)", "v2_ref.foo(6)"]);
+}
+
+#[test]
+fn replace_autoref_autoderef_capture() {
+ // Here we have several calls to `$a.foo()`. In the first case autoref is applied, in the
+ // second, we already have a reference, so it isn't. When $a is used in a context where autoref
+ // doesn't apply, we need to prefix it with `&`. Finally, we have some cases where autoderef
+ // needs to be applied.
+ cov_mark::check!(replace_autoref_autoderef_capture);
+ let code = r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&self) {}
+ fn foo2(&self) {}
+ }
+ fn bar(_: &Foo) {}
+ fn main() {
+ let f = Foo {};
+ let fr = &f;
+ let fr2 = &fr;
+ let fr3 = &fr2;
+ f.foo();
+ fr.foo();
+ fr2.foo();
+ fr3.foo();
+ }
+ "#;
+ assert_ssr_transform(
+ "Foo::foo($a) ==>> bar($a)",
+ code,
+ expect![[r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&self) {}
+ fn foo2(&self) {}
+ }
+ fn bar(_: &Foo) {}
+ fn main() {
+ let f = Foo {};
+ let fr = &f;
+ let fr2 = &fr;
+ let fr3 = &fr2;
+ bar(&f);
+ bar(&*fr);
+ bar(&**fr2);
+ bar(&***fr3);
+ }
+ "#]],
+ );
+ // If the placeholder is used as the receiver of another method call, then we don't need to
+ // explicitly autoderef or autoref.
+ assert_ssr_transform(
+ "Foo::foo($a) ==>> $a.foo2()",
+ code,
+ expect![[r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&self) {}
+ fn foo2(&self) {}
+ }
+ fn bar(_: &Foo) {}
+ fn main() {
+ let f = Foo {};
+ let fr = &f;
+ let fr2 = &fr;
+ let fr3 = &fr2;
+ f.foo2();
+ fr.foo2();
+ fr2.foo2();
+ fr3.foo2();
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_autoref_mut() {
+ let code = r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&mut self) {}
+ }
+ fn bar(_: &mut Foo) {}
+ fn main() {
+ let mut f = Foo {};
+ f.foo();
+ let fr = &mut f;
+ fr.foo();
+ }
+ "#;
+ assert_ssr_transform(
+ "Foo::foo($a) ==>> bar($a)",
+ code,
+ expect![[r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&mut self) {}
+ }
+ fn bar(_: &mut Foo) {}
+ fn main() {
+ let mut f = Foo {};
+ bar(&mut f);
+ let fr = &mut f;
+ bar(&mut *fr);
+ }
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml
new file mode 100644
index 000000000..0e9771cd2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml
@@ -0,0 +1,47 @@
+[package]
+name = "ide"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+crossbeam-channel = "0.5.5"
+either = "1.7.0"
+itertools = "0.10.3"
+tracing = "0.1.35"
+oorandom = "11.1.3"
+pulldown-cmark-to-cmark = "10.0.1"
+pulldown-cmark = { version = "0.9.1", default-features = false }
+url = "2.2.2"
+dot = "0.1.4"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+ide-assists = { path = "../ide-assists", version = "0.0.0" }
+ide-diagnostics = { path = "../ide-diagnostics", version = "0.0.0" }
+ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
+ide-completion = { path = "../ide-completion", version = "0.0.0" }
+
+# ide should depend only on the top-level `hir` package. if you need
+# something from some `hir-xxx` subpackage, reexport the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+
+[target.'cfg(not(any(target_arch = "wasm32", target_os = "emscripten")))'.dependencies]
+toolchain = { path = "../toolchain", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
+
+[features]
+in-rust-tree = ["ide-assists/in-rust-tree", "ide-diagnostics/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
new file mode 100644
index 000000000..210c5c7fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
@@ -0,0 +1,789 @@
+use hir::{HasSource, InFile, Semantics};
+use ide_db::{
+ base_db::{FileId, FilePosition, FileRange},
+ defs::Definition,
+ helpers::visit_file_defs,
+ RootDatabase,
+};
+use syntax::{ast::HasName, AstNode, TextRange};
+
+use crate::{
+ fn_references::find_all_methods,
+ goto_implementation::goto_implementation,
+ references::find_all_refs,
+ runnables::{runnables, Runnable},
+ NavigationTarget, RunnableKind,
+};
+
+// Feature: Annotations
+//
+// Provides user with annotations above items for looking up references or impl blocks
+// and running/debugging binaries.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png[]
+#[derive(Debug)]
+pub struct Annotation {
+ pub range: TextRange,
+ pub kind: AnnotationKind,
+}
+
+#[derive(Debug)]
+pub enum AnnotationKind {
+ Runnable(Runnable),
+ HasImpls { file_id: FileId, data: Option<Vec<NavigationTarget>> },
+ HasReferences { file_id: FileId, data: Option<Vec<FileRange>> },
+}
+
+pub struct AnnotationConfig {
+ pub binary_target: bool,
+ pub annotate_runnables: bool,
+ pub annotate_impls: bool,
+ pub annotate_references: bool,
+ pub annotate_method_references: bool,
+ pub annotate_enum_variant_references: bool,
+}
+
+pub(crate) fn annotations(
+ db: &RootDatabase,
+ config: &AnnotationConfig,
+ file_id: FileId,
+) -> Vec<Annotation> {
+ let mut annotations = Vec::default();
+
+ if config.annotate_runnables {
+ for runnable in runnables(db, file_id) {
+ if should_skip_runnable(&runnable.kind, config.binary_target) {
+ continue;
+ }
+
+ let range = runnable.nav.focus_or_full_range();
+
+ annotations.push(Annotation { range, kind: AnnotationKind::Runnable(runnable) });
+ }
+ }
+
+ visit_file_defs(&Semantics::new(db), file_id, &mut |def| {
+ let range = match def {
+ Definition::Const(konst) if config.annotate_references => {
+ konst.source(db).and_then(|node| name_range(db, node, file_id))
+ }
+ Definition::Trait(trait_) if config.annotate_references || config.annotate_impls => {
+ trait_.source(db).and_then(|node| name_range(db, node, file_id))
+ }
+ Definition::Adt(adt) => match adt {
+ hir::Adt::Enum(enum_) => {
+ if config.annotate_enum_variant_references {
+ enum_
+ .variants(db)
+ .into_iter()
+ .map(|variant| {
+ variant.source(db).and_then(|node| name_range(db, node, file_id))
+ })
+ .flatten()
+ .for_each(|range| {
+ annotations.push(Annotation {
+ range,
+ kind: AnnotationKind::HasReferences { file_id, data: None },
+ })
+ })
+ }
+ if config.annotate_references || config.annotate_impls {
+ enum_.source(db).and_then(|node| name_range(db, node, file_id))
+ } else {
+ None
+ }
+ }
+ _ => {
+ if config.annotate_references || config.annotate_impls {
+ adt.source(db).and_then(|node| name_range(db, node, file_id))
+ } else {
+ None
+ }
+ }
+ },
+ _ => None,
+ };
+
+ let range = match range {
+ Some(range) => range,
+ None => return,
+ };
+
+ if config.annotate_impls && !matches!(def, Definition::Const(_)) {
+ annotations
+ .push(Annotation { range, kind: AnnotationKind::HasImpls { file_id, data: None } });
+ }
+ if config.annotate_references {
+ annotations.push(Annotation {
+ range,
+ kind: AnnotationKind::HasReferences { file_id, data: None },
+ });
+ }
+
+ fn name_range<T: HasName>(
+ db: &RootDatabase,
+ node: InFile<T>,
+ source_file_id: FileId,
+ ) -> Option<TextRange> {
+ if let Some(InFile { file_id, value }) = node.original_ast_node(db) {
+ if file_id == source_file_id.into() {
+ return value.name().map(|it| it.syntax().text_range());
+ }
+ }
+ None
+ }
+ });
+
+ if config.annotate_method_references {
+ annotations.extend(find_all_methods(db, file_id).into_iter().map(
+ |FileRange { file_id, range }| Annotation {
+ range,
+ kind: AnnotationKind::HasReferences { file_id, data: None },
+ },
+ ));
+ }
+
+ annotations
+}
+
+pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation {
+ match annotation.kind {
+ AnnotationKind::HasImpls { file_id, ref mut data } => {
+ *data =
+ goto_implementation(db, FilePosition { file_id, offset: annotation.range.start() })
+ .map(|range| range.info);
+ }
+ AnnotationKind::HasReferences { file_id, ref mut data } => {
+ *data = find_all_refs(
+ &Semantics::new(db),
+ FilePosition { file_id, offset: annotation.range.start() },
+ None,
+ )
+ .map(|result| {
+ result
+ .into_iter()
+ .flat_map(|res| res.references)
+ .flat_map(|(file_id, access)| {
+ access.into_iter().map(move |(range, _)| FileRange { file_id, range })
+ })
+ .collect()
+ });
+ }
+ _ => {}
+ };
+
+ annotation
+}
+
+fn should_skip_runnable(kind: &RunnableKind, binary_target: bool) -> bool {
+ match kind {
+ RunnableKind::Bin => !binary_target,
+ _ => false,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::{fixture, Annotation, AnnotationConfig};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+
+ let annotations: Vec<Annotation> = analysis
+ .annotations(
+ &AnnotationConfig {
+ binary_target: true,
+ annotate_runnables: true,
+ annotate_impls: true,
+ annotate_references: true,
+ annotate_method_references: true,
+ annotate_enum_variant_references: true,
+ },
+ file_id,
+ )
+ .unwrap()
+ .into_iter()
+ .map(|annotation| analysis.resolve_annotation(annotation).unwrap())
+ .collect();
+
+ expect.assert_debug_eq(&annotations);
+ }
+
+ #[test]
+ fn const_annotations() {
+ check(
+ r#"
+const DEMO: i32 = 123;
+
+const UNUSED: i32 = 123;
+
+fn main() {
+ let hello = DEMO;
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 53..57,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 50..85,
+ focus_range: 53..57,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 6..10,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 78..82,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 30..36,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ Annotation {
+ range: 53..57,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn struct_references_annotations() {
+ check(
+ r#"
+struct Test;
+
+fn main() {
+ let test = Test;
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 17..21,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 14..48,
+ focus_range: 17..21,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasImpls {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 41..45,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 17..21,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn struct_and_trait_impls_annotations() {
+ check(
+ r#"
+struct Test;
+
+trait MyCoolTrait {}
+
+impl MyCoolTrait for Test {}
+
+fn main() {
+ let test = Test;
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 69..73,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 66..100,
+ focus_range: 69..73,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasImpls {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 36..64,
+ focus_range: 57..61,
+ name: "impl",
+ kind: Impl,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 57..61,
+ },
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 93..97,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 20..31,
+ kind: HasImpls {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 36..64,
+ focus_range: 57..61,
+ name: "impl",
+ kind: Impl,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 20..31,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 41..52,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 69..73,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn runnable_annotation() {
+ check(
+ r#"
+fn main() {}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 3..7,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 3..7,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 3..7,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn method_annotations() {
+ check(
+ r#"
+struct Test;
+
+impl Test {
+ fn self_by_ref(&self) {}
+}
+
+fn main() {
+ Test.self_by_ref();
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 61..65,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 58..95,
+ focus_range: 61..65,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasImpls {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 14..56,
+ focus_range: 19..23,
+ name: "impl",
+ kind: Impl,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 19..23,
+ },
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 74..78,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 33..44,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 79..90,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 61..65,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_annotations() {
+ check(
+ r#"
+fn main() {}
+
+mod tests {
+ #[test]
+ fn my_cool_test() {}
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 3..7,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 3..7,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 18..23,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 14..64,
+ focus_range: 18..23,
+ name: "tests",
+ kind: Module,
+ description: "mod tests",
+ },
+ kind: TestMod {
+ path: "tests",
+ },
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 45..57,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 30..62,
+ focus_range: 45..57,
+ name: "my_cool_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::my_cool_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 3..7,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_no_annotations_outside_module_tree() {
+ check(
+ r#"
+//- /foo.rs
+struct Foo;
+//- /lib.rs
+// this file comes last since `check` checks the first file only
+"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_no_annotations_macro_struct_def() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! m {
+ () => {
+ struct A {}
+ };
+}
+
+m!();
+"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
new file mode 100644
index 000000000..a18a6bea9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
@@ -0,0 +1,460 @@
+//! Entry point for call-hierarchy
+
+use hir::Semantics;
+use ide_db::{
+ defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
+ search::FileReference,
+ FxIndexMap, RootDatabase,
+};
+use syntax::{ast, AstNode, SyntaxKind::NAME, TextRange};
+
+use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+
+#[derive(Debug, Clone)]
+pub struct CallItem {
+ pub target: NavigationTarget,
+ pub ranges: Vec<TextRange>,
+}
+
+impl CallItem {
+ #[cfg(test)]
+ pub(crate) fn debug_render(&self) -> String {
+ format!("{} : {:?}", self.target.debug_render(), self.ranges)
+ }
+}
+
+pub(crate) fn call_hierarchy(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ goto_definition::goto_definition(db, position)
+}
+
+pub(crate) fn incoming_calls(
+ db: &RootDatabase,
+ FilePosition { file_id, offset }: FilePosition,
+) -> Option<Vec<CallItem>> {
+ let sema = &Semantics::new(db);
+
+ let file = sema.parse(file_id);
+ let file = file.syntax();
+ let mut calls = CallLocations::default();
+
+ let references = sema
+ .find_nodes_at_offset_with_descend(file, offset)
+ .filter_map(move |node| match node {
+ ast::NameLike::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? {
+ NameRefClass::Definition(def @ Definition::Function(_)) => Some(def),
+ _ => None,
+ },
+ ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
+ NameClass::Definition(def @ Definition::Function(_)) => Some(def),
+ _ => None,
+ },
+ ast::NameLike::Lifetime(_) => None,
+ })
+ .flat_map(|func| func.usages(sema).all());
+
+ for (_, references) in references {
+ let references = references.into_iter().map(|FileReference { name, .. }| name);
+ for name in references {
+ // This target is the containing function
+ let nav = sema.ancestors_with_macros(name.syntax().clone()).find_map(|node| {
+ let def = ast::Fn::cast(node).and_then(|fn_| sema.to_def(&fn_))?;
+ def.try_to_nav(sema.db)
+ });
+ if let Some(nav) = nav {
+ calls.add(nav, sema.original_range(name.syntax()).range);
+ }
+ }
+ }
+
+ Some(calls.into_items())
+}
+
+pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+ let sema = Semantics::new(db);
+ let file_id = position.file_id;
+ let file = sema.parse(file_id);
+ let file = file.syntax();
+ let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+ NAME => 1,
+ _ => 0,
+ })?;
+ let mut calls = CallLocations::default();
+
+ sema.descend_into_macros(token)
+ .into_iter()
+ .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
+ .filter_map(|item| match item {
+ ast::Item::Const(c) => c.body().map(|it| it.syntax().descendants()),
+ ast::Item::Fn(f) => f.body().map(|it| it.syntax().descendants()),
+ ast::Item::Static(s) => s.body().map(|it| it.syntax().descendants()),
+ _ => None,
+ })
+ .flatten()
+ .filter_map(ast::CallableExpr::cast)
+ .filter_map(|call_node| {
+ let (nav_target, range) = match call_node {
+ ast::CallableExpr::Call(call) => {
+ let expr = call.expr()?;
+ let callable = sema.type_of_expr(&expr)?.original.as_callable(db)?;
+ match callable.kind() {
+ hir::CallableKind::Function(it) => {
+ let range = expr.syntax().text_range();
+ it.try_to_nav(db).zip(Some(range))
+ }
+ _ => None,
+ }
+ }
+ ast::CallableExpr::MethodCall(expr) => {
+ let range = expr.name_ref()?.syntax().text_range();
+ let function = sema.resolve_method_call(&expr)?;
+ function.try_to_nav(db).zip(Some(range))
+ }
+ }?;
+ Some((nav_target, range))
+ })
+ .for_each(|(nav, range)| calls.add(nav, range));
+
+ Some(calls.into_items())
+}
+
+#[derive(Default)]
+struct CallLocations {
+ funcs: FxIndexMap<NavigationTarget, Vec<TextRange>>,
+}
+
+impl CallLocations {
+ fn add(&mut self, target: NavigationTarget, range: TextRange) {
+ self.funcs.entry(target).or_default().push(range);
+ }
+
+ fn into_items(self) -> Vec<CallItem> {
+ self.funcs.into_iter().map(|(target, ranges)| CallItem { target, ranges }).collect()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::FilePosition;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check_hierarchy(
+ ra_fixture: &str,
+ expected: Expect,
+ expected_incoming: Expect,
+ expected_outgoing: Expect,
+ ) {
+ let (analysis, pos) = fixture::position(ra_fixture);
+
+ let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
+ assert_eq!(navs.len(), 1);
+ let nav = navs.pop().unwrap();
+ expected.assert_eq(&nav.debug_render());
+
+ let item_pos =
+ FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };
+ let incoming_calls = analysis.incoming_calls(item_pos).unwrap().unwrap();
+ expected_incoming
+ .assert_eq(&incoming_calls.into_iter().map(|call| call.debug_render()).join("\n"));
+
+ let outgoing_calls = analysis.outgoing_calls(item_pos).unwrap().unwrap();
+ expected_outgoing
+ .assert_eq(&outgoing_calls.into_iter().map(|call| call.debug_render()).join("\n"));
+ }
+
+ #[test]
+ fn test_call_hierarchy_on_ref() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller() {
+ call$0ee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_on_def() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn call$0ee() {}
+fn caller() {
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_same_fn() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller() {
+ call$0ee();
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..58 18..24 : [33..39, 47..53]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_different_fn() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller1() {
+ call$0ee();
+}
+
+fn caller2() {
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["
+ caller1 Function FileId(0) 15..45 18..25 : [34..40]
+ caller2 Function FileId(0) 47..77 50..57 : [66..72]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_tests_mod() {
+ check_hierarchy(
+ r#"
+//- /lib.rs cfg:test
+fn callee() {}
+fn caller1() {
+ call$0ee();
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_caller() {
+ callee();
+ }
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![[r#"
+ caller1 Function FileId(0) 15..45 18..25 : [34..40]
+ test_caller Function FileId(0) 95..149 110..121 : [134..140]"#]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_different_files() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::callee;
+
+fn caller() {
+ call$0ee();
+}
+
+//- /foo/mod.rs
+pub fn callee() {}
+"#,
+ expect![["callee Function FileId(1) 0..18 7..13"]],
+ expect![["caller Function FileId(0) 27..56 30..36 : [45..51]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_outgoing() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn call$0er() {
+ callee();
+ callee();
+}
+"#,
+ expect![["caller Function FileId(0) 15..58 18..24"]],
+ expect![[]],
+ expect![["callee Function FileId(0) 0..14 3..9 : [33..39, 47..53]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_outgoing_in_different_files() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::callee;
+
+fn call$0er() {
+ callee();
+}
+
+//- /foo/mod.rs
+pub fn callee() {}
+"#,
+ expect![["caller Function FileId(0) 27..56 30..36"]],
+ expect![[]],
+ expect![["callee Function FileId(1) 0..18 7..13 : [45..51]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_incoming_outgoing() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn caller1() {
+ call$0er2();
+}
+
+fn caller2() {
+ caller3();
+}
+
+fn caller3() {
+
+}
+"#,
+ expect![["caller2 Function FileId(0) 33..64 36..43"]],
+ expect![["caller1 Function FileId(0) 0..31 3..10 : [19..26]"]],
+ expect![["caller3 Function FileId(0) 66..83 69..76 : [52..59]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_issue_5103() {
+ check_hierarchy(
+ r#"
+fn a() {
+ b()
+}
+
+fn b() {}
+
+fn main() {
+ a$0()
+}
+"#,
+ expect![["a Function FileId(0) 0..18 3..4"]],
+ expect![["main Function FileId(0) 31..52 34..38 : [47..48]"]],
+ expect![["b Function FileId(0) 20..29 23..24 : [13..14]"]],
+ );
+
+ check_hierarchy(
+ r#"
+fn a() {
+ b$0()
+}
+
+fn b() {}
+
+fn main() {
+ a()
+}
+"#,
+ expect![["b Function FileId(0) 20..29 23..24"]],
+ expect![["a Function FileId(0) 0..18 3..4 : [13..14]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_macros_incoming() {
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(callee)
+fn caller() {
+ call!(call$0ee);
+}
+"#,
+ expect![[r#"callee Function FileId(0) 144..159 152..158"#]],
+ expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]],
+ expect![[]],
+ );
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(cal$0lee)
+fn caller() {
+ call!(callee);
+}
+"#,
+ expect![[r#"callee Function FileId(0) 144..159 152..158"#]],
+ expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_macros_outgoing() {
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(callee)
+fn caller$0() {
+ call!(callee);
+}
+"#,
+ expect![[r#"caller Function FileId(0) 160..194 163..169"#]],
+ expect![[]],
+ // FIXME
+ expect![[]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
new file mode 100644
index 000000000..582e9fe7e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -0,0 +1,549 @@
+//! Extracts, resolves and rewrites links and intra-doc links in markdown documentation.
+
+#[cfg(test)]
+mod tests;
+
+mod intra_doc_links;
+
+use pulldown_cmark::{BrokenLink, CowStr, Event, InlineStr, LinkType, Options, Parser, Tag};
+use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions};
+use stdx::format_to;
+use url::Url;
+
+use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
+use ide_db::{
+ base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase},
+ defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use syntax::{
+ ast::{self, IsString},
+ match_ast, AstNode, AstToken,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, TextRange, TextSize, T,
+};
+
+use crate::{
+ doc_links::intra_doc_links::{parse_intra_doc_link, strip_prefixes_suffixes},
+ FilePosition, Semantics,
+};
+
/// Weblink to an item's documentation.
pub(crate) type DocumentationLink = String;

// Markdown extensions enabled whenever doc-comment markdown is parsed here.
const MARKDOWN_OPTIONS: Options =
    Options::ENABLE_FOOTNOTES.union(Options::ENABLE_TABLES).union(Options::ENABLE_TASKLISTS);

/// Rewrite documentation links in markdown to point to an online host (e.g. docs.rs)
pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: Definition) -> String {
    // Keep unresolved shortcut/reference links alive instead of dropping them.
    let mut cb = broken_link_clone_cb;
    let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb));

    let doc = map_links(doc, |target, title| {
        // This check is imperfect, there's some overlap between valid intra-doc links
        // and valid URLs so we choose to be too eager to try to resolve what might be
        // a URL.
        if target.contains("://") {
            // Already an absolute URL: pass through unchanged.
            (Some(LinkType::Inline), target.to_string(), title.to_string())
        } else {
            // Two possibilities:
            // * path-based links: `../../module/struct.MyStruct.html`
            // * module-based links (AKA intra-doc links): `super::super::module::MyStruct`
            if let Some((target, title)) = rewrite_intra_doc_link(db, definition, target, title) {
                return (None, target, title);
            }
            if let Some(target) = rewrite_url_link(db, definition, target) {
                return (Some(LinkType::Inline), target, title.to_string());
            }

            // Neither form resolved: leave the link as written.
            (None, target.to_string(), title.to_string())
        }
    });
    // Render the rewritten event stream back to markdown text.
    let mut out = String::new();
    cmark_resume_with_options(
        doc,
        &mut out,
        None,
        CMarkOptions { code_block_token_count: 3, ..Default::default() },
    )
    .ok();
    out
}
+
/// Remove all links in markdown documentation.
///
/// Inline links to absolute URLs are kept; every other link is dropped,
/// leaving only its text content in the output.
pub(crate) fn remove_links(markdown: &str) -> String {
    // True while we are inside a link whose `Start` tag was filtered out, so
    // the matching `End` tag can be filtered too.
    let mut drop_link = false;

    // Give broken reference links an empty target so the parser still emits
    // them as links (which are then dropped below like any other).
    let mut cb = |_: BrokenLink<'_>| {
        let empty = InlineStr::try_from("").unwrap();
        Some((CowStr::Inlined(empty), CowStr::Inlined(empty)))
    };
    let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb));
    let doc = doc.filter_map(move |evt| match evt {
        Event::Start(Tag::Link(link_type, target, title)) => {
            if link_type == LinkType::Inline && target.contains("://") {
                // Keep web links intact.
                Some(Event::Start(Tag::Link(link_type, target, title)))
            } else {
                drop_link = true;
                None
            }
        }
        // NOTE(review): the flag is cleared on the *first* `End` event, so a
        // link whose text contains nested inline markup would stop dropping
        // too early — presumably doc links rarely nest; confirm if relevant.
        Event::End(_) if drop_link => {
            drop_link = false;
            None
        }
        _ => Some(evt),
    });

    let mut out = String::new();
    cmark_resume_with_options(
        doc,
        &mut out,
        None,
        CMarkOptions { code_block_token_count: 3, ..Default::default() },
    )
    .ok();
    out
}
+
/// Retrieve a link to documentation for the given symbol.
///
/// Returns `None` when no suitable token is under the cursor or the token
/// does not classify as a documented definition.
pub(crate) fn external_docs(
    db: &RootDatabase,
    position: &FilePosition,
) -> Option<DocumentationLink> {
    let sema = &Semantics::new(db);
    let file = sema.parse(position.file_id).syntax().clone();
    // When the cursor sits between two tokens, prefer identifiers, then
    // parentheses, and never pick trivia.
    let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
        IDENT | INT_NUMBER | T![self] => 3,
        T!['('] | T![')'] => 2,
        kind if kind.is_trivia() => 0,
        _ => 1,
    })?;
    let token = sema.descend_into_macros_single(token);

    let node = token.parent()?;
    // Classify the name or name-reference under the cursor into a `Definition`.
    let definition = match_ast! {
        match node {
            ast::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? {
                NameRefClass::Definition(def) => def,
                NameRefClass::FieldShorthand { local_ref: _, field_ref } => {
                    Definition::Field(field_ref)
                }
            },
            ast::Name(name) => match NameClass::classify(sema, &name)? {
                NameClass::Definition(it) | NameClass::ConstReference(it) => it,
                NameClass::PatFieldShorthand { local_def: _, field_ref } => Definition::Field(field_ref),
            },
            _ => return None,
        }
    };

    get_doc_link(db, definition)
}
+
/// Extracts all links from a given markdown text returning the definition text range, link-text
/// and the namespace if known.
///
/// Ranges are byte offsets relative to the documentation text itself.
pub(crate) fn extract_definitions_from_docs(
    docs: &hir::Documentation,
) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
    Parser::new_with_broken_link_callback(
        docs.as_str(),
        MARKDOWN_OPTIONS,
        Some(&mut broken_link_clone_cb),
    )
    // Pairs each markdown event with its source byte range.
    .into_offset_iter()
    .filter_map(|(event, range)| match event {
        Event::Start(Tag::Link(_, target, _)) => {
            // Split off an optional namespace disambiguator (`struct@`, `!`, ...).
            let (link, ns) = parse_intra_doc_link(&target);
            Some((
                TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?),
                link.to_string(),
                ns,
            ))
        }
        _ => None,
    })
    .collect()
}
+
/// Resolves the intra-doc link `link`, written in the docs of `def`, to the
/// definition it refers to, optionally restricted to namespace `ns`.
pub(crate) fn resolve_doc_path_for_def(
    db: &dyn HirDatabase,
    def: Definition,
    link: &str,
    ns: Option<hir::Namespace>,
) -> Option<Definition> {
    match def {
        // Each documentable item kind delegates to its own `resolve_doc_path`.
        Definition::Module(it) => it.resolve_doc_path(db, link, ns),
        Definition::Function(it) => it.resolve_doc_path(db, link, ns),
        Definition::Adt(it) => it.resolve_doc_path(db, link, ns),
        Definition::Variant(it) => it.resolve_doc_path(db, link, ns),
        Definition::Const(it) => it.resolve_doc_path(db, link, ns),
        Definition::Static(it) => it.resolve_doc_path(db, link, ns),
        Definition::Trait(it) => it.resolve_doc_path(db, link, ns),
        Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
        Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
        Definition::Field(it) => it.resolve_doc_path(db, link, ns),
        // These kinds provide no doc-path resolution scope of their own.
        Definition::BuiltinAttr(_)
        | Definition::ToolModule(_)
        | Definition::BuiltinType(_)
        | Definition::SelfType(_)
        | Definition::Local(_)
        | Definition::GenericParam(_)
        | Definition::Label(_)
        | Definition::DeriveHelper(_) => None,
    }
    .map(Definition::from)
}
+
/// Maps a doc-commentable syntax node to its attributes (for doc lookup) and
/// its `Definition`. Returns `None` for node kinds that cannot carry docs.
pub(crate) fn doc_attributes(
    sema: &Semantics<'_, RootDatabase>,
    node: &SyntaxNode,
) -> Option<(hir::AttrsWithOwner, Definition)> {
    match_ast! {
        match node {
            ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Module(def))),
            ast::Module(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Module(def))),
            ast::Fn(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Function(def))),
            ast::Struct(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Struct(def)))),
            ast::Union(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Union(def)))),
            ast::Enum(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Enum(def)))),
            ast::Variant(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Variant(def))),
            ast::Trait(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Trait(def))),
            ast::Static(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Static(def))),
            ast::Const(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Const(def))),
            ast::TypeAlias(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::TypeAlias(def))),
            ast::Impl(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::SelfType(def))),
            ast::RecordField(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
            ast::TupleField(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
            ast::Macro(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Macro(def))),
            // ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
            _ => None
        }
    }
}
+
/// A token participating in a doc comment: either a `///`-style comment
/// token or the string literal of a `#[doc = "..."]` attribute.
pub(crate) struct DocCommentToken {
    doc_token: SyntaxToken,
    // Length of the comment prefix (`///`) or of the doc string's opening
    // quote, i.e. where the comment *contents* begin inside the token.
    prefix_len: TextSize,
}

/// Wraps `doc_token` as a [`DocCommentToken`] if it is part of a doc comment.
pub(crate) fn token_as_doc_comment(doc_token: &SyntaxToken) -> Option<DocCommentToken> {
    (match_ast! {
        match doc_token {
            ast::Comment(comment) => TextSize::try_from(comment.prefix().len()).ok(),
            // A string literal only counts when it is the value of a `doc` attribute.
            ast::String(string) => doc_token.parent_ancestors().find_map(ast::Attr::cast)
                .filter(|attr| attr.simple_name().as_deref() == Some("doc")).and_then(|_| string.open_quote_text_range().map(|it| it.len())),
            _ => None,
        }
    }).map(|prefix_len| DocCommentToken { prefix_len, doc_token: doc_token.clone() })
}
+
impl DocCommentToken {
    /// Maps `offset` (an absolute position in the original file, inside this
    /// doc comment) onto the intra-doc link under it, resolves that link
    /// relative to the commented item, and invokes `cb` with the result.
    pub(crate) fn get_definition_with_descend_at<T>(
        self,
        sema: &Semantics<'_, RootDatabase>,
        offset: TextSize,
        // Definition, CommentOwner, range of intra doc link in original file
        mut cb: impl FnMut(Definition, SyntaxNode, TextRange) -> Option<T>,
    ) -> Option<T> {
        let DocCommentToken { prefix_len, doc_token } = self;
        // offset relative to the comments contents
        let original_start = doc_token.text_range().start();
        let relative_comment_offset = offset - original_start - prefix_len;

        sema.descend_into_macros(doc_token).into_iter().find_map(|t| {
            // The descended token is either a `///` comment or the string of a
            // `#[doc = ...]` attribute; find the attached item node and the
            // prefix length in either case.
            let (node, descended_prefix_len) = match_ast! {
                match t {
                    ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
                    ast::String(string) => (t.parent_ancestors().skip_while(|n| n.kind() != ATTR).nth(1)?, string.open_quote_text_range()?.len()),
                    _ => return None,
                }
            };
            let token_start = t.text_range().start();
            // Re-anchor the caller's relative offset inside the descended token.
            let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;

            let (attributes, def) = doc_attributes(sema, &node)?;
            let (docs, doc_mapping) = attributes.docs_with_rangemap(sema.db)?;
            // Find the intra-doc link whose mapped range contains the offset.
            let (in_expansion_range, link, ns) =
                extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
                    let mapped = doc_mapping.map(range)?;
                    (mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns))
                })?;
            // get the relative range to the doc/attribute in the expansion
            let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
            // Apply relative range to the original input comment
            let absolute_range = in_expansion_relative_range + original_start + prefix_len;
            let def = resolve_doc_path_for_def(sema.db, def, &link, ns)?;
            cb(def, node, absolute_range)
        })
    }
}
+
+fn broken_link_clone_cb<'a>(link: BrokenLink<'a>) -> Option<(CowStr<'a>, CowStr<'a>)> {
+ Some((/*url*/ link.reference.clone(), /*title*/ link.reference))
+}
+
+// FIXME:
+// BUG: For Option::Some
+// Returns https://doc.rust-lang.org/nightly/core/prelude/v1/enum.Option.html#variant.Some
+// Instead of https://doc.rust-lang.org/nightly/core/option/enum.Option.html
+//
+// This should cease to be a problem if RFC2988 (Stable Rustdoc URLs) is implemented
+// https://github.com/rust-lang/rfcs/pull/2988
+fn get_doc_link(db: &RootDatabase, def: Definition) -> Option<String> {
+ let (target, file, frag) = filename_and_frag_for_def(db, def)?;
+
+ let mut url = get_doc_base_url(db, target)?;
+
+ if let Some(path) = mod_path_of_def(db, target) {
+ url = url.join(&path).ok()?;
+ }
+
+ url = url.join(&file).ok()?;
+ url.set_fragment(frag.as_deref());
+
+ Some(url.into())
+}
+
/// Rewrites an intra-doc link (e.g. `module::MyStruct`) into a web link,
/// returning the rewritten target and the title with any namespace
/// disambiguator (`struct `, `fn@`, ...) stripped.
fn rewrite_intra_doc_link(
    db: &RootDatabase,
    def: Definition,
    target: &str,
    title: &str,
) -> Option<(String, String)> {
    // Split off an optional namespace qualifier, then resolve relative to `def`.
    let (link, ns) = parse_intra_doc_link(target);

    let resolved = resolve_doc_path_for_def(db, def, link, ns)?;
    let mut url = get_doc_base_url(db, resolved)?;

    let (_, file, frag) = filename_and_frag_for_def(db, resolved)?;
    if let Some(path) = mod_path_of_def(db, resolved) {
        url = url.join(&path).ok()?;
    }

    url = url.join(&file).ok()?;
    url.set_fragment(frag.as_deref());

    Some((url.into(), strip_prefixes_suffixes(title).to_string()))
}
+
/// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`).
fn rewrite_url_link(db: &RootDatabase, def: Definition, target: &str) -> Option<String> {
    // Only attempt rewriting for things that look like rustdoc page paths.
    if !(target.contains('#') || target.contains(".html")) {
        return None;
    }

    let mut url = get_doc_base_url(db, def)?;
    let (def, file, frag) = filename_and_frag_for_def(db, def)?;

    if let Some(path) = mod_path_of_def(db, def) {
        url = url.join(&path).ok()?;
    }

    url = url.join(&file).ok()?;
    url.set_fragment(frag.as_deref());
    // Resolve the (possibly relative) `target` against this item's own page URL.
    url.join(target).ok().map(Into::into)
}
+
+fn mod_path_of_def(db: &RootDatabase, def: Definition) -> Option<String> {
+ def.canonical_module_path(db).map(|it| {
+ let mut path = String::new();
+ it.flat_map(|it| it.name(db)).for_each(|name| format_to!(path, "{}/", name));
+ path
+ })
+}
+
/// Rewrites a markdown document, applying 'callback' to each link.
///
/// `callback(target, text)` returns an optional overriding link type, the new
/// target, and the new link text.
fn map_links<'e>(
    events: impl Iterator<Item = Event<'e>>,
    callback: impl Fn(&str, &str) -> (Option<LinkType>, String, String),
) -> impl Iterator<Item = Event<'e>> {
    // True between a link's Start and End events.
    let mut in_link = false;
    // holds the origin link target on start event and the rewritten one on end event
    let mut end_link_target: Option<CowStr<'_>> = None;
    // normally link's type is determined by the type of link tag in the end event,
    // however in some cases we want to change the link type, for example,
    // `Shortcut` type parsed from Start/End tags doesn't make sense for url links
    let mut end_link_type: Option<LinkType> = None;

    events.map(move |evt| match evt {
        Event::Start(Tag::Link(link_type, ref target, _)) => {
            in_link = true;
            // Remember the original target; the Text/Code event rewrites it.
            end_link_target = Some(target.clone());
            end_link_type = Some(link_type);
            evt
        }
        Event::End(Tag::Link(link_type, target, _)) => {
            in_link = false;
            // Emit the (possibly rewritten) target and type stored earlier.
            Event::End(Tag::Link(
                end_link_type.unwrap_or(link_type),
                end_link_target.take().unwrap_or(target),
                CowStr::Borrowed(""),
            ))
        }
        Event::Text(s) if in_link => {
            let (link_type, link_target_s, link_name) =
                callback(&end_link_target.take().unwrap(), &s);
            end_link_target = Some(CowStr::Boxed(link_target_s.into()));
            // Autolinks keep their type; everything else may be overridden.
            if !matches!(end_link_type, Some(LinkType::Autolink)) {
                end_link_type = link_type;
            }
            Event::Text(CowStr::Boxed(link_name.into()))
        }
        Event::Code(s) if in_link => {
            // Same as the Text case, but preserves the inline-code styling.
            let (link_type, link_target_s, link_name) =
                callback(&end_link_target.take().unwrap(), &s);
            end_link_target = Some(CowStr::Boxed(link_target_s.into()));
            if !matches!(end_link_type, Some(LinkType::Autolink)) {
                end_link_type = link_type;
            }
            Event::Code(CowStr::Boxed(link_name.into()))
        }
        _ => evt,
    })
}
+
/// Get the root URL for the documentation of a definition.
///
/// ```ignore
/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
/// ^^^^^^^^^^^^^^^^^^^^^^^^^^
/// ```
///
/// The returned URL always ends with `<crate name>/`.
fn get_doc_base_url(db: &RootDatabase, def: Definition) -> Option<Url> {
    // special case base url of `BuiltinType` to core
    // https://github.com/rust-lang/rust-analyzer/issues/12250
    if let Definition::BuiltinType(..) = def {
        return Url::parse("https://doc.rust-lang.org/nightly/core/").ok();
    };

    let krate = def.krate(db)?;
    let display_name = krate.display_name(db)?;

    let base = match db.crate_graph()[krate.into()].origin {
        // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself.
        // FIXME: Use the toolchains channel instead of nightly
        CrateOrigin::Lang(
            origin @ (LangCrateOrigin::Alloc
            | LangCrateOrigin::Core
            | LangCrateOrigin::ProcMacro
            | LangCrateOrigin::Std
            | LangCrateOrigin::Test),
        ) => {
            format!("https://doc.rust-lang.org/nightly/{origin}")
        }
        _ => {
            // Prefer the crate's own `#[doc(html_root_url = ...)]` if present.
            krate.get_html_root_url(db).or_else(|| {
                let version = krate.version(db);
                // Fallback to docs.rs. This uses `display_name` and can never be
                // correct, but that's what fallbacks are about.
                //
                // FIXME: clicking on the link should just open the file in the editor,
                // instead of falling back to external urls.
                Some(format!(
                    "https://docs.rs/{krate}/{version}/",
                    krate = display_name,
                    version = version.as_deref().unwrap_or("*")
                ))
            })?
        }
    };
    Url::parse(&base).ok()?.join(&format!("{}/", display_name)).ok()
}
+
/// Get the filename and extension generated for a symbol by rustdoc.
///
/// ```ignore
/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
///                                    ^^^^^^^^^^^^^^^^^^^
/// ```
///
/// Also returns the definition the page actually belongs to — assoc items and
/// fields are retargeted to their container — plus an optional URL fragment.
fn filename_and_frag_for_def(
    db: &dyn HirDatabase,
    def: Definition,
) -> Option<(Definition, String, Option<String>)> {
    // Assoc items live on their trait's/impl-target's page, under a fragment.
    if let Some(assoc_item) = def.as_assoc_item(db) {
        let def = match assoc_item.container(db) {
            AssocItemContainer::Trait(t) => t.into(),
            AssocItemContainer::Impl(i) => i.self_ty(db).as_adt()?.into(),
        };
        let (_, file, _) = filename_and_frag_for_def(db, def)?;
        let frag = get_assoc_item_fragment(db, assoc_item)?;
        return Some((def, file, Some(frag)));
    }

    let res = match def {
        Definition::Adt(adt) => match adt {
            Adt::Struct(s) => format!("struct.{}.html", s.name(db)),
            Adt::Enum(e) => format!("enum.{}.html", e.name(db)),
            Adt::Union(u) => format!("union.{}.html", u.name(db)),
        },
        Definition::Module(m) => match m.name(db) {
            // `#[doc(keyword = "...")]` is internal used only by rust compiler
            Some(name) => match m.attrs(db).by_key("doc").find_string_value_in_tt("keyword") {
                Some(kw) => {
                    format!("keyword.{}.html", kw.trim_matches('"'))
                }
                None => format!("{}/index.html", name),
            },
            None => String::from("index.html"),
        },
        Definition::Trait(t) => format!("trait.{}.html", t.name(db)),
        Definition::TypeAlias(t) => format!("type.{}.html", t.name(db)),
        Definition::BuiltinType(t) => format!("primitive.{}.html", t.name()),
        Definition::Function(f) => format!("fn.{}.html", f.name(db)),
        Definition::Variant(ev) => {
            format!("enum.{}.html#variant.{}", ev.parent_enum(db).name(db), ev.name(db))
        }
        Definition::Const(c) => format!("const.{}.html", c.name(db)?),
        Definition::Static(s) => format!("static.{}.html", s.name(db)),
        Definition::Macro(mac) => format!("macro.{}.html", mac.name(db)),
        Definition::Field(field) => {
            // Fields live on their parent's page under a `structfield.` fragment.
            let def = match field.parent_def(db) {
                hir::VariantDef::Struct(it) => Definition::Adt(it.into()),
                hir::VariantDef::Union(it) => Definition::Adt(it.into()),
                hir::VariantDef::Variant(it) => Definition::Variant(it),
            };
            let (_, file, _) = filename_and_frag_for_def(db, def)?;
            return Some((def, file, Some(format!("structfield.{}", field.name(db)))));
        }
        Definition::SelfType(impl_) => {
            let adt = impl_.self_ty(db).as_adt()?.into();
            let (_, file, _) = filename_and_frag_for_def(db, adt)?;
            // FIXME fragment numbering
            return Some((adt, file, Some(String::from("impl"))));
        }
        Definition::Local(_)
        | Definition::GenericParam(_)
        | Definition::Label(_)
        | Definition::BuiltinAttr(_)
        | Definition::ToolModule(_)
        | Definition::DeriveHelper(_) => return None,
    };

    Some((def, res, None))
}
+
/// Get the fragment required to link to a specific field, method, associated type, or associated constant.
///
/// ```ignore
/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
///                                                        ^^^^^^^^^^^^^^
/// ```
fn get_assoc_item_fragment(db: &dyn HirDatabase, assoc_item: hir::AssocItem) -> Option<String> {
    Some(match assoc_item {
        AssocItem::Function(function) => {
            let is_trait_method =
                function.as_assoc_item(db).and_then(|assoc| assoc.containing_trait(db)).is_some();
            // This distinction may get more complicated when specialization is available.
            // Rustdoc makes this decision based on whether a method 'has defaultness'.
            // Currently this is only the case for provided trait methods.
            if is_trait_method && !function.has_body(db) {
                // Required trait method (no default body): `tymethod.` fragment.
                format!("tymethod.{}", function.name(db))
            } else {
                format!("method.{}", function.name(db))
            }
        }
        AssocItem::Const(constant) => format!("associatedconstant.{}", constant.name(db)?),
        AssocItem::TypeAlias(ty) => format!("associatedtype.{}", ty.name(db)),
    })
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs
new file mode 100644
index 000000000..1df9aaae2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs
@@ -0,0 +1,77 @@
+//! Helper tools for intra doc links.
+
// Per-namespace (prefixes, suffixes): a prefix must be followed by `@` or a
// space; a suffix is matched verbatim at the end of the link.
const TYPES: ([&str; 9], [&str; 0]) =
    (["type", "struct", "enum", "mod", "trait", "union", "module", "prim", "primitive"], []);
const VALUES: ([&str; 8], [&str; 1]) =
    (["value", "function", "fn", "method", "const", "static", "mod", "module"], ["()"]);
const MACROS: ([&str; 2], [&str; 1]) = (["macro", "derive"], ["!"]);

/// Extract the specified namespace from an intra-doc-link if one exists.
///
/// # Examples
///
/// * `struct MyStruct` -> ("MyStruct", `Namespace::Types`)
/// * `panic!` -> ("panic", `Namespace::Macros`)
/// * `fn@from_intra_spec` -> ("from_intra_spec", `Namespace::Values`)
pub(super) fn parse_intra_doc_link(s: &str) -> (&str, Option<hir::Namespace>) {
    // Backticks around the link (`` [`Foo`] ``) are not significant.
    let s = s.trim_matches('`');

    [
        (hir::Namespace::Types, (TYPES.0.iter(), TYPES.1.iter())),
        (hir::Namespace::Values, (VALUES.0.iter(), VALUES.1.iter())),
        (hir::Namespace::Macros, (MACROS.0.iter(), MACROS.1.iter())),
    ]
    .into_iter()
    .find_map(|(ns, (mut prefixes, mut suffixes))| {
        // A prefix only counts when separated from the name by `@` or a space.
        if let Some(prefix) = prefixes.find(|&&prefix| {
            s.starts_with(prefix)
                && s.chars().nth(prefix.len()).map_or(false, |c| c == '@' || c == ' ')
        }) {
            Some((&s[prefix.len() + 1..], ns))
        } else {
            suffixes.find_map(|&suffix| s.strip_suffix(suffix).zip(Some(ns)))
        }
    })
    // No marker found: return the trimmed link with no namespace.
    .map_or((s, None), |(s, ns)| (s, Some(ns)))
}
+
+pub(super) fn strip_prefixes_suffixes(s: &str) -> &str {
+ [
+ (TYPES.0.iter(), TYPES.1.iter()),
+ (VALUES.0.iter(), VALUES.1.iter()),
+ (MACROS.0.iter(), MACROS.1.iter()),
+ ]
+ .into_iter()
+ .find_map(|(mut prefixes, mut suffixes)| {
+ if let Some(prefix) = prefixes.find(|&&prefix| {
+ s.starts_with(prefix)
+ && s.chars().nth(prefix.len()).map_or(false, |c| c == '@' || c == ' ')
+ }) {
+ Some(&s[prefix.len() + 1..])
+ } else {
+ suffixes.find_map(|&suffix| s.strip_suffix(suffix))
+ }
+ })
+ .unwrap_or(s)
+}
+
#[cfg(test)]
mod tests {
    use expect_test::{expect, Expect};

    use super::*;

    // Renders `parse_intra_doc_link`'s result as `"name (Namespace)"`.
    fn check(link: &str, expected: Expect) {
        let (l, a) = parse_intra_doc_link(link);
        let a = a.map_or_else(String::new, |a| format!(" ({:?})", a));
        expected.assert_eq(&format!("{}{}", l, a));
    }

    #[test]
    fn test_name() {
        check("foo", expect![[r#"foo"#]]);
        check("struct Struct", expect![[r#"Struct (Types)"#]]);
        check("makro!", expect![[r#"makro (Macros)"#]]);
        check("fn@function", expect![[r#"function (Values)"#]]);
    }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
new file mode 100644
index 000000000..c6bfb6b9d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
@@ -0,0 +1,491 @@
+use expect_test::{expect, Expect};
+use hir::{HasAttrs, Semantics};
+use ide_db::{
+ base_db::{FilePosition, FileRange},
+ defs::Definition,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, match_ast, AstNode, SyntaxNode};
+
+use crate::{
+ doc_links::{extract_definitions_from_docs, resolve_doc_path_for_def, rewrite_links},
+ fixture, TryToNav,
+};
+
/// Asserts that `external_docs` at the `$0` cursor yields exactly `expect`.
fn check_external_docs(ra_fixture: &str, expect: Expect) {
    let (analysis, position) = fixture::position(ra_fixture);
    let url = analysis.external_docs(position).unwrap().expect("could not find url for symbol");

    expect.assert_eq(&url)
}
+
/// Asserts that rewriting the doc links of the item under the cursor yields
/// exactly `expect`.
fn check_rewrite(ra_fixture: &str, expect: Expect) {
    let (analysis, position) = fixture::position(ra_fixture);
    let sema = &Semantics::new(&*analysis.db);
    let (cursor_def, docs) = def_under_cursor(sema, &position);
    let res = rewrite_links(sema.db, docs.as_str(), cursor_def);
    expect.assert_eq(&res)
}
+
/// Asserts that every intra-doc link in the docs of the item under the cursor
/// resolves to the `^^^`-annotated ranges of the fixture.
fn check_doc_links(ra_fixture: &str) {
    // Stable sort key so expected and actual annotation lists compare equal.
    let key_fn = |&(FileRange { file_id, range }, _): &_| (file_id, range.start());

    let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
    expected.sort_by_key(key_fn);
    let sema = &Semantics::new(&*analysis.db);
    let (cursor_def, docs) = def_under_cursor(sema, &position);
    let defs = extract_definitions_from_docs(&docs);
    let actual: Vec<_> = defs
        .into_iter()
        .map(|(_, link, ns)| {
            let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns)
                .unwrap_or_else(|| panic!("Failed to resolve {}", link));
            let nav_target = def.try_to_nav(sema.db).unwrap();
            let range =
                FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() };
            (range, link)
        })
        .sorted_by_key(key_fn)
        .collect();
    assert_eq!(expected, actual);
}
+
/// Finds the documented definition under the cursor; panics when there is no
/// definition or it carries no docs.
fn def_under_cursor(
    sema: &Semantics<'_, RootDatabase>,
    position: &FilePosition,
) -> (Definition, hir::Documentation) {
    let (docs, def) = sema
        .parse(position.file_id)
        .syntax()
        .token_at_offset(position.offset)
        .left_biased()
        .unwrap()
        .parent_ancestors()
        .find_map(|it| node_to_def(sema, &it))
        .expect("no def found")
        .unwrap();
    let docs = docs.expect("no docs found for cursor def");
    (def, docs)
}
+
/// Maps a syntax node to its docs and `Definition`. The outer `Option` is
/// `None` for node kinds that cannot carry docs; the inner one is `None` when
/// the node fails to resolve to a definition.
fn node_to_def(
    sema: &Semantics<'_, RootDatabase>,
    node: &SyntaxNode,
) -> Option<Option<(Option<hir::Documentation>, Definition)>> {
    Some(match_ast! {
        match node {
            ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Module(def))),
            ast::Module(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Module(def))),
            ast::Fn(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Function(def))),
            ast::Struct(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Struct(def)))),
            ast::Union(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Union(def)))),
            ast::Enum(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Enum(def)))),
            ast::Variant(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Variant(def))),
            ast::Trait(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Trait(def))),
            ast::Static(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Static(def))),
            ast::Const(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Const(def))),
            ast::TypeAlias(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::TypeAlias(def))),
            ast::Impl(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::SelfType(def))),
            ast::RecordField(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Field(def))),
            ast::TupleField(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Field(def))),
            ast::Macro(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Macro(def))),
            // ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
            _ => return None,
        }
    })
}
+
// Dependency crate root links to its docs.rs index page.
#[test]
fn external_docs_doc_url_crate() {
    check_external_docs(
        r#"
//- /main.rs crate:main deps:foo
use foo$0::Foo;
//- /lib.rs crate:foo
pub struct Foo;
"#,
        expect![[r#"https://docs.rs/foo/*/foo/index.html"#]],
    );
}

// std and friends link to doc.rust-lang.org, not docs.rs.
#[test]
fn external_docs_doc_url_std_crate() {
    check_external_docs(
        r#"
//- /main.rs crate:std
use self$0;
"#,
        expect![[r#"https://doc.rust-lang.org/nightly/std/index.html"#]],
    );
}

// Structs get a `struct.<Name>.html` page.
#[test]
fn external_docs_doc_url_struct() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub struct Fo$0o;
"#,
        expect![[r#"https://docs.rs/foo/*/foo/struct.Foo.html"#]],
    );
}

// Struct fields get a `#structfield.<name>` fragment on their parent's page.
#[test]
fn external_docs_doc_url_struct_field() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub struct Foo {
    field$0: ()
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#structfield.field"##]],
    );
}

// Free functions get a `fn.<name>.html` page.
#[test]
fn external_docs_doc_url_fn() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub fn fo$0o() {}
"#,
        expect![[r#"https://docs.rs/foo/*/foo/fn.foo.html"#]],
    );
}
+
// Inherent assoc items link to the type's page with `#method.`/`#associatedconstant.`.
#[test]
fn external_docs_doc_url_impl_assoc() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub struct Foo;
impl Foo {
    pub fn method$0() {}
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#method.method"##]],
    );
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub struct Foo;
impl Foo {
    const CONST$0: () = ();
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#associatedconstant.CONST"##]],
    );
}

// Trait-impl assoc items link to the *implementing type's* page, not the trait's.
#[test]
fn external_docs_doc_url_impl_trait_assoc() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub struct Foo;
pub trait Trait {
    fn method() {}
}
impl Trait for Foo {
    pub fn method$0() {}
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#method.method"##]],
    );
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub struct Foo;
pub trait Trait {
    const CONST: () = ();
}
impl Trait for Foo {
    const CONST$0: () = ();
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#associatedconstant.CONST"##]],
    );
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub struct Foo;
pub trait Trait {
    type Type;
}
impl Trait for Foo {
    type Type$0 = ();
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#associatedtype.Type"##]],
    );
}

// Required trait items link to the trait's page; required methods use `#tymethod.`.
#[test]
fn external_docs_doc_url_trait_assoc() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub trait Foo {
    fn method$0();
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/trait.Foo.html#tymethod.method"##]],
    );
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub trait Foo {
    const CONST$0: ();
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/trait.Foo.html#associatedconstant.CONST"##]],
    );
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub trait Foo {
    type Type$0;
}
"#,
        expect![[r##"https://docs.rs/foo/*/foo/trait.Foo.html#associatedtype.Type"##]],
    );
}
+
// Traits get a `trait.<Name>.html` page.
#[test]
fn external_docs_trait() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
trait Trait$0 {}
"#,
        expect![[r#"https://docs.rs/foo/*/foo/trait.Trait.html"#]],
    )
}

// Nested modules map to nested `index.html` directories.
#[test]
fn external_docs_module() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub mod foo {
    pub mod ba$0r {}
}
"#,
        expect![[r#"https://docs.rs/foo/*/foo/foo/bar/index.html"#]],
    )
}

// Re-exported items link to their canonical (defining) module path.
#[test]
fn external_docs_reexport_order() {
    check_external_docs(
        r#"
//- /main.rs crate:foo
pub mod wrapper {
    pub use module::Item;

    pub mod module {
        pub struct Item;
    }
}

fn foo() {
    let bar: wrapper::It$0em;
}
 "#,
        expect![[r#"https://docs.rs/foo/*/foo/wrapper/module/struct.Item.html"#]],
    )
}
+
// Intra-doc links to every basic item kind resolve to the annotated ranges.
#[test]
fn doc_links_items_simple() {
    check_doc_links(
        r#"
//- /main.rs crate:main deps:krate
/// [`krate`]
//! [`Trait`]
//! [`function`]
//! [`CONST`]
//! [`STATIC`]
//! [`Struct`]
//! [`Enum`]
//! [`Union`]
//! [`Type`]
//! [`module`]
use self$0;

const CONST: () = ();
   // ^^^^^ CONST
static STATIC: () = ();
    // ^^^^^^ STATIC
trait Trait {
   // ^^^^^ Trait
}
fn function() {}
// ^^^^^^^^ function
struct Struct;
    // ^^^^^^ Struct
enum Enum {}
  // ^^^^ Enum
union Union {__: ()}
   // ^^^^^ Union
type Type = ();
  // ^^^^ Type
mod module {}
 // ^^^^^^ module
//- /krate.rs crate:krate
// empty
//^file krate
"#,
    )
}

// Links from a type's docs to its inherent impl items (currently disabled).
#[test]
fn doc_links_inherent_impl_items() {
    check_doc_links(
        r#"
// /// [`Struct::CONST`]
// /// [`Struct::function`]
/// FIXME #9694
struct Struct$0;

impl Struct {
    const CONST: () = ();
    fn function() {}
}
"#,
    )
}

// Links from a type's docs to its trait-impl items (currently disabled).
#[test]
fn doc_links_trait_impl_items() {
    check_doc_links(
        r#"
trait Trait {
    type Type;
    const CONST: usize;
    fn function();
}
// /// [`Struct::Type`]
// /// [`Struct::CONST`]
// /// [`Struct::function`]
/// FIXME #9694
struct Struct$0;

impl Trait for Struct {
    type Type = ();
    const CONST: () = ();
    fn function() {}
}
"#,
    )
}

// Links from a trait's docs to the trait itself and its associated items.
#[test]
fn doc_links_trait_items() {
    check_doc_links(
        r#"
/// [`Trait`]
/// [`Trait::Type`]
/// [`Trait::CONST`]
/// [`Trait::function`]
trait Trait$0 {
   // ^^^^^ Trait
type Type;
  // ^^^^ Trait::Type
const CONST: usize;
   // ^^^^^ Trait::CONST
fn function();
// ^^^^^^^^ Trait::function
}
 "#,
    )
}
+
// A crate-level `html_root_url` overrides the docs.rs fallback base URL.
#[test]
fn rewrite_html_root_url() {
    check_rewrite(
        r#"
//- /main.rs crate:foo
#![doc(arbitrary_attribute = "test", html_root_url = "https:/example.com", arbitrary_attribute2)]

pub mod foo {
    pub struct Foo;
}
/// [Foo](foo::Foo)
pub struct B$0ar
"#,
        expect![[r#"[Foo](https://example.com/foo/foo/struct.Foo.html)"#]],
    );
}

// Path-based links in field docs resolve relative to the field's parent page.
#[test]
fn rewrite_on_field() {
    check_rewrite(
        r#"
//- /main.rs crate:foo
pub struct Foo {
    /// [Foo](struct.Foo.html)
    fie$0ld: ()
}
"#,
        expect![[r#"[Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
    );
}

// Intra-doc, backticked, path-based, prefixed, and reference-style links all
// rewrite to the same docs.rs URL; unresolvable references are left as-is.
#[test]
fn rewrite_struct() {
    check_rewrite(
        r#"
//- /main.rs crate:foo
/// [Foo]
pub struct $0Foo;
"#,
        expect![[r#"[Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
    );
    check_rewrite(
        r#"
//- /main.rs crate:foo
/// [`Foo`]
pub struct $0Foo;
"#,
        expect![[r#"[`Foo`](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
    );
    check_rewrite(
        r#"
//- /main.rs crate:foo
/// [Foo](struct.Foo.html)
pub struct $0Foo;
"#,
        expect![[r#"[Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
    );
    check_rewrite(
        r#"
//- /main.rs crate:foo
/// [struct Foo](struct.Foo.html)
pub struct $0Foo;
"#,
        expect![[r#"[struct Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
    );
    check_rewrite(
        r#"
//- /main.rs crate:foo
/// [my Foo][foo]
///
/// [foo]: Foo
pub struct $0Foo;
"#,
        expect![[r#"[my Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
    );
    check_rewrite(
        r#"
//- /main.rs crate:foo
/// [`foo`]
///
/// [`foo`]: Foo
pub struct $0Foo;
"#,
        expect![["[`foo`]"]],
    );
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
new file mode 100644
index 000000000..efa8551a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -0,0 +1,521 @@
+use hir::Semantics;
+use ide_db::{
+ base_db::FileId, helpers::pick_best_token,
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
+};
+use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};
+
+use crate::FilePosition;
+
+/// Result of an "expand macro" request: the macro's name together with the
+/// pretty-printed text of its fully (recursively) expanded output.
+pub struct ExpandedMacro {
+    // Name of the macro / derive that was expanded (shown as the title).
+    pub name: String,
+    // Formatted expansion text presented to the user.
+    pub expansion: String,
+}
+
+// Feature: Expand Macro Recursively
+//
+// Shows the full macro expansion of the macro at current cursor.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Expand macro recursively**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
+pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id);
+
+    // Prefer an identifier under the cursor over any other token kind.
+    let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+        SyntaxKind::IDENT => 1,
+        _ => 0,
+    })?;
+
+    // due to how Rust Analyzer works internally, we need to special case derive attributes,
+    // otherwise they might not get found, e.g. here with the cursor at $0 `#[attr]` would expand:
+    // ```
+    // #[attr]
+    // #[derive($0Foo)]
+    // struct Bar;
+    // ```
+
+    let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
+        let hir_file = sema.hir_file_for(&descended.parent()?);
+        // Only handle tokens that landed inside a #[derive] pseudo-expansion here.
+        if !hir_file.is_derive_attr_pseudo_expansion(db) {
+            return None;
+        }
+
+        let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
+        // up map out of the #[derive] expansion
+        let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
+        let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
+        let expansions = sema.expand_derive_macro(&attr)?;
+        // Index of the derive under the cursor = number of commas in the
+        // attribute's token tree before our token; used to pick the matching
+        // expansion out of `expansions`.
+        let idx = attr
+            .token_tree()?
+            .token_trees_and_tokens()
+            .filter_map(NodeOrToken::into_token)
+            .take_while(|it| it != &token)
+            .filter(|it| it.kind() == T![,])
+            .count();
+        let expansion =
+            format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?);
+        Some(ExpandedMacro { name, expansion })
+    });
+
+    if derive.is_some() {
+        return derive;
+    }
+
+    // FIXME: Intermix attribute and bang! expansions
+    // currently we only recursively expand one of the two types
+    let mut anc = tok.parent_ancestors();
+    // Walk up from the token until we hit either an attribute-macro'd item or
+    // a macro call; whichever comes first wins.
+    let (name, expanded, kind) = loop {
+        let node = anc.next()?;
+
+        if let Some(item) = ast::Item::cast(node.clone()) {
+            if let Some(def) = sema.resolve_attr_macro_call(&item) {
+                break (
+                    def.name(db).to_string(),
+                    expand_attr_macro_recur(&sema, &item)?,
+                    SyntaxKind::MACRO_ITEMS,
+                );
+            }
+        }
+        if let Some(mac) = ast::MacroCall::cast(node) {
+            break (
+                mac.path()?.segment()?.name_ref()?.to_string(),
+                expand_macro_recur(&sema, &mac)?,
+                // The parent kind tells `format` what wrapper the fragment
+                // needs (expr/pat/type); default to items when detached.
+                mac.syntax().parent().map(|it| it.kind()).unwrap_or(SyntaxKind::MACRO_ITEMS),
+            );
+        }
+    };
+
+    // FIXME:
+    // macro expansion may lose all white space information
+    // But we hope someday we can use ra_fmt for that
+    let expansion = format(db, kind, position.file_id, expanded);
+
+    Some(ExpandedMacro { name, expansion })
+}
+
+/// Expand a bang-macro call, then recursively expand any macro calls inside
+/// the result.
+fn expand_macro_recur(
+    sema: &Semantics<'_, RootDatabase>,
+    macro_call: &ast::MacroCall,
+) -> Option<SyntaxNode> {
+    // `clone_for_update` makes the tree mutable so `expand` can splice with `ted`.
+    let expanded = sema.expand(macro_call)?.clone_for_update();
+    expand(sema, expanded, ast::MacroCall::cast, expand_macro_recur)
+}
+
+/// Expand an attribute macro on `item`, then recursively expand attribute
+/// macros on items inside the result.
+fn expand_attr_macro_recur(
+    sema: &Semantics<'_, RootDatabase>,
+    item: &ast::Item,
+) -> Option<SyntaxNode> {
+    // `clone_for_update` makes the tree mutable so `expand` can splice with `ted`.
+    let expanded = sema.expand_attr_macro(item)?.clone_for_update();
+    expand(sema, expanded, ast::Item::cast, expand_attr_macro_recur)
+}
+
+/// Generic driver for recursive expansion: finds nested candidates of type `T`
+/// (cast via `f`) inside `expanded`, expands each with `exp`, and splices the
+/// results back into the tree.
+fn expand<T: AstNode>(
+    sema: &Semantics<'_, RootDatabase>,
+    expanded: SyntaxNode,
+    f: impl FnMut(SyntaxNode) -> Option<T>,
+    exp: impl Fn(&Semantics<'_, RootDatabase>, &T) -> Option<SyntaxNode>,
+) -> Option<SyntaxNode> {
+    let children = expanded.descendants().filter_map(f);
+    let mut replacements = Vec::new();
+
+    for child in children {
+        if let Some(new_node) = exp(sema, &child) {
+            // check if the whole original syntax is replaced
+            if expanded == *child.syntax() {
+                return Some(new_node);
+            }
+            replacements.push((child, new_node));
+        }
+    }
+
+    // NOTE(review): replacements are applied in reverse discovery order —
+    // presumably so earlier `ted::replace` calls cannot detach nodes that a
+    // later replacement still refers to; confirm before reordering.
+    replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
+    Some(expanded)
+}
+
+/// Render `expanded` as text: re-insert whitespace lost during expansion, then
+/// try rustfmt via `_format`, falling back to the whitespace-fixed text if
+/// formatting fails.
+fn format(db: &RootDatabase, kind: SyntaxKind, file_id: FileId, expanded: SyntaxNode) -> String {
+    let expansion = insert_ws_into(expanded).to_string();
+
+    _format(db, kind, file_id, &expansion).unwrap_or(expansion)
+}
+
+// Stub used in tests and on wasm32/emscripten targets — presumably because
+// spawning an external `rustfmt` process is unavailable or undesirable there
+// (TODO confirm). Returning `None` makes `format` fall back to the
+// unformatted expansion.
+#[cfg(any(test, target_arch = "wasm32", target_os = "emscripten"))]
+fn _format(
+    _db: &RootDatabase,
+    _kind: SyntaxKind,
+    _file_id: FileId,
+    _expansion: &str,
+) -> Option<String> {
+    None
+}
+
+/// Shell out to `rustfmt` to pretty-print `expansion`.
+///
+/// Non-item fragments (patterns, expressions, types) are wrapped in a dummy
+/// item so rustfmt can parse them; the wrapper is stripped again afterwards.
+/// Returns `None` on any failure so `format` can fall back to the unformatted
+/// text.
+#[cfg(not(any(test, target_arch = "wasm32", target_os = "emscripten")))]
+fn _format(
+    db: &RootDatabase,
+    kind: SyntaxKind,
+    file_id: FileId,
+    expansion: &str,
+) -> Option<String> {
+    use ide_db::base_db::{FileLoader, SourceDatabase};
+    // hack until we get hygiene working (same character amount to preserve formatting as much as possible)
+    const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
+    let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE);
+    let (prefix, suffix) = match kind {
+        SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"),
+        SyntaxKind::MACRO_EXPR | SyntaxKind::MACRO_STMTS => ("fn __() {", "}"),
+        SyntaxKind::MACRO_TYPE => ("type __ =", ";"),
+        _ => ("", ""),
+    };
+    let expansion = format!("{prefix}{expansion}{suffix}");
+
+    // Format with the edition of the crate the file belongs to.
+    let &crate_id = db.relevant_crates(file_id).iter().next()?;
+    let edition = db.crate_graph()[crate_id].edition;
+
+    let mut cmd = std::process::Command::new(toolchain::rustfmt());
+    cmd.arg("--edition");
+    cmd.arg(edition.to_string());
+
+    let mut rustfmt = cmd
+        .stdin(std::process::Stdio::piped())
+        .stdout(std::process::Stdio::piped())
+        .stderr(std::process::Stdio::piped())
+        .spawn()
+        .ok()?;
+
+    std::io::Write::write_all(rustfmt.stdin.as_mut()?, expansion.as_bytes()).ok()?;
+
+    let output = rustfmt.wait_with_output().ok()?;
+    let captured_stdout = String::from_utf8(output.stdout).ok()?;
+
+    if output.status.success() && !captured_stdout.trim().is_empty() {
+        // Undo the `$crate` placeholder and peel the dummy wrapper back off.
+        let output = captured_stdout.replace(DOLLAR_CRATE_REPLACE, "$crate");
+        let output = output.trim().strip_prefix(prefix)?;
+        let output = match kind {
+            SyntaxKind::MACRO_PAT => {
+                // rustfmt may break the pattern wrapper onto multiple lines,
+                // producing a trailing comma — accept both shapes.
+                output.strip_suffix(suffix).or_else(|| output.strip_suffix(": u32,\n);"))?
+            }
+            _ => output.strip_suffix(suffix)?,
+        };
+        let trim_indent = stdx::trim_indent(output);
+        tracing::debug!("expand_macro: formatting succeeded");
+        Some(trim_indent)
+    } else {
+        None
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::fixture;
+
+    /// Run `expand_macro` at the fixture's `$0` cursor and compare
+    /// "name\nexpansion" against the expectation.
+    #[track_caller]
+    fn check(ra_fixture: &str, expect: Expect) {
+        let (analysis, pos) = fixture::position(ra_fixture);
+        let expansion = analysis.expand_macro(pos).unwrap().unwrap();
+        let actual = format!("{}\n{}", expansion.name, expansion.expansion);
+        expect.assert_eq(&actual);
+    }
+
+ #[test]
+ fn macro_expand_as_keyword() {
+ check(
+ r#"
+macro_rules! bar {
+ ($i:tt) => { $i as _ }
+}
+fn main() {
+ let x: u64 = ba$0r!(5i64);
+}
+"#,
+ expect![[r#"
+ bar
+ 5i64 as _"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_underscore() {
+ check(
+ r#"
+macro_rules! bar {
+ ($i:tt) => { for _ in 0..$i {} }
+}
+fn main() {
+ ba$0r!(42);
+}
+"#,
+ expect![[r#"
+ bar
+ for _ in 0..42{}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_recursive_expansion() {
+ check(
+ r#"
+macro_rules! bar {
+ () => { fn b() {} }
+}
+macro_rules! foo {
+ () => { bar!(); }
+}
+macro_rules! baz {
+ () => { foo!(); }
+}
+f$0oo!();
+"#,
+ expect![[r#"
+ foo
+ fn b(){}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_multiple_lines() {
+ check(
+ r#"
+macro_rules! foo {
+ () => {
+ fn some_thing() -> u32 {
+ let a = 0;
+ a + 10
+ }
+ }
+}
+f$0oo!();
+ "#,
+ expect![[r#"
+ foo
+ fn some_thing() -> u32 {
+ let a = 0;
+ a+10
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_match_ast() {
+ check(
+ r#"
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+ (match ($node:expr) {
+ $( ast::$ast:ident($it:ident) => $res:block, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )*
+ { $catch_all }
+ }};
+}
+
+fn main() {
+ mat$0ch_ast! {
+ match container {
+ ast::TraitDef(it) => {},
+ ast::ImplDef(it) => {},
+ _ => { continue },
+ }
+ }
+}
+"#,
+ expect![[r#"
+ match_ast
+ {
+ if let Some(it) = ast::TraitDef::cast(container.clone()){}
+ else if let Some(it) = ast::ImplDef::cast(container.clone()){}
+ else {
+ {
+ continue
+ }
+ }
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_match_ast_inside_let_statement() {
+ check(
+ r#"
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+ (match ($node:expr) {}) => {{}};
+}
+
+fn main() {
+ let p = f(|it| {
+ let res = mat$0ch_ast! { match c {}};
+ Some(res)
+ })?;
+}
+"#,
+ expect![[r#"
+ match_ast
+ {}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_inner_macro_rules() {
+ check(
+ r#"
+macro_rules! foo {
+ ($t:tt) => {{
+ macro_rules! bar {
+ () => {
+ $t
+ }
+ }
+ bar!()
+ }};
+}
+
+fn main() {
+ foo$0!(42);
+}
+ "#,
+ expect![[r#"
+ foo
+ {
+ macro_rules! bar {
+ () => {
+ 42
+ }
+ }
+ 42
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_inner_macro_fail_to_expand() {
+ check(
+ r#"
+macro_rules! bar {
+ (BAD) => {};
+}
+macro_rules! foo {
+ () => {bar!()};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_with_dollar_crate() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! bar {
+ () => {0};
+}
+macro_rules! foo {
+ () => {$crate::bar!()};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ 0"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_with_dyn_absolute_path() {
+ check(
+ r#"
+macro_rules! foo {
+ () => {fn f<T>(_: &dyn ::std::marker::Copy) {}};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ fn f<T>(_: &dyn ::std::marker::Copy){}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive() {
+ check(
+ r#"
+//- proc_macros: identity
+//- minicore: clone, derive
+
+#[proc_macros::identity]
+#[derive(C$0lone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Clone
+ impl < >core::clone::Clone for Foo< >{}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive2() {
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Cop$0y)]
+#[derive(Clone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Copy
+ impl < >core::marker::Copy for Foo< >{}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive_multi() {
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Cop$0y, Clone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Copy
+ impl < >core::marker::Copy for Foo< >{}
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Copy, Cl$0one)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Clone
+ impl < >core::clone::Clone for Foo< >{}
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
new file mode 100644
index 000000000..45f1fd748
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
@@ -0,0 +1,662 @@
+use std::iter::successors;
+
+use hir::Semantics;
+use ide_db::RootDatabase;
+use syntax::{
+ algo::{self, skip_trivia_token},
+ ast::{self, AstNode, AstToken},
+ Direction, NodeOrToken,
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T,
+};
+
+use crate::FileRange;
+
+// Feature: Expand and Shrink Selection
+//
+// Extends or shrinks the current selection to the encompassing syntactic construct
+// (expression, statement, item, module, etc). It works with multiple cursors.
+//
+// This is a standard LSP feature and not a protocol extension.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Alt+Shift+→], kbd:[Alt+Shift+←]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif[]
+pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
+    let sema = Semantics::new(db);
+    let src = sema.parse(frange.file_id);
+    // If no larger enclosing construct is found, the selection stays as-is.
+    try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
+}
+
+/// Compute the next-larger selection for `frange`, or `None` to keep it.
+///
+/// Order of attempts: empty-range token selection (with word-in-comment/string
+/// and whitespace special cases), covering token, comment runs, macro token
+/// trees, the covering node itself, list items (with delimiter), and finally
+/// the parent node.
+fn try_extend_selection(
+    sema: &Semantics<'_, RootDatabase>,
+    root: &SyntaxNode,
+    frange: FileRange,
+) -> Option<TextRange> {
+    let range = frange.range;
+
+    // Token kinds where sub-token (word) selection makes sense.
+    let string_kinds = [COMMENT, STRING, BYTE_STRING];
+    // Node kinds whose children are comma/plus-separated list items.
+    let list_kinds = [
+        RECORD_PAT_FIELD_LIST,
+        MATCH_ARM_LIST,
+        RECORD_FIELD_LIST,
+        TUPLE_FIELD_LIST,
+        RECORD_EXPR_FIELD_LIST,
+        VARIANT_LIST,
+        USE_TREE_LIST,
+        GENERIC_PARAM_LIST,
+        GENERIC_ARG_LIST,
+        TYPE_BOUND_LIST,
+        PARAM_LIST,
+        ARG_LIST,
+        ARRAY_EXPR,
+        TUPLE_EXPR,
+        TUPLE_TYPE,
+        TUPLE_PAT,
+        WHERE_CLAUSE,
+    ];
+
+    // An empty range is a bare cursor: pick the best token at the offset.
+    if range.is_empty() {
+        let offset = range.start();
+        let mut leaves = root.token_at_offset(offset);
+        if leaves.clone().all(|it| it.kind() == WHITESPACE) {
+            return Some(extend_ws(root, leaves.next()?, offset));
+        }
+        let leaf_range = match leaves {
+            TokenAtOffset::None => return None,
+            TokenAtOffset::Single(l) => {
+                if string_kinds.contains(&l.kind()) {
+                    // Inside a comment/string, first select just the word.
+                    extend_single_word_in_comment_or_string(&l, offset)
+                        .unwrap_or_else(|| l.text_range())
+                } else {
+                    l.text_range()
+                }
+            }
+            TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(),
+        };
+        return Some(leaf_range);
+    };
+    let node = match root.covering_element(range) {
+        NodeOrToken::Token(token) => {
+            if token.text_range() != range {
+                // Selection is a strict part of one token: grow to the token.
+                return Some(token.text_range());
+            }
+            if let Some(comment) = ast::Comment::cast(token.clone()) {
+                if let Some(range) = extend_comments(comment) {
+                    return Some(range);
+                }
+            }
+            token.parent()?
+        }
+        NodeOrToken::Node(node) => node,
+    };
+
+    // if we are in single token_tree, we maybe live in macro or attr
+    if node.kind() == TOKEN_TREE {
+        if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
+            if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
+                return Some(range);
+            }
+        }
+    }
+
+    if node.text_range() != range {
+        return Some(node.text_range());
+    }
+
+    let node = shallowest_node(&node);
+
+    if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
+        if let Some(range) = extend_list_item(&node) {
+            return Some(range);
+        }
+    }
+
+    node.parent().map(|it| it.text_range())
+}
+
+/// Extend a selection inside a macro call's token tree by mapping the tokens
+/// into the macro expansion, finding their common ancestor there, and mapping
+/// the ancestor's token span back to the call site.
+fn extend_tokens_from_range(
+    sema: &Semantics<'_, RootDatabase>,
+    macro_call: ast::MacroCall,
+    original_range: TextRange,
+) -> Option<TextRange> {
+    let src = macro_call.syntax().covering_element(original_range);
+    let (first_token, last_token) = match src {
+        NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?),
+        NodeOrToken::Token(it) => (it.clone(), it),
+    };
+
+    let mut first_token = skip_trivia_token(first_token, Direction::Next)?;
+    let mut last_token = skip_trivia_token(last_token, Direction::Prev)?;
+
+    // Shrink inward until both endpoint tokens lie within the selection.
+    while !original_range.contains_range(first_token.text_range()) {
+        first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?;
+    }
+    while !original_range.contains_range(last_token.text_range()) {
+        last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?;
+    }
+
+    // compute original mapped token range
+    let extended = {
+        let fst_expanded = sema.descend_into_macros_single(first_token.clone());
+        let lst_expanded = sema.descend_into_macros_single(last_token.clone());
+        let mut lca =
+            algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
+        lca = shallowest_node(&lca);
+        // If the selection already spans the whole LCA, step up one level so
+        // the selection actually grows.
+        if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
+            lca = lca.parent()?;
+        }
+        lca
+    };
+
+    // Compute parent node range
+    // A call-site token belongs to the extended range iff its expanded parent
+    // is a descendant of (or equal to) the chosen ancestor node.
+    let validate = |token: &SyntaxToken| -> bool {
+        let expanded = sema.descend_into_macros_single(token.clone());
+        let parent = match expanded.parent() {
+            Some(it) => it,
+            None => return false,
+        };
+        algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
+    };
+
+    // Find the first and last text range under expanded parent
+    let first = successors(Some(first_token), |token| {
+        let token = token.prev_token()?;
+        skip_trivia_token(token, Direction::Prev)
+    })
+    .take_while(validate)
+    .last()?;
+
+    let last = successors(Some(last_token), |token| {
+        let token = token.next_token()?;
+        skip_trivia_token(token, Direction::Next)
+    })
+    .take_while(validate)
+    .last()?;
+
+    let range = first.text_range().cover(last.text_range());
+    // Only report a result that strictly grows the selection.
+    if range.contains_range(original_range) && original_range != range {
+        Some(range)
+    } else {
+        None
+    }
+}
+
+/// Walk up the ancestor chain as long as the text range stays identical and
+/// return the topmost such node; this makes sibling traversal possible.
+fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
+    let mut shallowest = node.clone();
+    for ancestor in node.ancestors().skip(1) {
+        if ancestor.text_range() != node.text_range() {
+            break;
+        }
+        shallowest = ancestor;
+    }
+    shallowest
+}
+
+/// Inside a comment or string token, select just the alphanumeric/underscore
+/// word around `offset`; `None` when the cursor is not inside such a word
+/// (the caller then falls back to selecting the whole token).
+fn extend_single_word_in_comment_or_string(
+    leaf: &SyntaxToken,
+    offset: TextSize,
+) -> Option<TextRange> {
+    let text: &str = leaf.text();
+    // Cursor position relative to the start of the token.
+    let cursor_position: u32 = (offset - leaf.text_range().start()).into();
+
+    let (before, after) = text.split_at(cursor_position as usize);
+
+    fn non_word_char(c: char) -> bool {
+        !(c.is_alphanumeric() || c == '_')
+    }
+
+    // Word start: just after the last non-word char preceding the cursor.
+    let start_idx = before.rfind(non_word_char)? as u32;
+    // Word end: the first non-word char after the cursor, or the token end.
+    let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32;
+
+    let from: TextSize = (start_idx + 1).into();
+    let to: TextSize = (cursor_position + end_idx).into();
+
+    let range = TextRange::new(from, to);
+    if range.is_empty() {
+        None
+    } else {
+        // Translate back into file coordinates.
+        Some(range + leaf.text_range().start())
+    }
+}
+
+/// When the cursor sits in whitespace, select the following sibling element
+/// (e.g. the item on the next line) including its trailing newline; otherwise
+/// select the whitespace token itself.
+fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange {
+    let ws_text = ws.text();
+    // Split the whitespace token at the cursor.
+    let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
+    let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
+    let ws_suffix = &ws_text[suffix];
+    let ws_prefix = &ws_text[prefix];
+    // Cursor is on the same line as the next element (newline is behind it).
+    if ws_text.contains('\n') && !ws_suffix.contains('\n') {
+        if let Some(node) = ws.next_sibling_or_token() {
+            // Start at the beginning of the cursor's line (or at the element
+            // if the whitespace has no newline before the cursor).
+            let start = match ws_prefix.rfind('\n') {
+                Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32),
+                None => node.text_range().start(),
+            };
+            // Include a trailing newline, if present.
+            let end = if root.text().char_at(node.text_range().end()) == Some('\n') {
+                node.text_range().end() + TextSize::of('\n')
+            } else {
+                node.text_range().end()
+            };
+            return TextRange::new(start, end);
+        }
+    }
+    ws.text_range()
+}
+
+/// Of the two tokens adjacent to a cursor, choose the one the user most
+/// likely means: identifier-like tokens beat everything, whitespace loses
+/// to anything, ties go to the left token.
+fn pick_best(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
+    fn priority(token: &SyntaxToken) -> usize {
+        match token.kind() {
+            WHITESPACE => 0,
+            IDENT | T![self] | T![super] | T![crate] | T![Self] | LIFETIME_IDENT => 2,
+            _ => 1,
+        }
+    }
+    if priority(&r) > priority(&l) {
+        r
+    } else {
+        l
+    }
+}
+
+/// Extend list item selection to include nearby delimiter and whitespace.
+fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
+    // Whitespace that stays on one line — safe to swallow with the delimiter.
+    fn is_single_line_ws(node: &SyntaxToken) -> bool {
+        node.kind() == WHITESPACE && !node.text().contains('\n')
+    }
+
+    // Find the list delimiter adjacent to `node` in direction `dir`, skipping
+    // over single-line whitespace.
+    fn nearby_delimiter(
+        delimiter_kind: SyntaxKind,
+        node: &SyntaxNode,
+        dir: Direction,
+    ) -> Option<SyntaxToken> {
+        node.siblings_with_tokens(dir)
+            .skip(1)
+            .find(|node| match node {
+                NodeOrToken::Node(_) => true,
+                NodeOrToken::Token(it) => !is_single_line_ws(it),
+            })
+            .and_then(|it| it.into_token())
+            .filter(|node| node.kind() == delimiter_kind)
+    }
+
+    // Trait bound lists are `+`-separated; everything else uses commas.
+    let delimiter = match node.kind() {
+        TYPE_BOUND => T![+],
+        _ => T![,],
+    };
+
+    if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) {
+        // Include any following whitespace when delimiter is after list item.
+        let final_node = delimiter_node
+            .next_sibling_or_token()
+            .and_then(|it| it.into_token())
+            .filter(is_single_line_ws)
+            .unwrap_or(delimiter_node);
+
+        return Some(TextRange::new(node.text_range().start(), final_node.text_range().end()));
+    }
+    // Last item in the list: take the preceding delimiter instead.
+    if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) {
+        return Some(TextRange::new(delimiter_node.text_range().start(), node.text_range().end()));
+    }
+
+    None
+}
+
+/// If `comment` is part of a run of adjacent comments, return the range
+/// covering the whole run; `None` when the comment stands alone.
+fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
+    let first = adj_comments(&comment, Direction::Prev);
+    let last = adj_comments(&comment, Direction::Next);
+    if first == last {
+        return None;
+    }
+    Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end()))
+}
+
+/// Return the farthest comment reachable from `comment` by walking siblings in
+/// `dir`, stopping at any non-comment/non-whitespace element or a blank line.
+fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
+    let mut res = comment.clone();
+    for element in comment.syntax().siblings_with_tokens(dir) {
+        let token = match element.as_token() {
+            None => break,
+            Some(token) => token,
+        };
+        if let Some(c) = ast::Comment::cast(token.clone()) {
+            res = c
+        } else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
+            // A blank line ("\n\n") or any other token ends the comment run.
+            break;
+        }
+    }
+    res
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+
+ use super::*;
+
+    /// Starting from the `$0` cursor in `before`, repeatedly extend the
+    /// selection and assert each successive selection equals the next entry
+    /// in `afters`.
+    fn do_check(before: &str, afters: &[&str]) {
+        let (analysis, position) = fixture::position(before);
+        let before = analysis.file_text(position.file_id).unwrap();
+        let range = TextRange::empty(position.offset);
+        let mut frange = FileRange { file_id: position.file_id, range };
+
+        for &after in afters {
+            frange.range = analysis.extend_selection(frange).unwrap();
+            let actual = &before[frange.range];
+            assert_eq!(after, actual);
+        }
+    }
+
+ #[test]
+ fn test_extend_selection_arith() {
+ do_check(r#"fn foo() { $01 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]);
+ }
+
+ #[test]
+ fn test_extend_selection_list() {
+ do_check(r#"fn foo($0x: i32) {}"#, &["x", "x: i32"]);
+ do_check(r#"fn foo($0x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]);
+ do_check(r#"fn foo($0x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,", "(x: i32,y: i32)"]);
+ do_check(r#"fn foo(x: i32, $0y: i32) {}"#, &["y", "y: i32", ", y: i32"]);
+ do_check(r#"fn foo(x: i32, $0y: i32, ) {}"#, &["y", "y: i32", "y: i32, "]);
+ do_check(r#"fn foo(x: i32,$0y: i32) {}"#, &["y", "y: i32", ",y: i32"]);
+
+ do_check(r#"const FOO: [usize; 2] = [ 22$0 , 33];"#, &["22", "22 , "]);
+ do_check(r#"const FOO: [usize; 2] = [ 22 , 33$0];"#, &["33", ", 33"]);
+ do_check(r#"const FOO: [usize; 2] = [ 22 , 33$0 ,];"#, &["33", "33 ,", "[ 22 , 33 ,]"]);
+
+ do_check(r#"fn main() { (1, 2$0) }"#, &["2", ", 2", "(1, 2)"]);
+
+ do_check(
+ r#"
+const FOO: [usize; 2] = [
+ 22,
+ $033,
+]"#,
+ &["33", "33,"],
+ );
+
+ do_check(
+ r#"
+const FOO: [usize; 2] = [
+ 22
+ , 33$0,
+]"#,
+ &["33", "33,"],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_start_of_the_line() {
+ do_check(
+ r#"
+impl S {
+$0 fn foo() {
+
+ }
+}"#,
+ &[" fn foo() {\n\n }\n"],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_doc_comments() {
+ do_check(
+ r#"
+struct A;
+
+/// bla
+/// bla
+struct B {
+ $0
+}
+ "#,
+ &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"],
+ )
+ }
+
+ #[test]
+ fn test_extend_selection_comments() {
+ do_check(
+ r#"
+fn bar(){}
+
+// fn foo() {
+// 1 + $01
+// }
+
+// fn foo(){}
+ "#,
+ &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"],
+ );
+
+ do_check(
+ r#"
+// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
+// pub enum Direction {
+// $0 Next,
+// Prev
+// }
+"#,
+ &[
+ "// Next,",
+ "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }",
+ ],
+ );
+
+ do_check(
+ r#"
+/*
+foo
+_bar1$0*/
+"#,
+ &["_bar1", "/*\nfoo\n_bar1*/"],
+ );
+
+ do_check(r#"//!$0foo_2 bar"#, &["foo_2", "//!foo_2 bar"]);
+
+ do_check(r#"/$0/foo bar"#, &["//foo bar"]);
+ }
+
+ #[test]
+ fn test_extend_selection_prefer_idents() {
+ do_check(
+ r#"
+fn main() { foo$0+bar;}
+"#,
+ &["foo", "foo+bar"],
+ );
+ do_check(
+ r#"
+fn main() { foo+$0bar;}
+"#,
+ &["bar", "foo+bar"],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_prefer_lifetimes() {
+ do_check(r#"fn foo<$0'a>() {}"#, &["'a", "<'a>"]);
+ do_check(r#"fn foo<'a$0>() {}"#, &["'a", "<'a>"]);
+ }
+
+ #[test]
+ fn test_extend_selection_select_first_word() {
+ do_check(r#"// foo bar b$0az quxx"#, &["baz", "// foo bar baz quxx"]);
+ do_check(
+ r#"
+impl S {
+fn foo() {
+// hel$0lo world
+}
+}
+"#,
+ &["hello", "// hello world"],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_string() {
+ do_check(
+ r#"
+fn bar(){}
+
+" fn f$0oo() {"
+"#,
+ &["foo", "\" fn foo() {\""],
+ );
+ }
+
+ #[test]
+ fn test_extend_trait_bounds_list_in_where_clause() {
+ do_check(
+ r#"
+fn foo<R>()
+ where
+ R: req::Request + 'static,
+ R::Params: DeserializeOwned$0 + panic::UnwindSafe + 'static,
+ R::Result: Serialize + 'static,
+"#,
+ &[
+ "DeserializeOwned",
+ "DeserializeOwned + ",
+ "DeserializeOwned + panic::UnwindSafe + 'static",
+ "R::Params: DeserializeOwned + panic::UnwindSafe + 'static",
+ "R::Params: DeserializeOwned + panic::UnwindSafe + 'static,",
+ ],
+ );
+ do_check(r#"fn foo<T>() where T: $0Copy"#, &["Copy"]);
+ do_check(r#"fn foo<T>() where T: $0Copy + Display"#, &["Copy", "Copy + "]);
+ do_check(r#"fn foo<T>() where T: $0Copy +Display"#, &["Copy", "Copy +"]);
+ do_check(r#"fn foo<T>() where T: $0Copy+Display"#, &["Copy", "Copy+"]);
+ do_check(r#"fn foo<T>() where T: Copy + $0Display"#, &["Display", "+ Display"]);
+ do_check(r#"fn foo<T>() where T: Copy + $0Display + Sync"#, &["Display", "Display + "]);
+ do_check(r#"fn foo<T>() where T: Copy +$0Display"#, &["Display", "+Display"]);
+ }
+
+ #[test]
+ fn test_extend_trait_bounds_list_inline() {
+ do_check(r#"fn foo<T: $0Copy>() {}"#, &["Copy"]);
+ do_check(r#"fn foo<T: $0Copy + Display>() {}"#, &["Copy", "Copy + "]);
+ do_check(r#"fn foo<T: $0Copy +Display>() {}"#, &["Copy", "Copy +"]);
+ do_check(r#"fn foo<T: $0Copy+Display>() {}"#, &["Copy", "Copy+"]);
+ do_check(r#"fn foo<T: Copy + $0Display>() {}"#, &["Display", "+ Display"]);
+ do_check(r#"fn foo<T: Copy + $0Display + Sync>() {}"#, &["Display", "Display + "]);
+ do_check(r#"fn foo<T: Copy +$0Display>() {}"#, &["Display", "+Display"]);
+ do_check(
+ r#"fn foo<T: Copy$0 + Display, U: Copy>() {}"#,
+ &[
+ "Copy",
+ "Copy + ",
+ "Copy + Display",
+ "T: Copy + Display",
+ "T: Copy + Display, ",
+ "<T: Copy + Display, U: Copy>",
+ ],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_on_tuple_in_type() {
+ do_check(
+ r#"fn main() { let _: (krate, $0_crate_def_map, module_id) = (); }"#,
+ &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
+ );
+ // white space variations
+ do_check(
+ r#"fn main() { let _: (krate,$0_crate_def_map,module_id) = (); }"#,
+ &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
+ );
+ do_check(
+ r#"
+fn main() { let _: (
+ krate,
+ _crate$0_def_map,
+ module_id
+) = (); }"#,
+ &[
+ "_crate_def_map",
+ "_crate_def_map,",
+ "(\n krate,\n _crate_def_map,\n module_id\n)",
+ ],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_on_tuple_in_rvalue() {
+ do_check(
+ r#"fn main() { let var = (krate, _crate_def_map$0, module_id); }"#,
+ &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
+ );
+ // white space variations
+ do_check(
+ r#"fn main() { let var = (krate,_crate$0_def_map,module_id); }"#,
+ &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
+ );
+ do_check(
+ r#"
+fn main() { let var = (
+ krate,
+ _crate_def_map$0,
+ module_id
+); }"#,
+ &[
+ "_crate_def_map",
+ "_crate_def_map,",
+ "(\n krate,\n _crate_def_map,\n module_id\n)",
+ ],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_on_tuple_pat() {
+ do_check(
+ r#"fn main() { let (krate, _crate_def_map$0, module_id) = var; }"#,
+ &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
+ );
+ // white space variations
+ do_check(
+ r#"fn main() { let (krate,_crate$0_def_map,module_id) = var; }"#,
+ &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
+ );
+ do_check(
+ r#"
+fn main() { let (
+ krate,
+ _crate_def_map$0,
+ module_id
+) = var; }"#,
+ &[
+ "_crate_def_map",
+ "_crate_def_map,",
+ "(\n krate,\n _crate_def_map,\n module_id\n)",
+ ],
+ );
+ }
+
+ #[test]
+ fn extend_selection_inside_macros() {
+ do_check(
+ r#"macro_rules! foo { ($item:item) => {$item} }
+ foo!{fn hello(na$0me:usize){}}"#,
+ &[
+ "name",
+ "name:usize",
+ "(name:usize)",
+ "fn hello(name:usize){}",
+ "{fn hello(name:usize){}}",
+ "foo!{fn hello(name:usize){}}",
+ ],
+ );
+ }
+
+ #[test]
+ fn extend_selection_inside_recur_macros() {
+ do_check(
+ r#" macro_rules! foo2 { ($item:item) => {$item} }
+ macro_rules! foo { ($item:item) => {foo2!($item);} }
+ foo!{fn hello(na$0me:usize){}}"#,
+ &[
+ "name",
+ "name:usize",
+ "(name:usize)",
+ "fn hello(name:usize){}",
+ "{fn hello(name:usize){}}",
+ "foo!{fn hello(name:usize){}}",
+ ],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
new file mode 100644
index 000000000..68fd0952b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
@@ -0,0 +1,579 @@
+use ide_db::SymbolKind;
+use syntax::{
+ ast::{self, HasAttrs, HasGenericParams, HasName},
+ match_ast, AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, TextRange,
+ WalkEvent,
+};
+
+#[derive(Debug, Clone)]
+pub struct StructureNode {
+ pub parent: Option<usize>,
+ pub label: String,
+ pub navigation_range: TextRange,
+ pub node_range: TextRange,
+ pub kind: StructureNodeKind,
+ pub detail: Option<String>,
+ pub deprecated: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum StructureNodeKind {
+ SymbolKind(SymbolKind),
+ Region,
+}
+
+// Feature: File Structure
+//
+// Provides a tree of the symbols defined in the file. Can be used to
+//
+// * fuzzy search symbol in a file (super useful)
+// * draw breadcrumbs to describe the context around the cursor
+// * draw outline of the file
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Ctrl+Shift+O]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif[]
+
+pub(crate) fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
+ let mut res = Vec::new();
+ let mut stack = Vec::new();
+
+ for event in file.syntax().preorder_with_tokens() {
+ match event {
+ WalkEvent::Enter(NodeOrToken::Node(node)) => {
+ if let Some(mut symbol) = structure_node(&node) {
+ symbol.parent = stack.last().copied();
+ stack.push(res.len());
+ res.push(symbol);
+ }
+ }
+ WalkEvent::Leave(NodeOrToken::Node(node)) => {
+ if structure_node(&node).is_some() {
+ stack.pop().unwrap();
+ }
+ }
+ WalkEvent::Enter(NodeOrToken::Token(token)) => {
+ if let Some(mut symbol) = structure_token(token) {
+ symbol.parent = stack.last().copied();
+ stack.push(res.len());
+ res.push(symbol);
+ }
+ }
+ WalkEvent::Leave(NodeOrToken::Token(token)) => {
+ if structure_token(token).is_some() {
+ stack.pop().unwrap();
+ }
+ }
+ }
+ }
+ res
+}
+
+fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
+ fn decl<N: HasName + HasAttrs>(node: N, kind: StructureNodeKind) -> Option<StructureNode> {
+ decl_with_detail(&node, None, kind)
+ }
+
+ fn decl_with_type_ref<N: HasName + HasAttrs>(
+ node: &N,
+ type_ref: Option<ast::Type>,
+ kind: StructureNodeKind,
+ ) -> Option<StructureNode> {
+ let detail = type_ref.map(|type_ref| {
+ let mut detail = String::new();
+ collapse_ws(type_ref.syntax(), &mut detail);
+ detail
+ });
+ decl_with_detail(node, detail, kind)
+ }
+
+ fn decl_with_detail<N: HasName + HasAttrs>(
+ node: &N,
+ detail: Option<String>,
+ kind: StructureNodeKind,
+ ) -> Option<StructureNode> {
+ let name = node.name()?;
+
+ Some(StructureNode {
+ parent: None,
+ label: name.text().to_string(),
+ navigation_range: name.syntax().text_range(),
+ node_range: node.syntax().text_range(),
+ kind,
+ detail,
+ deprecated: node.attrs().filter_map(|x| x.simple_name()).any(|x| x == "deprecated"),
+ })
+ }
+
+ fn collapse_ws(node: &SyntaxNode, output: &mut String) {
+ let mut can_insert_ws = false;
+ node.text().for_each_chunk(|chunk| {
+ for line in chunk.lines() {
+ let line = line.trim();
+ if line.is_empty() {
+ if can_insert_ws {
+ output.push(' ');
+ can_insert_ws = false;
+ }
+ } else {
+ output.push_str(line);
+ can_insert_ws = true;
+ }
+ }
+ })
+ }
+
+ match_ast! {
+ match node {
+ ast::Fn(it) => {
+ let mut detail = String::from("fn");
+ if let Some(type_param_list) = it.generic_param_list() {
+ collapse_ws(type_param_list.syntax(), &mut detail);
+ }
+ if let Some(param_list) = it.param_list() {
+ collapse_ws(param_list.syntax(), &mut detail);
+ }
+ if let Some(ret_type) = it.ret_type() {
+ detail.push(' ');
+ collapse_ws(ret_type.syntax(), &mut detail);
+ }
+
+ decl_with_detail(&it, Some(detail), StructureNodeKind::SymbolKind(SymbolKind::Function))
+ },
+ ast::Struct(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Struct)),
+ ast::Union(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Union)),
+ ast::Enum(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Enum)),
+ ast::Variant(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Variant)),
+ ast::Trait(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Trait)),
+ ast::Module(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Module)),
+ ast::TypeAlias(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::TypeAlias)),
+ ast::RecordField(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::Field)),
+ ast::Const(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::Const)),
+ ast::Static(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::Static)),
+ ast::Impl(it) => {
+ let target_type = it.self_ty()?;
+ let target_trait = it.trait_();
+ let label = match target_trait {
+ None => format!("impl {}", target_type.syntax().text()),
+ Some(t) => {
+ format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),)
+ }
+ };
+
+ let node = StructureNode {
+ parent: None,
+ label,
+ navigation_range: target_type.syntax().text_range(),
+ node_range: it.syntax().text_range(),
+ kind: StructureNodeKind::SymbolKind(SymbolKind::Impl),
+ detail: None,
+ deprecated: false,
+ };
+ Some(node)
+ },
+ ast::Macro(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Macro)),
+ _ => None,
+ }
+ }
+}
+
+fn structure_token(token: SyntaxToken) -> Option<StructureNode> {
+ if let Some(comment) = ast::Comment::cast(token) {
+ let text = comment.text().trim();
+
+ if let Some(region_name) = text.strip_prefix("// region:").map(str::trim) {
+ return Some(StructureNode {
+ parent: None,
+ label: region_name.to_string(),
+ navigation_range: comment.syntax().text_range(),
+ node_range: comment.syntax().text_range(),
+ kind: StructureNodeKind::Region,
+ detail: None,
+ deprecated: false,
+ });
+ }
+ }
+
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use super::*;
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let file = SourceFile::parse(ra_fixture).ok().unwrap();
+ let structure = file_structure(&file);
+ expect.assert_debug_eq(&structure)
+ }
+
+ #[test]
+ fn test_file_structure() {
+ check(
+ r#"
+struct Foo {
+ x: i32
+}
+
+mod m {
+ fn bar1() {}
+ fn bar2<T>(t: T) -> T {}
+ fn bar3<A,
+ B>(a: A,
+ b: B) -> Vec<
+ u32
+ > {}
+}
+
+enum E { X, Y(i32) }
+type T = ();
+static S: i32 = 92;
+const C: i32 = 92;
+
+impl E {}
+
+impl fmt::Debug for E {}
+
+macro_rules! mc {
+ () => {}
+}
+
+#[macro_export]
+macro_rules! mcexp {
+ () => {}
+}
+
+/// Doc comment
+macro_rules! mcexp {
+ () => {}
+}
+
+#[deprecated]
+fn obsolete() {}
+
+#[deprecated(note = "for awhile")]
+fn very_obsolete() {}
+
+// region: Some region name
+// endregion
+
+// region: dontpanic
+mod m {
+fn f() {}
+// endregion
+fn g() {}
+}
+"#,
+ expect![[r#"
+ [
+ StructureNode {
+ parent: None,
+ label: "Foo",
+ navigation_range: 8..11,
+ node_range: 1..26,
+ kind: SymbolKind(
+ Struct,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 0,
+ ),
+ label: "x",
+ navigation_range: 18..19,
+ node_range: 18..24,
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: Some(
+ "i32",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "m",
+ navigation_range: 32..33,
+ node_range: 28..158,
+ kind: SymbolKind(
+ Module,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 2,
+ ),
+ label: "bar1",
+ navigation_range: 43..47,
+ node_range: 40..52,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 2,
+ ),
+ label: "bar2",
+ navigation_range: 60..64,
+ node_range: 57..81,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn<T>(t: T) -> T",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 2,
+ ),
+ label: "bar3",
+ navigation_range: 89..93,
+ node_range: 86..156,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn<A, B>(a: A, b: B) -> Vec< u32 >",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "E",
+ navigation_range: 165..166,
+ node_range: 160..180,
+ kind: SymbolKind(
+ Enum,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 6,
+ ),
+ label: "X",
+ navigation_range: 169..170,
+ node_range: 169..170,
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 6,
+ ),
+ label: "Y",
+ navigation_range: 172..173,
+ node_range: 172..178,
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "T",
+ navigation_range: 186..187,
+ node_range: 181..193,
+ kind: SymbolKind(
+ TypeAlias,
+ ),
+ detail: Some(
+ "()",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "S",
+ navigation_range: 201..202,
+ node_range: 194..213,
+ kind: SymbolKind(
+ Static,
+ ),
+ detail: Some(
+ "i32",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "C",
+ navigation_range: 220..221,
+ node_range: 214..232,
+ kind: SymbolKind(
+ Const,
+ ),
+ detail: Some(
+ "i32",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "impl E",
+ navigation_range: 239..240,
+ node_range: 234..243,
+ kind: SymbolKind(
+ Impl,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "impl fmt::Debug for E",
+ navigation_range: 265..266,
+ node_range: 245..269,
+ kind: SymbolKind(
+ Impl,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "mc",
+ navigation_range: 284..286,
+ node_range: 271..303,
+ kind: SymbolKind(
+ Macro,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "mcexp",
+ navigation_range: 334..339,
+ node_range: 305..356,
+ kind: SymbolKind(
+ Macro,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "mcexp",
+ navigation_range: 387..392,
+ node_range: 358..409,
+ kind: SymbolKind(
+ Macro,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "obsolete",
+ navigation_range: 428..436,
+ node_range: 411..441,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: true,
+ },
+ StructureNode {
+ parent: None,
+ label: "very_obsolete",
+ navigation_range: 481..494,
+ node_range: 443..499,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: true,
+ },
+ StructureNode {
+ parent: None,
+ label: "Some region name",
+ navigation_range: 501..528,
+ node_range: 501..528,
+ kind: Region,
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "m",
+ navigation_range: 568..569,
+ node_range: 543..606,
+ kind: SymbolKind(
+ Module,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 20,
+ ),
+ label: "dontpanic",
+ navigation_range: 543..563,
+ node_range: 543..563,
+ kind: Region,
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 20,
+ ),
+ label: "f",
+ navigation_range: 575..576,
+ node_range: 572..581,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 20,
+ ),
+ label: "g",
+ navigation_range: 598..599,
+ node_range: 582..604,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: false,
+ },
+ ]
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
new file mode 100644
index 000000000..2ea6f6a9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
@@ -0,0 +1,87 @@
+//! Utilities for creating `Analysis` instances for tests.
+use hir::db::DefDatabase;
+use ide_db::base_db::fixture::ChangeFixture;
+use test_utils::{extract_annotations, RangeOrOffset};
+
+use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange};
+
+/// Creates analysis for a single file.
+pub(crate) fn file(ra_fixture: &str) -> (Analysis, FileId) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ (host.analysis(), change_fixture.files[0])
+}
+
+/// Creates analysis from a multi-file fixture; returns the position marked with $0.
+pub(crate) fn position(ra_fixture: &str) -> (Analysis, FilePosition) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+ (host.analysis(), FilePosition { file_id, offset })
+}
+
+/// Creates analysis for a single file, returns range marked with a pair of $0.
+pub(crate) fn range(ra_fixture: &str) -> (Analysis, FileRange) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ let range = range_or_offset.expect_range();
+ (host.analysis(), FileRange { file_id, range })
+}
+
+/// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
+pub(crate) fn range_or_position(ra_fixture: &str) -> (Analysis, FileId, RangeOrOffset) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ (host.analysis(), file_id, range_or_offset)
+}
+
+/// Creates analysis from a multi-file fixture; returns the position marked with $0 and all annotations.
+pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+
+ let annotations = change_fixture
+ .files
+ .iter()
+ .flat_map(|&file_id| {
+ let file_text = host.analysis().file_text(file_id).unwrap();
+ let annotations = extract_annotations(&file_text);
+ annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data))
+ })
+ .collect();
+ (host.analysis(), FilePosition { file_id, offset }, annotations)
+}
+
+/// Creates analysis from a multi-file fixture with annotations, but without a $0 marker.
+pub(crate) fn annotations_without_marker(ra_fixture: &str) -> (Analysis, Vec<(FileRange, String)>) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+
+ let annotations = change_fixture
+ .files
+ .iter()
+ .flat_map(|&file_id| {
+ let file_text = host.analysis().file_text(file_id).unwrap();
+ let annotations = extract_annotations(&file_text);
+ annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data))
+ })
+ .collect();
+ (host.analysis(), annotations)
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/fn_references.rs b/src/tools/rust-analyzer/crates/ide/src/fn_references.rs
new file mode 100644
index 000000000..63fb322ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/fn_references.rs
@@ -0,0 +1,94 @@
+//! This module implements a search for methods and free functions in the specified file.
+//! We have to skip tests, so cannot reuse file_structure module.
+
+use hir::Semantics;
+use ide_assists::utils::test_related_attribute;
+use ide_db::RootDatabase;
+use syntax::{ast, ast::HasName, AstNode, SyntaxNode};
+
+use crate::{FileId, FileRange};
+
+pub(crate) fn find_all_methods(db: &RootDatabase, file_id: FileId) -> Vec<FileRange> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(file_id);
+ source_file.syntax().descendants().filter_map(|it| method_range(it, file_id)).collect()
+}
+
+fn method_range(item: SyntaxNode, file_id: FileId) -> Option<FileRange> {
+ ast::Fn::cast(item).and_then(|fn_def| {
+ if test_related_attribute(&fn_def).is_some() {
+ None
+ } else {
+ fn_def.name().map(|name| FileRange { file_id, range: name.syntax().text_range() })
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+ use crate::{FileRange, TextSize};
+ use std::ops::RangeInclusive;
+
+ #[test]
+ fn test_find_all_methods() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ fn private_fn() {$0}
+
+ pub fn pub_fn() {}
+
+ pub fn generic_fn<T>(arg: T) {}
+ "#,
+ );
+
+ let refs = analysis.find_all_methods(pos.file_id).unwrap();
+ check_result(&refs, &[3..=13, 27..=33, 47..=57]);
+ }
+
+ #[test]
+ fn test_find_trait_methods() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ trait Foo {
+ fn bar() {$0}
+ fn baz() {}
+ }
+ "#,
+ );
+
+ let refs = analysis.find_all_methods(pos.file_id).unwrap();
+ check_result(&refs, &[19..=22, 35..=38]);
+ }
+
+ #[test]
+ fn test_skip_tests() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ //- /lib.rs
+ #[test]
+ fn foo() {$0}
+
+ pub fn pub_fn() {}
+
+ mod tests {
+ #[test]
+ fn bar() {}
+ }
+ "#,
+ );
+
+ let refs = analysis.find_all_methods(pos.file_id).unwrap();
+ check_result(&refs, &[28..=34]);
+ }
+
+ fn check_result(refs: &[FileRange], expected: &[RangeInclusive<u32>]) {
+ assert_eq!(refs.len(), expected.len());
+
+ for (i, item) in refs.iter().enumerate() {
+ let range = &expected[i];
+ assert_eq!(TextSize::from(*range.start()), item.range.start());
+ assert_eq!(TextSize::from(*range.end()), item.range.end());
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs
new file mode 100755
index 000000000..c694d95d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs
@@ -0,0 +1,626 @@
+use ide_db::{syntax_helpers::node_ext::vis_eq, FxHashSet};
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ match_ast, Direction, NodeOrToken, SourceFile,
+ SyntaxKind::{self, *},
+ TextRange, TextSize,
+};
+
+use std::hash::Hash;
+
+const REGION_START: &str = "// region:";
+const REGION_END: &str = "// endregion";
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum FoldKind {
+ Comment,
+ Imports,
+ Mods,
+ Block,
+ ArgList,
+ Region,
+ Consts,
+ Statics,
+ Array,
+ WhereClause,
+ ReturnType,
+ MatchArm,
+}
+
+#[derive(Debug)]
+pub struct Fold {
+ pub range: TextRange,
+ pub kind: FoldKind,
+}
+
+// Feature: Folding
+//
+// Defines folding regions for curly braced blocks, runs of consecutive use, mod, const or static
+// items, and `region` / `endregion` comment markers.
+pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
+ let mut res = vec![];
+ let mut visited_comments = FxHashSet::default();
+ let mut visited_imports = FxHashSet::default();
+ let mut visited_mods = FxHashSet::default();
+ let mut visited_consts = FxHashSet::default();
+ let mut visited_statics = FxHashSet::default();
+
+    // regions can be nested, so keep open region starts in a LIFO stack
+ let mut region_starts: Vec<TextSize> = vec![];
+
+ for element in file.syntax().descendants_with_tokens() {
+ // Fold items that span multiple lines
+ if let Some(kind) = fold_kind(element.kind()) {
+ let is_multiline = match &element {
+ NodeOrToken::Node(node) => node.text().contains_char('\n'),
+ NodeOrToken::Token(token) => token.text().contains('\n'),
+ };
+ if is_multiline {
+ res.push(Fold { range: element.text_range(), kind });
+ continue;
+ }
+ }
+
+ match element {
+ NodeOrToken::Token(token) => {
+ // Fold groups of comments
+ if let Some(comment) = ast::Comment::cast(token) {
+ if visited_comments.contains(&comment) {
+ continue;
+ }
+ let text = comment.text().trim_start();
+ if text.starts_with(REGION_START) {
+ region_starts.push(comment.syntax().text_range().start());
+ } else if text.starts_with(REGION_END) {
+ if let Some(region) = region_starts.pop() {
+ res.push(Fold {
+ range: TextRange::new(region, comment.syntax().text_range().end()),
+ kind: FoldKind::Region,
+ })
+ }
+ } else if let Some(range) =
+ contiguous_range_for_comment(comment, &mut visited_comments)
+ {
+ res.push(Fold { range, kind: FoldKind::Comment })
+ }
+ }
+ }
+ NodeOrToken::Node(node) => {
+ match_ast! {
+ match node {
+ ast::Module(module) => {
+ if module.item_list().is_none() {
+ if let Some(range) = contiguous_range_for_item_group(
+ module,
+ &mut visited_mods,
+ ) {
+ res.push(Fold { range, kind: FoldKind::Mods })
+ }
+ }
+ },
+ ast::Use(use_) => {
+ if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_imports) {
+ res.push(Fold { range, kind: FoldKind::Imports })
+ }
+ },
+ ast::Const(konst) => {
+ if let Some(range) = contiguous_range_for_item_group(konst, &mut visited_consts) {
+ res.push(Fold { range, kind: FoldKind::Consts })
+ }
+ },
+ ast::Static(statik) => {
+ if let Some(range) = contiguous_range_for_item_group(statik, &mut visited_statics) {
+ res.push(Fold { range, kind: FoldKind::Statics })
+ }
+ },
+ ast::WhereClause(where_clause) => {
+ if let Some(range) = fold_range_for_where_clause(where_clause) {
+ res.push(Fold { range, kind: FoldKind::WhereClause })
+ }
+ },
+ ast::MatchArm(match_arm) => {
+ if let Some(range) = fold_range_for_multiline_match_arm(match_arm) {
+ res.push(Fold {range, kind: FoldKind::MatchArm})
+ }
+ },
+ _ => (),
+ }
+ }
+ }
+ }
+ }
+
+ res
+}
+
+fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
+ match kind {
+ COMMENT => Some(FoldKind::Comment),
+ ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList),
+ ARRAY_EXPR => Some(FoldKind::Array),
+ RET_TYPE => Some(FoldKind::ReturnType),
+ ASSOC_ITEM_LIST
+ | RECORD_FIELD_LIST
+ | RECORD_PAT_FIELD_LIST
+ | RECORD_EXPR_FIELD_LIST
+ | ITEM_LIST
+ | EXTERN_ITEM_LIST
+ | USE_TREE_LIST
+ | BLOCK_EXPR
+ | MATCH_ARM_LIST
+ | VARIANT_LIST
+ | TOKEN_TREE => Some(FoldKind::Block),
+ _ => None,
+ }
+}
+
+fn contiguous_range_for_item_group<N>(first: N, visited: &mut FxHashSet<N>) -> Option<TextRange>
+where
+ N: ast::HasVisibility + Clone + Hash + Eq,
+{
+ if !visited.insert(first.clone()) {
+ return None;
+ }
+
+ let (mut last, mut last_vis) = (first.clone(), first.visibility());
+ for element in first.syntax().siblings_with_tokens(Direction::Next) {
+ let node = match element {
+ NodeOrToken::Token(token) => {
+ if let Some(ws) = ast::Whitespace::cast(token) {
+ if !ws.spans_multiple_lines() {
+ // Ignore whitespace without blank lines
+ continue;
+ }
+ }
+ // There is a blank line or another token, which means that the
+ // group ends here
+ break;
+ }
+ NodeOrToken::Node(node) => node,
+ };
+
+ if let Some(next) = N::cast(node) {
+ let next_vis = next.visibility();
+ if eq_visibility(next_vis.clone(), last_vis) {
+ visited.insert(next.clone());
+ last_vis = next_vis;
+ last = next;
+ continue;
+ }
+ }
+ // Stop if we find an item of a different kind or with a different visibility.
+ break;
+ }
+
+ if first != last {
+ Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end()))
+ } else {
+ // The group consists of only one element, therefore it cannot be folded
+ None
+ }
+}
+
+fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) -> bool {
+ match (vis0, vis1) {
+ (None, None) => true,
+ (Some(vis0), Some(vis1)) => vis_eq(&vis0, &vis1),
+ _ => false,
+ }
+}
+
+fn contiguous_range_for_comment(
+ first: ast::Comment,
+ visited: &mut FxHashSet<ast::Comment>,
+) -> Option<TextRange> {
+ visited.insert(first.clone());
+
+ // Only fold comments of the same flavor
+ let group_kind = first.kind();
+ if !group_kind.shape.is_line() {
+ return None;
+ }
+
+ let mut last = first.clone();
+ for element in first.syntax().siblings_with_tokens(Direction::Next) {
+ match element {
+ NodeOrToken::Token(token) => {
+ if let Some(ws) = ast::Whitespace::cast(token.clone()) {
+ if !ws.spans_multiple_lines() {
+ // Ignore whitespace without blank lines
+ continue;
+ }
+ }
+ if let Some(c) = ast::Comment::cast(token) {
+ if c.kind() == group_kind {
+ let text = c.text().trim_start();
+ // regions are not real comments
+ if !(text.starts_with(REGION_START) || text.starts_with(REGION_END)) {
+ visited.insert(c.clone());
+ last = c;
+ continue;
+ }
+ }
+ }
+ // The comment group ends because either:
+ // * An element of a different kind was reached
+ // * A comment of a different flavor was reached
+ break;
+ }
+ NodeOrToken::Node(_) => break,
+ };
+ }
+
+ if first != last {
+ Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end()))
+ } else {
+ // The group consists of only one element, therefore it cannot be folded
+ None
+ }
+}
+
+fn fold_range_for_where_clause(where_clause: ast::WhereClause) -> Option<TextRange> {
+ let first_where_pred = where_clause.predicates().next();
+ let last_where_pred = where_clause.predicates().last();
+
+ if first_where_pred != last_where_pred {
+ let start = where_clause.where_token()?.text_range().end();
+ let end = where_clause.syntax().text_range().end();
+ return Some(TextRange::new(start, end));
+ }
+ None
+}
+
+fn fold_range_for_multiline_match_arm(match_arm: ast::MatchArm) -> Option<TextRange> {
+ if let Some(_) = fold_kind(match_arm.expr()?.syntax().kind()) {
+ return None;
+ }
+ if match_arm.expr()?.syntax().text().contains_char('\n') {
+ return Some(match_arm.expr()?.syntax().text_range());
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::extract_tags;
+
+ use super::*;
+
+ fn check(ra_fixture: &str) {
+ let (ranges, text) = extract_tags(ra_fixture, "fold");
+
+ let parse = SourceFile::parse(&text);
+ let mut folds = folding_ranges(&parse.tree());
+ folds.sort_by_key(|fold| (fold.range.start(), fold.range.end()));
+
+ assert_eq!(
+ folds.len(),
+ ranges.len(),
+ "The amount of folds is different than the expected amount"
+ );
+
+ for (fold, (range, attr)) in folds.iter().zip(ranges.into_iter()) {
+ assert_eq!(fold.range.start(), range.start(), "mismatched start of folding ranges");
+ assert_eq!(fold.range.end(), range.end(), "mismatched end of folding ranges");
+
+ let kind = match fold.kind {
+ FoldKind::Comment => "comment",
+ FoldKind::Imports => "imports",
+ FoldKind::Mods => "mods",
+ FoldKind::Block => "block",
+ FoldKind::ArgList => "arglist",
+ FoldKind::Region => "region",
+ FoldKind::Consts => "consts",
+ FoldKind::Statics => "statics",
+ FoldKind::Array => "array",
+ FoldKind::WhereClause => "whereclause",
+ FoldKind::ReturnType => "returntype",
+ FoldKind::MatchArm => "matcharm",
+ };
+ assert_eq!(kind, &attr.unwrap());
+ }
+ }
+
+ #[test]
+ fn test_fold_comments() {
+ check(
+ r#"
+<fold comment>// Hello
+// this is a multiline
+// comment
+//</fold>
+
+// But this is not
+
+fn main() <fold block>{
+ <fold comment>// We should
+ // also
+ // fold
+ // this one.</fold>
+ <fold comment>//! But this one is different
+ //! because it has another flavor</fold>
+ <fold comment>/* As does this
+ multiline comment */</fold>
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_imports() {
+ check(
+ r#"
+use std::<fold block>{
+ str,
+ vec,
+ io as iop
+}</fold>;
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_mods() {
+ check(
+ r#"
+
+pub mod foo;
+<fold mods>mod after_pub;
+mod after_pub_next;</fold>
+
+<fold mods>mod before_pub;
+mod before_pub_next;</fold>
+pub mod bar;
+
+mod not_folding_single;
+pub mod foobar;
+pub not_folding_single_next;
+
+<fold mods>#[cfg(test)]
+mod with_attribute;
+mod with_attribute_next;</fold>
+
+mod inline0 {}
+mod inline1 {}
+
+mod inline2 <fold block>{
+
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_import_groups() {
+ check(
+ r#"
+<fold imports>use std::str;
+use std::vec;
+use std::io as iop;</fold>
+
+<fold imports>use std::mem;
+use std::f64;</fold>
+
+<fold imports>use std::collections::HashMap;
+// Some random comment
+use std::collections::VecDeque;</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_import_and_groups() {
+ check(
+ r#"
+<fold imports>use std::str;
+use std::vec;
+use std::io as iop;</fold>
+
+<fold imports>use std::mem;
+use std::f64;</fold>
+
+use std::collections::<fold block>{
+ HashMap,
+ VecDeque,
+}</fold>;
+// Some random comment
+"#,
+ );
+ }
+
+ #[test]
+ fn test_folds_structs() {
+ check(
+ r#"
+struct Foo <fold block>{
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_folds_traits() {
+ check(
+ r#"
+trait Foo <fold block>{
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_folds_macros() {
+ check(
+ r#"
+macro_rules! foo <fold block>{
+ ($($tt:tt)*) => { $($tt)* }
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_match_arms() {
+ check(
+ r#"
+fn main() <fold block>{
+ match 0 <fold block>{
+ 0 => 0,
+ _ => 1,
+ }</fold>
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_multiline_non_block_match_arm() {
+ check(
+ r#"
+ fn main() <fold block>{
+ match foo <fold block>{
+ block => <fold block>{
+ }</fold>,
+ matcharm => <fold matcharm>some.
+ call().
+ chain()</fold>,
+ matcharm2
+ => 0,
+ match_expr => <fold matcharm>match foo2 <fold block>{
+ bar => (),
+ }</fold></fold>,
+ array_list => <fold array>[
+ 1,
+ 2,
+ 3,
+ ]</fold>,
+ strustS => <fold matcharm>StructS <fold block>{
+ a: 31,
+ }</fold></fold>,
+ }</fold>
+ }</fold>
+ "#,
+ )
+ }
+
+ #[test]
+ fn fold_big_calls() {
+ check(
+ r#"
+fn main() <fold block>{
+ frobnicate<fold arglist>(
+ 1,
+ 2,
+ 3,
+ )</fold>
+}</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_record_literals() {
+ check(
+ r#"
+const _: S = S <fold block>{
+
+}</fold>;
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_multiline_params() {
+ check(
+ r#"
+fn foo<fold arglist>(
+ x: i32,
+ y: String,
+)</fold> {}
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_multiline_array() {
+ check(
+ r#"
+const FOO: [usize; 4] = <fold array>[
+ 1,
+ 2,
+ 3,
+ 4,
+]</fold>;
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_region() {
+ check(
+ r#"
+// 1. some normal comment
+<fold region>// region: test
+// 2. some normal comment
+<fold region>// region: inner
+fn f() {}
+// endregion</fold>
+fn f2() {}
+// endregion: test</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_consecutive_const() {
+ check(
+ r#"
+<fold consts>const FIRST_CONST: &str = "first";
+const SECOND_CONST: &str = "second";</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_consecutive_static() {
+ check(
+ r#"
+<fold statics>static FIRST_STATIC: &str = "first";
+static SECOND_STATIC: &str = "second";</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_where_clause() {
+ // fold multi-line and don't fold single line.
+ check(
+ r#"
+fn foo()
+where<fold whereclause>
+ A: Foo,
+ B: Foo,
+ C: Foo,
+ D: Foo,</fold> {}
+
+fn bar()
+where
+ A: Bar, {}
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_return_type() {
+ check(
+ r#"
+fn foo()<fold returntype>-> (
+ bool,
+ bool,
+)</fold> { (true, true) }
+
+fn bar() -> (bool, bool) { (true, true) }
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
new file mode 100644
index 000000000..926292c9b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -0,0 +1,112 @@
+use hir::Semantics;
+use ide_db::{
+ defs::{Definition, NameClass, NameRefClass},
+ RootDatabase,
+};
+use syntax::{ast, match_ast, AstNode, SyntaxKind::*, T};
+
+use crate::{FilePosition, NavigationTarget, RangeInfo};
+
+// Feature: Go to Declaration
+//
+// Navigates to the declaration of an identifier.
+//
+// This is currently the same as `Go to Definition` with the exception of outline modules where it
+// will navigate to the `mod name;` item declaration.
+pub(crate) fn goto_declaration(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(position.file_id).syntax().clone();
+ let original_token = file
+ .token_at_offset(position.offset)
+ .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
+ let range = original_token.text_range();
+ let info: Vec<NavigationTarget> = sema
+ .descend_into_macros(original_token)
+ .iter()
+ .filter_map(|token| {
+ let parent = token.parent()?;
+ let def = match_ast! {
+ match parent {
+ ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? {
+ NameRefClass::Definition(it) => Some(it),
+ _ => None
+ },
+ ast::Name(name) => match NameClass::classify(&sema, &name)? {
+ NameClass::Definition(it) => Some(it),
+ _ => None
+ },
+ _ => None
+ }
+ };
+ match def? {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(db, module))
+ }
+ _ => None,
+ }
+ })
+ .collect();
+
+ Some(RangeInfo::new(range, info))
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis
+ .goto_declaration(position)
+ .unwrap()
+ .expect("no declaration or definition found")
+ .info;
+ if navs.is_empty() {
+ panic!("unresolved reference")
+ }
+
+ let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
+ let navs = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected = expected
+ .into_iter()
+ .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ assert_eq!(expected, navs);
+ }
+
+ #[test]
+ fn goto_decl_module_outline() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+ // ^^^
+//- /foo.rs
+use self$0;
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_decl_module_inline() {
+ check(
+ r#"
+mod foo {
+ // ^^^
+ use self$0;
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
new file mode 100644
index 000000000..d9c97751c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -0,0 +1,1634 @@
+use std::{convert::TryInto, mem::discriminant};
+
+use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+use hir::{AsAssocItem, AssocItem, Semantics};
+use ide_db::{
+ base_db::{AnchoredPath, FileId, FileLoader},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T};
+
+// Feature: Go to Definition
+//
+// Navigates to the definition of an identifier.
+//
+// For outline modules, this will navigate to the source file of the module.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[]
+pub(crate) fn goto_definition(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = &Semantics::new(db);
+ let file = sema.parse(position.file_id).syntax().clone();
+ let original_token =
+ pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+ IDENT
+ | INT_NUMBER
+ | LIFETIME_IDENT
+ | T![self]
+ | T![super]
+ | T![crate]
+ | T![Self]
+ | COMMENT => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ return doc_comment.get_definition_with_descend_at(sema, position.offset, |def, _, _| {
+ let nav = def.try_to_nav(db)?;
+ Some(RangeInfo::new(original_token.text_range(), vec![nav]))
+ });
+ }
+ let navs = sema
+ .descend_into_macros(original_token.clone())
+ .into_iter()
+ .filter_map(|token| {
+ let parent = token.parent()?;
+ if let Some(tt) = ast::TokenTree::cast(parent) {
+ if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
+ {
+ return Some(vec![x]);
+ }
+ }
+ Some(
+ IdentClass::classify_token(sema, &token)?
+ .definitions()
+ .into_iter()
+ .flat_map(|def| {
+ try_filter_trait_item_definition(sema, &def)
+ .unwrap_or_else(|| def_to_nav(sema.db, def))
+ })
+ .collect(),
+ )
+ })
+ .flatten()
+ .unique()
+ .collect::<Vec<NavigationTarget>>();
+
+ Some(RangeInfo::new(original_token.text_range(), navs))
+}
+
+fn try_lookup_include_path(
+ sema: &Semantics<'_, RootDatabase>,
+ tt: ast::TokenTree,
+ token: SyntaxToken,
+ file_id: FileId,
+) -> Option<NavigationTarget> {
+ let token = ast::String::cast(token)?;
+ let path = token.value()?.into_owned();
+ let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
+ let name = macro_call.path()?.segment()?.name_ref()?;
+ if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") {
+ return None;
+ }
+ let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
+ let size = sema.db.file_text(file_id).len().try_into().ok()?;
+ Some(NavigationTarget {
+ file_id,
+ full_range: TextRange::new(0.into(), size),
+ name: path.into(),
+ focus_range: None,
+ kind: None,
+ container_name: None,
+ description: None,
+ docs: None,
+ })
+}
+/// Finds the trait definition of an impl'd item, except functions.
+/// e.g.
+/// ```rust
+/// trait A { type a; }
+/// struct S;
+/// impl A for S { type a = i32; } // <-- on this associated type, will get the location of `a` in the trait
+/// ```
+fn try_filter_trait_item_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ def: &Definition,
+) -> Option<Vec<NavigationTarget>> {
+ let db = sema.db;
+ let assoc = def.as_assoc_item(db)?;
+ match assoc {
+ AssocItem::Function(..) => None,
+ AssocItem::Const(..) | AssocItem::TypeAlias(..) => {
+ let imp = match assoc.container(db) {
+ hir::AssocItemContainer::Impl(imp) => imp,
+ _ => return None,
+ };
+ let trait_ = imp.trait_(db)?;
+ let name = def.name(db)?;
+ let discri_value = discriminant(&assoc);
+ trait_
+ .items(db)
+ .iter()
+ .filter(|itm| discriminant(*itm) == discri_value)
+ .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten())
+ .map(|it| vec![it])
+ }
+ }
+}
+
+fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> {
+ def.try_to_nav(db).map(|it| vec![it]).unwrap_or_default()
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
+ if navs.is_empty() {
+ panic!("unresolved reference")
+ }
+
+ let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
+ let navs = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected = expected
+ .into_iter()
+ .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ assert_eq!(expected, navs);
+ }
+
+ fn check_unresolved(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
+
+ assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {:?}", navs)
+ }
+
+ #[test]
+ fn goto_def_if_items_same_name() {
+ check(
+ r#"
+trait Trait {
+ type A;
+ const A: i32;
+ //^
+}
+
+struct T;
+impl Trait for T {
+ type A = i32;
+ const A$0: i32 = -9;
+}"#,
+ );
+ }
+ #[test]
+ fn goto_def_in_mac_call_in_attr_invoc() {
+ check(
+ r#"
+//- proc_macros: identity
+pub struct Struct {
+ // ^^^^^^
+ field: i32,
+}
+
+macro_rules! identity {
+ ($($tt:tt)*) => {$($tt)*};
+}
+
+#[proc_macros::identity]
+fn function() {
+ identity!(Struct$0 { field: 0 });
+}
+
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std$0;
+//- /std/lib.rs crate:std
+// empty
+//^file
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_renamed_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as abc$0;
+//- /std/lib.rs crate:std
+// empty
+//^file
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_in_items() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo$0) }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_at_start_of_item() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+enum E { X($0Foo) }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_resolves_correct_name() {
+ check(
+ r#"
+//- /lib.rs
+use a::Foo;
+mod a;
+mod b;
+enum E { X(Foo$0) }
+
+//- /a.rs
+struct Foo;
+ //^^^
+//- /b.rs
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_module_declaration() {
+ check(
+ r#"
+//- /lib.rs
+mod $0foo;
+
+//- /foo.rs
+// empty
+//^file
+"#,
+ );
+
+ check(
+ r#"
+//- /lib.rs
+mod $0foo;
+
+//- /foo/mod.rs
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros() {
+ check(
+ r#"
+macro_rules! foo { () => { () } }
+ //^^^
+fn bar() {
+ $0foo!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros_from_other_crates() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo;
+fn bar() {
+ $0foo!();
+}
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros_in_use_tree() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo$0;
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macro_defined_fn_with_arg() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! define_fn {
+ ($name:ident) => (fn $name() {})
+}
+
+define_fn!(foo);
+ //^^^
+
+fn bar() {
+ $0foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macro_defined_fn_no_arg() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! define_fn {
+ () => (fn foo() {})
+}
+
+ define_fn!();
+//^^^^^^^^^^^^^
+
+fn bar() {
+ $0foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_works_for_macro_inside_pattern() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! foo {() => {0}}
+ //^^^
+
+fn bar() {
+ match (0,1) {
+ ($0foo!(), _) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_works_for_macro_inside_match_arm_lhs() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! foo {() => {0}}
+ //^^^
+fn bar() {
+ match 0 {
+ $0foo!() => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_use_alias() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo as bar$0;
+
+//- /foo/lib.rs crate:foo
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_use_alias_foo_macro() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo as bar$0;
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_methods() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn frobnicate(&self) { }
+ //^^^^^^^^^^
+}
+
+fn bar(foo: &Foo) {
+ foo.frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_fields() {
+ check(
+ r#"
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar(foo: &Foo) {
+ foo.spam$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_fields() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar() -> Foo {
+ Foo {
+ spam$0: 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_pat_fields() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar(foo: Foo) -> Foo {
+ let Foo { spam$0: _, } = foo
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_fields_macros() {
+ check(
+ r"
+macro_rules! m { () => { 92 };}
+struct Foo { spam: u32 }
+ //^^^^
+
+fn bar() -> Foo {
+ Foo { spam$0: m!() }
+}
+",
+ );
+ }
+
+ #[test]
+ fn goto_for_tuple_fields() {
+ check(
+ r#"
+struct Foo(u32);
+ //^^^
+
+fn bar() {
+ let foo = Foo(0);
+ foo.$00;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_inherent_methods() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn frobnicate() { }
+} //^^^^^^^^^^
+
+fn bar(foo: &Foo) {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_trait_methods_through_traits() {
+ check(
+ r#"
+trait Foo {
+ fn frobnicate();
+} //^^^^^^^^^^
+
+fn bar() {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_trait_methods_through_self() {
+ check(
+ r#"
+struct Foo;
+trait Trait {
+ fn frobnicate();
+} //^^^^^^^^^^
+impl Trait for Foo {}
+
+fn bar() {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_on_self() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ //^^^
+ pub fn new() -> Self {
+ Self$0 {}
+ }
+}
+"#,
+ );
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ //^^^
+ pub fn new() -> Self$0 {
+ Self {}
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+enum Foo { A }
+impl Foo {
+ //^^^
+ pub fn new() -> Self$0 {
+ Foo::A
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+enum Foo { A }
+impl Foo {
+ //^^^
+ pub fn thing(a: &Self$0) {
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_on_self_in_trait_impl() {
+ check(
+ r#"
+struct Foo;
+trait Make {
+ fn new() -> Self;
+}
+impl Make for Foo {
+ //^^^
+ fn new() -> Self {
+ Self$0 {}
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+struct Foo;
+trait Make {
+ fn new() -> Self;
+}
+impl Make for Foo {
+ //^^^
+ fn new() -> Self$0 {
+ Self {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_when_used_on_definition_name_itself() {
+ check(
+ r#"
+struct Foo$0 { value: u32 }
+ //^^^
+ "#,
+ );
+
+ check(
+ r#"
+struct Foo {
+ field$0: string,
+} //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+fn foo_test$0() { }
+ //^^^^^^^^
+"#,
+ );
+
+ check(
+ r#"
+enum Foo$0 { Variant }
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
+enum Foo {
+ Variant1,
+ Variant2$0,
+ //^^^^^^^^
+ Variant3,
+}
+"#,
+ );
+
+ check(
+ r#"
+static INNER$0: &str = "";
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+const INNER$0: &str = "";
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+type Thing$0 = Option<()>;
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+trait Foo$0 { }
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
+mod bar$0 { }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_from_macro() {
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => { $($tt)* }
+}
+fn foo() {}
+ //^^^
+id! {
+ fn bar() {
+ fo$0o();
+ }
+}
+mod confuse_index { fn foo(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_through_format() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! format {
+ ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*)))
+}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+pub mod __export {
+ pub use crate::format_args;
+ fn foo() {} // for index confusion
+}
+fn foo() -> i8 {}
+ //^^^
+fn test() {
+ format!("{}", fo$0o())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_through_included_file() {
+ check(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {}
+
+ include!("foo.rs");
+//^^^^^^^^^^^^^^^^^^^
+
+fn f() {
+ foo$0();
+}
+
+mod confuse_index {
+ pub fn foo() {}
+}
+
+//- /foo.rs
+fn foo() {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_for_type_param() {
+ check(
+ r#"
+struct Foo<T: Clone> { t: $0T }
+ //^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_within_macro() {
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => ($($tt)*)
+}
+
+fn foo() {
+ let x = 1;
+ //^
+ id!({
+ let y = $0x;
+ let z = y;
+ });
+}
+"#,
+ );
+
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => ($($tt)*)
+}
+
+fn foo() {
+ let x = 1;
+ id!({
+ let y = x;
+ //^
+ let z = $0y;
+ });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_local_fn() {
+ check(
+ r#"
+fn main() {
+ fn foo() {
+ let x = 92;
+ //^
+ $0x;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_local_macro() {
+ check(
+ r#"
+fn bar() {
+ macro_rules! foo { () => { () } }
+ //^^^
+ $0foo!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_field_init_shorthand() {
+ check(
+ r#"
+struct Foo { x: i32 }
+ //^
+fn main() {
+ let x = 92;
+ //^
+ Foo { x$0 };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_field() {
+ check(
+ r#"
+enum Foo {
+ Bar { x: i32 }
+ //^
+}
+fn baz(foo: Foo) {
+ match foo {
+ Foo::Bar { x$0 } => x
+ //^
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_pattern_const() {
+ check(
+ r#"
+enum Foo { Bar }
+ //^^^
+impl Foo {
+ fn baz(self) {
+ match self { Self::Bar$0 => {} }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_pattern_record() {
+ check(
+ r#"
+enum Foo { Bar { val: i32 } }
+ //^^^
+impl Foo {
+ fn baz(self) -> i32 {
+ match self { Self::Bar$0 { val } => {} }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_expr_const() {
+ check(
+ r#"
+enum Foo { Bar }
+ //^^^
+impl Foo {
+ fn baz(self) { Self::Bar$0; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_expr_record() {
+ check(
+ r#"
+enum Foo { Bar { val: i32 } }
+ //^^^
+impl Foo {
+ fn baz(self) { Self::Bar$0 {val: 4}; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_type_alias_generic_parameter() {
+ check(
+ r#"
+type Alias<T> = T$0;
+ //^
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_macro_container() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+foo::module$0::mac!();
+
+//- /foo/lib.rs crate:foo
+pub mod module {
+ //^^^^^^
+ #[macro_export]
+ macro_rules! _mac { () => { () } }
+ pub use crate::_mac as mac;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_in_path() {
+ check(
+ r#"
+trait Iterator {
+ type Item;
+ //^^^^
+}
+
+fn f() -> impl Iterator<Item$0 = u8> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_super_assoc_ty_in_path() {
+ check(
+ r#"
+trait Super {
+ type Item;
+ //^^^^
+}
+
+trait Sub: Super {}
+
+fn f() -> impl Sub<Item$0 = u8> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn unknown_assoc_ty() {
+ check_unresolved(
+ r#"
+trait Iterator { type Item; }
+fn f() -> impl Iterator<Invalid$0 = u8> {}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_in_path_multiple() {
+ check(
+ r#"
+trait Iterator {
+ type A;
+ //^
+ type B;
+}
+
+fn f() -> impl Iterator<A$0 = u8, B = ()> {}
+"#,
+ );
+ check(
+ r#"
+trait Iterator {
+ type A;
+ type B;
+ //^
+}
+
+fn f() -> impl Iterator<A = u8, B$0 = ()> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_ufcs() {
+ check(
+ r#"
+trait Iterator {
+ type Item;
+ //^^^^
+}
+
+fn g() -> <() as Iterator<Item$0 = ()>>::Item {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_ufcs_multiple() {
+ check(
+ r#"
+trait Iterator {
+ type A;
+ //^
+ type B;
+}
+
+fn g() -> <() as Iterator<A$0 = (), B = u8>>::B {}
+"#,
+ );
+ check(
+ r#"
+trait Iterator {
+ type A;
+ type B;
+ //^
+}
+
+fn g() -> <() as Iterator<A = (), B$0 = u8>>::A {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_self_param_ty_specified() {
+ check(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn bar(self: &Foo) {
+ //^^^^
+ let foo = sel$0f;
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_self_param_on_decl() {
+ check(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn bar(&self$0) {
+ //^^^^
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_on_decl() {
+ check(
+ r#"
+fn foo<'foobar$0>(_: &'foobar ()) {
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_decl() {
+ check(
+ r#"
+fn foo<'foobar>(_: &'foobar$0 ()) {
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_decl_nested() {
+ check(
+ r#"
+fn foo<'foobar>(_: &'foobar ()) {
+ fn foo<'foobar>(_: &'foobar$0 ()) {}
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_hrtb() {
+ // FIXME: requires the HIR to somehow track these hrtb lifetimes
+ check_unresolved(
+ r#"
+trait Foo<T> {}
+fn foo<T>() where for<'a> T: Foo<&'a$0 (u8, u16)>, {}
+ //^^
+"#,
+ );
+ check_unresolved(
+ r#"
+trait Foo<T> {}
+fn foo<T>() where for<'a$0> T: Foo<&'a (u8, u16)>, {}
+ //^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_lifetime_hrtb_for_type() {
+ // FIXME: requires ForTypes to be implemented
+ check_unresolved(
+ r#"trait Foo<T> {}
+fn foo<T>() where T: for<'a> Foo<&'a$0 (u8, u16)>, {}
+ //^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_label() {
+ check(
+ r#"
+fn foo<'foo>(_: &'foo ()) {
+ 'foo: {
+ //^^^^
+ 'bar: loop {
+ break 'foo$0;
+ }
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_same_file() {
+ check(
+ r#"
+/// Blah, [`bar`](bar) .. [`foo`](foo$0) has [`bar`](bar)
+pub fn bar() { }
+
+/// You might want to see [`std::fs::read()`] too.
+pub fn foo() { }
+ //^^^
+
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_inner() {
+ check(
+ r#"
+//- /main.rs
+mod m;
+struct S;
+ //^
+
+//- /m.rs
+//! [`super::S$0`]
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_incomplete_field() {
+ check(
+ r#"
+struct A { a: u32 }
+ //^
+fn foo() { A { a$0: }; }
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_proc_macro() {
+ check(
+ r#"
+//- /main.rs crate:main deps:mac
+use mac::fn_macro;
+
+fn_macro$0!();
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro]
+fn fn_macro() {}
+ //^^^^^^^^
+ "#,
+ )
+ }
+
+ #[test]
+ fn goto_intra_doc_links() {
+ check(
+ r#"
+
+pub mod theitem {
+ /// This is the item. Cool!
+ pub struct TheItem;
+ //^^^^^^^
+}
+
+/// Gives you a [`TheItem$0`].
+///
+/// [`TheItem`]: theitem::TheItem
+pub fn gimme() -> theitem::TheItem {
+ theitem::TheItem
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_ident_from_pat_macro() {
+ check(
+ r#"
+macro_rules! pat {
+ ($name:ident) => { Enum::Variant1($name) }
+}
+
+enum Enum {
+ Variant1(u8),
+ Variant2,
+}
+
+fn f(e: Enum) {
+ match e {
+ pat!(bind) => {
+ //^^^^
+ bind$0
+ }
+ Enum::Variant2 => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_include() {
+ check(
+ r#"
+//- /main.rs
+fn main() {
+ let str = include_str!("foo.txt$0");
+}
+//- /foo.txt
+// empty
+//^file
+"#,
+ );
+ }
+ #[cfg(test)]
+ mod goto_impl_of_trait_fn {
+ use super::check;
+ #[test]
+ fn cursor_on_impl() {
+ check(
+ r#"
+trait Twait {
+ fn a();
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a$0();
+ //^
+}
+ "#,
+ );
+ }
+ #[test]
+ fn method_call() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self);
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a(&self){};
+ //^
+}
+fn f() {
+ let s = Stwuct;
+ s.a$0();
+}
+ "#,
+ );
+ }
+ #[test]
+ fn path_call() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self);
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a(&self){};
+ //^
+}
+fn f() {
+ let s = Stwuct;
+ Stwuct::a$0(&s);
+}
+ "#,
+ );
+ }
+ #[test]
+ fn where_clause_can_work() {
+ check(
+ r#"
+trait G {
+ fn g(&self);
+}
+trait Bound{}
+trait EA{}
+struct Gen<T>(T);
+impl <T:EA> G for Gen<T> {
+ fn g(&self) {
+ }
+}
+impl <T> G for Gen<T>
+where T : Bound
+{
+ fn g(&self){
+ //^
+ }
+}
+struct A;
+impl Bound for A{}
+fn f() {
+ let gen = Gen::<A>(A);
+ gen.g$0();
+}
+ "#,
+ );
+ }
+ #[test]
+ fn wc_case_is_ok() {
+ check(
+ r#"
+trait G {
+ fn g(&self);
+}
+trait BParent{}
+trait Bound: BParent{}
+struct Gen<T>(T);
+impl <T> G for Gen<T>
+where T : Bound
+{
+ fn g(&self){
+ //^
+ }
+}
+struct A;
+impl Bound for A{}
+fn f() {
+ let gen = Gen::<A>(A);
+ gen.g$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_call_defaulted() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self) {}
+ //^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+}
+fn f() {
+ let s = Stwuct;
+ s.a$0();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn method_call_on_generic() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self) {}
+ //^
+}
+
+fn f<T: Twait>(s: T) {
+ s.a$0();
+}
+ "#,
+ );
+ }
+ }
+
+ #[test]
+ fn goto_def_of_trait_impl_const() {
+ check(
+ r#"
+trait Twait {
+ const NOMS: bool;
+ // ^^^^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ const NOMS$0: bool = true;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_of_trait_impl_type_alias() {
+ check(
+ r#"
+trait Twait {
+ type IsBad;
+ // ^^^^^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ type IsBad$0 = !;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_derive_input() {
+ check(
+ r#"
+ //- minicore:derive
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+ // ^^^^
+ #[derive(Copy$0)]
+ struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+pub macro Copy {}
+ // ^^^^
+#[cfg_attr(feature = "false", derive)]
+#[derive(Copy$0)]
+struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+ // ^^^^
+}
+#[derive(foo::Copy$0)]
+struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ // ^^^
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo$0::Copy)]
+struct Foo;
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_macro_multi() {
+ check(
+ r#"
+struct Foo {
+ foo: ()
+ //^^^
+}
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident(Foo { $ident }: Foo) {}
+ }
+}
+foo!(foo$0);
+ //^^^
+ //^^^
+"#,
+ );
+ check(
+ r#"
+fn bar() {}
+ //^^^
+struct bar;
+ //^^^
+macro_rules! foo {
+ ($ident:ident) => {
+ fn foo() {
+ let _: $ident = $ident;
+ }
+ }
+}
+
+foo!(bar$0);
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
new file mode 100644
index 000000000..04b51c839
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -0,0 +1,344 @@
+use hir::{AsAssocItem, Impl, Semantics};
+use ide_db::{
+ defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, AstNode, SyntaxKind::*, T};
+
+use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
+
+// Feature: Go to Implementation
+//
+// Navigates to the impl blocks of types.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Ctrl+F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif[]
+pub(crate) fn goto_implementation(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+ let syntax = source_file.syntax().clone();
+
+ let original_token =
+ pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind {
+ IDENT | T![self] => 1,
+ _ => 0,
+ })?;
+ let range = original_token.text_range();
+ let navs = sema
+ .descend_into_macros(original_token)
+ .into_iter()
+ .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
+ .filter_map(|node| match &node {
+ ast::NameLike::Name(name) => {
+ NameClass::classify(&sema, name).map(|class| match class {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ })
+ }
+ ast::NameLike::NameRef(name_ref) => {
+ NameRefClass::classify(&sema, name_ref).map(|class| match class {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+ Definition::Local(local_ref)
+ }
+ })
+ }
+ ast::NameLike::Lifetime(_) => None,
+ })
+ .unique()
+ .filter_map(|def| {
+ let navs = match def {
+ Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
+ Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
+ Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
+ Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
+ Definition::Function(f) => {
+ let assoc = f.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
+ }
+ Definition::Const(c) => {
+ let assoc = c.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
+ }
+ _ => return None,
+ };
+ Some(navs)
+ })
+ .flatten()
+ .collect();
+
+ Some(RangeInfo { range, info: navs })
+}
+
+fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<NavigationTarget> {
+ Impl::all_for_type(sema.db, ty).into_iter().filter_map(|imp| imp.try_to_nav(sema.db)).collect()
+}
+
+fn impls_for_trait(
+ sema: &Semantics<'_, RootDatabase>,
+ trait_: hir::Trait,
+) -> Vec<NavigationTarget> {
+ Impl::all_for_trait(sema.db, trait_)
+ .into_iter()
+ .filter_map(|imp| imp.try_to_nav(sema.db))
+ .collect()
+}
+
+fn impls_for_trait_item(
+ sema: &Semantics<'_, RootDatabase>,
+ trait_: hir::Trait,
+ fun_name: hir::Name,
+) -> Vec<NavigationTarget> {
+ Impl::all_for_trait(sema.db, trait_)
+ .into_iter()
+ .filter_map(|imp| {
+ let item = imp.items(sema.db).iter().find_map(|itm| {
+ let itm_name = itm.name(sema.db)?;
+ (itm_name == fun_name).then(|| *itm)
+ })?;
+ item.try_to_nav(sema.db)
+ })
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+
+ let navs = analysis.goto_implementation(position).unwrap().unwrap().info;
+
+ let cmp = |frange: &FileRange| (frange.file_id, frange.range.start());
+
+ let actual = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected =
+ expected.into_iter().map(|(range, _)| range).sorted_by_key(cmp).collect::<Vec<_>>();
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn goto_implementation_works() {
+ check(
+ r#"
+struct Foo$0;
+impl Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_blocks() {
+ check(
+ r#"
+struct Foo$0;
+impl Foo {}
+ //^^^
+impl Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_mods() {
+ check(
+ r#"
+struct Foo$0;
+mod a {
+ impl super::Foo {}
+ //^^^^^^^^^^
+}
+mod b {
+ impl super::Foo {}
+ //^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_files() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo$0;
+mod a;
+mod b;
+//- /a.rs
+impl crate::Foo {}
+ //^^^^^^^^^^
+//- /b.rs
+impl crate::Foo {}
+ //^^^^^^^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_for_trait() {
+ check(
+ r#"
+trait T$0 {}
+struct Foo;
+impl T for Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_for_trait_multiple_files() {
+ check(
+ r#"
+//- /lib.rs
+trait T$0 {};
+struct Foo;
+mod a;
+mod b;
+//- /a.rs
+impl crate::T for crate::Foo {}
+ //^^^^^^^^^^
+//- /b.rs
+impl crate::T for crate::Foo {}
+ //^^^^^^^^^^
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_all_impls() {
+ check(
+ r#"
+//- /lib.rs
+trait T {}
+struct Foo$0;
+impl Foo {}
+ //^^^
+impl T for Foo {}
+ //^^^
+impl T for &Foo {}
+ //^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_to_builtin_derive() {
+ check(
+ r#"
+//- minicore: copy, derive
+ #[derive(Copy)]
+//^^^^^^^^^^^^^^^
+struct Foo$0;
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_type_alias() {
+ check(
+ r#"
+struct Foo;
+
+type Bar$0 = Foo;
+
+impl Foo {}
+ //^^^
+impl Bar {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_adt_generic() {
+ check(
+ r#"
+struct Foo$0<T>;
+
+impl<T> Foo<T> {}
+ //^^^^^^
+impl Foo<str> {}
+ //^^^^^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_builtin() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:core
+fn foo(_: bool$0) {{}}
+//- /libcore.rs crate:core
+#[lang = "bool"]
+impl bool {}
+ //^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_trait_functions() {
+ check(
+ r#"
+trait Tr {
+ fn f$0();
+}
+
+struct S;
+
+impl Tr for S {
+ fn f() {
+ //^
+ println!("Hello, world!");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_trait_assoc_const() {
+ check(
+ r#"
+trait Tr {
+ const C$0: usize;
+}
+
+struct S;
+
+impl Tr for S {
+ const C: usize = 4;
+ //^
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
new file mode 100644
index 000000000..55cdb3200
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
@@ -0,0 +1,296 @@
+use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase};
+use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T};
+
+use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
+
+// Feature: Go to Type Definition
+//
+// Navigates to the type of an identifier.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Go to Type Definition**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif[]
+pub(crate) fn goto_type_definition(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = hir::Semantics::new(db);
+
+ let file: ast::SourceFile = sema.parse(position.file_id);
+ let token: SyntaxToken =
+ pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+ IDENT | INT_NUMBER | T![self] => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+
+ let mut res = Vec::new();
+ let mut push = |def: Definition| {
+ if let Some(nav) = def.try_to_nav(db) {
+ if !res.contains(&nav) {
+ res.push(nav);
+ }
+ }
+ };
+ let range = token.text_range();
+ sema.descend_into_macros(token)
+ .iter()
+ .filter_map(|token| {
+ let ty = sema.token_ancestors_with_macros(token.clone()).find_map(|node| {
+ let ty = match_ast! {
+ match node {
+ ast::Expr(it) => sema.type_of_expr(&it)?.original,
+ ast::Pat(it) => sema.type_of_pat(&it)?.original,
+ ast::SelfParam(it) => sema.type_of_self(&it)?,
+ ast::Type(it) => sema.resolve_type(&it)?,
+ ast::RecordField(it) => sema.to_def(&it).map(|d| d.ty(db.upcast()))?,
+ // can't match on RecordExprField directly as `ast::Expr` will match an iteration too early otherwise
+ ast::NameRef(it) => {
+ if let Some(record_field) = ast::RecordExprField::for_name_ref(&it) {
+ let (_, _, ty) = sema.resolve_record_field(&record_field)?;
+ ty
+ } else {
+ let record_field = ast::RecordPatField::for_field_name_ref(&it)?;
+ sema.resolve_record_pat_field(&record_field)?.ty(db)
+ }
+ },
+ _ => return None,
+ }
+ };
+
+ Some(ty)
+ });
+ ty
+ })
+ .for_each(|ty| {
+ // collect from each `ty` into the `res` result vec
+ let ty = ty.strip_references();
+ ty.walk(db, |t| {
+ if let Some(adt) = t.as_adt() {
+ push(adt.into());
+ } else if let Some(trait_) = t.as_dyn_trait() {
+ push(trait_.into());
+ } else if let Some(traits) = t.as_impl_traits(db) {
+ traits.for_each(|it| push(it.into()));
+ } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
+ push(trait_.into());
+ }
+ });
+ });
+ Some(RangeInfo::new(range, res))
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis.goto_type_definition(position).unwrap().unwrap().info;
+ assert_ne!(navs.len(), 0);
+
+ let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
+ let navs = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected = expected
+ .into_iter()
+ .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ assert_eq!(expected, navs);
+ }
+
+ #[test]
+ fn goto_type_definition_works_simple() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+fn foo() {
+ let f: Foo; f$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_record_expr_field() {
+ check(
+ r#"
+struct Bar;
+ // ^^^
+struct Foo { foo: Bar }
+fn foo() {
+ Foo { foo$0 }
+}
+"#,
+ );
+ check(
+ r#"
+struct Bar;
+ // ^^^
+struct Foo { foo: Bar }
+fn foo() {
+ Foo { foo$0: Bar }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_record_pat_field() {
+ check(
+ r#"
+struct Bar;
+ // ^^^
+struct Foo { foo: Bar }
+fn foo() {
+ let Foo { foo$0 };
+}
+"#,
+ );
+ check(
+ r#"
+struct Bar;
+ // ^^^
+struct Foo { foo: Bar }
+fn foo() {
+ let Foo { foo$0: bar };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_works_simple_ref() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+fn foo() {
+ let f: &Foo; f$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_works_through_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+struct Foo {}
+ //^^^
+id! {
+ fn bar() { let f$0 = Foo {}; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_for_param() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+fn foo($0f: Foo) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_for_tuple_field() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+struct Bar(Foo);
+fn foo() {
+ let bar = Bar(Foo);
+ bar.$00;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_self_param() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+impl Foo {
+ fn f(&self$0) {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_type_fallback() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+impl Foo$0 {}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_struct_field() {
+ check(
+ r#"
+struct Bar;
+ //^^^
+
+struct Foo {
+ bar$0: Bar,
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_struct_field() {
+ check(
+ r#"
+struct Bar;
+ //^^^
+
+enum Foo {
+ Bar {
+ bar$0: Bar
+ },
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_considers_generics() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+struct Bar<T, U>(T, U);
+ //^^^
+struct Baz<T>(T);
+ //^^^
+
+fn foo(x$0: Bar<Baz<Foo>, Baz<usize>) {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
new file mode 100644
index 000000000..f2d7029ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -0,0 +1,1377 @@
+use hir::Semantics;
+use ide_db::{
+ base_db::{FileId, FilePosition},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ search::{FileReference, ReferenceCategory, SearchScope},
+ syntax_helpers::node_ext::{for_each_break_and_continue_expr, for_each_tail_expr, walk_expr},
+ FxHashSet, RootDatabase,
+};
+use syntax::{
+ ast::{self, HasLoopBody},
+ match_ast, AstNode,
+ SyntaxKind::{self, IDENT, INT_NUMBER},
+ SyntaxNode, SyntaxToken, TextRange, T,
+};
+
+use crate::{references, NavigationTarget, TryToNav};
+
+#[derive(PartialEq, Eq, Hash)]
+pub struct HighlightedRange {
+ pub range: TextRange,
+ // FIXME: This needs to be more precise. Reference category makes sense only
+ // for references, but we also have defs. And things like exit points are
+ // neither.
+ pub category: Option<ReferenceCategory>,
+}
+
+#[derive(Default, Clone)]
+pub struct HighlightRelatedConfig {
+ pub references: bool,
+ pub exit_points: bool,
+ pub break_points: bool,
+ pub yield_points: bool,
+}
+
+// Feature: Highlight Related
+//
+// Highlights constructs related to the thing under the cursor:
+//
+// . if on an identifier, highlights all references to that identifier in the current file
+// . if on an `async` or `await` token, highlights all yield points for that async context
+// . if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
+// . if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
+//
+// Note: `?` and `->` do not currently trigger this behavior in the VSCode editor.
+pub(crate) fn highlight_related(
+ sema: &Semantics<'_, RootDatabase>,
+ config: HighlightRelatedConfig,
+ FilePosition { offset, file_id }: FilePosition,
+) -> Option<Vec<HighlightedRange>> {
+ let _p = profile::span("highlight_related");
+ let syntax = sema.parse(file_id).syntax().clone();
+
+ let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
+ T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
+ T![->] => 3,
+ kind if kind.is_keyword() => 2,
+ IDENT | INT_NUMBER => 1,
+ _ => 0,
+ })?;
+ match token.kind() {
+ T![?] if config.exit_points && token.parent().and_then(ast::TryExpr::cast).is_some() => {
+ highlight_exit_points(sema, token)
+ }
+ T![fn] | T![return] | T![->] if config.exit_points => highlight_exit_points(sema, token),
+ T![await] | T![async] if config.yield_points => highlight_yield_points(token),
+ T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => {
+ highlight_break_points(token)
+ }
+ T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
+ highlight_break_points(token)
+ }
+ _ if config.references => highlight_references(sema, &syntax, token, file_id),
+ _ => None,
+ }
+}
+
+fn highlight_references(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+ token: SyntaxToken,
+ file_id: FileId,
+) -> Option<Vec<HighlightedRange>> {
+ let defs = find_defs(sema, token);
+ let usages = defs
+ .iter()
+ .filter_map(|&d| {
+ d.usages(sema)
+ .set_scope(Some(SearchScope::single_file(file_id)))
+ .include_self_refs()
+ .all()
+ .references
+ .remove(&file_id)
+ })
+ .flatten()
+ .map(|FileReference { category: access, range, .. }| HighlightedRange {
+ range,
+ category: access,
+ });
+ let mut res = FxHashSet::default();
+
+ let mut def_to_hl_range = |def| {
+ let hl_range = match def {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ }
+ def => def.try_to_nav(sema.db),
+ }
+ .filter(|decl| decl.file_id == file_id)
+ .and_then(|decl| decl.focus_range)
+ .map(|range| {
+ let category =
+ references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write);
+ HighlightedRange { range, category }
+ });
+ if let Some(hl_range) = hl_range {
+ res.insert(hl_range);
+ }
+ };
+ for &def in &defs {
+ match def {
+ Definition::Local(local) => local
+ .associated_locals(sema.db)
+ .iter()
+ .for_each(|&local| def_to_hl_range(Definition::Local(local))),
+ def => def_to_hl_range(def),
+ }
+ }
+
+ res.extend(usages);
+ if res.is_empty() {
+ None
+ } else {
+ Some(res.into_iter().collect())
+ }
+}
+
+fn highlight_exit_points(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<Vec<HighlightedRange>> {
+ fn hl(
+ sema: &Semantics<'_, RootDatabase>,
+ body: Option<ast::Expr>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights = Vec::new();
+ let body = body?;
+ walk_expr(&body, &mut |expr| match expr {
+ ast::Expr::ReturnExpr(expr) => {
+ if let Some(token) = expr.return_token() {
+ highlights.push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ ast::Expr::TryExpr(try_) => {
+ if let Some(token) = try_.question_mark_token() {
+ highlights.push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) | ast::Expr::MacroExpr(_) => {
+ if sema.type_of_expr(&expr).map_or(false, |ty| ty.original.is_never()) {
+ highlights.push(HighlightedRange {
+ category: None,
+ range: expr.syntax().text_range(),
+ });
+ }
+ }
+ _ => (),
+ });
+ let tail = match body {
+ ast::Expr::BlockExpr(b) => b.tail_expr(),
+ e => Some(e),
+ };
+
+ if let Some(tail) = tail {
+ for_each_tail_expr(&tail, &mut |tail| {
+ let range = match tail {
+ ast::Expr::BreakExpr(b) => b
+ .break_token()
+ .map_or_else(|| tail.syntax().text_range(), |tok| tok.text_range()),
+ _ => tail.syntax().text_range(),
+ };
+ highlights.push(HighlightedRange { category: None, range })
+ });
+ }
+ Some(highlights)
+ }
+ for anc in token.parent_ancestors() {
+ return match_ast! {
+ match anc {
+ ast::Fn(fn_) => hl(sema, fn_.body().map(ast::Expr::BlockExpr)),
+ ast::ClosureExpr(closure) => hl(sema, closure.body()),
+ ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try(_)| ast::BlockModifier::Const(_))) {
+ hl(sema, Some(block_expr.into()))
+ } else {
+ continue;
+ },
+ _ => continue,
+ }
+ };
+ }
+ None
+}
+
+fn highlight_break_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
+ fn hl(
+ cursor_token_kind: SyntaxKind,
+ token: Option<SyntaxToken>,
+ label: Option<ast::Label>,
+ body: Option<ast::StmtList>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights = Vec::new();
+ let range = cover_range(
+ token.map(|tok| tok.text_range()),
+ label.as_ref().map(|it| it.syntax().text_range()),
+ );
+ highlights.extend(range.map(|range| HighlightedRange { category: None, range }));
+ for_each_break_and_continue_expr(label, body, &mut |expr| {
+ let range: Option<TextRange> = match (cursor_token_kind, expr) {
+ (T![for] | T![while] | T![loop] | T![break], ast::Expr::BreakExpr(break_)) => {
+ cover_range(
+ break_.break_token().map(|it| it.text_range()),
+ break_.lifetime().map(|it| it.syntax().text_range()),
+ )
+ }
+ (
+ T![for] | T![while] | T![loop] | T![continue],
+ ast::Expr::ContinueExpr(continue_),
+ ) => cover_range(
+ continue_.continue_token().map(|it| it.text_range()),
+ continue_.lifetime().map(|it| it.syntax().text_range()),
+ ),
+ _ => None,
+ };
+ highlights.extend(range.map(|range| HighlightedRange { category: None, range }));
+ });
+ Some(highlights)
+ }
+ let parent = token.parent()?;
+ let lbl = match_ast! {
+ match parent {
+ ast::BreakExpr(b) => b.lifetime(),
+ ast::ContinueExpr(c) => c.lifetime(),
+ ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()),
+ ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()),
+ ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()),
+ ast::BlockExpr(b) => Some(b.label().and_then(|it| it.lifetime())?),
+ _ => return None,
+ }
+ };
+ let lbl = lbl.as_ref();
+ let label_matches = |def_lbl: Option<ast::Label>| match lbl {
+ Some(lbl) => {
+ Some(lbl.text()) == def_lbl.and_then(|it| it.lifetime()).as_ref().map(|it| it.text())
+ }
+ None => true,
+ };
+ let token_kind = token.kind();
+ for anc in token.parent_ancestors().flat_map(ast::Expr::cast) {
+ return match anc {
+ ast::Expr::LoopExpr(l) if label_matches(l.label()) => hl(
+ token_kind,
+ l.loop_token(),
+ l.label(),
+ l.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ ast::Expr::ForExpr(f) if label_matches(f.label()) => hl(
+ token_kind,
+ f.for_token(),
+ f.label(),
+ f.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ ast::Expr::WhileExpr(w) if label_matches(w.label()) => hl(
+ token_kind,
+ w.while_token(),
+ w.label(),
+ w.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => {
+ hl(token_kind, None, e.label(), e.stmt_list())
+ }
+ _ => continue,
+ };
+ }
+ None
+}
+
+fn highlight_yield_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
+ fn hl(
+ async_token: Option<SyntaxToken>,
+ body: Option<ast::Expr>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights =
+ vec![HighlightedRange { category: None, range: async_token?.text_range() }];
+ if let Some(body) = body {
+ walk_expr(&body, &mut |expr| {
+ if let ast::Expr::AwaitExpr(expr) = expr {
+ if let Some(token) = expr.await_token() {
+ highlights
+ .push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ });
+ }
+ Some(highlights)
+ }
+ for anc in token.parent_ancestors() {
+ return match_ast! {
+ match anc {
+ ast::Fn(fn_) => hl(fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)),
+ ast::BlockExpr(block_expr) => {
+ if block_expr.async_token().is_none() {
+ continue;
+ }
+ hl(block_expr.async_token(), Some(block_expr.into()))
+ },
+ ast::ClosureExpr(closure) => hl(closure.async_token(), closure.body()),
+ _ => continue,
+ }
+ };
+ }
+ None
+}
+
+fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> {
+ match (r0, r1) {
+ (Some(r0), Some(r1)) => Some(r0.cover(r1)),
+ (Some(range), None) => Some(range),
+ (None, Some(range)) => Some(range),
+ (None, None) => None,
+ }
+}
+
+fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
+ sema.descend_into_macros(token)
+ .into_iter()
+ .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
+ .flatten()
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+
+ use super::*;
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ let config = HighlightRelatedConfig {
+ break_points: true,
+ exit_points: true,
+ references: true,
+ yield_points: true,
+ };
+
+ check_with_config(ra_fixture, config);
+ }
+
+ #[track_caller]
+ fn check_with_config(ra_fixture: &str, config: HighlightRelatedConfig) {
+ let (analysis, pos, annotations) = fixture::annotations(ra_fixture);
+
+ let hls = analysis.highlight_related(config, pos).unwrap().unwrap_or_default();
+
+ let mut expected = annotations
+ .into_iter()
+ .map(|(r, access)| (r.range, (!access.is_empty()).then(|| access)))
+ .collect::<Vec<_>>();
+
+ let mut actual = hls
+ .into_iter()
+ .map(|hl| {
+ (
+ hl.range,
+ hl.category.map(|it| {
+ match it {
+ ReferenceCategory::Read => "read",
+ ReferenceCategory::Write => "write",
+ }
+ .to_string()
+ }),
+ )
+ })
+ .collect::<Vec<_>>();
+ actual.sort_by_key(|(range, _)| range.start());
+ expected.sort_by_key(|(range, _)| range.start());
+
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn test_hl_tuple_fields() {
+ check(
+ r#"
+struct Tuple(u32, u32);
+
+fn foo(t: Tuple) {
+ t.0$0;
+ // ^ read
+ t.0;
+ // ^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo$0;
+ // ^^^
+//- /foo.rs
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_self_in_crate_root() {
+ check(
+ r#"
+use crate$0;
+ //^^^^^
+use self;
+ //^^^^
+mod __ {
+ use super;
+ //^^^^^
+}
+"#,
+ );
+ check(
+ r#"
+//- /main.rs crate:main deps:lib
+use lib$0;
+ //^^^
+//- /lib.rs crate:lib
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_self_in_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+use self$0;
+ // ^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_local() {
+ check(
+ r#"
+fn foo() {
+ let mut bar = 3;
+ // ^^^ write
+ bar$0;
+ // ^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_local_in_attr() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn foo() {
+ let mut bar = 3;
+ // ^^^ write
+ bar$0;
+ // ^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_multi_macro_usage() {
+ check(
+ r#"
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident() -> $ident { loop {} }
+ struct $ident;
+ }
+}
+
+foo!(bar$0);
+ // ^^^
+fn foo() {
+ let bar: bar = bar();
+ // ^^^
+ // ^^^
+}
+"#,
+ );
+ check(
+ r#"
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident() -> $ident { loop {} }
+ struct $ident;
+ }
+}
+
+foo!(bar);
+ // ^^^
+fn foo() {
+ let bar: bar$0 = bar();
+ // ^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_points() {
+ check(
+ r#"
+pub async fn foo() {
+ // ^^^^^
+ let x = foo()
+ .await$0
+ // ^^^^^
+ .await;
+ // ^^^^^
+ || { 0.await };
+ (async { 0.await }).await
+ // ^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_points2() {
+ check(
+ r#"
+pub async$0 fn foo() {
+ // ^^^^^
+ let x = foo()
+ .await
+ // ^^^^^
+ .await;
+ // ^^^^^
+ || { 0.await };
+ (async { 0.await }).await
+ // ^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_nested_fn() {
+ check(
+ r#"
+async fn foo() {
+ async fn foo2() {
+ // ^^^^^
+ async fn foo3() {
+ 0.await
+ }
+ 0.await$0
+ // ^^^^^
+ }
+ 0.await
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_nested_async_blocks() {
+ check(
+ r#"
+async fn foo() {
+ (async {
+ // ^^^^^
+ (async {
+ 0.await
+ }).await$0 }
+ // ^^^^^
+ ).await;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points() {
+ check(
+ r#"
+fn foo() -> u32 {
+ if true {
+ return$0 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points2() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ if true {
+ return 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points3() {
+ check(
+ r#"
+fn$0 foo() -> u32 {
+ if true {
+ return 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_prefer_ref_over_tail_exit() {
+ check(
+ r#"
+fn foo() -> u32 {
+// ^^^
+ if true {
+ return 0;
+ }
+
+ 0?;
+
+ foo$0()
+ // ^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_never_call_is_exit_point() {
+ check(
+ r#"
+struct Never;
+impl Never {
+ fn never(self) -> ! { loop {} }
+}
+macro_rules! never {
+ () => { never() }
+}
+fn never() -> ! { loop {} }
+fn foo() ->$0 u32 {
+ never();
+ // ^^^^^^^
+ never!();
+ // ^^^^^^^^
+
+ Never.never();
+ // ^^^^^^^^^^^^^
+
+ 0
+ // ^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_inner_tail_exit_points() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ if true {
+ unsafe {
+ return 5;
+ // ^^^^^^
+ 5
+ // ^
+ }
+ } else if false {
+ 0
+ // ^
+ } else {
+ match 5 {
+ 6 => 100,
+ // ^^^
+ 7 => loop {
+ break 5;
+ // ^^^^^
+ }
+ 8 => 'a: loop {
+ 'b: loop {
+ break 'a 5;
+ // ^^^^^
+ break 'b 5;
+ break 5;
+ };
+ }
+ //
+ _ => 500,
+ // ^^^
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_inner_tail_exit_points_labeled_block() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ 'foo: {
+ break 'foo 0;
+ // ^^^^^
+ loop {
+ break;
+ break 'foo 0;
+ // ^^^^^
+ }
+ 0
+ // ^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_loop() {
+ check(
+ r#"
+fn foo() {
+ 'outer: loop {
+ // ^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: loop {
+ break;
+ 'innermost: loop {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_loop2() {
+ check(
+ r#"
+fn foo() {
+ 'outer: loop {
+ break;
+ 'inner: loop {
+ // ^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'innermost: loop {
+ break 'outer;
+ break 'inner;
+ // ^^^^^^^^^^^^
+ }
+ break 'outer;
+ break$0;
+ // ^^^^^
+ }
+ break;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_for() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: for _ in () {
+ break;
+ 'innermost: for _ in () {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_for_but_not_continue() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ continue;
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'inner;
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ continue 'outer;
+ break;
+ continue;
+ }
+ break;
+ // ^^^^^
+ continue;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_continue_for_but_not_break() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ continue;
+ // ^^^^^^^^
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break 'outer;
+ continue 'inner;
+ break 'inner;
+ }
+ break 'outer;
+ continue$0 'outer;
+ // ^^^^^^^^^^^^^^^
+ break;
+ continue;
+ }
+ break;
+ continue;
+ // ^^^^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_and_continue() {
+ check(
+ r#"
+fn foo() {
+ 'outer: fo$0r _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ continue;
+ // ^^^^^^^^
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'inner;
+ break 'inner;
+ }
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break;
+ continue;
+ }
+ break;
+ // ^^^^^
+ continue;
+ // ^^^^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_while() {
+ check(
+ r#"
+fn foo() {
+ 'outer: while true {
+ // ^^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: while true {
+ break;
+ 'innermost: while true {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_labeled_block() {
+ check(
+ r#"
+fn foo() {
+ 'outer: {
+ // ^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: {
+ break;
+ 'innermost: {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_unlabeled_loop() {
+ check(
+ r#"
+fn foo() {
+ loop {
+ // ^^^^
+ break$0;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_unlabeled_block_in_loop() {
+ check(
+ r#"
+fn foo() {
+ loop {
+ // ^^^^
+ {
+ break$0;
+ // ^^^^^
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_field_shorthand() {
+ check(
+ r#"
+struct Struct { field: u32 }
+ //^^^^^
+fn function(field: u32) {
+ //^^^^^
+ Struct { field$0 }
+ //^^^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_break() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+
+ loop {
+ break;
+ }
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x = 5;
+ let y = x * 2;
+
+ loop$0 {
+// ^^^^
+ break;
+// ^^^^^
+ }
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_yield() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+async fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+
+ 0.await;
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+ async fn foo() {
+// ^^^^^
+ let x = 5;
+ let y = x * 2;
+
+ 0.await$0;
+// ^^^^^
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_exit() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() -> i32 {
+ let x$0 = 5;
+ let y = x * 2;
+
+ if true {
+ return y;
+ }
+
+ 0?
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+fn foo() ->$0 i32 {
+ let x = 5;
+ let y = x * 2;
+
+ if true {
+ return y;
+// ^^^^^^
+ }
+
+ 0?
+// ^
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_break() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: false,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ loop {
+ break$0;
+ }
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_yield() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: true,
+ exit_points: true,
+ yield_points: false,
+ };
+
+ check_with_config(
+ r#"
+async$0 fn foo() {
+ 0.await;
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_exit() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: true,
+ exit_points: false,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() ->$0 i32 {
+ if true {
+ return -1;
+ }
+
+ 42
+}"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_multi_local() {
+ check(
+ r#"
+fn foo((
+ foo$0
+ //^^^
+ | foo
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ check(
+ r#"
+fn foo((
+ foo
+ //^^^
+ | foo$0
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ check(
+ r#"
+fn foo((
+ foo
+ //^^^
+ | foo
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo$0;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_trait_impl_methods() {
+ check(
+ r#"
+trait Trait {
+ fn func$0(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func();
+ //^^^^
+}
+"#,
+ );
+ check(
+ r#"
+trait Trait {
+ fn func(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func$0(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func();
+ //^^^^
+}
+"#,
+ );
+ check(
+ r#"
+trait Trait {
+ fn func(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func$0();
+ //^^^^
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
new file mode 100644
index 000000000..59c97f2dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -0,0 +1,390 @@
+mod render;
+
+#[cfg(test)]
+mod tests;
+
+use std::iter;
+
+use either::Either;
+use hir::{HasSource, Semantics};
+use ide_db::{
+ base_db::FileRange,
+ defs::{Definition, IdentClass},
+ famous_defs::FamousDefs,
+ helpers::pick_best_token,
+ FxIndexSet, RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T};
+
+use crate::{
+ doc_links::token_as_doc_comment,
+ markup::Markup,
+ runnables::{runnable_fn, runnable_mod},
+ FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
+};
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct HoverConfig {
+    /// Whether links in the rendered markup are kept (rewritten) or stripped.
+    pub links_in_hover: bool,
+    /// Requested documentation format; `None` disables documentation entirely.
+    pub documentation: Option<HoverDocFormat>,
+}
+
+impl HoverConfig {
+    /// True iff documentation is requested in Markdown form.
+    fn markdown(&self) -> bool {
+        matches!(self.documentation, Some(HoverDocFormat::Markdown))
+    }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum HoverDocFormat {
+    Markdown,
+    PlainText,
+}
+
+/// An extra action offered next to the hover text (run, go to impl/refs/type).
+#[derive(Debug, Clone)]
+pub enum HoverAction {
+    Runnable(Runnable),
+    Implementation(FilePosition),
+    Reference(FilePosition),
+    GoToType(Vec<HoverGotoTypeData>),
+}
+
+impl HoverAction {
+    /// Builds a `GoToType` action from the given defs, silently dropping any
+    /// def that has no containing module or no navigation target.
+    fn goto_type_from_targets(db: &RootDatabase, targets: Vec<hir::ModuleDef>) -> Self {
+        let targets = targets
+            .into_iter()
+            .filter_map(|it| {
+                Some(HoverGotoTypeData {
+                    mod_path: render::path(
+                        db,
+                        it.module(db)?,
+                        it.name(db).map(|name| name.to_string()),
+                    ),
+                    nav: it.try_to_nav(db)?,
+                })
+            })
+            .collect();
+        HoverAction::GoToType(targets)
+    }
+}
+
+/// A single "go to type" target: display path plus navigation target.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct HoverGotoTypeData {
+    pub mod_path: String,
+    pub nav: NavigationTarget,
+}
+
+/// Contains the results when hovering over an item
+#[derive(Debug, Default)]
+pub struct HoverResult {
+    /// The rendered hover text (markdown or plain text, see `HoverConfig`).
+    pub markup: Markup,
+    /// Additional client-side actions (runnables, go-to-type, ...).
+    pub actions: Vec<HoverAction>,
+}
+
+// Feature: Hover
+//
+// Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code.
+// Focusing is usually hovering with a mouse, but can also be triggered with a shortcut.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[]
+/// Entry point: computes the hover for the given position or selection.
+pub(crate) fn hover(
+    db: &RootDatabase,
+    FileRange { file_id, range }: FileRange,
+    config: &HoverConfig,
+) -> Option<RangeInfo<HoverResult>> {
+    let sema = &hir::Semantics::new(db);
+    let file = sema.parse(file_id).syntax().clone();
+
+    // A non-empty selection means "hover over a range": only type info applies.
+    if !range.is_empty() {
+        return hover_ranged(&file, range, sema, config);
+    }
+    let offset = range.start();
+
+    // Prefer identifier-like tokens over parens, parens over other punctuation;
+    // never pick trivia.
+    let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+        IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 3,
+        T!['('] | T![')'] => 2,
+        kind if kind.is_trivia() => 0,
+        _ => 1,
+    })?;
+
+    // Inside a doc comment, hover resolves the intra-doc link under the cursor.
+    if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+        cov_mark::hit!(no_highlight_on_comment_hover);
+        return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| {
+            let res = hover_for_definition(sema, file_id, def, &node, config)?;
+            Some(RangeInfo::new(range, res))
+        });
+    }
+
+    // In attributes only descend once (kind preference); elsewhere descend into
+    // every macro expansion that keeps the token text.
+    let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
+    let descended = if in_attr {
+        [sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
+    } else {
+        sema.descend_into_macros_with_same_text(original_token.clone())
+    };
+
+    // FIXME: Definition should include known lints and the like instead of having this special case here
+    let hovered_lint = descended.iter().find_map(|token| {
+        let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
+        render::try_for_lint(&attr, token)
+    });
+    if let Some(res) = hovered_lint {
+        return Some(RangeInfo::new(original_token.text_range(), res));
+    }
+
+    // Merge the hovers of every distinct definition the token may resolve to,
+    // joining markups with a `---` separator.
+    let result = descended
+        .iter()
+        .filter_map(|token| {
+            let node = token.parent()?;
+            let class = IdentClass::classify_token(sema, token)?;
+            Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
+        })
+        .flatten()
+        .unique_by(|&(def, _)| def)
+        .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config))
+        .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| {
+            acc.actions.extend(actions);
+            acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup));
+            acc
+        });
+
+    if result.is_none() {
+        // fallbacks, show keywords or types
+
+        let res = descended.iter().find_map(|token| render::keyword(sema, config, token));
+        if let Some(res) = res {
+            return Some(RangeInfo::new(original_token.text_range(), res));
+        }
+        let res = descended
+            .iter()
+            .find_map(|token| hover_type_fallback(sema, config, token, &original_token));
+        if res.is_some() {
+            return res;
+        }
+    }
+    result.map(|mut res: HoverResult| {
+        res.actions = dedupe_or_merge_hover_actions(res.actions);
+        RangeInfo::new(original_token.text_range(), res)
+    })
+}
+
+/// Renders the hover for a single resolved [`Definition`], attaching the
+/// applicable actions (implementations, references, runnable, go-to-type).
+pub(crate) fn hover_for_definition(
+    sema: &Semantics<'_, RootDatabase>,
+    file_id: FileId,
+    definition: Definition,
+    node: &SyntaxNode,
+    config: &HoverConfig,
+) -> Option<HoverResult> {
+    // `FamousDefs` is only needed to look up std docs for builtin types.
+    let famous_defs = match &definition {
+        Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
+        _ => None,
+    };
+    render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {
+        HoverResult {
+            markup: render::process_markup(sema.db, definition, &markup, config),
+            actions: show_implementations_action(sema.db, definition)
+                .into_iter()
+                .chain(show_fn_references_action(sema.db, definition))
+                .chain(runnable_action(sema, definition, file_id))
+                .chain(goto_type_action_for_def(sema.db, definition))
+                .collect(),
+        }
+    })
+}
+
+/// Hover for a non-empty selection: finds the smallest expression or pattern
+/// covering the range and shows its type (with special renderings for `?` and
+/// prefix `*`). The reported range is the covering node's full range.
+fn hover_ranged(
+    file: &SyntaxNode,
+    range: syntax::TextRange,
+    sema: &Semantics<'_, RootDatabase>,
+    config: &HoverConfig,
+) -> Option<RangeInfo<HoverResult>> {
+    // FIXME: make this work in attributes
+    let expr_or_pat = file.covering_element(range).ancestors().find_map(|it| {
+        match_ast! {
+            match it {
+                ast::Expr(expr) => Some(Either::Left(expr)),
+                ast::Pat(pat) => Some(Either::Right(pat)),
+                _ => None,
+            }
+        }
+    })?;
+    let res = match &expr_or_pat {
+        Either::Left(ast::Expr::TryExpr(try_expr)) => render::try_expr(sema, config, try_expr),
+        Either::Left(ast::Expr::PrefixExpr(prefix_expr))
+            if prefix_expr.op_kind() == Some(ast::UnaryOp::Deref) =>
+        {
+            render::deref_expr(sema, config, prefix_expr)
+        }
+        _ => None,
+    };
+    // Fall back to plain type info when no special rendering applied.
+    let res = res.or_else(|| render::type_info(sema, config, &expr_or_pat));
+    res.map(|it| {
+        let range = match expr_or_pat {
+            Either::Left(it) => it.syntax().text_range(),
+            Either::Right(it) => it.syntax().text_range(),
+        };
+        RangeInfo::new(range, it)
+    })
+}
+
+/// Last-resort hover: walks up from the token to the nearest expression or
+/// pattern (never crossing an item boundary) and shows its type.
+fn hover_type_fallback(
+    sema: &Semantics<'_, RootDatabase>,
+    config: &HoverConfig,
+    token: &SyntaxToken,
+    original_token: &SyntaxToken,
+) -> Option<RangeInfo<HoverResult>> {
+    let node = token
+        .parent_ancestors()
+        .take_while(|it| !ast::Item::can_cast(it.kind()))
+        .find(|n| ast::Expr::can_cast(n.kind()) || ast::Pat::can_cast(n.kind()))?;
+
+    let expr_or_pat = match_ast! {
+        match node {
+            ast::Expr(it) => Either::Left(it),
+            ast::Pat(it) => Either::Right(it),
+            // If this node is a MACRO_CALL, it means that `descend_into_macros_many` failed to resolve.
+            // (e.g expanding a builtin macro). So we give up here.
+            ast::MacroCall(_it) => return None,
+            _ => return None,
+        }
+    };
+
+    let res = render::type_info(sema, config, &expr_or_pat)?;
+    // Map the (possibly macro-expanded) node back to the original file; if that
+    // fails, fall back to the original token's range.
+    let range = sema
+        .original_range_opt(&node)
+        .map(|frange| frange.range)
+        .unwrap_or_else(|| original_token.text_range());
+    Some(RangeInfo::new(range, res))
+}
+
+/// "Go to implementations" action, offered for traits, ADTs and `Self` types.
+fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+    fn to_action(nav_target: NavigationTarget) -> HoverAction {
+        HoverAction::Implementation(FilePosition {
+            file_id: nav_target.file_id,
+            offset: nav_target.focus_or_full_range().start(),
+        })
+    }
+
+    let adt = match def {
+        Definition::Trait(it) => return it.try_to_nav(db).map(to_action),
+        Definition::Adt(it) => Some(it),
+        Definition::SelfType(it) => it.self_ty(db).as_adt(),
+        _ => None,
+    }?;
+    adt.try_to_nav(db).map(to_action)
+}
+
+/// "Find references" action, offered only for functions.
+fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+    match def {
+        Definition::Function(it) => it.try_to_nav(db).map(|nav_target| {
+            HoverAction::Reference(FilePosition {
+                file_id: nav_target.file_id,
+                offset: nav_target.focus_or_full_range().start(),
+            })
+        }),
+        _ => None,
+    }
+}
+
+/// "Run" action for modules and functions (tests, binaries, ...).
+fn runnable_action(
+    sema: &hir::Semantics<'_, RootDatabase>,
+    def: Definition,
+    file_id: FileId,
+) -> Option<HoverAction> {
+    match def {
+        Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable),
+        Definition::Function(func) => {
+            // A function whose source lives in another file is macro-generated
+            // relative to the hovered file; don't offer to run it.
+            let src = func.source(sema.db)?;
+            if src.file_id != file_id.into() {
+                cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment);
+                cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr);
+                return None;
+            }
+
+            runnable_fn(sema, func).map(HoverAction::Runnable)
+        }
+        _ => None,
+    }
+}
+
+/// "Go to type" action: collects the types reachable from the definition
+/// (trait bounds for type params; otherwise the def's own type, walked for
+/// ADTs/traits), de-duplicated while preserving discovery order.
+fn goto_type_action_for_def(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+    let mut targets: Vec<hir::ModuleDef> = Vec::new();
+    let mut push_new_def = |item: hir::ModuleDef| {
+        if !targets.contains(&item) {
+            targets.push(item);
+        }
+    };
+
+    if let Definition::GenericParam(hir::GenericParam::TypeParam(it)) = def {
+        it.trait_bounds(db).into_iter().for_each(|it| push_new_def(it.into()));
+    } else {
+        let ty = match def {
+            Definition::Local(it) => it.ty(db),
+            Definition::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db),
+            Definition::Field(field) => field.ty(db),
+            Definition::Function(function) => function.ret_type(db),
+            _ => return None,
+        };
+
+        walk_and_push_ty(db, &ty, &mut push_new_def);
+    }
+
+    Some(HoverAction::goto_type_from_targets(db, targets))
+}
+
+/// Walks a type and reports every ADT, `dyn` trait, `impl Trait` bound and
+/// associated-type parent trait it mentions to `push_new_def`.
+fn walk_and_push_ty(
+    db: &RootDatabase,
+    ty: &hir::Type,
+    push_new_def: &mut dyn FnMut(hir::ModuleDef),
+) {
+    ty.walk(db, |t| {
+        if let Some(adt) = t.as_adt() {
+            push_new_def(adt.into());
+        } else if let Some(trait_) = t.as_dyn_trait() {
+            push_new_def(trait_.into());
+        } else if let Some(traits) = t.as_impl_traits(db) {
+            traits.for_each(|it| push_new_def(it.into()));
+        } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
+            push_new_def(trait_.into());
+        }
+    });
+}
+
+/// Collapses duplicate actions from merged hovers: keeps at most one
+/// `Implementation`, one `Reference` and one `Runnable` action, and merges all
+/// `GoToType` targets into a single de-duplicated action appended last.
+fn dedupe_or_merge_hover_actions(actions: Vec<HoverAction>) -> Vec<HoverAction> {
+    let mut deduped_actions = Vec::with_capacity(actions.len());
+    let mut go_to_type_targets = FxIndexSet::default();
+
+    let mut seen_implementation = false;
+    let mut seen_reference = false;
+    let mut seen_runnable = false;
+    for action in actions {
+        match action {
+            HoverAction::GoToType(targets) => {
+                go_to_type_targets.extend(targets);
+            }
+            HoverAction::Implementation(..) => {
+                if !seen_implementation {
+                    seen_implementation = true;
+                    deduped_actions.push(action);
+                }
+            }
+            HoverAction::Reference(..) => {
+                if !seen_reference {
+                    seen_reference = true;
+                    deduped_actions.push(action);
+                }
+            }
+            HoverAction::Runnable(..) => {
+                if !seen_runnable {
+                    seen_runnable = true;
+                    deduped_actions.push(action);
+                }
+            }
+        };
+    }
+
+    if !go_to_type_targets.is_empty() {
+        deduped_actions.push(HoverAction::GoToType(go_to_type_targets.into_iter().collect()));
+    }
+
+    deduped_actions
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
new file mode 100644
index 000000000..6c50a4e6a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -0,0 +1,563 @@
+//! Logic for rendering the different hover messages
+use std::fmt::Display;
+
+use either::Either;
+use hir::{AsAssocItem, AttributeTemplate, HasAttrs, HirDisplay, Semantics, TypeInfo};
+use ide_db::{
+ base_db::SourceDatabase,
+ defs::Definition,
+ famous_defs::FamousDefs,
+ generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ algo, ast, match_ast, AstNode, Direction,
+ SyntaxKind::{LET_EXPR, LET_STMT},
+ SyntaxToken, T,
+};
+
+use crate::{
+ doc_links::{remove_links, rewrite_links},
+ hover::walk_and_push_ty,
+ markdown_remove::remove_markdown,
+ HoverAction, HoverConfig, HoverResult, Markup,
+};
+
+/// Renders the type of an expression or pattern, showing the coercion when the
+/// adjusted type differs from the original.
+pub(super) fn type_info(
+    sema: &Semantics<'_, RootDatabase>,
+    config: &HoverConfig,
+    expr_or_pat: &Either<ast::Expr, ast::Pat>,
+) -> Option<HoverResult> {
+    let TypeInfo { original, adjusted } = match expr_or_pat {
+        Either::Left(expr) => sema.type_of_expr(expr)?,
+        Either::Right(pat) => sema.type_of_pat(pat)?,
+    };
+
+    let mut res = HoverResult::default();
+    let mut targets: Vec<hir::ModuleDef> = Vec::new();
+    let mut push_new_def = |item: hir::ModuleDef| {
+        if !targets.contains(&item) {
+            targets.push(item);
+        }
+    };
+    walk_and_push_ty(sema.db, &original, &mut push_new_def);
+
+    res.markup = if let Some(adjusted_ty) = adjusted {
+        walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def);
+        let original = original.display(sema.db).to_string();
+        let adjusted = adjusted_ty.display(sema.db).to_string();
+        // Right-align both type strings. `{:>pad$}` is a *minimum* width, so
+        // padding `original` by the label-length difference plus the max type
+        // length lines its right edge up with `adjusted`'s.
+        let static_text_diff_len = "Coerced to: ".len() - "Type: ".len();
+        format!(
+            "{bt_start}Type: {:>apad$}\nCoerced to: {:>opad$}\n{bt_end}",
+            original,
+            adjusted,
+            apad = static_text_diff_len + adjusted.len().max(original.len()),
+            opad = original.len(),
+            bt_start = if config.markdown() { "```text\n" } else { "" },
+            bt_end = if config.markdown() { "```\n" } else { "" }
+        )
+        .into()
+    } else {
+        if config.markdown() {
+            Markup::fenced_block(&original.display(sema.db))
+        } else {
+            original.display(sema.db).to_string().into()
+        }
+    };
+    res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
+    Some(res)
+}
+
+/// Hover for a `expr?` try-expression: shows the type being propagated and the
+/// type the enclosing body yields, or `None` when the two coincide.
+pub(super) fn try_expr(
+    sema: &Semantics<'_, RootDatabase>,
+    config: &HoverConfig,
+    try_expr: &ast::TryExpr,
+) -> Option<HoverResult> {
+    let inner_ty = sema.type_of_expr(&try_expr.expr()?)?.original;
+    // Walk outwards to the nearest construct that determines where `?`
+    // propagates to: a fn, a closure, or an async/try/const block. Any other
+    // item boundary means we can't tell, so bail.
+    let mut ancestors = try_expr.syntax().ancestors();
+    let mut body_ty = loop {
+        let next = ancestors.next()?;
+        break match_ast! {
+            match next {
+                ast::Fn(fn_) => sema.to_def(&fn_)?.ret_type(sema.db),
+                ast::Item(__) => return None,
+                ast::ClosureExpr(closure) => sema.type_of_expr(&closure.body()?)?.original,
+                ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try(_)| ast::BlockModifier::Const(_))) {
+                    sema.type_of_expr(&block_expr.into())?.original
+                } else {
+                    continue;
+                },
+                _ => continue,
+            }
+        };
+    };
+
+    if inner_ty == body_ty {
+        return None;
+    }
+
+    let mut inner_ty = inner_ty;
+    let mut s = "Try Target".to_owned();
+
+    let adts = inner_ty.as_adt().zip(body_ty.as_adt());
+    if let Some((hir::Adt::Enum(inner), hir::Adt::Enum(body))) = adts {
+        let famous_defs = FamousDefs(sema, sema.scope(try_expr.syntax())?.krate());
+        // special case for two options, there is no value in showing them
+        if let Some(option_enum) = famous_defs.core_option_Option() {
+            if inner == option_enum && body == option_enum {
+                cov_mark::hit!(hover_try_expr_opt_opt);
+                return None;
+            }
+        }
+
+        // special case two results to show the error variants only
+        if let Some(result_enum) = famous_defs.core_result_Result() {
+            if inner == result_enum && body == result_enum {
+                let error_type_args =
+                    inner_ty.type_arguments().nth(1).zip(body_ty.type_arguments().nth(1));
+                if let Some((inner, body)) = error_type_args {
+                    inner_ty = inner;
+                    body_ty = body;
+                    s = "Try Error".to_owned();
+                }
+            }
+        }
+    }
+
+    let mut res = HoverResult::default();
+
+    let mut targets: Vec<hir::ModuleDef> = Vec::new();
+    let mut push_new_def = |item: hir::ModuleDef| {
+        if !targets.contains(&item) {
+            targets.push(item);
+        }
+    };
+    walk_and_push_ty(sema.db, &inner_ty, &mut push_new_def);
+    walk_and_push_ty(sema.db, &body_ty, &mut push_new_def);
+    res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
+
+    let inner_ty = inner_ty.display(sema.db).to_string();
+    let body_ty = body_ty.display(sema.db).to_string();
+    let ty_len_max = inner_ty.len().max(body_ty.len());
+
+    // Pad so that the two type columns right-align; the label (`s`) length
+    // varies, so the pads compensate in whichever direction is needed.
+    let l = "Propagated as: ".len() - " Type: ".len();
+    let static_text_len_diff = l as isize - s.len() as isize;
+    let tpad = static_text_len_diff.max(0) as usize;
+    let ppad = static_text_len_diff.min(0).abs() as usize;
+
+    res.markup = format!(
+        "{bt_start}{} Type: {:>pad0$}\nPropagated as: {:>pad1$}\n{bt_end}",
+        s,
+        inner_ty,
+        body_ty,
+        pad0 = ty_len_max + tpad,
+        pad1 = ty_len_max + ppad,
+        bt_start = if config.markdown() { "```text\n" } else { "" },
+        bt_end = if config.markdown() { "```\n" } else { "" }
+    )
+    .into();
+    Some(res)
+}
+
+/// Hover for a prefix `*expr`: shows the dereferenced-from type, the resulting
+/// type, and (when present) the coerced type, right-aligned in one block.
+pub(super) fn deref_expr(
+    sema: &Semantics<'_, RootDatabase>,
+    config: &HoverConfig,
+    deref_expr: &ast::PrefixExpr,
+) -> Option<HoverResult> {
+    let inner_ty = sema.type_of_expr(&deref_expr.expr()?)?.original;
+    let TypeInfo { original, adjusted } =
+        sema.type_of_expr(&ast::Expr::from(deref_expr.clone()))?;
+
+    let mut res = HoverResult::default();
+    let mut targets: Vec<hir::ModuleDef> = Vec::new();
+    let mut push_new_def = |item: hir::ModuleDef| {
+        if !targets.contains(&item) {
+            targets.push(item);
+        }
+    };
+    walk_and_push_ty(sema.db, &inner_ty, &mut push_new_def);
+    walk_and_push_ty(sema.db, &original, &mut push_new_def);
+
+    res.markup = if let Some(adjusted_ty) = adjusted {
+        // Three-line variant: include the coercion target.
+        walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def);
+        let original = original.display(sema.db).to_string();
+        let adjusted = adjusted_ty.display(sema.db).to_string();
+        let inner = inner_ty.display(sema.db).to_string();
+        let type_len = "To type: ".len();
+        let coerced_len = "Coerced to: ".len();
+        let deref_len = "Dereferenced from: ".len();
+        let max_len = (original.len() + type_len)
+            .max(adjusted.len() + coerced_len)
+            .max(inner.len() + deref_len);
+        format!(
+            "{bt_start}Dereferenced from: {:>ipad$}\nTo type: {:>apad$}\nCoerced to: {:>opad$}\n{bt_end}",
+            inner,
+            original,
+            adjusted,
+            ipad = max_len - deref_len,
+            apad = max_len - type_len,
+            opad = max_len - coerced_len,
+            bt_start = if config.markdown() { "```text\n" } else { "" },
+            bt_end = if config.markdown() { "```\n" } else { "" }
+        )
+        .into()
+    } else {
+        // Two-line variant: no coercion happened.
+        let original = original.display(sema.db).to_string();
+        let inner = inner_ty.display(sema.db).to_string();
+        let type_len = "To type: ".len();
+        let deref_len = "Dereferenced from: ".len();
+        let max_len = (original.len() + type_len).max(inner.len() + deref_len);
+        format!(
+            "{bt_start}Dereferenced from: {:>ipad$}\nTo type: {:>apad$}\n{bt_end}",
+            inner,
+            original,
+            ipad = max_len - deref_len,
+            apad = max_len - type_len,
+            bt_start = if config.markdown() { "```text\n" } else { "" },
+            bt_end = if config.markdown() { "```\n" } else { "" }
+        )
+        .into()
+    };
+    res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
+
+    Some(res)
+}
+
+/// Hover for a bare keyword: shows the docs of the matching `std` keyword
+/// module (e.g. `match_keyword`) plus any type info from `keyword_hints`.
+pub(super) fn keyword(
+    sema: &Semantics<'_, RootDatabase>,
+    config: &HoverConfig,
+    token: &SyntaxToken,
+) -> Option<HoverResult> {
+    // Only keywords get this hover, and only when documentation is enabled.
+    // (`is_none()` instead of `!…is_some()` — same check, minimal boolean.)
+    if !token.kind().is_keyword() || config.documentation.is_none() {
+        return None;
+    }
+    let parent = token.parent()?;
+    let famous_defs = FamousDefs(sema, sema.scope(&parent)?.krate());
+
+    let KeywordHint { description, keyword_mod, actions } = keyword_hints(sema, token, parent);
+
+    let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
+    let docs = doc_owner.attrs(sema.db).docs()?;
+    let markup = process_markup(
+        sema.db,
+        Definition::Module(doc_owner),
+        &markup(Some(docs.into()), description, None)?,
+        config,
+    );
+    Some(HoverResult { markup, actions })
+}
+
+/// Hover for a lint/feature name inside `#[allow]`/`#[deny]`/`#[forbid]`/
+/// `#[warn]`/`#[feature]` attributes; returns the lint's description.
+pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option<HoverResult> {
+    let (path, tt) = attr.as_simple_call()?;
+    if !tt.syntax().text_range().contains(token.text_range().start()) {
+        return None;
+    }
+    let (is_clippy, lints) = match &*path {
+        "feature" => (false, FEATURES),
+        "allow" | "deny" | "forbid" | "warn" => {
+            // Detect a `clippy::` prefix by walking back over the two `:`
+            // tokens preceding the lint name.
+            let is_clippy = algo::non_trivia_sibling(token.clone().into(), Direction::Prev)
+                .filter(|t| t.kind() == T![:])
+                .and_then(|t| algo::non_trivia_sibling(t, Direction::Prev))
+                .filter(|t| t.kind() == T![:])
+                .and_then(|t| algo::non_trivia_sibling(t, Direction::Prev))
+                .map_or(false, |t| {
+                    t.kind() == T![ident] && t.into_token().map_or(false, |t| t.text() == "clippy")
+                });
+            if is_clippy {
+                (true, CLIPPY_LINTS)
+            } else {
+                (false, DEFAULT_LINTS)
+            }
+        }
+        _ => return None,
+    };
+
+    // `tmp` only exists to give the `clippy::`-prefixed String a home while we
+    // borrow it alongside the plain `&str` branch.
+    let tmp;
+    let needle = if is_clippy {
+        tmp = format!("clippy::{}", token.text());
+        &tmp
+    } else {
+        &*token.text()
+    };
+
+    // The lint tables are sorted by label, so binary search applies.
+    let lint =
+        lints.binary_search_by_key(&needle, |lint| lint.label).ok().map(|idx| &lints[idx])?;
+    Some(HoverResult {
+        markup: Markup::from(format!("```\n{}\n```\n___\n\n{}", lint.label, lint.description)),
+        ..Default::default()
+    })
+}
+
+/// Post-processes rendered markup per config: strips markdown for plain-text
+/// clients, and either rewrites or removes intra-doc links.
+pub(super) fn process_markup(
+    db: &RootDatabase,
+    def: Definition,
+    markup: &Markup,
+    config: &HoverConfig,
+) -> Markup {
+    let markup = markup.as_str();
+    let markup = if !config.markdown() {
+        remove_markdown(markup)
+    } else if config.links_in_hover {
+        rewrite_links(db, markup, def)
+    } else {
+        remove_links(markup)
+    };
+    Markup::from(markup)
+}
+
+/// Name of the item owning this definition (enclosing struct/enum/trait/impl
+/// type), used as the last segment of the displayed module path.
+fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String> {
+    match def {
+        Definition::Field(f) => Some(f.parent_def(db).name(db)),
+        Definition::Local(l) => l.parent(db).name(db),
+        Definition::Function(f) => match f.as_assoc_item(db)?.container(db) {
+            hir::AssocItemContainer::Trait(t) => Some(t.name(db)),
+            hir::AssocItemContainer::Impl(i) => i.self_ty(db).as_adt().map(|adt| adt.name(db)),
+        },
+        Definition::Variant(e) => Some(e.parent_enum(db).name(db)),
+        _ => None,
+    }
+    .map(|name| name.to_string())
+}
+
+/// Renders `crate::module::…::item_name`, skipping unnamed path components.
+pub(super) fn path(db: &RootDatabase, module: hir::Module, item_name: Option<String>) -> String {
+    let crate_name =
+        db.crate_graph()[module.krate().into()].display_name.as_ref().map(|it| it.to_string());
+    let module_path = module
+        .path_to_root(db)
+        .into_iter()
+        .rev()
+        .flat_map(|it| it.name(db).map(|name| name.to_string()));
+    crate_name.into_iter().chain(module_path).chain(item_name).join("::")
+}
+
+/// Renders the main hover markup for a definition: module path, signature
+/// label (with evaluated value for consts/statics) and documentation.
+pub(super) fn definition(
+    db: &RootDatabase,
+    def: Definition,
+    famous_defs: Option<&FamousDefs<'_, '_>>,
+    config: &HoverConfig,
+) -> Option<Markup> {
+    let mod_path = definition_mod_path(db, &def);
+    let (label, docs) = match def {
+        Definition::Macro(it) => label_and_docs(db, it),
+        Definition::Field(it) => label_and_docs(db, it),
+        Definition::Module(it) => label_and_docs(db, it),
+        Definition::Function(it) => label_and_docs(db, it),
+        Definition::Adt(it) => label_and_docs(db, it),
+        Definition::Variant(it) => label_and_docs(db, it),
+        // Consts prefer the const-evaluated value, falling back to the source text.
+        Definition::Const(it) => label_value_and_docs(db, it, |it| {
+            let body = it.eval(db);
+            match body {
+                Ok(x) => Some(format!("{}", x)),
+                Err(_) => it.value(db).map(|x| format!("{}", x)),
+            }
+        }),
+        Definition::Static(it) => label_value_and_docs(db, it, |it| it.value(db)),
+        Definition::Trait(it) => label_and_docs(db, it),
+        Definition::TypeAlias(it) => label_and_docs(db, it),
+        Definition::BuiltinType(it) => {
+            // Builtins get their docs from std's `prim_*` modules when available.
+            return famous_defs
+                .and_then(|fd| builtin(fd, it))
+                .or_else(|| Some(Markup::fenced_block(&it.name())))
+        }
+        Definition::Local(it) => return local(db, it),
+        Definition::SelfType(impl_def) => {
+            impl_def.self_ty(db).as_adt().map(|adt| label_and_docs(db, adt))?
+        }
+        Definition::GenericParam(it) => label_and_docs(db, it),
+        Definition::Label(it) => return Some(Markup::fenced_block(&it.name(db))),
+        // FIXME: We should be able to show more info about these
+        Definition::BuiltinAttr(it) => return render_builtin_attr(db, it),
+        Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))),
+        Definition::DeriveHelper(it) => (format!("derive_helper {}", it.name(db)), None),
+    };
+
+    let docs = match config.documentation {
+        Some(_) => docs.or_else(|| {
+            // docs are missing, for assoc items of trait impls try to fall back to the docs of the
+            // original item of the trait
+            let assoc = def.as_assoc_item(db)?;
+            let trait_ = assoc.containing_trait_impl(db)?;
+            let name = Some(assoc.name(db)?);
+            let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
+            item.docs(db)
+        }),
+        None => None,
+    };
+    let docs = docs.filter(|_| config.documentation.is_some()).map(Into::into);
+    markup(docs, label, mod_path)
+}
+
+/// Hover for a built-in attribute: lists its valid forms from its template
+/// (word / list / name-value), escaping markdown-significant characters.
+fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option<Markup> {
+    let name = attr.name(db);
+    let desc = format!("#[{}]", name);
+
+    let AttributeTemplate { word, list, name_value_str } = match attr.template(db) {
+        Some(template) => template,
+        None => return Some(Markup::fenced_block(&attr.name(db))),
+    };
+    let mut docs = "Valid forms are:".to_owned();
+    if word {
+        format_to!(docs, "\n - #\\[{}]", name);
+    }
+    if let Some(list) = list {
+        format_to!(docs, "\n - #\\[{}({})]", name, list);
+    }
+    if let Some(name_value_str) = name_value_str {
+        format_to!(docs, "\n - #\\[{} = {}]", name, name_value_str);
+    }
+    markup(Some(docs.replace('*', "\\*")), desc, None)
+}
+
+/// HIR display string plus doc comments for any displayable, attributed def.
+fn label_and_docs<D>(db: &RootDatabase, def: D) -> (String, Option<hir::Documentation>)
+where
+    D: HasAttrs + HirDisplay,
+{
+    let label = def.display(db).to_string();
+    let docs = def.attrs(db).docs();
+    (label, docs)
+}
+
+/// Like [`label_and_docs`], but appends ` = <value>` when `value_extractor`
+/// yields one (used for consts and statics).
+fn label_value_and_docs<D, E, V>(
+    db: &RootDatabase,
+    def: D,
+    value_extractor: E,
+) -> (String, Option<hir::Documentation>)
+where
+    D: HasAttrs + HirDisplay,
+    E: Fn(&D) -> Option<V>,
+    V: Display,
+{
+    let label = if let Some(value) = value_extractor(&def) {
+        format!("{} = {}", def.display(db), value)
+    } else {
+        def.display(db).to_string()
+    };
+    let docs = def.attrs(db).docs();
+    (label, docs)
+}
+
+/// Module path shown above the hover label; generic params get none.
+fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> {
+    if let Definition::GenericParam(_) = def {
+        return None;
+    }
+    def.module(db).map(|module| path(db, module, definition_owner_name(db, def)))
+}
+
+/// Assembles the final hover markup: optional mod-path block, the signature in
+/// a rust fence, then the docs after a `___` separator.
+fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Option<Markup> {
+    let mut buf = String::new();
+
+    if let Some(mod_path) = mod_path {
+        if !mod_path.is_empty() {
+            format_to!(buf, "```rust\n{}\n```\n\n", mod_path);
+        }
+    }
+    format_to!(buf, "```rust\n{}\n```", desc);
+
+    if let Some(doc) = docs {
+        format_to!(buf, "\n___\n\n{}", doc);
+    }
+    Some(buf.into())
+}
+
+fn builtin(famous_defs: &FamousDefs<'_, '_>, builtin: hir::BuiltinType) -> Option<Markup> {
+    // std exposes prim_{} modules with docstrings on the root to document the builtins
+    let primitive_mod = format!("prim_{}", builtin.name());
+    let doc_owner = find_std_module(famous_defs, &primitive_mod)?;
+    let docs = doc_owner.attrs(famous_defs.0.db).docs()?;
+    markup(Some(docs.into()), builtin.name().to_string(), None)
+}
+
+/// Looks up a direct child module of the `std` crate root by name.
+fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> {
+    let db = famous_defs.0.db;
+    let std_crate = famous_defs.std()?;
+    let std_root_module = std_crate.root_module(db);
+    std_root_module
+        .children(db)
+        .find(|module| module.name(db).map_or(false, |module| module.to_string() == name))
+}
+
+/// Hover label for a local binding: `let`/`mut` prefixes, name and (truncated)
+/// type; `self` parameters are rendered as `self: Ty`.
+fn local(db: &RootDatabase, it: hir::Local) -> Option<Markup> {
+    let ty = it.ty(db);
+    let ty = ty.display_truncated(db, None);
+    let is_mut = if it.is_mut(db) { "mut " } else { "" };
+    let desc = match it.source(db).value {
+        Either::Left(ident) => {
+            let name = it.name(db);
+            // Show `let ` only when the binding really comes from a let
+            // statement/expression (not e.g. a fn parameter).
+            let let_kw = if ident
+                .syntax()
+                .parent()
+                .map_or(false, |p| p.kind() == LET_STMT || p.kind() == LET_EXPR)
+            {
+                "let "
+            } else {
+                ""
+            };
+            format!("{}{}{}: {}", let_kw, is_mut, name, ty)
+        }
+        Either::Right(_) => format!("{}self: {}", is_mut, ty),
+    };
+    markup(None, desc, None)
+}
+
+/// What to show for a keyword hover: text, the `std` docs module to pull
+/// documentation from, and any extra actions.
+struct KeywordHint {
+    description: String,
+    keyword_mod: String,
+    actions: Vec<HoverAction>,
+}
+
+impl KeywordHint {
+    /// Hint with no extra actions.
+    fn new(description: String, keyword_mod: String) -> Self {
+        Self { description, keyword_mod, actions: Vec::default() }
+    }
+}
+
+/// Decides the hover contents for a keyword: expression-position keywords get
+/// their expression's type appended; `fn` in a fn-pointer type maps to the
+/// `prim_fn` docs; everything else maps to its `<kw>_keyword` std module.
+fn keyword_hints(
+    sema: &Semantics<'_, RootDatabase>,
+    token: &SyntaxToken,
+    parent: syntax::SyntaxNode,
+) -> KeywordHint {
+    match token.kind() {
+        T![await] | T![loop] | T![match] | T![unsafe] | T![as] | T![try] | T![if] | T![else] => {
+            let keyword_mod = format!("{}_keyword", token.text());
+
+            match ast::Expr::cast(parent).and_then(|site| sema.type_of_expr(&site)) {
+                // ignore the unit type ()
+                Some(ty) if !ty.adjusted.as_ref().unwrap_or(&ty.original).is_unit() => {
+                    let mut targets: Vec<hir::ModuleDef> = Vec::new();
+                    let mut push_new_def = |item: hir::ModuleDef| {
+                        if !targets.contains(&item) {
+                            targets.push(item);
+                        }
+                    };
+                    walk_and_push_ty(sema.db, &ty.original, &mut push_new_def);
+
+                    let ty = ty.adjusted();
+                    let description = format!("{}: {}", token.text(), ty.display(sema.db));
+
+                    KeywordHint {
+                        description,
+                        keyword_mod,
+                        actions: vec![HoverAction::goto_type_from_targets(sema.db, targets)],
+                    }
+                }
+                _ => KeywordHint {
+                    description: token.text().to_string(),
+                    keyword_mod,
+                    actions: Vec::new(),
+                },
+            }
+        }
+        T![fn] => {
+            let module = match ast::FnPtrType::cast(parent) {
+                // treat fn keyword inside function pointer type as primitive
+                Some(_) => format!("prim_{}", token.text()),
+                None => format!("{}_keyword", token.text()),
+            };
+            KeywordHint::new(token.text().to_string(), module)
+        }
+        T![Self] => KeywordHint::new(token.text().to_string(), "self_upper_keyword".into()),
+        _ => KeywordHint::new(token.text().to_string(), format!("{}_keyword", token.text())),
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
new file mode 100644
index 000000000..867d1f54d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -0,0 +1,5053 @@
+use expect_test::{expect, Expect};
+use ide_db::base_db::{FileLoader, FileRange};
+use syntax::TextRange;
+
+use crate::{fixture, hover::HoverDocFormat, HoverConfig};
+
+/// Asserts that hovering at the `$0` cursor in `ra_fixture` yields no
+/// hover result at all (Markdown config, links enabled).
+fn check_hover_no_result(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap();
+ // The format argument is only evaluated when the assert fails, i.e. when
+ // `hover` is Some — so the `unwrap()` in the message cannot panic spuriously.
+ assert!(hover.is_none(), "hover not expected but found: {:?}", hover.unwrap());
+}
+
+/// Hovers at the `$0` cursor in `ra_fixture` (Markdown, links enabled) and
+/// compares `*<hovered text>*\n<markup>` against the `expect` snapshot.
+// NOTE(review): this body is duplicated in `check_hover_no_links` and
+// `check_hover_no_markdown` below, differing only in the HoverConfig —
+// a shared helper taking the config would remove the triplication.
+#[track_caller]
+fn check(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ // Slice the source text the hover range covers so the snapshot records
+ // which element was actually hovered.
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+/// Same as `check`, but with `links_in_hover: false`, for tests that
+/// exercise link-stripping in the rendered markup.
+fn check_hover_no_links(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+/// Same as `check`, but with `HoverDocFormat::PlainText`, for tests that
+/// verify the markdown-free rendering of hover docs.
+fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::PlainText) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+/// Hovers at the cursor (or selection) in `ra_fixture` and snapshots only
+/// the hover *actions* (runnables, implementations, go-to-type, …) as a
+/// Debug dump, ignoring the markup.
+fn check_actions(ra_fixture: &str, expect: Expect) {
+ // range_or_position: fixtures may mark either a `$0` point or a range.
+ let (analysis, file_id, position) = fixture::range_or_position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id, range: position.range_or_empty() },
+ )
+ .unwrap()
+ .unwrap();
+ expect.assert_debug_eq(&hover.info.actions)
+}
+
+/// Hovers over an explicit `$0…$0` range in `ra_fixture` (links disabled)
+/// and snapshots the raw markup string.
+fn check_hover_range(ra_fixture: &str, expect: Expect) {
+ let (analysis, range) = fixture::range(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ range,
+ )
+ .unwrap()
+ .unwrap();
+ expect.assert_eq(hover.info.markup.as_str())
+}
+
+/// Asserts that hovering over the marked range in `ra_fixture` yields no
+/// hover result.
+fn check_hover_range_no_results(ra_fixture: &str) {
+ let (analysis, range) = fixture::range(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ range,
+ )
+ .unwrap();
+ assert!(hover.is_none());
+}
+
+// Hovering a token that a macro expands to multiple identical occurrences
+// must produce a single, deduplicated hover entry.
+#[test]
+fn hover_descend_macros_avoids_duplicates() {
+ check(
+ r#"
+macro_rules! dupe_use {
+ ($local:ident) => {
+ {
+ $local;
+ $local;
+ }
+ }
+}
+fn foo() {
+ let local = 0;
+ dupe_use!(local$0);
+}
+"#,
+ expect![[r#"
+ *local*
+
+ ```rust
+ let local: i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_all_macro_descends() {
+ check(
+ r#"
+macro_rules! m {
+ ($name:ident) => {
+ /// Outer
+ fn $name() {}
+
+ mod module {
+ /// Inner
+ fn $name() {}
+ }
+ };
+}
+
+m!(ab$0c);
+ "#,
+ expect![[r#"
+ *abc*
+
+ ```rust
+ test::module
+ ```
+
+ ```rust
+ fn abc()
+ ```
+
+ ---
+
+ Inner
+ ---
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn abc()
+ ```
+
+ ---
+
+ Outer
+ "#]],
+ );
+}
+
+// Hovering an expression (not an identifier definition) shows the
+// expression's inferred type.
+#[test]
+fn hover_shows_type_of_an_expression() {
+ check(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() {
+ let foo_test = foo()$0;
+}
+"#,
+ expect![[r#"
+ *foo()*
+ ```rust
+ u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_remove_markdown_if_configured() {
+ check_hover_no_markdown(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() {
+ let foo_test = foo()$0;
+}
+"#,
+ expect![[r#"
+ *foo()*
+ u32
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_long_type_of_an_expression() {
+ check(
+ r#"
+struct Scan<A, B, C> { a: A, b: B, c: C }
+struct Iter<I> { inner: I }
+enum Option<T> { Some(T), None }
+
+struct OtherStruct<T> { i: T }
+
+fn scan<A, B, C>(a: A, b: B, c: C) -> Iter<Scan<OtherStruct<A>, B, C>> {
+ Iter { inner: Scan { a, b, c } }
+}
+
+fn main() {
+ let num: i32 = 55;
+ let closure = |memo: &mut u32, value: &u32, _another: &mut u32| -> Option<u32> {
+ Option::Some(*memo + value)
+ };
+ let number = 5u32;
+ let mut iter$0 = scan(OtherStruct { i: num }, closure, number);
+}
+"#,
+ expect![[r#"
+ *iter*
+
+ ```rust
+ let mut iter: Iter<Scan<OtherStruct<OtherStruct<i32>>, |&mut u32, &u32, &mut u32| -> Option<u32>, u32>>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature() {
+ // Single file with result
+ check(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() { let foo_test = fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo() -> u32
+ ```
+ "#]],
+ );
+
+ // Multiple candidates but results are ambiguous.
+ check(
+ r#"
+//- /a.rs
+pub fn foo() -> u32 { 1 }
+
+//- /b.rs
+pub fn foo() -> &str { "" }
+
+//- /c.rs
+pub fn foo(a: u32, b: u32) {}
+
+//- /main.rs
+mod a;
+mod b;
+mod c;
+
+fn main() { let foo_test = fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+ ```rust
+ {unknown}
+ ```
+ "#]],
+ );
+
+ // Use literal `crate` in path
+ check(
+ r#"
+pub struct X;
+
+fn foo() -> crate::X { X }
+
+fn main() { f$0oo(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo() -> crate::X
+ ```
+ "#]],
+ );
+
+ // Check `super` in path
+ check(
+ r#"
+pub struct X;
+
+mod m { pub fn foo() -> super::X { super::X } }
+
+fn main() { m::f$0oo(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::m
+ ```
+
+ ```rust
+ pub fn foo() -> super::X
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_omits_unnamed_where_preds() {
+ check(
+ r#"
+pub fn foo(bar: impl T) { }
+
+fn main() { fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(bar: impl T)
+ ```
+ "#]],
+ );
+ check(
+ r#"
+pub fn foo<V: AsRef<str>>(bar: impl T, baz: V) { }
+
+fn main() { fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo<V>(bar: impl T, baz: V)
+ where
+ V: AsRef<str>,
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature_with_type_params() {
+ check(
+ r#"
+pub fn foo<'a, T: AsRef<str>>(b: &'a T) -> &'a str { }
+
+fn main() { let foo_test = fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo<'a, T>(b: &'a T) -> &'a str
+ where
+ T: AsRef<str>,
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature_on_fn_name() {
+ check(
+ r#"
+pub fn foo$0(a: u32, b: u32) -> u32 {}
+
+fn main() { }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(a: u32, b: u32) -> u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_doc() {
+ check(
+ r#"
+/// # Example
+/// ```
+/// # use std::path::Path;
+/// #
+/// foo(Path::new("hello, world!"))
+/// ```
+pub fn foo$0(_: &Path) {}
+
+fn main() { }
+"#,
+ expect![[r##"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(_: &Path)
+ ```
+
+ ---
+
+ # Example
+
+ ```
+ # use std::path::Path;
+ #
+ foo(Path::new("hello, world!"))
+ ```
+ "##]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_doc_attr_raw_string() {
+ check(
+ r##"
+#[doc = r#"Raw string doc attr"#]
+pub fn foo$0(_: &Path) {}
+
+fn main() { }
+"##,
+ expect![[r##"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(_: &Path)
+ ```
+
+ ---
+
+ Raw string doc attr
+ "##]],
+ );
+}
+
+#[test]
+fn hover_shows_struct_field_info() {
+ // Hovering over the field when instantiating
+ check(
+ r#"
+struct Foo { field_a: u32 }
+
+fn main() {
+ let foo = Foo { field_a$0: 0, };
+}
+"#,
+ expect![[r#"
+ *field_a*
+
+ ```rust
+ test::Foo
+ ```
+
+ ```rust
+ field_a: u32
+ ```
+ "#]],
+ );
+
+ // Hovering over the field in the definition
+ check(
+ r#"
+struct Foo { field_a$0: u32 }
+
+fn main() {
+ let foo = Foo { field_a: 0 };
+}
+"#,
+ expect![[r#"
+ *field_a*
+
+ ```rust
+ test::Foo
+ ```
+
+ ```rust
+ field_a: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_static() {
+ check(
+ r#"const foo$0: u32 = 123;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const foo: u32 = 123 (0x7B)
+ ```
+ "#]],
+ );
+ check(
+ r#"
+const foo$0: u32 = {
+ let x = foo();
+ x + 100
+};"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const foo: u32 = {
+ let x = foo();
+ x + 100
+ }
+ ```
+ "#]],
+ );
+
+ check(
+ r#"static foo$0: u32 = 456;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ static foo: u32 = 456
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_default_generic_types() {
+ check(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let zz$0 = Test { t: 23u8, k: 33 };
+}"#,
+ expect![[r#"
+ *zz*
+
+ ```rust
+ let zz: Test<i32>
+ ```
+ "#]],
+ );
+ check_hover_range(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let $0zz$0 = Test { t: 23u8, k: 33 };
+}"#,
+ expect![[r#"
+ ```rust
+ Test<i32, u8>
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_some() {
+ check(
+ r#"
+enum Option<T> { Some(T) }
+use Option::Some;
+
+fn main() { So$0me(12); }
+"#,
+ expect![[r#"
+ *Some*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ Some(T)
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+enum Option<T> { Some(T) }
+use Option::Some;
+
+fn main() { let b$0ar = Some(12); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ let bar: Option<i32>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_enum_variant() {
+ check(
+ r#"
+enum Option<T> {
+ /// The None variant
+ Non$0e
+}
+"#,
+ expect![[r#"
+ *None*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ None
+ ```
+
+ ---
+
+ The None variant
+ "#]],
+ );
+
+ check(
+ r#"
+enum Option<T> {
+ /// The Some variant
+ Some(T)
+}
+fn main() {
+ let s = Option::Som$0e(12);
+}
+"#,
+ expect![[r#"
+ *Some*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ Some(T)
+ ```
+
+ ---
+
+ The Some variant
+ "#]],
+ );
+}
+
+#[test]
+fn hover_for_local_variable() {
+ check(
+ r#"fn func(foo: i32) { fo$0o; }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_for_local_variable_pat() {
+ check(
+ r#"fn func(fo$0o: i32) {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_local_var_edge() {
+ check(
+ r#"fn func(foo: i32) { if true { $0foo; }; }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_for_param_edge() {
+ check(
+ r#"fn func($0foo: i32) {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_for_param_with_multiple_traits() {
+ check(
+ r#"
+ //- minicore: sized
+ trait Deref {
+ type Target: ?Sized;
+ }
+ trait DerefMut {
+ type Target: ?Sized;
+ }
+ fn f(_x$0: impl Deref<Target=u8> + DerefMut<Target=u8>) {}"#,
+ expect![[r#"
+ *_x*
+
+ ```rust
+ _x: impl Deref<Target = u8> + DerefMut<Target = u8>
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_method_result() {
+ check(
+ r#"
+struct Thing { x: u32 }
+
+impl Thing {
+ fn new() -> Thing { Thing { x: 0 } }
+}
+
+fn main() { let foo_$0test = Thing::new(); }
+"#,
+ expect![[r#"
+ *foo_test*
+
+ ```rust
+ let foo_test: Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_method_exact() {
+ check(
+ r#"
+mod wrapper {
+ pub struct Thing { x: u32 }
+
+ impl Thing {
+ pub fn new() -> Thing { Thing { x: 0 } }
+ }
+}
+
+fn main() { let foo_test = wrapper::Thing::new$0(); }
+"#,
+ expect![[r#"
+ *new*
+
+ ```rust
+ test::wrapper::Thing
+ ```
+
+ ```rust
+ pub fn new() -> Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_const_in_pattern() {
+ check(
+ r#"
+struct X;
+impl X {
+ const C: u32 = 1;
+}
+
+fn main() {
+ match 1 {
+ X::C$0 => {},
+ 2 => {},
+ _ => {}
+ };
+}
+"#,
+ expect![[r#"
+ *C*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const C: u32 = 1
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_self() {
+ check(
+ r#"
+struct Thing { x: u32 }
+impl Thing {
+ fn new() -> Self { Self$0 { x: 0 } }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+struct Thing { x: u32 }
+impl Thing {
+ fn new() -> Self$0 { Self { x: 0 } }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+enum Thing { A }
+impl Thing {
+ pub fn new() -> Self$0 { Thing::A }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ enum Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+ enum Thing { A }
+ impl Thing {
+ pub fn thing(a: Self$0) {}
+ }
+ "#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ enum Thing
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_shadowing_pat() {
+ check(
+ r#"
+fn x() {}
+
+fn y() {
+ let x = 0i32;
+ x$0;
+}
+"#,
+ expect![[r#"
+ *x*
+
+ ```rust
+ let x: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_macro_invocation() {
+ check(
+ r#"
+macro_rules! foo { () => {} }
+
+fn f() { fo$0o!(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro_rules! foo
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_macro2_invocation() {
+ check(
+ r#"
+/// foo bar
+///
+/// foo bar baz
+macro foo() {}
+
+fn f() { fo$0o!(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro foo
+ ```
+
+ ---
+
+ foo bar
+
+ foo bar baz
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_tuple_field() {
+ check(
+ r#"struct TS(String, i32$0);"#,
+ expect![[r#"
+ *i32*
+
+ ```rust
+ i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_through_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo() {}
+id! {
+ fn bar() { fo$0o(); }
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_attr() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn foo$0() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_expr_in_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo(bar:u32) { let a = id!(ba$0r); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ bar: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_expr_in_macro_recursive() {
+ check(
+ r#"
+macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } }
+macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } }
+fn foo(bar:u32) { let a = id!(ba$0r); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ bar: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_func_in_macro_recursive() {
+ check(
+ r#"
+macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } }
+macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } }
+fn bar() -> u32 { 0 }
+fn foo() { let a = id!([0u32, bar($0)] ); }
+"#,
+ expect![[r#"
+ *bar()*
+ ```rust
+ u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_literal_string_in_macro() {
+ check(
+ r#"
+macro_rules! arr { ($($tt:tt)*) => { [$($tt)*] } }
+fn foo() {
+ let mastered_for_itunes = "";
+ let _ = arr!("Tr$0acks", &mastered_for_itunes);
+}
+"#,
+ expect![[r#"
+ *"Tracks"*
+ ```rust
+ &str
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_assert_macro() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! assert {}
+
+fn bar() -> bool { true }
+fn foo() {
+ assert!(ba$0r());
+}
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn bar() -> bool
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_multiple_actions() {
+ check_actions(
+ r#"
+struct Bar;
+struct Foo { bar: Bar }
+
+fn foo(Foo { b$0ar }: &Foo) {}
+ "#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Bar",
+ kind: Struct,
+ description: "struct Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_through_literal_string_in_builtin_macro() {
+ check_hover_no_result(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! format {}
+
+ fn foo() {
+ format!("hel$0lo {}", 0);
+ }
+"#,
+ );
+}
+
+#[test]
+fn test_hover_non_ascii_space_doc() {
+ check(
+ "
+/// <- `\u{3000}` here
+fn foo() { }
+
+fn bar() { fo$0o(); }
+",
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+
+ ---
+
+ \<- ` ` here
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_show_qualifiers() {
+ check(
+ r#"async fn foo$0() {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ async fn foo()
+ ```
+ "#]],
+ );
+ check(
+ r#"pub const unsafe fn foo$0() {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub const unsafe fn foo()
+ ```
+ "#]],
+ );
+ // Top level `pub(crate)` will be displayed as no visibility.
+ check(
+ r#"mod m { pub(crate) async unsafe extern "C" fn foo$0() {} }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::m
+ ```
+
+ ```rust
+ pub(crate) async unsafe extern "C" fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_show_types() {
+ check(
+ r#"fn foo$0(a: i32, b:i32) -> i32 { 0 }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo(a: i32, b: i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_pointer_show_identifiers() {
+ check(
+ r#"type foo$0 = fn(a: i32, b: i32) -> i32;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type foo = fn(a: i32, b: i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_pointer_no_identifier() {
+ check(
+ r#"type foo$0 = fn(i32, _: i32) -> i32;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type foo = fn(i32, i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_show_qualifiers() {
+ check_actions(
+ r"unsafe trait foo$0() {}",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 13,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate st$0d;
+//- /std/lib.rs crate:std
+//! Standard library for this test
+//!
+//! Printed?
+//! abc123
+"#,
+ expect![[r#"
+ *std*
+
+ ```rust
+ extern crate std
+ ```
+
+ ---
+
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
+ );
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as ab$0c;
+//- /std/lib.rs crate:std
+//! Standard library for this test
+//!
+//! Printed?
+//! abc123
+"#,
+ expect![[r#"
+ *abc*
+
+ ```rust
+ extern crate std
+ ```
+
+ ---
+
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_mod_with_same_name_as_function() {
+ check(
+ r#"
+use self::m$0y::Bar;
+mod my { pub struct Bar; }
+
+fn my() {}
+"#,
+ expect![[r#"
+ *my*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod my
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_comment() {
+ check(
+ r#"
+/// This is an example
+/// multiline doc
+///
+/// # Example
+///
+/// ```
+/// let five = 5;
+///
+/// assert_eq!(6, my_crate::add_one(5));
+/// ```
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r##"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ This is an example
+ multiline doc
+
+ # Example
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ "##]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_attr() {
+ check(
+ r#"
+#[doc = "bar docs"]
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ bar docs
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_attr_multiple_and_mixed() {
+ check(
+ r#"
+/// bar docs 0
+#[doc = "bar docs 1"]
+#[doc = "bar docs 2"]
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ bar docs 0
+ bar docs 1
+ bar docs 2
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_external_url() {
+ check(
+ r#"
+pub struct Foo;
+/// [external](https://www.google.com)
+pub struct B$0ar
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Bar
+ ```
+
+ ---
+
+ [external](https://www.google.com)
+ "#]],
+ );
+}
+
+// Check that we don't rewrite links which we can't identify
+#[test]
+fn test_hover_unknown_target() {
+ check(
+ r#"
+pub struct Foo;
+/// [baz](Baz)
+pub struct B$0ar
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Bar
+ ```
+
+ ---
+
+ [baz](Baz)
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_no_links() {
+ check_hover_no_links(
+ r#"
+/// Test cases:
+/// case 1. bare URL: https://www.example.com/
+/// case 2. inline URL with title: [example](https://www.example.com/)
+/// case 3. code reference: [`Result`]
+/// case 4. code reference but miss footnote: [`String`]
+/// case 5. autolink: <http://www.example.com/>
+/// case 6. email address: <test@example.com>
+/// case 7. reference: [example][example]
+/// case 8. collapsed link: [example][]
+/// case 9. shortcut link: [example]
+/// case 10. inline without URL: [example]()
+/// case 11. reference: [foo][foo]
+/// case 12. reference: [foo][bar]
+/// case 13. collapsed link: [foo][]
+/// case 14. shortcut link: [foo]
+/// case 15. inline without URL: [foo]()
+/// case 16. just escaped text: \[foo]
+/// case 17. inline link: [Foo](foo::Foo)
+///
+/// [`Result`]: ../../std/result/enum.Result.html
+/// [^example]: https://www.example.com/
+pub fn fo$0o() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo()
+ ```
+
+ ---
+
+ Test cases:
+ case 1. bare URL: https://www.example.com/
+ case 2. inline URL with title: [example](https://www.example.com/)
+ case 3. code reference: `Result`
+ case 4. code reference but miss footnote: `String`
+ case 5. autolink: http://www.example.com/
+ case 6. email address: test@example.com
+ case 7. reference: example
+ case 8. collapsed link: example
+ case 9. shortcut link: example
+ case 10. inline without URL: example
+ case 11. reference: foo
+ case 12. reference: foo
+ case 13. collapsed link: foo
+ case 14. shortcut link: foo
+ case 15. inline without URL: foo
+ case 16. just escaped text: \[foo\]
+ case 17. inline link: Foo
+
+ [^example]: https://www.example.com/
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_macro_generated_struct_fn_doc_comment() {
+ cov_mark::check!(hover_macro_generated_struct_fn_doc_comment);
+
+ check(
+ r#"
+macro_rules! bar {
+ () => {
+ struct Bar;
+ impl Bar {
+ /// Do the foo
+ fn foo(&self) {}
+ }
+ }
+}
+
+bar!();
+
+fn foo() { let bar = Bar; bar.fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::Bar
+ ```
+
+ ```rust
+ fn foo(&self)
+ ```
+
+ ---
+
+ Do the foo
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_macro_generated_struct_fn_doc_attr() {
+ cov_mark::check!(hover_macro_generated_struct_fn_doc_attr);
+
+ check(
+ r#"
+macro_rules! bar {
+ () => {
+ struct Bar;
+ impl Bar {
+ #[doc = "Do the foo"]
+ fn foo(&self) {}
+ }
+ }
+}
+
+bar!();
+
+fn foo() { let bar = Bar; bar.fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::Bar
+ ```
+
+ ```rust
+ fn foo(&self)
+ ```
+
+ ---
+
+ Do the foo
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_variadic_function() {
+ check(
+ r#"
+extern "C" {
+ pub fn foo(bar: i32, ...) -> i32;
+}
+
+fn main() { let foo_test = unsafe { fo$0o(1, 2, 3); } }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub unsafe fn foo(bar: i32, ...) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_has_impl_action() {
+ check_actions(
+ r#"trait foo$0() {}"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 6,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_has_impl_action() {
+ check_actions(
+ r"struct foo$0() {}",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_union_has_impl_action() {
+ check_actions(
+ r#"union foo$0() {}"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 6,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_enum_has_impl_action() {
+ check_actions(
+ r"enum foo$0() { A, B }",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 5,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_self_has_impl_action() {
+ check_actions(
+ r#"struct foo where Self$0:;"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_test_has_action() {
+ check_actions(
+ r#"
+#[test]
+fn foo_$0test() {}
+"#,
+ expect![[r#"
+ [
+ Reference(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 11,
+ },
+ ),
+ Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..24,
+ focus_range: 11..19,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_test_mod_has_action() {
+ check_actions(
+ r#"
+mod tests$0 {
+ #[test]
+ fn foo_test() {}
+}
+"#,
+ expect![[r#"
+ [
+ Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..46,
+ focus_range: 4..9,
+ name: "tests",
+ kind: Module,
+ description: "mod tests",
+ },
+ kind: TestMod {
+ path: "tests",
+ },
+ cfg: None,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_has_goto_type_action() {
+ check_actions(
+ r#"
+struct S{ f1: u32 }
+
+fn main() { let s$0t = S{ f1:0 }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..19,
+ focus_range: 7..8,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_struct_has_goto_type_actions() {
+ check_actions(
+ r#"
+struct Arg(u32);
+struct S<T>{ f1: T }
+
+fn main() { let s$0t = S{ f1:Arg(0) }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 17..37,
+ focus_range: 24..25,
+ name: "S",
+ kind: Struct,
+ description: "struct S<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Arg",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..16,
+ focus_range: 7..10,
+ name: "Arg",
+ kind: Struct,
+ description: "struct Arg",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_struct_has_flattened_goto_type_actions() {
+ check_actions(
+ r#"
+struct Arg(u32);
+struct S<T>{ f1: T }
+
+fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 17..37,
+ focus_range: 24..25,
+ name: "S",
+ kind: Struct,
+ description: "struct S<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Arg",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..16,
+ focus_range: 7..10,
+ name: "Arg",
+ kind: Struct,
+ description: "struct Arg",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_tuple_has_goto_type_actions() {
+ check_actions(
+ r#"
+struct A(u32);
+struct B(u32);
+mod M {
+ pub struct C(u32);
+}
+
+fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::A",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..14,
+ focus_range: 7..8,
+ name: "A",
+ kind: Struct,
+ description: "struct A",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..29,
+ focus_range: 22..23,
+ name: "B",
+ kind: Struct,
+ description: "struct B",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::M::C",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 42..60,
+ focus_range: 53..54,
+ name: "C",
+ kind: Struct,
+ description: "pub struct C",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_return_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo() -> impl Foo {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_return_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S;
+fn foo() -> impl Foo<S> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..25,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_return_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+trait Bar {}
+fn foo() -> impl Foo + Bar {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 13..25,
+ focus_range: 19..22,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_return_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+trait Bar<T> {}
+struct S1 {}
+struct S2 {}
+
+fn foo() -> impl Foo<S1> + Bar<S2> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..31,
+ focus_range: 22..25,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S1",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 32..44,
+ focus_range: 39..41,
+ name: "S1",
+ kind: Struct,
+ description: "struct S1",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S2",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 45..57,
+ focus_range: 52..54,
+ name: "S2",
+ kind: Struct,
+ description: "struct S2",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo(ar$0g: &impl Foo) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+trait Bar<T> {}
+struct S{}
+
+fn foo(ar$0g: &impl Foo + Bar<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 13..28,
+ focus_range: 19..22,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 29..39,
+ focus_range: 36..37,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_async_block_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+//- /main.rs crate:main deps:core
+// we don't use minicore here so that this test doesn't randomly fail
+// when someone edits minicore
+struct S;
+fn foo() {
+ let fo$0o = async { S };
+}
+//- /core.rs crate:core
+pub mod future {
+ #[lang = "future_trait"]
+ pub trait Future {}
+}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "core::future::Future",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 21..69,
+ focus_range: 60..66,
+ name: "Future",
+ kind: Trait,
+ description: "pub trait Future",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "main::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..110,
+ focus_range: 108..109,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_generic_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S {}
+fn foo(ar$0g: &impl Foo<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..27,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_dyn_return_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+struct S;
+impl Foo for S {}
+
+struct B<T>{}
+fn foo() -> B<dyn Foo> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 42..55,
+ focus_range: 49..50,
+ name: "B",
+ kind: Struct,
+ description: "struct B<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_dyn_arg_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo(ar$0g: &dyn Foo) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_dyn_arg_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S {}
+fn foo(ar$0g: &dyn Foo<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..27,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_goto_type_action_links_order() {
+ check_actions(
+ r#"
+trait ImplTrait<T> {}
+trait DynTrait<T> {}
+struct B<T> {}
+struct S {}
+
+fn foo(a$0rg: &impl ImplTrait<B<dyn DynTrait<B<S>>>>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::ImplTrait",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..21,
+ focus_range: 6..15,
+ name: "ImplTrait",
+ kind: Trait,
+ description: "trait ImplTrait<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 43..57,
+ focus_range: 50..51,
+ name: "B",
+ kind: Struct,
+ description: "struct B<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::DynTrait",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 22..42,
+ focus_range: 28..36,
+ name: "DynTrait",
+ kind: Trait,
+ description: "trait DynTrait<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 58..69,
+ focus_range: 65..66,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_associated_type_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {
+ type Item;
+ fn get(self) -> Self::Item {}
+}
+
+struct Bar{}
+struct S{}
+
+impl Foo for S { type Item = Bar; }
+
+fn test() -> impl Foo { S {} }
+
+fn main() { let s$0t = test().get(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..62,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_const_param_has_goto_type_action() {
+ check_actions(
+ r#"
+struct Bar;
+struct Foo<const BAR: Bar>;
+
+impl<const BAR: Bar> Foo<BAR$0> {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Bar",
+ kind: Struct,
+ description: "struct Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_type_param_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+
+fn foo<T: Foo>(t: T$0){}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_self_has_go_to_type() {
+ check_actions(
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self$0) {}
+}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Foo",
+ kind: Struct,
+ description: "struct Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn hover_displays_normalized_crate_names() {
+ check(
+ r#"
+//- /lib.rs crate:name-with-dashes
+pub mod wrapper {
+ pub struct Thing { x: u32 }
+
+ impl Thing {
+ pub fn new() -> Thing { Thing { x: 0 } }
+ }
+}
+
+//- /main.rs crate:main deps:name-with-dashes
+fn main() { let foo_test = name_with_dashes::wrapper::Thing::new$0(); }
+"#,
+ expect![[r#"
+ *new*
+
+ ```rust
+ name_with_dashes::wrapper::Thing
+ ```
+
+ ```rust
+ pub fn new() -> Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_field_pat_shorthand_ref_match_ergonomics() {
+ check(
+ r#"
+struct S {
+ f: i32,
+}
+
+fn main() {
+ let s = S { f: 0 };
+ let S { f$0 } = &s;
+}
+"#,
+ expect![[r#"
+ *f*
+
+ ```rust
+ f: &i32
+ ```
+ ---
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ f: i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_order() {
+    // Const params render before type params; note the hover drops the
+    // const param's default (`= 1`) but keeps the type param's (`= Foo`).
+    check(
+        r#"
+struct Foo;
+struct S$0T<const C: usize = 1, T = Foo>(T);
+"#,
+        expect![[r#"
+            *ST*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            struct ST<const C: usize, T = Foo>
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn const_generic_positive_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<1>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<1>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_zero_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<0>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<0>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_negative_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<-1>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<-1>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_bool_literal() {
+ check(
+ r#"
+struct Const<const F: bool>;
+
+fn main() {
+ let v$0alue = Const::<true>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<true>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_char_literal() {
+ check(
+ r#"
+struct Const<const C: char>;
+
+fn main() {
+ let v$0alue = Const::<'🦀'>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<'🦀'>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_self_param_shows_type() {
+    // `&self` hovers as `self: &Foo`, i.e. with the receiver type spelled out.
+    check(
+        r#"
+struct Foo {}
+impl Foo {
+    fn bar(&sel$0f) {}
+}
+"#,
+        expect![[r#"
+            *self*
+
+            ```rust
+            self: &Foo
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_self_param_shows_type_for_arbitrary_self_type() {
+ check(
+ r#"
+struct Arc<T>(T);
+struct Foo {}
+impl Foo {
+ fn bar(sel$0f: Arc<Foo>) {}
+}
+"#,
+ expect![[r#"
+ *self*
+
+ ```rust
+ self: Arc<Foo>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_doc_outer_inner() {
+ check(
+ r#"
+/// Be quick;
+mod Foo$0 {
+ //! time is mana
+
+ /// This comment belongs to the function
+ fn foo() {}
+}
+"#,
+ expect![[r#"
+ *Foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod Foo
+ ```
+
+ ---
+
+ Be quick;
+ time is mana
+ "#]],
+ );
+}
+
+#[test]
+fn hover_doc_outer_inner_attribute() {
+    // Same as `hover_doc_outer_inner`, but with `#[doc = ...]`/`#![doc = ...]`
+    // attributes instead of `///`/`//!` comments; outer and inner docs are
+    // concatenated in the hover.
+    check(
+        r#"
+#[doc = "Be quick;"]
+mod Foo$0 {
+    #![doc = "time is mana"]
+
+    #[doc = "This comment belongs to the function"]
+    fn foo() {}
+}
+"#,
+        expect![[r#"
+            *Foo*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            mod Foo
+            ```
+
+            ---
+
+            Be quick;
+            time is mana
+        "#]],
+    );
+}
+
+#[test]
+fn hover_doc_block_style_indented() {
+    // Block-style (`/** ... */`) doc comments have their common leading
+    // indentation stripped before being rendered in the hover.
+    check(
+        r#"
+/**
+    foo
+    ```rust
+    let x = 3;
+    ```
+*/
+fn foo$0() {}
+"#,
+        expect![[r#"
+            *foo*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            fn foo()
+            ```
+
+            ---
+
+            foo
+
+            ```rust
+            let x = 3;
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_comments_dont_highlight_parent() {
+    // Hovering inside a plain comment must produce no hover at all rather
+    // than falling through to the enclosing item (guarded by the
+    // `no_highlight_on_comment_hover` coverage mark).
+    cov_mark::check!(no_highlight_on_comment_hover);
+    check_hover_no_result(
+        r#"
+fn no_hover() {
+    // no$0hover
+}
+"#,
+    );
+}
+
+#[test]
+fn hover_label() {
+    // Hovering a loop label simply echoes the label; there is nothing else
+    // to show for it.
+    check(
+        r#"
+fn foo() {
+    'label$0: loop {}
+}
+"#,
+        expect![[r#"
+            *'label*
+
+            ```rust
+            'label
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_lifetime() {
+    // Hovering a lifetime parameter simply echoes the lifetime itself.
+    check(
+        r#"fn foo<'lifetime>(_: &'lifetime$0 ()) {}"#,
+        expect![[r#"
+            *'lifetime*
+
+            ```rust
+            'lifetime
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_type_param() {
+    // Hovering a type parameter shows its name plus its (non-`Sized`) trait
+    // bounds.
+    check(
+        r#"
+//- minicore: sized
+struct Foo<T>(T);
+trait TraitA {}
+trait TraitB {}
+impl<T: TraitA + TraitB> Foo<T$0> where T: Sized {}
+"#,
+        expect![[r#"
+            *T*
+
+            ```rust
+            T: TraitA + TraitB
+            ```
+        "#]],
+    );
+    // With no bounds at all, just the bare parameter name is shown.
+    check(
+        r#"
+//- minicore: sized
+struct Foo<T>(T);
+impl<T> Foo<T$0> {}
+"#,
+        expect![[r#"
+            *T*
+
+            ```rust
+            T
+            ```
+        "#]],
+    );
+    // lifetime bounds aren't being tracked yet, so `T: 'static` renders the
+    // same as an unbounded `T`
+    check(
+        r#"
+//- minicore: sized
+struct Foo<T>(T);
+impl<T: 'static> Foo<T$0> {}
+"#,
+        expect![[r#"
+            *T*
+
+            ```rust
+            T
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_type_param_sized_bounds() {
+ // implicit `: Sized` bound
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct Foo<T>(T);
+impl<T: Trait> Foo<T$0> {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct Foo<T>(T);
+impl<T: Trait + ?Sized> Foo<T$0> {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait + ?Sized
+ ```
+ "#]],
+ );
+}
+
+mod type_param_sized_bounds {
+ use super::*;
+
+ #[test]
+ fn single_implicit() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn single_explicit() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn single_relaxed() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: ?Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: ?Sized
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_implicit() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_explicit() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait + Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_relaxed() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait + ?Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait + ?Sized
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn mixed() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: ?Sized + Sized + Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Sized + ?Sized + Sized + Trait>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+}
+
+#[test]
+fn hover_const_generic_type_alias() {
+ check(
+ r#"
+struct Foo<const LEN: usize>;
+type Fo$0o2 = Foo<2>;
+"#,
+ expect![[r#"
+ *Foo2*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Foo2 = Foo<2>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_param() {
+ check(
+ r#"
+struct Foo<const LEN: usize>;
+impl<const LEN: usize> Foo<LEN$0> {}
+"#,
+ expect![[r#"
+ *LEN*
+
+ ```rust
+ const LEN: usize
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_eval() {
+ // show hex for <10
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = 1 << 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 8
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show hex for >10
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = (1 << 3) + (1 << 2);
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 12 (0xC)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show original body when const eval fails
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = 2 - 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 2 - 3
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // don't show hex for negatives
+ check(
+ r#"
+/// This is a doc
+const FOO$0: i32 = 2 - 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: i32 = -1
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ check(
+ r#"
+/// This is a doc
+const FOO$0: &str = "bar";
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: &str = "bar"
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show char literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: char = 'a';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: char = 'a'
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show escaped char literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: char = '\x61';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: char = 'a'
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show byte literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: u8 = b'a';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: u8 = 97 (0x61)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show escaped byte literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: u8 = b'\x61';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: u8 = 97 (0x61)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show float literal
+ check(
+ r#"
+ /// This is a doc
+ const FOO$0: f64 = 1.0234;
+ "#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f64 = 1.0234
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ //show float typecasted from int
+ check(
+ r#"
+/// This is a doc
+const FOO$0: f32 = 1f32;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f32 = 1.0
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ //show f64 typecasted from float
+ check(
+ r#"
+/// This is a doc
+const FOO$0: f64 = 1.0f64;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f64 = 1.0
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_pat() {
+ check(
+ r#"
+/// This is a doc
+const FOO: usize = 3;
+fn foo() {
+ match 5 {
+ FOO$0 => (),
+ _ => ()
+ }
+}
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 3
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+}
+
+#[test]
+fn array_repeat_exp() {
+ check(
+ r#"
+fn main() {
+ let til$0e4 = [0_u32; (4 * 8 * 8) / 32];
+}
+ "#,
+ expect![[r#"
+ *tile4*
+
+ ```rust
+ let tile4: [u32; 8]
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_mod_def() {
+ check(
+ r#"
+//- /main.rs
+mod foo$0;
+//- /foo.rs
+//! For the horde!
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod foo
+ ```
+
+ ---
+
+ For the horde!
+ "#]],
+ );
+}
+
+#[test]
+fn hover_self_in_use() {
+ check(
+ r#"
+//! This should not appear
+mod foo {
+ /// But this should appear
+ pub mod bar {}
+}
+use foo::bar::{self$0};
+"#,
+ expect![[r#"
+ *self*
+
+ ```rust
+ test::foo
+ ```
+
+ ```rust
+ mod bar
+ ```
+
+ ---
+
+ But this should appear
+ "#]],
+ )
+}
+
+#[test]
+fn hover_keyword() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn f() { retur$0n; }
+//- /libstd.rs crate:std
+/// Docs for return_keyword
+mod return_keyword {}
+"#,
+ expect![[r#"
+ *return*
+
+ ```rust
+ return
+ ```
+
+ ---
+
+ Docs for return_keyword
+ "#]],
+ );
+}
+
+#[test]
+fn hover_keyword_doc() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn foo() {
+ let bar = mov$0e || {};
+}
+//- /libstd.rs crate:std
+#[doc(keyword = "move")]
+/// [closure]
+/// [closures][closure]
+/// [threads]
+/// <https://doc.rust-lang.org/nightly/book/ch13-01-closures.html>
+///
+/// [closure]: ../book/ch13-01-closures.html
+/// [threads]: ../book/ch16-01-threads.html#using-move-closures-with-threads
+mod move_keyword {}
+"#,
+ expect![[r##"
+ *move*
+
+ ```rust
+ move
+ ```
+
+ ---
+
+ [closure](https://doc.rust-lang.org/nightly/book/ch13-01-closures.html)
+ [closures](https://doc.rust-lang.org/nightly/book/ch13-01-closures.html)
+ [threads](https://doc.rust-lang.org/nightly/book/ch16-01-threads.html#using-move-closures-with-threads)
+ <https://doc.rust-lang.org/nightly/book/ch13-01-closures.html>
+ "##]],
+ );
+}
+
+#[test]
+fn hover_keyword_as_primitive() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+type F = f$0n(i32) -> i32;
+//- /libstd.rs crate:std
+/// Docs for prim_fn
+mod prim_fn {}
+"#,
+ expect![[r#"
+ *fn*
+
+ ```rust
+ fn
+ ```
+
+ ---
+
+ Docs for prim_fn
+ "#]],
+ );
+}
+
+#[test]
+fn hover_builtin() {
+    // Hovering a builtin primitive (`str`) resolves its docs from the
+    // corresponding `prim_*` module in std, and rewrites relative doc links
+    // to doc.rust-lang.org.
+    // NOTE(review): the fixture is deliberately(?) invalid Rust (`cosnt`,
+    // stray `}`) — presumably showing hover still works in broken code;
+    // confirm before "fixing" the typo, as tidying it may weaken the test.
+    check(
+        r#"
+//- /main.rs crate:main deps:std
+cosnt _: &str$0 = ""; }
+
+//- /libstd.rs crate:std
+/// Docs for prim_str
+/// [`foo`](../std/keyword.foo.html)
+mod prim_str {}
+"#,
+        expect![[r#"
+            *str*
+
+            ```rust
+            str
+            ```
+
+            ---
+
+            Docs for prim_str
+            [`foo`](https://doc.rust-lang.org/nightly/std/keyword.foo.html)
+        "#]],
+    );
+}
+
+#[test]
+fn hover_macro_expanded_function() {
+ check(
+ r#"
+struct S<'a, T>(&'a T);
+trait Clone {}
+macro_rules! foo {
+ () => {
+ fn bar<'t, T: Clone + 't>(s: &mut S<'t, T>, t: u32) -> *mut u32 where
+ 't: 't + 't,
+ for<'a> T: Clone + 'a
+ { 0 as _ }
+ };
+}
+
+foo!();
+
+fn main() {
+ bar$0;
+}
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn bar<'t, T>(s: &mut S<'t, T>, t: u32) -> *mut u32
+ where
+ T: Clone + 't,
+ 't: 't + 't,
+ for<'a> T: Clone + 'a,
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_intra_doc_links() {
+ check(
+ r#"
+
+pub mod theitem {
+ /// This is the item. Cool!
+ pub struct TheItem;
+}
+
+/// Gives you a [`TheItem$0`].
+///
+/// [`TheItem`]: theitem::TheItem
+pub fn gimme() -> theitem::TheItem {
+ theitem::TheItem
+}
+"#,
+ expect![[r#"
+ *[`TheItem`]*
+
+ ```rust
+ test::theitem
+ ```
+
+ ```rust
+ pub struct TheItem
+ ```
+
+ ---
+
+ This is the item. Cool!
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_assoc_typealias() {
+ check(
+ r#"
+ fn main() {}
+
+trait T1 {
+ type Bar;
+ type Baz;
+}
+
+struct Foo;
+
+mod t2 {
+ pub trait T2 {
+ type Bar;
+ }
+}
+
+use t2::T2;
+
+impl T2 for Foo {
+ type Bar = String;
+}
+
+impl T1 for Foo {
+ type Bar = <Foo as t2::T2>::Ba$0r;
+ // ^^^ unresolvedReference
+}
+ "#,
+ expect![[r#"
+*Bar*
+
+```rust
+test::t2
+```
+
+```rust
+pub type Bar
+```
+"#]],
+ );
+}
+#[test]
+fn hover_generic_assoc() {
+ check(
+ r#"
+fn foo<T: A>() where T::Assoc$0: {}
+
+trait A {
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+ check(
+ r#"
+fn foo<T: A>() {
+ let _: <T>::Assoc$0;
+}
+
+trait A {
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+ check(
+ r#"
+trait A where
+ Self::Assoc$0: ,
+{
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn string_shadowed_with_inner_items() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc
+
+/// Custom `String` type.
+struct String;
+
+fn f() {
+ let _: String$0;
+
+ fn inner() {}
+}
+
+//- /alloc.rs crate:alloc
+#[prelude_import]
+pub use string::*;
+
+mod string {
+ /// This is `alloc::String`.
+ pub struct String;
+}
+"#,
+ expect![[r#"
+ *String*
+
+ ```rust
+ main
+ ```
+
+ ```rust
+ struct String
+ ```
+
+ ---
+
+ Custom `String` type.
+ "#]],
+ )
+}
+
+#[test]
+fn function_doesnt_shadow_crate_in_use_tree() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo$0::{foo};
+
+//- /foo.rs crate:foo
+pub fn foo() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate foo
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_feature() {
+ check(
+ r#"#![feature(box_syntax$0)]"#,
+ expect![[r##"
+ *box_syntax*
+ ```
+ box_syntax
+ ```
+ ___
+
+ # `box_syntax`
+
+ The tracking issue for this feature is: [#49733]
+
+ [#49733]: https://github.com/rust-lang/rust/issues/49733
+
+ See also [`box_patterns`](box-patterns.md)
+
+ ------------------------
+
+ Currently the only stable way to create a `Box` is via the `Box::new` method.
+ Also it is not possible in stable Rust to destructure a `Box` in a match
+ pattern. The unstable `box` keyword can be used to create a `Box`. An example
+ usage would be:
+
+ ```rust
+ #![feature(box_syntax)]
+
+ fn main() {
+ let b = box 5;
+ }
+ ```
+
+ "##]],
+ )
+}
+
+#[test]
+fn hover_lint() {
+ check(
+ r#"#![allow(arithmetic_overflow$0)]"#,
+ expect![[r#"
+ *arithmetic_overflow*
+ ```
+ arithmetic_overflow
+ ```
+ ___
+
+ arithmetic operation overflows
+ "#]],
+ )
+}
+
+#[test]
+fn hover_clippy_lint() {
+ check(
+ r#"#![allow(clippy::almost_swapped$0)]"#,
+ expect![[r#"
+ *almost_swapped*
+ ```
+ clippy::almost_swapped
+ ```
+ ___
+
+ Checks for `foo = bar; bar = foo` sequences.
+ "#]],
+ )
+}
+
+#[test]
+fn hover_attr_path_qualifier() {
+ check(
+ r#"
+//- /foo.rs crate:foo
+
+//- /lib.rs crate:main.rs deps:foo
+#[fo$0o::bar()]
+struct Foo;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate foo
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_rename() {
+ check(
+ r#"
+use self as foo$0;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+ check(
+ r#"
+mod bar {}
+use bar::{self as foo$0};
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod bar
+ ```
+ "#]],
+ );
+ check(
+ r#"
+mod bar {
+ use super as foo$0;
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+ check(
+ r#"
+use crate as foo$0;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_attribute_in_macro() {
+ check(
+ r#"
+//- minicore:derive
+macro_rules! identity {
+ ($struct:item) => {
+ $struct
+ };
+}
+#[rustc_builtin_macro]
+pub macro Copy {}
+identity!{
+ #[derive(Copy$0)]
+ struct Foo;
+}
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_derive_input() {
+ check(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+pub macro Copy {}
+#[derive(Copy$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo::Copy$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test::foo
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_range_math() {
+ check_hover_range(
+ r#"
+fn f() { let expr = $01 + 2 * 3$0 }
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr = 1 $0+ 2 * $03 }
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr = 1 + $02 * 3$0 }
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_arrays() {
+ check_hover_range(
+ r#"
+fn f() { let expr = $0[1, 2, 3, 4]$0 }
+"#,
+ expect![[r#"
+ ```rust
+ [i32; 4]
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr = [1, 2, $03, 4]$0 }
+"#,
+ expect![[r#"
+ ```rust
+ [i32; 4]
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr = [1, 2, $03$0, 4] }
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_functions() {
+ check_hover_range(
+ r#"
+fn f<T>(a: &[T]) { }
+fn b() { $0f$0(&[1, 2, 3, 4, 5]); }
+"#,
+ expect![[r#"
+ ```rust
+ fn f<i32>(&[i32])
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f<T>(a: &[T]) { }
+fn b() { f($0&[1, 2, 3, 4, 5]$0); }
+"#,
+ expect![[r#"
+ ```rust
+ &[i32; 5]
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_shows_nothing_when_invalid() {
+ check_hover_range_no_results(
+ r#"
+fn f<T>(a: &[T]) { }
+fn b()$0 { f(&[1, 2, 3, 4, 5]); }$0
+"#,
+ );
+
+ check_hover_range_no_results(
+ r#"
+fn f<T>$0(a: &[T]) { }
+fn b() { f(&[1, 2, 3,$0 4, 5]); }
+"#,
+ );
+
+ check_hover_range_no_results(
+ r#"
+fn $0f() { let expr = [1, 2, 3, 4]$0 }
+"#,
+ );
+}
+
+#[test]
+fn hover_range_shows_unit_for_statements() {
+ check_hover_range(
+ r#"
+fn f<T>(a: &[T]) { }
+fn b() { $0f(&[1, 2, 3, 4, 5]); }$0
+"#,
+ expect![[r#"
+ ```rust
+ ()
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr$0 = $0[1, 2, 3, 4] }
+"#,
+ expect![[r#"
+ ```rust
+ ()
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_for_pat() {
+ check_hover_range(
+ r#"
+fn foo() {
+ let $0x$0 = 0;
+}
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn foo() {
+ let $0x$0 = "";
+}
+"#,
+ expect![[r#"
+ ```rust
+ &str
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_shows_coercions_if_applicable_expr() {
+ check_hover_range(
+ r#"
+fn foo() {
+ let x: &u32 = $0&&&&&0$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Type: &&&&&u32
+ Coerced to: &u32
+ ```
+ "#]],
+ );
+ check_hover_range(
+ r#"
+fn foo() {
+ let x: *const u32 = $0&0$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Type: &u32
+ Coerced to: *const u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_range_shows_type_actions() {
+ check_actions(
+ r#"
+struct Foo;
+fn foo() {
+ let x: &Foo = $0&&&&&Foo$0;
+}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Foo",
+ kind: Struct,
+ description: "struct Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn hover_try_expr_res() {
+ check_hover_range(
+ r#"
+//- minicore:result
+struct FooError;
+
+fn foo() -> Result<(), FooError> {
+ Ok($0Result::<(), FooError>::Ok(())?$0)
+}
+"#,
+ expect![[r#"
+ ```rust
+ ()
+ ```"#]],
+ );
+ check_hover_range(
+ r#"
+//- minicore:result
+struct FooError;
+struct BarError;
+
+fn foo() -> Result<(), FooError> {
+ Ok($0Result::<(), BarError>::Ok(())?$0)
+}
+"#,
+ expect![[r#"
+ ```text
+ Try Error Type: BarError
+ Propagated as: FooError
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_try_expr() {
+ check_hover_range(
+ r#"
+struct NotResult<T, U>(T, U);
+struct Short;
+struct Looooong;
+
+fn foo() -> NotResult<(), Looooong> {
+ $0NotResult((), Short)?$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Try Target Type: NotResult<(), Short>
+ Propagated as: NotResult<(), Looooong>
+ ```
+ "#]],
+ );
+ check_hover_range(
+ r#"
+struct NotResult<T, U>(T, U);
+struct Short;
+struct Looooong;
+
+fn foo() -> NotResult<(), Short> {
+ $0NotResult((), Looooong)?$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Try Target Type: NotResult<(), Looooong>
+ Propagated as: NotResult<(), Short>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_try_expr_option() {
+ cov_mark::check!(hover_try_expr_opt_opt);
+ check_hover_range(
+ r#"
+//- minicore: option, try
+
+fn foo() -> Option<()> {
+ $0Some(0)?$0;
+ None
+}
+"#,
+ expect![[r#"
+ ```rust
+ <Option<i32> as Try>::Output
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_deref_expr() {
+ check_hover_range(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct DerefExample<T> {
+ value: T
+}
+
+impl<T> Deref for DerefExample<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.value
+ }
+}
+
+fn foo() {
+ let x = DerefExample { value: 0 };
+ let y: i32 = $0*x$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Dereferenced from: DerefExample<i32>
+ To type: i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_deref_expr_with_coercion() {
+ check_hover_range(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct DerefExample<T> {
+ value: T
+}
+
+impl<T> Deref for DerefExample<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.value
+ }
+}
+
+fn foo() {
+ let x = DerefExample { value: &&&&&0 };
+ let y: &i32 = $0*x$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Dereferenced from: DerefExample<&&&&&i32>
+ To type: &&&&&i32
+ Coerced to: &i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_intra_in_macro() {
+ check(
+ r#"
+macro_rules! foo_macro {
+ ($(#[$attr:meta])* $name:ident) => {
+ $(#[$attr])*
+ pub struct $name;
+ }
+}
+
+foo_macro!(
+ /// Doc comment for [`Foo$0`]
+ Foo
+);
+"#,
+ expect![[r#"
+ *[`Foo`]*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Foo
+ ```
+
+ ---
+
+ Doc comment for [`Foo`](https://docs.rs/test/*/test/struct.Foo.html)
+ "#]],
+ );
+}
+
+#[test]
+fn hover_intra_in_attr() {
+ check(
+ r#"
+#[doc = "Doc comment for [`Foo$0`]"]
+pub struct Foo;
+"#,
+ expect![[r#"
+ *[`Foo`]*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Foo
+ ```
+
+ ---
+
+ Doc comment for [`Foo`](https://docs.rs/test/*/test/struct.Foo.html)
+ "#]],
+ );
+}
+
+#[test]
+fn hover_inert_attr() {
+ check(
+ r#"
+#[doc$0 = ""]
+pub struct Foo;
+"#,
+ expect![[r##"
+ *doc*
+
+ ```rust
+ #[doc]
+ ```
+
+ ---
+
+ Valid forms are:
+
+ * \#\[doc(hidden|inline|...)\]
+ * \#\[doc = string\]
+ "##]],
+ );
+ check(
+ r#"
+#[allow$0()]
+pub struct Foo;
+"#,
+ expect![[r##"
+ *allow*
+
+ ```rust
+ #[allow]
+ ```
+
+ ---
+
+ Valid forms are:
+
+ * \#\[allow(lint1, lint2, ..., /\*opt\*/ reason = "...")\]
+ "##]],
+ );
+}
+
+#[test]
+fn hover_dollar_crate() {
+ // $crate should be resolved to the right crate name.
+
+ check(
+ r#"
+//- /main.rs crate:main deps:dep
+dep::m!(KONST$0);
+//- /dep.rs crate:dep
+#[macro_export]
+macro_rules! m {
+ ( $name:ident ) => { const $name: $crate::Type = $crate::Type; };
+}
+
+pub struct Type;
+"#,
+ expect![[r#"
+ *KONST*
+
+ ```rust
+ main
+ ```
+
+ ```rust
+ const KONST: dep::Type = $crate::Type
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_record_variant() {
+ check(
+ r#"
+enum Enum {
+ RecordV$0 { field: u32 }
+}
+"#,
+ expect![[r#"
+ *RecordV*
+
+ ```rust
+ test::Enum
+ ```
+
+ ```rust
+ RecordV { field: u32 }
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_trait_impl_assoc_item_def_doc_forwarding() {
+ check(
+ r#"
+trait T {
+ /// Trait docs
+ fn func() {}
+}
+impl T for () {
+ fn func$0() {}
+}
+"#,
+ expect![[r#"
+ *func*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn func()
+ ```
+
+ ---
+
+ Trait docs
+ "#]],
+ );
+}
+
+#[test]
+fn hover_ranged_macro_call() {
+ check_hover_range(
+ r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
+}
+macro_rules! vec {
+ ($elem:expr) => {
+ __rust_force_expr!($elem)
+ };
+}
+
+struct Struct;
+impl Struct {
+ fn foo(self) {}
+}
+
+fn f() {
+ $0vec![Struct]$0;
+}
+"#,
+ expect![[r#"
+ ```rust
+ Struct
+ ```"#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
new file mode 100644
index 000000000..5aae669aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -0,0 +1,2818 @@
+use either::Either;
+use hir::{known, Callable, HasVisibility, HirDisplay, Mutability, Semantics, TypeInfo};
+use ide_db::{
+ base_db::FileRange, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty, FxHashMap,
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{self, AstNode, HasArgList, HasGenericParams, HasName, UnaryOp},
+ match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+ TextSize, T,
+};
+
+use crate::FileId;
+
/// Which inlay hints to compute and how to render them; mirrors the
/// user-facing editor configuration.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct InlayHintsConfig {
    // Not read in this file; presumably controls whether the client-facing
    // label carries a `:` separator — confirm in the LSP conversion layer.
    pub render_colons: bool,
    /// Show type hints on identifier patterns (`bind_pat_hints`).
    pub type_hints: bool,
    /// Show parameter-name hints at call sites (`param_name_hints`).
    pub parameter_hints: bool,
    /// Show type hints on expressions that continue a multi-line method chain.
    pub chaining_hints: bool,
    /// When to show compiler-inserted reborrow hints (`&*` / `&mut *`).
    pub reborrow_hints: ReborrowHints,
    /// When to show return-type hints on closures.
    pub closure_return_type_hints: ClosureReturnTypeHints,
    /// Show inferred binding modes (`&`, `&mut`, `ref`, `ref mut`) on patterns.
    pub binding_mode_hints: bool,
    /// When to show hints for elided lifetimes in function signatures.
    pub lifetime_elision_hints: LifetimeElisionHints,
    /// Name elided-lifetime hints after the parameter (`'self`, `'name`)
    /// instead of numbering them (`'0`, `'1`, …).
    pub param_names_for_lifetime_elision_hints: bool,
    /// Hide type hints whose initializer is a constructor spelling the same
    /// name as the type (e.g. `let x = Foo::new()`).
    pub hide_named_constructor_hints: bool,
    /// Hide type hints for bindings initialized with a block-bodied closure.
    pub hide_closure_initialization_hints: bool,
    /// If set, truncate rendered types longer than this many characters.
    pub max_length: Option<usize>,
    /// If set, label the closing brace of items spanning at least this many lines.
    pub closing_brace_hints_min_lines: Option<usize>,
}
+
/// When to hint the return type of closures that don't spell one out.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ClosureReturnTypeHints {
    /// Hint every closure without a written return type.
    Always,
    /// Only hint closures with a block body (`|| { … }`).
    WithBlock,
    /// Never show closure return-type hints.
    Never,
}
+
/// When to hint elided lifetimes in signatures, statics and consts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum LifetimeElisionHints {
    /// Hint all elided lifetimes, including implicit `'static` on statics/consts.
    Always,
    /// Skip signatures where only the trivial output-lifetime elision applies.
    SkipTrivial,
    /// Never show lifetime hints.
    Never,
}
+
/// When to hint compiler-inserted reborrows of references.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ReborrowHints {
    /// Hint both shared (`&*`) and mutable (`&mut *`) reborrows.
    Always,
    /// Only hint mutable (`&mut *`) reborrows.
    MutableOnly,
    /// Never show reborrow hints.
    Never,
}
+
/// Discriminates which producer emitted a hint. Not read in this file;
/// presumably the LSP layer keys placement and styling off it — confirm at
/// the call sites outside this module.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InlayKind {
    BindingModeHint,
    ChainingHint,
    ClosingBraceHint,
    ClosureReturnTypeHint,
    GenericParamListHint,
    ImplicitReborrowHint,
    LifetimeHint,
    ParameterHint,
    TypeHint,
}
+
/// A single inlay hint produced by [`inlay_hints`].
#[derive(Debug)]
pub struct InlayHint {
    /// The source range the hint is anchored to.
    pub range: TextRange,
    /// Which producer emitted this hint.
    pub kind: InlayKind,
    /// The text rendered by the editor.
    pub label: String,
    /// Optional tooltip shown when hovering the hint.
    pub tooltip: Option<InlayTooltip>,
}
+
/// Tooltip contents for an inlay hint.
#[derive(Debug)]
pub enum InlayTooltip {
    /// A fixed plain-text tooltip.
    String(String),
    /// Tooltip to be resolved by hovering the given range (handled outside
    /// this module — presumably the hover engine).
    HoverRanged(FileId, TextRange),
    /// Tooltip to be resolved by hovering the given offset.
    HoverOffset(FileId, TextSize),
}
+
// Feature: Inlay Hints
//
// rust-analyzer shows additional information inline with the source code.
// Editors usually render this using read-only virtual text snippets interspersed with code.
//
// rust-analyzer by default shows hints for
//
// * types of local variables
// * names of function arguments
// * types of chained expressions
//
// Optionally, one can enable additional hints for
//
// * return types of closure expressions
// * elided lifetimes
// * compiler inserted reborrows
//
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Toggle inlay hints**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
pub(crate) fn inlay_hints(
    db: &RootDatabase,
    file_id: FileId,
    range_limit: Option<FileRange>,
    config: &InlayHintsConfig,
) -> Vec<InlayHint> {
    let _p = profile::span("inlay_hints");
    let sema = Semantics::new(db);
    let file = sema.parse(file_id);
    let file = file.syntax();

    let mut acc = Vec::new();

    if let Some(scope) = sema.scope(&file) {
        let famous_defs = FamousDefs(&sema, scope.krate());

        let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
        match range_limit {
            // When a range limit is given, only visit descendants of the
            // covering node that actually intersect the range.
            Some(FileRange { range, .. }) => match file.covering_element(range) {
                // A bare token can't contain any hint-producing nodes.
                NodeOrToken::Token(_) => return acc,
                NodeOrToken::Node(n) => n
                    .descendants()
                    .filter(|descendant| range.intersect(descendant.text_range()).is_some())
                    .for_each(hints),
            },
            None => file.descendants().for_each(hints),
        };
    }

    acc
}
+
/// Dispatches a single syntax node to every hint producer that applies to it.
fn hints(
    hints: &mut Vec<InlayHint>,
    famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
    config: &InlayHintsConfig,
    file_id: FileId,
    node: SyntaxNode,
) {
    // Closing-brace hints apply to several node kinds, so try them unconditionally.
    closing_brace_hints(hints, sema, config, file_id, node.clone());
    match_ast! {
        match node {
            ast::Expr(expr) => {
                chaining_hints(hints, sema, &famous_defs, config, file_id, &expr);
                match expr {
                    ast::Expr::CallExpr(it) => param_name_hints(hints, sema, config, ast::Expr::from(it)),
                    ast::Expr::MethodCallExpr(it) => {
                        param_name_hints(hints, sema, config, ast::Expr::from(it))
                    }
                    ast::Expr::ClosureExpr(it) => closure_ret_hints(hints, sema, &famous_defs, config, file_id, it),
                    // We could show reborrows for all expressions, but usually that is just noise to the user
                    // and the main point here is to show why "moving" a mutable reference doesn't necessarily move it
                    ast::Expr::PathExpr(_) => reborrow_hints(hints, sema, config, &expr),
                    _ => None,
                }
            },
            ast::Pat(it) => {
                binding_mode_hints(hints, sema, config, &it);
                if let ast::Pat::IdentPat(it) = it {
                    bind_pat_hints(hints, sema, config, file_id, &it);
                }
                Some(())
            },
            ast::Item(it) => match it {
                // FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints
                ast::Item::Impl(_) => None,
                ast::Item::Fn(it) => fn_lifetime_fn_hints(hints, config, it),
                // static type elisions
                ast::Item::Static(it) => implicit_static_hints(hints, config, Either::Left(it)),
                ast::Item::Const(it) => implicit_static_hints(hints, config, Either::Right(it)),
                _ => None,
            },
            // FIXME: fn-ptr type, dyn fn type, and trait object type elisions
            ast::Type(_) => None,
            _ => None,
        }
    };
}
+
/// Emits a hint after the closing `}` (or trailing `;`) of a long item,
/// naming what it closes (`impl … for …`, `trait …`, `mod …`, `fn …`,
/// `static …`, `const …`, or a `macro!` invocation), so the reader doesn't
/// have to scroll back to find out.
fn closing_brace_hints(
    acc: &mut Vec<InlayHint>,
    sema: &Semantics<'_, RootDatabase>,
    config: &InlayHintsConfig,
    file_id: FileId,
    node: SyntaxNode,
) -> Option<()> {
    let min_lines = config.closing_brace_hints_min_lines?;

    // Tooltip anchor: the offset of the item's name, so hovering the hint
    // hovers the declaration.
    let name = |it: ast::Name| it.syntax().text_range().start();

    let mut closing_token;
    let (label, name_offset) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) {
        closing_token = item_list.r_curly_token()?;

        let parent = item_list.syntax().parent()?;
        match_ast! {
            match parent {
                ast::Impl(imp) => {
                    let imp = sema.to_def(&imp)?;
                    let ty = imp.self_ty(sema.db);
                    let trait_ = imp.trait_(sema.db);

                    (match trait_ {
                        Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)),
                        None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)),
                    }, None)
                },
                ast::Trait(tr) => {
                    (format!("trait {}", tr.name()?), tr.name().map(name))
                },
                _ => return None,
            }
        }
    } else if let Some(list) = ast::ItemList::cast(node.clone()) {
        closing_token = list.r_curly_token()?;

        let module = ast::Module::cast(list.syntax().parent()?)?;
        (format!("mod {}", module.name()?), module.name().map(name))
    } else if let Some(block) = ast::BlockExpr::cast(node.clone()) {
        closing_token = block.stmt_list()?.r_curly_token()?;

        let parent = block.syntax().parent()?;
        match_ast! {
            match parent {
                ast::Fn(it) => {
                    // FIXME: this could include parameters, but `HirDisplay` prints too much info
                    // and doesn't respect the max length either, so the hints end up way too long
                    (format!("fn {}", it.name()?), it.name().map(name))
                },
                ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
                ast::Const(it) => {
                    if it.underscore_token().is_some() {
                        ("const _".into(), None)
                    } else {
                        (format!("const {}", it.name()?), it.name().map(name))
                    }
                },
                _ => return None,
            }
        }
    } else if let Some(mac) = ast::MacroCall::cast(node.clone()) {
        // Only macro calls that end in `;` or `}` get a closing hint.
        let last_token = mac.syntax().last_token()?;
        if last_token.kind() != T![;] && last_token.kind() != SyntaxKind::R_CURLY {
            return None;
        }
        closing_token = last_token;

        (
            format!("{}!", mac.path()?),
            mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range().start()),
        )
    } else {
        return None;
    };

    if let Some(mut next) = closing_token.next_token() {
        // Prefer attaching the hint after a trailing `;` (e.g. `};`).
        if next.kind() == T![;] {
            if let Some(tok) = next.next_token() {
                closing_token = next;
                next = tok;
            }
        }
        if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) {
            // Only display the hint if the `}` is the last token on the line
            return None;
        }
    }

    // Skip short items: count the newlines spanned by the node.
    let mut lines = 1;
    node.text().for_each_chunk(|s| lines += s.matches('\n').count());
    if lines < min_lines {
        return None;
    }

    acc.push(InlayHint {
        range: closing_token.text_range(),
        kind: InlayKind::ClosingBraceHint,
        label,
        tooltip: name_offset.map(|it| InlayTooltip::HoverOffset(file_id, it)),
    });

    // The return value is only used for `?`-style early exits; callers ignore it.
    None
}
+
+fn implicit_static_hints(
+ acc: &mut Vec<InlayHint>,
+ config: &InlayHintsConfig,
+ statik_or_const: Either<ast::Static, ast::Const>,
+) -> Option<()> {
+ if config.lifetime_elision_hints != LifetimeElisionHints::Always {
+ return None;
+ }
+
+ if let Either::Right(it) = &statik_or_const {
+ if ast::AssocItemList::can_cast(
+ it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()),
+ ) {
+ return None;
+ }
+ }
+
+ if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) {
+ if ty.lifetime().is_none() {
+ let t = ty.amp_token()?;
+ acc.push(InlayHint {
+ range: t.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label: "'static".to_owned(),
+ tooltip: Some(InlayTooltip::String("Elided static lifetime".into())),
+ });
+ }
+ }
+
+ Some(())
+}
+
+fn fn_lifetime_fn_hints(
+ acc: &mut Vec<InlayHint>,
+ config: &InlayHintsConfig,
+ func: ast::Fn,
+) -> Option<()> {
+ if config.lifetime_elision_hints == LifetimeElisionHints::Never {
+ return None;
+ }
+
+ let mk_lt_hint = |t: SyntaxToken, label| InlayHint {
+ range: t.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label,
+ tooltip: Some(InlayTooltip::String("Elided lifetime".into())),
+ };
+
+ let param_list = func.param_list()?;
+ let generic_param_list = func.generic_param_list();
+ let ret_type = func.ret_type();
+ let self_param = param_list.self_param().filter(|it| it.amp_token().is_some());
+
+ let is_elided = |lt: &Option<ast::Lifetime>| match lt {
+ Some(lt) => matches!(lt.text().as_str(), "'_"),
+ None => true,
+ };
+
+ let potential_lt_refs = {
+ let mut acc: Vec<_> = vec![];
+ if let Some(self_param) = &self_param {
+ let lifetime = self_param.lifetime();
+ let is_elided = is_elided(&lifetime);
+ acc.push((None, self_param.amp_token(), lifetime, is_elided));
+ }
+ param_list.params().filter_map(|it| Some((it.pat(), it.ty()?))).for_each(|(pat, ty)| {
+ // FIXME: check path types
+ walk_ty(&ty, &mut |ty| match ty {
+ ast::Type::RefType(r) => {
+ let lifetime = r.lifetime();
+ let is_elided = is_elided(&lifetime);
+ acc.push((
+ pat.as_ref().and_then(|it| match it {
+ ast::Pat::IdentPat(p) => p.name(),
+ _ => None,
+ }),
+ r.amp_token(),
+ lifetime,
+ is_elided,
+ ))
+ }
+ _ => (),
+ })
+ });
+ acc
+ };
+
+ // allocate names
+ let mut gen_idx_name = {
+ let mut gen = (0u8..).map(|idx| match idx {
+ idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]),
+ idx => format!("'{idx}").into(),
+ });
+ move || gen.next().unwrap_or_default()
+ };
+ let mut allocated_lifetimes = vec![];
+
+ let mut used_names: FxHashMap<SmolStr, usize> =
+ match config.param_names_for_lifetime_elision_hints {
+ true => generic_param_list
+ .iter()
+ .flat_map(|gpl| gpl.lifetime_params())
+ .filter_map(|param| param.lifetime())
+ .filter_map(|lt| Some((SmolStr::from(lt.text().as_str().get(1..)?), 0)))
+ .collect(),
+ false => Default::default(),
+ };
+ {
+ let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided);
+ if let Some(_) = &self_param {
+ if let Some(_) = potential_lt_refs.next() {
+ allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
+ // self can't be used as a lifetime, so no need to check for collisions
+ "'self".into()
+ } else {
+ gen_idx_name()
+ });
+ }
+ }
+ potential_lt_refs.for_each(|(name, ..)| {
+ let name = match name {
+ Some(it) if config.param_names_for_lifetime_elision_hints => {
+ if let Some(c) = used_names.get_mut(it.text().as_str()) {
+ *c += 1;
+ SmolStr::from(format!("'{text}{c}", text = it.text().as_str()))
+ } else {
+ used_names.insert(it.text().as_str().into(), 0);
+ SmolStr::from_iter(["\'", it.text().as_str()])
+ }
+ }
+ _ => gen_idx_name(),
+ };
+ allocated_lifetimes.push(name);
+ });
+ }
+
+ // fetch output lifetime if elision rule applies
+ let output = match potential_lt_refs.as_slice() {
+ [(_, _, lifetime, _), ..] if self_param.is_some() || potential_lt_refs.len() == 1 => {
+ match lifetime {
+ Some(lt) => match lt.text().as_str() {
+ "'_" => allocated_lifetimes.get(0).cloned(),
+ "'static" => None,
+ name => Some(name.into()),
+ },
+ None => allocated_lifetimes.get(0).cloned(),
+ }
+ }
+ [..] => None,
+ };
+
+ if allocated_lifetimes.is_empty() && output.is_none() {
+ return None;
+ }
+
+ // apply hints
+ // apply output if required
+ let mut is_trivial = true;
+ if let (Some(output_lt), Some(r)) = (&output, ret_type) {
+ if let Some(ty) = r.ty() {
+ walk_ty(&ty, &mut |ty| match ty {
+ ast::Type::RefType(ty) if ty.lifetime().is_none() => {
+ if let Some(amp) = ty.amp_token() {
+ is_trivial = false;
+ acc.push(mk_lt_hint(amp, output_lt.to_string()));
+ }
+ }
+ _ => (),
+ })
+ }
+ }
+
+ if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial {
+ return None;
+ }
+
+ let mut a = allocated_lifetimes.iter();
+ for (_, amp_token, _, is_elided) in potential_lt_refs {
+ if is_elided {
+ let t = amp_token?;
+ let lt = a.next()?;
+ acc.push(mk_lt_hint(t, lt.to_string()));
+ }
+ }
+
+ // generate generic param list things
+ match (generic_param_list, allocated_lifetimes.as_slice()) {
+ (_, []) => (),
+ (Some(gpl), allocated_lifetimes) => {
+ let angle_tok = gpl.l_angle_token()?;
+ let is_empty = gpl.generic_params().next().is_none();
+ acc.push(InlayHint {
+ range: angle_tok.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label: format!(
+ "{}{}",
+ allocated_lifetimes.iter().format(", "),
+ if is_empty { "" } else { ", " }
+ ),
+ tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
+ });
+ }
+ (None, allocated_lifetimes) => acc.push(InlayHint {
+ range: func.name()?.syntax().text_range(),
+ kind: InlayKind::GenericParamListHint,
+ label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(),
+ tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
+ }),
+ }
+ Some(())
+}
+
+fn closure_ret_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ famous_defs: &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ closure: ast::ClosureExpr,
+) -> Option<()> {
+ if config.closure_return_type_hints == ClosureReturnTypeHints::Never {
+ return None;
+ }
+
+ if closure.ret_type().is_some() {
+ return None;
+ }
+
+ if !closure_has_block_body(&closure)
+ && config.closure_return_type_hints == ClosureReturnTypeHints::WithBlock
+ {
+ return None;
+ }
+
+ let param_list = closure.param_list()?;
+
+ let closure = sema.descend_node_into_attributes(closure.clone()).pop()?;
+ let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure))?.adjusted();
+ let callable = ty.as_callable(sema.db)?;
+ let ty = callable.return_type();
+ if ty.is_unit() {
+ return None;
+ }
+ acc.push(InlayHint {
+ range: param_list.syntax().text_range(),
+ kind: InlayKind::ClosureReturnTypeHint,
+ label: hint_iterator(sema, &famous_defs, config, &ty)
+ .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string()),
+ tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())),
+ });
+ Some(())
+}
+
+fn reborrow_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ expr: &ast::Expr,
+) -> Option<()> {
+ if config.reborrow_hints == ReborrowHints::Never {
+ return None;
+ }
+
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let desc_expr = descended.as_ref().unwrap_or(expr);
+ let mutability = sema.is_implicit_reborrow(desc_expr)?;
+ let label = match mutability {
+ hir::Mutability::Shared if config.reborrow_hints != ReborrowHints::MutableOnly => "&*",
+ hir::Mutability::Mut => "&mut *",
+ _ => return None,
+ };
+ acc.push(InlayHint {
+ range: expr.syntax().text_range(),
+ kind: InlayKind::ImplicitReborrowHint,
+ label: label.to_string(),
+ tooltip: Some(InlayTooltip::String("Compiler inserted reborrow".into())),
+ });
+ Some(())
+}
+
/// Hints the type of an expression that ends a line inside a multi-line
/// method chain — an expression whose following sibling tokens are a newline
/// and then a `.`.
fn chaining_hints(
    acc: &mut Vec<InlayHint>,
    sema: &Semantics<'_, RootDatabase>,
    famous_defs: &FamousDefs<'_, '_>,
    config: &InlayHintsConfig,
    file_id: FileId,
    expr: &ast::Expr,
) -> Option<()> {
    if !config.chaining_hints {
        return None;
    }

    // Record literals at the head of a chain would just repeat their own name.
    if matches!(expr, ast::Expr::RecordExpr(_)) {
        return None;
    }

    // Query semantics on the attribute-macro-expanded node when one exists.
    let descended = sema.descend_node_into_attributes(expr.clone()).pop();
    let desc_expr = descended.as_ref().unwrap_or(expr);

    // Following sibling tokens, with single-line whitespace and comments removed.
    let mut tokens = expr
        .syntax()
        .siblings_with_tokens(Direction::Next)
        .filter_map(NodeOrToken::into_token)
        .filter(|t| match t.kind() {
            SyntaxKind::WHITESPACE if !t.text().contains('\n') => false,
            SyntaxKind::COMMENT => false,
            _ => true,
        });

    // Chaining can be defined as an expression whose next sibling tokens are newline and dot
    // Ignoring extra whitespace and comments
    let next = tokens.next()?.kind();
    if next == SyntaxKind::WHITESPACE {
        let mut next_next = tokens.next()?.kind();
        while next_next == SyntaxKind::WHITESPACE {
            next_next = tokens.next()?.kind();
        }
        if next_next == T![.] {
            let ty = sema.type_of_expr(desc_expr)?.original;
            if ty.is_unknown() {
                return None;
            }
            // A bare path to a fieldless struct would just repeat the name.
            if matches!(expr, ast::Expr::PathExpr(_)) {
                if let Some(hir::Adt::Struct(st)) = ty.as_adt() {
                    if st.fields(sema.db).is_empty() {
                        return None;
                    }
                }
            }
            acc.push(InlayHint {
                range: expr.syntax().text_range(),
                kind: InlayKind::ChainingHint,
                // Prefer the `impl Iterator<Item = …>` rendering for core iterators.
                label: hint_iterator(sema, &famous_defs, config, &ty).unwrap_or_else(|| {
                    ty.display_truncated(sema.db, config.max_length).to_string()
                }),
                tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())),
            });
        }
    }
    Some(())
}
+
/// Adds parameter-name hints before the arguments of a call/method call,
/// skipping arguments where the name would be redundant
/// (see [`should_hide_param_name_hint`]).
fn param_name_hints(
    acc: &mut Vec<InlayHint>,
    sema: &Semantics<'_, RootDatabase>,
    config: &InlayHintsConfig,
    expr: ast::Expr,
) -> Option<()> {
    if !config.parameter_hints {
        return None;
    }

    let (callable, arg_list) = get_callable(sema, &expr)?;
    let hints = callable
        .params(sema.db)
        .into_iter()
        .zip(arg_list.args())
        .filter_map(|((param, _ty), arg)| {
            // Only annotate hints for expressions that exist in the original file
            let range = sema.original_range_opt(arg.syntax())?;
            let (param_name, name_syntax) = match param.as_ref()? {
                Either::Left(pat) => ("self".to_string(), pat.name()),
                Either::Right(pat) => match pat {
                    ast::Pat::IdentPat(it) => (it.name()?.to_string(), it.name()),
                    // Non-identifier patterns (tuples etc.) have no single name to show.
                    _ => return None,
                },
            };
            Some((name_syntax, param_name, arg, range))
        })
        .filter(|(_, param_name, arg, _)| {
            !should_hide_param_name_hint(sema, &callable, param_name, arg)
        })
        .map(|(param, param_name, _, FileRange { range, .. })| {
            // Tooltip points at the parameter's declaration site, when mappable.
            let mut tooltip = None;
            if let Some(name) = param {
                if let hir::CallableKind::Function(f) = callable.kind() {
                    // assert the file is cached so we can map out of macros
                    if let Some(_) = sema.source(f) {
                        tooltip = sema.original_range_opt(name.syntax());
                    }
                }
            }

            InlayHint {
                range,
                kind: InlayKind::ParameterHint,
                label: param_name,
                tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())),
            }
        });

    acc.extend(hints);
    Some(())
}
+
/// Shows inferred binding modes on patterns: `&`/`&mut` for the reference
/// adjustments match ergonomics inserts, and `ref`/`ref mut` for identifier
/// patterns that bind by reference without spelling it out.
fn binding_mode_hints(
    acc: &mut Vec<InlayHint>,
    sema: &Semantics<'_, RootDatabase>,
    config: &InlayHintsConfig,
    pat: &ast::Pat,
) -> Option<()> {
    if !config.binding_mode_hints {
        return None;
    }

    let range = pat.syntax().text_range();
    // One `&`/`&mut` hint per implicit adjustment applied to the pattern.
    sema.pattern_adjustments(&pat).iter().for_each(|ty| {
        let reference = ty.is_reference();
        let mut_reference = ty.is_mutable_reference();
        let r = match (reference, mut_reference) {
            (true, true) => "&mut",
            (true, false) => "&",
            _ => return,
        };
        acc.push(InlayHint {
            range,
            kind: InlayKind::BindingModeHint,
            label: r.to_string(),
            tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
        });
    });
    match pat {
        // Only hint when the user didn't already write `ref`/`mut` themselves.
        ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => {
            let bm = sema.binding_mode_of_pat(pat)?;
            let bm = match bm {
                hir::BindingMode::Move => return None,
                hir::BindingMode::Ref(Mutability::Mut) => "ref mut",
                hir::BindingMode::Ref(Mutability::Shared) => "ref",
            };
            acc.push(InlayHint {
                range,
                kind: InlayKind::BindingModeHint,
                label: bm.to_string(),
                tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
            });
        }
        _ => (),
    }

    Some(())
}
+
/// Adds a type hint after the name of an identifier pattern,
/// e.g. `let x/*: String*/ = …;`, unless configuration or the
/// heuristics in [`should_not_display_type_hint`] suppress it.
fn bind_pat_hints(
    acc: &mut Vec<InlayHint>,
    sema: &Semantics<'_, RootDatabase>,
    config: &InlayHintsConfig,
    file_id: FileId,
    pat: &ast::IdentPat,
) -> Option<()> {
    if !config.type_hints {
        return None;
    }

    // Infer the type on the attribute-macro-expanded pattern when one exists.
    let descended = sema.descend_node_into_attributes(pat.clone()).pop();
    let desc_pat = descended.as_ref().unwrap_or(pat);
    let ty = sema.type_of_pat(&desc_pat.clone().into())?.original;

    if should_not_display_type_hint(sema, config, pat, &ty) {
        return None;
    }

    let krate = sema.scope(desc_pat.syntax())?.krate();
    let famous_defs = FamousDefs(sema, krate);
    // Prefer the `impl Iterator<Item = …>` rendering for core iterator types.
    let label = hint_iterator(sema, &famous_defs, config, &ty);

    let label = match label {
        Some(label) => label,
        None => {
            let ty_name = ty.display_truncated(sema.db, config.max_length).to_string();
            // Optionally hide `let x = Foo::new()`-style hints where the
            // constructor already names the type.
            if config.hide_named_constructor_hints
                && is_named_constructor(sema, pat, &ty_name).is_some()
            {
                return None;
            }
            ty_name
        }
    };

    acc.push(InlayHint {
        // Anchor the hint to the binding's name when present.
        range: match pat.name() {
            Some(name) => name.syntax().text_range(),
            None => pat.syntax().text_range(),
        },
        kind: InlayKind::TypeHint,
        label,
        tooltip: pat
            .name()
            .map(|it| it.syntax().text_range())
            .map(|it| InlayTooltip::HoverRanged(file_id, it)),
    });

    Some(())
}
+
/// Returns `Some(())` when the `let` initializer of `pat` is a "named
/// constructor" — a path, call, or record literal (`Ty::new()`, `Ty(..)`,
/// `Ty { .. }`) whose relevant path segment spells the same text as the
/// inferred type name `ty_name` — so the type hint can be hidden as redundant.
fn is_named_constructor(
    sema: &Semantics<'_, RootDatabase>,
    pat: &ast::IdentPat,
    ty_name: &str,
) -> Option<()> {
    let let_node = pat.syntax().parent()?;
    let expr = match_ast! {
        match let_node {
            ast::LetStmt(it) => it.initializer(),
            ast::LetExpr(it) => it.expr(),
            _ => None,
        }
    }?;

    let expr = sema.descend_node_into_attributes(expr.clone()).pop().unwrap_or(expr);
    // unwrap postfix expressions
    let expr = match expr {
        ast::Expr::TryExpr(it) => it.expr(),
        ast::Expr::AwaitExpr(it) => it.expr(),
        expr => Some(expr),
    }?;
    let expr = match expr {
        ast::Expr::CallExpr(call) => match call.expr()? {
            ast::Expr::PathExpr(path) => path,
            _ => return None,
        },
        ast::Expr::PathExpr(path) => path,
        _ => return None,
    };
    let path = expr.path()?;

    let callable = sema.type_of_expr(&ast::Expr::PathExpr(expr))?.original.as_callable(sema.db);
    let callable_kind = callable.map(|it| it.kind());
    // For `Ty::new()` / `Variant(..)` the type name is the *qualifier*;
    // for a plain path it is the final segment itself.
    let qual_seg = match callable_kind {
        Some(hir::CallableKind::Function(_) | hir::CallableKind::TupleEnumVariant(_)) => {
            path.qualifier()?.segment()
        }
        _ => path.segment(),
    }?;

    let ctor_name = match qual_seg.kind()? {
        ast::PathSegmentKind::Name(name_ref) => {
            // Include generic args so `Vec::<u8>::new()` compares as `Vec<u8>`.
            match qual_seg.generic_arg_list().map(|it| it.generic_args()) {
                Some(generics) => format!("{}<{}>", name_ref, generics.format(", ")),
                None => name_ref.to_string(),
            }
        }
        ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
        _ => return None,
    };
    (ctor_name == ty_name).then(|| ())
}
+
+/// Checks if the type is an Iterator from std::iter and replaces its hint with an `impl Iterator<Item = Ty>`.
+fn hint_iterator(
+ sema: &Semantics<'_, RootDatabase>,
+ famous_defs: &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ ty: &hir::Type,
+) -> Option<String> {
+ let db = sema.db;
+ let strukt = ty.strip_references().as_adt()?;
+ let krate = strukt.module(db).krate();
+ if krate != famous_defs.core()? {
+ return None;
+ }
+ let iter_trait = famous_defs.core_iter_Iterator()?;
+ let iter_mod = famous_defs.core_iter()?;
+
+ // Assert that this struct comes from `core::iter`.
+ if !(strukt.visibility(db) == hir::Visibility::Public
+ && strukt.module(db).path_to_root(db).contains(&iter_mod))
+ {
+ return None;
+ }
+
+ if ty.impls_trait(db, iter_trait, &[]) {
+ let assoc_type_item = iter_trait.items(db).into_iter().find_map(|item| match item {
+ hir::AssocItem::TypeAlias(alias) if alias.name(db) == known::Item => Some(alias),
+ _ => None,
+ })?;
+ if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) {
+ const LABEL_START: &str = "impl Iterator<Item = ";
+ const LABEL_END: &str = ">";
+
+ let ty_display = hint_iterator(sema, famous_defs, config, &ty)
+ .map(|assoc_type_impl| assoc_type_impl.to_string())
+ .unwrap_or_else(|| {
+ ty.display_truncated(
+ db,
+ config
+ .max_length
+ .map(|len| len.saturating_sub(LABEL_START.len() + LABEL_END.len())),
+ )
+ .to_string()
+ });
+ return Some(format!("{}{}{}", LABEL_START, ty_display, LABEL_END));
+ }
+ }
+
+ None
+}
+
+fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {
+ if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() {
+ let pat_text = bind_pat.to_string();
+ enum_data
+ .variants(db)
+ .into_iter()
+ .map(|variant| variant.name(db).to_smol_str())
+ .any(|enum_name| enum_name == pat_text)
+ } else {
+ false
+ }
+}
+
/// Heuristics for suppressing a binding's type hint: unknown types,
/// fieldless structs whose pattern repeats the type name, block-bodied
/// closure initializers (when configured), explicitly-typed `let`s and
/// params, enum-variant patterns in `match`/`let` chains, and `for` loops
/// with a missing or non-informative iterable.
fn should_not_display_type_hint(
    sema: &Semantics<'_, RootDatabase>,
    config: &InlayHintsConfig,
    bind_pat: &ast::IdentPat,
    pat_ty: &hir::Type,
) -> bool {
    let db = sema.db;

    if pat_ty.is_unknown() {
        return true;
    }

    // `let Foo = …` on a fieldless struct `Foo` already names its own type.
    if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() {
        if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() {
            return true;
        }
    }

    // Optionally hide hints for `let f = |…| { … };` — the closure type
    // rendering is rarely useful.
    if config.hide_closure_initialization_hints {
        if let Some(parent) = bind_pat.syntax().parent() {
            if let Some(it) = ast::LetStmt::cast(parent.clone()) {
                if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() {
                    if closure_has_block_body(&closure) {
                        return true;
                    }
                }
            }
        }
    }

    // Walk outwards: the nearest relevant ancestor decides.
    for node in bind_pat.syntax().ancestors() {
        match_ast! {
            match node {
                ast::LetStmt(it) => return it.ty().is_some(),
                // FIXME: We might wanna show type hints in parameters for non-top level patterns as well
                ast::Param(it) => return it.ty().is_some(),
                ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
                ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
                ast::IfExpr(_) => return false,
                ast::WhileExpr(_) => return false,
                ast::ForExpr(it) => {
                    // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit).
                    // Type of expr should be iterable.
                    return it.in_token().is_none() ||
                        it.iterable()
                            .and_then(|iterable_expr| sema.type_of_expr(&iterable_expr))
                            .map(TypeInfo::original)
                            .map_or(true, |iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit())
                },
                _ => (),
            }
        }
    }
    false
}
+
+fn closure_has_block_body(closure: &ast::ClosureExpr) -> bool {
+ matches!(closure.body(), Some(ast::Expr::BlockExpr(_)))
+}
+
/// Decides whether a parameter-name hint at this argument would be redundant.
fn should_hide_param_name_hint(
    sema: &Semantics<'_, RootDatabase>,
    callable: &hir::Callable,
    param_name: &str,
    argument: &ast::Expr,
) -> bool {
    // These are to be tested in the `parameter_hint_heuristics` test
    // hide when:
    // - the parameter name is a suffix of the function's name
    // - the argument is a qualified constructing or call expression where the qualifier is an ADT
    // - exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix
    //   of argument with _ splitting it off
    // - param starts with `ra_fixture`
    // - param is a well known name in a unary function

    let param_name = param_name.trim_start_matches('_');
    if param_name.is_empty() {
        return true;
    }

    // `!expr` arguments always get a hint — the negation makes the name useful.
    if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) {
        return false;
    }

    let fn_name = match callable.kind() {
        hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()),
        _ => None,
    };
    let fn_name = fn_name.as_deref();
    is_param_name_suffix_of_fn_name(param_name, callable, fn_name)
        || is_argument_similar_to_param_name(argument, param_name)
        || param_name.starts_with("ra_fixture")
        || (callable.n_params() == 1 && is_obvious_param(param_name))
        || is_adt_constructor_similar_to_param_name(sema, argument, param_name)
}
+
+fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool {
+ // check whether param_name and argument are the same or
+ // whether param_name is a prefix/suffix of argument(split at `_`)
+ let argument = match get_string_representation(argument) {
+ Some(argument) => argument,
+ None => return false,
+ };
+
+ // std is honestly too panic happy...
+ let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at));
+
+ let param_name = param_name.trim_start_matches('_');
+ let argument = argument.trim_start_matches('_');
+
+ match str_split_at(argument, param_name.len()) {
+ Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => {
+ return rest.is_empty() || rest.starts_with('_');
+ }
+ _ => (),
+ }
+ match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) {
+ Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => {
+ return rest.is_empty() || rest.ends_with('_');
+ }
+ _ => (),
+ }
+ false
+}
+
+/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal.
+///
+/// `fn strip_suffix(suffix)` will be hidden.
+/// `fn stripsuffix(suffix)` will not be hidden.
+fn is_param_name_suffix_of_fn_name(
+ param_name: &str,
+ callable: &Callable,
+ fn_name: Option<&str>,
+) -> bool {
+ match (callable.n_params(), fn_name) {
+ (1, Some(function)) => {
+ function == param_name
+ || function
+ .len()
+ .checked_sub(param_name.len())
+ .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at)))
+ .map_or(false, |(prefix, suffix)| {
+ suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_')
+ })
+ }
+ _ => false,
+ }
+}
+
/// Checks whether the argument is a constructor-like path whose ADT (or
/// qualifying ADT) name, lower-snake-cased, equals the parameter name —
/// e.g. passing `TextRange::new(..)` to a `text_range: …` parameter.
fn is_adt_constructor_similar_to_param_name(
    sema: &Semantics<'_, RootDatabase>,
    argument: &ast::Expr,
    param_name: &str,
) -> bool {
    // Extract the path of a call, bare path, or record literal.
    let path = match argument {
        ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e {
            ast::Expr::PathExpr(p) => p.path(),
            _ => None,
        }),
        ast::Expr::PathExpr(p) => p.path(),
        ast::Expr::RecordExpr(r) => r.path(),
        _ => return false,
    };
    let path = match path {
        Some(it) => it,
        None => return false,
    };
    // Immediately-invoked closure so `?` can be used for the lookups.
    (|| match sema.resolve_path(&path)? {
        hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
            Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name)
        }
        hir::PathResolution::Def(hir::ModuleDef::Function(_) | hir::ModuleDef::Variant(_)) => {
            if to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name {
                return Some(true);
            }
            // Otherwise compare against the ADT qualifier: `Adt::func(..)`.
            let qual = path.qualifier()?;
            match sema.resolve_path(&qual)? {
                hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
                    Some(to_lower_snake_case(&qual.segment()?.name_ref()?.text()) == param_name)
                }
                _ => None,
            }
        }
        _ => None,
    })()
    .unwrap_or(false)
}
+
/// Extracts a best-effort textual "name" for an argument expression, used to
/// compare it against the parameter name. Sees through `clone()`/`as_ref()`
/// calls, references, prefix operators and casts.
fn get_string_representation(expr: &ast::Expr) -> Option<String> {
    match expr {
        ast::Expr::MethodCallExpr(method_call_expr) => {
            let name_ref = method_call_expr.name_ref()?;
            match name_ref.text().as_str() {
                // `x.clone()` / `x.as_ref()` name the same value as `x`.
                "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()),
                name_ref => Some(name_ref.to_owned()),
            }
        }
        ast::Expr::MacroExpr(macro_expr) => {
            Some(macro_expr.macro_call()?.path()?.segment()?.to_string())
        }
        ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()),
        ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()),
        ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?),
        ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?),
        ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?),
        _ => None,
    }
}
+
/// Single-letter names and a handful of ubiquitous std parameter names
/// (`predicate`, `value`, …) carry no information worth hinting.
fn is_obvious_param(param_name: &str) -> bool {
    // avoid displaying hints for common functions like map, filter, etc.
    // or other obvious words used in std
    param_name.len() == 1
        || matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other")
}
+
+fn get_callable(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &ast::Expr,
+) -> Option<(hir::Callable, ast::ArgList)> {
+ match expr {
+ ast::Expr::CallExpr(expr) => {
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let expr = descended.as_ref().unwrap_or(expr);
+ sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list())
+ }
+ ast::Expr::MethodCallExpr(expr) => {
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let expr = descended.as_ref().unwrap_or(expr);
+ sema.resolve_method_call_as_callable(expr).zip(expr.arg_list())
+ }
+ _ => None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+ use syntax::{TextRange, TextSize};
+ use test_utils::extract_annotations;
+
+ use crate::inlay_hints::ReborrowHints;
+ use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints};
+
+ use super::ClosureReturnTypeHints;
+
+ const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig {
+ render_colons: false,
+ type_hints: false,
+ parameter_hints: false,
+ chaining_hints: false,
+ lifetime_elision_hints: LifetimeElisionHints::Never,
+ closure_return_type_hints: ClosureReturnTypeHints::Never,
+ reborrow_hints: ReborrowHints::Always,
+ binding_mode_hints: false,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ param_names_for_lifetime_elision_hints: false,
+ max_length: None,
+ closing_brace_hints_min_lines: None,
+ };
+ const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
+ type_hints: true,
+ parameter_hints: true,
+ chaining_hints: true,
+ reborrow_hints: ReborrowHints::Always,
+ closure_return_type_hints: ClosureReturnTypeHints::WithBlock,
+ binding_mode_hints: true,
+ lifetime_elision_hints: LifetimeElisionHints::Always,
+ ..DISABLED_CONFIG
+ };
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ check_with_config(TEST_CONFIG, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_params(ra_fixture: &str) {
+ check_with_config(
+ InlayHintsConfig { parameter_hints: true, ..DISABLED_CONFIG },
+ ra_fixture,
+ );
+ }
+
+ #[track_caller]
+ fn check_types(ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_chains(ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let mut expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+ let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ let actual = inlay_hints
+ .into_iter()
+ .map(|it| (it.range, it.label.to_string()))
+ .sorted_by_key(|(range, _)| range.start())
+ .collect::<Vec<_>>();
+ expected.sort_by_key(|(range, _)| range.start());
+
+ assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ }
+
+ #[track_caller]
+ fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ expect.assert_debug_eq(&inlay_hints)
+ }
+
+ #[test]
+ fn hints_disabled() {
+ check_with_config(
+ InlayHintsConfig { render_colons: true, ..DISABLED_CONFIG },
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(4, 4);
+}"#,
+ );
+ }
+
+ // Parameter hint tests
+
+ #[test]
+ fn param_hints_only() {
+ check_params(
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(
+ 4,
+ //^ a
+ 4,
+ //^ b
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_hints_on_closure() {
+ check_params(
+ r#"
+fn main() {
+ let clo = |a: u8, b: u8| a + b;
+ clo(
+ 1,
+ //^ a
+ 2,
+ //^ b
+ );
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn param_name_similar_to_fn_name_still_hints() {
+ check_params(
+ r#"
+fn max(x: i32, y: i32) -> i32 { x + y }
+fn main() {
+ let _x = max(
+ 4,
+ //^ x
+ 4,
+ //^ y
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_name_similar_to_fn_name() {
+ check_params(
+ r#"
+fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore }
+fn main() {
+ let _x = param_with_underscore(
+ 4,
+ );
+}"#,
+ );
+ check_params(
+ r#"
+fn param_with_underscore(underscore: i32) -> i32 { underscore }
+fn main() {
+ let _x = param_with_underscore(
+ 4,
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_name_same_as_fn_name() {
+ check_params(
+ r#"
+fn foo(foo: i32) -> i32 { foo }
+fn main() {
+ let _x = foo(
+ 4,
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn never_hide_param_when_multiple_params() {
+ check_params(
+ r#"
+fn foo(foo: i32, bar: i32) -> i32 { bar + baz }
+fn main() {
+ let _x = foo(
+ 4,
+ //^ foo
+ 8,
+ //^ bar
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_hints_look_through_as_ref_and_clone() {
+ check_params(
+ r#"
+fn foo(bar: i32, baz: f32) {}
+
+fn main() {
+ let bar = 3;
+ let baz = &"baz";
+ let fez = 1.0;
+ foo(bar.clone(), bar.clone());
+ //^^^^^^^^^^^ baz
+ foo(bar.as_ref(), bar.as_ref());
+ //^^^^^^^^^^^^ baz
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn self_param_hints() {
+ check_params(
+ r#"
+struct Foo;
+
+impl Foo {
+ fn foo(self: Self) {}
+ fn bar(self: &Self) {}
+}
+
+fn main() {
+ Foo::foo(Foo);
+ //^^^ self
+ Foo::bar(&Foo);
+ //^^^^ self
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn param_name_hints_show_for_literals() {
+ check_params(
+ r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] }
+fn main() {
+ test(
+ 0xa_b,
+ //^^^^^ a
+ 0xa_b,
+ //^^^^^ b
+ );
+}"#,
+ )
+ }
+
+ #[test]
+ fn function_call_parameter_hint() {
+ check_params(
+ r#"
+//- minicore: option
+struct FileId {}
+struct SmolStr {}
+
+struct TextRange {}
+struct SyntaxKind {}
+struct NavigationTarget {}
+
+struct Test {}
+
+impl Test {
+ fn method(&self, mut param: i32) -> i32 { param * 2 }
+
+ fn from_syntax(
+ file_id: FileId,
+ name: SmolStr,
+ focus_range: Option<TextRange>,
+ full_range: TextRange,
+ kind: SyntaxKind,
+ docs: Option<String>,
+ ) -> NavigationTarget {
+ NavigationTarget {}
+ }
+}
+
+fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 {
+ foo + bar
+}
+
+fn main() {
+ let not_literal = 1;
+ let _: i32 = test_func(1, 2, "hello", 3, not_literal);
+ //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last
+ let t: Test = Test {};
+ t.method(123);
+ //^^^ param
+ Test::method(&t, 3456);
+ //^^ self ^^^^ param
+ Test::from_syntax(
+ FileId {},
+ "impl".into(),
+ //^^^^^^^^^^^^^ name
+ None,
+ //^^^^ focus_range
+ TextRange {},
+ //^^^^^^^^^^^^ full_range
+ SyntaxKind {},
+ //^^^^^^^^^^^^^ kind
+ None,
+ //^^^^ docs
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn parameter_hint_heuristics() {
+ check_params(
+ r#"
+fn check(ra_fixture_thing: &str) {}
+
+fn map(f: i32) {}
+fn filter(predicate: i32) {}
+
+fn strip_suffix(suffix: &str) {}
+fn stripsuffix(suffix: &str) {}
+fn same(same: u32) {}
+fn same2(_same2: u32) {}
+
+fn enum_matches_param_name(completion_kind: CompletionKind) {}
+
+fn foo(param: u32) {}
+fn bar(param_eter: u32) {}
+
+enum CompletionKind {
+ Keyword,
+}
+
+fn non_ident_pat((a, b): (u32, u32)) {}
+
+fn main() {
+ const PARAM: u32 = 0;
+ foo(PARAM);
+ foo(!PARAM);
+ // ^^^^^^ param
+ check("");
+
+ map(0);
+ filter(0);
+
+ strip_suffix("");
+ stripsuffix("");
+ //^^ suffix
+ same(0);
+ same2(0);
+
+ enum_matches_param_name(CompletionKind::Keyword);
+
+ let param = 0;
+ foo(param);
+ foo(param as _);
+ let param_end = 0;
+ foo(param_end);
+ let start_param = 0;
+ foo(start_param);
+ let param2 = 0;
+ foo(param2);
+ //^^^^^^ param
+
+ macro_rules! param {
+ () => {};
+ };
+ foo(param!());
+
+ let param_eter = 0;
+ bar(param_eter);
+ let param_eter_end = 0;
+ bar(param_eter_end);
+ let start_param_eter = 0;
+ bar(start_param_eter);
+ let param_eter2 = 0;
+ bar(param_eter2);
+ //^^^^^^^^^^^ param_eter
+
+ non_ident_pat((0, 0));
+}"#,
+ );
+ }
+
+ // Type-Hint tests
+
+ #[test]
+ fn type_hints_only() {
+ check_types(
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(4, 4);
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn type_hints_bindings_after_at() {
+ check_types(
+ r#"
+//- minicore: option
+fn main() {
+ let ref foo @ bar @ ref mut baz = 0;
+ //^^^ &i32
+ //^^^ i32
+ //^^^ &mut i32
+ let [x @ ..] = [0];
+ //^ [i32; 1]
+ if let x @ Some(_) = Some(0) {}
+ //^ Option<i32>
+ let foo @ (bar, baz) = (3, 3);
+ //^^^ (i32, i32)
+ //^^^ i32
+ //^^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn default_generic_types_should_not_be_displayed() {
+ check(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let zz = Test { t: 23u8, k: 33 };
+ //^^ Test<i32>
+ let zz_ref = &zz;
+ //^^^^^^ &Test<i32>
+ let test = || zz;
+ //^^^^ || -> Test<i32>
+}"#,
+ );
+ }
+
+ #[test]
+ fn shorten_iterators_in_associated_params() {
+ check_types(
+ r#"
+//- minicore: iterators
+use core::iter;
+
+pub struct SomeIter<T> {}
+
+impl<T> SomeIter<T> {
+ pub fn new() -> Self { SomeIter {} }
+ pub fn push(&mut self, t: T) {}
+}
+
+impl<T> Iterator for SomeIter<T> {
+ type Item = T;
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let mut some_iter = SomeIter::new();
+ //^^^^^^^^^ SomeIter<Take<Repeat<i32>>>
+ some_iter.push(iter::repeat(2).take(2));
+ let iter_of_iters = some_iter.take(2);
+ //^^^^^^^^^^^^^ impl Iterator<Item = impl Iterator<Item = i32>>
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn infer_call_method_return_associated_types_with_generic() {
+ check_types(
+ r#"
+ pub trait Default {
+ fn default() -> Self;
+ }
+ pub trait Foo {
+ type Bar: Default;
+ }
+
+ pub fn quux<T: Foo>() -> T::Bar {
+ let y = Default::default();
+ //^ <T as Foo>::Bar
+
+ y
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn fn_hints() {
+ check_types(
+ r#"
+//- minicore: fn, sized
+fn foo() -> impl Fn() { loop {} }
+fn foo1() -> impl Fn(f64) { loop {} }
+fn foo2() -> impl Fn(f64, f64) { loop {} }
+fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
+fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
+fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
+fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
+fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
+
+fn main() {
+ let foo = foo();
+ // ^^^ impl Fn()
+ let foo = foo1();
+ // ^^^ impl Fn(f64)
+ let foo = foo2();
+ // ^^^ impl Fn(f64, f64)
+ let foo = foo3();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo4();
+ // ^^^ &dyn Fn(f64, f64) -> u32
+ let foo = foo5();
+ // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
+ let foo = foo6();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo7();
+ // ^^^ *const impl Fn(f64, f64) -> u32
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn check_hint_range_limit() {
+ let fixture = r#"
+ //- minicore: fn, sized
+ fn foo() -> impl Fn() { loop {} }
+ fn foo1() -> impl Fn(f64) { loop {} }
+ fn foo2() -> impl Fn(f64, f64) { loop {} }
+ fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
+ fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
+ fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
+ fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
+ fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
+
+ fn main() {
+ let foo = foo();
+ let foo = foo1();
+ let foo = foo2();
+ // ^^^ impl Fn(f64, f64)
+ let foo = foo3();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo4();
+ let foo = foo5();
+ let foo = foo6();
+ let foo = foo7();
+ }
+ "#;
+ let (analysis, file_id) = fixture::file(fixture);
+ let expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+ let inlay_hints = analysis
+ .inlay_hints(
+ &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
+ file_id,
+ Some(FileRange {
+ file_id,
+ range: TextRange::new(TextSize::from(500), TextSize::from(600)),
+ }),
+ )
+ .unwrap();
+ let actual =
+ inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::<Vec<_>>();
+ assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ }
+
+ #[test]
+ fn fn_hints_ptr_rpit_fn_parentheses() {
+ check_types(
+ r#"
+//- minicore: fn, sized
+trait Trait {}
+
+fn foo1() -> *const impl Fn() { loop {} }
+fn foo2() -> *const (impl Fn() + Sized) { loop {} }
+fn foo3() -> *const (impl Fn() + ?Sized) { loop {} }
+fn foo4() -> *const (impl Sized + Fn()) { loop {} }
+fn foo5() -> *const (impl ?Sized + Fn()) { loop {} }
+fn foo6() -> *const (impl Fn() + Trait) { loop {} }
+fn foo7() -> *const (impl Fn() + Sized + Trait) { loop {} }
+fn foo8() -> *const (impl Fn() + ?Sized + Trait) { loop {} }
+fn foo9() -> *const (impl Fn() -> u8 + ?Sized) { loop {} }
+fn foo10() -> *const (impl Fn() + Sized + ?Sized) { loop {} }
+
+fn main() {
+ let foo = foo1();
+ // ^^^ *const impl Fn()
+ let foo = foo2();
+ // ^^^ *const impl Fn()
+ let foo = foo3();
+ // ^^^ *const (impl Fn() + ?Sized)
+ let foo = foo4();
+ // ^^^ *const impl Fn()
+ let foo = foo5();
+ // ^^^ *const (impl Fn() + ?Sized)
+ let foo = foo6();
+ // ^^^ *const (impl Fn() + Trait)
+ let foo = foo7();
+ // ^^^ *const (impl Fn() + Trait)
+ let foo = foo8();
+ // ^^^ *const (impl Fn() + Trait + ?Sized)
+ let foo = foo9();
+ // ^^^ *const (impl Fn() -> u8 + ?Sized)
+ let foo = foo10();
+ // ^^^ *const impl Fn()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unit_structs_have_no_type_hints() {
+ check_types(
+ r#"
+//- minicore: result
+struct SyntheticSyntax;
+
+fn main() {
+ match Ok(()) {
+ Ok(_) => (),
+ Err(SyntheticSyntax) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn let_statement() {
+ check_types(
+ r#"
+#[derive(PartialEq)]
+enum Option<T> { None, Some(T) }
+
+#[derive(PartialEq)]
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ struct InnerStruct {}
+
+ let test = 54;
+ //^^^^ i32
+ let test: i32 = 33;
+ let mut test = 33;
+ //^^^^ i32
+ let _ = 22;
+ let test = "test";
+ //^^^^ &str
+ let test = InnerStruct {};
+ //^^^^ InnerStruct
+
+ let test = unresolved();
+
+ let test = (42, 'a');
+ //^^^^ (i32, char)
+ let (a, (b, (c,)) = (2, (3, (9.2,));
+ //^ i32 ^ i32 ^ f64
+ let &x = &92;
+ //^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn if_expr() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ let test = Some(Test { a: Some(3), b: 1 });
+ //^^^^ Option<Test>
+ if let None = &test {};
+ if let test = &test {};
+ //^^^^ &Option<Test>
+ if let Some(test) = &test {};
+ //^^^^ &Test
+ if let Some(Test { a, b }) = &test {};
+ //^ &Option<u32> ^ &u8
+ if let Some(Test { a: x, b: y }) = &test {};
+ //^ &Option<u32> ^ &u8
+ if let Some(Test { a: Some(x), b: y }) = &test {};
+ //^ &u32 ^ &u8
+ if let Some(Test { a: None, b: y }) = &test {};
+ //^ &u8
+ if let Some(Test { b: y, .. }) = &test {};
+ //^ &u8
+ if test == None {}
+}"#,
+ );
+ }
+
+ #[test]
+ fn while_expr() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ let test = Some(Test { a: Some(3), b: 1 });
+ //^^^^ Option<Test>
+ while let Some(Test { a: Some(x), b: y }) = &test {};
+ //^ &u32 ^ &u8
+}"#,
+ );
+ }
+
+ #[test]
+ fn match_arm_list() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ match Some(Test { a: Some(3), b: 1 }) {
+ None => (),
+ test => (),
+ //^^^^ Option<Test>
+ Some(Test { a: Some(x), b: y }) => (),
+ //^ u32 ^ u8
+ _ => {}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn complete_for_hint() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub struct Vec<T> {}
+
+impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+ pub fn push(&mut self, t: T) {}
+}
+
+impl<T> IntoIterator for Vec<T> {
+ type Item=T;
+}
+
+fn main() {
+ let mut data = Vec::new();
+ //^^^^ Vec<&str>
+ data.push("foo");
+ for i in data {
+ //^ &str
+ let z = i;
+ //^ &str
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_dyn_trait_bounds() {
+ check_types(
+ r#"
+pub struct Vec<T> {}
+
+impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+}
+
+pub struct Box<T> {}
+
+trait Display {}
+trait Sync {}
+
+fn main() {
+ // The block expression wrapping disables the constructor hint hiding logic
+ let _v = { Vec::<Box<&(dyn Display + Sync)>>::new() };
+ //^^ Vec<Box<&(dyn Display + Sync)>>
+ let _v = { Vec::<Box<*const (dyn Display + Sync)>>::new() };
+ //^^ Vec<Box<*const (dyn Display + Sync)>>
+ let _v = { Vec::<Box<dyn Display + Sync>>::new() };
+ //^^ Vec<Box<dyn Display + Sync>>
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shorten_iterator_hints() {
+ check_types(
+ r#"
+//- minicore: iterators
+use core::iter;
+
+struct MyIter;
+
+impl Iterator for MyIter {
+ type Item = ();
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let _x = MyIter;
+ //^^ MyIter
+ let _x = iter::repeat(0);
+ //^^ impl Iterator<Item = i32>
+ fn generic<T: Clone>(t: T) {
+ let _x = iter::repeat(t);
+ //^^ impl Iterator<Item = T>
+ let _chained = iter::repeat(t).take(10);
+ //^^^^^^^^ impl Iterator<Item = T>
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn skip_constructor_and_enum_type_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_named_constructor_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: try, option
+use core::ops::ControlFlow;
+
+mod x {
+ pub mod y { pub struct Foo; }
+ pub struct Foo;
+ pub enum AnotherEnum {
+ Variant()
+ };
+}
+struct Struct;
+struct TupleStruct();
+
+impl Struct {
+ fn new() -> Self {
+ Struct
+ }
+ fn try_new() -> ControlFlow<(), Self> {
+ ControlFlow::Continue(Struct)
+ }
+}
+
+struct Generic<T>(T);
+impl Generic<i32> {
+ fn new() -> Self {
+ Generic(0)
+ }
+}
+
+enum Enum {
+ Variant(u32)
+}
+
+fn times2(value: i32) -> i32 {
+ 2 * value
+}
+
+fn main() {
+ let enumb = Enum::Variant(0);
+
+ let strukt = x::Foo;
+ let strukt = x::y::Foo;
+ let strukt = Struct;
+ let strukt = Struct::new();
+
+ let tuple_struct = TupleStruct();
+
+ let generic0 = Generic::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic1 = Generic(0);
+ // ^^^^^^^^ Generic<i32>
+ let generic2 = Generic::<i32>::new();
+ let generic3 = <Generic<i32>>::new();
+ let generic4 = Generic::<i32>(0);
+
+
+ let option = Some(0);
+ // ^^^^^^ Option<i32>
+ let func = times2;
+ // ^^^^ fn times2(i32) -> i32
+ let closure = |x: i32| x * 2;
+ // ^^^^^^^ |i32| -> i32
+}
+
+fn fallible() -> ControlFlow<()> {
+ let strukt = Struct::try_new()?;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shows_constructor_type_hints_when_enabled() {
+ check_types(
+ r#"
+//- minicore: try
+use core::ops::ControlFlow;
+
+struct Struct;
+struct TupleStruct();
+
+impl Struct {
+ fn new() -> Self {
+ Struct
+ }
+ fn try_new() -> ControlFlow<(), Self> {
+ ControlFlow::Continue(Struct)
+ }
+}
+
+struct Generic<T>(T);
+impl Generic<i32> {
+ fn new() -> Self {
+ Generic(0)
+ }
+}
+
+fn main() {
+ let strukt = Struct::new();
+ // ^^^^^^ Struct
+ let tuple_struct = TupleStruct();
+ // ^^^^^^^^^^^^ TupleStruct
+ let generic0 = Generic::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic1 = Generic::<i32>::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic2 = <Generic<i32>>::new();
+ // ^^^^^^^^ Generic<i32>
+}
+
+fn fallible() -> ControlFlow<()> {
+ let strukt = Struct::try_new()?;
+ // ^^^^^^ Struct
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closures() {
+ check(
+ r#"
+fn main() {
+ let mut start = 0;
+ //^^^^^ i32
+ (0..2).for_each(|increment | { start += increment; });
+ //^^^^^^^^^ i32
+
+ let multiply =
+ //^^^^^^^^ |i32, i32| -> i32
+ | a, b| a * b
+ //^ i32 ^ i32
+
+ ;
+
+ let _: i32 = multiply(1, 2);
+ //^ a ^ b
+ let multiply_ref = &multiply;
+ //^^^^^^^^^^^^ &|i32, i32| -> i32
+
+ let return_42 = || 42;
+ //^^^^^^^^^ || -> i32
+ || { 42 };
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn return_type_hints_for_closure_without_block() {
+ check_with_config(
+ InlayHintsConfig {
+ closure_return_type_hints: ClosureReturnTypeHints::Always,
+ ..DISABLED_CONFIG
+ },
+ r#"
+fn main() {
+ let a = || { 0 };
+ //^^ i32
+ let b = || 0;
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn skip_closure_type_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_closure_initialization_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: fn
+fn main() {
+ let multiple_2 = |x: i32| { x * 2 };
+
+ let multiple_2 = |x: i32| x * 2;
+ // ^^^^^^^^^^ |i32| -> i32
+
+ let (not) = (|x: bool| { !x });
+ // ^^^ |bool| -> bool
+
+ let (is_zero, _b) = (|x: usize| { x == 0 }, false);
+ // ^^^^^^^ |usize| -> bool
+ // ^^ bool
+
+ let plus_one = |x| { x + 1 };
+ // ^ u8
+ foo(plus_one);
+
+ let add_mul = bar(|x: u8| { x + 1 });
+ // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized
+
+ let closure = if let Some(6) = add_mul(2).checked_sub(1) {
+ // ^^^^^^^ fn(i32) -> i32
+ |x: i32| { x * 2 }
+ } else {
+ |x: i32| { x * 3 }
+ };
+}
+
+fn foo(f: impl FnOnce(u8) -> u8) {}
+
+fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 {
+ move |x: u8| f(x) * 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hint_truncation() {
+ check_with_config(
+ InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
+ r#"
+struct Smol<T>(T);
+
+struct VeryLongOuterName<T>(T);
+
+fn main() {
+ let a = Smol(0u32);
+ //^ Smol<u32>
+ let b = VeryLongOuterName(0usize);
+ //^ VeryLongOuterName<…>
+ let c = Smol(Smol(0u32))
+ //^ Smol<Smol<…>>
+}"#,
+ );
+ }
+
+ // Chaining hint tests
+
+ #[test]
+ fn chaining_hints_ignore_comments() {
+ check_expect(
+ InlayHintsConfig { type_hints: false, chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A(B);
+impl A { fn into_b(self) -> B { self.0 } }
+struct B(C);
+impl B { fn into_c(self) -> C { self.0 } }
+struct C;
+
+fn main() {
+ let c = A(B(C))
+ .into_b() // This is a comment
+ // This is another comment
+ .into_c();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 147..172,
+ kind: ChainingHint,
+ label: "B",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 147..172,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 147..154,
+ kind: ChainingHint,
+ label: "A",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 147..154,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn chaining_hints_without_newlines() {
+ check_chains(
+ r#"
+struct A(B);
+impl A { fn into_b(self) -> B { self.0 } }
+struct B(C);
+impl B { fn into_c(self) -> C { self.0 } }
+struct C;
+
+fn main() {
+ let c = A(B(C)).into_b().into_c();
+}"#,
+ );
+ }
+
+ #[test]
+ fn struct_access_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A { pub b: B }
+struct B { pub c: C }
+struct C(pub bool);
+struct D;
+
+impl D {
+ fn foo(&self) -> i32 { 42 }
+}
+
+fn main() {
+ let x = A { b: B { c: C(true) } }
+ .b
+ .c
+ .0;
+ let x = D
+ .foo();
+}"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 143..190,
+ kind: ChainingHint,
+ label: "C",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 143..190,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 143..179,
+ kind: ChainingHint,
+ label: "B",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 143..179,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A<T>(T);
+struct B<T>(T);
+struct C<T>(T);
+struct X<T,R>(T, R);
+
+impl<T> A<T> {
+ fn new(t: T) -> Self { A(t) }
+ fn into_b(self) -> B<T> { B(self.0) }
+}
+impl<T> B<T> {
+ fn into_c(self) -> C<T> { C(self.0) }
+}
+fn main() {
+ let c = A::new(X(42, true))
+ .into_b()
+ .into_c();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 246..283,
+ kind: ChainingHint,
+ label: "B<X<i32, bool>>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 246..283,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 246..265,
+ kind: ChainingHint,
+ label: "A<X<i32, bool>>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 246..265,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn shorten_iterator_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+//- minicore: iterators
+use core::iter;
+
+struct MyIter;
+
+impl Iterator for MyIter {
+ type Item = ();
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let _x = MyIter.by_ref()
+ .take(5)
+ .by_ref()
+ .take(5)
+ .by_ref();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 174..241,
+ kind: ChainingHint,
+ label: "impl Iterator<Item = ()>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..241,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..224,
+ kind: ChainingHint,
+ label: "impl Iterator<Item = ()>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..224,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..206,
+ kind: ChainingHint,
+ label: "impl Iterator<Item = ()>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..206,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..189,
+ kind: ChainingHint,
+ label: "&mut MyIter",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..189,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn hints_in_attr_call() {
+ check_expect(
+ TEST_CONFIG,
+ r#"
+//- proc_macros: identity, input_replace
+struct Struct;
+impl Struct {
+ fn chain(self) -> Self {
+ self
+ }
+}
+#[proc_macros::identity]
+fn main() {
+ let strukt = Struct;
+ strukt
+ .chain()
+ .chain()
+ .chain();
+ Struct::chain(strukt);
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 124..130,
+ kind: TypeHint,
+ label: "Struct",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 124..130,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 145..185,
+ kind: ChainingHint,
+ label: "Struct",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 145..185,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 145..168,
+ kind: ChainingHint,
+ label: "Struct",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 145..168,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 222..228,
+ kind: ParameterHint,
+ label: "self",
+ tooltip: Some(
+ HoverOffset(
+ FileId(
+ 0,
+ ),
+ 42,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes() {
+ check(
+ r#"
+fn empty() {}
+
+fn no_gpl(a: &()) {}
+ //^^^^^^<'0>
+ // ^'0
+fn empty_gpl<>(a: &()) {}
+ // ^'0 ^'0
+fn partial<'b>(a: &(), b: &'b ()) {}
+// ^'0, $ ^'0
+fn partial<'a>(a: &'a (), b: &()) {}
+// ^'0, $ ^'0
+
+fn single_ret(a: &()) -> &() {}
+// ^^^^^^^^^^<'0>
+ // ^'0 ^'0
+fn full_mul(a: &(), b: &()) {}
+// ^^^^^^^^<'0, '1>
+ // ^'0 ^'1
+
+fn foo<'c>(a: &'c ()) -> &() {}
+ // ^'c
+
+fn nested_in(a: & &X< &()>) {}
+// ^^^^^^^^^<'0, '1, '2>
+ //^'0 ^'1 ^'2
+fn nested_out(a: &()) -> & &X< &()>{}
+// ^^^^^^^^^^<'0>
+ //^'0 ^'0 ^'0 ^'0
+
+impl () {
+ fn foo(&self) {}
+ // ^^^<'0>
+ // ^'0
+ fn foo(&self) -> &() {}
+ // ^^^<'0>
+ // ^'0 ^'0
+ fn foo(&self, a: &()) -> &() {}
+ // ^^^<'0, '1>
+ // ^'0 ^'1 ^'0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_named() {
+ check_with_config(
+ InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+ r#"
+fn nested_in<'named>(named: & &X< &()>) {}
+// ^'named1, 'named2, 'named3, $
+ //^'named1 ^'named2 ^'named3
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_trivial_skip() {
+ check_with_config(
+ InlayHintsConfig {
+ lifetime_elision_hints: LifetimeElisionHints::SkipTrivial,
+ ..TEST_CONFIG
+ },
+ r#"
+fn no_gpl(a: &()) {}
+fn empty_gpl<>(a: &()) {}
+fn partial<'b>(a: &(), b: &'b ()) {}
+fn partial<'a>(a: &'a (), b: &()) {}
+
+fn single_ret(a: &()) -> &() {}
+// ^^^^^^^^^^<'0>
+ // ^'0 ^'0
+fn full_mul(a: &(), b: &()) {}
+
+fn foo<'c>(a: &'c ()) -> &() {}
+ // ^'c
+
+fn nested_in(a: & &X< &()>) {}
+fn nested_out(a: &()) -> & &X< &()>{}
+// ^^^^^^^^^^<'0>
+ //^'0 ^'0 ^'0 ^'0
+
+impl () {
+ fn foo(&self) {}
+ fn foo(&self) -> &() {}
+ // ^^^<'0>
+ // ^'0 ^'0
+ fn foo(&self, a: &()) -> &() {}
+ // ^^^<'0, '1>
+ // ^'0 ^'1 ^'0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_static() {
+ check_with_config(
+ InlayHintsConfig {
+ lifetime_elision_hints: LifetimeElisionHints::Always,
+ ..TEST_CONFIG
+ },
+ r#"
+trait Trait {}
+static S: &str = "";
+// ^'static
+const C: &str = "";
+// ^'static
+const C: &dyn Trait = panic!();
+// ^'static
+
+impl () {
+ const C: &str = "";
+ const C: &dyn Trait = panic!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_implicit_reborrow() {
+ check_with_config(
+ InlayHintsConfig {
+ reborrow_hints: ReborrowHints::Always,
+ parameter_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+fn __() {
+ let unique = &mut ();
+ let r_mov = unique;
+ let foo: &mut _ = unique;
+ //^^^^^^ &mut *
+ ref_mut_id(unique);
+ //^^^^^^ mut_ref
+ //^^^^^^ &mut *
+ let shared = ref_id(unique);
+ //^^^^^^ shared_ref
+ //^^^^^^ &*
+ let mov = shared;
+ let r_mov: &_ = shared;
+ ref_id(shared);
+ //^^^^^^ shared_ref
+
+ identity(unique);
+ identity(shared);
+}
+fn identity<T>(t: T) -> T {
+ t
+}
+fn ref_mut_id(mut_ref: &mut ()) -> &mut () {
+ mut_ref
+ //^^^^^^^ &mut *
+}
+fn ref_id(shared_ref: &()) -> &() {
+ shared_ref
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_binding_modes() {
+ check_with_config(
+ InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG },
+ r#"
+fn __(
+ (x,): (u32,),
+ (x,): &(u32,),
+ //^^^^&
+ //^ ref
+ (x,): &mut (u32,)
+ //^^^^&mut
+ //^ ref mut
+) {
+ let (x,) = (0,);
+ let (x,) = &(0,);
+ //^^^^ &
+ //^ ref
+ let (x,) = &mut (0,);
+ //^^^^ &mut
+ //^ ref mut
+ let &mut (x,) = &mut (0,);
+ let (ref mut x,) = &mut (0,);
+ //^^^^^^^^^^^^ &mut
+ let &mut (ref mut x,) = &mut (0,);
+ let (mut x,) = &mut (0,);
+ //^^^^^^^^ &mut
+ match (0,) {
+ (x,) => ()
+ }
+ match &(0,) {
+ (x,) => ()
+ //^^^^ &
+ //^ ref
+ }
+ match &mut (0,) {
+ (x,) => ()
+ //^^^^ &mut
+ //^ ref mut
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn hints_closing_brace() {
+ check_with_config(
+ InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG },
+ r#"
+fn a() {}
+
+fn f() {
+} // no hint unless `}` is the last token on the line
+
+fn g() {
+ }
+//^ fn g
+
+fn h<T>(with: T, arguments: u8, ...) {
+ }
+//^ fn h
+
+trait Tr {
+ fn f();
+ fn g() {
+ }
+ //^ fn g
+ }
+//^ trait Tr
+impl Tr for () {
+ }
+//^ impl Tr for ()
+impl dyn Tr {
+ }
+//^ impl dyn Tr
+
+static S0: () = 0;
+static S1: () = {};
+static S2: () = {
+ };
+//^ static S2
+const _: () = {
+ };
+//^ const _
+
+mod m {
+ }
+//^ mod m
+
+m! {}
+m!();
+m!(
+ );
+//^ m!
+
+m! {
+ }
+//^ m!
+
+fn f() {
+ let v = vec![
+ ];
+ }
+//^ fn f
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
new file mode 100644
index 000000000..08621adde
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
@@ -0,0 +1,1087 @@
+use ide_assists::utils::extract_trivial_expression;
+use ide_db::syntax_helpers::node_ext::expr_as_name_ref;
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, AstToken, IsString},
+ NodeOrToken, SourceFile, SyntaxElement,
+ SyntaxKind::{self, USE_TREE, WHITESPACE},
+ SyntaxToken, TextRange, TextSize, T,
+};
+
+use text_edit::{TextEdit, TextEditBuilder};
+
+pub struct JoinLinesConfig {
+ pub join_else_if: bool,
+ pub remove_trailing_comma: bool,
+ pub unwrap_trivial_blocks: bool,
+ pub join_assignments: bool,
+}
+
+// Feature: Join Lines
+//
+// Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces.
+//
+// See
+// https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif[this gif]
+// for the cases handled specially by joined lines.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Join lines**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif[]
+pub(crate) fn join_lines(
+ config: &JoinLinesConfig,
+ file: &SourceFile,
+ range: TextRange,
+) -> TextEdit {
+ let range = if range.is_empty() {
+ let syntax = file.syntax();
+ let text = syntax.text().slice(range.start()..);
+ let pos = match text.find_char('\n') {
+ None => return TextEdit::builder().finish(),
+ Some(pos) => pos,
+ };
+ TextRange::at(range.start() + pos, TextSize::of('\n'))
+ } else {
+ range
+ };
+
+ let mut edit = TextEdit::builder();
+ match file.syntax().covering_element(range) {
+ NodeOrToken::Node(node) => {
+ for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
+ remove_newlines(config, &mut edit, &token, range)
+ }
+ }
+ NodeOrToken::Token(token) => remove_newlines(config, &mut edit, &token, range),
+ };
+ edit.finish()
+}
+
+fn remove_newlines(
+ config: &JoinLinesConfig,
+ edit: &mut TextEditBuilder,
+ token: &SyntaxToken,
+ range: TextRange,
+) {
+ let intersection = match range.intersect(token.text_range()) {
+ Some(range) => range,
+ None => return,
+ };
+
+ let range = intersection - token.text_range().start();
+ let text = token.text();
+ for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
+ let pos: TextSize = (pos as u32).into();
+ let offset = token.text_range().start() + range.start() + pos;
+ if !edit.invalidates_offset(offset) {
+ remove_newline(config, edit, token, offset);
+ }
+ }
+}
+
+fn remove_newline(
+ config: &JoinLinesConfig,
+ edit: &mut TextEditBuilder,
+ token: &SyntaxToken,
+ offset: TextSize,
+) {
+ if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
+ let n_spaces_after_line_break = {
+ let suff = &token.text()[TextRange::new(
+ offset - token.text_range().start() + TextSize::of('\n'),
+ TextSize::of(token.text()),
+ )];
+ suff.bytes().take_while(|&b| b == b' ').count()
+ };
+
+ let mut no_space = false;
+ if let Some(string) = ast::String::cast(token.clone()) {
+ if let Some(range) = string.open_quote_text_range() {
+ cov_mark::hit!(join_string_literal_open_quote);
+ no_space |= range.end() == offset;
+ }
+ if let Some(range) = string.close_quote_text_range() {
+ cov_mark::hit!(join_string_literal_close_quote);
+ no_space |= range.start()
+ == offset
+ + TextSize::of('\n')
+ + TextSize::try_from(n_spaces_after_line_break).unwrap();
+ }
+ }
+
+ let range = TextRange::at(offset, ((n_spaces_after_line_break + 1) as u32).into());
+ let replace_with = if no_space { "" } else { " " };
+ edit.replace(range, replace_with.to_string());
+ return;
+ }
+
+ // The node is between two other nodes
+ let (prev, next) = match (token.prev_sibling_or_token(), token.next_sibling_or_token()) {
+ (Some(prev), Some(next)) => (prev, next),
+ _ => return,
+ };
+
+ if config.remove_trailing_comma && prev.kind() == T![,] {
+ match next.kind() {
+ T![')'] | T![']'] => {
+ // Removes: trailing comma, newline (incl. surrounding whitespace)
+ edit.delete(TextRange::new(prev.text_range().start(), token.text_range().end()));
+ return;
+ }
+ T!['}'] => {
+ // Removes: comma, newline (incl. surrounding whitespace)
+ let space = match prev.prev_sibling_or_token() {
+ Some(left) => compute_ws(left.kind(), next.kind()),
+ None => " ",
+ };
+ edit.replace(
+ TextRange::new(prev.text_range().start(), token.text_range().end()),
+ space.to_string(),
+ );
+ return;
+ }
+ _ => (),
+ }
+ }
+
+ if config.join_else_if {
+ if let (Some(prev), Some(_next)) = (as_if_expr(&prev), as_if_expr(&next)) {
+ match prev.else_token() {
+ Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else),
+ None => {
+ cov_mark::hit!(join_two_ifs);
+ edit.replace(token.text_range(), " else ".to_string());
+ return;
+ }
+ }
+ }
+ }
+
+ if config.join_assignments {
+ if join_assignments(edit, &prev, &next).is_some() {
+ return;
+ }
+ }
+
+ if config.unwrap_trivial_blocks {
+ // Special case that turns something like:
+ //
+ // ```
+ // my_function({$0
+ // <some-expr>
+ // })
+ // ```
+ //
+ // into `my_function(<some-expr>)`
+ if join_single_expr_block(edit, token).is_some() {
+ return;
+ }
+ // ditto for
+ //
+ // ```
+ // use foo::{$0
+ // bar
+ // };
+ // ```
+ if join_single_use_tree(edit, token).is_some() {
+ return;
+ }
+ }
+
+ if let (Some(_), Some(next)) = (
+ prev.as_token().cloned().and_then(ast::Comment::cast),
+ next.as_token().cloned().and_then(ast::Comment::cast),
+ ) {
+ // Removes: newline (incl. surrounding whitespace), start of the next comment
+ edit.delete(TextRange::new(
+ token.text_range().start(),
+ next.syntax().text_range().start() + TextSize::of(next.prefix()),
+ ));
+ return;
+ }
+
+ // Remove newline but add a computed amount of whitespace characters
+ edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string());
+}
+
+fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
+ let block_expr = ast::BlockExpr::cast(token.parent_ancestors().nth(1)?)?;
+ if !block_expr.is_standalone() {
+ return None;
+ }
+ let expr = extract_trivial_expression(&block_expr)?;
+
+ let block_range = block_expr.syntax().text_range();
+ let mut buf = expr.syntax().text().to_string();
+
+ // Match block needs to have a comma after the block
+ if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) {
+ if match_arm.comma_token().is_none() {
+ buf.push(',');
+ }
+ }
+
+ edit.replace(block_range, buf);
+
+ Some(())
+}
+
+fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
+ let use_tree_list = ast::UseTreeList::cast(token.parent()?)?;
+ let (tree,) = use_tree_list.use_trees().collect_tuple()?;
+ edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string());
+ Some(())
+}
+
+fn join_assignments(
+ edit: &mut TextEditBuilder,
+ prev: &SyntaxElement,
+ next: &SyntaxElement,
+) -> Option<()> {
+ let let_stmt = ast::LetStmt::cast(prev.as_node()?.clone())?;
+ if let_stmt.eq_token().is_some() {
+ cov_mark::hit!(join_assignments_already_initialized);
+ return None;
+ }
+ let let_ident_pat = match let_stmt.pat()? {
+ ast::Pat::IdentPat(it) => it,
+ _ => return None,
+ };
+
+ let expr_stmt = ast::ExprStmt::cast(next.as_node()?.clone())?;
+ let bin_expr = match expr_stmt.expr()? {
+ ast::Expr::BinExpr(it) => it,
+ _ => return None,
+ };
+ if !matches!(bin_expr.op_kind()?, ast::BinaryOp::Assignment { op: None }) {
+ return None;
+ }
+ let lhs = bin_expr.lhs()?;
+ let name_ref = expr_as_name_ref(&lhs)?;
+
+ if name_ref.to_string() != let_ident_pat.syntax().to_string() {
+ cov_mark::hit!(join_assignments_mismatch);
+ return None;
+ }
+
+ edit.delete(let_stmt.semicolon_token()?.text_range().cover(lhs.syntax().text_range()));
+ Some(())
+}
+
+fn as_if_expr(element: &SyntaxElement) -> Option<ast::IfExpr> {
+ let mut node = element.as_node()?.clone();
+ if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
+ node = stmt.expr()?.syntax().clone();
+ }
+ ast::IfExpr::cast(node)
+}
+
+fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str {
+ match left {
+ T!['('] | T!['['] => return "",
+ T!['{'] => {
+ if let USE_TREE = right {
+ return "";
+ }
+ }
+ _ => (),
+ }
+ match right {
+ T![')'] | T![']'] => return "",
+ T!['}'] => {
+ if let USE_TREE = left {
+ return "";
+ }
+ }
+ T![.] => return "",
+ _ => (),
+ }
+ " "
+}
+
+#[cfg(test)]
+mod tests {
+ use syntax::SourceFile;
+ use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range};
+
+ use super::*;
+
+ fn check_join_lines(ra_fixture_before: &str, ra_fixture_after: &str) {
+ let config = JoinLinesConfig {
+ join_else_if: true,
+ remove_trailing_comma: true,
+ unwrap_trivial_blocks: true,
+ join_assignments: true,
+ };
+
+ let (before_cursor_pos, before) = extract_offset(ra_fixture_before);
+ let file = SourceFile::parse(&before).ok().unwrap();
+
+ let range = TextRange::empty(before_cursor_pos);
+ let result = join_lines(&config, &file, range);
+
+ let actual = {
+ let mut actual = before;
+ result.apply(&mut actual);
+ actual
+ };
+ let actual_cursor_pos = result
+ .apply_to_offset(before_cursor_pos)
+ .expect("cursor position is affected by the edit");
+ let actual = add_cursor(&actual, actual_cursor_pos);
+ assert_eq_text!(ra_fixture_after, &actual);
+ }
+
+ fn check_join_lines_sel(ra_fixture_before: &str, ra_fixture_after: &str) {
+ let config = JoinLinesConfig {
+ join_else_if: true,
+ remove_trailing_comma: true,
+ unwrap_trivial_blocks: true,
+ join_assignments: true,
+ };
+
+ let (sel, before) = extract_range(ra_fixture_before);
+ let parse = SourceFile::parse(&before);
+ let result = join_lines(&config, &parse.tree(), sel);
+ let actual = {
+ let mut actual = before;
+ result.apply(&mut actual);
+ actual
+ };
+ assert_eq_text!(ra_fixture_after, &actual);
+ }
+
+ #[test]
+ fn test_join_lines_comma() {
+ check_join_lines(
+ r"
+fn foo() {
+ $0foo(1,
+ )
+}
+",
+ r"
+fn foo() {
+ $0foo(1)
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_lambda_block() {
+ check_join_lines(
+ r"
+pub fn reparse(&self, edit: &AtomTextEdit) -> File {
+ $0self.incremental_reparse(edit).unwrap_or_else(|| {
+ self.full_reparse(edit)
+ })
+}
+",
+ r"
+pub fn reparse(&self, edit: &AtomTextEdit) -> File {
+ $0self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ foo($0{
+ 92
+ })
+}",
+ r"
+fn foo() {
+ foo($092)
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_diverging_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ loop {
+ match x {
+ 92 => $0{
+ continue;
+ }
+ }
+ }
+}
+ ",
+ r"
+fn foo() {
+ loop {
+ match x {
+ 92 => $0continue,
+ }
+ }
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn join_lines_adds_comma_for_block_in_match_arm() {
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ }
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo(),
+ Err(v) => v,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn join_lines_multiline_in_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ match ty {
+ $0 Some(ty) => {
+ match ty {
+ _ => false,
+ }
+ }
+ _ => true,
+ }
+}
+",
+ r"
+fn foo() {
+ match ty {
+ $0 Some(ty) => match ty {
+ _ => false,
+ },
+ _ => true,
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn join_lines_keeps_comma_for_block_in_match_arm() {
+ // We already have a comma
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ },
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo(),
+ Err(v) => v,
+ }
+}",
+ );
+
+ // comma with whitespace between brace and ,
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ } ,
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo() ,
+ Err(v) => v,
+ }
+}",
+ );
+
+ // comma with newline between brace and ,
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ }
+ ,
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo()
+ ,
+ Err(v) => v,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn join_lines_keeps_comma_with_single_arg_tuple() {
+ // A single arg tuple
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ },);
+}",
+ r"
+fn foo() {
+ let x = ($04,);
+}",
+ );
+
+ // single arg tuple with whitespace between brace and comma
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ } ,);
+}",
+ r"
+fn foo() {
+ let x = ($04 ,);
+}",
+ );
+
+ // single arg tuple with newline between brace and comma
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ }
+ ,);
+}",
+ r"
+fn foo() {
+ let x = ($04
+ ,);
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_left() {
+ // No space after the '{'
+ check_join_lines(
+ r"
+$0use syntax::{
+ TextSize, TextRange,
+};",
+ r"
+$0use syntax::{TextSize, TextRange,
+};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_right() {
+ // No space after the '}'
+ check_join_lines(
+ r"
+use syntax::{
+$0 TextSize, TextRange
+};",
+ r"
+use syntax::{
+$0 TextSize, TextRange};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_right_comma() {
+ // No space after the '}'
+ check_join_lines(
+ r"
+use syntax::{
+$0 TextSize, TextRange,
+};",
+ r"
+use syntax::{
+$0 TextSize, TextRange};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_tree() {
+ check_join_lines(
+ r"
+use syntax::{
+ algo::$0{
+ find_token_at_offset,
+ },
+ ast,
+};",
+ r"
+use syntax::{
+ algo::$0find_token_at_offset,
+ ast,
+};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_normal_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ // Hello$0
+ // world!
+}
+",
+ r"
+fn foo() {
+ // Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_doc_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ /// Hello$0
+ /// world!
+}
+",
+ r"
+fn foo() {
+ /// Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_mod_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ //! Hello$0
+ //! world!
+}
+",
+ r"
+fn foo() {
+ //! Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_multiline_comments_1() {
+ check_join_lines(
+ r"
+fn foo() {
+ // Hello$0
+ /* world! */
+}
+",
+ r"
+fn foo() {
+ // Hello$0 world! */
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_multiline_comments_2() {
+ check_join_lines(
+ r"
+fn foo() {
+ // The$0
+ /* quick
+ brown
+ fox! */
+}
+",
+ r"
+fn foo() {
+ // The$0 quick
+ brown
+ fox! */
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_fn_args() {
+ check_join_lines_sel(
+ r"
+fn foo() {
+ $0foo(1,
+ 2,
+ 3,
+ $0)
+}
+ ",
+ r"
+fn foo() {
+ foo(1, 2, 3)
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_struct() {
+ check_join_lines_sel(
+ r"
+struct Foo $0{
+ f: u32,
+}$0
+ ",
+ r"
+struct Foo { f: u32 }
+ ",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_dot_chain() {
+ check_join_lines_sel(
+ r"
+fn foo() {
+ join($0type_params.type_params()
+ .filter_map(|it| it.name())
+ .map(|it| it.text())$0)
+}",
+ r"
+fn foo() {
+ join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text()))
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_lambda_block_body() {
+ check_join_lines_sel(
+ r"
+pub fn handle_find_matching_brace() {
+ params.offsets
+ .map(|offset| $0{
+ world.analysis().matching_brace(&file, offset).unwrap_or(offset)
+ }$0)
+ .collect();
+}",
+ r"
+pub fn handle_find_matching_brace() {
+ params.offsets
+ .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset))
+ .collect();
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_commented_block() {
+ check_join_lines(
+ r"
+fn main() {
+ let _ = {
+ // $0foo
+ // bar
+ 92
+ };
+}
+ ",
+ r"
+fn main() {
+ let _ = {
+ // $0foo bar
+ 92
+ };
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn join_lines_mandatory_blocks_block() {
+ check_join_lines(
+ r"
+$0fn foo() {
+ 92
+}
+ ",
+ r"
+$0fn foo() { 92
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0if true {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0if true { 92
+ }
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0loop {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0loop { 92
+ }
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0unsafe {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0unsafe { 92
+ }
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn join_string_literal() {
+ {
+ cov_mark::check!(join_string_literal_open_quote);
+ check_join_lines(
+ r#"
+fn main() {
+ $0"
+hello
+";
+}
+"#,
+ r#"
+fn main() {
+ $0"hello
+";
+}
+"#,
+ );
+ }
+
+ {
+ cov_mark::check!(join_string_literal_close_quote);
+ check_join_lines(
+ r#"
+fn main() {
+ $0"hello
+";
+}
+"#,
+ r#"
+fn main() {
+ $0"hello";
+}
+"#,
+ );
+ check_join_lines(
+ r#"
+fn main() {
+ $0r"hello
+ ";
+}
+"#,
+ r#"
+fn main() {
+ $0r"hello";
+}
+"#,
+ );
+ }
+
+ check_join_lines(
+ r#"
+fn main() {
+ "
+$0hello
+world
+";
+}
+"#,
+ r#"
+fn main() {
+ "
+$0hello world
+";
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_last_line_empty() {
+ check_join_lines(
+ r#"
+fn main() {$0}
+"#,
+ r#"
+fn main() {$0}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_two_ifs() {
+ cov_mark::check!(join_two_ifs);
+ check_join_lines(
+ r#"
+fn main() {
+ if foo {
+
+ }$0
+ if bar {
+
+ }
+}
+"#,
+ r#"
+fn main() {
+ if foo {
+
+ }$0 else if bar {
+
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_two_ifs_with_existing_else() {
+ cov_mark::check!(join_two_ifs_with_existing_else);
+ check_join_lines(
+ r#"
+fn main() {
+ if foo {
+
+ } else {
+
+ }$0
+ if bar {
+
+ }
+}
+"#,
+ r#"
+fn main() {
+ if foo {
+
+ } else {
+
+ }$0 if bar {
+
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_assignments() {
+ check_join_lines(
+ r#"
+fn foo() {
+ $0let foo;
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ $0let foo = "bar";
+}
+"#,
+ );
+
+ cov_mark::check!(join_assignments_mismatch);
+ check_join_lines(
+ r#"
+fn foo() {
+ let foo;
+ let qux;$0
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ let foo;
+ let qux;$0 foo = "bar";
+}
+"#,
+ );
+
+ cov_mark::check!(join_assignments_already_initialized);
+ check_join_lines(
+ r#"
+fn foo() {
+ let foo = "bar";$0
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ let foo = "bar";$0 foo = "bar";
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
new file mode 100644
index 000000000..dd108fa79
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -0,0 +1,702 @@
+//! ide crate provides "ide-centric" APIs for the rust-analyzer. That is,
+//! it generally operates with files and text ranges, and returns results as
+//! Strings, suitable for displaying to the human.
+//!
+//! What powers this API are the `RootDatabase` struct, which defines a `salsa`
+//! database, and the `hir` crate, where majority of the analysis happens.
+//! However, IDE specific bits of the analysis (most notably completion) happen
+//! in this crate.
+
+// For proving that RootDatabase is RefUnwindSafe.
+#![recursion_limit = "128"]
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+#[cfg(test)]
+mod fixture;
+
+mod markup;
+mod prime_caches;
+mod navigation_target;
+
+mod annotations;
+mod call_hierarchy;
+mod signature_help;
+mod doc_links;
+mod highlight_related;
+mod expand_macro;
+mod extend_selection;
+mod file_structure;
+mod fn_references;
+mod folding_ranges;
+mod goto_declaration;
+mod goto_definition;
+mod goto_implementation;
+mod goto_type_definition;
+mod hover;
+mod inlay_hints;
+mod join_lines;
+mod markdown_remove;
+mod matching_brace;
+mod moniker;
+mod move_item;
+mod parent_module;
+mod references;
+mod rename;
+mod runnables;
+mod ssr;
+mod static_index;
+mod status;
+mod syntax_highlighting;
+mod syntax_tree;
+mod typing;
+mod view_crate_graph;
+mod view_hir;
+mod view_item_tree;
+mod shuffle_crate_graph;
+
+use std::sync::Arc;
+
+use cfg::CfgOptions;
+use ide_db::{
+ base_db::{
+ salsa::{self, ParallelDatabase},
+ CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
+ },
+ symbol_index, LineIndexDatabase,
+};
+use syntax::SourceFile;
+
+use crate::navigation_target::{ToNav, TryToNav};
+
+pub use crate::{
+ annotations::{Annotation, AnnotationConfig, AnnotationKind},
+ call_hierarchy::CallItem,
+ expand_macro::ExpandedMacro,
+ file_structure::{StructureNode, StructureNodeKind},
+ folding_ranges::{Fold, FoldKind},
+ highlight_related::{HighlightRelatedConfig, HighlightedRange},
+ hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult},
+ inlay_hints::{
+ ClosureReturnTypeHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip,
+ LifetimeElisionHints, ReborrowHints,
+ },
+ join_lines::JoinLinesConfig,
+ markup::Markup,
+ moniker::{MonikerKind, MonikerResult, PackageInformation},
+ move_item::Direction,
+ navigation_target::NavigationTarget,
+ prime_caches::ParallelPrimeCachesProgress,
+ references::ReferenceSearchResult,
+ rename::RenameError,
+ runnables::{Runnable, RunnableKind, TestId},
+ signature_help::SignatureHelp,
+ static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
+ syntax_highlighting::{
+ tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
+ HlRange,
+ },
+};
+pub use hir::{Documentation, Semantics};
+pub use ide_assists::{
+ Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
+};
+pub use ide_completion::{
+ CallableSnippets, CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance,
+ Snippet, SnippetScope,
+};
+pub use ide_db::{
+ base_db::{
+ Cancelled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange,
+ SourceRoot, SourceRootId,
+ },
+ label::Label,
+ line_index::{LineCol, LineColUtf16, LineIndex},
+ search::{ReferenceCategory, SearchScope},
+ source_change::{FileSystemEdit, SourceChange},
+ symbol_index::Query,
+ RootDatabase, SymbolKind,
+};
+pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity};
+pub use ide_ssr::SsrError;
+pub use syntax::{TextRange, TextSize};
+pub use text_edit::{Indel, TextEdit};
+
+pub type Cancellable<T> = Result<T, Cancelled>;
+
+/// Info associated with a text range.
+#[derive(Debug)]
+pub struct RangeInfo<T> {
+ pub range: TextRange,
+ pub info: T,
+}
+
+impl<T> RangeInfo<T> {
+ pub fn new(range: TextRange, info: T) -> RangeInfo<T> {
+ RangeInfo { range, info }
+ }
+}
+
+/// `AnalysisHost` stores the current state of the world.
+#[derive(Debug)]
+pub struct AnalysisHost {
+ db: RootDatabase,
+}
+
+impl AnalysisHost {
+ pub fn new(lru_capacity: Option<usize>) -> AnalysisHost {
+ AnalysisHost { db: RootDatabase::new(lru_capacity) }
+ }
+
+ pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
+ self.db.update_lru_capacity(lru_capacity);
+ }
+
+ /// Returns a snapshot of the current state, which you can query for
+ /// semantic information.
+ pub fn analysis(&self) -> Analysis {
+ Analysis { db: self.db.snapshot() }
+ }
+
+ /// Applies changes to the current state of the world. If there are
+ /// outstanding snapshots, they will be canceled.
+ pub fn apply_change(&mut self, change: Change) {
+ self.db.apply_change(change)
+ }
+
+ /// NB: this clears the database
+ pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes)> {
+ self.db.per_query_memory_usage()
+ }
+ pub fn request_cancellation(&mut self) {
+ self.db.request_cancellation();
+ }
+ pub fn raw_database(&self) -> &RootDatabase {
+ &self.db
+ }
+ pub fn raw_database_mut(&mut self) -> &mut RootDatabase {
+ &mut self.db
+ }
+
+ pub fn shuffle_crate_graph(&mut self) {
+ shuffle_crate_graph::shuffle_crate_graph(&mut self.db);
+ }
+}
+
+impl Default for AnalysisHost {
+ fn default() -> AnalysisHost {
+ AnalysisHost::new(None)
+ }
+}
+
+/// Analysis is a snapshot of a world state at a moment in time. It is the main
+/// entry point for asking semantic information about the world. When the world
+/// state is advanced using `AnalysisHost::apply_change` method, all existing
+/// `Analysis` are canceled (most method return `Err(Canceled)`).
+#[derive(Debug)]
+pub struct Analysis {
+ db: salsa::Snapshot<RootDatabase>,
+}
+
+// As a general design guideline, `Analysis` API are intended to be independent
+// from the language server protocol. That is, when exposing some functionality
+// we should think in terms of "what API makes most sense" and not in terms of
+// "what types LSP uses". Although currently LSP is the only consumer of the
+// API, the API should in theory be usable as a library, or via a different
+// protocol.
+impl Analysis {
+ // Creates an analysis instance for a single file, without any external
+ // dependencies, stdlib support or ability to apply changes. See
+ // `AnalysisHost` for creating a fully-featured analysis.
+ pub fn from_single_file(text: String) -> (Analysis, FileId) {
+ let mut host = AnalysisHost::default();
+ let file_id = FileId(0);
+ let mut file_set = FileSet::default();
+ file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));
+ let source_root = SourceRoot::new_local(file_set);
+
+ let mut change = Change::new();
+ change.set_roots(vec![source_root]);
+ let mut crate_graph = CrateGraph::default();
+ // FIXME: cfg options
+ // Default to enable test for single file.
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.insert_atom("test".into());
+ crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ None,
+ None,
+ cfg_options.clone(),
+ cfg_options,
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ change.change_file(file_id, Some(Arc::new(text)));
+ change.set_crate_graph(crate_graph);
+ host.apply_change(change);
+ (host.analysis(), file_id)
+ }
+
+ /// Debug info about the current state of the analysis.
+ pub fn status(&self, file_id: Option<FileId>) -> Cancellable<String> {
+ self.with_db(|db| status::status(&*db, file_id))
+ }
+
+ pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
+ where
+ F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
+ {
+ self.with_db(move |db| prime_caches::parallel_prime_caches(db, num_worker_threads, &cb))
+ }
+
+ /// Gets the text of the source file.
+ pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<String>> {
+ self.with_db(|db| db.file_text(file_id))
+ }
+
+ /// Gets the syntax tree of the file.
+ pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
+ self.with_db(|db| db.parse(file_id).tree())
+ }
+
+ /// Returns true if this file belongs to an immutable library.
+ pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
+ use ide_db::base_db::SourceDatabaseExt;
+ self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
+ }
+
+ /// Gets the file's `LineIndex`: data structure to convert between absolute
+ /// offsets and line/column representation.
+ pub fn file_line_index(&self, file_id: FileId) -> Cancellable<Arc<LineIndex>> {
+ self.with_db(|db| db.line_index(file_id))
+ }
+
+ /// Selects the next syntactic nodes encompassing the range.
+ pub fn extend_selection(&self, frange: FileRange) -> Cancellable<TextRange> {
+ self.with_db(|db| extend_selection::extend_selection(db, frange))
+ }
+
+ /// Returns position of the matching brace (all types of braces are
+ /// supported).
+ pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
+ self.with_db(|db| {
+ let parse = db.parse(position.file_id);
+ let file = parse.tree();
+ matching_brace::matching_brace(&file, position.offset)
+ })
+ }
+
+ /// Returns a syntax tree represented as `String`, for debug purposes.
+ // FIXME: use a better name here.
+ pub fn syntax_tree(
+ &self,
+ file_id: FileId,
+ text_range: Option<TextRange>,
+ ) -> Cancellable<String> {
+ self.with_db(|db| syntax_tree::syntax_tree(db, file_id, text_range))
+ }
+
+ pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> {
+ self.with_db(|db| view_hir::view_hir(db, position))
+ }
+
+ pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
+ self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
+ }
+
+ /// Renders the crate graph to GraphViz "dot" syntax.
+ pub fn view_crate_graph(&self, full: bool) -> Cancellable<Result<String, String>> {
+ self.with_db(|db| view_crate_graph::view_crate_graph(db, full))
+ }
+
+ pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> {
+ self.with_db(|db| expand_macro::expand_macro(db, position))
+ }
+
+ /// Returns an edit to remove all newlines in the range, cleaning up minor
+ /// stuff like trailing commas.
+ pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
+ self.with_db(|db| {
+ let parse = db.parse(frange.file_id);
+ join_lines::join_lines(config, &parse.tree(), frange.range)
+ })
+ }
+
+ /// Returns an edit which should be applied when opening a new line, fixing
+ /// up minor stuff like continuing the comment.
+ /// The edit will be a snippet (with `$0`).
+ pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> {
+ self.with_db(|db| typing::on_enter(db, position))
+ }
+
+ /// Returns an edit which should be applied after a character was typed.
+ ///
+ /// This is useful for some on-the-fly fixups, like adding `;` to `let =`
+ /// automatically.
+ pub fn on_char_typed(
+ &self,
+ position: FilePosition,
+ char_typed: char,
+ autoclose: bool,
+ ) -> Cancellable<Option<SourceChange>> {
+ // Fast path to not even parse the file.
+ if !typing::TRIGGER_CHARS.contains(char_typed) {
+ return Ok(None);
+ }
+ if char_typed == '<' && !autoclose {
+ return Ok(None);
+ }
+
+ self.with_db(|db| typing::on_char_typed(db, position, char_typed))
+ }
+
+ /// Returns a tree representation of symbols in the file. Useful to draw a
+ /// file outline.
+ pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
+ self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree()))
+ }
+
+    /// Returns a list of the places in the file where type hints can be displayed.
+    pub fn inlay_hints(
+        &self,
+        config: &InlayHintsConfig,
+        file_id: FileId,
+        range: Option<FileRange>,
+    ) -> Cancellable<Vec<InlayHint>> {
+        self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
+    }
+
+    /// Returns the set of folding ranges.
+    pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
+        self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree()))
+    }
+
+    /// Fuzzy searches for a symbol.
+    pub fn symbol_search(&self, query: Query) -> Cancellable<Vec<NavigationTarget>> {
+        self.with_db(|db| {
+            symbol_index::world_symbols(db, query)
+                .into_iter() // FIXME(perf): should we make this a par iter?
+                .filter_map(|s| s.try_to_nav(db))
+                .collect::<Vec<_>>()
+        })
+    }
+
+    /// Returns the definitions from the symbol at `position`.
+    pub fn goto_definition(
+        &self,
+        position: FilePosition,
+    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+        self.with_db(|db| goto_definition::goto_definition(db, position))
+    }
+
+    /// Returns the declaration from the symbol at `position`.
+    pub fn goto_declaration(
+        &self,
+        position: FilePosition,
+    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+        self.with_db(|db| goto_declaration::goto_declaration(db, position))
+    }
+
+    /// Returns the impls from the symbol at `position`.
+    pub fn goto_implementation(
+        &self,
+        position: FilePosition,
+    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+        self.with_db(|db| goto_implementation::goto_implementation(db, position))
+    }
+
+    /// Returns the type definitions for the symbol at `position`.
+    pub fn goto_type_definition(
+        &self,
+        position: FilePosition,
+    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+        self.with_db(|db| goto_type_definition::goto_type_definition(db, position))
+    }
+
+    /// Finds all usages of the reference at point.
+    ///
+    /// `search_scope` optionally restricts where references are searched for.
+    pub fn find_all_refs(
+        &self,
+        position: FilePosition,
+        search_scope: Option<SearchScope>,
+    ) -> Cancellable<Option<Vec<ReferenceSearchResult>>> {
+        self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
+    }
+
+    /// Finds all methods and free functions for the file. Does not return tests!
+    pub fn find_all_methods(&self, file_id: FileId) -> Cancellable<Vec<FileRange>> {
+        self.with_db(|db| fn_references::find_all_methods(db, file_id))
+    }
+
+    /// Returns a short text describing the element at the given range.
+    pub fn hover(
+        &self,
+        config: &HoverConfig,
+        range: FileRange,
+    ) -> Cancellable<Option<RangeInfo<HoverResult>>> {
+        self.with_db(|db| hover::hover(db, range, config))
+    }
+
+    /// Returns the moniker of the symbol at `position`.
+    pub fn moniker(
+        &self,
+        position: FilePosition,
+    ) -> Cancellable<Option<RangeInfo<Vec<moniker::MonikerResult>>>> {
+        self.with_db(|db| moniker::moniker(db, position))
+    }
+
+    /// Returns URL(s) for the documentation of the symbol under the cursor.
+    pub fn external_docs(
+        &self,
+        position: FilePosition,
+    ) -> Cancellable<Option<doc_links::DocumentationLink>> {
+        self.with_db(|db| doc_links::external_docs(db, &position))
+    }
+
+    /// Computes parameter information at the given position.
+    pub fn signature_help(&self, position: FilePosition) -> Cancellable<Option<SignatureHelp>> {
+        self.with_db(|db| signature_help::signature_help(db, position))
+    }
+
+    /// Computes call hierarchy candidates for the given file position.
+    pub fn call_hierarchy(
+        &self,
+        position: FilePosition,
+    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+        self.with_db(|db| call_hierarchy::call_hierarchy(db, position))
+    }
+
+    /// Computes incoming calls for the given file position.
+    pub fn incoming_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
+        self.with_db(|db| call_hierarchy::incoming_calls(db, position))
+    }
+
+    /// Computes outgoing calls for the given file position.
+    pub fn outgoing_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
+        self.with_db(|db| call_hierarchy::outgoing_calls(db, position))
+    }
+
+    /// Returns a `mod name;` declaration which created the current module.
+    pub fn parent_module(&self, position: FilePosition) -> Cancellable<Vec<NavigationTarget>> {
+        self.with_db(|db| parent_module::parent_module(db, position))
+    }
+
+    /// Returns the crates this file belongs to.
+    pub fn crate_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+        self.with_db(|db| parent_module::crate_for(db, file_id))
+    }
+
+    /// Returns the edition of the given crate.
+    pub fn crate_edition(&self, crate_id: CrateId) -> Cancellable<Edition> {
+        self.with_db(|db| db.crate_graph()[crate_id].edition)
+    }
+
+    /// Returns the root file of the given crate.
+    pub fn crate_root(&self, crate_id: CrateId) -> Cancellable<FileId> {
+        self.with_db(|db| db.crate_graph()[crate_id].root_file_id)
+    }
+
+    /// Returns the set of possible targets to run for the current file.
+    pub fn runnables(&self, file_id: FileId) -> Cancellable<Vec<Runnable>> {
+        self.with_db(|db| runnables::runnables(db, file_id))
+    }
+
+    /// Returns the set of tests for the given file position.
+    pub fn related_tests(
+        &self,
+        position: FilePosition,
+        search_scope: Option<SearchScope>,
+    ) -> Cancellable<Vec<Runnable>> {
+        self.with_db(|db| runnables::related_tests(db, position, search_scope))
+    }
+
+    /// Computes syntax highlighting for the given file.
+    pub fn highlight(&self, file_id: FileId) -> Cancellable<Vec<HlRange>> {
+        self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false))
+    }
+
+    /// Computes all ranges to highlight for a given item in a file.
+    pub fn highlight_related(
+        &self,
+        config: HighlightRelatedConfig,
+        position: FilePosition,
+    ) -> Cancellable<Option<Vec<HighlightedRange>>> {
+        self.with_db(|db| {
+            highlight_related::highlight_related(&Semantics::new(db), config, position)
+        })
+    }
+
+    /// Computes syntax highlighting for the given file range.
+    pub fn highlight_range(&self, frange: FileRange) -> Cancellable<Vec<HlRange>> {
+        self.with_db(|db| {
+            syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false)
+        })
+    }
+
+    /// Computes syntax highlighting for the given file, rendered as HTML.
+    pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable<String> {
+        self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow))
+    }
+
+    /// Computes completions at the given position.
+    pub fn completions(
+        &self,
+        config: &CompletionConfig,
+        position: FilePosition,
+        trigger_character: Option<char>,
+    ) -> Cancellable<Option<Vec<CompletionItem>>> {
+        self.with_db(|db| {
+            ide_completion::completions(db, config, position, trigger_character).map(Into::into)
+        })
+    }
+
+    /// Resolves additional completion data at the position given.
+    pub fn resolve_completion_edits(
+        &self,
+        config: &CompletionConfig,
+        position: FilePosition,
+        imports: impl IntoIterator<Item = (String, String)> + std::panic::UnwindSafe,
+    ) -> Cancellable<Vec<TextEdit>> {
+        // A `None` result means there is nothing to resolve, which flattens
+        // into an empty edit list for the caller.
+        Ok(self
+            .with_db(|db| ide_completion::resolve_completion_edits(db, config, position, imports))?
+            .unwrap_or_default())
+    }
+
+    /// Computes the set of diagnostics for the given file.
+    pub fn diagnostics(
+        &self,
+        config: &DiagnosticsConfig,
+        resolve: AssistResolveStrategy,
+        file_id: FileId,
+    ) -> Cancellable<Vec<Diagnostic>> {
+        self.with_db(|db| ide_diagnostics::diagnostics(db, config, &resolve, file_id))
+    }
+
+    /// Convenience function to return assists + quick fixes for diagnostics
+    pub fn assists_with_fixes(
+        &self,
+        assist_config: &AssistConfig,
+        diagnostics_config: &DiagnosticsConfig,
+        resolve: AssistResolveStrategy,
+        frange: FileRange,
+    ) -> Cancellable<Vec<Assist>> {
+        // Diagnostic quick fixes are only gathered when the caller's allow-list
+        // permits quick fixes (`AssistKind::None` is accepted as well), or when
+        // there is no allow-list at all.
+        let include_fixes = match &assist_config.allowed {
+            Some(it) => it.iter().any(|&it| it == AssistKind::None || it == AssistKind::QuickFix),
+            None => true,
+        };
+
+        self.with_db(|db| {
+            let diagnostic_assists = if include_fixes {
+                ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
+                    .into_iter()
+                    .flat_map(|it| it.fixes.unwrap_or_default())
+                    // Only keep fixes whose target actually touches the range.
+                    .filter(|it| it.target.intersect(frange.range).is_some())
+                    .collect()
+            } else {
+                Vec::new()
+            };
+            let ssr_assists = ssr::ssr_assists(db, &resolve, frange);
+            let assists = ide_assists::assists(db, assist_config, resolve, frange);
+
+            // Order: diagnostic fixes first, then SSR assists, then the rest.
+            let mut res = diagnostic_assists;
+            res.extend(ssr_assists.into_iter());
+            res.extend(assists.into_iter());
+
+            res
+        })
+    }
+
+    /// Returns the edit required to rename reference at the position to the new
+    /// name.
+    pub fn rename(
+        &self,
+        position: FilePosition,
+        new_name: &str,
+    ) -> Cancellable<Result<SourceChange, RenameError>> {
+        self.with_db(|db| rename::rename(db, position, new_name))
+    }
+
+    /// Checks whether the reference at `position` can be renamed, returning
+    /// the range of the identifier that would be renamed.
+    pub fn prepare_rename(
+        &self,
+        position: FilePosition,
+    ) -> Cancellable<Result<RangeInfo<()>, RenameError>> {
+        self.with_db(|db| rename::prepare_rename(db, position))
+    }
+
+    /// Computes the source change needed when `file_id` is renamed so that its
+    /// new file stem is `new_name_stem`.
+    pub fn will_rename_file(
+        &self,
+        file_id: FileId,
+        new_name_stem: &str,
+    ) -> Cancellable<Option<SourceChange>> {
+        self.with_db(|db| rename::will_rename_file(db, file_id, new_name_stem))
+    }
+
+    /// Performs structural search and replace with the SSR rule in `query`.
+    ///
+    /// When `parse_only` is set, the rule is parsed and checked but no edits
+    /// are computed. `resolve_context` anchors path resolution for the rule,
+    /// and `selections` restrict where matches are looked for.
+    pub fn structural_search_replace(
+        &self,
+        query: &str,
+        parse_only: bool,
+        resolve_context: FilePosition,
+        selections: Vec<FileRange>,
+    ) -> Cancellable<Result<SourceChange, SsrError>> {
+        self.with_db(|db| {
+            let rule: ide_ssr::SsrRule = query.parse()?;
+            let mut match_finder =
+                ide_ssr::MatchFinder::in_context(db, resolve_context, selections)?;
+            match_finder.add_rule(rule)?;
+            let edits = if parse_only { Default::default() } else { match_finder.edits() };
+            Ok(SourceChange::from(edits))
+        })
+    }
+
+    /// Computes the annotations for the given file.
+    pub fn annotations(
+        &self,
+        config: &AnnotationConfig,
+        file_id: FileId,
+    ) -> Cancellable<Vec<Annotation>> {
+        self.with_db(|db| annotations::annotations(db, config, file_id))
+    }
+
+    /// Resolves the data of a previously computed annotation.
+    pub fn resolve_annotation(&self, annotation: Annotation) -> Cancellable<Annotation> {
+        self.with_db(|db| annotations::resolve_annotation(db, annotation))
+    }
+
+    /// Computes the text edit that moves the item at `range` in `direction`.
+    pub fn move_item(
+        &self,
+        range: FileRange,
+        direction: Direction,
+    ) -> Cancellable<Option<TextEdit>> {
+        self.with_db(|db| move_item::move_item(db, range, direction))
+    }
+
+    /// Performs an operation on the database that may be canceled.
+    ///
+    /// rust-analyzer needs to be able to answer semantic questions about the
+    /// code while the code is being modified. A common problem is that a
+    /// long-running query is being calculated when a new change arrives.
+    ///
+    /// We can't just apply the change immediately: this will cause the pending
+    /// query to see inconsistent state (it will observe an absence of
+    /// repeatable read). So what we do is we **cancel** all pending queries
+    /// before applying the change.
+    ///
+    /// Salsa implements cancellation by unwinding with a special value and
+    /// catching it on the API boundary.
+    fn with_db<F, T>(&self, f: F) -> Cancellable<T>
+    where
+        F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
+    {
+        Cancelled::catch(|| f(&self.db))
+    }
+}
+
+// Compile-time assertion that `Analysis` implements `Send`.
+#[test]
+fn analysis_is_send() {
+    fn is_send<T: Send>() {}
+    is_send::<Analysis>();
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs b/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs
new file mode 100644
index 000000000..3ec5c629e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs
@@ -0,0 +1,22 @@
+//! Removes markdown from strings.
+use pulldown_cmark::{Event, Parser, Tag};
+
+/// Removes all markdown, keeping the text and code blocks
+///
+/// Currently limited in styling, i.e. no ascii tables or lists
+pub(crate) fn remove_markdown(markdown: &str) -> String {
+    let mut out = String::new();
+    let parser = Parser::new(markdown);
+
+    for event in parser {
+        match event {
+            // Keep the literal contents of plain text and inline code spans.
+            Event::Text(text) | Event::Code(text) => out.push_str(&text),
+            // Turn structural breaks (and the end of a fenced code block) into
+            // plain newlines so blocks don't run together.
+            Event::SoftBreak | Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => {
+                out.push('\n')
+            }
+            // All other events (headings, emphasis markers, etc.) are dropped.
+            _ => {}
+        }
+    }
+
+    out
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/markup.rs b/src/tools/rust-analyzer/crates/ide/src/markup.rs
new file mode 100644
index 000000000..60c193c40
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/markup.rs
@@ -0,0 +1,38 @@
+//! Markdown formatting.
+//!
+//! Sometimes, we want to display a "rich text" in the UI. At the moment, we use
+//! markdown for this purpose. It doesn't feel like a right option, but that's
+//! what is used by LSP, so let's keep it simple.
+use std::fmt;
+
+/// A chunk of markdown text ready to be sent to the client.
+#[derive(Default, Debug)]
+pub struct Markup {
+    text: String,
+}
+
+impl From<Markup> for String {
+    fn from(markup: Markup) -> Self {
+        markup.text
+    }
+}
+
+impl From<String> for Markup {
+    fn from(text: String) -> Self {
+        Markup { text }
+    }
+}
+
+impl fmt::Display for Markup {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&self.text, f)
+    }
+}
+
+impl Markup {
+    /// Borrows the underlying markdown source.
+    pub fn as_str(&self) -> &str {
+        self.text.as_str()
+    }
+    /// Wraps `contents` in a fenced ```rust code block.
+    pub fn fenced_block(contents: &impl fmt::Display) -> Markup {
+        format!("```rust\n{}\n```", contents).into()
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
new file mode 100644
index 000000000..da70cecdd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
@@ -0,0 +1,78 @@
+use syntax::{
+ ast::{self, AstNode},
+ SourceFile, SyntaxKind, TextSize, T,
+};
+
+// Feature: Matching Brace
+//
+// If the cursor is on any brace (`<>(){}[]||`) which is a part of a brace-pair,
+// moves cursor to the matching brace. It uses the actual parser to determine
+// braces, so it won't confuse generics with comparisons.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Find matching brace**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif[]
+pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {
+    // Brace kinds are laid out as adjacent pairs so that `idx ^ 1` maps a
+    // brace to its partner; `|` pairs with itself, hence it appears twice.
+    const BRACES: &[SyntaxKind] =
+        &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]];
+    // Several tokens may touch the offset; take the last brace-like one.
+    let (brace_token, brace_idx) = file
+        .syntax()
+        .token_at_offset(offset)
+        .filter_map(|node| {
+            let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
+            Some((node, idx))
+        })
+        .last()?;
+    let parent = brace_token.parent()?;
+    // `|` only counts as a brace when it delimits a closure parameter list;
+    // pipes in patterns or bit-or expressions must not match.
+    if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
+        cov_mark::hit!(pipes_not_braces);
+        return None;
+    }
+    let matching_kind = BRACES[brace_idx ^ 1];
+    // The partner must be a sibling token under the same parent node.
+    let matching_node = parent
+        .children_with_tokens()
+        .filter_map(|it| it.into_token())
+        .find(|node| node.kind() == matching_kind && node != &brace_token)?;
+    Some(matching_node.text_range().start())
+}
+
+#[cfg(test)]
+mod tests {
+    use test_utils::{add_cursor, assert_eq_text, extract_offset};
+
+    use super::*;
+
+    #[test]
+    fn test_matching_brace() {
+        // `$0` marks the cursor in `before` and the expected cursor in
+        // `after`; when no matching brace is found the cursor stays put.
+        fn do_check(before: &str, after: &str) {
+            let (pos, before) = extract_offset(before);
+            let parse = SourceFile::parse(&before);
+            let new_pos = match matching_brace(&parse.tree(), pos) {
+                None => pos,
+                Some(pos) => pos,
+            };
+            let actual = add_cursor(&before, new_pos);
+            assert_eq_text!(after, &actual);
+        }
+
+        do_check("struct Foo { a: i32, }$0", "struct Foo $0{ a: i32, }");
+        do_check("fn main() { |x: i32|$0 x * 2;}", "fn main() { $0|x: i32| x * 2;}");
+        do_check("fn main() { $0|x: i32| x * 2;}", "fn main() { |x: i32$0| x * 2;}");
+        do_check(
+            "fn func(x) { return (2 * (x + 3)$0) + 5;}",
+            "fn func(x) { return $0(2 * (x + 3)) + 5;}",
+        );
+
+        {
+            // Pipes inside `match` or-patterns are not closure param lists.
+            cov_mark::check!(pipes_not_braces);
+            do_check(
+                "fn main() { match 92 { 1 | 2 |$0 3 => 92 } }",
+                "fn main() { match 92 { 1 | 2 |$0 3 => 92 } }",
+            );
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
new file mode 100644
index 000000000..6bab9fa1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -0,0 +1,342 @@
+//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
+//! for LSIF and LSP.
+
+use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
+use ide_db::{
+ base_db::{CrateOrigin, FileId, FileLoader, FilePosition, LangCrateOrigin},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{AstNode, SyntaxKind::*, T};
+
+use crate::{doc_links::token_as_doc_comment, RangeInfo};
+
+/// A moniker identifier: the crate name plus the fully qualified path of the
+/// item within that crate.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct MonikerIdentifier {
+    crate_name: String,
+    path: Vec<Name>,
+}
+
+/// Renders the identifier as `crate::path::to::item`.
+///
+/// Implementing `Display` (instead of `ToString` directly) keeps `.to_string()`
+/// available through the blanket impl while also allowing the identifier to be
+/// used in format strings.
+impl std::fmt::Display for MonikerIdentifier {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}::{}", self.crate_name, self.path.iter().map(|x| x.to_string()).join("::"))
+    }
+}
+
+/// Whether the symbol is defined in the queried crate (`Export`) or comes
+/// from one of its dependencies (`Import`).
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum MonikerKind {
+    Import,
+    Export,
+}
+
+/// A moniker for a single symbol, as surfaced through LSIF/LSP.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MonikerResult {
+    pub identifier: MonikerIdentifier,
+    pub kind: MonikerKind,
+    pub package_information: PackageInformation,
+}
+
+/// Package metadata attached to a moniker: name, repository and version.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PackageInformation {
+    pub name: String,
+    pub repo: String,
+    pub version: String,
+}
+
+/// Returns the first relevant crate that has a module originating from
+/// `file_id`, if any.
+pub(crate) fn crate_for_file(db: &RootDatabase, file_id: FileId) -> Option<Crate> {
+    for &krate in db.relevant_crates(file_id).iter() {
+        let crate_def_map = db.crate_def_map(krate);
+        // A crate "owns" the file when one of its modules comes from it.
+        for (_, data) in crate_def_map.modules() {
+            if data.origin.file_id() == Some(file_id) {
+                return Some(krate.into());
+            }
+        }
+    }
+    None
+}
+
+/// Computes the monikers for the symbol(s) under the cursor.
+pub(crate) fn moniker(
+    db: &RootDatabase,
+    FilePosition { file_id, offset }: FilePosition,
+) -> Option<RangeInfo<Vec<MonikerResult>>> {
+    let sema = &Semantics::new(db);
+    let file = sema.parse(file_id).syntax().clone();
+    let current_crate = crate_for_file(db, file_id)?;
+    // Prefer identifier-like tokens over punctuation, and skip trivia.
+    let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+        IDENT
+        | INT_NUMBER
+        | LIFETIME_IDENT
+        | T![self]
+        | T![super]
+        | T![crate]
+        | T![Self]
+        | COMMENT => 2,
+        kind if kind.is_trivia() => 0,
+        _ => 1,
+    })?;
+    // A token inside a doc comment may be an intra-doc link; resolve it to
+    // the linked definition instead.
+    if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+        return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, _| {
+            let m = def_to_moniker(db, def, current_crate)?;
+            Some(RangeInfo::new(original_token.text_range(), vec![m]))
+        });
+    }
+    // Descend into macro expansions so tokens produced by macros resolve too;
+    // the same definition can be reached several times, hence `unique`.
+    let navs = sema
+        .descend_into_macros(original_token.clone())
+        .into_iter()
+        .filter_map(|token| {
+            IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
+                it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
+            })
+        })
+        .flatten()
+        .unique()
+        .collect::<Vec<_>>();
+    Some(RangeInfo::new(original_token.text_range(), navs))
+}
+
+/// Builds the moniker for `def`, as seen from `from_crate`.
+///
+/// Returns `None` for definitions without a stable cross-crate identity
+/// (generic parameters, `Self` types and locals).
+pub(crate) fn def_to_moniker(
+    db: &RootDatabase,
+    def: Definition,
+    from_crate: Crate,
+) -> Option<MonikerResult> {
+    if matches!(def, Definition::GenericParam(_) | Definition::SelfType(_) | Definition::Local(_)) {
+        return None;
+    }
+    let module = def.module(db)?;
+    let krate = module.krate();
+    let mut path = vec![];
+    path.extend(module.path_to_root(db).into_iter().filter_map(|x| x.name(db)));
+
+    // Handle associated items within a trait
+    if let Some(assoc) = def.as_assoc_item(db) {
+        let container = assoc.container(db);
+        match container {
+            AssocItemContainer::Trait(trait_) => {
+                // Because different traits can have functions with the same name,
+                // we have to include the trait name as part of the moniker for uniqueness.
+                path.push(trait_.name(db));
+            }
+            AssocItemContainer::Impl(impl_) => {
+                // Because a struct can implement multiple traits, for implementations
+                // we add both the struct name and the trait name to the path
+                if let Some(adt) = impl_.self_ty(db).as_adt() {
+                    path.push(adt.name(db));
+                }
+
+                if let Some(trait_) = impl_.trait_(db) {
+                    path.push(trait_.name(db));
+                }
+            }
+        }
+    }
+
+    // Fields are qualified by the name of their parent struct/enum/union.
+    if let Definition::Field(it) = def {
+        path.push(it.parent_def(db).name(db));
+    }
+
+    path.push(def.name(db)?);
+    Some(MonikerResult {
+        identifier: MonikerIdentifier {
+            crate_name: krate.display_name(db)?.crate_name().to_string(),
+            path,
+        },
+        // Definitions in the queried crate are exported; anything else is an import.
+        kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
+        package_information: {
+            let name = krate.display_name(db)?.to_string();
+            // NOTE(review): for `Lang` crates the `version` slot is filled with a
+            // library URL rather than a version string — confirm this is intended.
+            let (repo, version) = match krate.origin(db) {
+                CrateOrigin::CratesIo { repo } => (repo?, krate.version(db)?),
+                CrateOrigin::Lang(lang) => (
+                    "https://github.com/rust-lang/rust/".to_string(),
+                    match lang {
+                        LangCrateOrigin::Other => {
+                            "https://github.com/rust-lang/rust/library/".into()
+                        }
+                        lang => format!("https://github.com/rust-lang/rust/library/{lang}",),
+                    },
+                ),
+            };
+            PackageInformation { name, repo, version }
+        },
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::fixture;
+
+    use super::MonikerKind;
+
+    /// Asserts that the cursor position in `ra_fixture` yields no moniker.
+    #[track_caller]
+    fn no_moniker(ra_fixture: &str) {
+        let (analysis, position) = fixture::position(ra_fixture);
+        if let Some(x) = analysis.moniker(position).unwrap() {
+            // NOTE(review): message typo — "founded" should read "found".
+            assert_eq!(x.info.len(), 0, "Moniker founded but no moniker expected: {:?}", x);
+        }
+    }
+
+    /// Checks that exactly one moniker is produced at the cursor and that it
+    /// matches the expected identifier, package information (compared via its
+    /// `Debug` output) and kind.
+    #[track_caller]
+    fn check_moniker(ra_fixture: &str, identifier: &str, package: &str, kind: MonikerKind) {
+        let (analysis, position) = fixture::position(ra_fixture);
+        let x = analysis.moniker(position).unwrap().expect("no moniker found").info;
+        assert_eq!(x.len(), 1);
+        let x = x.into_iter().next().unwrap();
+        assert_eq!(identifier, x.identifier.to_string());
+        assert_eq!(package, format!("{:?}", x.package_information));
+        assert_eq!(kind, x.kind);
+    }
+
+    #[test]
+    fn basic() {
+        check_moniker(
+            r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+    func$0();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+    pub fn func() {}
+}
+"#,
+            "foo::module::func",
+            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+            MonikerKind::Import,
+        );
+        check_moniker(
+            r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+    func();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+    pub fn func$0() {}
+}
+"#,
+            "foo::module::func",
+            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+            MonikerKind::Export,
+        );
+    }
+
+    #[test]
+    fn moniker_for_trait() {
+        check_moniker(
+            r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+    pub trait MyTrait {
+        pub fn func$0() {}
+    }
+}
+"#,
+            "foo::module::MyTrait::func",
+            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+            MonikerKind::Export,
+        );
+    }
+
+    #[test]
+    fn moniker_for_trait_constant() {
+        check_moniker(
+            r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+    pub trait MyTrait {
+        const MY_CONST$0: u8;
+    }
+}
+"#,
+            "foo::module::MyTrait::MY_CONST",
+            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+            MonikerKind::Export,
+        );
+    }
+
+    #[test]
+    fn moniker_for_trait_type() {
+        check_moniker(
+            r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+    pub trait MyTrait {
+        type MyType$0;
+    }
+}
+"#,
+            "foo::module::MyTrait::MyType",
+            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+            MonikerKind::Export,
+        );
+    }
+
+    #[test]
+    fn moniker_for_trait_impl_function() {
+        check_moniker(
+            r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+    pub trait MyTrait {
+        pub fn func() {}
+    }
+
+    struct MyStruct {}
+
+    impl MyTrait for MyStruct {
+        pub fn func$0() {}
+    }
+}
+"#,
+            "foo::module::MyStruct::MyTrait::func",
+            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+            MonikerKind::Export,
+        );
+    }
+
+    #[test]
+    fn moniker_for_field() {
+        check_moniker(
+            r#"
+//- /lib.rs crate:main deps:foo
+use foo::St;
+fn main() {
+    let x = St { a$0: 2 };
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub struct St {
+    pub a: i32,
+}
+"#,
+            "foo::St::a",
+            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+            MonikerKind::Import,
+        );
+    }
+
+    #[test]
+    fn no_moniker_for_local() {
+        no_moniker(
+            r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+    func();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+    pub fn func() {
+        let x$0 = 2;
+    }
+}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
new file mode 100644
index 000000000..02e9fb8b5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
@@ -0,0 +1,890 @@
+use std::{iter::once, mem};
+
+use hir::Semantics;
+use ide_db::{base_db::FileRange, helpers::pick_best_token, RootDatabase};
+use itertools::Itertools;
+use syntax::{algo, ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange};
+use text_edit::{TextEdit, TextEditBuilder};
+
+/// The direction in which to move an item.
+#[derive(Copy, Clone, Debug)]
+pub enum Direction {
+    Up,
+    Down,
+}
+
+// Feature: Move Item
+//
+// Move item under cursor or selection up and down.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Move item up**
+// | VS Code | **Rust Analyzer: Move item down**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif[]
+pub(crate) fn move_item(
+    db: &RootDatabase,
+    range: FileRange,
+    direction: Direction,
+) -> Option<TextEdit> {
+    let sema = Semantics::new(db);
+    let file = sema.parse(range.file_id);
+
+    // An empty selection means "use the token at the cursor" (preferring
+    // identifiers over punctuation and ignoring trivia); otherwise operate on
+    // the smallest syntax element covering the selection.
+    let item = if range.range.is_empty() {
+        SyntaxElement::Token(pick_best_token(
+            file.syntax().token_at_offset(range.range.start()),
+            |kind| match kind {
+                SyntaxKind::IDENT | SyntaxKind::LIFETIME_IDENT => 2,
+                kind if kind.is_trivia() => 0,
+                _ => 1,
+            },
+        )?)
+    } else {
+        file.syntax().covering_element(range.range)
+    };
+
+    find_ancestors(item, direction, range.range)
+}
+
+/// Walks up from `item` to the closest ancestor whose kind is movable and
+/// attempts the move there.
+fn find_ancestors(item: SyntaxElement, direction: Direction, range: TextRange) -> Option<TextEdit> {
+    let root = match item {
+        SyntaxElement::Node(node) => node,
+        SyntaxElement::Token(token) => token.parent()?,
+    };
+
+    // Node kinds that may be moved up/down as a unit.
+    // (The original list contained `MACRO_DEF` twice; the duplicate is removed.)
+    let movable = [
+        SyntaxKind::ARG_LIST,
+        SyntaxKind::GENERIC_PARAM_LIST,
+        SyntaxKind::GENERIC_ARG_LIST,
+        SyntaxKind::VARIANT_LIST,
+        SyntaxKind::TYPE_BOUND_LIST,
+        SyntaxKind::MATCH_ARM,
+        SyntaxKind::PARAM,
+        SyntaxKind::LET_STMT,
+        SyntaxKind::EXPR_STMT,
+        SyntaxKind::IF_EXPR,
+        SyntaxKind::FOR_EXPR,
+        SyntaxKind::LOOP_EXPR,
+        SyntaxKind::WHILE_EXPR,
+        SyntaxKind::RETURN_EXPR,
+        SyntaxKind::MATCH_EXPR,
+        SyntaxKind::MACRO_CALL,
+        SyntaxKind::TYPE_ALIAS,
+        SyntaxKind::TRAIT,
+        SyntaxKind::IMPL,
+        SyntaxKind::MACRO_DEF,
+        SyntaxKind::STRUCT,
+        SyntaxKind::UNION,
+        SyntaxKind::ENUM,
+        SyntaxKind::FN,
+        SyntaxKind::MODULE,
+        SyntaxKind::USE,
+        SyntaxKind::STATIC,
+        SyntaxKind::CONST,
+        SyntaxKind::MACRO_RULES,
+    ];
+
+    // Consider the starting node itself before walking up its ancestors.
+    let ancestor = once(root.clone())
+        .chain(root.ancestors())
+        .find(|ancestor| movable.contains(&ancestor.kind()))?;
+
+    move_in_direction(&ancestor, direction, range)
+}
+
+/// Applies the move depending on the node's kind: known list-like nodes swap
+/// adjacent list elements; everything else swaps with its direct sibling.
+fn move_in_direction(
+    node: &SyntaxNode,
+    direction: Direction,
+    range: TextRange,
+) -> Option<TextEdit> {
+    match_ast! {
+        match node {
+            ast::ArgList(it) => swap_sibling_in_list(node, it.args(), range, direction),
+            ast::GenericParamList(it) => swap_sibling_in_list(node, it.generic_params(), range, direction),
+            ast::GenericArgList(it) => swap_sibling_in_list(node, it.generic_args(), range, direction),
+            ast::VariantList(it) => swap_sibling_in_list(node, it.variants(), range, direction),
+            ast::TypeBoundList(it) => swap_sibling_in_list(node, it.bounds(), range, direction),
+            _ => Some(replace_nodes(range, node, &match direction {
+                Direction::Up => node.prev_sibling(),
+                Direction::Down => node.next_sibling(),
+            }?))
+        }
+    }
+}
+
+/// Swaps the list element containing `range` with its neighbor in `direction`.
+fn swap_sibling_in_list<A: AstNode + Clone, I: Iterator<Item = A>>(
+    node: &SyntaxNode,
+    list: I,
+    range: TextRange,
+    direction: Direction,
+) -> Option<TextEdit> {
+    // Find the adjacent pair where the cursor sits on the element to move.
+    let list_lookup = list.tuple_windows().find(|(l, r)| match direction {
+        Direction::Up => r.syntax().text_range().contains_range(range),
+        Direction::Down => l.syntax().text_range().contains_range(range),
+    });
+
+    if let Some((l, r)) = list_lookup {
+        Some(replace_nodes(range, l.syntax(), r.syntax()))
+    } else {
+        // Cursor is beyond any movable list item (for example, on a curly brace
+        // in an enum). The parent of the list is not necessarily movable itself
+        // (an arg list's parent is not, for example), so tree traversal has to
+        // continue to find a suitable node.
+        find_ancestors(SyntaxElement::Node(node.parent()?), direction, range)
+    }
+}
+
+/// Builds the edit that swaps `first` and `second`, inserting a `$0` snippet
+/// marker so the cursor follows the moved item.
+fn replace_nodes<'a>(
+    range: TextRange,
+    mut first: &'a SyntaxNode,
+    mut second: &'a SyntaxNode,
+) -> TextEdit {
+    let cursor_offset = if range.is_empty() {
+        // FIXME: `applySnippetTextEdits` does not support non-empty selection ranges
+        if first.text_range().contains_range(range) {
+            Some(range.start() - first.text_range().start())
+        } else if second.text_range().contains_range(range) {
+            // Normalize so `first` is always the node that carries the cursor.
+            mem::swap(&mut first, &mut second);
+            Some(range.start() - first.text_range().start())
+        } else {
+            None
+        }
+    } else {
+        None
+    };
+
+    // Re-create the cursor-carrying node's text with the `$0` marker spliced in.
+    let first_with_cursor = match cursor_offset {
+        Some(offset) => {
+            let mut item_text = first.text().to_string();
+            item_text.insert_str(offset.into(), "$0");
+            item_text
+        }
+        None => first.text().to_string(),
+    };
+
+    let mut edit = TextEditBuilder::default();
+
+    // Replace `first`'s text with `second`'s, then write `first` (with the
+    // cursor marker) where `second` used to be.
+    algo::diff(first, second).into_text_edit(&mut edit);
+    edit.replace(second.text_range(), first_with_cursor);
+
+    edit.finish()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+ use expect_test::{expect, Expect};
+
+ use crate::Direction;
+
+ fn check(ra_fixture: &str, expect: Expect, direction: Direction) {
+ let (analysis, range) = fixture::range(ra_fixture);
+ let edit = analysis.move_item(range, direction).unwrap().unwrap_or_default();
+ let mut file = analysis.file_text(range.file_id).unwrap().to_string();
+ edit.apply(&mut file);
+ expect.assert_eq(&file);
+ }
+
+ #[test]
+ fn test_moves_match_arm_up() {
+ check(
+ r#"
+fn main() {
+ match true {
+ true => {
+ println!("Hello, world");
+ },
+ false =>$0$0 {
+ println!("Test");
+ }
+ };
+}
+"#,
+ expect![[r#"
+ fn main() {
+ match true {
+ false =>$0 {
+ println!("Test");
+ }
+ true => {
+ println!("Hello, world");
+ },
+ };
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_match_arm_down() {
+ check(
+ r#"
+fn main() {
+ match true {
+ true =>$0$0 {
+ println!("Hello, world");
+ },
+ false => {
+ println!("Test");
+ }
+ };
+}
+"#,
+ expect![[r#"
+ fn main() {
+ match true {
+ false => {
+ println!("Test");
+ }
+ true =>$0 {
+ println!("Hello, world");
+ },
+ };
+ }
+ "#]],
+ Direction::Down,
+ );
+ }
+
+ #[test]
+ fn test_nowhere_to_move() {
+ check(
+ r#"
+fn main() {
+ match true {
+ true =>$0$0 {
+ println!("Hello, world");
+ },
+ false => {
+ println!("Test");
+ }
+ };
+}
+"#,
+ expect![[r#"
+ fn main() {
+ match true {
+ true => {
+ println!("Hello, world");
+ },
+ false => {
+ println!("Test");
+ }
+ };
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_let_stmt_up() {
+ check(
+ r#"
+fn main() {
+ let test = 123;
+ let test2$0$0 = 456;
+}
+"#,
+ expect![[r#"
+ fn main() {
+ let test2$0 = 456;
+ let test = 123;
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_expr_up() {
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+ println!("All I want to say is...");$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ println!("All I want to say is...");$0
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ if true {
+ println!("Test");
+ }$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ if true {
+ println!("Test");
+ }$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ for i in 0..10 {
+ println!("Test");
+ }$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ for i in 0..10 {
+ println!("Test");
+ }$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ loop {
+ println!("Test");
+ }$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ loop {
+ println!("Test");
+ }$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ while true {
+ println!("Test");
+ }$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ while true {
+ println!("Test");
+ }$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ return 123;$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ return 123;$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_nowhere_to_move_stmt() {
+ check(
+ r#"
+fn main() {
+ println!("All I want to say is...");$0$0
+ println!("Hello, world");
+}
+"#,
+ expect![[r#"
+ fn main() {
+ println!("All I want to say is...");
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_move_item() {
+ check(
+ r#"
+fn main() {}
+
+fn foo() {}$0$0
+"#,
+ expect![[r#"
+ fn foo() {}$0
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_move_impl_up() {
+ check(
+ r#"
+struct Yay;
+
+trait Wow {}
+
+impl Wow for Yay $0$0{}
+"#,
+ expect![[r#"
+ struct Yay;
+
+ impl Wow for Yay $0{}
+
+ trait Wow {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_move_use_up() {
+ check(
+ r#"
+use std::vec::Vec;
+use std::collections::HashMap$0$0;
+"#,
+ expect![[r#"
+ use std::collections::HashMap$0;
+ use std::vec::Vec;
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_match_expr_up() {
+ check(
+ r#"
+fn main() {
+ let test = 123;
+
+ $0match test {
+ 456 => {},
+ _ => {}
+ };$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ match test {
+ 456 => {},
+ _ => {}
+ };
+
+ let test = 123;
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_param() {
+ check(
+ r#"
+fn test(one: i32, two$0$0: u32) {}
+
+fn main() {
+ test(123, 456);
+}
+"#,
+ expect![[r#"
+ fn test(two$0: u32, one: i32) {}
+
+ fn main() {
+ test(123, 456);
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn f($0$0arg: u8, arg2: u16) {}
+"#,
+ expect![[r#"
+ fn f(arg2: u16, $0arg: u8) {}
+ "#]],
+ Direction::Down,
+ );
+ }
+
+ #[test]
+ fn test_moves_arg_up() {
+ check(
+ r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+ test(123, 456$0$0);
+}
+"#,
+ expect![[r#"
+ fn test(one: i32, two: u32) {}
+
+ fn main() {
+ test(456$0, 123);
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_arg_down() {
+ check(
+ r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+ test(123$0$0, 456);
+}
+"#,
+ expect![[r#"
+ fn test(one: i32, two: u32) {}
+
+ fn main() {
+ test(456, 123$0);
+ }
+ "#]],
+ Direction::Down,
+ );
+ }
+
+ #[test]
+ fn test_nowhere_to_move_arg() {
+ check(
+ r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+ test(123$0$0, 456);
+}
+"#,
+ expect![[r#"
+ fn test(one: i32, two: u32) {}
+
+ fn main() {
+ test(123, 456);
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_generic_param_up() {
+ check(
+ r#"
+struct Test<A, B$0$0>(A, B);
+
+fn main() {}
+"#,
+ expect![[r#"
+ struct Test<B$0, A>(A, B);
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_generic_arg_up() {
+ check(
+ r#"
+struct Test<A, B>(A, B);
+
+fn main() {
+ let t = Test::<i32, &str$0$0>(123, "yay");
+}
+"#,
+ expect![[r#"
+ struct Test<A, B>(A, B);
+
+ fn main() {
+ let t = Test::<&str$0, i32>(123, "yay");
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_variant_up() {
+ check(
+ r#"
+enum Hello {
+ One,
+ Two$0$0
+}
+
+fn main() {}
+"#,
+ expect![[r#"
+ enum Hello {
+ Two$0,
+ One
+ }
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_type_bound_up() {
+ check(
+ r#"
+trait One {}
+
+trait Two {}
+
+fn test<T: One + Two$0$0>(t: T) {}
+
+fn main() {}
+"#,
+ expect![[r#"
+ trait One {}
+
+ trait Two {}
+
+ fn test<T: Two$0 + One>(t: T) {}
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_prioritizes_trait_items() {
+ check(
+ r#"
+struct Test;
+
+trait Yay {
+ type One;
+
+ type Two;
+
+ fn inner();
+}
+
+impl Yay for Test {
+ type One = i32;
+
+ type Two = u32;
+
+ fn inner() {$0$0
+ println!("Mmmm");
+ }
+}
+"#,
+ expect![[r#"
+ struct Test;
+
+ trait Yay {
+ type One;
+
+ type Two;
+
+ fn inner();
+ }
+
+ impl Yay for Test {
+ type One = i32;
+
+ fn inner() {$0
+ println!("Mmmm");
+ }
+
+ type Two = u32;
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_weird_nesting() {
+ check(
+ r#"
+fn test() {
+ mod hello {
+ fn inner() {}
+ }
+
+ mod hi {$0$0
+ fn inner() {}
+ }
+}
+"#,
+ expect![[r#"
+ fn test() {
+ mod hi {$0
+ fn inner() {}
+ }
+
+ mod hello {
+ fn inner() {}
+ }
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_cursor_at_item_start() {
+ check(
+ r#"
+$0$0#[derive(Debug)]
+enum FooBar {
+ Foo,
+ Bar,
+}
+
+fn main() {}
+"#,
+ expect![[r##"
+ fn main() {}
+
+ $0#[derive(Debug)]
+ enum FooBar {
+ Foo,
+ Bar,
+ }
+ "##]],
+ Direction::Down,
+ );
+ check(
+ r#"
+$0$0enum FooBar {
+ Foo,
+ Bar,
+}
+
+fn main() {}
+"#,
+ expect![[r#"
+ fn main() {}
+
+ $0enum FooBar {
+ Foo,
+ Bar,
+ }
+ "#]],
+ Direction::Down,
+ );
+ check(
+ r#"
+struct Test;
+
+trait SomeTrait {}
+
+$0$0impl SomeTrait for Test {}
+
+fn main() {}
+"#,
+ expect![[r#"
+ struct Test;
+
+ $0impl SomeTrait for Test {}
+
+ trait SomeTrait {}
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_cursor_at_item_end() {
+ check(
+ r#"
+enum FooBar {
+ Foo,
+ Bar,
+}$0$0
+
+fn main() {}
+"#,
+ expect![[r#"
+ fn main() {}
+
+ enum FooBar {
+ Foo,
+ Bar,
+ }$0
+ "#]],
+ Direction::Down,
+ );
+ check(
+ r#"
+struct Test;
+
+trait SomeTrait {}
+
+impl SomeTrait for Test {}$0$0
+
+fn main() {}
+"#,
+ expect![[r#"
+ struct Test;
+
+ impl SomeTrait for Test {}$0
+
+ trait SomeTrait {}
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn handles_empty_file() {
+ check(r#"$0$0"#, expect![[r#""#]], Direction::Up);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
new file mode 100644
index 000000000..9f049e298
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -0,0 +1,623 @@
+//! See [`NavigationTarget`].
+
+use std::fmt;
+
+use either::Either;
+use hir::{
+ symbols::FileSymbol, AssocItem, Documentation, FieldSource, HasAttrs, HasSource, HirDisplay,
+ InFile, ModuleSource, Semantics,
+};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ SymbolKind,
+};
+use ide_db::{defs::Definition, RootDatabase};
+use stdx::never;
+use syntax::{
+ ast::{self, HasName},
+ match_ast, AstNode, SmolStr, SyntaxNode, TextRange,
+};
+
+/// `NavigationTarget` represents an element in the editor's UI which you can
+/// click on to navigate to a particular piece of code.
+///
+/// Typically, a `NavigationTarget` corresponds to some element in the source
+/// code, like a function or a struct, but this is not strictly required.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct NavigationTarget {
+    pub file_id: FileId,
+    /// Range which encompasses the whole element.
+    ///
+    /// Should include body, doc comments, attributes, etc.
+    ///
+    /// Clients should use this range to answer "is the cursor inside the
+    /// element?" question.
+    pub full_range: TextRange,
+    /// A "most interesting" range within the `full_range`.
+    ///
+    /// Typically, `full_range` is the whole syntax node, including doc
+    /// comments, and `focus_range` is the range of the identifier.
+    ///
+    /// Clients should place the cursor on this range when navigating to this target.
+    pub focus_range: Option<TextRange>,
+    /// The element's name, as shown in the UI.
+    pub name: SmolStr,
+    /// The kind of symbol, when known.
+    pub kind: Option<SymbolKind>,
+    /// Name of the enclosing item, if any (see `debug_render` / the tests below).
+    pub container_name: Option<SmolStr>,
+    /// Short textual description, e.g. `"enum FooInner"`.
+    pub description: Option<String>,
+    /// Resolved doc comments for the target, when available.
+    pub docs: Option<Documentation>,
+}
+
+impl fmt::Debug for NavigationTarget {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let mut f = f.debug_struct("NavigationTarget");
+        // Helper: print each listed `Option` field only when it is `Some`,
+        // keeping the debug output compact (relied upon by expect-tests).
+        macro_rules! opt {
+            ($($name:ident)*) => {$(
+                if let Some(it) = &self.$name {
+                    f.field(stringify!($name), it);
+                }
+            )*}
+        }
+        f.field("file_id", &self.file_id).field("full_range", &self.full_range);
+        opt!(focus_range);
+        f.field("name", &self.name);
+        opt!(kind container_name description docs);
+        f.finish()
+    }
+}
+
+/// Infallible conversion of a definition into a [`NavigationTarget`].
+pub(crate) trait ToNav {
+    fn to_nav(&self, db: &RootDatabase) -> NavigationTarget;
+}
+
+/// Fallible conversion; returns `None` when the definition has no
+/// navigable source (e.g. builtin types, see `TryToNav for Definition`).
+pub(crate) trait TryToNav {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget>;
+}
+
+// Delegate to whichever side of the `Either` is present.
+impl<T: TryToNav, U: TryToNav> TryToNav for Either<T, U> {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        match self {
+            Either::Left(it) => it.try_to_nav(db),
+            Either::Right(it) => it.try_to_nav(db),
+        }
+    }
+}
+
+impl NavigationTarget {
+    /// The range the cursor should land on: the focus range when one is
+    /// recorded, otherwise the whole element.
+    pub fn focus_or_full_range(&self) -> TextRange {
+        self.focus_range.unwrap_or(self.full_range)
+    }
+
+    /// Builds a target pointing at the module's *declaration* (`mod foo;`)
+    /// when one exists, falling back to the module's definition otherwise.
+    pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
+        let name = module.name(db).map(|it| it.to_smol_str()).unwrap_or_default();
+        if let Some(src @ InFile { value, .. }) = &module.declaration_source(db) {
+            let FileRange { file_id, range: full_range } = src.syntax().original_file_range(db);
+            // Focus on the module's name within the `mod` declaration.
+            let focus_range =
+                value.name().and_then(|it| orig_focus_range(db, src.file_id, it.syntax()));
+            let mut res = NavigationTarget::from_syntax(
+                file_id,
+                name,
+                focus_range,
+                full_range,
+                SymbolKind::Module,
+            );
+            res.docs = module.attrs(db).docs();
+            res.description = Some(module.display(db).to_string());
+            return res;
+        }
+        module.to_nav(db)
+    }
+
+    /// Compact single-line rendering used by expect-tests.
+    #[cfg(test)]
+    pub(crate) fn debug_render(&self) -> String {
+        let mut buf = format!(
+            "{} {:?} {:?} {:?}",
+            self.name,
+            self.kind.unwrap(),
+            self.file_id,
+            self.full_range
+        );
+        if let Some(focus_range) = self.focus_range {
+            buf.push_str(&format!(" {:?}", focus_range))
+        }
+        if let Some(container_name) = &self.container_name {
+            buf.push_str(&format!(" {}", container_name))
+        }
+        buf
+    }
+
+    /// Allows `NavigationTarget` to be created from a `NameOwner`
+    pub(crate) fn from_named(
+        db: &RootDatabase,
+        node @ InFile { file_id, value }: InFile<&dyn ast::HasName>,
+        kind: SymbolKind,
+    ) -> NavigationTarget {
+        // Unnamed items render as "_".
+        let name = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
+        let focus_range = value.name().and_then(|it| orig_focus_range(db, file_id, it.syntax()));
+        let FileRange { file_id, range } = node.map(|it| it.syntax()).original_file_range(db);
+
+        NavigationTarget::from_syntax(file_id, name, focus_range, range, kind)
+    }
+
+    /// Plain constructor; leaves `container_name`, `description` and `docs`
+    /// unset for the caller to fill in as needed.
+    fn from_syntax(
+        file_id: FileId,
+        name: SmolStr,
+        focus_range: Option<TextRange>,
+        full_range: TextRange,
+        kind: SymbolKind,
+    ) -> NavigationTarget {
+        NavigationTarget {
+            file_id,
+            name,
+            kind: Some(kind),
+            full_range,
+            focus_range,
+            container_name: None,
+            description: None,
+            docs: None,
+        }
+    }
+}
+
+impl TryToNav for FileSymbol {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        // Both the full item range and its name range must map back to a
+        // real file; otherwise there is nothing to navigate to.
+        let full_range = self.loc.original_range(db)?;
+        let name_range = self.loc.original_name_range(db)?;
+
+        Some(NavigationTarget {
+            file_id: full_range.file_id,
+            name: self.name.clone(),
+            kind: Some(self.kind.into()),
+            full_range: full_range.range,
+            focus_range: Some(name_range.range),
+            container_name: self.container_name.clone(),
+            description: description_from_symbol(db, self),
+            docs: None,
+        })
+    }
+}
+
+impl TryToNav for Definition {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        match self {
+            // Locals, labels and modules always have a source location.
+            Definition::Local(it) => Some(it.to_nav(db)),
+            Definition::Label(it) => Some(it.to_nav(db)),
+            Definition::Module(it) => Some(it.to_nav(db)),
+            Definition::Macro(it) => it.try_to_nav(db),
+            Definition::Field(it) => it.try_to_nav(db),
+            Definition::SelfType(it) => it.try_to_nav(db),
+            Definition::GenericParam(it) => it.try_to_nav(db),
+            Definition::Function(it) => it.try_to_nav(db),
+            Definition::Adt(it) => it.try_to_nav(db),
+            Definition::Variant(it) => it.try_to_nav(db),
+            Definition::Const(it) => it.try_to_nav(db),
+            Definition::Static(it) => it.try_to_nav(db),
+            Definition::Trait(it) => it.try_to_nav(db),
+            Definition::TypeAlias(it) => it.try_to_nav(db),
+            // Compiler/tool builtins have no source to navigate to.
+            Definition::BuiltinType(_) => None,
+            Definition::ToolModule(_) => None,
+            Definition::BuiltinAttr(_) => None,
+            // FIXME: The focus range should be set to the helper declaration
+            Definition::DeriveHelper(it) => it.derive().try_to_nav(db),
+        }
+    }
+}
+
+impl TryToNav for hir::ModuleDef {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        match self {
+            hir::ModuleDef::Module(it) => Some(it.to_nav(db)),
+            hir::ModuleDef::Function(it) => it.try_to_nav(db),
+            hir::ModuleDef::Adt(it) => it.try_to_nav(db),
+            hir::ModuleDef::Variant(it) => it.try_to_nav(db),
+            hir::ModuleDef::Const(it) => it.try_to_nav(db),
+            hir::ModuleDef::Static(it) => it.try_to_nav(db),
+            hir::ModuleDef::Trait(it) => it.try_to_nav(db),
+            hir::ModuleDef::TypeAlias(it) => it.try_to_nav(db),
+            hir::ModuleDef::Macro(it) => it.try_to_nav(db),
+            hir::ModuleDef::BuiltinType(_) => None,
+        }
+    }
+}
+
+/// Maps a hir definition type to the [`SymbolKind`] used when building its
+/// navigation target via the blanket `TryToNav` impl below.
+pub(crate) trait ToNavFromAst {
+    const KIND: SymbolKind;
+}
+impl ToNavFromAst for hir::Function {
+    const KIND: SymbolKind = SymbolKind::Function;
+}
+impl ToNavFromAst for hir::Const {
+    const KIND: SymbolKind = SymbolKind::Const;
+}
+impl ToNavFromAst for hir::Static {
+    const KIND: SymbolKind = SymbolKind::Static;
+}
+impl ToNavFromAst for hir::Struct {
+    const KIND: SymbolKind = SymbolKind::Struct;
+}
+impl ToNavFromAst for hir::Enum {
+    const KIND: SymbolKind = SymbolKind::Enum;
+}
+impl ToNavFromAst for hir::Variant {
+    const KIND: SymbolKind = SymbolKind::Variant;
+}
+impl ToNavFromAst for hir::Union {
+    const KIND: SymbolKind = SymbolKind::Union;
+}
+impl ToNavFromAst for hir::TypeAlias {
+    const KIND: SymbolKind = SymbolKind::TypeAlias;
+}
+impl ToNavFromAst for hir::Trait {
+    const KIND: SymbolKind = SymbolKind::Trait;
+}
+
+// Blanket impl: any definition that has a named source, attributes and a
+// display form gets a target with `docs` and `description` filled in.
+impl<D> TryToNav for D
+where
+    D: HasSource + ToNavFromAst + Copy + HasAttrs + HirDisplay,
+    D::Ast: ast::HasName,
+{
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        let src = self.source(db)?;
+        let mut res = NavigationTarget::from_named(
+            db,
+            src.as_ref().map(|it| it as &dyn ast::HasName),
+            D::KIND,
+        );
+        res.docs = self.docs(db);
+        res.description = Some(self.display(db).to_string());
+        Some(res)
+    }
+}
+
+impl ToNav for hir::Module {
+    fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+        let InFile { file_id, value } = self.definition_source(db);
+
+        let name = self.name(db).map(|it| it.to_smol_str()).unwrap_or_default();
+        // Only inline `mod name { .. }` modules have a name to focus on;
+        // file modules and block expressions get no focus range.
+        let (syntax, focus) = match &value {
+            ModuleSource::SourceFile(node) => (node.syntax(), None),
+            ModuleSource::Module(node) => (
+                node.syntax(),
+                node.name().and_then(|it| orig_focus_range(db, file_id, it.syntax())),
+            ),
+            ModuleSource::BlockExpr(node) => (node.syntax(), None),
+        };
+        let FileRange { file_id, range: full_range } =
+            InFile::new(file_id, syntax).original_file_range(db);
+        NavigationTarget::from_syntax(file_id, name, focus, full_range, SymbolKind::Module)
+    }
+}
+
+impl TryToNav for hir::Impl {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        let InFile { file_id, value } = self.source(db)?;
+        let derive_attr = self.is_builtin_derive(db);
+
+        // For builtin-derive impls point at the `#[derive(..)]` attribute
+        // itself; otherwise focus on the impl's self type.
+        let focus_range = if derive_attr.is_some() {
+            None
+        } else {
+            value.self_ty().and_then(|ty| orig_focus_range(db, file_id, ty.syntax()))
+        };
+
+        let FileRange { file_id, range: full_range } = match &derive_attr {
+            Some(attr) => attr.syntax().original_file_range(db),
+            None => InFile::new(file_id, value.syntax()).original_file_range(db),
+        };
+
+        Some(NavigationTarget::from_syntax(
+            file_id,
+            "impl".into(),
+            focus_range,
+            full_range,
+            SymbolKind::Impl,
+        ))
+    }
+}
+
+impl TryToNav for hir::Field {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        let src = self.source(db)?;
+
+        let field_source = match &src.value {
+            // Named field: use its name, docs and display form.
+            FieldSource::Named(it) => {
+                let mut res =
+                    NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field);
+                res.docs = self.docs(db);
+                res.description = Some(self.display(db).to_string());
+                res
+            }
+            // Positional (tuple) field: there is no name to show.
+            FieldSource::Pos(it) => {
+                let FileRange { file_id, range } =
+                    src.with_value(it.syntax()).original_file_range(db);
+                NavigationTarget::from_syntax(file_id, "".into(), None, range, SymbolKind::Field)
+            }
+        };
+        Some(field_source)
+    }
+}
+
+impl TryToNav for hir::Macro {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        let src = self.source(db)?;
+        // The macro's name lives on either of the two possible AST node
+        // kinds; both implement `HasName`, so erase the distinction.
+        let name_owner: &dyn ast::HasName = match &src.value {
+            Either::Left(it) => it,
+            Either::Right(it) => it,
+        };
+        let mut res = NavigationTarget::from_named(
+            db,
+            src.as_ref().with_value(name_owner),
+            self.kind(db).into(),
+        );
+        res.docs = self.docs(db);
+        Some(res)
+    }
+}
+
+impl TryToNav for hir::Adt {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        match self {
+            hir::Adt::Struct(it) => it.try_to_nav(db),
+            hir::Adt::Union(it) => it.try_to_nav(db),
+            hir::Adt::Enum(it) => it.try_to_nav(db),
+        }
+    }
+}
+
+impl TryToNav for hir::AssocItem {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        match self {
+            AssocItem::Function(it) => it.try_to_nav(db),
+            AssocItem::Const(it) => it.try_to_nav(db),
+            AssocItem::TypeAlias(it) => it.try_to_nav(db),
+        }
+    }
+}
+
+impl TryToNav for hir::GenericParam {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        match self {
+            hir::GenericParam::TypeParam(it) => it.try_to_nav(db),
+            hir::GenericParam::ConstParam(it) => it.try_to_nav(db),
+            hir::GenericParam::LifetimeParam(it) => it.try_to_nav(db),
+        }
+    }
+}
+
+impl ToNav for hir::Local {
+    fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+        let InFile { file_id, value } = self.source(db);
+        let (node, name) = match &value {
+            Either::Left(bind_pat) => (bind_pat.syntax(), bind_pat.name()),
+            Either::Right(it) => (it.syntax(), it.name()),
+        };
+        let focus_range = name.and_then(|it| orig_focus_range(db, file_id, it.syntax()));
+        let FileRange { file_id, range: full_range } =
+            InFile::new(file_id, node).original_file_range(db);
+
+        let name = self.name(db).to_smol_str();
+        // Pick the symbol kind from how the local is introduced.
+        let kind = if self.is_self(db) {
+            SymbolKind::SelfParam
+        } else if self.is_param(db) {
+            SymbolKind::ValueParam
+        } else {
+            SymbolKind::Local
+        };
+        NavigationTarget {
+            file_id,
+            name,
+            kind: Some(kind),
+            full_range,
+            focus_range,
+            container_name: None,
+            description: None,
+            docs: None,
+        }
+    }
+}
+
+impl ToNav for hir::Label {
+    fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+        let InFile { file_id, value } = self.source(db);
+        let name = self.name(db).to_smol_str();
+
+        let range = |syntax: &_| InFile::new(file_id, syntax).original_file_range(db);
+        let FileRange { file_id, range: full_range } = range(value.syntax());
+        // Focus on the `'label` lifetime token when present.
+        let focus_range = value.lifetime().map(|lt| range(lt.syntax()).range);
+
+        NavigationTarget {
+            file_id,
+            name,
+            kind: Some(SymbolKind::Label),
+            full_range,
+            focus_range,
+            container_name: None,
+            description: None,
+            docs: None,
+        }
+    }
+}
+
+impl TryToNav for hir::TypeParam {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        let InFile { file_id, value } = self.merge().source(db)?;
+        let name = self.name(db).to_smol_str();
+
+        // `merge()` yields a combined type-or-const param source; seeing a
+        // const param here would violate the invariant, hence `never!`.
+        let value = match value {
+            Either::Left(ast::TypeOrConstParam::Type(x)) => Either::Left(x),
+            Either::Left(ast::TypeOrConstParam::Const(_)) => {
+                never!();
+                return None;
+            }
+            Either::Right(x) => Either::Right(x),
+        };
+
+        let range = |syntax: &_| InFile::new(file_id, syntax).original_file_range(db);
+        let focus_range = |syntax: &_| InFile::new(file_id, syntax).original_file_range_opt(db);
+        // NOTE(review): the `Either::Right` case carries a trait node —
+        // presumably the trait's implicit `Self` type param; confirm upstream.
+        let FileRange { file_id, range: full_range } = match &value {
+            Either::Left(type_param) => range(type_param.syntax()),
+            Either::Right(trait_) => trait_
+                .name()
+                .and_then(|name| focus_range(name.syntax()))
+                .unwrap_or_else(|| range(trait_.syntax())),
+        };
+        let focus_range = value
+            .either(|it| it.name(), |it| it.name())
+            .and_then(|it| focus_range(it.syntax()))
+            .map(|it| it.range);
+        Some(NavigationTarget {
+            file_id,
+            name,
+            kind: Some(SymbolKind::TypeParam),
+            full_range,
+            focus_range,
+            container_name: None,
+            description: None,
+            docs: None,
+        })
+    }
+}
+
+impl TryToNav for hir::TypeOrConstParam {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        // Delegate to the split `TypeParam` / `ConstParam` representation.
+        self.split(db).try_to_nav(db)
+    }
+}
+
+impl TryToNav for hir::LifetimeParam {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        let InFile { file_id, value } = self.source(db)?;
+        let name = self.name(db).to_smol_str();
+
+        let FileRange { file_id, range: full_range } =
+            InFile::new(file_id, value.syntax()).original_file_range(db);
+        Some(NavigationTarget {
+            file_id,
+            name,
+            kind: Some(SymbolKind::LifetimeParam),
+            full_range,
+            // The lifetime itself is the most interesting part.
+            focus_range: Some(full_range),
+            container_name: None,
+            description: None,
+            docs: None,
+        })
+    }
+}
+
+impl TryToNav for hir::ConstParam {
+    fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+        let InFile { file_id, value } = self.merge().source(db)?;
+        let name = self.name(db).to_smol_str();
+
+        // A const param must have a const-param source node; anything else
+        // is an invariant violation, hence `never!`.
+        let value = match value {
+            Either::Left(ast::TypeOrConstParam::Const(x)) => x,
+            _ => {
+                never!();
+                return None;
+            }
+        };
+
+        let focus_range = value.name().and_then(|it| orig_focus_range(db, file_id, it.syntax()));
+        let FileRange { file_id, range: full_range } =
+            InFile::new(file_id, value.syntax()).original_file_range(db);
+        Some(NavigationTarget {
+            file_id,
+            name,
+            kind: Some(SymbolKind::ConstParam),
+            full_range,
+            focus_range,
+            container_name: None,
+            description: None,
+            docs: None,
+        })
+    }
+}
+
+/// Get a description of a symbol.
+///
+/// e.g. `struct Name`, `enum Name`, `fn Name`
+pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> {
+    let sema = Semantics::new(db);
+    let node = symbol.loc.syntax(&sema)?;
+
+    // Resolve the syntax node to its hir definition and render its
+    // `HirDisplay` form; unknown node kinds produce no description.
+    match_ast! {
+        match node {
+            ast::Fn(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::Struct(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::Enum(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::Trait(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::Module(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::TypeAlias(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::Const(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::Static(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::RecordField(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::Variant(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            ast::Union(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+            _ => None,
+        }
+    }
+}
+
+/// Maps a (possibly macro-expanded) node back to a range in the original
+/// file, for use as a focus range; `None` when no original range exists.
+fn orig_focus_range(
+    db: &RootDatabase,
+    file_id: hir::HirFileId,
+    syntax: &SyntaxNode,
+) -> Option<TextRange> {
+    InFile::new(file_id, syntax).original_file_range_opt(db).map(|it| it.range)
+}
+
+#[cfg(test)]
+mod tests {
+    use expect_test::expect;
+
+    use crate::{fixture, Query};
+
+    // Both the top-level `FooInner` and the one nested inside `foo` should
+    // be found, the latter with its container name recorded.
+    #[test]
+    fn test_nav_for_symbol() {
+        let (analysis, _) = fixture::file(
+            r#"
+enum FooInner { }
+fn foo() { enum FooInner { } }
+"#,
+        );
+
+        let navs = analysis.symbol_search(Query::new("FooInner".to_string())).unwrap();
+        expect![[r#"
+            [
+                NavigationTarget {
+                    file_id: FileId(
+                        0,
+                    ),
+                    full_range: 0..17,
+                    focus_range: 5..13,
+                    name: "FooInner",
+                    kind: Enum,
+                    description: "enum FooInner",
+                },
+                NavigationTarget {
+                    file_id: FileId(
+                        0,
+                    ),
+                    full_range: 29..46,
+                    focus_range: 34..42,
+                    name: "FooInner",
+                    kind: Enum,
+                    container_name: "foo",
+                    description: "enum FooInner",
+                },
+            ]
+        "#]]
+        .assert_debug_eq(&navs);
+    }
+
+    // Lowercase query "foo" must match both `fn foo` and `struct Foo`.
+    #[test]
+    fn test_world_symbols_are_case_sensitive() {
+        let (analysis, _) = fixture::file(
+            r#"
+fn foo() {}
+struct Foo;
+"#,
+        );
+
+        let navs = analysis.symbol_search(Query::new("foo".to_string())).unwrap();
+        assert_eq!(navs.len(), 2)
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
new file mode 100644
index 000000000..9b1f48044
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
@@ -0,0 +1,167 @@
+use hir::Semantics;
+use ide_db::{
+ base_db::{CrateId, FileId, FilePosition},
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{
+ algo::find_node_at_offset,
+ ast::{self, AstNode},
+};
+
+use crate::NavigationTarget;
+
+// Feature: Parent Module
+//
+// Navigates to the parent module of the current module.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Locate parent module**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[]
+
+/// This returns `Vec` because a module may be included from several places.
+pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
+    let sema = Semantics::new(db);
+    let source_file = sema.parse(position.file_id);
+
+    let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
+
+    // If cursor is literally on `mod foo`, go to the grandpa.
+    if let Some(m) = &module {
+        if !m
+            .item_list()
+            .map_or(false, |it| it.syntax().text_range().contains_inclusive(position.offset))
+        {
+            cov_mark::hit!(test_resolve_parent_module_on_module_decl);
+            // Walk outwards past the declaration itself to the enclosing module.
+            module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast);
+        }
+    }
+
+    match module {
+        Some(module) => sema
+            .to_def(&module)
+            .into_iter()
+            .map(|module| NavigationTarget::from_module_to_decl(db, module))
+            .collect(),
+        // Not inside any inline module: fall back to the modules this file
+        // itself defines (there may be several — see `#[path]` test below).
+        None => sema
+            .to_module_defs(position.file_id)
+            .map(|module| NavigationTarget::from_module_to_decl(db, module))
+            .collect(),
+    }
+}
+
+/// Returns `Vec` for the same reason as `parent_module`
+pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
+    // A file can belong to multiple crates; deduplicate with `unique`.
+    let sema = Semantics::new(db);
+    sema.to_module_defs(file_id).map(|module| module.krate().into()).unique().collect()
+}
+
+#[cfg(test)]
+mod tests {
+    use ide_db::base_db::FileRange;
+
+    use crate::fixture;
+
+    // Asserts that parent-module navigation from the `$0` position lands
+    // exactly on each `//^^^`-annotated range in the fixture.
+    fn check(ra_fixture: &str) {
+        let (analysis, position, expected) = fixture::annotations(ra_fixture);
+        let navs = analysis.parent_module(position).unwrap();
+        let navs = navs
+            .iter()
+            .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+            .collect::<Vec<_>>();
+        assert_eq!(expected.into_iter().map(|(fr, _)| fr).collect::<Vec<_>>(), navs);
+    }
+
+    #[test]
+    fn test_resolve_parent_module() {
+        check(
+            r#"
+//- /lib.rs
+mod foo;
+  //^^^
+
+//- /foo.rs
+$0// empty
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_parent_module_on_module_decl() {
+        cov_mark::check!(test_resolve_parent_module_on_module_decl);
+        check(
+            r#"
+//- /lib.rs
+mod foo;
+  //^^^
+//- /foo.rs
+mod $0bar;
+
+//- /foo/bar.rs
+// empty
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_parent_module_for_inline() {
+        check(
+            r#"
+//- /lib.rs
+mod foo {
+    mod bar {
+        mod baz { $0 }
+    }     //^^^
+}
+"#,
+        );
+    }
+
+    // One file included twice (via `#[path]`) has two parent declarations.
+    #[test]
+    fn test_resolve_multi_parent_module() {
+        check(
+            r#"
+//- /main.rs
+mod foo;
+  //^^^
+#[path = "foo.rs"]
+mod bar;
+  //^^^
+//- /foo.rs
+$0
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_crate_root() {
+        let (analysis, file_id) = fixture::file(
+            r#"
+//- /foo.rs
+$0
+//- /main.rs
+mod foo;
+"#,
+        );
+        assert_eq!(analysis.crate_for(file_id).unwrap().len(), 1);
+    }
+
+    // The same file pulled into two crates belongs to both.
+    #[test]
+    fn test_resolve_multi_parent_crate() {
+        let (analysis, file_id) = fixture::file(
+            r#"
+//- /baz.rs
+$0
+//- /foo.rs crate:foo
+mod baz;
+//- /bar.rs crate:bar
+mod baz;
+"#,
+        );
+        assert_eq!(analysis.crate_for(file_id).unwrap().len(), 2);
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs
new file mode 100644
index 000000000..296270036
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs
@@ -0,0 +1,158 @@
+//! rust-analyzer is lazy and doesn't compute anything unless asked. This
+//! sometimes is counter productive when, for example, the first goto definition
+//! request takes longer to compute. This modules implemented prepopulation of
+//! various caches, it's not really advanced at the moment.
+mod topologic_sort;
+
+use std::time::Duration;
+
+use hir::db::DefDatabase;
+use ide_db::{
+ base_db::{
+ salsa::{Database, ParallelDatabase, Snapshot},
+ Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
+ },
+ FxHashSet, FxIndexMap,
+};
+
+use crate::RootDatabase;
+
+/// We're indexing many crates.
+///
+/// Progress report for [`parallel_prime_caches`]; a fresh snapshot is sent
+/// to the callback after every worker state change.
+#[derive(Debug)]
+pub struct ParallelPrimeCachesProgress {
+    /// the crates that we are currently priming.
+    pub crates_currently_indexing: Vec<String>,
+    /// the total number of crates we want to prime.
+    pub crates_total: usize,
+    /// the total number of crates that have finished priming
+    pub crates_done: usize,
+}
+
+/// Primes caches for the selected crates on `num_worker_threads` background
+/// threads, reporting progress through `cb`. Crates are scheduled in
+/// topological order so dependencies are indexed before their dependents.
+pub(crate) fn parallel_prime_caches(
+    db: &RootDatabase,
+    num_worker_threads: u8,
+    cb: &(dyn Fn(ParallelPrimeCachesProgress) + Sync),
+) {
+    let _p = profile::span("prime_caches");
+
+    let graph = db.crate_graph();
+    let mut crates_to_prime = {
+        let crate_ids = compute_crates_to_prime(db, &graph);
+
+        let mut builder = topologic_sort::TopologicalSortIter::builder();
+
+        for &crate_id in &crate_ids {
+            let crate_data = &graph[crate_id];
+            // Only dependencies that are themselves being primed count as
+            // scheduling predecessors.
+            let dependencies = crate_data
+                .dependencies
+                .iter()
+                .map(|d| d.crate_id)
+                .filter(|i| crate_ids.contains(i));
+
+            builder.add(crate_id, dependencies);
+        }
+
+        builder.build()
+    };
+
+    enum ParallelPrimeCacheWorkerProgress {
+        BeginCrate { crate_id: CrateId, crate_name: String },
+        EndCrate { crate_id: CrateId },
+    }
+
+    let (work_sender, progress_receiver) = {
+        let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
+        let (work_sender, work_receiver) = crossbeam_channel::unbounded();
+        let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
+            while let Ok((crate_id, crate_name)) = work_receiver.recv() {
+                progress_sender
+                    .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
+
+                // This also computes the DefMap
+                db.import_map(crate_id);
+
+                progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
+            }
+
+            Ok::<_, crossbeam_channel::SendError<_>>(())
+        };
+
+        // Each worker runs on its own database snapshot; a cancelled salsa
+        // revision unwinds out of the worker and is caught here.
+        for _ in 0..num_worker_threads {
+            let worker = prime_caches_worker.clone();
+            let db = db.snapshot();
+            std::thread::spawn(move || Cancelled::catch(|| worker(db)));
+        }
+
+        (work_sender, progress_receiver)
+    };
+
+    let crates_total = crates_to_prime.pending();
+    let mut crates_done = 0;
+
+    // an index map is used to preserve ordering so we can sort the progress report in order of
+    // "longest crate to index" first
+    let mut crates_currently_indexing =
+        FxIndexMap::with_capacity_and_hasher(num_worker_threads as _, Default::default());
+
+    while crates_done < crates_total {
+        db.unwind_if_cancelled();
+
+        // Hand every crate whose dependencies are done off to the workers.
+        for crate_id in &mut crates_to_prime {
+            work_sender
+                .send((
+                    crate_id,
+                    graph[crate_id].display_name.as_deref().unwrap_or_default().to_string(),
+                ))
+                .ok();
+        }
+
+        // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision
+        // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or
+        // if this thread exits, and closes the work channel.
+        let worker_progress = match progress_receiver.recv_timeout(Duration::from_millis(10)) {
+            Ok(p) => p,
+            Err(crossbeam_channel::RecvTimeoutError::Timeout) => {
+                continue;
+            }
+            Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
+                // our workers may have died from a cancelled task, so we'll check and re-raise here.
+                db.unwind_if_cancelled();
+                break;
+            }
+        };
+        match worker_progress {
+            ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name } => {
+                crates_currently_indexing.insert(crate_id, crate_name);
+            }
+            ParallelPrimeCacheWorkerProgress::EndCrate { crate_id } => {
+                crates_currently_indexing.remove(&crate_id);
+                // Marking done may unblock dependent crates for scheduling.
+                crates_to_prime.mark_done(crate_id);
+                crates_done += 1;
+            }
+        };
+
+        let progress = ParallelPrimeCachesProgress {
+            crates_currently_indexing: crates_currently_indexing.values().cloned().collect(),
+            crates_done,
+            crates_total,
+        };
+
+        cb(progress);
+    }
+}
+
+fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> {
+    // We're only interested in the workspace crates and the `ImportMap`s of their direct
+    // dependencies, though in practice the latter also compute the `DefMap`s.
+    // We don't prime transitive dependencies because they're generally not visible in
+    // the current workspace.
+    //
+    // NOTE(review): as written, this collects only the *dependencies* of
+    // workspace crates — the workspace crates themselves are not included in
+    // the returned set, which the comment above suggests they should be.
+    // Verify against upstream before relying on this.
+    graph
+        .iter()
+        // Workspace crates live in non-library source roots.
+        .filter(|&id| {
+            let file_id = graph[id].root_file_id;
+            let root_id = db.file_source_root(file_id);
+            !db.source_root(root_id).is_library
+        })
+        .flat_map(|id| graph[id].dependencies.iter().map(|krate| krate.crate_id))
+        .collect()
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/prime_caches/topologic_sort.rs b/src/tools/rust-analyzer/crates/ide/src/prime_caches/topologic_sort.rs
new file mode 100644
index 000000000..9c3ceedbb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/prime_caches/topologic_sort.rs
@@ -0,0 +1,98 @@
+//! helper data structure to schedule work for parallel prime caches.
+use std::{collections::VecDeque, hash::Hash};
+
+use ide_db::FxHashMap;
+
+/// Builder that accumulates nodes and predecessor edges before producing a
+/// [`TopologicalSortIter`].
+pub(crate) struct TopologicSortIterBuilder<T> {
+    nodes: FxHashMap<T, Entry<T>>,
+}
+
+impl<T> TopologicSortIterBuilder<T>
+where
+    T: Copy + Eq + PartialEq + Hash,
+{
+    fn new() -> Self {
+        Self { nodes: Default::default() }
+    }
+
+    // Fetches the entry for `item`, inserting a default one (no successors,
+    // zero predecessors) on first sight.
+    fn get_or_create_entry(&mut self, item: T) -> &mut Entry<T> {
+        self.nodes.entry(item).or_default()
+    }
+
+    /// Registers `item` together with the nodes that must complete before it.
+    /// Each predecessor also records `item` as a successor, so that `item`
+    /// can be unblocked later via `TopologicalSortIter::mark_done`.
+    pub(crate) fn add(&mut self, item: T, predecessors: impl IntoIterator<Item = T>) {
+        let mut num_predecessors = 0;
+
+        for predecessor in predecessors.into_iter() {
+            self.get_or_create_entry(predecessor).successors.push(item);
+            num_predecessors += 1;
+        }
+
+        // `+=` rather than `=`: `add` may be called more than once for the
+        // same item, accumulating predecessors across calls.
+        let entry = self.get_or_create_entry(item);
+        entry.num_predecessors += num_predecessors;
+    }
+
+    /// Finalizes the builder; nodes with no predecessors seed the ready queue.
+    pub(crate) fn build(self) -> TopologicalSortIter<T> {
+        let ready = self
+            .nodes
+            .iter()
+            .filter_map(
+                |(item, entry)| if entry.num_predecessors == 0 { Some(*item) } else { None },
+            )
+            .collect();
+
+        TopologicalSortIter { nodes: self.nodes, ready }
+    }
+}
+
+/// Work-scheduling iterator produced by [`TopologicSortIterBuilder::build`].
+///
+/// `next` only hands out items whose predecessors have all been reported as
+/// finished via [`Self::mark_done`], so consumers can process items in
+/// dependency order.
+pub(crate) struct TopologicalSortIter<T> {
+    // Items with no outstanding predecessors, ready to be yielded.
+    ready: VecDeque<T>,
+    // All not-yet-done items; tracks each item's remaining predecessor count
+    // and its successors.
+    nodes: FxHashMap<T, Entry<T>>,
+}
+
+impl<T> TopologicalSortIter<T>
+where
+    T: Copy + Eq + PartialEq + Hash,
+{
+    pub(crate) fn builder() -> TopologicSortIterBuilder<T> {
+        TopologicSortIterBuilder::new()
+    }
+
+    /// Number of items not yet marked done.
+    pub(crate) fn pending(&self) -> usize {
+        self.nodes.len()
+    }
+
+    /// Records `item` as completed: decrements the predecessor count of each
+    /// of its successors and moves newly unblocked successors into the ready
+    /// queue.
+    ///
+    /// Panics if `item` was never added or was already marked done.
+    pub(crate) fn mark_done(&mut self, item: T) {
+        let entry = self.nodes.remove(&item).expect("invariant: unknown item marked as done");
+
+        for successor in entry.successors {
+            let succ_entry = self
+                .nodes
+                .get_mut(&successor)
+                .expect("invariant: unknown successor referenced by entry");
+
+            succ_entry.num_predecessors -= 1;
+            if succ_entry.num_predecessors == 0 {
+                self.ready.push_back(successor);
+            }
+        }
+    }
+}
+
+impl<T> Iterator for TopologicalSortIter<T> {
+    type Item = T;
+
+    // NOTE: `None` does not mean the sort is exhausted — more items may
+    // become ready after further `mark_done` calls (check `pending`).
+    fn next(&mut self) -> Option<Self::Item> {
+        self.ready.pop_front()
+    }
+}
+
+/// Per-node book-keeping: which nodes depend on it, and how many
+/// dependencies it is still waiting on.
+struct Entry<T> {
+    successors: Vec<T>,
+    num_predecessors: usize,
+}
+
+impl<T> Default for Entry<T> {
+    fn default() -> Self {
+        Self { successors: Default::default(), num_predecessors: 0 }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
new file mode 100644
index 000000000..1a6beec18
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -0,0 +1,1636 @@
+//! This module implements a reference search.
+//! First, the element at the cursor position must be either an `ast::Name`
+//! or `ast::NameRef`. If it's an `ast::NameRef`, at the classification step we
+//! try to resolve the direct tree parent of this element, otherwise we
+//! already have a definition and just need to get its HIR together with
+//! some information that is needed for further steps of searching.
+//! After that, we collect files that might contain references and look
+//! for text occurrences of the identifier. If there's an `ast::NameRef`
+//! at the index that the match starts at and its tree parent is
+//! resolved to the search element definition, we get a reference.
+
+use hir::{PathResolution, Semantics};
+use ide_db::{
+ base_db::FileId,
+ defs::{Definition, NameClass, NameRefClass},
+ search::{ReferenceCategory, SearchScope, UsageSearchResult},
+ FxHashMap, RootDatabase,
+};
+use syntax::{
+ algo::find_node_at_offset,
+ ast::{self, HasName},
+ match_ast, AstNode,
+ SyntaxKind::*,
+ SyntaxNode, TextRange, TextSize, T,
+};
+
+use crate::{FilePosition, NavigationTarget, TryToNav};
+
+/// Result of a "find all references" query for a single definition.
+#[derive(Debug, Clone)]
+pub struct ReferenceSearchResult {
+    /// Navigable declaration site of the definition, if it has one.
+    pub declaration: Option<Declaration>,
+    /// All found usages, grouped by file; each usage carries its text range
+    /// and an optional read/write categorization.
+    pub references: FxHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
+}
+
+/// Navigation target of the searched definition, plus whether its
+/// declaration is a mutable `let` binding (see `decl_mutability`).
+#[derive(Debug, Clone)]
+pub struct Declaration {
+    pub nav: NavigationTarget,
+    pub is_mut: bool,
+}
+
+// Feature: Find All References
+//
+// Shows all references of the item at the cursor location
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Shift+Alt+F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif[]
+/// Entry point for reference search at `position`, optionally restricted to
+/// `search_scope`.
+///
+/// Two modes: if the cursor is at a spot that denotes a struct/enum/variant
+/// body (see `name_for_constructor_search`), only constructor/literal usages
+/// are kept; otherwise every definition found at the offset is searched,
+/// yielding one `ReferenceSearchResult` per definition.
+pub(crate) fn find_all_refs(
+    sema: &Semantics<'_, RootDatabase>,
+    position: FilePosition,
+    search_scope: Option<SearchScope>,
+) -> Option<Vec<ReferenceSearchResult>> {
+    let _p = profile::span("find_all_refs");
+    let syntax = sema.parse(position.file_id).syntax().clone();
+    // Builds the closure that, given one definition, collects its declaration
+    // nav target and all of its usages.
+    let make_searcher = |literal_search: bool| {
+        move |def: Definition| {
+            let declaration = match def {
+                // For modules, report the `mod name;` declaration rather than
+                // the module file itself.
+                Definition::Module(module) => {
+                    Some(NavigationTarget::from_module_to_decl(sema.db, module))
+                }
+                def => def.try_to_nav(sema.db),
+            }
+            .map(|nav| {
+                let decl_range = nav.focus_or_full_range();
+                Declaration {
+                    is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range),
+                    nav,
+                }
+            });
+            let mut usages =
+                def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
+
+            // Constructor-search mode: drop everything that is not a
+            // literal/constructor usage of the ADT.
+            if literal_search {
+                retain_adt_literal_usages(&mut usages, def, sema);
+            }
+
+            // Flatten usages into (range, category) pairs per file.
+            let references = usages
+                .into_iter()
+                .map(|(file_id, refs)| {
+                    (
+                        file_id,
+                        refs.into_iter()
+                            .map(|file_ref| (file_ref.range, file_ref.category))
+                            .collect(),
+                    )
+                })
+                .collect();
+
+            ReferenceSearchResult { declaration, references }
+        }
+    };
+
+    match name_for_constructor_search(&syntax, position) {
+        // Cursor denotes an ADT body: search only for constructor/literal
+        // usages of that single definition.
+        Some(name) => {
+            let def = match NameClass::classify(sema, &name)? {
+                NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+                NameClass::PatFieldShorthand { local_def: _, field_ref } => {
+                    Definition::Field(field_ref)
+                }
+            };
+            Some(vec![make_searcher(true)(def)])
+        }
+        // Regular mode: search every definition resolved at the offset.
+        None => {
+            let search = make_searcher(false);
+            Some(find_defs(sema, &syntax, position.offset)?.map(search).collect())
+        }
+    }
+}
+
+/// Resolves all definitions denoted by the token at `offset`.
+///
+/// Picks an identifier-like token at the offset, descends into macro
+/// expansions (keeping tokens with the same text), classifies each resulting
+/// name / name-ref / lifetime, and yields one `Definition` per successful
+/// classification. Returns `None` if no suitable token exists at the offset.
+pub(crate) fn find_defs<'a>(
+    sema: &'a Semantics<'_, RootDatabase>,
+    syntax: &SyntaxNode,
+    offset: TextSize,
+) -> Option<impl Iterator<Item = Definition> + 'a> {
+    let token = syntax.token_at_offset(offset).find(|t| {
+        matches!(
+            t.kind(),
+            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+        )
+    });
+    token.map(|token| {
+        sema.descend_into_macros_with_same_text(token)
+            .into_iter()
+            .filter_map(|it| ast::NameLike::cast(it.parent()?))
+            .filter_map(move |name_like| {
+                let def = match name_like {
+                    ast::NameLike::NameRef(name_ref) => {
+                        match NameRefClass::classify(sema, &name_ref)? {
+                            NameRefClass::Definition(def) => def,
+                            // `S { field }` shorthand: treat the usage as the
+                            // local, not the field.
+                            NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+                                Definition::Local(local_ref)
+                            }
+                        }
+                    }
+                    ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
+                        NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+                        // Pattern shorthand `S { field }`: the name defines a
+                        // local binding.
+                        NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+                            Definition::Local(local_def)
+                        }
+                    },
+                    ast::NameLike::Lifetime(lifetime) => {
+                        // Try the lifetime as a reference first, then as a
+                        // definition site.
+                        NameRefClass::classify_lifetime(sema, &lifetime)
+                            .and_then(|class| match class {
+                                NameRefClass::Definition(it) => Some(it),
+                                _ => None,
+                            })
+                            .or_else(|| {
+                                NameClass::classify_lifetime(sema, &lifetime)
+                                    .and_then(NameClass::defined)
+                            })?
+                    }
+                };
+                Some(def)
+            })
+    })
+}
+
+/// Whether the declaration of `def` at `range` is a `let` statement (with an
+/// initializer) whose pattern is a `mut` identifier pattern.
+///
+/// Only locals and fields are considered; any other definition kind returns
+/// `false` immediately.
+pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool {
+    match def {
+        Definition::Local(_) | Definition::Field(_) => {}
+        _ => return false,
+    };
+
+    match find_node_at_offset::<ast::LetStmt>(syntax, range.start()) {
+        Some(stmt) if stmt.initializer().is_some() => match stmt.pat() {
+            Some(ast::Pat::IdentPat(it)) => it.mut_token().is_some(),
+            _ => false,
+        },
+        _ => false,
+    }
+}
+
+/// Filter out all non-literal usages for adt-defs
+///
+/// Mutates `usages` in place, then drops files whose reference list became
+/// empty.
+fn retain_adt_literal_usages(
+    usages: &mut UsageSearchResult,
+    def: Definition,
+    sema: &Semantics<'_, RootDatabase>,
+) {
+    let refs = usages.references.values_mut();
+    match def {
+        // Enums: keep only references that construct a variant of *this*
+        // enum (path or record expression resolving to one of its variants).
+        Definition::Adt(hir::Adt::Enum(enum_)) => {
+            refs.for_each(|it| {
+                it.retain(|reference| {
+                    reference
+                        .name
+                        .as_name_ref()
+                        .map_or(false, |name_ref| is_enum_lit_name_ref(sema, enum_, name_ref))
+                })
+            });
+            usages.references.retain(|_, it| !it.is_empty());
+        }
+        // Other ADTs and variants: keep only references used in literal
+        // (constructor) position.
+        Definition::Adt(_) | Definition::Variant(_) => {
+            refs.for_each(|it| {
+                it.retain(|reference| reference.name.as_name_ref().map_or(false, is_lit_name_ref))
+            });
+            usages.references.retain(|_, it| !it.is_empty());
+        }
+        _ => {}
+    }
+}
+
+/// Returns `Some` if the cursor is at a position for an item to search for all its constructor/literal usages
+///
+/// The trigger spots are the tokens that delimit an ADT body rather than its
+/// name: `;` of a unit struct, `{` of a record struct/union/variant or of an
+/// enum's variant list, and `(` of a tuple struct/variant.
+fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
+    let token = syntax.token_at_offset(position.offset).right_biased()?;
+    let token_parent = token.parent()?;
+    let kind = token.kind();
+    if kind == T![;] {
+        // `struct Foo;` — only unit structs (no field list) qualify.
+        ast::Struct::cast(token_parent)
+            .filter(|struct_| struct_.field_list().is_none())
+            .and_then(|struct_| struct_.name())
+    } else if kind == T!['{'] {
+        // `{` either opens a record field list (struct/union/variant) or an
+        // enum's variant list.
+        match_ast! {
+            match token_parent {
+                ast::RecordFieldList(rfl) => match_ast! {
+                    match (rfl.syntax().parent()?) {
+                        ast::Variant(it) => it.name(),
+                        ast::Struct(it) => it.name(),
+                        ast::Union(it) => it.name(),
+                        _ => None,
+                    }
+                },
+                ast::VariantList(vl) => ast::Enum::cast(vl.syntax().parent()?)?.name(),
+                _ => None,
+            }
+        }
+    } else if kind == T!['('] {
+        // `(` opening a tuple field list of a struct or variant.
+        let tfl = ast::TupleFieldList::cast(token_parent)?;
+        match_ast! {
+            match (tfl.syntax().parent()?) {
+                ast::Variant(it) => it.name(),
+                ast::Struct(it) => it.name(),
+                _ => None,
+            }
+        }
+    } else {
+        None
+    }
+}
+
+/// Whether `name_ref` occurs inside a path or record expression whose path
+/// resolves to a variant of `enum_` (i.e. it is part of constructing a value
+/// of that enum).
+fn is_enum_lit_name_ref(
+    sema: &Semantics<'_, RootDatabase>,
+    enum_: hir::Enum,
+    name_ref: &ast::NameRef,
+) -> bool {
+    let path_is_variant_of_enum = |path: ast::Path| {
+        matches!(
+            sema.resolve_path(&path),
+            Some(PathResolution::Def(hir::ModuleDef::Variant(variant)))
+                if variant.parent_enum(sema.db) == enum_
+        )
+    };
+    // Walk up to the nearest enclosing path/record expression and test its
+    // path; anything else means this is not a literal usage.
+    name_ref
+        .syntax()
+        .ancestors()
+        .find_map(|ancestor| {
+            match_ast! {
+                match ancestor {
+                    ast::PathExpr(path_expr) => path_expr.path().map(path_is_variant_of_enum),
+                    ast::RecordExpr(record_expr) => record_expr.path().map(path_is_variant_of_enum),
+                    _ => None,
+                }
+            }
+        })
+        .unwrap_or(false)
+}
+
+/// Whether the final segment of `path` is exactly `name_ref`.
+fn path_ends_with(path: Option<ast::Path>, name_ref: &ast::NameRef) -> bool {
+    path.and_then(|path| path.segment())
+        .and_then(|segment| segment.name_ref())
+        .map_or(false, |segment| segment == *name_ref)
+}
+
+/// Whether `name_ref` is the last segment of a path used in literal
+/// (constructor) position — i.e. inside a path expression or record
+/// expression such as `Foo(..)` or `Foo { .. }`.
+fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool {
+    name_ref.syntax().ancestors().find_map(|ancestor| {
+        match_ast! {
+            match ancestor {
+                ast::PathExpr(path_expr) => Some(path_ends_with(path_expr.path(), name_ref)),
+                ast::RecordExpr(record_expr) => Some(path_ends_with(record_expr.path(), name_ref)),
+                _ => None,
+            }
+        }
+    }).unwrap_or(false)
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::{base_db::FileId, search::ReferenceCategory};
+ use stdx::format_to;
+
+ use crate::{fixture, SearchScope};
+
+ #[test]
+ fn test_struct_literal_after_space() {
+ check(
+ r#"
+struct Foo $0{
+ a: i32,
+}
+impl Foo {
+ fn f() -> i32 { 42 }
+}
+fn main() {
+ let f: Foo;
+ f = Foo {a: Foo::f()};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..26 7..10
+
+ FileId(0) 101..104
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_before_space() {
+ check(
+ r#"
+struct Foo$0 {}
+ fn main() {
+ let f: Foo;
+ f = Foo {};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..13 7..10
+
+ FileId(0) 41..44
+ FileId(0) 54..57
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_with_generic_type() {
+ check(
+ r#"
+struct Foo<T> $0{}
+ fn main() {
+ let f: Foo::<i32>;
+ f = Foo {};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..16 7..10
+
+ FileId(0) 64..67
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_for_tuple() {
+ check(
+ r#"
+struct Foo$0(i32);
+
+fn main() {
+ let f: Foo;
+ f = Foo(1);
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..16 7..10
+
+ FileId(0) 54..57
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_for_union() {
+ check(
+ r#"
+union Foo $0{
+ x: u32
+}
+
+fn main() {
+ let f: Foo;
+ f = Foo { x: 1 };
+}
+"#,
+ expect![[r#"
+ Foo Union FileId(0) 0..24 6..9
+
+ FileId(0) 62..65
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_after_space() {
+ check(
+ r#"
+enum Foo $0{
+ A,
+ B(),
+ C{},
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A;
+ f = Foo::B();
+ f = Foo::C{};
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..37 5..8
+
+ FileId(0) 74..77
+ FileId(0) 90..93
+ FileId(0) 108..111
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_variant_record_after_space() {
+ check(
+ r#"
+enum Foo {
+ A $0{ n: i32 },
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::B;
+ f = Foo::A { n: 92 };
+}
+"#,
+ expect![[r#"
+ A Variant FileId(0) 15..27 15..16
+
+ FileId(0) 95..96
+ "#]],
+ );
+ }
+ #[test]
+ fn test_variant_tuple_before_paren() {
+ check(
+ r#"
+enum Foo {
+ A$0(i32),
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::B;
+ f = Foo::A(92);
+}
+"#,
+ expect![[r#"
+ A Variant FileId(0) 15..21 15..16
+
+ FileId(0) 89..90
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_before_space() {
+ check(
+ r#"
+enum Foo$0 {
+ A,
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A;
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..26 5..8
+
+ FileId(0) 50..53
+ FileId(0) 63..66
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_with_generic_type() {
+ check(
+ r#"
+enum Foo<T> $0{
+ A(T),
+ B,
+}
+fn main() {
+ let f: Foo<i8>;
+ f = Foo::A(1);
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..32 5..8
+
+ FileId(0) 73..76
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_for_tuple() {
+ check(
+ r#"
+enum Foo$0{
+ A(i8),
+ B(i8),
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A(1);
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..33 5..8
+
+ FileId(0) 70..73
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_local() {
+ check(
+ r#"
+fn main() {
+ let mut i = 1;
+ let j = 1;
+ i = i$0 + j;
+
+ {
+ i = 0;
+ }
+
+ i = 5;
+}"#,
+ expect![[r#"
+ i Local FileId(0) 20..25 24..25 Write
+
+ FileId(0) 50..51 Write
+ FileId(0) 54..55 Read
+ FileId(0) 76..77 Write
+ FileId(0) 94..95 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_filters_by_range() {
+ check(
+ r#"
+fn foo() {
+ let spam$0 = 92;
+ spam + spam
+}
+fn bar() {
+ let spam = 92;
+ spam + spam
+}
+"#,
+ expect![[r#"
+ spam Local FileId(0) 19..23 19..23
+
+ FileId(0) 34..38 Read
+ FileId(0) 41..45 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_param_inside() {
+ check(
+ r#"
+fn foo(i : u32) -> u32 { i$0 }
+"#,
+ expect![[r#"
+ i ValueParam FileId(0) 7..8 7..8
+
+ FileId(0) 25..26 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_fn_param() {
+ check(
+ r#"
+fn foo(i$0 : u32) -> u32 { i }
+"#,
+ expect![[r#"
+ i ValueParam FileId(0) 7..8 7..8
+
+ FileId(0) 25..26 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_field_name() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ pub spam$0: u32,
+}
+
+fn main(s: Foo) {
+ let f = s.spam;
+}
+"#,
+ expect![[r#"
+ spam Field FileId(0) 17..30 21..25
+
+ FileId(0) 67..71 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_impl_item_name() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn f$0(&self) { }
+}
+"#,
+ expect![[r#"
+ f Function FileId(0) 27..43 30..31
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_name() {
+ check(
+ r#"
+enum Foo {
+ A,
+ B$0,
+ C,
+}
+"#,
+ expect![[r#"
+ B Variant FileId(0) 22..23 22..23
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_field() {
+ check(
+ r#"
+enum Foo {
+ A,
+ B { field$0: u8 },
+ C,
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 26..35 26..31
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_two_modules() {
+ check(
+ r#"
+//- /lib.rs
+pub mod foo;
+pub mod bar;
+
+fn f() {
+ let i = foo::Foo { n: 5 };
+}
+
+//- /foo.rs
+use crate::bar;
+
+pub struct Foo {
+ pub n: u32,
+}
+
+fn f() {
+ let i = bar::Bar { n: 5 };
+}
+
+//- /bar.rs
+use crate::foo;
+
+pub struct Bar {
+ pub n: u32,
+}
+
+fn f() {
+ let i = foo::Foo$0 { n: 5 };
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(1) 17..51 28..31
+
+ FileId(0) 53..56
+ FileId(2) 79..82
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo$0;
+
+use foo::Foo;
+
+fn f() {
+ let i = Foo { n: 5 };
+}
+
+//- /foo.rs
+pub struct Foo {
+ pub n: u32,
+}
+"#,
+ expect![[r#"
+ foo Module FileId(0) 0..8 4..7
+
+ FileId(0) 14..17
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module_on_self() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+
+//- /foo.rs
+use self$0;
+"#,
+ expect![[r#"
+ foo Module FileId(0) 0..8 4..7
+
+ FileId(1) 4..8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module_on_self_crate_root() {
+ check(
+ r#"
+//- /lib.rs
+use self$0;
+"#,
+ expect![[r#"
+ Module FileId(0) 0..10
+
+ FileId(0) 4..8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_super_mod_vis() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+
+//- /foo.rs
+mod some;
+use some::Foo;
+
+fn f() {
+ let i = Foo { n: 5 };
+}
+
+//- /foo/some.rs
+pub(super) struct Foo$0 {
+ pub n: u32,
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(2) 0..41 18..21
+
+ FileId(1) 20..23
+ FileId(1) 47..50
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_with_scope() {
+ let code = r#"
+ //- /lib.rs
+ mod foo;
+ mod bar;
+
+ pub fn quux$0() {}
+
+ //- /foo.rs
+ fn f() { super::quux(); }
+
+ //- /bar.rs
+ fn f() { super::quux(); }
+ "#;
+
+ check_with_scope(
+ code,
+ None,
+ expect![[r#"
+ quux Function FileId(0) 19..35 26..30
+
+ FileId(1) 16..20
+ FileId(2) 16..20
+ "#]],
+ );
+
+ check_with_scope(
+ code,
+ Some(SearchScope::single_file(FileId(2))),
+ expect![[r#"
+ quux Function FileId(0) 19..35 26..30
+
+ FileId(2) 16..20
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_macro_def() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! m1$0 { () => (()) }
+
+fn foo() {
+ m1();
+ m1();
+}
+"#,
+ expect![[r#"
+ m1 Macro FileId(0) 0..46 29..31
+
+ FileId(0) 63..65
+ FileId(0) 73..75
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_read_write() {
+ check(
+ r#"
+fn foo() {
+ let mut i$0 = 0;
+ i = i + 1;
+}
+"#,
+ expect![[r#"
+ i Local FileId(0) 19..24 23..24 Write
+
+ FileId(0) 34..35 Write
+ FileId(0) 38..39 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_field_read_write() {
+ check(
+ r#"
+struct S {
+ f: u32,
+}
+
+fn foo() {
+ let mut s = S{f: 0};
+ s.f$0 = 0;
+}
+"#,
+ expect![[r#"
+ f Field FileId(0) 15..21 15..16
+
+ FileId(0) 55..56 Read
+ FileId(0) 68..69 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_decl_no_write() {
+ check(
+ r#"
+fn foo() {
+ let i$0;
+ i = 1;
+}
+"#,
+ expect![[r#"
+ i Local FileId(0) 19..20 19..20
+
+ FileId(0) 26..27 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_struct_function_refs_outside_module() {
+ check(
+ r#"
+mod foo {
+ pub struct Foo;
+
+ impl Foo {
+ pub fn new$0() -> Foo { Foo }
+ }
+}
+
+fn main() {
+ let _f = foo::Foo::new();
+}
+"#,
+ expect![[r#"
+ new Function FileId(0) 54..81 61..64
+
+ FileId(0) 126..129
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_nested_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo { mod bar; }
+
+fn f$0() {}
+
+//- /foo/bar.rs
+use crate::f;
+
+fn g() { f(); }
+"#,
+ expect![[r#"
+ f Function FileId(0) 22..31 25..26
+
+ FileId(1) 11..12
+ FileId(1) 24..25
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_struct_pat() {
+ check(
+ r#"
+struct S {
+ field$0: u8,
+}
+
+fn f(s: S) {
+ match s {
+ S { field } => {}
+ }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 15..24 15..20
+
+ FileId(0) 68..73 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_pat() {
+ check(
+ r#"
+enum En {
+ Variant {
+ field$0: u8,
+ }
+}
+
+fn f(e: En) {
+ match e {
+ En::Variant { field } => {}
+ }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 32..41 32..37
+
+ FileId(0) 102..107 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_privacy() {
+ check(
+ r#"
+mod m {
+ pub enum En {
+ Variant {
+ field$0: u8,
+ }
+ }
+}
+
+fn f() -> m::En {
+ m::En::Variant { field: 0 }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 56..65 56..61
+
+ FileId(0) 125..130 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_self_refs() {
+ check(
+ r#"
+struct Foo { bar: i32 }
+
+impl Foo {
+ fn foo(self) {
+ let x = self$0.bar;
+ if true {
+ let _ = match () {
+ () => self,
+ };
+ }
+ }
+}
+"#,
+ expect![[r#"
+ self SelfParam FileId(0) 47..51 47..51
+
+ FileId(0) 71..75 Read
+ FileId(0) 152..156 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_self_refs_decl() {
+ check(
+ r#"
+struct Foo { bar: i32 }
+
+impl Foo {
+ fn foo(self$0) {
+ self;
+ }
+}
+"#,
+ expect![[r#"
+ self SelfParam FileId(0) 47..51 47..51
+
+ FileId(0) 63..67 Read
+ "#]],
+ );
+ }
+
+    // Runs `check_with_scope` with no scope restriction.
+    fn check(ra_fixture: &str, expect: Expect) {
+        check_with_scope(ra_fixture, None, expect)
+    }
+
+    // Runs `find_all_refs` on the fixture's cursor position and renders the
+    // results for comparison with the expect-test snapshot. Rendering format:
+    // the declaration's debug nav line (with ` Write` appended if mutable),
+    // a blank line, then one `FileId(..) range [category]` line per
+    // reference, or `(no references)` if none were found.
+    fn check_with_scope(ra_fixture: &str, search_scope: Option<SearchScope>, expect: Expect) {
+        let (analysis, pos) = fixture::position(ra_fixture);
+        let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
+
+        let mut actual = String::new();
+        for refs in refs {
+            actual += "\n\n";
+
+            if let Some(decl) = refs.declaration {
+                format_to!(actual, "{}", decl.nav.debug_render());
+                if decl.is_mut {
+                    format_to!(actual, " {:?}", ReferenceCategory::Write)
+                }
+                actual += "\n\n";
+            }
+
+            for (file_id, references) in &refs.references {
+                for (range, access) in references {
+                    format_to!(actual, "{:?} {:?}", file_id, range);
+                    if let Some(access) = access {
+                        format_to!(actual, " {:?}", access);
+                    }
+                    actual += "\n";
+                }
+            }
+
+            if refs.references.is_empty() {
+                actual += "(no references)\n";
+            }
+        }
+        expect.assert_eq(actual.trim_start())
+    }
+
+ #[test]
+ fn test_find_lifetimes_function() {
+ check(
+ r#"
+trait Foo<'a> {}
+impl<'a> Foo<'a> for &'a () {}
+fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> {
+ fn bar<'a>(_: &'a ()) {}
+ x
+}
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 55..57 55..57
+
+ FileId(0) 63..65
+ FileId(0) 71..73
+ FileId(0) 82..84
+ FileId(0) 95..97
+ FileId(0) 106..108
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_lifetimes_type_alias() {
+ check(
+ r#"
+type Foo<'a, T> where T: 'a$0 = &'a T;
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 9..11 9..11
+
+ FileId(0) 25..27
+ FileId(0) 31..33
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_lifetimes_trait_impl() {
+ check(
+ r#"
+trait Foo<'a> {
+ fn foo() -> &'a ();
+}
+impl<'a> Foo<'a> for &'a () {
+ fn foo() -> &'a$0 () {
+ unimplemented!()
+ }
+}
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 47..49 47..49
+
+ FileId(0) 55..57
+ FileId(0) 64..66
+ FileId(0) 89..91
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_map_range_to_original() {
+ check(
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a$0 = "test";
+ foo!(a);
+}
+"#,
+ expect![[r#"
+ a Local FileId(0) 59..60 59..60
+
+ FileId(0) 80..81 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_map_range_to_original_ref() {
+ check(
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a = "test";
+ foo!(a$0);
+}
+"#,
+ expect![[r#"
+ a Local FileId(0) 59..60 59..60
+
+ FileId(0) 80..81 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_labels() {
+ check(
+ r#"
+fn foo<'a>() -> &'a () {
+ 'a: loop {
+ 'b: loop {
+ continue 'a$0;
+ }
+ break 'a;
+ }
+}
+"#,
+ expect![[r#"
+ 'a Label FileId(0) 29..32 29..31
+
+ FileId(0) 80..82
+ FileId(0) 108..110
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_const_param() {
+ check(
+ r#"
+fn foo<const FOO$0: usize>() -> usize {
+ FOO
+}
+"#,
+ expect![[r#"
+ FOO ConstParam FileId(0) 7..23 13..16
+
+ FileId(0) 42..45
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait() {
+ check(
+ r#"
+trait Foo$0 where Self: {}
+
+impl Foo for () {}
+"#,
+ expect![[r#"
+ Foo Trait FileId(0) 0..24 6..9
+
+ FileId(0) 31..34
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait_self() {
+ check(
+ r#"
+trait Foo where Self$0 {
+ fn f() -> Self;
+}
+
+impl Foo for () {}
+"#,
+ expect![[r#"
+ Self TypeParam FileId(0) 6..9 6..9
+
+ FileId(0) 16..20
+ FileId(0) 37..41
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_self_ty() {
+ check(
+ r#"
+ struct $0Foo;
+
+ impl Foo where Self: {
+ fn f() -> Self;
+ }
+ "#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..11 7..10
+
+ FileId(0) 18..21
+ FileId(0) 28..32
+ FileId(0) 50..54
+ "#]],
+ );
+ check(
+ r#"
+struct Foo;
+
+impl Foo where Self: {
+ fn f() -> Self$0;
+}
+"#,
+ expect![[r#"
+ impl Impl FileId(0) 13..57 18..21
+
+ FileId(0) 18..21
+ FileId(0) 28..32
+ FileId(0) 50..54
+ "#]],
+ );
+ }
+ #[test]
+ fn test_self_variant_with_payload() {
+ check(
+ r#"
+enum Foo { Bar() }
+
+impl Foo {
+ fn foo(self) {
+ match self {
+ Self::Bar$0() => (),
+ }
+ }
+}
+
+"#,
+ expect![[r#"
+ Bar Variant FileId(0) 11..16 11..14
+
+ FileId(0) 89..92
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_attr_differs_from_fn_with_same_name() {
+ check(
+ r#"
+#[test]
+fn test$0() {
+ test();
+}
+"#,
+ expect![[r#"
+ test Function FileId(0) 0..33 11..15
+
+ FileId(0) 24..28
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_const_in_pattern() {
+ check(
+ r#"
+const A$0: i32 = 42;
+
+fn main() {
+ match A {
+ A => (),
+ _ => (),
+ }
+ if let A = A {}
+}
+"#,
+ expect![[r#"
+ A Const FileId(0) 0..18 6..7
+
+ FileId(0) 42..43
+ FileId(0) 54..55
+ FileId(0) 97..98
+ FileId(0) 101..102
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_primitives() {
+ check(
+ r#"
+fn foo(_: bool) -> bo$0ol { true }
+"#,
+ expect![[r#"
+ FileId(0) 10..14
+ FileId(0) 19..23
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_transitive() {
+ check(
+ r#"
+//- /level3.rs new_source_root:local crate:level3
+pub struct Fo$0o;
+//- /level2.rs new_source_root:local crate:level2 deps:level3
+pub use level3::Foo;
+//- /level1.rs new_source_root:local crate:level1 deps:level2
+pub use level2::Foo;
+//- /level0.rs new_source_root:local crate:level0 deps:level1
+pub use level1::Foo;
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..15 11..14
+
+ FileId(1) 16..19
+ FileId(2) 16..19
+ FileId(3) 16..19
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_decl_macro_references() {
+ check(
+ r#"
+//- /lib.rs crate:lib
+#[macro_use]
+mod qux;
+mod bar;
+
+pub use self::foo;
+//- /qux.rs
+#[macro_export]
+macro_rules! foo$0 {
+ () => {struct Foo;};
+}
+//- /bar.rs
+foo!();
+//- /other.rs crate:other deps:lib new_source_root:local
+lib::foo!();
+"#,
+ expect![[r#"
+ foo Macro FileId(1) 0..61 29..32
+
+ FileId(0) 46..49
+ FileId(2) 0..3
+ FileId(3) 5..8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_doesnt_reference_attribute_on_call() {
+ check(
+ r#"
+macro_rules! m {
+ () => {};
+}
+
+#[proc_macro_test::attr_noop]
+m$0!();
+
+"#,
+ expect![[r#"
+ m Macro FileId(0) 0..32 13..14
+
+ FileId(0) 64..65
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multi_def() {
+ check(
+ r#"
+macro_rules! m {
+ ($name:ident) => {
+ mod module {
+ pub fn $name() {}
+ }
+
+ pub fn $name() {}
+ }
+}
+
+m!(func$0);
+
+fn f() {
+ func();
+ module::func();
+}
+ "#,
+ expect![[r#"
+ func Function FileId(0) 137..146 140..144
+
+ FileId(0) 161..165
+
+
+ func Function FileId(0) 137..146 140..144
+
+ FileId(0) 181..185
+ "#]],
+ )
+ }
+
+ #[test]
+ fn attr_expanded() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn func$0() {
+ func();
+}
+"#,
+ expect![[r#"
+ func Function FileId(0) 25..50 28..32
+
+ FileId(0) 41..45
+ "#]],
+ )
+ }
+
+ #[test]
+ fn attr_assoc_item() {
+ check(
+ r#"
+//- proc_macros: identity
+
+trait Trait {
+ #[proc_macros::identity]
+ fn func() {
+ Self::func$0();
+ }
+}
+"#,
+ expect![[r#"
+ func Function FileId(0) 48..87 51..55
+
+ FileId(0) 74..78
+ "#]],
+ )
+ }
+
+ // FIXME: import is classified as function
+ #[test]
+ fn attr() {
+ check(
+ r#"
+//- proc_macros: identity
+use proc_macros::identity;
+
+#[proc_macros::$0identity]
+fn func() {}
+"#,
+ expect![[r#"
+ identity Attribute FileId(1) 1..107 32..40
+
+ FileId(0) 43..51
+ "#]],
+ );
+ check(
+ r#"
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+fn func$0() {}
+"#,
+ expect![[r#"
+ func Attribute FileId(0) 28..64 55..59
+
+ (no references)
+ "#]],
+ );
+ }
+
+ // FIXME: import is classified as function
+ #[test]
+ fn proc_macro() {
+ check(
+ r#"
+//- proc_macros: mirror
+use proc_macros::mirror;
+
+mirror$0! {}
+"#,
+ expect![[r#"
+ mirror Macro FileId(1) 1..77 22..28
+
+ FileId(0) 26..32
+ "#]],
+ )
+ }
+
+ #[test]
+ fn derive() {
+ check(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+use proc_macros::DeriveIdentity;
+
+#[derive(proc_macros::DeriveIdentity$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ derive_identity Derive FileId(2) 1..107 45..60
+
+ FileId(0) 17..31
+ FileId(0) 56..70
+ "#]],
+ );
+ check(
+ r#"
+#![crate_type="proc-macro"]
+#[proc_macro_derive(Derive, attributes(x))]
+pub fn deri$0ve(_stream: TokenStream) -> TokenStream {}
+"#,
+ expect![[r#"
+ derive Derive FileId(0) 28..125 79..85
+
+ (no references)
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
new file mode 100644
index 000000000..fe44856dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -0,0 +1,2252 @@
+//! Renaming functionality.
+//!
+//! This is mostly front-end for [`ide_db::rename`], but it also includes the
+//! tests. This module also implements a couple of magic tricks, like renaming
+//! `self` and to `self` (to switch between associated function and method).
+
+use hir::{AsAssocItem, InFile, Semantics};
+use ide_db::{
+ base_db::FileId,
+ defs::{Definition, NameClass, NameRefClass},
+ rename::{bail, format_err, source_edit_from_references, IdentifierKind},
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::{always, never};
+use syntax::{ast, AstNode, SyntaxNode};
+
+use text_edit::TextEdit;
+
+use crate::{FilePosition, RangeInfo, SourceChange};
+
+pub use ide_db::rename::RenameError;
+
+type RenameResult<T> = Result<T, RenameError>;
+
+/// Prepares a rename. The sole job of this function is to return the TextRange of the thing that is
+/// being targeted for a rename.
+pub(crate) fn prepare_rename(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> RenameResult<RangeInfo<()>> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+ let syntax = source_file.syntax();
+
+ let res = find_definitions(&sema, syntax, position)?
+ .map(|(name_like, def)| {
+ // ensure all ranges are valid
+
+ if def.range_for_rename(&sema).is_none() {
+ bail!("No references found at position")
+ }
+ let frange = sema.original_range(name_like.syntax());
+
+ always!(
+ frange.range.contains_inclusive(position.offset)
+ && frange.file_id == position.file_id
+ );
+ Ok(frange.range)
+ })
+ .reduce(|acc, cur| match (acc, cur) {
+ // ensure all ranges are the same
+ (Ok(acc_inner), Ok(cur_inner)) if acc_inner == cur_inner => Ok(acc_inner),
+ (Err(e), _) => Err(e),
+ _ => bail!("inconsistent text range"),
+ });
+
+ match res {
+ // ensure at least one definition was found
+ Some(res) => res.map(|range| RangeInfo::new(range, ())),
+ None => bail!("No references found at position"),
+ }
+}
+
+// Feature: Rename
+//
+// Renames the item below the cursor and all of its references
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[F2]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065582-055aae80-91b1-11eb-8ade-2b58e6d81883.gif[]
+pub(crate) fn rename(
+ db: &RootDatabase,
+ position: FilePosition,
+ new_name: &str,
+) -> RenameResult<SourceChange> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+ let syntax = source_file.syntax();
+
+ let defs = find_definitions(&sema, syntax, position)?;
+
+ let ops: RenameResult<Vec<SourceChange>> = defs
+ .map(|(_namelike, def)| {
+ if let Definition::Local(local) = def {
+ if let Some(self_param) = local.as_self_param(sema.db) {
+ cov_mark::hit!(rename_self_to_param);
+ return rename_self_to_param(&sema, local, self_param, new_name);
+ }
+ if new_name == "self" {
+ cov_mark::hit!(rename_to_self);
+ return rename_to_self(&sema, local);
+ }
+ }
+ def.rename(&sema, new_name)
+ })
+ .collect();
+
+ ops?.into_iter()
+ .reduce(|acc, elem| acc.merge(elem))
+ .ok_or_else(|| format_err!("No references found at position"))
+}
+
+/// Called by the client when it is about to rename a file.
+pub(crate) fn will_rename_file(
+ db: &RootDatabase,
+ file_id: FileId,
+ new_name_stem: &str,
+) -> Option<SourceChange> {
+ let sema = Semantics::new(db);
+ let module = sema.to_module_def(file_id)?;
+ let def = Definition::Module(module);
+ let mut change = def.rename(&sema, new_name_stem).ok()?;
+ change.file_system_edits.clear();
+ Some(change)
+}
+
+fn find_definitions(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ position: FilePosition,
+) -> RenameResult<impl Iterator<Item = (ast::NameLike, Definition)>> {
+ let symbols = sema
+ .find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, position.offset)
+ .map(|name_like| {
+ let res = match &name_like {
+ // renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet
+ ast::NameLike::Name(name)
+ if name
+ .syntax()
+ .parent()
+ .map_or(false, |it| ast::Rename::can_cast(it.kind())) =>
+ {
+ bail!("Renaming aliases is currently unsupported")
+ }
+ ast::NameLike::Name(name) => NameClass::classify(sema, name)
+ .map(|class| match class {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ })
+ .map(|def| (name_like.clone(), def))
+ .ok_or_else(|| format_err!("No references found at position")),
+ ast::NameLike::NameRef(name_ref) => {
+ NameRefClass::classify(sema, name_ref)
+ .map(|class| match class {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+ Definition::Local(local_ref)
+ }
+ })
+ .ok_or_else(|| format_err!("No references found at position"))
+ .and_then(|def| {
+ // if the name differs from the definitions name it has to be an alias
+ if def
+ .name(sema.db)
+ .map_or(false, |it| it.to_smol_str() != name_ref.text().as_str())
+ {
+ Err(format_err!("Renaming aliases is currently unsupported"))
+ } else {
+ Ok((name_like.clone(), def))
+ }
+ })
+ }
+ ast::NameLike::Lifetime(lifetime) => {
+ NameRefClass::classify_lifetime(sema, lifetime)
+ .and_then(|class| match class {
+ NameRefClass::Definition(def) => Some(def),
+ _ => None,
+ })
+ .or_else(|| {
+ NameClass::classify_lifetime(sema, lifetime).and_then(|it| match it {
+ NameClass::Definition(it) => Some(it),
+ _ => None,
+ })
+ })
+ .map(|def| (name_like, def))
+ .ok_or_else(|| format_err!("No references found at position"))
+ }
+ };
+ res
+ });
+
+ let res: RenameResult<Vec<_>> = symbols.collect();
+ match res {
+ Ok(v) => {
+ if v.is_empty() {
+ // FIXME: some semantic duplication between "empty vec" and "Err()"
+ Err(format_err!("No references found at position"))
+ } else {
+ // remove duplicates, comparing `Definition`s
+ Ok(v.into_iter().unique_by(|t| t.1))
+ }
+ }
+ Err(e) => Err(e),
+ }
+}
+
+fn rename_to_self(
+ sema: &Semantics<'_, RootDatabase>,
+ local: hir::Local,
+) -> RenameResult<SourceChange> {
+ if never!(local.is_self(sema.db)) {
+ bail!("rename_to_self invoked on self");
+ }
+
+ let fn_def = match local.parent(sema.db) {
+ hir::DefWithBody::Function(func) => func,
+ _ => bail!("Cannot rename local to self outside of function"),
+ };
+
+ if fn_def.self_param(sema.db).is_some() {
+ bail!("Method already has a self parameter");
+ }
+
+ let params = fn_def.assoc_fn_params(sema.db);
+ let first_param = params
+ .first()
+ .ok_or_else(|| format_err!("Cannot rename local to self unless it is a parameter"))?;
+ match first_param.as_local(sema.db) {
+ Some(plocal) => {
+ if plocal != local {
+ bail!("Only the first parameter may be renamed to self");
+ }
+ }
+ None => bail!("rename_to_self invoked on destructuring parameter"),
+ }
+
+ let assoc_item = fn_def
+ .as_assoc_item(sema.db)
+ .ok_or_else(|| format_err!("Cannot rename parameter to self for free function"))?;
+ let impl_ = match assoc_item.container(sema.db) {
+ hir::AssocItemContainer::Trait(_) => {
+ bail!("Cannot rename parameter to self for trait functions");
+ }
+ hir::AssocItemContainer::Impl(impl_) => impl_,
+ };
+ let first_param_ty = first_param.ty();
+ let impl_ty = impl_.self_ty(sema.db);
+ let (ty, self_param) = if impl_ty.remove_ref().is_some() {
+ // if the impl is a ref to the type we can just match the `&T` with self directly
+ (first_param_ty.clone(), "self")
+ } else {
+ first_param_ty.remove_ref().map_or((first_param_ty.clone(), "self"), |ty| {
+ (ty, if first_param_ty.is_mutable_reference() { "&mut self" } else { "&self" })
+ })
+ };
+
+ if ty != impl_ty {
+ bail!("Parameter type differs from impl block type");
+ }
+
+ let InFile { file_id, value: param_source } =
+ first_param.source(sema.db).ok_or_else(|| format_err!("No source for parameter found"))?;
+
+ let def = Definition::Local(local);
+ let usages = def.usages(sema).all();
+ let mut source_change = SourceChange::default();
+ source_change.extend(usages.iter().map(|(&file_id, references)| {
+ (file_id, source_edit_from_references(references, def, "self"))
+ }));
+ source_change.insert_source_edit(
+ file_id.original_file(sema.db),
+ TextEdit::replace(param_source.syntax().text_range(), String::from(self_param)),
+ );
+ Ok(source_change)
+}
+
+fn rename_self_to_param(
+ sema: &Semantics<'_, RootDatabase>,
+ local: hir::Local,
+ self_param: hir::SelfParam,
+ new_name: &str,
+) -> RenameResult<SourceChange> {
+ if new_name == "self" {
+ // Let's do nothing rather than complain.
+ cov_mark::hit!(rename_self_to_self);
+ return Ok(SourceChange::default());
+ }
+
+ let identifier_kind = IdentifierKind::classify(new_name)?;
+
+ let InFile { file_id, value: self_param } =
+ self_param.source(sema.db).ok_or_else(|| format_err!("cannot find function source"))?;
+
+ let def = Definition::Local(local);
+ let usages = def.usages(sema).all();
+ let edit = text_edit_from_self_param(&self_param, new_name)
+ .ok_or_else(|| format_err!("No target type found"))?;
+ if usages.len() > 1 && identifier_kind == IdentifierKind::Underscore {
+ bail!("Cannot rename reference to `_` as it is being referenced multiple times");
+ }
+ let mut source_change = SourceChange::default();
+ source_change.insert_source_edit(file_id.original_file(sema.db), edit);
+ source_change.extend(usages.iter().map(|(&file_id, references)| {
+ (file_id, source_edit_from_references(references, def, new_name))
+ }));
+ Ok(source_change)
+}
+
+fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Option<TextEdit> {
+ fn target_type_name(impl_def: &ast::Impl) -> Option<String> {
+ if let Some(ast::Type::PathType(p)) = impl_def.self_ty() {
+ return Some(p.path()?.segment()?.name_ref()?.text().to_string());
+ }
+ None
+ }
+
+ let impl_def = self_param.syntax().ancestors().find_map(ast::Impl::cast)?;
+ let type_name = target_type_name(&impl_def)?;
+
+ let mut replacement_text = String::from(new_name);
+ replacement_text.push_str(": ");
+ match (self_param.amp_token(), self_param.mut_token()) {
+ (Some(_), None) => replacement_text.push('&'),
+ (Some(_), Some(_)) => replacement_text.push_str("&mut "),
+ (_, _) => (),
+ };
+ replacement_text.push_str(type_name.as_str());
+
+ Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use stdx::trim_indent;
+ use test_utils::assert_eq_text;
+ use text_edit::TextEdit;
+
+ use crate::{fixture, FileId};
+
+ use super::{RangeInfo, RenameError};
+
+ #[track_caller]
+ fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ let ra_fixture_after = &trim_indent(ra_fixture_after);
+ let (analysis, position) = fixture::position(ra_fixture_before);
+ let rename_result = analysis
+ .rename(position, new_name)
+ .unwrap_or_else(|err| panic!("Rename to '{}' was cancelled: {}", new_name, err));
+ match rename_result {
+ Ok(source_change) => {
+ let mut text_edit_builder = TextEdit::builder();
+ let mut file_id: Option<FileId> = None;
+ for edit in source_change.source_file_edits {
+ file_id = Some(edit.0);
+ for indel in edit.1.into_iter() {
+ text_edit_builder.replace(indel.delete, indel.insert);
+ }
+ }
+ if let Some(file_id) = file_id {
+ let mut result = analysis.file_text(file_id).unwrap().to_string();
+ text_edit_builder.finish().apply(&mut result);
+ assert_eq_text!(ra_fixture_after, &*result);
+ }
+ }
+ Err(err) => {
+ if ra_fixture_after.starts_with("error:") {
+ let error_message = ra_fixture_after
+ .chars()
+ .into_iter()
+ .skip("error:".len())
+ .collect::<String>();
+ assert_eq!(error_message.trim(), err.to_string());
+ } else {
+ panic!("Rename to '{}' failed unexpectedly: {}", new_name, err)
+ }
+ }
+ };
+ }
+
+ fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let source_change =
+ analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError");
+ expect.assert_debug_eq(&source_change)
+ }
+
+ fn check_expect_will_rename_file(new_name: &str, ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let source_change = analysis
+ .will_rename_file(position.file_id, new_name)
+ .unwrap()
+ .expect("Expect returned a RenameError");
+ expect.assert_debug_eq(&source_change)
+ }
+
+ fn check_prepare(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let result = analysis
+ .prepare_rename(position)
+ .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {}", err));
+ match result {
+ Ok(RangeInfo { range, info: () }) => {
+ let source = analysis.file_text(position.file_id).unwrap();
+ expect.assert_eq(&format!("{:?}: {}", range, &source[range]))
+ }
+ Err(RenameError(err)) => expect.assert_eq(&err),
+ };
+ }
+
+ #[test]
+ fn test_prepare_rename_namelikes() {
+ check_prepare(r"fn name$0<'lifetime>() {}", expect![[r#"3..7: name"#]]);
+ check_prepare(r"fn name<'lifetime$0>() {}", expect![[r#"8..17: 'lifetime"#]]);
+ check_prepare(r"fn name<'lifetime>() { name$0(); }", expect![[r#"23..27: name"#]]);
+ }
+
+ #[test]
+ fn test_prepare_rename_in_macro() {
+ check_prepare(
+ r"macro_rules! foo {
+ ($ident:ident) => {
+ pub struct $ident;
+ }
+}
+foo!(Foo$0);",
+ expect![[r#"83..86: Foo"#]],
+ );
+ }
+
+ #[test]
+ fn test_prepare_rename_keyword() {
+ check_prepare(r"struct$0 Foo;", expect![[r#"No references found at position"#]]);
+ }
+
+ #[test]
+ fn test_prepare_rename_tuple_field() {
+ check_prepare(
+ r#"
+struct Foo(i32);
+
+fn baz() {
+ let mut x = Foo(4);
+ x.0$0 = 5;
+}
+"#,
+ expect![[r#"No references found at position"#]],
+ );
+ }
+
+ #[test]
+ fn test_prepare_rename_builtin() {
+ check_prepare(
+ r#"
+fn foo() {
+ let x: i32$0 = 0;
+}
+"#,
+ expect![[r#"No references found at position"#]],
+ );
+ }
+
+ #[test]
+ fn test_prepare_rename_self() {
+ check_prepare(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn foo(self) -> Self$0 {
+ self
+ }
+}
+"#,
+ expect![[r#"No references found at position"#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_to_underscore() {
+ check("_", r#"fn main() { let i$0 = 1; }"#, r#"fn main() { let _ = 1; }"#);
+ }
+
+ #[test]
+ fn test_rename_to_raw_identifier() {
+ check("r#fn", r#"fn main() { let i$0 = 1; }"#, r#"fn main() { let r#fn = 1; }"#);
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier1() {
+ check(
+ "invalid!",
+ r#"fn main() { let i$0 = 1; }"#,
+ "error: Invalid name `invalid!`: not an identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier2() {
+ check(
+ "multiple tokens",
+ r#"fn main() { let i$0 = 1; }"#,
+ "error: Invalid name `multiple tokens`: not an identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier3() {
+ check(
+ "let",
+ r#"fn main() { let i$0 = 1; }"#,
+ "error: Invalid name `let`: not an identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier_lifetime() {
+ cov_mark::check!(rename_not_an_ident_ref);
+ check(
+ "'foo",
+ r#"fn main() { let i$0 = 1; }"#,
+ "error: Invalid name `'foo`: not an identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier_lifetime2() {
+ cov_mark::check!(rename_not_a_lifetime_ident_ref);
+ check(
+ "foo",
+ r#"fn main<'a>(_: &'a$0 ()) {}"#,
+ "error: Invalid name `foo`: not a lifetime identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_underscore_invalid() {
+ cov_mark::check!(rename_underscore_multiple);
+ check(
+ "_",
+ r#"fn main(foo$0: ()) {foo;}"#,
+ "error: Cannot rename reference to `_` as it is being referenced multiple times",
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_invalid() {
+ check(
+ "'foo",
+ r#"mod foo$0 {}"#,
+ "error: Invalid name `'foo`: cannot rename module to 'foo",
+ );
+ }
+
+ #[test]
+ fn test_rename_for_local() {
+ check(
+ "k",
+ r#"
+fn main() {
+ let mut i = 1;
+ let j = 1;
+ i = i$0 + j;
+
+ { i = 0; }
+
+ i = 5;
+}
+"#,
+ r#"
+fn main() {
+ let mut k = 1;
+ let j = 1;
+ k = k + j;
+
+ { k = 0; }
+
+ k = 5;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_unresolved_reference() {
+ check(
+ "new_name",
+ r#"fn main() { let _ = unresolved_ref$0; }"#,
+ "error: No references found at position",
+ );
+ }
+
+ #[test]
+ fn test_rename_macro_multiple_occurrences() {
+ check(
+ "Baaah",
+ r#"macro_rules! foo {
+ ($ident:ident) => {
+ const $ident: () = ();
+ struct $ident {}
+ };
+}
+
+foo!($0Foo);
+const _: () = Foo;
+const _: Foo = Foo {};
+ "#,
+ r#"
+macro_rules! foo {
+ ($ident:ident) => {
+ const $ident: () = ();
+ struct $ident {}
+ };
+}
+
+foo!(Baaah);
+const _: () = Baaah;
+const _: Baaah = Baaah {};
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_rename_for_macro_args() {
+ check(
+ "b",
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a$0 = "test";
+ foo!(a);
+}
+"#,
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let b = "test";
+ foo!(b);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_for_macro_args_rev() {
+ check(
+ "b",
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a = "test";
+ foo!(a$0);
+}
+"#,
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let b = "test";
+ foo!(b);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_for_macro_define_fn() {
+ check(
+ "bar",
+ r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(foo);
+fn main() {
+ fo$0o();
+}
+"#,
+ r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(bar);
+fn main() {
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_for_macro_define_fn_rev() {
+ check(
+ "bar",
+ r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(fo$0o);
+fn main() {
+ foo();
+}
+"#,
+ r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(bar);
+fn main() {
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_for_param_inside() {
+ check("j", r#"fn foo(i : u32) -> u32 { i$0 }"#, r#"fn foo(j : u32) -> u32 { j }"#);
+ }
+
+ #[test]
+ fn test_rename_refs_for_fn_param() {
+ check("j", r#"fn foo(i$0 : u32) -> u32 { i }"#, r#"fn foo(j : u32) -> u32 { j }"#);
+ }
+
+ #[test]
+ fn test_rename_for_mut_param() {
+ check("j", r#"fn foo(mut i$0 : u32) -> u32 { i }"#, r#"fn foo(mut j : u32) -> u32 { j }"#);
+ }
+
+ #[test]
+ fn test_rename_struct_field() {
+ check(
+ "foo",
+ r#"
+struct Foo { field$0: i32 }
+
+impl Foo {
+ fn new(i: i32) -> Self {
+ Self { field: i }
+ }
+}
+"#,
+ r#"
+struct Foo { foo: i32 }
+
+impl Foo {
+ fn new(i: i32) -> Self {
+ Self { foo: i }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_field_in_field_shorthand() {
+ cov_mark::check!(test_rename_field_in_field_shorthand);
+ check(
+ "field",
+ r#"
+struct Foo { foo$0: i32 }
+
+impl Foo {
+ fn new(foo: i32) -> Self {
+ Self { foo }
+ }
+}
+"#,
+ r#"
+struct Foo { field: i32 }
+
+impl Foo {
+ fn new(foo: i32) -> Self {
+ Self { field: foo }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_local_in_field_shorthand() {
+ cov_mark::check!(test_rename_local_in_field_shorthand);
+ check(
+ "j",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn new(i$0: i32) -> Self {
+ Self { i }
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn new(j: i32) -> Self {
+ Self { i: j }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_field_shorthand_correct_struct() {
+ check(
+ "j",
+ r#"
+struct Foo { i$0: i32 }
+struct Bar { i: i32 }
+
+impl Bar {
+ fn new(i: i32) -> Self {
+ Self { i }
+ }
+}
+"#,
+ r#"
+struct Foo { j: i32 }
+struct Bar { i: i32 }
+
+impl Bar {
+ fn new(i: i32) -> Self {
+ Self { i }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_shadow_local_for_struct_shorthand() {
+ check(
+ "j",
+ r#"
+struct Foo { i: i32 }
+
+fn baz(i$0: i32) -> Self {
+ let x = Foo { i };
+ {
+ let i = 0;
+ Foo { i }
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+fn baz(j: i32) -> Self {
+ let x = Foo { i: j };
+ {
+ let i = 0;
+ Foo { i }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_mod() {
+ check_expect(
+ "foo2",
+ r#"
+//- /lib.rs
+mod bar;
+
+//- /bar.rs
+mod foo$0;
+
+//- /bar/foo.rs
+// empty
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 1,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 2,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 2,
+ ),
+ path: "foo2.rs",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_in_use_tree() {
+ check_expect(
+ "quux",
+ r#"
+//- /main.rs
+pub mod foo;
+pub mod bar;
+fn main() {}
+
+//- /foo.rs
+pub struct FooContent;
+
+//- /bar.rs
+use crate::foo$0::FooContent;
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "quux",
+ delete: 8..11,
+ },
+ ],
+ },
+ FileId(
+ 2,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "quux",
+ delete: 11..14,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "quux.rs",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_in_dir() {
+ check_expect(
+ "foo2",
+ r#"
+//- /lib.rs
+mod fo$0o;
+//- /foo/mod.rs
+// empty
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveDir {
+ src: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "../foo",
+ },
+ src_id: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "../foo2",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_unusually_nested_mod() {
+ check_expect(
+ "bar",
+ r#"
+//- /lib.rs
+mod outer { mod fo$0o; }
+
+//- /outer/foo.rs
+// empty
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "bar",
+ delete: 16..19,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "bar.rs",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_module_rename_in_path() {
+ check(
+ "baz",
+ r#"
+mod $0foo {
+ pub use self::bar as qux;
+ pub fn bar() {}
+}
+
+fn main() { foo::bar(); }
+"#,
+ r#"
+mod baz {
+ pub use self::bar as qux;
+ pub fn bar() {}
+}
+
+fn main() { baz::bar(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_filename_and_path() {
+ check_expect(
+ "foo2",
+ r#"
+//- /lib.rs
+mod bar;
+fn f() {
+ bar::foo::fun()
+}
+
+//- /bar.rs
+pub mod foo$0;
+
+//- /bar/foo.rs
+// pub fn fun() {}
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 27..30,
+ },
+ ],
+ },
+ FileId(
+ 1,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 8..11,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 2,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 2,
+ ),
+ path: "foo2.rs",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_recursive() {
+ check_expect(
+ "foo2",
+ r#"
+//- /lib.rs
+mod foo$0;
+
+//- /foo.rs
+mod bar;
+mod corge;
+
+//- /foo/bar.rs
+mod qux;
+
+//- /foo/bar/qux.rs
+mod quux;
+
+//- /foo/bar/qux/quux/mod.rs
+// empty
+
+//- /foo/corge.rs
+// empty
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "foo2.rs",
+ },
+ },
+ MoveDir {
+ src: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "foo",
+ },
+ src_id: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "foo2",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ )
+ }
+ #[test]
+ fn test_rename_mod_ref_by_super() {
+ check(
+ "baz",
+ r#"
+ mod $0foo {
+ struct X;
+
+ mod bar {
+ use super::X;
+ }
+ }
+ "#,
+ r#"
+ mod baz {
+ struct X;
+
+ mod bar {
+ use super::X;
+ }
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_rename_mod_in_macro() {
+ check(
+ "bar",
+ r#"
+//- /foo.rs
+
+//- /lib.rs
+macro_rules! submodule {
+ ($name:ident) => {
+ mod $name;
+ };
+}
+
+submodule!($0foo);
+"#,
+ r#"
+macro_rules! submodule {
+ ($name:ident) => {
+ mod $name;
+ };
+}
+
+submodule!(bar);
+"#,
+ )
+ }
+
+ #[test]
+ fn test_rename_mod_for_crate_root() {
+ check_expect_will_rename_file(
+ "main",
+ r#"
+//- /lib.rs
+use crate::foo as bar;
+fn foo() {}
+mod bar$0;
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {},
+ file_system_edits: [],
+ is_snippet: false,
+ }
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_enum_variant_from_module_1() {
+ cov_mark::check!(rename_non_local);
+ check(
+ "Baz",
+ r#"
+mod foo {
+ pub enum Foo { Bar$0 }
+}
+
+fn func(f: foo::Foo) {
+ match f {
+ foo::Foo::Bar => {}
+ }
+}
+"#,
+ r#"
+mod foo {
+ pub enum Foo { Baz }
+}
+
+fn func(f: foo::Foo) {
+ match f {
+ foo::Foo::Baz => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_enum_variant_from_module_2() {
+ check(
+ "baz",
+ r#"
+mod foo {
+ pub struct Foo { pub bar$0: uint }
+}
+
+fn foo(f: foo::Foo) {
+ let _ = f.bar;
+}
+"#,
+ r#"
+mod foo {
+ pub struct Foo { pub baz: uint }
+}
+
+fn foo(f: foo::Foo) {
+ let _ = f.baz;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_parameter_to_self() {
+ cov_mark::check!(rename_to_self);
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo$0: &mut Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(&mut self) -> i32 {
+ self.i
+ }
+}
+"#,
+ );
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo$0: Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(self) -> i32 {
+ self.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_parameter_to_self_error_no_impl() {
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+
+fn f(foo$0: &mut Foo) -> i32 {
+ foo.i
+}
+"#,
+ "error: Cannot rename parameter to self for free function",
+ );
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+struct Bar;
+
+impl Bar {
+ fn f(foo$0: &mut Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ "error: Parameter type differs from impl block type",
+ );
+ }
+
+ #[test]
+ fn test_parameter_to_self_error_not_first() {
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+impl Foo {
+ fn f(x: (), foo$0: &mut Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ "error: Only the first parameter may be renamed to self",
+ );
+ }
+
+ #[test]
+ fn test_parameter_to_self_impl_ref() {
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+impl &Foo {
+ fn f(foo$0: &Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+impl &Foo {
+ fn f(self) -> i32 {
+ self.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_self_to_parameter() {
+ check(
+ "foo",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(&mut $0self) -> i32 {
+ self.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo: &mut Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_owned_self_to_parameter() {
+ cov_mark::check!(rename_self_to_param);
+ check(
+ "foo",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f($0self) -> i32 {
+ self.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo: Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_self_in_path_to_parameter() {
+ check(
+ "foo",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(&self) -> i32 {
+ let self_var = 1;
+ self$0.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo: &Foo) -> i32 {
+ let self_var = 1;
+ foo.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_field_put_init_shorthand() {
+ cov_mark::check!(test_rename_field_put_init_shorthand);
+ check(
+ "bar",
+ r#"
+struct Foo { i$0: i32 }
+
+fn foo(bar: i32) -> Foo {
+ Foo { i: bar }
+}
+"#,
+ r#"
+struct Foo { bar: i32 }
+
+fn foo(bar: i32) -> Foo {
+ Foo { bar }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_local_put_init_shorthand() {
+ cov_mark::check!(test_rename_local_put_init_shorthand);
+ check(
+ "i",
+ r#"
+struct Foo { i: i32 }
+
+fn foo(bar$0: i32) -> Foo {
+ Foo { i: bar }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+fn foo(i: i32) -> Foo {
+ Foo { i }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_field_pat_into_shorthand() {
+ cov_mark::check!(test_rename_field_put_init_shorthand_pat);
+ check(
+ "baz",
+ r#"
+struct Foo { i$0: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { i: ref baz @ qux } = foo;
+ let _ = qux;
+}
+"#,
+ r#"
+struct Foo { baz: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { baz: ref baz @ qux } = foo;
+ let _ = qux;
+}
+"#,
+ );
+ check(
+ "baz",
+ r#"
+struct Foo { i$0: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { i: ref baz } = foo;
+ let _ = qux;
+}
+"#,
+ r#"
+struct Foo { baz: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { ref baz } = foo;
+ let _ = qux;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_local_pat_into_shorthand() {
+ cov_mark::check!(test_rename_local_put_init_shorthand_pat);
+ check(
+ "field",
+ r#"
+struct Foo { field: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { field: qux$0 } = foo;
+ let _ = qux;
+}
+"#,
+ r#"
+struct Foo { field: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { field } = foo;
+ let _ = field;
+}
+"#,
+ );
+ check(
+ "field",
+ r#"
+struct Foo { field: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { field: x @ qux$0 } = foo;
+ let _ = qux;
+}
+"#,
+ r#"
+struct Foo { field: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { field: x @ field } = foo;
+ let _ = field;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_binding_in_destructure_pat() {
+ let expected_fixture = r#"
+struct Foo {
+ i: i32,
+}
+
+fn foo(foo: Foo) {
+ let Foo { i: bar } = foo;
+ let _ = bar;
+}
+"#;
+ check(
+ "bar",
+ r#"
+struct Foo {
+ i: i32,
+}
+
+fn foo(foo: Foo) {
+ let Foo { i: b } = foo;
+ let _ = b$0;
+}
+"#,
+ expected_fixture,
+ );
+ check(
+ "bar",
+ r#"
+struct Foo {
+ i: i32,
+}
+
+fn foo(foo: Foo) {
+ let Foo { i } = foo;
+ let _ = i$0;
+}
+"#,
+ expected_fixture,
+ );
+ }
+
+ #[test]
+ fn test_rename_binding_in_destructure_param_pat() {
+ check(
+ "bar",
+ r#"
+struct Foo {
+ i: i32
+}
+
+fn foo(Foo { i }: Foo) -> i32 {
+ i$0
+}
+"#,
+ r#"
+struct Foo {
+ i: i32
+}
+
+fn foo(Foo { i: bar }: Foo) -> i32 {
+ bar
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_struct_field_complex_ident_pat() {
+ cov_mark::check!(rename_record_pat_field_name_split);
+ check(
+ "baz",
+ r#"
+struct Foo { i$0: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { ref i } = foo;
+}
+"#,
+ r#"
+struct Foo { baz: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { baz: ref i } = foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_lifetimes() {
+ cov_mark::check!(rename_lifetime);
+ check(
+ "'yeeee",
+ r#"
+trait Foo<'a> {
+ fn foo() -> &'a ();
+}
+impl<'a> Foo<'a> for &'a () {
+ fn foo() -> &'a$0 () {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+trait Foo<'a> {
+ fn foo() -> &'a ();
+}
+impl<'yeeee> Foo<'yeeee> for &'yeeee () {
+ fn foo() -> &'yeeee () {
+ unimplemented!()
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_rename_bind_pat() {
+ check(
+ "new_name",
+ r#"
+fn main() {
+ enum CustomOption<T> {
+ None,
+ Some(T),
+ }
+
+ let test_variable = CustomOption::Some(22);
+
+ match test_variable {
+ CustomOption::Some(foo$0) if foo == 11 => {}
+ _ => (),
+ }
+}"#,
+ r#"
+fn main() {
+ enum CustomOption<T> {
+ None,
+ Some(T),
+ }
+
+ let test_variable = CustomOption::Some(22);
+
+ match test_variable {
+ CustomOption::Some(new_name) if new_name == 11 => {}
+ _ => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_label() {
+ check(
+ "'foo",
+ r#"
+fn foo<'a>() -> &'a () {
+ 'a: {
+ 'b: loop {
+ break 'a$0;
+ }
+ }
+}
+"#,
+ r#"
+fn foo<'a>() -> &'a () {
+ 'foo: {
+ 'b: loop {
+ break 'foo;
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_self_to_self() {
+ cov_mark::check!(rename_self_to_self);
+ check(
+ "self",
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(self$0) {}
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(self) {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_rename_field_in_pat_in_macro_doesnt_shorthand() {
+ // ideally we would be able to make this emit a short hand, but I doubt this is easily possible
+ check(
+ "baz",
+ r#"
+macro_rules! foo {
+ ($pattern:pat) => {
+ let $pattern = loop {};
+ };
+}
+struct Foo {
+ bar$0: u32,
+}
+fn foo() {
+ foo!(Foo { bar: baz });
+}
+"#,
+ r#"
+macro_rules! foo {
+ ($pattern:pat) => {
+ let $pattern = loop {};
+ };
+}
+struct Foo {
+ baz: u32,
+}
+fn foo() {
+ foo!(Foo { baz: baz });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_rename_tuple_field() {
+ check(
+ "foo",
+ r#"
+struct Foo(i32);
+
+fn baz() {
+ let mut x = Foo(4);
+ x.0$0 = 5;
+}
+"#,
+ "error: No identifier available to rename",
+ );
+ }
+
+ #[test]
+ fn test_rename_builtin() {
+ check(
+ "foo",
+ r#"
+fn foo() {
+ let x: i32$0 = 0;
+}
+"#,
+ "error: Cannot rename builtin type",
+ );
+ }
+
+ #[test]
+ fn test_rename_self() {
+ check(
+ "foo",
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn foo(self) -> Self$0 {
+ self
+ }
+}
+"#,
+ "error: Cannot rename `Self`",
+ );
+ }
+
+ #[test]
+ fn test_rename_ignores_self_ty() {
+ check(
+ "Fo0",
+ r#"
+struct $0Foo;
+
+impl Foo where Self: {}
+"#,
+ r#"
+struct Fo0;
+
+impl Fo0 where Self: {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_fails_on_aliases() {
+ check(
+ "Baz",
+ r#"
+struct Foo;
+use Foo as Bar$0;
+"#,
+ "error: Renaming aliases is currently unsupported",
+ );
+ check(
+ "Baz",
+ r#"
+struct Foo;
+use Foo as Bar;
+use Bar$0;
+"#,
+ "error: Renaming aliases is currently unsupported",
+ );
+ }
+
+ #[test]
+ fn test_rename_trait_method() {
+ let res = r"
+trait Foo {
+ fn foo(&self) {
+ self.foo();
+ }
+}
+
+impl Foo for () {
+ fn foo(&self) {
+ self.foo();
+ }
+}";
+ check(
+ "foo",
+ r#"
+trait Foo {
+ fn bar$0(&self) {
+ self.bar();
+ }
+}
+
+impl Foo for () {
+ fn bar(&self) {
+ self.bar();
+ }
+}"#,
+ res,
+ );
+ check(
+ "foo",
+ r#"
+trait Foo {
+ fn bar(&self) {
+ self.bar$0();
+ }
+}
+
+impl Foo for () {
+ fn bar(&self) {
+ self.bar();
+ }
+}"#,
+ res,
+ );
+ check(
+ "foo",
+ r#"
+trait Foo {
+ fn bar(&self) {
+ self.bar();
+ }
+}
+
+impl Foo for () {
+ fn bar$0(&self) {
+ self.bar();
+ }
+}"#,
+ res,
+ );
+ check(
+ "foo",
+ r#"
+trait Foo {
+ fn bar(&self) {
+ self.bar();
+ }
+}
+
+impl Foo for () {
+ fn bar(&self) {
+ self.bar$0();
+ }
+}"#,
+ res,
+ );
+ }
+
+ #[test]
+ fn test_rename_trait_method_prefix_of_second() {
+ check(
+ "qux",
+ r#"
+trait Foo {
+ fn foo$0() {}
+ fn foobar() {}
+}
+"#,
+ r#"
+trait Foo {
+ fn qux() {}
+ fn foobar() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_trait_const() {
+ let res = r"
+trait Foo {
+ const FOO: ();
+}
+
+impl Foo for () {
+ const FOO: ();
+}
+fn f() { <()>::FOO; }";
+ check(
+ "FOO",
+ r#"
+trait Foo {
+ const BAR$0: ();
+}
+
+impl Foo for () {
+ const BAR: ();
+}
+fn f() { <()>::BAR; }"#,
+ res,
+ );
+ check(
+ "FOO",
+ r#"
+trait Foo {
+ const BAR: ();
+}
+
+impl Foo for () {
+ const BAR$0: ();
+}
+fn f() { <()>::BAR; }"#,
+ res,
+ );
+ check(
+ "FOO",
+ r#"
+trait Foo {
+ const BAR: ();
+}
+
+impl Foo for () {
+ const BAR: ();
+}
+fn f() { <()>::BAR$0; }"#,
+ res,
+ );
+ }
+
+ #[test]
+ fn defs_from_macros_arent_renamed() {
+ check(
+ "lol",
+ r#"
+macro_rules! m { () => { fn f() {} } }
+m!();
+fn main() { f$0() }
+"#,
+ "error: No identifier available to rename",
+ )
+ }
+
+ #[test]
+ fn attributed_item() {
+ check(
+ "function",
+ r#"
+//- proc_macros: identity
+
+#[proc_macros::identity]
+fn func$0() {
+ func();
+}
+"#,
+ r#"
+
+#[proc_macros::identity]
+fn function() {
+ function();
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn in_macro_multi_mapping() {
+ check(
+ "a",
+ r#"
+fn foo() {
+ macro_rules! match_ast2 {
+ ($node:ident {
+ $( $res:expr, )*
+ }) => {{
+ $( if $node { $res } else )*
+ { loop {} }
+ }};
+ }
+ let $0d = 3;
+ match_ast2! {
+ d {
+ d,
+ d,
+ }
+ };
+}
+"#,
+ r#"
+fn foo() {
+ macro_rules! match_ast2 {
+ ($node:ident {
+ $( $res:expr, )*
+ }) => {{
+ $( if $node { $res } else )*
+ { loop {} }
+ }};
+ }
+ let a = 3;
+ match_ast2! {
+ a {
+ a,
+ a,
+ }
+ };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn rename_multi_local() {
+ check(
+ "bar",
+ r#"
+fn foo((foo$0 | foo | foo): ()) {
+ foo;
+ let foo;
+}
+"#,
+ r#"
+fn foo((bar | bar | bar): ()) {
+ bar;
+ let foo;
+}
+"#,
+ );
+ check(
+ "bar",
+ r#"
+fn foo((foo | foo$0 | foo): ()) {
+ foo;
+ let foo;
+}
+"#,
+ r#"
+fn foo((bar | bar | bar): ()) {
+ bar;
+ let foo;
+}
+"#,
+ );
+ check(
+ "bar",
+ r#"
+fn foo((foo | foo | foo): ()) {
+ foo$0;
+ let foo;
+}
+"#,
+ r#"
+fn foo((bar | bar | bar): ()) {
+ bar;
+ let foo;
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
new file mode 100644
index 000000000..bec770ed9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -0,0 +1,2163 @@
+use std::fmt;
+
+use ast::HasName;
+use cfg::CfgExpr;
+use hir::{AsAssocItem, HasAttrs, HasSource, HirDisplay, Semantics};
+use ide_assists::utils::test_related_attribute;
+use ide_db::{
+ base_db::{FilePosition, FileRange},
+ defs::Definition,
+ helpers::visit_file_defs,
+ search::SearchScope,
+ FxHashMap, FxHashSet, RootDatabase, SymbolKind,
+};
+use itertools::Itertools;
+use stdx::{always, format_to};
+use syntax::{
+ ast::{self, AstNode, HasAttrs as _},
+ SmolStr, SyntaxNode,
+};
+
+use crate::{references, FileId, NavigationTarget, ToNav, TryToNav};
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct Runnable {
+ pub use_name_in_title: bool,
+ pub nav: NavigationTarget,
+ pub kind: RunnableKind,
+ pub cfg: Option<CfgExpr>,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub enum TestId {
+ Name(SmolStr),
+ Path(String),
+}
+
+impl fmt::Display for TestId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ TestId::Name(name) => name.fmt(f),
+ TestId::Path(path) => path.fmt(f),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub enum RunnableKind {
+ Test { test_id: TestId, attr: TestAttr },
+ TestMod { path: String },
+ Bench { test_id: TestId },
+ DocTest { test_id: TestId },
+ Bin,
+}
+
+#[cfg(test)]
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+enum RunnableTestKind {
+ Test,
+ TestMod,
+ DocTest,
+ Bench,
+ Bin,
+}
+
+impl Runnable {
+ // test package::module::testname
+ pub fn label(&self, target: Option<String>) -> String {
+ match &self.kind {
+ RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
+ RunnableKind::TestMod { path } => format!("test-mod {}", path),
+ RunnableKind::Bench { test_id } => format!("bench {}", test_id),
+ RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
+ RunnableKind::Bin => {
+ target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t))
+ }
+ }
+ }
+
+ pub fn title(&self) -> String {
+ let mut s = String::from("▶\u{fe0e} Run ");
+ if self.use_name_in_title {
+ format_to!(s, "{}", self.nav.name);
+ if !matches!(self.kind, RunnableKind::Bin) {
+ s.push(' ');
+ }
+ }
+ let suffix = match &self.kind {
+ RunnableKind::TestMod { .. } => "Tests",
+ RunnableKind::Test { .. } => "Test",
+ RunnableKind::DocTest { .. } => "Doctest",
+ RunnableKind::Bench { .. } => "Bench",
+ RunnableKind::Bin => return s,
+ };
+ s.push_str(suffix);
+ s
+ }
+
+ #[cfg(test)]
+ fn test_kind(&self) -> RunnableTestKind {
+ match &self.kind {
+ RunnableKind::TestMod { .. } => RunnableTestKind::TestMod,
+ RunnableKind::Test { .. } => RunnableTestKind::Test,
+ RunnableKind::DocTest { .. } => RunnableTestKind::DocTest,
+ RunnableKind::Bench { .. } => RunnableTestKind::Bench,
+ RunnableKind::Bin => RunnableTestKind::Bin,
+ }
+ }
+}
+
+// Feature: Run
+//
+// Shows a popup suggesting to run a test/benchmark/binary **at the current cursor
+// location**. Super useful for repeatedly running just a single test. Do bind this
+// to a shortcut!
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Run**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[]
+pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
+ let sema = Semantics::new(db);
+
+ let mut res = Vec::new();
+ // Record all runnables that come from macro expansions here instead.
+ // In case an expansion creates multiple runnables, we want to name them to avoid emitting a bunch of equally named runnables.
+ let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default();
+ let mut add_opt = |runnable: Option<Runnable>, def| {
+ if let Some(runnable) = runnable.filter(|runnable| {
+ always!(
+ runnable.nav.file_id == file_id,
+ "tried adding a runnable pointing to a different file: {:?} for {:?}",
+ runnable.kind,
+ file_id
+ )
+ }) {
+ if let Some(def) = def {
+ let file_id = match def {
+ Definition::Module(it) => it.declaration_source(db).map(|src| src.file_id),
+ Definition::Function(it) => it.source(db).map(|src| src.file_id),
+ _ => None,
+ };
+ if let Some(file_id) = file_id.filter(|file| file.call_node(db).is_some()) {
+ in_macro_expansion.entry(file_id).or_default().push(runnable);
+ return;
+ }
+ }
+ res.push(runnable);
+ }
+ };
+ visit_file_defs(&sema, file_id, &mut |def| {
+ let runnable = match def {
+ Definition::Module(it) => runnable_mod(&sema, it),
+ Definition::Function(it) => runnable_fn(&sema, it),
+ Definition::SelfType(impl_) => runnable_impl(&sema, &impl_),
+ _ => None,
+ };
+ add_opt(
+ runnable
+ .or_else(|| module_def_doctest(sema.db, def))
+ // #[macro_export] mbe macros are declared in the root, while their definition may reside in a different module
+ .filter(|it| it.nav.file_id == file_id),
+ Some(def),
+ );
+ if let Definition::SelfType(impl_) = def {
+ impl_.items(db).into_iter().for_each(|assoc| {
+ let runnable = match assoc {
+ hir::AssocItem::Function(it) => {
+ runnable_fn(&sema, it).or_else(|| module_def_doctest(sema.db, it.into()))
+ }
+ hir::AssocItem::Const(it) => module_def_doctest(sema.db, it.into()),
+ hir::AssocItem::TypeAlias(it) => module_def_doctest(sema.db, it.into()),
+ };
+ add_opt(runnable, Some(assoc.into()))
+ });
+ }
+ });
+
+ sema.to_module_defs(file_id)
+ .map(|it| runnable_mod_outline_definition(&sema, it))
+ .for_each(|it| add_opt(it, None));
+
+ res.extend(in_macro_expansion.into_iter().flat_map(|(_, runnables)| {
+ let use_name_in_title = runnables.len() != 1;
+ runnables.into_iter().map(move |mut r| {
+ r.use_name_in_title = use_name_in_title;
+ r
+ })
+ }));
+ res
+}
+
+// Feature: Related Tests
+//
+// Provides a sneak peek of all tests where the current item is used.
+//
+// The simplest way to use this feature is via the context menu:
+// - Right-click on the selected item. The context menu opens.
+// - Select **Peek related tests**
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Peek related tests**
+// |===
+pub(crate) fn related_tests(
+ db: &RootDatabase,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+) -> Vec<Runnable> {
+ let sema = Semantics::new(db);
+ let mut res: FxHashSet<Runnable> = FxHashSet::default();
+ let syntax = sema.parse(position.file_id).syntax().clone();
+
+ find_related_tests(&sema, &syntax, position, search_scope, &mut res);
+
+ res.into_iter().collect()
+}
+
+fn find_related_tests(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+ tests: &mut FxHashSet<Runnable>,
+) {
+ // FIXME: why is this using references::find_defs? This should use ide_db::search instead.
+ let defs = match references::find_defs(sema, syntax, position.offset) {
+ Some(defs) => defs,
+ None => return,
+ };
+ for def in defs {
+ let defs = def
+ .usages(sema)
+ .set_scope(search_scope.clone())
+ .all()
+ .references
+ .into_values()
+ .flatten();
+ for ref_ in defs {
+ let name_ref = match ref_.name {
+ ast::NameLike::NameRef(name_ref) => name_ref,
+ _ => continue,
+ };
+ if let Some(fn_def) =
+ sema.ancestors_with_macros(name_ref.syntax().clone()).find_map(ast::Fn::cast)
+ {
+ if let Some(runnable) = as_test_runnable(sema, &fn_def) {
+ // direct test
+ tests.insert(runnable);
+ } else if let Some(module) = parent_test_module(sema, &fn_def) {
+ // indirect test
+ find_related_tests_in_module(sema, syntax, &fn_def, &module, tests);
+ }
+ }
+ }
+ }
+}
+
+fn find_related_tests_in_module(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ fn_def: &ast::Fn,
+ parent_module: &hir::Module,
+ tests: &mut FxHashSet<Runnable>,
+) {
+ let fn_name = match fn_def.name() {
+ Some(it) => it,
+ _ => return,
+ };
+ let mod_source = parent_module.definition_source(sema.db);
+ let range = match &mod_source.value {
+ hir::ModuleSource::Module(m) => m.syntax().text_range(),
+ hir::ModuleSource::BlockExpr(b) => b.syntax().text_range(),
+ hir::ModuleSource::SourceFile(f) => f.syntax().text_range(),
+ };
+
+ let file_id = mod_source.file_id.original_file(sema.db);
+ let mod_scope = SearchScope::file_range(FileRange { file_id, range });
+ let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() };
+ find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests)
+}
+
+fn as_test_runnable(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> {
+ if test_related_attribute(fn_def).is_some() {
+ let function = sema.to_def(fn_def)?;
+ runnable_fn(sema, function)
+ } else {
+ None
+ }
+}
+
+fn parent_test_module(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> Option<hir::Module> {
+ fn_def.syntax().ancestors().find_map(|node| {
+ let module = ast::Module::cast(node)?;
+ let module = sema.to_def(&module)?;
+
+ if has_test_function_or_multiple_test_submodules(sema, &module) {
+ Some(module)
+ } else {
+ None
+ }
+ })
+}
+
+pub(crate) fn runnable_fn(
+ sema: &Semantics<'_, RootDatabase>,
+ def: hir::Function,
+) -> Option<Runnable> {
+ let func = def.source(sema.db)?;
+ let name = def.name(sema.db).to_smol_str();
+
+ let root = def.module(sema.db).krate().root_module(sema.db);
+
+ let kind = if name == "main" && def.module(sema.db) == root {
+ RunnableKind::Bin
+ } else {
+ let test_id = || {
+ let canonical_path = {
+ let def: hir::ModuleDef = def.into();
+ def.canonical_path(sema.db)
+ };
+ canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name))
+ };
+
+ if test_related_attribute(&func.value).is_some() {
+ let attr = TestAttr::from_fn(&func.value);
+ RunnableKind::Test { test_id: test_id(), attr }
+ } else if func.value.has_atom_attr("bench") {
+ RunnableKind::Bench { test_id: test_id() }
+ } else {
+ return None;
+ }
+ };
+
+ let nav = NavigationTarget::from_named(
+ sema.db,
+ func.as_ref().map(|it| it as &dyn ast::HasName),
+ SymbolKind::Function,
+ );
+ let cfg = def.attrs(sema.db).cfg();
+ Some(Runnable { use_name_in_title: false, nav, kind, cfg })
+}
+
+pub(crate) fn runnable_mod(
+ sema: &Semantics<'_, RootDatabase>,
+ def: hir::Module,
+) -> Option<Runnable> {
+ if !has_test_function_or_multiple_test_submodules(sema, &def) {
+ return None;
+ }
+ let path =
+ def.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
+
+ let attrs = def.attrs(sema.db);
+ let cfg = attrs.cfg();
+ let nav = NavigationTarget::from_module_to_decl(sema.db, def);
+ Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::TestMod { path }, cfg })
+}
+
+pub(crate) fn runnable_impl(
+ sema: &Semantics<'_, RootDatabase>,
+ def: &hir::Impl,
+) -> Option<Runnable> {
+ let attrs = def.attrs(sema.db);
+ if !has_runnable_doc_test(&attrs) {
+ return None;
+ }
+ let cfg = attrs.cfg();
+ let nav = def.try_to_nav(sema.db)?;
+ let ty = def.self_ty(sema.db);
+ let adt_name = ty.as_adt()?.name(sema.db);
+ let mut ty_args = ty.type_arguments().peekable();
+ let params = if ty_args.peek().is_some() {
+ format!("<{}>", ty_args.format_with(", ", |ty, cb| cb(&ty.display(sema.db))))
+ } else {
+ String::new()
+ };
+ let test_id = TestId::Path(format!("{}{}", adt_name, params));
+
+ Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, cfg })
+}
+
+/// Creates a test mod runnable for outline modules at the top of their definition.
+fn runnable_mod_outline_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ def: hir::Module,
+) -> Option<Runnable> {
+ if !has_test_function_or_multiple_test_submodules(sema, &def) {
+ return None;
+ }
+ let path =
+ def.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
+
+ let attrs = def.attrs(sema.db);
+ let cfg = attrs.cfg();
+ match def.definition_source(sema.db).value {
+ hir::ModuleSource::SourceFile(_) => Some(Runnable {
+ use_name_in_title: false,
+ nav: def.to_nav(sema.db),
+ kind: RunnableKind::TestMod { path },
+ cfg,
+ }),
+ _ => None,
+ }
+}
+
+fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
+ let attrs = match def {
+ Definition::Module(it) => it.attrs(db),
+ Definition::Function(it) => it.attrs(db),
+ Definition::Adt(it) => it.attrs(db),
+ Definition::Variant(it) => it.attrs(db),
+ Definition::Const(it) => it.attrs(db),
+ Definition::Static(it) => it.attrs(db),
+ Definition::Trait(it) => it.attrs(db),
+ Definition::TypeAlias(it) => it.attrs(db),
+ Definition::Macro(it) => it.attrs(db),
+ Definition::SelfType(it) => it.attrs(db),
+ _ => return None,
+ };
+ if !has_runnable_doc_test(&attrs) {
+ return None;
+ }
+ let def_name = def.name(db)?;
+ let path = (|| {
+ let mut path = String::new();
+ def.canonical_module_path(db)?
+ .flat_map(|it| it.name(db))
+ .for_each(|name| format_to!(path, "{}::", name));
+ // This probably belongs in canonical_path?
+ if let Some(assoc_item) = def.as_assoc_item(db) {
+ if let hir::AssocItemContainer::Impl(imp) = assoc_item.container(db) {
+ let ty = imp.self_ty(db);
+ if let Some(adt) = ty.as_adt() {
+ let name = adt.name(db);
+ let mut ty_args = ty.type_arguments().peekable();
+ format_to!(path, "{}", name);
+ if ty_args.peek().is_some() {
+ format_to!(
+ path,
+ "<{}>",
+ ty_args.format_with(", ", |ty, cb| cb(&ty.display(db)))
+ );
+ }
+ format_to!(path, "::{}", def_name);
+ return Some(path);
+ }
+ }
+ }
+ format_to!(path, "{}", def_name);
+ Some(path)
+ })();
+
+ let test_id = path.map_or_else(|| TestId::Name(def_name.to_smol_str()), TestId::Path);
+
+ let mut nav = match def {
+ Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def),
+ def => def.try_to_nav(db)?,
+ };
+ nav.focus_range = None;
+ nav.description = None;
+ nav.docs = None;
+ nav.kind = None;
+ let res = Runnable {
+ use_name_in_title: false,
+ nav,
+ kind: RunnableKind::DocTest { test_id },
+ cfg: attrs.cfg(),
+ };
+ Some(res)
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct TestAttr {
+ pub ignore: bool,
+}
+
+impl TestAttr {
+ fn from_fn(fn_def: &ast::Fn) -> TestAttr {
+ let ignore = fn_def
+ .attrs()
+ .filter_map(|attr| attr.simple_name())
+ .any(|attribute_text| attribute_text == "ignore");
+ TestAttr { ignore }
+ }
+}
+
+const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
+const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
+ &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
+
+fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
+ attrs.docs().map_or(false, |doc| {
+ let mut in_code_block = false;
+
+ for line in String::from(doc).lines() {
+ if let Some(header) =
+ RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
+ {
+ in_code_block = !in_code_block;
+
+ if in_code_block
+ && header
+ .split(',')
+ .all(|sub| RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE.contains(&sub.trim()))
+ {
+ return true;
+ }
+ }
+ }
+
+ false
+ })
+}
+
+// We could create runnables for modules with number_of_test_submodules > 0,
+// but that bloats the runnables for no real benefit, since all tests can be run by the submodule already
+fn has_test_function_or_multiple_test_submodules(
+ sema: &Semantics<'_, RootDatabase>,
+ module: &hir::Module,
+) -> bool {
+ let mut number_of_test_submodules = 0;
+
+ for item in module.declarations(sema.db) {
+ match item {
+ hir::ModuleDef::Function(f) => {
+ if let Some(it) = f.source(sema.db) {
+ if test_related_attribute(&it.value).is_some() {
+ return true;
+ }
+ }
+ }
+ hir::ModuleDef::Module(submodule) => {
+ if has_test_function_or_multiple_test_submodules(sema, &submodule) {
+ number_of_test_submodules += 1;
+ }
+ }
+ _ => (),
+ }
+ }
+
+ number_of_test_submodules > 1
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::fixture;
+
+ use super::{RunnableTestKind::*, *};
+
+ fn check(
+ ra_fixture: &str,
+ // FIXME: fold this into `expect` as well
+ actions: &[RunnableTestKind],
+ expect: Expect,
+ ) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let mut runnables = analysis.runnables(position.file_id).unwrap();
+ runnables.sort_by_key(|it| (it.nav.full_range.start(), it.nav.name.clone()));
+ expect.assert_debug_eq(&runnables);
+ assert_eq!(
+ actions,
+ runnables.into_iter().map(|it| it.test_kind()).collect::<Vec<_>>().as_slice()
+ );
+ }
+
+ fn check_tests(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let tests = analysis.related_tests(position, None).unwrap();
+ expect.assert_debug_eq(&tests);
+ }
+
+ #[test]
+ fn test_runnables() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+#[test]
+fn test_foo() {}
+
+#[test]
+#[ignore]
+fn test_foo() {}
+
+#[bench]
+fn bench() {}
+
+mod not_a_root {
+ fn main() {}
+}
+"#,
+ &[TestMod, Bin, Test, Test, Bench],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..137,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..39,
+ focus_range: 26..34,
+ name: "test_foo",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 41..75,
+ focus_range: 62..70,
+ name: "test_foo",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo",
+ ),
+ attr: TestAttr {
+ ignore: true,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 77..99,
+ focus_range: 89..94,
+ name: "bench",
+ kind: Function,
+ },
+ kind: Bench {
+ test_id: Path(
+ "bench",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+/// ```
+/// let x = 5;
+/// ```
+fn should_have_runnable() {}
+
+/// ```edition2018
+/// let x = 5;
+/// ```
+fn should_have_runnable_1() {}
+
+/// ```
+/// let z = 55;
+/// ```
+///
+/// ```ignore
+/// let z = 56;
+/// ```
+fn should_have_runnable_2() {}
+
+/**
+```rust
+let z = 55;
+```
+*/
+fn should_have_no_runnable_3() {}
+
+/**
+ ```rust
+ let z = 55;
+ ```
+*/
+fn should_have_no_runnable_4() {}
+
+/// ```no_run
+/// let z = 55;
+/// ```
+fn should_have_no_runnable() {}
+
+/// ```ignore
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_2() {}
+
+/// ```compile_fail
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_3() {}
+
+/// ```text
+/// arbitrary plain text
+/// ```
+fn should_have_no_runnable_4() {}
+
+/// ```text
+/// arbitrary plain text
+/// ```
+///
+/// ```sh
+/// $ shell code
+/// ```
+fn should_have_no_runnable_5() {}
+
+/// ```rust,no_run
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_6() {}
+
+/// ```
+/// let x = 5;
+/// ```
+struct StructWithRunnable(String);
+
+/// ```
+/// let x = 5;
+/// ```
+impl StructWithRunnable {}
+
+trait Test {
+ fn test() -> usize {
+ 5usize
+ }
+}
+
+/// ```
+/// let x = 5;
+/// ```
+impl Test for StructWithRunnable {}
+"#,
+ &[Bin, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..74,
+ name: "should_have_runnable",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 76..148,
+ name: "should_have_runnable_1",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable_1",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 150..254,
+ name: "should_have_runnable_2",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable_2",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 256..320,
+ name: "should_have_no_runnable_3",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_no_runnable_3",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 322..398,
+ name: "should_have_no_runnable_4",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_no_runnable_4",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 900..965,
+ name: "StructWithRunnable",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 967..1024,
+ focus_range: 1003..1021,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1088..1154,
+ focus_range: 1133..1151,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test_in_impl() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+struct Data;
+impl Data {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+"#,
+ &[Bin, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 44..98,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Data::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_module() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod test_mod {
+ #[test]
+ fn test_foo1() {}
+}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..51,
+ focus_range: 5..13,
+ name: "test_mod",
+ kind: Module,
+ description: "mod test_mod",
+ },
+ kind: TestMod {
+ path: "test_mod",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 20..49,
+ focus_range: 35..44,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_mod::test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn only_modules_with_test_functions_or_more_than_one_test_submodule_have_runners() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod root_tests {
+ mod nested_tests_0 {
+ mod nested_tests_1 {
+ #[test]
+ fn nested_test_11() {}
+
+ #[test]
+ fn nested_test_12() {}
+ }
+
+ mod nested_tests_2 {
+ #[test]
+ fn nested_test_2() {}
+ }
+
+ mod nested_tests_3 {}
+ }
+
+ mod nested_tests_4 {}
+}
+"#,
+ &[TestMod, TestMod, Test, Test, TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 22..323,
+ focus_range: 26..40,
+ name: "nested_tests_0",
+ kind: Module,
+ description: "mod nested_tests_0",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 51..192,
+ focus_range: 55..69,
+ name: "nested_tests_1",
+ kind: Module,
+ description: "mod nested_tests_1",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0::nested_tests_1",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 84..126,
+ focus_range: 107..121,
+ name: "nested_test_11",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_1::nested_test_11",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 140..182,
+ focus_range: 163..177,
+ name: "nested_test_12",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_1::nested_test_12",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 202..286,
+ focus_range: 206..220,
+ name: "nested_tests_2",
+ kind: Module,
+ description: "mod nested_tests_2",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0::nested_tests_2",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 235..276,
+ focus_range: 258..271,
+ name: "nested_test_2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_2::nested_test_2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_with_feature() {
+ check(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+$0
+#[test]
+#[cfg(feature = "foo")]
+fn test_foo1() {}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..51,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..50,
+ focus_range: 36..45,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: Some(
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "foo",
+ },
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_with_features() {
+ check(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo,feature=bar
+$0
+#[test]
+#[cfg(all(feature = "foo", feature = "bar"))]
+fn test_foo1() {}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..73,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..72,
+ focus_range: 58..67,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: Some(
+ All(
+ [
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "foo",
+ },
+ ),
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "bar",
+ },
+ ),
+ ],
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+    #[test]
+    fn test_runnables_no_test_function_in_module() {
+        // A module that contains no `#[test]` functions must not produce a
+        // TestMod runnable (nor any other runnable).
+        check(
+            r#"
+//- /lib.rs
+$0
+mod test_mod {
+    fn foo1() {}
+}
+"#,
+            &[],
+            expect![[r#"
+                []
+            "#]],
+        );
+    }
+
+ #[test]
+ fn test_doc_runnables_impl_mod() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+struct Foo;$0
+impl Foo {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+ "#,
+ &[DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 27..81,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "foo::Foo::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_in_macro() {
+ check(
+ r#"
+//- /lib.rs
+$0
+macro_rules! gen {
+ () => {
+ #[test]
+ fn foo_test() {}
+ }
+}
+macro_rules! gen2 {
+ () => {
+ mod tests2 {
+ #[test]
+ fn foo_test2() {}
+ }
+ }
+}
+mod tests {
+ gen!();
+}
+gen2!();
+"#,
+ &[TestMod, TestMod, Test, Test, TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..237,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 202..227,
+ focus_range: 206..211,
+ name: "tests",
+ kind: Module,
+ description: "mod tests",
+ },
+ kind: TestMod {
+ path: "tests",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 218..225,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 228..236,
+ name: "foo_test2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests2::foo_test2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 228..236,
+ name: "tests2",
+ kind: Module,
+ description: "mod tests2",
+ },
+ kind: TestMod {
+ path: "tests2",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn big_mac() {
+ check(
+ r#"
+//- /lib.rs
+$0
+macro_rules! foo {
+ () => {
+ mod foo_tests {
+ #[test]
+ fn foo0() {}
+ #[test]
+ fn foo1() {}
+ #[test]
+ fn foo2() {}
+ }
+ };
+}
+foo!();
+"#,
+ &[Test, Test, Test, TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo_tests",
+ kind: Module,
+ description: "mod foo_tests",
+ },
+ kind: TestMod {
+ path: "foo_tests",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+    #[test]
+    fn dont_recurse_in_outline_submodules() {
+        // The cursor is in lib.rs; tests that live in the out-of-line module
+        // `m` (a separate file) must not be reported for this file.
+        check(
+            r#"
+//- /lib.rs
+$0
+mod m;
+//- /m.rs
+mod tests {
+    #[test]
+    fn t() {}
+}
+"#,
+            &[],
+            expect![[r#"
+                []
+            "#]],
+        );
+    }
+
+ #[test]
+ fn outline_submodule1() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod m;
+//- /m.rs
+#[test]
+fn t0() {}
+#[test]
+fn t1() {}
+"#,
+ &[TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..7,
+ focus_range: 5..6,
+ name: "m",
+ kind: Module,
+ description: "mod m",
+ },
+ kind: TestMod {
+ path: "m",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn outline_submodule2() {
+ check(
+ r#"
+//- /lib.rs
+mod m;
+//- /m.rs
+$0
+#[test]
+fn t0() {}
+#[test]
+fn t1() {}
+"#,
+ &[TestMod, Test, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 0..39,
+ name: "m",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "m",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 1..19,
+ focus_range: 12..14,
+ name: "t0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "m::t0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 20..38,
+ focus_range: 31..33,
+ name: "t1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "m::t1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn attributed_module() {
+ check(
+ r#"
+//- proc_macros: identity
+//- /lib.rs
+$0
+#[proc_macros::identity]
+mod module {
+ #[test]
+ fn t0() {}
+ #[test]
+ fn t1() {}
+}
+"#,
+ &[TestMod, Test, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 26..94,
+ focus_range: 30..36,
+ name: "module",
+ kind: Module,
+ description: "mod module",
+ },
+ kind: TestMod {
+ path: "module",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 43..65,
+ focus_range: 58..60,
+ name: "t0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "module::t0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 70..92,
+ focus_range: 85..87,
+ name: "t1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "module::t1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_no_tests() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_direct_fn_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ #[test]
+ fn foo_test() {
+ super::foo()
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 31..85,
+ focus_range: 46..54,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_direct_struct_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+struct Fo$0o;
+fn foo(arg: &Foo) { };
+
+mod tests {
+ use super::*;
+
+ #[test]
+ fn foo_test() {
+ foo(Foo);
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 71..122,
+ focus_range: 86..94,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_indirect_fn_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ use super::foo;
+
+ fn check1() {
+ check2()
+ }
+
+ fn check2() {
+ foo()
+ }
+
+ #[test]
+ fn foo_test() {
+ check1()
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 133..183,
+ focus_range: 148..156,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn tests_are_unique() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ use super::foo;
+
+ #[test]
+ fn foo_test() {
+ foo();
+ foo();
+ }
+
+ #[test]
+ fn foo2_test() {
+ foo();
+ foo();
+ }
+
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 52..115,
+ focus_range: 67..75,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 121..185,
+ focus_range: 136..145,
+ name: "foo2_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo2_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn doc_test_type_params() {
+ check(
+ r#"
+//- /lib.rs
+$0
+struct Foo<T, U>;
+
+impl<T, U> Foo<T, U> {
+ /// ```rust
+ /// ````
+ fn t() {}
+}
+"#,
+ &[DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 47..85,
+ name: "t",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Foo<T, U>::t",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn doc_test_macro_export_mbe() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod foo;
+
+//- /foo.rs
+/// ```
+/// fn foo() {
+/// }
+/// ```
+#[macro_export]
+macro_rules! foo {
+ () => {
+
+ };
+}
+"#,
+ &[],
+ expect![[r#"
+ []
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs
+$0
+/// ```
+/// fn foo() {
+/// }
+/// ```
+#[macro_export]
+macro_rules! foo {
+ () => {
+
+ };
+}
+"#,
+ &[DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..94,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs
new file mode 100644
index 000000000..15cb89dcc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs
@@ -0,0 +1,71 @@
+use std::sync::Arc;
+
+use ide_db::{
+ base_db::{salsa::Durability, CrateGraph, SourceDatabase},
+ FxHashMap, RootDatabase,
+};
+
+// Feature: Shuffle Crate Graph
+//
+// Randomizes all crate IDs in the crate graph, for debugging.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Shuffle Crate Graph**
+// |===
+// Rebuilds the crate graph from scratch with the crates inserted in a random
+// order, so every crate receives a fresh, shuffled `CrateId`. Used to flush
+// out code that incorrectly relies on crate-id stability.
+pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
+    let crate_graph = db.crate_graph();
+
+    // Pick a random insertion order; ids are assigned in this order below.
+    let mut shuffled_ids = crate_graph.iter().collect::<Vec<_>>();
+    shuffle(&mut shuffled_ids);
+
+    let mut new_graph = CrateGraph::default();
+
+    // First pass: re-add every crate root in shuffled order, recording the
+    // old-id -> new-id mapping so dependency edges can be rewritten below.
+    let mut map = FxHashMap::default();
+    for old_id in shuffled_ids.iter().copied() {
+        let data = &crate_graph[old_id];
+        let new_id = new_graph.add_crate_root(
+            data.root_file_id,
+            data.edition,
+            data.display_name.clone(),
+            data.version.clone(),
+            data.cfg_options.clone(),
+            data.potential_cfg_options.clone(),
+            data.env.clone(),
+            data.proc_macro.clone(),
+            data.is_proc_macro,
+            data.origin.clone(),
+        );
+        map.insert(old_id, new_id);
+    }
+
+    // Second pass: copy the dependency edges, translating both endpoints
+    // through `map`. The edges come from an already-valid graph, so `add_dep`
+    // presumably cannot fail here — hence the `unwrap`.
+    for old_id in shuffled_ids.iter().copied() {
+        let data = &crate_graph[old_id];
+        for dep in &data.dependencies {
+            let mut new_dep = dep.clone();
+            new_dep.crate_id = map[&dep.crate_id];
+            new_graph.add_dep(map[&old_id], new_dep).unwrap();
+        }
+    }
+
+    db.set_crate_graph_with_durability(Arc::new(new_graph), Durability::HIGH);
+}
+
+/// Shuffles `slice` in place with a random permutation.
+///
+/// Walks from the back, swapping each position with a randomly chosen
+/// strictly-earlier index (`rand_range(0..remaining)` excludes `remaining`
+/// itself). Perfect uniformity is not needed for this debugging feature.
+fn shuffle<T>(slice: &mut [T]) {
+    let mut rng = oorandom::Rand32::new(seed());
+
+    // `slice.len() - 1` would underflow and panic on an empty slice; with
+    // `saturating_sub` the empty and single-element cases are simply no-ops.
+    let mut remaining = slice.len().saturating_sub(1);
+    while remaining > 0 {
+        let index = rng.rand_range(0..remaining as u32);
+        slice.swap(remaining, index as usize);
+        remaining -= 1;
+    }
+}
+
+/// Returns a process-random 64-bit seed for the shuffle RNG.
+///
+/// Avoids pulling in a dedicated RNG crate: the standard library's
+/// `RandomState` is randomly keyed, so finishing a freshly built hasher
+/// yields an unpredictable value.
+fn seed() -> u64 {
+    use std::collections::hash_map::RandomState;
+    use std::hash::{BuildHasher, Hasher};
+
+    let state = RandomState::new();
+    let hasher = state.build_hasher();
+    hasher.finish()
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
new file mode 100644
index 000000000..fedc1a435
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -0,0 +1,1334 @@
+//! This module provides primitives for showing type and function parameter information when editing
+//! a call or use-site.
+
+use std::collections::BTreeSet;
+
+use either::Either;
+use hir::{AssocItem, GenericParam, HasAttrs, HirDisplay, Semantics, Trait};
+use ide_db::{active_parameter::callable_for_node, base_db::FilePosition};
+use stdx::format_to;
+use syntax::{
+ algo,
+ ast::{self, HasArgList},
+ match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize,
+};
+
+use crate::RootDatabase;
+
+/// Contains information about an item signature as seen from a use site.
+///
+/// This includes the "active parameter", which is the parameter whose value is currently being
+/// edited.
+#[derive(Debug)]
+pub struct SignatureHelp {
+    /// Documentation of the called item, if it has any.
+    pub doc: Option<String>,
+    /// The rendered signature text, e.g. `fn foo(x: u32, y: u32) -> u32`.
+    pub signature: String,
+    /// Index (into `parameters`) of the parameter under the cursor, if known.
+    pub active_parameter: Option<usize>,
+    /// Ranges into `signature` covering each parameter's label text.
+    parameters: Vec<TextRange>,
+}
+
+impl SignatureHelp {
+    /// Returns the text of each parameter label, sliced out of `signature`.
+    pub fn parameter_labels(&self) -> impl Iterator<Item = &str> + '_ {
+        self.parameters.iter().map(move |&it| &self.signature[it])
+    }
+
+    /// Returns the ranges into `signature` covering the parameter labels.
+    pub fn parameter_ranges(&self) -> &[TextRange] {
+        &self.parameters
+    }
+
+    /// Appends a call parameter; `(` marks the start of the parameter list.
+    fn push_call_param(&mut self, param: &str) {
+        self.push_param('(', param);
+    }
+
+    /// Appends a generic parameter; `<` marks the start of the parameter list.
+    fn push_generic_param(&mut self, param: &str) {
+        self.push_param('<', param);
+    }
+
+    /// Appends `param` to `signature`, prefixing it with `", "` unless it is
+    /// the first one (i.e. the signature still ends with the opening
+    /// delimiter), and records the range the parameter text occupies.
+    fn push_param(&mut self, opening_delim: char, param: &str) {
+        if !self.signature.ends_with(opening_delim) {
+            self.signature.push_str(", ");
+        }
+        let start = TextSize::of(&self.signature);
+        self.signature.push_str(param);
+        let end = TextSize::of(&self.signature);
+        self.parameters.push(TextRange::new(start, end))
+    }
+}
+
+/// Computes parameter information for the given position.
+pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Option<SignatureHelp> {
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id);
+    let file = file.syntax();
+    let token = file
+        .token_at_offset(position.offset)
+        .left_biased()
+        // if the cursor is sandwiched between two space tokens and the call is unclosed
+        // this prevents us from leaving the CallExpression
+        .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
+    // Map the token into macro expansions so help also works inside macro calls.
+    let token = sema.descend_into_macros_single(token);
+
+    // Walk up from the token until an argument list (call) or a generic
+    // argument list is found, and dispatch to the matching renderer.
+    for node in token.parent_ancestors() {
+        match_ast! {
+            match node {
+                ast::ArgList(arg_list) => {
+                    // A cursor sitting on the closing `)` is outside the call.
+                    let cursor_outside = arg_list.r_paren_token().as_ref() == Some(&token);
+                    if cursor_outside {
+                        return None;
+                    }
+                    return signature_help_for_call(&sema, token);
+                },
+                ast::GenericArgList(garg_list) => {
+                    // Likewise for a cursor sitting on the closing `>`.
+                    let cursor_outside = garg_list.r_angle_token().as_ref() == Some(&token);
+                    if cursor_outside {
+                        return None;
+                    }
+                    return signature_help_for_generics(&sema, token);
+                },
+                _ => (),
+            }
+        }
+    }
+
+    None
+}
+
+/// Renders signature help for a call expression: function, method, tuple
+/// struct/enum-variant constructor, closure, or fn-pointer call.
+fn signature_help_for_call(
+    sema: &Semantics<'_, RootDatabase>,
+    token: SyntaxToken,
+) -> Option<SignatureHelp> {
+    // Find the calling expression and its NameRef
+    let mut node = token.parent()?;
+    let calling_node = loop {
+        if let Some(callable) = ast::CallableExpr::cast(node.clone()) {
+            if callable
+                .arg_list()
+                .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start()))
+            {
+                break callable;
+            }
+        }
+
+        // Stop at multi-line expressions, since the signature of the outer call is not very
+        // helpful inside them.
+        if let Some(expr) = ast::Expr::cast(node.clone()) {
+            if expr.syntax().text().contains_char('\n') {
+                return None;
+            }
+        }
+
+        node = node.parent()?;
+    };
+
+    let (callable, active_parameter) = callable_for_node(sema, &calling_node, &token)?;
+
+    let mut res =
+        SignatureHelp { doc: None, signature: String::new(), parameters: vec![], active_parameter };
+
+    let db = sema.db;
+    // For plain functions, keep the declared parameters around as a fallback
+    // for when inference yields {unknown} (see the APIT comment below).
+    let mut fn_params = None;
+    match callable.kind() {
+        hir::CallableKind::Function(func) => {
+            res.doc = func.docs(db).map(|it| it.into());
+            format_to!(res.signature, "fn {}", func.name(db));
+            fn_params = Some(match callable.receiver_param(db) {
+                Some(_self) => func.params_without_self(db),
+                None => func.assoc_fn_params(db),
+            });
+        }
+        hir::CallableKind::TupleStruct(strukt) => {
+            res.doc = strukt.docs(db).map(|it| it.into());
+            format_to!(res.signature, "struct {}", strukt.name(db));
+        }
+        hir::CallableKind::TupleEnumVariant(variant) => {
+            res.doc = variant.docs(db).map(|it| it.into());
+            format_to!(
+                res.signature,
+                "enum {}::{}",
+                variant.parent_enum(db).name(db),
+                variant.name(db)
+            );
+        }
+        hir::CallableKind::Closure | hir::CallableKind::FnPtr => (),
+    }
+
+    res.signature.push('(');
+    {
+        if let Some(self_param) = callable.receiver_param(db) {
+            format_to!(res.signature, "{}", self_param)
+        }
+        let mut buf = String::new();
+        for (idx, (pat, ty)) in callable.params(db).into_iter().enumerate() {
+            buf.clear();
+            if let Some(pat) = pat {
+                match pat {
+                    Either::Left(_self) => format_to!(buf, "self: "),
+                    Either::Right(pat) => format_to!(buf, "{}: ", pat),
+                }
+            }
+            // APITs (argument position `impl Trait`s) are inferred as {unknown} as the user is
+            // in the middle of entering call arguments.
+            // In that case, fall back to render definitions of the respective parameters.
+            // This is overly conservative: we do not substitute known type vars
+            // (see FIXME in tests::impl_trait) and falling back on any unknowns.
+            match (ty.contains_unknown(), fn_params.as_deref()) {
+                (true, Some(fn_params)) => format_to!(buf, "{}", fn_params[idx].ty().display(db)),
+                _ => format_to!(buf, "{}", ty.display(db)),
+            }
+            res.push_call_param(&buf);
+        }
+    }
+    res.signature.push(')');
+
+    // Unit return types are omitted from the rendered signature.
+    let mut render = |ret_type: hir::Type| {
+        if !ret_type.is_unit() {
+            format_to!(res.signature, " -> {}", ret_type.display(db));
+        }
+    };
+    // Prefer the declared return type when the inferred one is still unknown
+    // (mid-edit); constructors don't render a return type at all.
+    match callable.kind() {
+        hir::CallableKind::Function(func) if callable.return_type().contains_unknown() => {
+            render(func.ret_type(db))
+        }
+        hir::CallableKind::Function(_) | hir::CallableKind::Closure | hir::CallableKind::FnPtr => {
+            render(callable.return_type())
+        }
+        hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {}
+    }
+    Some(res)
+}
+
+/// Renders signature help for a generic argument list, e.g. `Foo<$0>` in a
+/// path or `recv.method::<$0>()`.
+fn signature_help_for_generics(
+    sema: &Semantics<'_, RootDatabase>,
+    token: SyntaxToken,
+) -> Option<SignatureHelp> {
+    let parent = token.parent()?;
+    let arg_list = parent
+        .ancestors()
+        .filter_map(ast::GenericArgList::cast)
+        .find(|list| list.syntax().text_range().contains(token.text_range().start()))?;
+
+    // The active parameter is the number of written arguments that end
+    // before the cursor position.
+    let mut active_parameter = arg_list
+        .generic_args()
+        .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
+        .count();
+
+    let first_arg_is_non_lifetime = arg_list
+        .generic_args()
+        .next()
+        .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_)));
+
+    // Figure out which definition the generic arguments apply to: either a
+    // resolvable path, or the method of an enclosing method call.
+    let mut generics_def = if let Some(path) =
+        arg_list.syntax().ancestors().find_map(ast::Path::cast)
+    {
+        let res = sema.resolve_path(&path)?;
+        let generic_def: hir::GenericDef = match res {
+            hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(),
+            hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(),
+            hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(),
+            hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(),
+            hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(),
+            // Everything else either is not generic or cannot take explicit
+            // generic arguments here, so there is nothing to show.
+            hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))
+            | hir::PathResolution::Def(hir::ModuleDef::Const(_))
+            | hir::PathResolution::Def(hir::ModuleDef::Macro(_))
+            | hir::PathResolution::Def(hir::ModuleDef::Module(_))
+            | hir::PathResolution::Def(hir::ModuleDef::Static(_)) => return None,
+            hir::PathResolution::BuiltinAttr(_)
+            | hir::PathResolution::ToolModule(_)
+            | hir::PathResolution::Local(_)
+            | hir::PathResolution::TypeParam(_)
+            | hir::PathResolution::ConstParam(_)
+            | hir::PathResolution::SelfType(_)
+            | hir::PathResolution::DeriveHelper(_) => return None,
+        };
+
+        generic_def
+    } else if let Some(method_call) = arg_list.syntax().parent().and_then(ast::MethodCallExpr::cast)
+    {
+        // recv.method::<$0>()
+        let method = sema.resolve_method_call(&method_call)?;
+        method.into()
+    } else {
+        return None;
+    };
+
+    let mut res = SignatureHelp {
+        doc: None,
+        signature: String::new(),
+        parameters: vec![],
+        active_parameter: None,
+    };
+
+    let db = sema.db;
+    match generics_def {
+        hir::GenericDef::Function(it) => {
+            res.doc = it.docs(db).map(|it| it.into());
+            format_to!(res.signature, "fn {}", it.name(db));
+        }
+        hir::GenericDef::Adt(hir::Adt::Enum(it)) => {
+            res.doc = it.docs(db).map(|it| it.into());
+            format_to!(res.signature, "enum {}", it.name(db));
+        }
+        hir::GenericDef::Adt(hir::Adt::Struct(it)) => {
+            res.doc = it.docs(db).map(|it| it.into());
+            format_to!(res.signature, "struct {}", it.name(db));
+        }
+        hir::GenericDef::Adt(hir::Adt::Union(it)) => {
+            res.doc = it.docs(db).map(|it| it.into());
+            format_to!(res.signature, "union {}", it.name(db));
+        }
+        hir::GenericDef::Trait(it) => {
+            res.doc = it.docs(db).map(|it| it.into());
+            format_to!(res.signature, "trait {}", it.name(db));
+        }
+        hir::GenericDef::TypeAlias(it) => {
+            res.doc = it.docs(db).map(|it| it.into());
+            format_to!(res.signature, "type {}", it.name(db));
+        }
+        hir::GenericDef::Variant(it) => {
+            // In paths, generics of an enum can be specified *after* one of its variants.
+            // eg. `None::<u8>`
+            // We'll use the signature of the enum, but include the docs of the variant.
+            res.doc = it.docs(db).map(|it| it.into());
+            let it = it.parent_enum(db);
+            format_to!(res.signature, "enum {}", it.name(db));
+            generics_def = it.into();
+        }
+        // These don't have generic args that can be specified
+        hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None,
+    }
+
+    let params = generics_def.params(sema.db);
+    // Lifetime arguments can be elided at the use site: if the first written
+    // argument is not a lifetime, skip past the definition's leading lifetime
+    // parameters when computing the active index.
+    let num_lifetime_params =
+        params.iter().take_while(|param| matches!(param, GenericParam::LifetimeParam(_))).count();
+    if first_arg_is_non_lifetime {
+        // Lifetime parameters were omitted.
+        active_parameter += num_lifetime_params;
+    }
+    res.active_parameter = Some(active_parameter);
+
+    res.signature.push('<');
+    let mut buf = String::new();
+    for param in params {
+        // Implicit type parameters are not written by the user, so hide them.
+        if let hir::GenericParam::TypeParam(ty) = param {
+            if ty.is_implicit(db) {
+                continue;
+            }
+        }
+
+        buf.clear();
+        format_to!(buf, "{}", param.display(db));
+        res.push_generic_param(&buf);
+    }
+    if let hir::GenericDef::Trait(tr) = generics_def {
+        add_assoc_type_bindings(db, &mut res, tr, arg_list);
+    }
+    res.signature.push('>');
+
+    Some(res)
+}
+
+/// Appends associated type bindings (`AssocTy = …`) of trait `tr` to the
+/// rendered generic parameter list — but only when `args` sits in type-bound
+/// position, where such bindings are syntactically allowed.
+fn add_assoc_type_bindings(
+    db: &RootDatabase,
+    res: &mut SignatureHelp,
+    tr: Trait,
+    args: ast::GenericArgList,
+) {
+    if args.syntax().ancestors().find_map(ast::TypeBound::cast).is_none() {
+        // Assoc type bindings are only valid in type bound position.
+        return;
+    }
+
+    // Names of the bindings the user has already written.
+    let present_bindings = args
+        .generic_args()
+        .filter_map(|arg| match arg {
+            ast::GenericArg::AssocTypeArg(arg) => arg.name_ref().map(|n| n.to_string()),
+            _ => None,
+        })
+        .collect::<BTreeSet<_>>();
+
+    // Render the already-present bindings first …
+    let mut buf = String::new();
+    for binding in &present_bindings {
+        buf.clear();
+        format_to!(buf, "{} = …", binding);
+        res.push_generic_param(&buf);
+    }
+
+    // … then every remaining associated type of the trait and its supertraits.
+    for item in tr.items_with_supertraits(db) {
+        if let AssocItem::TypeAlias(ty) = item {
+            let name = ty.name(db).to_smol_str();
+            if !present_bindings.contains(&*name) {
+                buf.clear();
+                format_to!(buf, "{} = …", name);
+                res.push_generic_param(&buf);
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::iter;
+
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::{fixture::ChangeFixture, FilePosition};
+ use stdx::format_to;
+
+ use crate::RootDatabase;
+
+    /// Creates analysis from a multi-file fixture, returns positions marked with $0.
+    pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
+        let change_fixture = ChangeFixture::parse(ra_fixture);
+        let mut database = RootDatabase::default();
+        database.apply_change(change_fixture.change);
+        let (file_id, range_or_offset) =
+            change_fixture.file_position.expect("expected a marker ($0)");
+        // Signature help is offset-based; a $0..$0 range marker would panic here.
+        let offset = range_or_offset.expect_offset();
+        (database, FilePosition { file_id, offset })
+    }
+
+    /// Runs signature help on the fixture and compares the rendered result:
+    /// optional docs, a `------` separator, the signature line, and a marker
+    /// line where the active parameter is underlined with `^` and the other
+    /// parameters with `-`.
+    fn check(ra_fixture: &str, expect: Expect) {
+        // Implicitly add `Sized` to avoid noisy `T: ?Sized` in the results.
+        let fixture = format!(
+            r#"
+#[lang = "sized"] trait Sized {{}}
+{ra_fixture}
+            "#
+        );
+        let (db, position) = position(&fixture);
+        let sig_help = crate::signature_help::signature_help(&db, position);
+        let actual = match sig_help {
+            Some(sig_help) => {
+                let mut rendered = String::new();
+                if let Some(docs) = &sig_help.doc {
+                    format_to!(rendered, "{}\n------\n", docs.as_str());
+                }
+                format_to!(rendered, "{}\n", sig_help.signature);
+                let mut offset = 0;
+                for (i, range) in sig_help.parameter_ranges().iter().enumerate() {
+                    let is_active = sig_help.active_parameter == Some(i);
+
+                    let start = u32::from(range.start());
+                    let gap = start.checked_sub(offset).unwrap_or_else(|| {
+                        panic!("parameter ranges out of order: {:?}", sig_help.parameter_ranges())
+                    });
+                    rendered.extend(iter::repeat(' ').take(gap as usize));
+                    let param_text = &sig_help.signature[*range];
+                    // Count chars, not bytes: parameter text can contain
+                    // non-ASCII such as the `…` placeholder.
+                    let width = param_text.chars().count();
+                    let marker = if is_active { '^' } else { '-' };
+                    rendered.extend(iter::repeat(marker).take(width));
+                    offset += gap + u32::from(range.len());
+                }
+                if !sig_help.parameter_ranges().is_empty() {
+                    format_to!(rendered, "\n");
+                }
+                rendered
+            }
+            None => String::new(),
+        };
+        expect.assert_eq(&actual);
+    }
+
+ #[test]
+ fn test_fn_signature_two_args() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo($03, ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3$0, ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3,$0 ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ------ ^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3, $0); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ------ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_two_args_empty() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo($0); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_two_args_first_generics() {
+ check(
+ r#"
+fn foo<T, U: Copy + Display>(x: T, y: U) -> u32
+ where T: Copy + Display, U: Debug
+{ x + y }
+
+fn bar() { foo($03, ); }
+"#,
+ expect![[r#"
+ fn foo(x: i32, y: U) -> u32
+ ^^^^^^ ----
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_no_params() {
+ check(
+ r#"
+fn foo<T>() -> T where T: Copy + Display {}
+fn bar() { foo($0); }
+"#,
+ expect![[r#"
+ fn foo() -> T
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_impl() {
+ check(
+ r#"
+struct F;
+impl F { pub fn new() { } }
+fn bar() {
+ let _ : F = F::new($0);
+}
+"#,
+ expect![[r#"
+ fn new()
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_self() {
+ check(
+ r#"
+struct S;
+impl S { pub fn do_it(&self) {} }
+
+fn bar() {
+ let s: S = S;
+ s.do_it($0);
+}
+"#,
+ expect![[r#"
+ fn do_it(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_with_arg() {
+ check(
+ r#"
+struct S;
+impl S {
+ fn foo(&self, x: i32) {}
+}
+
+fn main() { S.foo($0); }
+"#,
+ expect![[r#"
+ fn foo(&self, x: i32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_generic_method() {
+ check(
+ r#"
+struct S<T>(T);
+impl<T> S<T> {
+ fn foo(&self, x: T) {}
+}
+
+fn main() { S(1u32).foo($0); }
+"#,
+ expect![[r#"
+ fn foo(&self, x: u32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_with_arg_as_assoc_fn() {
+ check(
+ r#"
+struct S;
+impl S {
+ fn foo(&self, x: i32) {}
+}
+
+fn main() { S::foo($0); }
+"#,
+ expect![[r#"
+ fn foo(self: &S, x: i32)
+ ^^^^^^^^ ------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_simple() {
+ check(
+ r#"
+/// test
+// non-doc-comment
+fn foo(j: u32) -> u32 {
+ j
+}
+
+fn bar() {
+ let _ = foo($0);
+}
+"#,
+ expect![[r#"
+ test
+ ------
+ fn foo(j: u32) -> u32
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs() {
+ check(
+ r#"
+/// Adds one to the number given.
+///
+/// # Examples
+///
+/// ```
+/// let five = 5;
+///
+/// assert_eq!(6, my_crate::add_one(5));
+/// ```
+pub fn add_one(x: i32) -> i32 {
+ x + 1
+}
+
+pub fn do() {
+ add_one($0
+}"#,
+ expect![[r##"
+ Adds one to the number given.
+
+ # Examples
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ ------
+ fn add_one(x: i32) -> i32
+ ^^^^^^
+ "##]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_impl() {
+ check(
+ r#"
+struct addr;
+impl addr {
+ /// Adds one to the number given.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let five = 5;
+ ///
+ /// assert_eq!(6, my_crate::add_one(5));
+ /// ```
+ pub fn add_one(x: i32) -> i32 {
+ x + 1
+ }
+}
+
+pub fn do_it() {
+ addr {};
+ addr::add_one($0);
+}
+"#,
+ expect![[r##"
+ Adds one to the number given.
+
+ # Examples
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ ------
+ fn add_one(x: i32) -> i32
+ ^^^^^^
+ "##]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_from_actix() {
+ check(
+ r#"
+trait Actor {
+ /// Actor execution context type
+ type Context;
+}
+trait WriteHandler<E>
+where
+ Self: Actor
+{
+ /// Method is called when writer finishes.
+ ///
+ /// By default this method stops actor's `Context`.
+ fn finished(&mut self, ctx: &mut Self::Context) {}
+}
+
+fn foo(mut r: impl WriteHandler<()>) {
+ r.finished($0);
+}
+"#,
+ expect![[r#"
+ Method is called when writer finishes.
+
+ By default this method stops actor's `Context`.
+ ------
+ fn finished(&mut self, ctx: &mut <impl WriteHandler<()> as Actor>::Context)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn call_info_bad_offset() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo $0 (3, ); }
+"#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn outside_of_arg_list() {
+ check(
+ r#"
+fn foo(a: u8) {}
+fn f() {
+ foo(123)$0
+}
+"#,
+ expect![[]],
+ );
+ check(
+ r#"
+fn foo<T>(a: u8) {}
+fn f() {
+ foo::<u32>$0()
+}
+"#,
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_nested_method_in_lambda() {
+ check(
+ r#"
+struct Foo;
+impl Foo { fn bar(&self, _: u32) { } }
+
+fn bar(_: u32) { }
+
+fn main() {
+ let foo = Foo;
+ std::thread::spawn(move || foo.bar($0));
+}
+"#,
+ expect![[r#"
+ fn bar(&self, _: u32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_for_tuple_structs() {
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32);
+fn main() {
+ let s = S(0, $0);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S(u32, i32)
+ --- ^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_struct() {
+ check(
+ r#"
+struct S<T>(T);
+fn main() {
+ let s = S($0);
+}
+"#,
+ expect![[r#"
+ struct S({unknown})
+ ^^^^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_for_enum_variants() {
+ check(
+ r#"
+enum E {
+ /// A Variant
+ A(i32),
+ /// Another
+ B,
+ /// And C
+ C { a: i32, b: i32 }
+}
+
+fn main() {
+ let a = E::A($0);
+}
+"#,
+ expect![[r#"
+ A Variant
+ ------
+ enum E::A(i32)
+ ^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn cant_call_struct_record() {
+ check(
+ r#"
+struct S { x: u32, y: i32 }
+fn main() {
+ let s = S($0);
+}
+"#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn cant_call_enum_record() {
+ check(
+ r#"
+enum E {
+ /// A Variant
+ A(i32),
+ /// Another
+ B,
+ /// And C
+ C { a: i32, b: i32 }
+}
+
+fn main() {
+ let a = E::C($0);
+}
+"#,
+ expect![[""]],
+ );
+ }
+
+    // Signature help should still resolve through macro expansions.
+    #[test]
+    fn fn_signature_for_call_in_macro() {
+        check(
+            r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo() { }
+id! {
+    fn bar() { foo($0); }
+}
+"#,
+            expect![[r#"
+                fn foo()
+            "#]],
+        );
+    }
+
+    // Closures and function pointers render as anonymous signatures.
+    #[test]
+    fn call_info_for_lambdas() {
+        check(
+            r#"
+struct S;
+fn foo(s: S) -> i32 { 92 }
+fn main() {
+    (|s| foo(s))($0)
+}
+        "#,
+            expect![[r#"
+                (s: S) -> i32
+                 ^^^^
+            "#]],
+        )
+    }
+
+    #[test]
+    fn call_info_for_fn_ptr() {
+        check(
+            r#"
+fn main(f: fn(i32, f64) -> char) {
+    f(0, $0)
+}
+        "#,
+            expect![[r#"
+                (i32, f64) -> char
+                 ---  ^^^
+            "#]],
+        )
+    }
+
+    // Help must also work while the call expression is still unterminated.
+    #[test]
+    fn call_info_for_unclosed_call() {
+        check(
+            r#"
+fn foo(foo: u32, bar: u32) {}
+fn main() {
+    foo($0
+}"#,
+            expect![[r#"
+                fn foo(foo: u32, bar: u32)
+                       ^^^^^^^^  --------
+            "#]],
+        );
+        // check with surrounding space
+        check(
+            r#"
+fn foo(foo: u32, bar: u32) {}
+fn main() {
+    foo( $0
+}"#,
+            expect![[r#"
+                fn foo(foo: u32, bar: u32)
+                       ^^^^^^^^  --------
+            "#]],
+        )
+    }
+
+    // Active-parameter tracking across arguments that span several lines.
+    #[test]
+    fn test_multiline_argument() {
+        check(
+            r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+    callee(match 0 {
+        0 => 1,$0
+    })
+}"#,
+            expect![[r#""#]],
+        );
+        check(
+            r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+    callee(match 0 {
+        0 => 1,
+    },$0)
+}"#,
+            expect![[r#"
+                fn callee(a: u8, b: u8)
+                          -----  ^^^^^
+            "#]],
+        );
+        check(
+            r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+    callee($0match 0 {
+        0 => 1,
+    })
+}"#,
+            expect![[r#"
+                fn callee(a: u8, b: u8)
+                          ^^^^^  -----
+            "#]],
+        );
+    }
+
+    // Generic-argument lists (`Foo::<...>`) get signature help too.
+    #[test]
+    fn test_generics_simple() {
+        check(
+            r#"
+/// Option docs.
+enum Option<T> {
+    Some(T),
+    None,
+}
+
+fn f() {
+    let opt: Option<$0
+}
+        "#,
+            expect![[r#"
+                Option docs.
+                ------
+                enum Option<T>
+                            ^
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_generics_on_variant() {
+        check(
+            r#"
+/// Option docs.
+enum Option<T> {
+    /// Some docs.
+    Some(T),
+    /// None docs.
+    None,
+}
+
+use Option::*;
+
+fn f() {
+    None::<$0
+}
+        "#,
+            expect![[r#"
+                None docs.
+                ------
+                enum Option<T>
+                            ^
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_lots_of_generics() {
+        check(
+            r#"
+trait Tr<T> {}
+
+struct S<T>(T);
+
+impl<T> S<T> {
+    fn f<G, H>(g: G, h: impl Tr<G>) where G: Tr<()> {}
+}
+
+fn f() {
+    S::<u8>::f::<(), $0
+}
+        "#,
+            expect![[r#"
+                fn f<G: Tr<()>, H>
+                     ---------  ^
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_generics_in_trait_ufcs() {
+        check(
+            r#"
+trait Tr {
+    fn f<T: Tr, U>() {}
+}
+
+struct S;
+
+impl Tr for S {}
+
+fn f() {
+    <S as Tr>::f::<$0
+}
+        "#,
+            expect![[r#"
+                fn f<T: Tr, U>
+                     ^^^^^  -
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_generics_in_method_call() {
+        check(
+            r#"
+struct S;
+
+impl S {
+    fn f<T>(&self) {}
+}
+
+fn f() {
+    S.f::<$0
+}
+        "#,
+            expect![[r#"
+                fn f<T>
+                     ^
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_generic_param_in_method_call() {
+        check(
+            r#"
+struct Foo;
+impl Foo {
+    fn test<V>(&mut self, val: V) {}
+}
+fn sup() {
+    Foo.test($0)
+}
+"#,
+            expect![[r#"
+                fn test(&mut self, val: V)
+                                   ^^^^^^
+            "#]],
+        );
+    }
+
+    // Lifetime and const generic parameters are skipped/kept correctly
+    // when computing the active generic argument.
+    #[test]
+    fn test_generic_kinds() {
+        check(
+            r#"
+fn callee<'a, const A: u8, T, const C: u8>() {}
+
+fn f() {
+    callee::<'static, $0
+}
+        "#,
+            expect![[r#"
+                fn callee<'a, const A: u8, T, const C: u8>
+                          --  ^^^^^^^^^^^  -  -----------
+            "#]],
+        );
+        check(
+            r#"
+fn callee<'a, const A: u8, T, const C: u8>() {}
+
+fn f() {
+    callee::<NON_LIFETIME$0
+}
+        "#,
+            expect![[r#"
+                fn callee<'a, const A: u8, T, const C: u8>
+                          --  ^^^^^^^^^^^  -  -----------
+            "#]],
+        );
+    }
+
+    // Associated types appear as trailing pseudo-parameters (`Assoc = …`)
+    // inside type-bound positions only.
+    #[test]
+    fn test_trait_assoc_types() {
+        check(
+            r#"
+trait Trait<'a, T> {
+    type Assoc;
+}
+fn f() -> impl Trait<(), $0
+        "#,
+            expect![[r#"
+                trait Trait<'a, T, Assoc = …>
+                            --  -  ^^^^^^^^^
+            "#]],
+        );
+        check(
+            r#"
+trait Iterator {
+    type Item;
+}
+fn f() -> impl Iterator<$0
+        "#,
+            expect![[r#"
+                trait Iterator<Item = …>
+                               ^^^^^^^^
+            "#]],
+        );
+        check(
+            r#"
+trait Iterator {
+    type Item;
+}
+fn f() -> impl Iterator<Item = $0
+        "#,
+            expect![[r#"
+                trait Iterator<Item = …>
+                               ^^^^^^^^
+            "#]],
+        );
+        check(
+            r#"
+trait Tr {
+    type A;
+    type B;
+}
+fn f() -> impl Tr<$0
+        "#,
+            expect![[r#"
+                trait Tr<A = …, B = …>
+                         ^^^^^  -----
+            "#]],
+        );
+        check(
+            r#"
+trait Tr {
+    type A;
+    type B;
+}
+fn f() -> impl Tr<B$0
+        "#,
+            expect![[r#"
+                trait Tr<A = …, B = …>
+                         ^^^^^  -----
+            "#]],
+        );
+        // Explicitly naming an assoc type reorders it to the front.
+        check(
+            r#"
+trait Tr {
+    type A;
+    type B;
+}
+fn f() -> impl Tr<B = $0
+        "#,
+            expect![[r#"
+                trait Tr<B = …, A = …>
+                         ^^^^^  -----
+            "#]],
+        );
+        check(
+            r#"
+trait Tr {
+    type A;
+    type B;
+}
+fn f() -> impl Tr<B = (), $0
+        "#,
+            expect![[r#"
+                trait Tr<B = …, A = …>
+                         -----  ^^^^^
+            "#]],
+        );
+    }
+
+    // Assoc types inherited from supertraits are listed as well.
+    #[test]
+    fn test_supertrait_assoc() {
+        check(
+            r#"
+trait Super {
+    type SuperTy;
+}
+trait Sub: Super + Super {
+    type SubTy;
+}
+fn f() -> impl Sub<$0
+        "#,
+            expect![[r#"
+                trait Sub<SubTy = …, SuperTy = …>
+                          ^^^^^^^^^  -----------
+            "#]],
+        );
+    }
+
+    #[test]
+    fn no_assoc_types_outside_type_bounds() {
+        check(
+            r#"
+trait Tr<T> {
+    type Assoc;
+}
+
+impl Tr<$0
+        "#,
+            expect![[r#"
+                trait Tr<T>
+                         ^
+            "#]],
+        );
+    }
+
+    #[test]
+    fn impl_trait() {
+        // FIXME: Substitute type vars in impl trait (`U` -> `i8`)
+        check(
+            r#"
+trait Trait<T> {}
+struct Wrap<T>(T);
+fn foo<U>(x: Wrap<impl Trait<U>>) {}
+fn f() {
+    foo::<i8>($0)
+}
+"#,
+            expect![[r#"
+                fn foo(x: Wrap<impl Trait<U>>)
+                       ^^^^^^^^^^^^^^^^^^^^^^
+            "#]],
+        );
+    }
+
+    // UFCS trait-method calls render `self` as an explicit first parameter.
+    #[test]
+    fn fully_qualified_syntax() {
+        check(
+            r#"
+fn f() {
+    trait A { fn foo(&self, other: Self); }
+    A::foo(&self$0, other);
+}
+"#,
+            expect![[r#"
+                fn foo(self: &Self, other: Self)
+                       ^^^^^^^^^^^  -----------
+            "#]],
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
new file mode 100644
index 000000000..497eb1cc1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
@@ -0,0 +1,255 @@
+//! This module provides an SSR assist. It is not desirable to include this
+//! assist in ide_assists because that would require the ide_assists crate to
+//! depend on the ide_ssr crate.
+
+use ide_assists::{Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel};
+use ide_db::{base_db::FileRange, label::Label, source_change::SourceChange, RootDatabase};
+
+/// Builds the two "Apply SSR" assists ("in file" and "in workspace") for an
+/// SSR rule written in a comment covering `frange`.
+///
+/// Returns an empty `Vec` when no SSR rule can be parsed from a comment at
+/// the given range.
+pub(crate) fn ssr_assists(
+    db: &RootDatabase,
+    resolve: &AssistResolveStrategy,
+    frange: FileRange,
+) -> Vec<Assist> {
+    let mut ssr_assists = Vec::with_capacity(2);
+
+    let (match_finder, comment_range) = match ide_ssr::ssr_from_comment(db, frange) {
+        Some(ssr_data) => ssr_data,
+        None => return ssr_assists,
+    };
+    let id = AssistId("ssr", AssistKind::RefactorRewrite);
+
+    // Only compute the (potentially expensive) edits when the client asked
+    // for resolved assists; otherwise both source changes stay `None`.
+    let (source_change_for_file, source_change_for_workspace) = if resolve.should_resolve(&id) {
+        let edits = match_finder.edits();
+
+        let source_change_for_file = {
+            // The current file may have no matches, hence `unwrap_or_default`.
+            let text_edit_for_file = edits.get(&frange.file_id).cloned().unwrap_or_default();
+            SourceChange::from_text_edit(frange.file_id, text_edit_for_file)
+        };
+
+        let source_change_for_workspace = SourceChange::from(match_finder.edits());
+
+        (Some(source_change_for_file), Some(source_change_for_workspace))
+    } else {
+        (None, None)
+    };
+
+    let assists = vec![
+        ("Apply SSR in file", source_change_for_file),
+        ("Apply SSR in workspace", source_change_for_workspace),
+    ];
+
+    // Both assists share the same id and group and target the SSR comment.
+    for (label, source_change) in assists.into_iter() {
+        let assist = Assist {
+            id,
+            label: Label::new(label.to_string()),
+            group: Some(GroupLabel("Apply SSR".into())),
+            target: comment_range,
+            source_change,
+            trigger_signature_help: false,
+        };
+
+        ssr_assists.push(assist);
+    }
+
+    ssr_assists
+}
+
+#[cfg(test)]
+mod tests {
+    use std::sync::Arc;
+
+    use expect_test::expect;
+    use ide_assists::{Assist, AssistResolveStrategy};
+    use ide_db::{
+        base_db::{fixture::WithFixture, salsa::Durability, FileRange},
+        symbol_index::SymbolsDatabase,
+        FxHashSet, RootDatabase,
+    };
+
+    use super::ssr_assists;
+
+    /// Loads `ra_fixture` into a test database (marking the fixture root as
+    /// local so SSR will edit it) and returns the assists at the `$0` range.
+    fn get_assists(ra_fixture: &str, resolve: AssistResolveStrategy) -> Vec<Assist> {
+        let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture);
+        let mut local_roots = FxHashSet::default();
+        local_roots.insert(ide_db::base_db::fixture::WORKSPACE);
+        db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+        ssr_assists(&db, &resolve, FileRange { file_id, range: range_or_offset.into() })
+    }
+
+    // A plain comment (no `==>>`) must not produce any SSR assist.
+    #[test]
+    fn not_applicable_comment_not_ssr() {
+        let ra_fixture = r#"
+            //- /lib.rs
+
+            // This is foo $0
+            fn foo() {}
+            "#;
+        let assists = get_assists(ra_fixture, AssistResolveStrategy::All);
+
+        assert_eq!(0, assists.len());
+    }
+
+    // With resolution requested, the "in file" assist carries edits for the
+    // current file only while "in workspace" carries edits for every file.
+    #[test]
+    fn resolve_edits_true() {
+        let assists = get_assists(
+            r#"
+            //- /lib.rs
+            mod bar;
+
+            // 2 ==>> 3$0
+            fn foo() { 2 }
+
+            //- /bar.rs
+            fn bar() { 2 }
+            "#,
+            AssistResolveStrategy::All,
+        );
+
+        assert_eq!(2, assists.len());
+        let mut assists = assists.into_iter();
+
+        let apply_in_file_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "ssr",
+                    RefactorRewrite,
+                ),
+                label: "Apply SSR in file",
+                group: Some(
+                    GroupLabel(
+                        "Apply SSR",
+                    ),
+                ),
+                target: 10..21,
+                source_change: Some(
+                    SourceChange {
+                        source_file_edits: {
+                            FileId(
+                                0,
+                            ): TextEdit {
+                                indels: [
+                                    Indel {
+                                        insert: "3",
+                                        delete: 33..34,
+                                    },
+                                ],
+                            },
+                        },
+                        file_system_edits: [],
+                        is_snippet: false,
+                    },
+                ),
+                trigger_signature_help: false,
+            }
+        "#]]
+        .assert_debug_eq(&apply_in_file_assist);
+
+        let apply_in_workspace_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "ssr",
+                    RefactorRewrite,
+                ),
+                label: "Apply SSR in workspace",
+                group: Some(
+                    GroupLabel(
+                        "Apply SSR",
+                    ),
+                ),
+                target: 10..21,
+                source_change: Some(
+                    SourceChange {
+                        source_file_edits: {
+                            FileId(
+                                0,
+                            ): TextEdit {
+                                indels: [
+                                    Indel {
+                                        insert: "3",
+                                        delete: 33..34,
+                                    },
+                                ],
+                            },
+                            FileId(
+                                1,
+                            ): TextEdit {
+                                indels: [
+                                    Indel {
+                                        insert: "3",
+                                        delete: 11..12,
+                                    },
+                                ],
+                            },
+                        },
+                        file_system_edits: [],
+                        is_snippet: false,
+                    },
+                ),
+                trigger_signature_help: false,
+            }
+        "#]]
+        .assert_debug_eq(&apply_in_workspace_assist);
+    }
+
+    // Without resolution, both assists are returned but carry no edits.
+    #[test]
+    fn resolve_edits_false() {
+        let assists = get_assists(
+            r#"
+            //- /lib.rs
+            mod bar;
+
+            // 2 ==>> 3$0
+            fn foo() { 2 }
+
+            //- /bar.rs
+            fn bar() { 2 }
+            "#,
+            AssistResolveStrategy::None,
+        );
+
+        assert_eq!(2, assists.len());
+        let mut assists = assists.into_iter();
+
+        let apply_in_file_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "ssr",
+                    RefactorRewrite,
+                ),
+                label: "Apply SSR in file",
+                group: Some(
+                    GroupLabel(
+                        "Apply SSR",
+                    ),
+                ),
+                target: 10..21,
+                source_change: None,
+                trigger_signature_help: false,
+            }
+        "#]]
+        .assert_debug_eq(&apply_in_file_assist);
+
+        let apply_in_workspace_assist = assists.next().unwrap();
+        expect![[r#"
+            Assist {
+                id: AssistId(
+                    "ssr",
+                    RefactorRewrite,
+                ),
+                label: "Apply SSR in workspace",
+                group: Some(
+                    GroupLabel(
+                        "Apply SSR",
+                    ),
+                ),
+                target: 10..21,
+                source_change: None,
+                trigger_signature_help: false,
+            }
+        "#]]
+        .assert_debug_eq(&apply_in_workspace_assist);
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
new file mode 100644
index 000000000..d74b64041
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -0,0 +1,321 @@
+//! This module provides `StaticIndex` which is used for powering
+//! read-only code browsers and emitting LSIF
+
+use std::collections::HashMap;
+
+use hir::{db::HirDatabase, Crate, Module, Semantics};
+use ide_db::{
+ base_db::{FileId, FileRange, SourceDatabaseExt},
+ defs::{Definition, IdentClass},
+ FxHashSet, RootDatabase,
+};
+use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T};
+
+use crate::{
+ hover::hover_for_definition,
+ moniker::{crate_for_file, def_to_moniker, MonikerResult},
+ Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig,
+ TryToNav,
+};
+
+/// A static representation of fully analyzed source code.
+///
+/// The intended use-case is powering read-only code browsers and emitting LSIF
+#[derive(Debug)]
+pub struct StaticIndex<'a> {
+    pub files: Vec<StaticIndexedFile>,
+    pub tokens: TokenStore,
+    analysis: &'a Analysis,
+    db: &'a RootDatabase,
+    // Deduplication map: every occurrence of one definition shares a TokenId.
+    def_map: HashMap<Definition, TokenId>,
+}
+
+/// One reference (use or definition site) to an indexed definition.
+#[derive(Debug)]
+pub struct ReferenceData {
+    pub range: FileRange,
+    pub is_definition: bool,
+}
+
+/// All data gathered for a single definition's token.
+#[derive(Debug)]
+pub struct TokenStaticData {
+    pub hover: Option<HoverResult>,
+    pub definition: Option<FileRange>,
+    pub references: Vec<ReferenceData>,
+    pub moniker: Option<MonikerResult>,
+}
+
+/// Index of a token inside a [`TokenStore`].
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(usize);
+
+impl TokenId {
+    /// Returns the underlying index value.
+    pub fn raw(self) -> usize {
+        self.0
+    }
+}
+
+/// Append-only arena of [`TokenStaticData`], addressed by [`TokenId`].
+#[derive(Default, Debug)]
+pub struct TokenStore(Vec<TokenStaticData>);
+
+impl TokenStore {
+    /// Appends `data` and returns the id it was stored under.
+    pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
+        let id = TokenId(self.0.len());
+        self.0.push(data);
+        id
+    }
+
+    pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
+        self.0.get_mut(id.0)
+    }
+
+    pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
+        self.0.get(id.0)
+    }
+
+    // Note: consumes the store; yields (id, data) pairs in insertion order.
+    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+        self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+    }
+}
+
+/// Per-file index data: folds, inlay hints and the token table.
+#[derive(Debug)]
+pub struct StaticIndexedFile {
+    pub file_id: FileId,
+    pub folds: Vec<Fold>,
+    pub inlay_hints: Vec<InlayHint>,
+    pub tokens: Vec<(TextRange, TokenId)>,
+}
+
+/// Collects every module of every crate in the database by walking the
+/// module tree down from each crate's root module.
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
+    let mut worklist: Vec<_> =
+        Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+    let mut modules = Vec::new();
+
+    while let Some(module) = worklist.pop() {
+        modules.push(module);
+        worklist.extend(module.children(db));
+    }
+
+    modules
+}
+
+impl StaticIndex<'_> {
+    /// Indexes one file: computes folds, inlay hints, and hover/definition/
+    /// reference/moniker data for each identifier-like token, then appends
+    /// the result to `self.files` (token data is deduplicated via `def_map`).
+    fn add_file(&mut self, file_id: FileId) {
+        let current_crate = crate_for_file(self.db, file_id);
+        let folds = self.analysis.folding_ranges(file_id).unwrap();
+        // Fixed hint configuration for static output (no client to negotiate with).
+        let inlay_hints = self
+            .analysis
+            .inlay_hints(
+                &InlayHintsConfig {
+                    render_colons: true,
+                    type_hints: true,
+                    parameter_hints: true,
+                    chaining_hints: true,
+                    closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock,
+                    lifetime_elision_hints: crate::LifetimeElisionHints::Never,
+                    reborrow_hints: crate::ReborrowHints::Never,
+                    hide_named_constructor_hints: false,
+                    hide_closure_initialization_hints: false,
+                    param_names_for_lifetime_elision_hints: false,
+                    binding_mode_hints: false,
+                    max_length: Some(25),
+                    closing_brace_hints_min_lines: Some(25),
+                },
+                file_id,
+                None,
+            )
+            .unwrap();
+        // hovers
+        let sema = hir::Semantics::new(self.db);
+        let tokens_or_nodes = sema.parse(file_id).syntax().clone();
+        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+            syntax::NodeOrToken::Node(_) => None,
+            syntax::NodeOrToken::Token(x) => Some(x),
+        });
+        let hover_config =
+            HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
+        // Only identifier-like tokens can resolve to a definition.
+        let tokens = tokens.filter(|token| {
+            matches!(
+                token.kind(),
+                IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+            )
+        });
+        let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
+        for token in tokens {
+            let range = token.text_range();
+            let node = token.parent().unwrap();
+            let def = match get_definition(&sema, token.clone()) {
+                Some(x) => x,
+                None => continue,
+            };
+            // First occurrence of a definition creates its TokenStaticData;
+            // subsequent occurrences only record another reference.
+            let id = if let Some(x) = self.def_map.get(&def) {
+                *x
+            } else {
+                let x = self.tokens.insert(TokenStaticData {
+                    hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
+                    definition: def
+                        .try_to_nav(self.db)
+                        .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+                    references: vec![],
+                    moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
+                });
+                self.def_map.insert(def, x);
+                x
+            };
+            let token = self.tokens.get_mut(id).unwrap();
+            token.references.push(ReferenceData {
+                range: FileRange { range, file_id },
+                is_definition: match def.try_to_nav(self.db) {
+                    Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
+                    None => false,
+                },
+            });
+            result.tokens.push((range, id));
+        }
+        self.files.push(result);
+    }
+
+    /// Builds a [`StaticIndex`] covering every file of every local
+    /// (non-library) module reachable from the crate graph.
+    pub fn compute(analysis: &Analysis) -> StaticIndex<'_> {
+        let db = &*analysis.db;
+        let work = all_modules(db).into_iter().filter(|module| {
+            let file_id = module.definition_source(db).file_id.original_file(db);
+            let source_root = db.file_source_root(file_id);
+            let source_root = db.source_root(source_root);
+            !source_root.is_library
+        });
+        let mut this = StaticIndex {
+            files: vec![],
+            tokens: Default::default(),
+            analysis,
+            db,
+            def_map: Default::default(),
+        };
+        // Several modules can live in one file (e.g. inline `mod`s); index
+        // each file only once.
+        let mut visited_files = FxHashSet::default();
+        for module in work {
+            let file_id = module.definition_source(db).file_id.original_file(db);
+            if visited_files.contains(&file_id) {
+                continue;
+            }
+            this.add_file(file_id);
+            // mark the file
+            visited_files.insert(file_id);
+        }
+        this
+    }
+}
+
+/// Resolves `token` to a definition, descending into macro expansions.
+/// Returns `Some` only when a descended token classifies to exactly one
+/// definition; ambiguous tokens are skipped.
+fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
+    for token in sema.descend_into_macros(token) {
+        let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
+        if let Some(&[x]) = def.as_deref() {
+            return Some(x);
+        } else {
+            continue;
+        };
+    }
+    None
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{fixture, StaticIndex};
+    use ide_db::base_db::FileRange;
+    use std::collections::HashSet;
+    use syntax::TextSize;
+
+    /// Asserts that the indexed token ranges are exactly the `//^^^`-annotated
+    /// ranges of the fixture — no extras, none missing.
+    fn check_all_ranges(ra_fixture: &str) {
+        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+        let s = StaticIndex::compute(&analysis);
+        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+        for f in s.files {
+            for (range, _) in f.tokens {
+                let x = FileRange { file_id: f.file_id, range };
+                if !range_set.contains(&x) {
+                    panic!("additional range {:?}", x);
+                }
+                range_set.remove(&x);
+            }
+        }
+        if !range_set.is_empty() {
+            panic!("unfound ranges {:?}", range_set);
+        }
+    }
+
+    /// Asserts that the recorded definition ranges match the annotations
+    /// (whole-file definitions, i.e. starting at offset 0, are ignored).
+    fn check_definitions(ra_fixture: &str) {
+        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+        let s = StaticIndex::compute(&analysis);
+        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+        for (_, t) in s.tokens.iter() {
+            if let Some(x) = t.definition {
+                if x.range.start() == TextSize::from(0) {
+                    // ignore definitions that are whole of file
+                    continue;
+                }
+                if !range_set.contains(&x) {
+                    panic!("additional definition {:?}", x);
+                }
+                range_set.remove(&x);
+            }
+        }
+        if !range_set.is_empty() {
+            panic!("unfound definitions {:?}", range_set);
+        }
+    }
+
+    #[test]
+    fn struct_and_enum() {
+        check_all_ranges(
+            r#"
+struct Foo;
+     //^^^
+enum E { X(Foo) }
+   //^   ^ ^^^
+"#,
+        );
+        check_definitions(
+            r#"
+struct Foo;
+     //^^^
+enum E { X(Foo) }
+   //^   ^
+"#,
+        );
+    }
+
+    #[test]
+    fn multi_crate() {
+        // NOTE(review): the `foo` fixture below reads `pub func() {` (missing
+        // `fn`) — presumably deliberate invalid code so only `main` defines a
+        // local definition; confirm this was intended.
+        check_definitions(
+            r#"
+//- /main.rs crate:main deps:foo
+
+
+use foo::func;
+
+fn main() {
+ //^^^^
+    func();
+}
+//- /foo/lib.rs crate:foo
+
+pub func() {
+
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn derives() {
+        check_all_ranges(
+            r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+//^^^^^^^^^^^^^^^^^^^
+pub macro Copy {}
+        //^^^^
+#[derive(Copy)]
+//^^^^^^ ^^^^
+struct Hello(i32);
+     //^^^^^ ^^^
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs
new file mode 100644
index 000000000..3191870eb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/status.rs
@@ -0,0 +1,164 @@
+use std::{fmt, iter::FromIterator, sync::Arc};
+
+use hir::{ExpandResult, MacroFile};
+use ide_db::base_db::{
+ salsa::debug::{DebugQueryTable, TableEntry},
+ CrateId, FileId, FileTextQuery, SourceDatabase, SourceRootId,
+};
+use ide_db::{
+ symbol_index::{LibrarySymbolsQuery, SymbolIndex},
+ RootDatabase,
+};
+use itertools::Itertools;
+use profile::{memory_usage, Bytes};
+use std::env;
+use stdx::format_to;
+use syntax::{ast, Parse, SyntaxNode};
+
+/// Aggregates cache statistics over all `ParseQuery` (source file) entries.
+fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
+    ide_db::base_db::ParseQuery.in_db(db).entries::<SyntaxTreeStats>()
+}
+/// Aggregates cache statistics over all parsed macro-expansion entries.
+fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
+    hir::db::ParseMacroExpansionQuery.in_db(db).entries::<SyntaxTreeStats>()
+}
+
+// Feature: Status
+//
+// Shows internal statistic about memory usage of rust-analyzer.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Status**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif[]
+pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
+    let mut buf = String::new();
+    format_to!(buf, "{}\n", FileTextQuery.in_db(db).entries::<FilesStats>());
+    format_to!(buf, "{}\n", LibrarySymbolsQuery.in_db(db).entries::<LibrarySymbolsStats>());
+    format_to!(buf, "{}\n", syntax_tree_stats(db));
+    format_to!(buf, "{} (Macros)\n", macro_syntax_tree_stats(db));
+    format_to!(buf, "{} in total\n", memory_usage());
+    // countme statistics are only collected when RA_COUNT is set.
+    if env::var("RA_COUNT").is_ok() {
+        format_to!(buf, "\nCounts:\n{}", profile::countme::get_all());
+    }
+
+    // Optionally append per-file info: owning crate(s) and their dependencies.
+    if let Some(file_id) = file_id {
+        format_to!(buf, "\nFile info:\n");
+        let crates = crate::parent_module::crate_for(db, file_id);
+        if crates.is_empty() {
+            format_to!(buf, "Does not belong to any crate");
+        }
+        let crate_graph = db.crate_graph();
+        for krate in crates {
+            let display_crate = |krate: CrateId| match &crate_graph[krate].display_name {
+                Some(it) => format!("{}({:?})", it, krate),
+                None => format!("{:?}", krate),
+            };
+            format_to!(buf, "Crate: {}\n", display_crate(krate));
+            let deps = crate_graph[krate]
+                .dependencies
+                .iter()
+                .map(|dep| format!("{}={:?}", dep.name, dep.crate_id))
+                .format(", ");
+            format_to!(buf, "Dependencies: {}\n", deps);
+        }
+    }
+
+    buf.trim().to_string()
+}
+
+/// Count and aggregate size of all file texts held in memory.
+#[derive(Default)]
+struct FilesStats {
+    total: usize,
+    size: Bytes,
+}
+
+impl fmt::Display for FilesStats {
+    // NOTE(review): only `size` is printed; `total` is accumulated but never
+    // displayed — confirm whether the count was meant to appear here too.
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(fmt, "{} of files", self.size)
+    }
+}
+
+impl FromIterator<TableEntry<FileId, Arc<String>>> for FilesStats {
+    /// Sums entry count and text length over all file-text query entries.
+    fn from_iter<T>(iter: T) -> FilesStats
+    where
+        T: IntoIterator<Item = TableEntry<FileId, Arc<String>>>,
+    {
+        let mut res = FilesStats::default();
+        for entry in iter {
+            res.total += 1;
+            res.size += entry.value.unwrap().len();
+        }
+        res
+    }
+}
+
+/// How many syntax trees a parse query has produced vs. how many are still
+/// cached (`retained`, i.e. the entry's value has not been evicted).
+#[derive(Default)]
+pub(crate) struct SyntaxTreeStats {
+    total: usize,
+    pub(crate) retained: usize,
+}
+
+impl fmt::Display for SyntaxTreeStats {
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(fmt, "{} trees, {} preserved", self.total, self.retained)
+    }
+}
+
+// Aggregation over source-file parse entries.
+impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStats {
+    fn from_iter<T>(iter: T) -> SyntaxTreeStats
+    where
+        T: IntoIterator<Item = TableEntry<FileId, Parse<ast::SourceFile>>>,
+    {
+        let mut res = SyntaxTreeStats::default();
+        for entry in iter {
+            res.total += 1;
+            res.retained += entry.value.is_some() as usize;
+        }
+        res
+    }
+}
+
+// Aggregation over macro-expansion parse entries (generic over the mapping
+// payload `M`, which is ignored).
+impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>
+    for SyntaxTreeStats
+{
+    fn from_iter<T>(iter: T) -> SyntaxTreeStats
+    where
+        T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>,
+    {
+        let mut res = SyntaxTreeStats::default();
+        for entry in iter {
+            res.total += 1;
+            res.retained += entry.value.is_some() as usize;
+        }
+        res
+    }
+}
+
+/// Symbol count and memory footprint of the library symbol indices.
+#[derive(Default)]
+struct LibrarySymbolsStats {
+    total: usize,
+    size: Bytes,
+}
+
+impl fmt::Display for LibrarySymbolsStats {
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(fmt, "{} of index symbols ({})", self.size, self.total)
+    }
+}
+
+impl FromIterator<TableEntry<SourceRootId, Arc<SymbolIndex>>> for LibrarySymbolsStats {
+    /// Sums symbol counts and index memory sizes over all source roots.
+    fn from_iter<T>(iter: T) -> LibrarySymbolsStats
+    where
+        T: IntoIterator<Item = TableEntry<SourceRootId, Arc<SymbolIndex>>>,
+    {
+        let mut res = LibrarySymbolsStats::default();
+        for entry in iter {
+            let symbols = entry.value.unwrap();
+            res.total += symbols.len();
+            res.size += symbols.memory_size();
+        }
+        res
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
new file mode 100644
index 000000000..3fb49b45d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -0,0 +1,449 @@
+pub(crate) mod tags;
+
+mod highlights;
+mod injector;
+
+mod highlight;
+mod format;
+mod macro_;
+mod inject;
+mod escape;
+
+mod html;
+#[cfg(test)]
+mod tests;
+
+use hir::{Name, Semantics};
+use ide_db::{FxHashMap, RootDatabase};
+use syntax::{
+ ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T,
+};
+
+use crate::{
+ syntax_highlighting::{
+ escape::highlight_escape_string, format::highlight_format_string, highlights::Highlights,
+ macro_::MacroHighlighter, tags::Highlight,
+ },
+ FileId, HlMod, HlTag,
+};
+
+pub(crate) use html::highlight_as_html;
+
+#[derive(Debug, Clone, Copy)]
+pub struct HlRange {
+ pub range: TextRange,
+ pub highlight: Highlight,
+ pub binding_hash: Option<u64>,
+}
+
+// Feature: Semantic Syntax Highlighting
+//
+// rust-analyzer highlights the code semantically.
+// For example, `Bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait.
+// rust-analyzer does not specify colors directly, instead it assigns a tag (like `struct`) and a set of modifiers (like `declaration`) to each token.
+// It's up to the client to map those to specific colors.
+//
+// The general rule is that a reference to an entity gets colored the same way as the entity itself.
+// We also give special modifier for `mut` and `&mut` local variables.
+//
+//
+// .Token Tags
+//
+// Rust-analyzer currently emits the following token tags:
+//
+// - For items:
+// +
+// [horizontal]
+// attribute:: Emitted for attribute macros.
+// enum:: Emitted for enums.
+// function:: Emitted for free-standing functions.
+// derive:: Emitted for derive macros.
+// macro:: Emitted for function-like macros.
+// method:: Emitted for associated functions, also knowns as methods.
+// namespace:: Emitted for modules.
+// struct:: Emitted for structs.
+// trait:: Emitted for traits.
+// typeAlias:: Emitted for type aliases and `Self` in `impl`s.
+// union:: Emitted for unions.
+//
+// - For literals:
+// +
+// [horizontal]
+// boolean:: Emitted for the boolean literals `true` and `false`.
+// character:: Emitted for character literals.
+// number:: Emitted for numeric literals.
+// string:: Emitted for string literals.
+// escapeSequence:: Emitted for escaped sequences inside strings like `\n`.
+// formatSpecifier:: Emitted for format specifiers `{:?}` in `format!`-like macros.
+//
+// - For operators:
+// +
+// [horizontal]
+// operator:: Emitted for general operators.
+// arithmetic:: Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`.
+// bitwise:: Emitted for the bitwise operators `|`, `&`, `!`, `^`, `|=`, `&=`, `^=`.
+// comparison:: Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`.
+// logical:: Emitted for the logical operators `||`, `&&`, `!`.
+//
+// - For punctuation:
+// +
+// [horizontal]
+// punctuation:: Emitted for general punctuation.
+// attributeBracket:: Emitted for attribute invocation brackets, that is the `#[` and `]` tokens.
+// angle:: Emitted for `<>` angle brackets.
+// brace:: Emitted for `{}` braces.
+// bracket:: Emitted for `[]` brackets.
+// parenthesis:: Emitted for `()` parentheses.
+// colon:: Emitted for the `:` token.
+// comma:: Emitted for the `,` token.
+// dot:: Emitted for the `.` token.
+// semi:: Emitted for the `;` token.
+// macroBang:: Emitted for the `!` token in macro calls.
+//
+// //-
+//
+// [horizontal]
+// builtinAttribute:: Emitted for names to builtin attributes in attribute path, the `repr` in `#[repr(u8)]` for example.
+// builtinType:: Emitted for builtin types like `u32`, `str` and `f32`.
+// comment:: Emitted for comments.
+// constParameter:: Emitted for const parameters.
+// deriveHelper:: Emitted for derive helper attributes.
+// enumMember:: Emitted for enum variants.
+// generic:: Emitted for generic tokens that have no mapping.
+// keyword:: Emitted for keywords.
+// label:: Emitted for labels.
+// lifetime:: Emitted for lifetimes.
+// parameter:: Emitted for non-self function parameters.
+// property:: Emitted for struct and union fields.
+// selfKeyword:: Emitted for the self function parameter and self path-specifier.
+// selfTypeKeyword:: Emitted for the Self type parameter.
+// toolModule:: Emitted for tool modules.
+// typeParameter:: Emitted for type parameters.
+// unresolvedReference:: Emitted for unresolved references, names that rust-analyzer can't find the definition of.
+// variable:: Emitted for locals, constants and statics.
+//
+//
+// .Token Modifiers
+//
+// Token modifiers allow to style some elements in the source code more precisely.
+//
+// Rust-analyzer currently emits the following token modifiers:
+//
+// [horizontal]
+// async:: Emitted for async functions and the `async` and `await` keywords.
+// attribute:: Emitted for tokens inside attributes.
+// callable:: Emitted for locals whose types implements one of the `Fn*` traits.
+// constant:: Emitted for consts.
+// consuming:: Emitted for locals that are being consumed when use in a function call.
+// controlFlow:: Emitted for control-flow related tokens, this includes the `?` operator.
+// crateRoot:: Emitted for crate names, like `serde` and `crate`.
+// declaration:: Emitted for names of definitions, like `foo` in `fn foo() {}`.
+// defaultLibrary:: Emitted for items from built-in crates (std, core, alloc, test and proc_macro).
+// documentation:: Emitted for documentation comments.
+// injected:: Emitted for doc-string injected highlighting like rust source blocks in documentation.
+// intraDocLink:: Emitted for intra doc links in doc-strings.
+// library:: Emitted for items that are defined outside of the current crate.
+// mutable:: Emitted for mutable locals and statics as well as functions taking `&mut self`.
+// public:: Emitted for items that are from the current crate and are `pub`.
+// reference:: Emitted for locals behind a reference and functions taking `self` by reference.
+// static:: Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts.
+// trait:: Emitted for associated trait items.
+// unsafe:: Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token.
+//
+//
+// image::https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png[]
+// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[]
+pub(crate) fn highlight(
+ db: &RootDatabase,
+ file_id: FileId,
+ range_to_highlight: Option<TextRange>,
+ syntactic_name_ref_highlighting: bool,
+) -> Vec<HlRange> {
+ let _p = profile::span("highlight");
+ let sema = Semantics::new(db);
+
+ // Determine the root based on the given range.
+ let (root, range_to_highlight) = {
+ let source_file = sema.parse(file_id);
+ let source_file = source_file.syntax();
+ match range_to_highlight {
+ Some(range) => {
+ let node = match source_file.covering_element(range) {
+ NodeOrToken::Node(it) => it,
+ NodeOrToken::Token(it) => it.parent().unwrap_or_else(|| source_file.clone()),
+ };
+ (node, range)
+ }
+ None => (source_file.clone(), source_file.text_range()),
+ }
+ };
+
+ let mut hl = highlights::Highlights::new(root.text_range());
+ let krate = match sema.scope(&root) {
+ Some(it) => it.krate(),
+ None => return hl.to_vec(),
+ };
+ traverse(
+ &mut hl,
+ &sema,
+ file_id,
+ &root,
+ krate,
+ range_to_highlight,
+ syntactic_name_ref_highlighting,
+ );
+ hl.to_vec()
+}
+
+fn traverse(
+ hl: &mut Highlights,
+ sema: &Semantics<'_, RootDatabase>,
+ file_id: FileId,
+ root: &SyntaxNode,
+ krate: hir::Crate,
+ range_to_highlight: TextRange,
+ syntactic_name_ref_highlighting: bool,
+) {
+ let is_unlinked = sema.to_module_def(file_id).is_none();
+ let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
+
+ enum AttrOrDerive {
+ Attr(ast::Item),
+ Derive(ast::Item),
+ }
+
+ impl AttrOrDerive {
+ fn item(&self) -> &ast::Item {
+ match self {
+ AttrOrDerive::Attr(item) | AttrOrDerive::Derive(item) => item,
+ }
+ }
+ }
+
+ let mut tt_level = 0;
+ let mut attr_or_derive_item = None;
+ let mut current_macro: Option<ast::Macro> = None;
+ let mut macro_highlighter = MacroHighlighter::default();
+ let mut inside_attribute = false;
+
+ // Walk all nodes, keeping track of whether we are inside a macro or not.
+ // If in macro, expand it first and highlight the expanded code.
+ for event in root.preorder_with_tokens() {
+ use WalkEvent::{Enter, Leave};
+
+ let range = match &event {
+ Enter(it) | Leave(it) => it.text_range(),
+ };
+
+ // Element outside of the viewport, no need to highlight
+ if range_to_highlight.intersect(range).is_none() {
+ continue;
+ }
+
+ // set macro and attribute highlighting states
+ match event.clone() {
+ Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+ tt_level += 1;
+ }
+ Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+ tt_level -= 1;
+ }
+ Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
+ inside_attribute = true
+ }
+ Leave(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
+ inside_attribute = false
+ }
+
+ Enter(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
+ match ast::Item::cast(node.clone()) {
+ Some(ast::Item::MacroRules(mac)) => {
+ macro_highlighter.init();
+ current_macro = Some(mac.into());
+ continue;
+ }
+ Some(ast::Item::MacroDef(mac)) => {
+ macro_highlighter.init();
+ current_macro = Some(mac.into());
+ continue;
+ }
+ Some(item) => {
+ if matches!(node.kind(), FN | CONST | STATIC) {
+ bindings_shadow_count.clear();
+ }
+
+ if attr_or_derive_item.is_none() {
+ if sema.is_attr_macro_call(&item) {
+ attr_or_derive_item = Some(AttrOrDerive::Attr(item));
+ } else {
+ let adt = match item {
+ ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
+ ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
+ ast::Item::Union(it) => Some(ast::Adt::Union(it)),
+ _ => None,
+ };
+ match adt {
+ Some(adt) if sema.is_derive_annotated(&adt) => {
+ attr_or_derive_item =
+ Some(AttrOrDerive::Derive(ast::Item::from(adt)));
+ }
+ _ => (),
+ }
+ }
+ }
+ }
+ _ => (),
+ }
+ }
+ Leave(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
+ match ast::Item::cast(node.clone()) {
+ Some(ast::Item::MacroRules(mac)) => {
+ assert_eq!(current_macro, Some(mac.into()));
+ current_macro = None;
+ macro_highlighter = MacroHighlighter::default();
+ }
+ Some(ast::Item::MacroDef(mac)) => {
+ assert_eq!(current_macro, Some(mac.into()));
+ current_macro = None;
+ macro_highlighter = MacroHighlighter::default();
+ }
+ Some(item)
+ if attr_or_derive_item.as_ref().map_or(false, |it| *it.item() == item) =>
+ {
+ attr_or_derive_item = None;
+ }
+ _ => (),
+ }
+ }
+ _ => (),
+ }
+
+ let element = match event {
+ Enter(NodeOrToken::Token(tok)) if tok.kind() == WHITESPACE => continue,
+ Enter(it) => it,
+ Leave(NodeOrToken::Token(_)) => continue,
+ Leave(NodeOrToken::Node(node)) => {
+ // Doc comment highlighting injection, we do this when leaving the node
+ // so that we overwrite the highlighting of the doc comment itself.
+ inject::doc_comment(hl, sema, file_id, &node);
+ continue;
+ }
+ };
+
+ if current_macro.is_some() {
+ if let Some(tok) = element.as_token() {
+ macro_highlighter.advance(tok);
+ }
+ }
+
+ let element = match element.clone() {
+ NodeOrToken::Node(n) => match ast::NameLike::cast(n) {
+ Some(n) => NodeOrToken::Node(n),
+ None => continue,
+ },
+ NodeOrToken::Token(t) => NodeOrToken::Token(t),
+ };
+ let token = element.as_token().cloned();
+
+ // Descending tokens into macros is expensive even if no descending occurs, so make sure
+ // that we actually are in a position where descending is possible.
+ let in_macro = tt_level > 0
+ || match attr_or_derive_item {
+ Some(AttrOrDerive::Attr(_)) => true,
+ Some(AttrOrDerive::Derive(_)) => inside_attribute,
+ None => false,
+ };
+ let descended_element = if in_macro {
+ // Attempt to descend tokens into macro-calls.
+ match element {
+ NodeOrToken::Token(token) if token.kind() != COMMENT => {
+ let token = match attr_or_derive_item {
+ Some(AttrOrDerive::Attr(_)) => {
+ sema.descend_into_macros_with_kind_preference(token)
+ }
+ Some(AttrOrDerive::Derive(_)) | None => {
+ sema.descend_into_macros_single(token)
+ }
+ };
+ match token.parent().and_then(ast::NameLike::cast) {
+ // Remap the token into the wrapping single token nodes
+ Some(parent) => match (token.kind(), parent.syntax().kind()) {
+ (T![self] | T![ident], NAME | NAME_REF) => NodeOrToken::Node(parent),
+ (T![self] | T![super] | T![crate] | T![Self], NAME_REF) => {
+ NodeOrToken::Node(parent)
+ }
+ (INT_NUMBER, NAME_REF) => NodeOrToken::Node(parent),
+ (LIFETIME_IDENT, LIFETIME) => NodeOrToken::Node(parent),
+ _ => NodeOrToken::Token(token),
+ },
+ None => NodeOrToken::Token(token),
+ }
+ }
+ e => e,
+ }
+ } else {
+ element
+ };
+
+ // FIXME: do proper macro def highlighting https://github.com/rust-lang/rust-analyzer/issues/6232
+ // Skip metavariables from being highlighted to prevent keyword highlighting in them
+ if descended_element.as_token().and_then(|t| macro_highlighter.highlight(t)).is_some() {
+ continue;
+ }
+
+ // string highlight injections, note this does not use the descended element as proc-macros
+ // can rewrite string literals which invalidates our indices
+ if let (Some(token), Some(descended_token)) = (token, descended_element.as_token()) {
+ if ast::String::can_cast(token.kind()) && ast::String::can_cast(descended_token.kind())
+ {
+ let string = ast::String::cast(token);
+ let string_to_highlight = ast::String::cast(descended_token.clone());
+ if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
+ if string.is_raw() {
+ if inject::ra_fixture(hl, sema, &string, &expanded_string).is_some() {
+ continue;
+ }
+ }
+ highlight_format_string(hl, &string, &expanded_string, range);
+ highlight_escape_string(hl, &string, range.start());
+ }
+ } else if ast::ByteString::can_cast(token.kind())
+ && ast::ByteString::can_cast(descended_token.kind())
+ {
+ if let Some(byte_string) = ast::ByteString::cast(token) {
+ highlight_escape_string(hl, &byte_string, range.start());
+ }
+ }
+ }
+
+ let element = match descended_element {
+ NodeOrToken::Node(name_like) => highlight::name_like(
+ sema,
+ krate,
+ &mut bindings_shadow_count,
+ syntactic_name_ref_highlighting,
+ name_like,
+ ),
+ NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)),
+ };
+ if let Some((mut highlight, binding_hash)) = element {
+ if is_unlinked && highlight.tag == HlTag::UnresolvedReference {
+ // do not emit unresolved references if the file is unlinked
+ // let the editor do its highlighting for these tokens instead
+ continue;
+ }
+ if highlight.tag == HlTag::UnresolvedReference
+ && matches!(attr_or_derive_item, Some(AttrOrDerive::Derive(_)) if inside_attribute)
+ {
+ // do not emit unresolved references in derive helpers if the token mapping maps to
+ // something unresolvable. FIXME: There should be a way to prevent that
+ continue;
+ }
+ if inside_attribute {
+ highlight |= HlMod::Attribute
+ }
+
+ hl.add(HlRange { range, highlight, binding_hash });
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
new file mode 100644
index 000000000..6a1236c79
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
@@ -0,0 +1,25 @@
+//! Syntax highlighting for escape sequences
+use crate::syntax_highlighting::highlights::Highlights;
+use crate::{HlRange, HlTag};
+use syntax::ast::IsString;
+use syntax::TextSize;
+
+pub(super) fn highlight_escape_string<T: IsString>(
+ stack: &mut Highlights,
+ string: &T,
+ start: TextSize,
+) {
+ string.escaped_char_ranges(&mut |piece_range, char| {
+ if char.is_err() {
+ return;
+ }
+
+ if string.text()[piece_range.start().into()..].starts_with('\\') {
+ stack.add(HlRange {
+ range: piece_range + start,
+ highlight: HlTag::EscapeSequence.into(),
+ binding_hash: None,
+ });
+ }
+ });
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
new file mode 100644
index 000000000..2ed57e201
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
@@ -0,0 +1,50 @@
+//! Syntax highlighting for format macro strings.
+use ide_db::{
+ syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
+ SymbolKind,
+};
+use syntax::{ast, TextRange};
+
+use crate::{syntax_highlighting::highlights::Highlights, HlRange, HlTag};
+
+pub(super) fn highlight_format_string(
+ stack: &mut Highlights,
+ string: &ast::String,
+ expanded_string: &ast::String,
+ range: TextRange,
+) {
+ if !is_format_string(expanded_string) {
+ return;
+ }
+
+ lex_format_specifiers(string, &mut |piece_range, kind| {
+ if let Some(highlight) = highlight_format_specifier(kind) {
+ stack.add(HlRange {
+ range: piece_range + range.start(),
+ highlight: highlight.into(),
+ binding_hash: None,
+ });
+ }
+ });
+}
+
+fn highlight_format_specifier(kind: FormatSpecifier) -> Option<HlTag> {
+ Some(match kind {
+ FormatSpecifier::Open
+ | FormatSpecifier::Close
+ | FormatSpecifier::Colon
+ | FormatSpecifier::Fill
+ | FormatSpecifier::Align
+ | FormatSpecifier::Sign
+ | FormatSpecifier::NumberSign
+ | FormatSpecifier::DollarSign
+ | FormatSpecifier::Dot
+ | FormatSpecifier::Asterisk
+ | FormatSpecifier::QuestionMark => HlTag::FormatSpecifier,
+
+ FormatSpecifier::Integer | FormatSpecifier::Zero => HlTag::NumericLiteral,
+
+ FormatSpecifier::Identifier => HlTag::Symbol(SymbolKind::Local),
+ FormatSpecifier::Escape => HlTag::EscapeSequence,
+ })
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
new file mode 100644
index 000000000..9395e914c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -0,0 +1,690 @@
+//! Computes color for a single element.
+
+use hir::{AsAssocItem, HasVisibility, Semantics};
+use ide_db::{
+ defs::{Definition, IdentClass, NameClass, NameRefClass},
+ FxHashMap, RootDatabase, SymbolKind,
+};
+use syntax::{
+ ast, match_ast, AstNode, AstToken, NodeOrToken,
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, T,
+};
+
+use crate::{
+ syntax_highlighting::tags::{HlOperator, HlPunct},
+ Highlight, HlMod, HlTag,
+};
+
+pub(super) fn token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Highlight> {
+ if let Some(comment) = ast::Comment::cast(token.clone()) {
+ let h = HlTag::Comment;
+ return Some(match comment.kind().doc {
+ Some(_) => h | HlMod::Documentation,
+ None => h.into(),
+ });
+ }
+
+ let highlight: Highlight = match token.kind() {
+ STRING | BYTE_STRING => HlTag::StringLiteral.into(),
+ INT_NUMBER if token.parent_ancestors().nth(1).map(|it| it.kind()) == Some(FIELD_EXPR) => {
+ SymbolKind::Field.into()
+ }
+ INT_NUMBER | FLOAT_NUMBER => HlTag::NumericLiteral.into(),
+ BYTE => HlTag::ByteLiteral.into(),
+ CHAR => HlTag::CharLiteral.into(),
+ IDENT if token.parent().and_then(ast::TokenTree::cast).is_some() => {
+ // from this point on we are inside a token tree, this only happens for identifiers
+ // that were not mapped down into macro invocations
+ HlTag::None.into()
+ }
+ p if p.is_punct() => punctuation(sema, token, p),
+ k if k.is_keyword() => keyword(sema, token, k)?,
+ _ => return None,
+ };
+ Some(highlight)
+}
+
+pub(super) fn name_like(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
+ syntactic_name_ref_highlighting: bool,
+ name_like: ast::NameLike,
+) -> Option<(Highlight, Option<u64>)> {
+ let mut binding_hash = None;
+ let highlight = match name_like {
+ ast::NameLike::NameRef(name_ref) => highlight_name_ref(
+ sema,
+ krate,
+ bindings_shadow_count,
+ &mut binding_hash,
+ syntactic_name_ref_highlighting,
+ name_ref,
+ ),
+ ast::NameLike::Name(name) => {
+ highlight_name(sema, bindings_shadow_count, &mut binding_hash, krate, name)
+ }
+ ast::NameLike::Lifetime(lifetime) => match IdentClass::classify_lifetime(sema, &lifetime) {
+ Some(IdentClass::NameClass(NameClass::Definition(def))) => {
+ highlight_def(sema, krate, def) | HlMod::Definition
+ }
+ Some(IdentClass::NameRefClass(NameRefClass::Definition(def))) => {
+ highlight_def(sema, krate, def)
+ }
+ // FIXME: Fallback for 'static and '_, as we do not resolve these yet
+ _ => SymbolKind::LifetimeParam.into(),
+ },
+ };
+ Some((highlight, binding_hash))
+}
+
+fn punctuation(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+ kind: SyntaxKind,
+) -> Highlight {
+ let parent = token.parent();
+ let parent_kind = parent.as_ref().map_or(EOF, SyntaxNode::kind);
+ match (kind, parent_kind) {
+ (T![?], _) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
+ (T![&], BIN_EXPR) => HlOperator::Bitwise.into(),
+ (T![&], _) => {
+ let h = HlTag::Operator(HlOperator::Other).into();
+ let is_unsafe = parent
+ .and_then(ast::RefExpr::cast)
+ .map(|ref_expr| sema.is_unsafe_ref_expr(&ref_expr));
+ if let Some(true) = is_unsafe {
+ h | HlMod::Unsafe
+ } else {
+ h
+ }
+ }
+ (T![::] | T![->] | T![=>] | T![..] | T![=] | T![@] | T![.], _) => HlOperator::Other.into(),
+ (T![!], MACRO_CALL | MACRO_RULES) => HlPunct::MacroBang.into(),
+ (T![!], NEVER_TYPE) => HlTag::BuiltinType.into(),
+ (T![!], PREFIX_EXPR) => HlOperator::Logical.into(),
+ (T![*], PTR_TYPE) => HlTag::Keyword.into(),
+ (T![*], PREFIX_EXPR) => {
+ let is_raw_ptr = (|| {
+ let prefix_expr = parent.and_then(ast::PrefixExpr::cast)?;
+ let expr = prefix_expr.expr()?;
+ sema.type_of_expr(&expr)?.original.is_raw_ptr().then(|| ())
+ })();
+ if let Some(()) = is_raw_ptr {
+ HlTag::Operator(HlOperator::Other) | HlMod::Unsafe
+ } else {
+ HlOperator::Other.into()
+ }
+ }
+ (T![-], PREFIX_EXPR) => {
+ let prefix_expr = parent.and_then(ast::PrefixExpr::cast).and_then(|e| e.expr());
+ match prefix_expr {
+ Some(ast::Expr::Literal(_)) => HlTag::NumericLiteral,
+ _ => HlTag::Operator(HlOperator::Other),
+ }
+ .into()
+ }
+ (T![+] | T![-] | T![*] | T![/] | T![%], BIN_EXPR) => HlOperator::Arithmetic.into(),
+ (T![+=] | T![-=] | T![*=] | T![/=] | T![%=], BIN_EXPR) => {
+ Highlight::from(HlOperator::Arithmetic) | HlMod::Mutable
+ }
+ (T![|] | T![&] | T![!] | T![^] | T![>>] | T![<<], BIN_EXPR) => HlOperator::Bitwise.into(),
+ (T![|=] | T![&=] | T![^=] | T![>>=] | T![<<=], BIN_EXPR) => {
+ Highlight::from(HlOperator::Bitwise) | HlMod::Mutable
+ }
+ (T![&&] | T![||], BIN_EXPR) => HlOperator::Logical.into(),
+ (T![>] | T![<] | T![==] | T![>=] | T![<=] | T![!=], BIN_EXPR) => {
+ HlOperator::Comparison.into()
+ }
+ (_, PREFIX_EXPR | BIN_EXPR | RANGE_EXPR | RANGE_PAT | REST_PAT) => HlOperator::Other.into(),
+ (_, ATTR) => HlTag::AttributeBracket.into(),
+ (kind, _) => match kind {
+ T!['['] | T![']'] => HlPunct::Bracket,
+ T!['{'] | T!['}'] => HlPunct::Brace,
+ T!['('] | T![')'] => HlPunct::Parenthesis,
+ T![<] | T![>] => HlPunct::Angle,
+ T![,] => HlPunct::Comma,
+ T![:] => HlPunct::Colon,
+ T![;] => HlPunct::Semi,
+ T![.] => HlPunct::Dot,
+ _ => HlPunct::Other,
+ }
+ .into(),
+ }
+}
+
+fn keyword(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+ kind: SyntaxKind,
+) -> Option<Highlight> {
+ let h = Highlight::new(HlTag::Keyword);
+ let h = match kind {
+ T![await] => h | HlMod::Async | HlMod::ControlFlow,
+ T![async] => h | HlMod::Async,
+ T![break]
+ | T![continue]
+ | T![else]
+ | T![if]
+ | T![in]
+ | T![loop]
+ | T![match]
+ | T![return]
+ | T![while]
+ | T![yield] => h | HlMod::ControlFlow,
+ T![for] if parent_matches::<ast::ForExpr>(&token) => h | HlMod::ControlFlow,
+ T![unsafe] => h | HlMod::Unsafe,
+ T![true] | T![false] => HlTag::BoolLiteral.into(),
+ // crate is handled just as a token if it's in an `extern crate`
+ T![crate] if parent_matches::<ast::ExternCrate>(&token) => h,
+ // self, crate, super and `Self` are handled as either a Name or NameRef already, unless they
+ // are inside unmapped token trees
+ T![self] | T![crate] | T![super] | T![Self] if parent_matches::<ast::NameRef>(&token) => {
+ return None
+ }
+ T![self] if parent_matches::<ast::Name>(&token) => return None,
+ T![ref] => match token.parent().and_then(ast::IdentPat::cast) {
+ Some(ident) if sema.is_unsafe_ident_pat(&ident) => h | HlMod::Unsafe,
+ _ => h,
+ },
+ _ => h,
+ };
+ Some(h)
+}
+
+fn highlight_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
+ binding_hash: &mut Option<u64>,
+ syntactic_name_ref_highlighting: bool,
+ name_ref: ast::NameRef,
+) -> Highlight {
+ let db = sema.db;
+ if let Some(res) = highlight_method_call_by_name_ref(sema, krate, &name_ref) {
+ return res;
+ }
+
+ let name_class = match NameRefClass::classify(sema, &name_ref) {
+ Some(name_kind) => name_kind,
+ None if syntactic_name_ref_highlighting => {
+ return highlight_name_ref_by_syntax(name_ref, sema, krate)
+ }
+ // FIXME: This is required for helper attributes used by proc-macros, as those do not map down
+ // to anything when used.
+ // We can fix this for derive attributes since derive helpers are recorded, but not for
+ // general attributes.
+ None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR) => {
+ return HlTag::Symbol(SymbolKind::Attribute).into();
+ }
+ None => return HlTag::UnresolvedReference.into(),
+ };
+ let mut h = match name_class {
+ NameRefClass::Definition(def) => {
+ if let Definition::Local(local) = &def {
+ let name = local.name(db);
+ let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
+ *binding_hash = Some(calc_binding_hash(&name, *shadow_count))
+ };
+
+ let mut h = highlight_def(sema, krate, def);
+
+ match def {
+ Definition::Local(local) if is_consumed_lvalue(name_ref.syntax(), &local, db) => {
+ h |= HlMod::Consuming;
+ }
+ Definition::Trait(trait_) if trait_.is_unsafe(db) => {
+ if ast::Impl::for_trait_name_ref(&name_ref)
+ .map_or(false, |impl_| impl_.unsafe_token().is_some())
+ {
+ h |= HlMod::Unsafe;
+ }
+ }
+ Definition::Field(field) => {
+ if let Some(parent) = name_ref.syntax().parent() {
+ if matches!(parent.kind(), FIELD_EXPR | RECORD_PAT_FIELD) {
+ if let hir::VariantDef::Union(_) = field.parent_def(db) {
+ h |= HlMod::Unsafe;
+ }
+ }
+ }
+ }
+ Definition::Macro(_) => {
+ if let Some(macro_call) =
+ ide_db::syntax_helpers::node_ext::full_path_of_name_ref(&name_ref)
+ .and_then(|it| it.syntax().parent().and_then(ast::MacroCall::cast))
+ {
+ if sema.is_unsafe_macro_call(&macro_call) {
+ h |= HlMod::Unsafe;
+ }
+ }
+ }
+ _ => (),
+ }
+
+ h
+ }
+ NameRefClass::FieldShorthand { .. } => SymbolKind::Field.into(),
+ };
+
+ h.tag = match name_ref.token_kind() {
+ T![Self] => HlTag::Symbol(SymbolKind::SelfType),
+ T![self] => HlTag::Symbol(SymbolKind::SelfParam),
+ T![super] | T![crate] => HlTag::Keyword,
+ _ => h.tag,
+ };
+ h
+}
+
+fn highlight_name(
+ sema: &Semantics<'_, RootDatabase>,
+ bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
+ binding_hash: &mut Option<u64>,
+ krate: hir::Crate,
+ name: ast::Name,
+) -> Highlight {
+ let name_kind = NameClass::classify(sema, &name);
+ if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind {
+ let name = local.name(sema.db);
+ let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
+ *shadow_count += 1;
+ *binding_hash = Some(calc_binding_hash(&name, *shadow_count))
+ };
+ match name_kind {
+ Some(NameClass::Definition(def)) => {
+ let mut h = highlight_def(sema, krate, def) | HlMod::Definition;
+ if let Definition::Trait(trait_) = &def {
+ if trait_.is_unsafe(sema.db) {
+ h |= HlMod::Unsafe;
+ }
+ }
+ h
+ }
+ Some(NameClass::ConstReference(def)) => highlight_def(sema, krate, def),
+ Some(NameClass::PatFieldShorthand { field_ref, .. }) => {
+ let mut h = HlTag::Symbol(SymbolKind::Field).into();
+ if let hir::VariantDef::Union(_) = field_ref.parent_def(sema.db) {
+ h |= HlMod::Unsafe;
+ }
+ h
+ }
+ None => highlight_name_by_syntax(name) | HlMod::Definition,
+ }
+}
+
+fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 {
+ fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
+ use std::{collections::hash_map::DefaultHasher, hash::Hasher};
+
+ let mut hasher = DefaultHasher::new();
+ x.hash(&mut hasher);
+ hasher.finish()
+ }
+
+ hash((name, shadow_count))
+}
+
+fn highlight_def(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ def: Definition,
+) -> Highlight {
+ let db = sema.db;
+ let mut h = match def {
+ Definition::Macro(m) => Highlight::new(HlTag::Symbol(m.kind(sema.db).into())),
+ Definition::Field(_) => Highlight::new(HlTag::Symbol(SymbolKind::Field)),
+ Definition::Module(module) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Module));
+ if module.is_crate_root(db) {
+ h |= HlMod::CrateRoot;
+ }
+ h
+ }
+ Definition::Function(func) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Function));
+ if let Some(item) = func.as_assoc_item(db) {
+ h |= HlMod::Associated;
+ match func.self_param(db) {
+ Some(sp) => match sp.access(db) {
+ hir::Access::Exclusive => {
+ h |= HlMod::Mutable;
+ h |= HlMod::Reference;
+ }
+ hir::Access::Shared => h |= HlMod::Reference,
+ hir::Access::Owned => h |= HlMod::Consuming,
+ },
+ None => h |= HlMod::Static,
+ }
+
+ match item.container(db) {
+ hir::AssocItemContainer::Impl(i) => {
+ if i.trait_(db).is_some() {
+ h |= HlMod::Trait;
+ }
+ }
+ hir::AssocItemContainer::Trait(_t) => {
+ h |= HlMod::Trait;
+ }
+ }
+ }
+
+ if func.is_unsafe_to_call(db) {
+ h |= HlMod::Unsafe;
+ }
+ if func.is_async(db) {
+ h |= HlMod::Async;
+ }
+
+ h
+ }
+ Definition::Adt(adt) => {
+ let h = match adt {
+ hir::Adt::Struct(_) => HlTag::Symbol(SymbolKind::Struct),
+ hir::Adt::Enum(_) => HlTag::Symbol(SymbolKind::Enum),
+ hir::Adt::Union(_) => HlTag::Symbol(SymbolKind::Union),
+ };
+
+ Highlight::new(h)
+ }
+ Definition::Variant(_) => Highlight::new(HlTag::Symbol(SymbolKind::Variant)),
+ Definition::Const(konst) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Const));
+
+ if let Some(item) = konst.as_assoc_item(db) {
+ h |= HlMod::Associated;
+ match item.container(db) {
+ hir::AssocItemContainer::Impl(i) => {
+ if i.trait_(db).is_some() {
+ h |= HlMod::Trait;
+ }
+ }
+ hir::AssocItemContainer::Trait(_t) => {
+ h |= HlMod::Trait;
+ }
+ }
+ }
+
+ h
+ }
+ Definition::Trait(_) => Highlight::new(HlTag::Symbol(SymbolKind::Trait)),
+ Definition::TypeAlias(type_) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::TypeAlias));
+
+ if let Some(item) = type_.as_assoc_item(db) {
+ h |= HlMod::Associated;
+ match item.container(db) {
+ hir::AssocItemContainer::Impl(i) => {
+ if i.trait_(db).is_some() {
+ h |= HlMod::Trait;
+ }
+ }
+ hir::AssocItemContainer::Trait(_t) => {
+ h |= HlMod::Trait;
+ }
+ }
+ }
+
+ h
+ }
+ Definition::BuiltinType(_) => Highlight::new(HlTag::BuiltinType),
+ Definition::Static(s) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Static));
+
+ if s.is_mut(db) {
+ h |= HlMod::Mutable;
+ h |= HlMod::Unsafe;
+ }
+
+ h
+ }
+ Definition::SelfType(_) => Highlight::new(HlTag::Symbol(SymbolKind::Impl)),
+ Definition::GenericParam(it) => match it {
+ hir::GenericParam::TypeParam(_) => Highlight::new(HlTag::Symbol(SymbolKind::TypeParam)),
+ hir::GenericParam::ConstParam(_) => {
+ Highlight::new(HlTag::Symbol(SymbolKind::ConstParam))
+ }
+ hir::GenericParam::LifetimeParam(_) => {
+ Highlight::new(HlTag::Symbol(SymbolKind::LifetimeParam))
+ }
+ },
+ Definition::Local(local) => {
+ let tag = if local.is_self(db) {
+ HlTag::Symbol(SymbolKind::SelfParam)
+ } else if local.is_param(db) {
+ HlTag::Symbol(SymbolKind::ValueParam)
+ } else {
+ HlTag::Symbol(SymbolKind::Local)
+ };
+ let mut h = Highlight::new(tag);
+ let ty = local.ty(db);
+ if local.is_mut(db) || ty.is_mutable_reference() {
+ h |= HlMod::Mutable;
+ }
+ if local.is_ref(db) || ty.is_reference() {
+ h |= HlMod::Reference;
+ }
+ if ty.as_callable(db).is_some() || ty.impls_fnonce(db) {
+ h |= HlMod::Callable;
+ }
+ h
+ }
+ Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)),
+ Definition::BuiltinAttr(_) => Highlight::new(HlTag::Symbol(SymbolKind::BuiltinAttr)),
+ Definition::ToolModule(_) => Highlight::new(HlTag::Symbol(SymbolKind::ToolModule)),
+ Definition::DeriveHelper(_) => Highlight::new(HlTag::Symbol(SymbolKind::DeriveHelper)),
+ };
+
+ let def_crate = def.krate(db);
+ let is_from_other_crate = def_crate != Some(krate);
+ let is_from_builtin_crate = def_crate.map_or(false, |def_crate| def_crate.is_builtin(db));
+ let is_builtin_type = matches!(def, Definition::BuiltinType(_));
+ let is_public = def.visibility(db) == Some(hir::Visibility::Public);
+
+ match (is_from_other_crate, is_builtin_type, is_public) {
+ (true, false, _) => h |= HlMod::Library,
+ (false, _, true) => h |= HlMod::Public,
+ _ => {}
+ }
+
+ if is_from_builtin_crate {
+ h |= HlMod::DefaultLibrary;
+ }
+
+ h
+}
+
+fn highlight_method_call_by_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ name_ref: &ast::NameRef,
+) -> Option<Highlight> {
+ let mc = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?;
+ highlight_method_call(sema, krate, &mc)
+}
+
+fn highlight_method_call(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ method_call: &ast::MethodCallExpr,
+) -> Option<Highlight> {
+ let func = sema.resolve_method_call(method_call)?;
+
+ let mut h = SymbolKind::Function.into();
+ h |= HlMod::Associated;
+
+ if func.is_unsafe_to_call(sema.db) || sema.is_unsafe_method_call(method_call) {
+ h |= HlMod::Unsafe;
+ }
+ if func.is_async(sema.db) {
+ h |= HlMod::Async;
+ }
+ if func
+ .as_assoc_item(sema.db)
+ .and_then(|it| it.containing_trait_or_trait_impl(sema.db))
+ .is_some()
+ {
+ h |= HlMod::Trait;
+ }
+
+ let def_crate = func.module(sema.db).krate();
+ let is_from_other_crate = def_crate != krate;
+ let is_from_builtin_crate = def_crate.is_builtin(sema.db);
+ let is_public = func.visibility(sema.db) == hir::Visibility::Public;
+
+ if is_from_other_crate {
+ h |= HlMod::Library;
+ } else if is_public {
+ h |= HlMod::Public;
+ }
+
+ if is_from_builtin_crate {
+ h |= HlMod::DefaultLibrary;
+ }
+
+ if let Some(self_param) = func.self_param(sema.db) {
+ match self_param.access(sema.db) {
+ hir::Access::Shared => h |= HlMod::Reference,
+ hir::Access::Exclusive => {
+ h |= HlMod::Mutable;
+ h |= HlMod::Reference;
+ }
+ hir::Access::Owned => {
+ if let Some(receiver_ty) =
+ method_call.receiver().and_then(|it| sema.type_of_expr(&it))
+ {
+ if !receiver_ty.adjusted().is_copy(sema.db) {
+ h |= HlMod::Consuming
+ }
+ }
+ }
+ }
+ }
+ Some(h)
+}
+
+fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
+ let default = HlTag::UnresolvedReference;
+
+ let parent = match name.syntax().parent() {
+ Some(it) => it,
+ _ => return default.into(),
+ };
+
+ let tag = match parent.kind() {
+ STRUCT => SymbolKind::Struct,
+ ENUM => SymbolKind::Enum,
+ VARIANT => SymbolKind::Variant,
+ UNION => SymbolKind::Union,
+ TRAIT => SymbolKind::Trait,
+ TYPE_ALIAS => SymbolKind::TypeAlias,
+ TYPE_PARAM => SymbolKind::TypeParam,
+ RECORD_FIELD => SymbolKind::Field,
+ MODULE => SymbolKind::Module,
+ FN => SymbolKind::Function,
+ CONST => SymbolKind::Const,
+ STATIC => SymbolKind::Static,
+ IDENT_PAT => SymbolKind::Local,
+ _ => return default.into(),
+ };
+
+ tag.into()
+}
+
+fn highlight_name_ref_by_syntax(
+ name: ast::NameRef,
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+) -> Highlight {
+ let default = HlTag::UnresolvedReference;
+
+ let parent = match name.syntax().parent() {
+ Some(it) => it,
+ _ => return default.into(),
+ };
+
+ match parent.kind() {
+ METHOD_CALL_EXPR => ast::MethodCallExpr::cast(parent)
+ .and_then(|it| highlight_method_call(sema, krate, &it))
+ .unwrap_or_else(|| SymbolKind::Function.into()),
+ FIELD_EXPR => {
+ let h = HlTag::Symbol(SymbolKind::Field);
+ let is_union = ast::FieldExpr::cast(parent)
+ .and_then(|field_expr| sema.resolve_field(&field_expr))
+ .map_or(false, |field| {
+ matches!(field.parent_def(sema.db), hir::VariantDef::Union(_))
+ });
+ if is_union {
+ h | HlMod::Unsafe
+ } else {
+ h.into()
+ }
+ }
+ PATH_SEGMENT => {
+ let name_based_fallback = || {
+ if name.text().chars().next().unwrap_or_default().is_uppercase() {
+ SymbolKind::Struct.into()
+ } else {
+ SymbolKind::Module.into()
+ }
+ };
+ let path = match parent.parent().and_then(ast::Path::cast) {
+ Some(it) => it,
+ _ => return name_based_fallback(),
+ };
+ let expr = match path.syntax().parent() {
+ Some(parent) => match_ast! {
+ match parent {
+ ast::PathExpr(path) => path,
+ ast::MacroCall(_) => return SymbolKind::Macro.into(),
+ _ => return name_based_fallback(),
+ }
+ },
+ // within path, decide whether it is module or adt by checking for uppercase name
+ None => return name_based_fallback(),
+ };
+ let parent = match expr.syntax().parent() {
+ Some(it) => it,
+ None => return default.into(),
+ };
+
+ match parent.kind() {
+ CALL_EXPR => SymbolKind::Function.into(),
+ _ => if name.text().chars().next().unwrap_or_default().is_uppercase() {
+ SymbolKind::Struct
+ } else {
+ SymbolKind::Const
+ }
+ .into(),
+ }
+ }
+ _ => default.into(),
+ }
+}
+
+fn is_consumed_lvalue(node: &SyntaxNode, local: &hir::Local, db: &RootDatabase) -> bool {
+ // When lvalues are passed as arguments and they're not Copy, then mark them as Consuming.
+ parents_match(node.clone().into(), &[PATH_SEGMENT, PATH, PATH_EXPR, ARG_LIST])
+ && !local.ty(db).is_copy(db)
+}
+
+/// Returns true if the parent nodes of `node` all match the `SyntaxKind`s in `kinds` exactly.
+fn parents_match(mut node: NodeOrToken<SyntaxNode, SyntaxToken>, mut kinds: &[SyntaxKind]) -> bool {
+ while let (Some(parent), [kind, rest @ ..]) = (&node.parent(), kinds) {
+ if parent.kind() != *kind {
+ return false;
+ }
+
+ // FIXME: Would be nice to get parent out of the match, but binding by-move and by-value
+ // in the same pattern is unstable: rust-lang/rust#68354.
+ node = node.parent().unwrap().into();
+ kinds = rest;
+ }
+
+ // Only true if we matched all expected kinds
+ kinds.is_empty()
+}
+
+fn parent_matches<N: AstNode>(token: &SyntaxToken) -> bool {
+ token.parent().map_or(false, |it| N::can_cast(it.kind()))
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs
new file mode 100644
index 000000000..340290eaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs
@@ -0,0 +1,92 @@
+//! Collects a tree of highlighted ranges and flattens it.
+use std::iter;
+
+use stdx::equal_range_by;
+use syntax::TextRange;
+
+use crate::{HlRange, HlTag};
+
+pub(super) struct Highlights {
+ root: Node,
+}
+
+struct Node {
+ hl_range: HlRange,
+ nested: Vec<Node>,
+}
+
+impl Highlights {
+ pub(super) fn new(range: TextRange) -> Highlights {
+ Highlights {
+ root: Node::new(HlRange { range, highlight: HlTag::None.into(), binding_hash: None }),
+ }
+ }
+
+ pub(super) fn add(&mut self, hl_range: HlRange) {
+ self.root.add(hl_range);
+ }
+
+ pub(super) fn to_vec(&self) -> Vec<HlRange> {
+ let mut res = Vec::new();
+ self.root.flatten(&mut res);
+ res
+ }
+}
+
+impl Node {
+ fn new(hl_range: HlRange) -> Node {
+ Node { hl_range, nested: Vec::new() }
+ }
+
+ fn add(&mut self, hl_range: HlRange) {
+ assert!(self.hl_range.range.contains_range(hl_range.range));
+
+ // Fast path
+ if let Some(last) = self.nested.last_mut() {
+ if last.hl_range.range.contains_range(hl_range.range) {
+ return last.add(hl_range);
+ }
+ if last.hl_range.range.end() <= hl_range.range.start() {
+ return self.nested.push(Node::new(hl_range));
+ }
+ }
+
+ let overlapping =
+ equal_range_by(&self.nested, |n| TextRange::ordering(n.hl_range.range, hl_range.range));
+
+ if overlapping.len() == 1
+ && self.nested[overlapping.start].hl_range.range.contains_range(hl_range.range)
+ {
+ return self.nested[overlapping.start].add(hl_range);
+ }
+
+ let nested = self
+ .nested
+ .splice(overlapping.clone(), iter::once(Node::new(hl_range)))
+ .collect::<Vec<_>>();
+ self.nested[overlapping.start].nested = nested;
+ }
+
+ fn flatten(&self, acc: &mut Vec<HlRange>) {
+ let mut start = self.hl_range.range.start();
+ let mut nested = self.nested.iter();
+ loop {
+ let next = nested.next();
+ let end = next.map_or(self.hl_range.range.end(), |it| it.hl_range.range.start());
+ if start < end {
+ acc.push(HlRange {
+ range: TextRange::new(start, end),
+ highlight: self.hl_range.highlight,
+ binding_hash: self.hl_range.binding_hash,
+ });
+ }
+ start = match next {
+ Some(child) => {
+ child.flatten(acc);
+ child.hl_range.range.end()
+ }
+ None => break,
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
new file mode 100644
index 000000000..9777c014c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -0,0 +1,97 @@
+//! Renders a bit of code as HTML.
+
+use ide_db::base_db::SourceDatabase;
+use oorandom::Rand32;
+use stdx::format_to;
+use syntax::AstNode;
+
+use crate::{syntax_highlighting::highlight, FileId, RootDatabase};
+
+pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
+ let parse = db.parse(file_id);
+
+ fn rainbowify(seed: u64) -> String {
+ let mut rng = Rand32::new(seed);
+ format!(
+ "hsl({h},{s}%,{l}%)",
+ h = rng.rand_range(0..361),
+ s = rng.rand_range(42..99),
+ l = rng.rand_range(40..91),
+ )
+ }
+
+ let hl_ranges = highlight(db, file_id, None, false);
+ let text = parse.tree().syntax().to_string();
+ let mut buf = String::new();
+ buf.push_str(STYLE);
+ buf.push_str("<pre><code>");
+ for r in &hl_ranges {
+ let chunk = html_escape(&text[r.range]);
+ if r.highlight.is_empty() {
+ format_to!(buf, "{}", chunk);
+ continue;
+ }
+
+ let class = r.highlight.to_string().replace('.', " ");
+ let color = match (rainbow, r.binding_hash) {
+ (true, Some(hash)) => {
+ format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
+ }
+ _ => "".into(),
+ };
+ format_to!(buf, "<span class=\"{}\"{}>{}</span>", class, color, chunk);
+ }
+ buf.push_str("</code></pre>");
+ buf
+}
+
+//FIXME: like, real html escaping
+fn html_escape(text: &str) -> String {
+ text.replace('<', "&lt;").replace('>', "&gt;")
+}
+
+const STYLE: &str = "
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+";
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
new file mode 100644
index 000000000..f376f9fda
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -0,0 +1,279 @@
+//! "Recursive" Syntax highlighting for code in doctests and fixtures.
+
+use std::mem;
+
+use either::Either;
+use hir::{InFile, Semantics};
+use ide_db::{
+ active_parameter::ActiveParameter, base_db::FileId, defs::Definition, rust_doc::is_rust_fence,
+ SymbolKind,
+};
+use syntax::{
+ ast::{self, AstNode, IsString, QuoteOffsets},
+ AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
+ syntax_highlighting::{highlights::Highlights, injector::Injector},
+ Analysis, HlMod, HlRange, HlTag, RootDatabase,
+};
+
+pub(super) fn ra_fixture(
+ hl: &mut Highlights,
+ sema: &Semantics<'_, RootDatabase>,
+ literal: &ast::String,
+ expanded: &ast::String,
+) -> Option<()> {
+ let active_parameter = ActiveParameter::at_token(sema, expanded.syntax().clone())?;
+ if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) {
+ return None;
+ }
+ let value = literal.value()?;
+
+ if let Some(range) = literal.open_quote_text_range() {
+ hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
+ }
+
+ let mut inj = Injector::default();
+
+ let mut text = &*value;
+ let mut offset: TextSize = 0.into();
+
+ while !text.is_empty() {
+ let marker = "$0";
+ let idx = text.find(marker).unwrap_or(text.len());
+ let (chunk, next) = text.split_at(idx);
+ inj.add(chunk, TextRange::at(offset, TextSize::of(chunk)));
+
+ text = next;
+ offset += TextSize::of(chunk);
+
+ if let Some(next) = text.strip_prefix(marker) {
+ if let Some(range) = literal.map_range_up(TextRange::at(offset, TextSize::of(marker))) {
+ hl.add(HlRange { range, highlight: HlTag::Keyword.into(), binding_hash: None });
+ }
+
+ text = next;
+
+ let marker_len = TextSize::of(marker);
+ offset += marker_len;
+ }
+ }
+
+ let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
+
+ for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
+ for range in inj.map_range_up(hl_range.range) {
+ if let Some(range) = literal.map_range_up(range) {
+ hl_range.range = range;
+ hl.add(hl_range);
+ }
+ }
+ }
+
+ if let Some(range) = literal.close_quote_text_range() {
+ hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
+ }
+
+ Some(())
+}
+
+const RUSTDOC_FENCE_LENGTH: usize = 3;
+const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
+
+/// Injection of syntax highlighting of doctests and intra doc links.
+pub(super) fn doc_comment(
+ hl: &mut Highlights,
+ sema: &Semantics<'_, RootDatabase>,
+ src_file_id: FileId,
+ node: &SyntaxNode,
+) {
+ let (attributes, def) = match doc_attributes(sema, node) {
+ Some(it) => it,
+ None => return,
+ };
+ let src_file_id = src_file_id.into();
+
+ // Extract intra-doc links and emit highlights for them.
+ if let Some((docs, doc_mapping)) = attributes.docs_with_rangemap(sema.db) {
+ extract_definitions_from_docs(&docs)
+ .into_iter()
+ .filter_map(|(range, link, ns)| {
+ doc_mapping.map(range).filter(|mapping| mapping.file_id == src_file_id).and_then(
+ |InFile { value: mapped_range, .. }| {
+ Some(mapped_range).zip(resolve_doc_path_for_def(sema.db, def, &link, ns))
+ },
+ )
+ })
+ .for_each(|(range, def)| {
+ hl.add(HlRange {
+ range,
+ highlight: module_def_to_hl_tag(def)
+ | HlMod::Documentation
+ | HlMod::Injected
+ | HlMod::IntraDocLink,
+ binding_hash: None,
+ })
+ });
+ }
+
+ // Extract doc-test sources from the docs and calculate highlighting for them.
+
+ let mut inj = Injector::default();
+ inj.add_unmapped("fn doctest() {\n");
+
+ let attrs_source_map = attributes.source_map(sema.db);
+
+ let mut is_codeblock = false;
+ let mut is_doctest = false;
+
+ let mut new_comments = Vec::new();
+ let mut string;
+
+ for attr in attributes.by_key("doc").attrs() {
+ let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
+ if file_id != src_file_id {
+ continue;
+ }
+ let (line, range) = match &src {
+ Either::Left(it) => {
+ string = match find_doc_string_in_attr(attr, it) {
+ Some(it) => it,
+ None => continue,
+ };
+ let text = string.text();
+ let text_range = string.syntax().text_range();
+ match string.quote_offsets() {
+ Some(QuoteOffsets { contents, .. }) => {
+ (&text[contents - text_range.start()], contents)
+ }
+ None => (text, text_range),
+ }
+ }
+ Either::Right(comment) => {
+ let value = comment.prefix().len();
+ let range = comment.syntax().text_range();
+ (
+ &comment.text()[value..],
+ TextRange::new(range.start() + TextSize::try_from(value).unwrap(), range.end()),
+ )
+ }
+ };
+
+ let mut range_start = range.start();
+ for line in line.split('\n') {
+ let line_len = TextSize::from(line.len() as u32);
+ let prev_range_start = {
+ let next_range_start = range_start + line_len + TextSize::from(1);
+ mem::replace(&mut range_start, next_range_start)
+ };
+ let mut pos = TextSize::from(0);
+
+ match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
+ Some(idx) => {
+ is_codeblock = !is_codeblock;
+ // Check whether code is rust by inspecting fence guards
+ let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
+ let is_rust = is_rust_fence(guards);
+ is_doctest = is_codeblock && is_rust;
+ continue;
+ }
+ None if !is_doctest => continue,
+ None => (),
+ }
+
+ // whitespace after comment is ignored
+ if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
+ pos += TextSize::of(ws);
+ }
+ // lines marked with `#` should be ignored in output, we skip the `#` char
+ if line[pos.into()..].starts_with('#') {
+ pos += TextSize::of('#');
+ }
+
+ new_comments.push(TextRange::at(prev_range_start, pos));
+ inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
+ inj.add_unmapped("\n");
+ }
+ }
+
+ if new_comments.is_empty() {
+ return; // no need to run an analysis on an empty file
+ }
+
+ inj.add_unmapped("\n}");
+
+ let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
+
+ if let Ok(ranges) = analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)) {
+ for HlRange { range, highlight, binding_hash } in ranges {
+ for range in inj.map_range_up(range) {
+ hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash });
+ }
+ }
+ }
+
+ for range in new_comments {
+ hl.add(HlRange {
+ range,
+ highlight: HlTag::Comment | HlMod::Documentation,
+ binding_hash: None,
+ });
+ }
+}
+
+fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::String> {
+ match it.expr() {
+ // #[doc = lit]
+ Some(ast::Expr::Literal(lit)) => match lit.kind() {
+ ast::LiteralKind::String(it) => Some(it),
+ _ => None,
+ },
+ // #[cfg_attr(..., doc = "", ...)]
+ None => {
+ // We gotta hunt the string token manually here
+ let text = attr.string_value()?;
+ // FIXME: We just pick the first string literal that has the same text as the doc attribute
+ // This means technically we might highlight the wrong one
+ it.syntax()
+ .descendants_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .filter_map(ast::String::cast)
+ .find(|string| {
+ string.text().get(1..string.text().len() - 1).map_or(false, |it| it == text)
+ })
+ }
+ _ => None,
+ }
+}
+
+fn module_def_to_hl_tag(def: Definition) -> HlTag {
+ let symbol = match def {
+ Definition::Module(_) => SymbolKind::Module,
+ Definition::Function(_) => SymbolKind::Function,
+ Definition::Adt(hir::Adt::Struct(_)) => SymbolKind::Struct,
+ Definition::Adt(hir::Adt::Enum(_)) => SymbolKind::Enum,
+ Definition::Adt(hir::Adt::Union(_)) => SymbolKind::Union,
+ Definition::Variant(_) => SymbolKind::Variant,
+ Definition::Const(_) => SymbolKind::Const,
+ Definition::Static(_) => SymbolKind::Static,
+ Definition::Trait(_) => SymbolKind::Trait,
+ Definition::TypeAlias(_) => SymbolKind::TypeAlias,
+ Definition::BuiltinType(_) => return HlTag::BuiltinType,
+ Definition::Macro(_) => SymbolKind::Macro,
+ Definition::Field(_) => SymbolKind::Field,
+ Definition::SelfType(_) => SymbolKind::Impl,
+ Definition::Local(_) => SymbolKind::Local,
+ Definition::GenericParam(gp) => match gp {
+ hir::GenericParam::TypeParam(_) => SymbolKind::TypeParam,
+ hir::GenericParam::ConstParam(_) => SymbolKind::ConstParam,
+ hir::GenericParam::LifetimeParam(_) => SymbolKind::LifetimeParam,
+ },
+ Definition::Label(_) => SymbolKind::Label,
+ Definition::BuiltinAttr(_) => SymbolKind::BuiltinAttr,
+ Definition::ToolModule(_) => SymbolKind::ToolModule,
+ Definition::DeriveHelper(_) => SymbolKind::DeriveHelper,
+ };
+ HlTag::Symbol(symbol)
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs
new file mode 100644
index 000000000..a902fd717
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs
@@ -0,0 +1,81 @@
+//! Extracts a subsequence of a text document, remembering the mapping of ranges
+//! between original and extracted texts.
+use std::ops::{self, Sub};
+
+use stdx::equal_range_by;
+use syntax::{TextRange, TextSize};
+
+#[derive(Default)]
+pub(super) struct Injector {
+ buf: String,
+ ranges: Vec<(TextRange, Option<Delta<TextSize>>)>,
+}
+
+impl Injector {
+ pub(super) fn add(&mut self, text: &str, source_range: TextRange) {
+ let len = TextSize::of(text);
+ assert_eq!(len, source_range.len());
+ self.add_impl(text, Some(source_range.start()));
+ }
+
+ pub(super) fn add_unmapped(&mut self, text: &str) {
+ self.add_impl(text, None);
+ }
+
+ fn add_impl(&mut self, text: &str, source: Option<TextSize>) {
+ let len = TextSize::of(text);
+ let target_range = TextRange::at(TextSize::of(&self.buf), len);
+ self.ranges.push((target_range, source.map(|it| Delta::new(target_range.start(), it))));
+ self.buf.push_str(text);
+ }
+
+ pub(super) fn take_text(&mut self) -> String {
+ std::mem::take(&mut self.buf)
+ }
+
+ pub(super) fn map_range_up(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
+ equal_range_by(&self.ranges, |&(r, _)| TextRange::ordering(r, range)).filter_map(move |i| {
+ let (target_range, delta) = self.ranges[i];
+ let intersection = target_range.intersect(range).unwrap();
+ Some(intersection + delta?)
+ })
+ }
+}
+
+#[derive(Clone, Copy)]
+enum Delta<T> {
+ Add(T),
+ Sub(T),
+}
+
+impl<T> Delta<T> {
+ fn new(from: T, to: T) -> Delta<T>
+ where
+ T: Ord + Sub<Output = T>,
+ {
+ if to >= from {
+ Delta::Add(to - from)
+ } else {
+ Delta::Sub(from - to)
+ }
+ }
+}
+
+impl ops::Add<Delta<TextSize>> for TextSize {
+ type Output = TextSize;
+
+ fn add(self, rhs: Delta<TextSize>) -> TextSize {
+ match rhs {
+ Delta::Add(it) => self + it,
+ Delta::Sub(it) => self - it,
+ }
+ }
+}
+
+impl ops::Add<Delta<TextSize>> for TextRange {
+ type Output = TextRange;
+
+ fn add(self, rhs: Delta<TextSize>) -> TextRange {
+ TextRange::at(self.start() + rhs, self.len())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs
new file mode 100644
index 000000000..1099d9c23
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs
@@ -0,0 +1,128 @@
+//! Syntax highlighting for macro_rules!.
+use syntax::{SyntaxKind, SyntaxToken, TextRange, T};
+
+use crate::{HlRange, HlTag};
+
+#[derive(Default)]
+pub(super) struct MacroHighlighter {
+ state: Option<MacroMatcherParseState>,
+}
+
+impl MacroHighlighter {
+ pub(super) fn init(&mut self) {
+ self.state = Some(MacroMatcherParseState::default());
+ }
+
+ pub(super) fn advance(&mut self, token: &SyntaxToken) {
+ if let Some(state) = self.state.as_mut() {
+ update_macro_state(state, token);
+ }
+ }
+
+ pub(super) fn highlight(&self, token: &SyntaxToken) -> Option<HlRange> {
+ if let Some(state) = self.state.as_ref() {
+ if matches!(state.rule_state, RuleState::Matcher | RuleState::Expander) {
+ if let Some(range) = is_metavariable(token) {
+ return Some(HlRange {
+ range,
+ highlight: HlTag::UnresolvedReference.into(),
+ binding_hash: None,
+ });
+ }
+ }
+ }
+ None
+ }
+}
+
+struct MacroMatcherParseState {
+ /// Opening and corresponding closing bracket of the matcher or expander of the current rule
+ paren_ty: Option<(SyntaxKind, SyntaxKind)>,
+ paren_level: usize,
+ rule_state: RuleState,
+ /// Whether we are inside the outer `{` `}` macro block that holds the rules
+ in_invoc_body: bool,
+}
+
+impl Default for MacroMatcherParseState {
+ fn default() -> Self {
+ MacroMatcherParseState {
+ paren_ty: None,
+ paren_level: 0,
+ in_invoc_body: false,
+ rule_state: RuleState::None,
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+enum RuleState {
+ Matcher,
+ Expander,
+ Between,
+ None,
+}
+
+impl RuleState {
+ fn transition(&mut self) {
+ *self = match self {
+ RuleState::Matcher => RuleState::Between,
+ RuleState::Expander => RuleState::None,
+ RuleState::Between => RuleState::Expander,
+ RuleState::None => RuleState::Matcher,
+ };
+ }
+}
+
+fn update_macro_state(state: &mut MacroMatcherParseState, tok: &SyntaxToken) {
+ if !state.in_invoc_body {
+ if tok.kind() == T!['{'] || tok.kind() == T!['('] {
+ state.in_invoc_body = true;
+ }
+ return;
+ }
+
+ match state.paren_ty {
+ Some((open, close)) => {
+ if tok.kind() == open {
+ state.paren_level += 1;
+ } else if tok.kind() == close {
+ state.paren_level -= 1;
+ if state.paren_level == 0 {
+ state.rule_state.transition();
+ state.paren_ty = None;
+ }
+ }
+ }
+ None => {
+ match tok.kind() {
+ T!['('] => {
+ state.paren_ty = Some((T!['('], T![')']));
+ }
+ T!['{'] => {
+ state.paren_ty = Some((T!['{'], T!['}']));
+ }
+ T!['['] => {
+ state.paren_ty = Some((T!['['], T![']']));
+ }
+ _ => (),
+ }
+ if state.paren_ty.is_some() {
+ state.paren_level = 1;
+ state.rule_state.transition();
+ }
+ }
+ }
+}
+
+fn is_metavariable(token: &SyntaxToken) -> Option<TextRange> {
+ match token.kind() {
+ kind if kind == SyntaxKind::IDENT || kind.is_keyword() => {
+ if let Some(_dollar) = token.prev_token().filter(|t| t.kind() == T![$]) {
+ return Some(token.text_range());
+ }
+ }
+ _ => (),
+ };
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
new file mode 100644
index 000000000..5262770f3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
@@ -0,0 +1,340 @@
+//! Defines token tags we use for syntax highlighting.
+//! A tag is not unlike a CSS class.
+
+use std::{
+ fmt::{self, Write},
+ ops,
+};
+
+use ide_db::SymbolKind;
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Highlight {
+ pub tag: HlTag,
+ pub mods: HlMods,
+}
+
+#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct HlMods(u32);
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum HlTag {
+ Symbol(SymbolKind),
+
+ AttributeBracket,
+ BoolLiteral,
+ BuiltinType,
+ ByteLiteral,
+ CharLiteral,
+ Comment,
+ EscapeSequence,
+ FormatSpecifier,
+ Keyword,
+ NumericLiteral,
+ Operator(HlOperator),
+ Punctuation(HlPunct),
+ StringLiteral,
+ UnresolvedReference,
+
+ // For things which don't have a specific highlight.
+ None,
+}
+
+// Don't forget to adjust the feature description in crates/ide/src/syntax_highlighting.rs.
+// And make sure to use the lsp strings used when converting to the protocol in crates\rust-analyzer\src\semantic_tokens.rs, not the names of the variants here.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+#[repr(u8)]
+pub enum HlMod {
+ /// Used for items in traits and impls.
+ Associated = 0,
+ /// Used with keywords like `async` and `await`.
+ Async,
+ /// Used to differentiate individual elements within attributes.
+ Attribute,
+ /// Callable item or value.
+ Callable,
+ /// Value that is being consumed in a function call
+ Consuming,
+ /// Used with keywords like `if` and `break`.
+ ControlFlow,
+ /// Used for crate names, like `serde`.
+ CrateRoot,
+ /// Used for items from built-in crates (std, core, alloc, test and proc_macro).
+ DefaultLibrary,
+ /// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is
+ /// not.
+ Definition,
+ /// Doc-strings like this one.
+ Documentation,
+ /// Highlighting injection like rust code in doc strings or ra_fixture.
+ Injected,
+ /// Used for intra doc links in doc injection.
+ IntraDocLink,
+ /// Used for items from other crates.
+ Library,
+ /// Mutable binding.
+ Mutable,
+ /// Used for public items.
+ Public,
+ /// Immutable reference.
+ Reference,
+ /// Used for associated functions.
+ Static,
+ /// Used for items in traits and trait impls.
+ Trait,
+ // Keep this last!
+ /// Used for unsafe functions, unsafe traits, mutable statics, union accesses and unsafe operations.
+ Unsafe,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum HlPunct {
+ /// []
+ Bracket,
+ /// {}
+ Brace,
+ /// ()
+ Parenthesis,
+ /// <>
+ Angle,
+ /// ,
+ Comma,
+ /// .
+ Dot,
+ /// :
+ Colon,
+ /// ;
+ Semi,
+ /// ! (only for macro calls)
+ MacroBang,
+ ///
+ Other,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum HlOperator {
+ /// |, &, !, ^, |=, &=, ^=
+ Bitwise,
+ /// +, -, *, /, +=, -=, *=, /=
+ Arithmetic,
+ /// &&, ||, !
+ Logical,
+ /// >, <, ==, >=, <=, !=
+ Comparison,
+ ///
+ Other,
+}
+
+impl HlTag {
+ fn as_str(self) -> &'static str {
+ match self {
+ HlTag::Symbol(symbol) => match symbol {
+ SymbolKind::Attribute => "attribute",
+ SymbolKind::BuiltinAttr => "builtin_attr",
+ SymbolKind::Const => "constant",
+ SymbolKind::ConstParam => "const_param",
+ SymbolKind::Derive => "derive",
+ SymbolKind::DeriveHelper => "derive_helper",
+ SymbolKind::Enum => "enum",
+ SymbolKind::Field => "field",
+ SymbolKind::Function => "function",
+ SymbolKind::Impl => "self_type",
+ SymbolKind::Label => "label",
+ SymbolKind::LifetimeParam => "lifetime",
+ SymbolKind::Local => "variable",
+ SymbolKind::Macro => "macro",
+ SymbolKind::Module => "module",
+ SymbolKind::SelfParam => "self_keyword",
+ SymbolKind::SelfType => "self_type_keyword",
+ SymbolKind::Static => "static",
+ SymbolKind::Struct => "struct",
+ SymbolKind::ToolModule => "tool_module",
+ SymbolKind::Trait => "trait",
+ SymbolKind::TypeAlias => "type_alias",
+ SymbolKind::TypeParam => "type_param",
+ SymbolKind::Union => "union",
+ SymbolKind::ValueParam => "value_param",
+ SymbolKind::Variant => "enum_variant",
+ },
+ HlTag::AttributeBracket => "attribute_bracket",
+ HlTag::BoolLiteral => "bool_literal",
+ HlTag::BuiltinType => "builtin_type",
+ HlTag::ByteLiteral => "byte_literal",
+ HlTag::CharLiteral => "char_literal",
+ HlTag::Comment => "comment",
+ HlTag::EscapeSequence => "escape_sequence",
+ HlTag::FormatSpecifier => "format_specifier",
+ HlTag::Keyword => "keyword",
+ HlTag::Punctuation(punct) => match punct {
+ HlPunct::Bracket => "bracket",
+ HlPunct::Brace => "brace",
+ HlPunct::Parenthesis => "parenthesis",
+ HlPunct::Angle => "angle",
+ HlPunct::Comma => "comma",
+ HlPunct::Dot => "dot",
+ HlPunct::Colon => "colon",
+ HlPunct::Semi => "semicolon",
+ HlPunct::MacroBang => "macro_bang",
+ HlPunct::Other => "punctuation",
+ },
+ HlTag::NumericLiteral => "numeric_literal",
+ HlTag::Operator(op) => match op {
+ HlOperator::Bitwise => "bitwise",
+ HlOperator::Arithmetic => "arithmetic",
+ HlOperator::Logical => "logical",
+ HlOperator::Comparison => "comparison",
+ HlOperator::Other => "operator",
+ },
+ HlTag::StringLiteral => "string_literal",
+ HlTag::UnresolvedReference => "unresolved_reference",
+ HlTag::None => "none",
+ }
+ }
+}
+
+impl fmt::Display for HlTag {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl HlMod {
+ const ALL: &'static [HlMod; HlMod::Unsafe as u8 as usize + 1] = &[
+ HlMod::Associated,
+ HlMod::Async,
+ HlMod::Attribute,
+ HlMod::Callable,
+ HlMod::Consuming,
+ HlMod::ControlFlow,
+ HlMod::CrateRoot,
+ HlMod::DefaultLibrary,
+ HlMod::Definition,
+ HlMod::Documentation,
+ HlMod::Injected,
+ HlMod::IntraDocLink,
+ HlMod::Library,
+ HlMod::Mutable,
+ HlMod::Public,
+ HlMod::Reference,
+ HlMod::Static,
+ HlMod::Trait,
+ HlMod::Unsafe,
+ ];
+
+ fn as_str(self) -> &'static str {
+ match self {
+ HlMod::Associated => "associated",
+ HlMod::Async => "async",
+ HlMod::Attribute => "attribute",
+ HlMod::Callable => "callable",
+ HlMod::Consuming => "consuming",
+ HlMod::ControlFlow => "control",
+ HlMod::CrateRoot => "crate_root",
+ HlMod::DefaultLibrary => "default_library",
+ HlMod::Definition => "declaration",
+ HlMod::Documentation => "documentation",
+ HlMod::Injected => "injected",
+ HlMod::IntraDocLink => "intra_doc_link",
+ HlMod::Library => "library",
+ HlMod::Mutable => "mutable",
+ HlMod::Public => "public",
+ HlMod::Reference => "reference",
+ HlMod::Static => "static",
+ HlMod::Trait => "trait",
+ HlMod::Unsafe => "unsafe",
+ }
+ }
+
+ fn mask(self) -> u32 {
+ 1 << (self as u32)
+ }
+}
+
+impl fmt::Display for HlMod {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl fmt::Display for Highlight {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.tag.fmt(f)?;
+ for modifier in self.mods.iter() {
+ f.write_char('.')?;
+ modifier.fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+impl From<HlTag> for Highlight {
+ fn from(tag: HlTag) -> Highlight {
+ Highlight::new(tag)
+ }
+}
+
+impl From<HlOperator> for Highlight {
+ fn from(op: HlOperator) -> Highlight {
+ Highlight::new(HlTag::Operator(op))
+ }
+}
+
+impl From<HlPunct> for Highlight {
+ fn from(punct: HlPunct) -> Highlight {
+ Highlight::new(HlTag::Punctuation(punct))
+ }
+}
+
+impl From<SymbolKind> for Highlight {
+ fn from(sym: SymbolKind) -> Highlight {
+ Highlight::new(HlTag::Symbol(sym))
+ }
+}
+
+impl Highlight {
+ pub(crate) fn new(tag: HlTag) -> Highlight {
+ Highlight { tag, mods: HlMods::default() }
+ }
+ pub fn is_empty(&self) -> bool {
+ self.tag == HlTag::None && self.mods == HlMods::default()
+ }
+}
+
+impl ops::BitOr<HlMod> for HlTag {
+ type Output = Highlight;
+
+ fn bitor(self, rhs: HlMod) -> Highlight {
+ Highlight::new(self) | rhs
+ }
+}
+
+impl ops::BitOrAssign<HlMod> for HlMods {
+ fn bitor_assign(&mut self, rhs: HlMod) {
+ self.0 |= rhs.mask();
+ }
+}
+
+impl ops::BitOrAssign<HlMod> for Highlight {
+ fn bitor_assign(&mut self, rhs: HlMod) {
+ self.mods |= rhs;
+ }
+}
+
+impl ops::BitOr<HlMod> for Highlight {
+ type Output = Highlight;
+
+ fn bitor(mut self, rhs: HlMod) -> Highlight {
+ self |= rhs;
+ self
+ }
+}
+
+impl HlMods {
+ pub fn contains(self, m: HlMod) -> bool {
+ self.0 & m.mask() == m.mask()
+ }
+
+ pub fn iter(self) -> impl Iterator<Item = HlMod> {
+ HlMod::ALL.iter().copied().filter(move |it| self.0 & it.mask() == it.mask())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
new file mode 100644
index 000000000..e07fd3925
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
@@ -0,0 +1,62 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">not_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">struct</span> <span class="struct declaration">foo</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="struct">foo</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public static">is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">trait</span> <span class="trait declaration">t</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration static trait">t_is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">t_is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="trait">t</span> <span class="keyword">for</span> <span class="struct">foo</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public static trait">is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference trait">is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
new file mode 100644
index 000000000..1a4398814
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
@@ -0,0 +1,58 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">allow</span><span class="parenthesis attribute">(</span><span class="none attribute">dead_code</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="tool_module attribute library">rustfmt</span><span class="operator attribute">::</span><span class="tool_module attribute library">skip</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="module attribute crate_root library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">identity</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="comment documentation">/// This is a doc comment</span>
+<span class="comment">// This is a normal comment</span>
+<span class="comment documentation">/// This is a doc comment</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="comment">// This is another normal comment</span>
+<span class="comment documentation">/// This is another doc comment</span>
+<span class="comment">// This is another normal comment</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="comment">// The reason for these being here is to test AttrIds</span>
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span><span class="semicolon">;</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
new file mode 100644
index 000000000..1e4c06df7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
@@ -0,0 +1,66 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root library">foo</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
+
+<span class="keyword">pub</span> <span class="keyword">const</span> <span class="constant declaration public">NINETY_TWO</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">92</span><span class="semicolon">;</span>
+
+<span class="keyword">use</span> <span class="module crate_root library">foo</span> <span class="keyword">as</span> <span class="module crate_root declaration library">foooo</span><span class="semicolon">;</span>
+
+<span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword crate_root public">crate</span><span class="parenthesis">)</span> <span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="module default_library library">iter</span><span class="operator">::</span><span class="function default_library library">repeat</span><span class="parenthesis">(</span><span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">mod</span> <span class="module declaration">bar</span> <span class="brace">{</span>
+ <span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword control">in</span> <span class="keyword crate_root public">super</span><span class="parenthesis">)</span> <span class="keyword">const</span> <span class="constant declaration">FORTY_TWO</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">42</span><span class="semicolon">;</span>
+
+ <span class="keyword">mod</span> <span class="module declaration">baz</span> <span class="brace">{</span>
+ <span class="keyword">use</span> <span class="keyword">super</span><span class="operator">::</span><span class="keyword crate_root public">super</span><span class="operator">::</span><span class="constant public">NINETY_TWO</span><span class="semicolon">;</span>
+ <span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="operator">::</span><span class="module crate_root library">foooo</span><span class="operator">::</span><span class="struct library">Point</span><span class="semicolon">;</span>
+
+ <span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword control">in</span> <span class="keyword">super</span><span class="operator">::</span><span class="keyword crate_root public">super</span><span class="parenthesis">)</span> <span class="keyword">const</span> <span class="constant declaration">TWENTY_NINE</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">29</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
new file mode 100644
index 000000000..5d66f832d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
@@ -0,0 +1,50 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">foo</span> <span class="operator">=</span> <span class="enum_variant default_library library">Some</span><span class="parenthesis">(</span><span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">nums</span> <span class="operator">=</span> <span class="module default_library library">iter</span><span class="operator">::</span><span class="function default_library library">repeat</span><span class="parenthesis">(</span><span class="variable">foo</span><span class="operator">.</span><span class="function associated consuming default_library library">unwrap</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
new file mode 100644
index 000000000..a747b4bc1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -0,0 +1,190 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
+<span class="comment documentation">//! ```</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">//! ```</span>
+
+<span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>
+
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// ```</span>
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
+ <span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// This is an impl with a code block.</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
+<span class="comment documentation">/// ```</span>
+<span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
+ <span class="comment">// KILLER WHALE</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> Ishmael."</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="keyword">pub</span> <span class="keyword">const</span> <span class="constant associated declaration public">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span> <span class="operator">=</span> <span class="bool_literal">true</span><span class="semicolon">;</span>
+
+ <span class="comment documentation">/// Constructs a new `Foo`.</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// # Examples</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> #</span><span class="none injected"> </span><span class="attribute_bracket attribute injected">#</span><span class="attribute_bracket attribute injected">!</span><span class="attribute_bracket attribute injected">[</span><span class="builtin_attr attribute injected library">allow</span><span class="parenthesis attribute injected">(</span><span class="none attribute injected">unused_mut</span><span class="parenthesis attribute injected">)</span><span class="attribute_bracket attribute injected">]</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="keyword injected">mut</span><span class="none injected"> </span><span class="variable declaration injected mutable">foo</span><span class="colon injected">:</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="keyword">pub</span> <span class="keyword">const</span> <span class="keyword">fn</span> <span class="function associated declaration public static">new</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="struct">Foo</span> <span class="brace">{</span>
+ <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">bar</span><span class="colon">:</span> <span class="bool_literal">true</span> <span class="brace">}</span>
+ <span class="brace">}</span>
+
+ <span class="comment documentation">/// `bar` method on `Foo`.</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// # Examples</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">use</span><span class="none injected"> </span><span class="module injected">x</span><span class="operator injected">::</span><span class="module injected">y</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// calls bar on foo</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="none injected">foo</span><span class="operator injected">.</span><span class="none injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">bar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="variable injected">foo</span><span class="operator injected">.</span><span class="field injected">bar</span><span class="none injected"> </span><span class="logical injected">||</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="constant injected">bar</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">/* multi-line</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected"> comment */</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected reference">multi_line_string</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Foo</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> bar</span><span class="escape_sequence injected">\n</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> "</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```rust,no_run</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="function injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ~~~rust,no_run</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// code block with tilde.</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="function injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// ~~~</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// functions</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">T</span><span class="comma injected">,</span><span class="none injected"> </span><span class="keyword injected">const</span><span class="none injected"> </span><span class="const_param declaration injected">X</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">usize</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="value_param declaration injected">arg</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">i32</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="colon injected">:</span><span class="none injected"> </span><span class="type_param injected">T</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="const_param injected">X</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```sh</span>
+ <span class="comment documentation">/// echo 1</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">foo</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
+ <span class="bool_literal">true</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[`Foo`](Foo)</span><span class="comment documentation"> is a struct</span>
+<span class="comment documentation">/// This function is &gt; </span><span class="function documentation injected intra_doc_link">[`all_the_links`](all_the_links)</span><span class="comment documentation"> &lt;</span>
+<span class="comment documentation">/// </span><span class="macro documentation injected intra_doc_link">[`noop`](noop)</span><span class="comment documentation"> is a macro below</span>
+<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[`Item`]</span><span class="comment documentation"> is a struct in the module </span><span class="module documentation injected intra_doc_link">[`module`]</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">/// [`Item`]: module::Item</span>
+<span class="comment documentation">/// [mix_and_match]: ThisShouldntResolve</span>
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">all_the_links</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">pub</span> <span class="keyword">mod</span> <span class="module declaration public">module</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">struct</span> <span class="struct declaration public">Item</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr </span><span class="brace injected">}</span><span class="brace injected">}</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="numeric_literal injected">1</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// ```</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">noop</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>expr
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// ```rust</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"false"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="attribute_bracket attribute">]</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```rust"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```ignore"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// ```</span>
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">mix_and_match</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="comment documentation">/**
+It is beyond me why you'd use these when you got ///
+```rust
+</span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
+```
+</span><span class="function documentation injected intra_doc_link">[`block_comments2`]</span><span class="comment documentation"> tests these with indentation
+ */</span>
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">block_comments</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="comment documentation">/**
+ Really, I don't get it
+ ```rust
+</span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
+ ```
+ </span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation
+*/</span>
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
new file mode 100644
index 000000000..af41796e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
@@ -0,0 +1,47 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">std</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root library">alloc</span> <span class="keyword">as</span> <span class="module crate_root declaration library">abc</span><span class="semicolon">;</span>
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
new file mode 100644
index 000000000..a97802cbb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -0,0 +1,233 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">use</span> <span class="module">inner</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="module declaration">inner_mod</span><span class="brace">}</span><span class="semicolon">;</span>
+<span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">pub</span> <span class="keyword">mod</span> <span class="module declaration public">ops</span> <span class="brace">{</span>
+ <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_once"</span><span class="attribute_bracket attribute">]</span>
+ <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnOnce</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
+
+ <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_mut"</span><span class="attribute_bracket attribute">]</span>
+ <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnMut</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span><span class="colon">:</span> <span class="trait public">FnOnce</span><span class="angle">&lt;</span><span class="type_param">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
+
+ <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn"</span><span class="attribute_bracket attribute">]</span>
+ <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">Fn</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span><span class="colon">:</span> <span class="trait public">FnMut</span><span class="angle">&lt;</span><span class="type_param">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
+ <span class="field declaration">x</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="keyword">trait</span> <span class="trait declaration">Bar</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="trait">Bar</span> <span class="keyword">for</span> <span class="struct">Foo</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
+ <span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated consuming declaration">baz</span><span class="parenthesis">(</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable">self</span><span class="comma">,</span> <span class="value_param declaration">f</span><span class="colon">:</span> <span class="struct">Foo</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
+ <span class="value_param">f</span><span class="operator">.</span><span class="function associated consuming">baz</span><span class="parenthesis">(</span><span class="self_keyword consuming mutable">self</span><span class="parenthesis">)</span>
+ <span class="brace">}</span>
+
+ <span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="operator">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="self_keyword mutable reference">self</span><span class="operator">.</span><span class="field">x</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+
+ <span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
+ <span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="operator">::</span><span class="struct">FooCopy</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="struct declaration">BarCopy</span><span class="brace">}</span><span class="semicolon">;</span>
+
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">struct</span> <span class="struct declaration">FooCopy</span> <span class="brace">{</span>
+ <span class="field declaration">x</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="struct">FooCopy</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated consuming declaration">baz</span><span class="parenthesis">(</span><span class="self_keyword declaration">self</span><span class="comma">,</span> <span class="value_param declaration">f</span><span class="colon">:</span> <span class="struct">FooCopy</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
+ <span class="value_param">f</span><span class="operator">.</span><span class="function associated">baz</span><span class="parenthesis">(</span><span class="self_keyword">self</span><span class="parenthesis">)</span>
+ <span class="brace">}</span>
+
+ <span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="operator">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="self_keyword mutable reference">self</span><span class="operator">.</span><span class="field">x</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+
+ <span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
+ <span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">str</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="function">str</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">foo</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="comma">,</span> <span class="type_param declaration">T</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="type_param">T</span> <span class="brace">{</span>
+ <span class="function">foo</span><span class="operator">::</span><span class="angle">&lt;</span><span class="lifetime">'a</span><span class="comma">,</span> <span class="builtin_type">i32</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">never</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">!</span> <span class="brace">{</span>
+ <span class="keyword control">loop</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">const_param</span><span class="angle">&lt;</span><span class="keyword">const</span> <span class="const_param declaration">FOO</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">usize</span> <span class="brace">{</span>
+ <span class="function">const_param</span><span class="operator">::</span><span class="angle">&lt;</span><span class="brace">{</span> <span class="const_param">FOO</span> <span class="brace">}</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="const_param">FOO</span>
+<span class="brace">}</span>
+
+<span class="keyword">use</span> <span class="module public">ops</span><span class="operator">::</span><span class="trait public">Fn</span><span class="semicolon">;</span>
+<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle">&lt;</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait public">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="value_param callable">f</span><span class="parenthesis">(</span><span class="parenthesis">)</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">foobar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="keyword">impl</span> <span class="trait default_library library">Copy</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">foo</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">bar</span> <span class="operator">=</span> <span class="function">foobar</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="comment">// comment</span>
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> <span class="operator">=</span> <span class="numeric_literal">42</span><span class="semicolon">;</span>
+ <span class="variable mutable">x</span> <span class="arithmetic mutable">+=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration mutable reference">y</span> <span class="operator">=</span> <span class="operator">&</span><span class="keyword">mut</span> <span class="variable mutable">x</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration reference">z</span> <span class="operator">=</span> <span class="operator">&</span><span class="variable mutable reference">y</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">x</span><span class="colon">:</span> <span class="variable declaration">z</span><span class="comma">,</span> <span class="variable declaration">y</span> <span class="brace">}</span> <span class="operator">=</span> <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">x</span><span class="colon">:</span> <span class="variable reference">z</span><span class="comma">,</span> <span class="variable mutable reference">y</span> <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="variable">y</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">foo</span> <span class="operator">=</span> <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">x</span><span class="comma">,</span> <span class="unresolved_reference">y</span><span class="colon">:</span> <span class="variable mutable">x</span> <span class="brace">}</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">foo2</span> <span class="operator">=</span> <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">x</span><span class="comma">,</span> <span class="unresolved_reference">y</span><span class="colon">:</span> <span class="variable mutable">x</span> <span class="brace">}</span><span class="semicolon">;</span>
+ <span class="variable mutable">foo</span><span class="operator">.</span><span class="function associated reference">quop</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="variable mutable">foo</span><span class="operator">.</span><span class="function associated mutable reference">qux</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="variable mutable">foo</span><span class="operator">.</span><span class="function associated consuming">baz</span><span class="parenthesis">(</span><span class="variable consuming">foo2</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">copy</span> <span class="operator">=</span> <span class="struct">FooCopy</span> <span class="brace">{</span> <span class="field">x</span> <span class="brace">}</span><span class="semicolon">;</span>
+ <span class="variable mutable">copy</span><span class="operator">.</span><span class="function associated reference">quop</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="variable mutable">copy</span><span class="operator">.</span><span class="function associated mutable reference">qux</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="variable mutable">copy</span><span class="operator">.</span><span class="function associated">baz</span><span class="parenthesis">(</span><span class="variable mutable">copy</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="variable callable declaration">a</span> <span class="operator">=</span> <span class="punctuation">|</span><span class="value_param declaration">x</span><span class="punctuation">|</span> <span class="value_param">x</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable callable declaration">bar</span> <span class="operator">=</span> <span class="struct">Foo</span><span class="operator">::</span><span class="function associated consuming">baz</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="numeric_literal">-</span><span class="numeric_literal">42</span><span class="comma">,</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="operator">-</span><span class="variable">baz</span><span class="operator">.</span><span class="field">0</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="logical">!</span><span class="bool_literal">true</span><span class="semicolon">;</span>
+
+ <span class="label declaration">'foo</span><span class="colon">:</span> <span class="keyword control">loop</span> <span class="brace">{</span>
+ <span class="keyword control">break</span> <span class="label">'foo</span><span class="semicolon">;</span>
+ <span class="keyword control">continue</span> <span class="label">'foo</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">enum</span> <span class="enum declaration">Option</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="enum_variant declaration">Some</span><span class="parenthesis">(</span><span class="type_param">T</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="enum_variant declaration">None</span><span class="comma">,</span>
+<span class="brace">}</span>
+<span class="keyword">use</span> <span class="enum">Option</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
+
+<span class="keyword">impl</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="angle">&gt;</span> <span class="enum">Option</span><span class="angle">&lt;</span><span class="type_param">T</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated consuming declaration">and</span><span class="angle">&lt;</span><span class="type_param declaration">U</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="self_keyword declaration">self</span><span class="comma">,</span> <span class="value_param declaration">other</span><span class="colon">:</span> <span class="enum">Option</span><span class="angle">&lt;</span><span class="type_param">U</span><span class="angle">&gt;</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="enum">Option</span><span class="angle">&lt;</span><span class="parenthesis">(</span><span class="type_param">T</span><span class="comma">,</span> <span class="type_param">U</span><span class="parenthesis">)</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="keyword control">match</span> <span class="value_param">other</span> <span class="brace">{</span>
+ <span class="enum_variant">None</span> <span class="operator">=&gt;</span> <span class="unresolved_reference">unimplemented</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="variable declaration">Nope</span> <span class="operator">=&gt;</span> <span class="variable">Nope</span><span class="comma">,</span>
+ <span class="brace">}</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function async declaration">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">song</span> <span class="operator">=</span> <span class="unresolved_reference">learn_song</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="operator">.</span><span class="keyword async control">await</span><span class="semicolon">;</span>
+ <span class="unresolved_reference">sing_song</span><span class="parenthesis">(</span><span class="variable consuming">song</span><span class="parenthesis">)</span><span class="operator">.</span><span class="keyword async control">await</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function async declaration">async_main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">f1</span> <span class="operator">=</span> <span class="function async">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">f2</span> <span class="operator">=</span> <span class="unresolved_reference">dance</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="unresolved_reference">futures</span><span class="operator">::</span><span class="unresolved_reference">join</span><span class="macro_bang">!</span><span class="parenthesis">(</span>f1<span class="comma">,</span> f2<span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">use_foo_items</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">bob</span> <span class="operator">=</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="struct library">Person</span> <span class="brace">{</span>
+ <span class="field library">name</span><span class="colon">:</span> <span class="string_literal">"Bob"</span><span class="comma">,</span>
+ <span class="field library">age</span><span class="colon">:</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="module library">consts</span><span class="operator">::</span><span class="constant library">NUMBER</span><span class="comma">,</span>
+ <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="variable declaration">control_flow</span> <span class="operator">=</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="function library">identity</span><span class="parenthesis">(</span><span class="module crate_root library">foo</span><span class="operator">::</span><span class="enum library">ControlFlow</span><span class="operator">::</span><span class="enum_variant library">Continue</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword control">if</span> <span class="variable">control_flow</span><span class="operator">.</span><span class="function associated consuming library">should_die</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="module crate_root library">foo</span><span class="operator">::</span><span class="unresolved_reference">die</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">pub</span> <span class="keyword">enum</span> <span class="enum declaration public">Bool</span> <span class="brace">{</span> <span class="enum_variant declaration public">True</span><span class="comma">,</span> <span class="enum_variant declaration public">False</span> <span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="enum public">Bool</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">const</span> <span class="keyword">fn</span> <span class="function associated consuming declaration public">to_primitive</span><span class="parenthesis">(</span><span class="self_keyword declaration">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
+ <span class="bool_literal">true</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+<span class="keyword">const</span> <span class="constant declaration">USAGE_OF_BOOL</span><span class="colon">:</span><span class="builtin_type">bool</span> <span class="operator">=</span> <span class="enum public">Bool</span><span class="operator">::</span><span class="enum_variant public">True</span><span class="operator">.</span><span class="function associated consuming public">to_primitive</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+<span class="keyword">trait</span> <span class="trait declaration">Baz</span> <span class="brace">{</span>
+ <span class="keyword">type</span> <span class="type_alias associated declaration trait">Qux</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="value_param declaration">t</span><span class="colon">:</span> <span class="type_param">T</span><span class="parenthesis">)</span>
+<span class="keyword">where</span>
+ <span class="type_param">T</span><span class="colon">:</span> <span class="trait">Baz</span><span class="comma">,</span>
+ <span class="angle">&lt;</span><span class="type_param">T</span> <span class="keyword">as</span> <span class="trait">Baz</span><span class="angle">&gt;</span><span class="operator">::</span><span class="type_alias associated trait">Qux</span><span class="colon">:</span> <span class="trait">Bar</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">gp_shadows_trait</span><span class="angle">&lt;</span><span class="type_param declaration">Baz</span><span class="colon">:</span> <span class="trait">Bar</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="type_param">Baz</span><span class="operator">::</span><span class="function associated reference trait">bar</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
new file mode 100644
index 000000000..ced7d22f0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
@@ -0,0 +1,62 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span><span class="parenthesis">(</span><span class="value_param declaration reference">ra_fixture</span><span class="colon">:</span> <span class="operator">&</span><span class="builtin_type">str</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="function">fixture</span><span class="parenthesis">(</span><span class="string_literal">r#"</span>
+<span class="keyword">trait</span> <span class="trait declaration">Foo</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration static trait">foo</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="unresolved_reference">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"2 + 2 = {}"</span><span class="comma">,</span> <span class="numeric_literal">4</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span><span class="string_literal">"#</span>
+ <span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="function">fixture</span><span class="parenthesis">(</span><span class="string_literal">r"</span>
+<span class="keyword">fn</span> <span class="function declaration">foo</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="function">foo</span><span class="parenthesis">(</span><span class="keyword">$0</span><span class="brace">{</span>
+ <span class="numeric_literal">92</span>
+ <span class="brace">}</span><span class="keyword">$0</span><span class="parenthesis">)</span>
+<span class="brace">}</span><span class="string_literal">"</span>
+ <span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
new file mode 100644
index 000000000..66f9ede96
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
@@ -0,0 +1,58 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+
+<span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+<span class="keyword">mod</span> <span class="module declaration">__</span> <span class="brace">{</span>
+ <span class="keyword">use</span> <span class="keyword crate_root public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">void</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+<span class="macro">void</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="keyword">Self</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="keyword">struct</span> <span class="struct declaration">__</span> <span class="keyword">where</span> <span class="self_type_keyword">Self</span><span class="colon">:</span><span class="semicolon">;</span>
+<span class="keyword">fn</span> <span class="function declaration">__</span><span class="parenthesis">(</span><span class="punctuation">_</span><span class="colon">:</span> <span class="unresolved_reference">Self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
new file mode 100644
index 000000000..2d85fc8c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
@@ -0,0 +1,55 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="comma">,</span> <span class="lifetime declaration">'b</span><span class="comma">,</span> <span class="lifetime declaration">'c</span><span class="angle">&gt;</span> <span class="keyword">where</span> <span class="lifetime">'a</span><span class="colon">:</span> <span class="lifetime">'a</span><span class="comma">,</span> <span class="lifetime">'static</span><span class="colon">:</span> <span class="lifetime">'static</span> <span class="brace">{</span>
+ <span class="field declaration">field</span><span class="colon">:</span> <span class="operator">&</span><span class="lifetime">'a</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="field declaration">field2</span><span class="colon">:</span> <span class="operator">&</span><span class="lifetime">'static</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+<span class="brace">}</span>
+<span class="keyword">impl</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="angle">&gt;</span> <span class="struct">Foo</span><span class="angle">&lt;</span><span class="lifetime">'_</span><span class="comma">,</span> <span class="lifetime">'a</span><span class="comma">,</span> <span class="lifetime">'static</span><span class="angle">&gt;</span>
+<span class="keyword">where</span>
+ <span class="lifetime">'a</span><span class="colon">:</span> <span class="lifetime">'a</span><span class="comma">,</span>
+ <span class="lifetime">'static</span><span class="colon">:</span> <span class="lifetime">'static</span>
+<span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
new file mode 100644
index 000000000..54d427952
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -0,0 +1,96 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="macro library">mirror</span><span class="macro_bang">!</span> <span class="brace">{</span>
+ <span class="brace">{</span>
+ <span class="comma">,</span><span class="builtin_type">i32</span> <span class="colon">:</span><span class="field declaration public">x</span> <span class="keyword">pub</span>
+ <span class="comma">,</span><span class="builtin_type">i32</span> <span class="colon">:</span><span class="field declaration public">y</span> <span class="keyword">pub</span>
+ <span class="brace">}</span> <span class="struct declaration">Foo</span> <span class="keyword">struct</span>
+<span class="brace">}</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">def_fn</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="macro">def_fn</span><span class="macro_bang">!</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function declaration">bar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-</span><span class="operator">&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
+ <span class="numeric_literal">100</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">dont_color_me_braces</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="numeric_literal">0</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">noop</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>expr
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// textually shadow previous definition</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">noop</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>expr
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">keyword_frag</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>type<span class="colon">:</span>ty<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>type<span class="parenthesis">)</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro</span> <span class="macro declaration">with_args</span><span class="parenthesis">(</span><span class="punctuation">$</span>i<span class="colon">:</span>ident<span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>i
+<span class="brace">}</span>
+
+<span class="keyword">macro</span> <span class="macro declaration">without_args</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>i<span class="colon">:</span>ident<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>i
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="unresolved_reference">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello, {}!"</span><span class="comma">,</span> <span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
new file mode 100644
index 000000000..8a1d69816
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
@@ -0,0 +1,51 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="comment documentation">//! </span><span class="struct documentation injected intra_doc_link">[Struct]</span>
+<span class="comment documentation">//! This is an intra doc injection test for modules</span>
+<span class="comment documentation">//! </span><span class="struct documentation injected intra_doc_link">[Struct]</span>
+<span class="comment documentation">//! This is an intra doc injection test for modules</span>
+
+<span class="keyword">pub</span> <span class="keyword">struct</span> <span class="struct declaration public">Struct</span><span class="semicolon">;</span>
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
new file mode 100644
index 000000000..c4c3e3dc2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
@@ -0,0 +1,50 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[crate::foo::Struct]</span>
+<span class="comment documentation">/// This is an intra doc injection test for modules</span>
+<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[crate::foo::Struct]</span>
+<span class="comment documentation">/// This is an intra doc injection test for modules</span>
+<span class="keyword">mod</span> <span class="module declaration">foo</span><span class="semicolon">;</span>
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
new file mode 100644
index 000000000..2369071ae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
@@ -0,0 +1,58 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="numeric_literal">1</span> <span class="arithmetic">+</span> <span class="numeric_literal">1</span> <span class="arithmetic">-</span> <span class="numeric_literal">1</span> <span class="arithmetic">*</span> <span class="numeric_literal">1</span> <span class="arithmetic">/</span> <span class="numeric_literal">1</span> <span class="arithmetic">%</span> <span class="numeric_literal">1</span> <span class="bitwise">|</span> <span class="numeric_literal">1</span> <span class="bitwise">&</span> <span class="numeric_literal">1</span> <span class="logical">!</span> <span class="numeric_literal">1</span> <span class="bitwise">^</span> <span class="numeric_literal">1</span> <span class="bitwise">&gt;&gt;</span> <span class="numeric_literal">1</span> <span class="bitwise">&lt;&lt;</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">a</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">+=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">-=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">*=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">/=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">%=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">|=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">&=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">^=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">&gt;&gt;=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">&lt;&lt;=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
new file mode 100644
index 000000000..bff35c897
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
@@ -0,0 +1,56 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(76,47%,83%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(15,86%,51%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="794745962933817518" style="color: hsl(127,71%,87%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(90,74%,79%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="794745962933817518" style="color: hsl(127,71%,87%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">bar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
new file mode 100644
index 000000000..c627bc9b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -0,0 +1,164 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">println</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="brace">{</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>io<span class="colon">:</span><span class="colon">:</span>_print<span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>format_args_nl<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="brace">}</span><span class="parenthesis">)</span>
+<span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">const_format_args</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args_nl</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">mod</span> <span class="module declaration">panic</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">macro</span> <span class="macro declaration">panic_2015</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="string_literal">"explicit panic"</span><span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>literal <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="comment">// Use `panic_str` instead of `panic_display::&lt;&str&gt;` for non_fmt_panic lint.</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_str<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="comment">// Special-case the single-argument case for const_panic.</span>
+ <span class="parenthesis">(</span><span class="string_literal">"{}"</span><span class="comma">,</span> <span class="punctuation">$</span>arg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_display<span class="parenthesis">(</span><span class="operator">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="colon">:</span>expr<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_fmt<span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>const_format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="parenthesis attribute">(</span><span class="none attribute">std_panic</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">panic</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">assert</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">asm</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">toho</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented"</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented: {}"</span><span class="comma">,</span> <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="escape_sequence">{{</span><span class="string_literal">Hello</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="comment">// from https://doc.rust-lang.org/std/fmt/index.html</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "Hello"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"world"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "Hello, world!"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"The number is </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "The number is 1"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">?</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="parenthesis">(</span><span class="numeric_literal">3</span><span class="comma">,</span> <span class="numeric_literal">4</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "(3, 4)"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">value</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> value<span class="operator">=</span><span class="numeric_literal">4</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "4"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="comma">,</span> <span class="numeric_literal">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "1 2"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">4</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">42</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "0042" with leading zerosV</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="comma">,</span> <span class="numeric_literal">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "2 1 1 2"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">argument</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> argument <span class="operator">=</span> <span class="string_literal">"test"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "test"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="comma">,</span> name <span class="operator">=</span> <span class="numeric_literal">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "2 1"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">a</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="variable">c</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="variable">b</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> a<span class="operator">=</span><span class="string_literal">"a"</span><span class="comma">,</span> b<span class="operator">=</span><span class="char_literal">'b'</span><span class="comma">,</span> c<span class="operator">=</span><span class="numeric_literal">3</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "a 3 b"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "{2}"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">1</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">width</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> width <span class="operator">=</span> <span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">&lt;</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">-</span><span class="format_specifier">&lt;</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">^</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">&gt;</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">+</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">27</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">-</span><span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="numeric_literal">0</span><span class="numeric_literal">10</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">27</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">0</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">1</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="variable">number</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="variable">prec</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> prec <span class="operator">=</span> <span class="numeric_literal">5</span><span class="comma">,</span> number <span class="operator">=</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">` has 3 fractional digits"</span><span class="comma">,</span> <span class="string_literal">"Hello"</span><span class="comma">,</span> <span class="numeric_literal">3</span><span class="comma">,</span> name<span class="operator">=</span><span class="numeric_literal">1234.56</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">` has 3 characters"</span><span class="comma">,</span> <span class="string_literal">"Hello"</span><span class="comma">,</span> <span class="numeric_literal">3</span><span class="comma">,</span> name<span class="operator">=</span><span class="string_literal">"1234.56"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">&gt;</span><span class="numeric_literal">8</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">` has 3 right-aligned characters"</span><span class="comma">,</span> <span class="string_literal">"Hello"</span><span class="comma">,</span> <span class="numeric_literal">3</span><span class="comma">,</span> name<span class="operator">=</span><span class="string_literal">"1234.56"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"{}"</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"{{}}"</span><span class="semicolon">;</span>
+
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="escape_sequence">{{</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal"> Hello"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal">Hello</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal"> Hello </span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal">Hello </span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal"> Hello</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">r"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"world"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="comment">// escape sequences</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello</span><span class="escape_sequence">\n</span><span class="string_literal">World"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">\u{48}</span><span class="escape_sequence">\x65</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6F</span><span class="string_literal"> World"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">b"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
+
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> A <span class="operator">=</span> <span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> ничоси <span class="operator">=</span> <span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">x</span><span class="format_specifier">?</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> "</span><span class="comma">,</span> <span class="unresolved_reference">thingy</span><span class="comma">,</span> <span class="unresolved_reference">n2</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">panic</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">0</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">panic</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"more </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="bool_literal">true</span><span class="comma">,</span> <span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="bool_literal">true</span><span class="comma">,</span> <span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> asdasd"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">fmt"</span><span class="comma">,</span> <span class="numeric_literal">0</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"mov eax, </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis">(</span>concat<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="comma">,</span> <span class="string_literal">"{}"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
new file mode 100644
index 000000000..0716bae75
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -0,0 +1,126 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">id</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span>
+ <span class="brace">}</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">unsafe_deref</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">*</span><span class="parenthesis">(</span><span class="operator">&</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="keyword">as</span> <span class="punctuation">*</span><span class="keyword">const</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
+ <span class="brace">}</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+<span class="keyword">static</span> <span class="keyword">mut</span> <span class="static declaration mutable unsafe">MUT_GLOBAL</span><span class="colon">:</span> <span class="struct">Struct</span> <span class="operator">=</span> <span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
+<span class="keyword">static</span> <span class="static declaration">GLOBAL</span><span class="colon">:</span> <span class="struct">Struct</span> <span class="operator">=</span> <span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
+<span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function declaration unsafe">unsafe_fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">union</span> <span class="union declaration">Union</span> <span class="brace">{</span>
+ <span class="field declaration">a</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
+ <span class="field declaration">b</span><span class="colon">:</span> <span class="builtin_type">f32</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="keyword">struct</span> <span class="struct declaration">Struct</span> <span class="brace">{</span> <span class="field declaration">field</span><span class="colon">:</span> <span class="builtin_type">i32</span> <span class="brace">}</span>
+<span class="keyword">impl</span> <span class="struct">Struct</span> <span class="brace">{</span>
+ <span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function associated declaration reference unsafe">unsafe_method</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">repr</span><span class="parenthesis attribute">(</span><span class="none attribute">packed</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">struct</span> <span class="struct declaration">Packed</span> <span class="brace">{</span>
+ <span class="field declaration">a</span><span class="colon">:</span> <span class="builtin_type">u16</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="keyword unsafe">unsafe</span> <span class="keyword">trait</span> <span class="trait declaration unsafe">UnsafeTrait</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="keyword unsafe">unsafe</span> <span class="keyword">impl</span> <span class="trait unsafe">UnsafeTrait</span> <span class="keyword">for</span> <span class="struct">Packed</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="keyword">impl</span> <span class="punctuation">!</span><span class="trait">UnsafeTrait</span> <span class="keyword">for</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">unsafe_trait_bound</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="colon">:</span> <span class="trait">UnsafeTrait</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="punctuation">_</span><span class="colon">:</span> <span class="type_param">T</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">trait</span> <span class="trait declaration">DoTheAutoref</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="trait">DoTheAutoref</span> <span class="keyword">for</span> <span class="builtin_type">u16</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">x</span> <span class="operator">=</span> <span class="operator">&</span><span class="numeric_literal">5</span> <span class="keyword">as</span> <span class="keyword">*</span><span class="keyword">const</span> <span class="punctuation">_</span> <span class="keyword">as</span> <span class="keyword">*</span><span class="keyword">const</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">u</span> <span class="operator">=</span> <span class="union">Union</span> <span class="brace">{</span> <span class="field">b</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="macro">id</span><span class="macro_bang">!</span> <span class="brace">{</span>
+ <span class="keyword unsafe">unsafe</span> <span class="brace">{</span> <span class="macro unsafe">unsafe_deref</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">}</span>
+ <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="keyword unsafe">unsafe</span> <span class="brace">{</span>
+ <span class="macro unsafe">unsafe_deref</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro unsafe">id</span><span class="macro_bang">!</span> <span class="brace">{</span> <span class="macro unsafe">unsafe_deref</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe fn and method calls</span>
+ <span class="function unsafe">unsafe_fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">b</span> <span class="operator">=</span> <span class="variable">u</span><span class="operator">.</span><span class="field unsafe">b</span><span class="semicolon">;</span>
+ <span class="keyword control">match</span> <span class="variable">u</span> <span class="brace">{</span>
+ <span class="union">Union</span> <span class="brace">{</span> <span class="field unsafe">b</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span> <span class="operator">=&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="union">Union</span> <span class="brace">{</span> <span class="field unsafe">a</span> <span class="brace">}</span> <span class="operator">=&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="brace">}</span>
+ <span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="operator">.</span><span class="function associated reference unsafe">unsafe_method</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe deref</span>
+ <span class="operator unsafe">*</span><span class="variable">x</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe access to a static mut</span>
+ <span class="static mutable unsafe">MUT_GLOBAL</span><span class="operator">.</span><span class="field">field</span><span class="semicolon">;</span>
+ <span class="static">GLOBAL</span><span class="operator">.</span><span class="field">field</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe ref of packed fields</span>
+ <span class="keyword">let</span> <span class="variable declaration">packed</span> <span class="operator">=</span> <span class="struct">Packed</span> <span class="brace">{</span> <span class="field">a</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration reference">a</span> <span class="operator">=</span> <span class="operator unsafe">&</span><span class="variable">packed</span><span class="operator">.</span><span class="field">a</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="keyword unsafe">ref</span> <span class="variable declaration reference">a</span> <span class="operator">=</span> <span class="variable">packed</span><span class="operator">.</span><span class="field">a</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="struct">Packed</span> <span class="brace">{</span> <span class="keyword unsafe">ref</span> <span class="field">a</span> <span class="brace">}</span> <span class="operator">=</span> <span class="variable">packed</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="struct">Packed</span> <span class="brace">{</span> <span class="field">a</span><span class="colon">:</span> <span class="keyword unsafe">ref</span> <span class="variable declaration reference">_a</span> <span class="brace">}</span> <span class="operator">=</span> <span class="variable">packed</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe auto ref of packed field</span>
+ <span class="variable">packed</span><span class="operator">.</span><span class="field">a</span><span class="operator">.</span><span class="function associated reference trait unsafe">calls_autoref</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
new file mode 100644
index 000000000..99be7c664
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -0,0 +1,1096 @@
+use std::time::Instant;
+
+use expect_test::{expect_file, ExpectFile};
+use ide_db::SymbolKind;
+use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};
+
+use crate::{fixture, FileRange, HlTag, TextRange};
+
+#[test]
+fn attributes() {
+ check_highlighting(
+ r#"
+//- proc_macros: identity
+//- minicore: derive, copy
+#[allow(dead_code)]
+#[rustfmt::skip]
+#[proc_macros::identity]
+#[derive(Copy)]
+/// This is a doc comment
+// This is a normal comment
+/// This is a doc comment
+#[derive(Copy)]
+// This is another normal comment
+/// This is another doc comment
+// This is another normal comment
+#[derive(Copy)]
+// The reason for these being here is to test AttrIds
+struct Foo;
+"#,
+ expect_file!["./test_data/highlight_attributes.html"],
+ false,
+ );
+}
+
+#[test]
+fn macros() {
+ check_highlighting(
+ r#"
+//- proc_macros: mirror
+proc_macros::mirror! {
+ {
+ ,i32 :x pub
+ ,i32 :y pub
+ } Foo struct
+}
+macro_rules! def_fn {
+ ($($tt:tt)*) => {$($tt)*}
+}
+
+def_fn! {
+ fn bar() -> u32 {
+ 100
+ }
+}
+
+macro_rules! dont_color_me_braces {
+ () => {0}
+}
+
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+/// textually shadow previous definition
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+macro_rules! keyword_frag {
+ ($type:ty) => ($type)
+}
+
+macro with_args($i:ident) {
+ $i
+}
+
+macro without_args {
+ ($i:ident) => {
+ $i
+ }
+}
+
+fn main() {
+ println!("Hello, {}!", 92);
+ dont_color_me_braces!();
+ noop!(noop!(1));
+}
+"#,
+ expect_file!["./test_data/highlight_macros.html"],
+ false,
+ );
+}
+
+/// If what you want to test feels like a specific entity consider making a new test instead,
+/// this test fixture here in fact should shrink instead of grow ideally.
+#[test]
+fn test_highlighting() {
+ check_highlighting(
+ r#"
+//- minicore: derive, copy
+//- /main.rs crate:main deps:foo
+use inner::{self as inner_mod};
+mod inner {}
+
+pub mod ops {
+ #[lang = "fn_once"]
+ pub trait FnOnce<Args> {}
+
+ #[lang = "fn_mut"]
+ pub trait FnMut<Args>: FnOnce<Args> {}
+
+ #[lang = "fn"]
+ pub trait Fn<Args>: FnMut<Args> {}
+}
+
+struct Foo {
+ x: u32,
+}
+
+trait Bar {
+ fn bar(&self) -> i32;
+}
+
+impl Bar for Foo {
+ fn bar(&self) -> i32 {
+ self.x
+ }
+}
+
+impl Foo {
+ fn baz(mut self, f: Foo) -> i32 {
+ f.baz(self)
+ }
+
+ fn qux(&mut self) {
+ self.x = 0;
+ }
+
+ fn quop(&self) -> i32 {
+ self.x
+ }
+}
+
+use self::FooCopy::{self as BarCopy};
+
+#[derive(Copy)]
+struct FooCopy {
+ x: u32,
+}
+
+impl FooCopy {
+ fn baz(self, f: FooCopy) -> u32 {
+ f.baz(self)
+ }
+
+ fn qux(&mut self) {
+ self.x = 0;
+ }
+
+ fn quop(&self) -> u32 {
+ self.x
+ }
+}
+
+fn str() {
+ str();
+}
+
+fn foo<'a, T>() -> T {
+ foo::<'a, i32>()
+}
+
+fn never() -> ! {
+ loop {}
+}
+
+fn const_param<const FOO: usize>() -> usize {
+ const_param::<{ FOO }>();
+ FOO
+}
+
+use ops::Fn;
+fn baz<F: Fn() -> ()>(f: F) {
+ f()
+}
+
+fn foobar() -> impl Copy {}
+
+fn foo() {
+ let bar = foobar();
+}
+
+// comment
+fn main() {
+ let mut x = 42;
+ x += 1;
+ let y = &mut x;
+ let z = &y;
+
+ let Foo { x: z, y } = Foo { x: z, y };
+
+ y;
+
+ let mut foo = Foo { x, y: x };
+ let foo2 = Foo { x, y: x };
+ foo.quop();
+ foo.qux();
+ foo.baz(foo2);
+
+ let mut copy = FooCopy { x };
+ copy.quop();
+ copy.qux();
+ copy.baz(copy);
+
+ let a = |x| x;
+ let bar = Foo::baz;
+
+ let baz = (-42,);
+ let baz = -baz.0;
+
+ let _ = !true;
+
+ 'foo: loop {
+ break 'foo;
+ continue 'foo;
+ }
+}
+
+enum Option<T> {
+ Some(T),
+ None,
+}
+use Option::*;
+
+impl<T> Option<T> {
+ fn and<U>(self, other: Option<U>) -> Option<(T, U)> {
+ match other {
+ None => unimplemented!(),
+ Nope => Nope,
+ }
+ }
+}
+
+async fn learn_and_sing() {
+ let song = learn_song().await;
+ sing_song(song).await;
+}
+
+async fn async_main() {
+ let f1 = learn_and_sing();
+ let f2 = dance();
+ futures::join!(f1, f2);
+}
+
+fn use_foo_items() {
+ let bob = foo::Person {
+ name: "Bob",
+ age: foo::consts::NUMBER,
+ };
+
+ let control_flow = foo::identity(foo::ControlFlow::Continue);
+
+ if control_flow.should_die() {
+ foo::die!();
+ }
+}
+
+pub enum Bool { True, False }
+
+impl Bool {
+ pub const fn to_primitive(self) -> bool {
+ true
+ }
+}
+const USAGE_OF_BOOL:bool = Bool::True.to_primitive();
+
+trait Baz {
+ type Qux;
+}
+
+fn baz<T>(t: T)
+where
+ T: Baz,
+ <T as Baz>::Qux: Bar {}
+
+fn gp_shadows_trait<Baz: Bar>() {
+ Baz::bar;
+}
+
+//- /foo.rs crate:foo
+pub struct Person {
+ pub name: &'static str,
+ pub age: u8,
+}
+
+pub enum ControlFlow {
+ Continue,
+ Die,
+}
+
+impl ControlFlow {
+ pub fn should_die(self) -> bool {
+ matches!(self, ControlFlow::Die)
+ }
+}
+
+pub fn identity<T>(x: T) -> T { x }
+
+pub mod consts {
+ pub const NUMBER: i64 = 92;
+}
+
+macro_rules! die {
+ () => {
+ panic!();
+ };
+}
+"#,
+ expect_file!["./test_data/highlight_general.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_lifetime_highlighting() {
+ check_highlighting(
+ r#"
+//- minicore: derive
+
+#[derive()]
+struct Foo<'a, 'b, 'c> where 'a: 'a, 'static: 'static {
+ field: &'a (),
+ field2: &'static (),
+}
+impl<'a> Foo<'_, 'a, 'static>
+where
+ 'a: 'a,
+ 'static: 'static
+{}
+"#,
+ expect_file!["./test_data/highlight_lifetimes.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_keyword_highlighting() {
+ check_highlighting(
+ r#"
+extern crate self;
+
+use crate;
+use self;
+mod __ {
+ use super::*;
+}
+
+macro_rules! void {
+ ($($tt:tt)*) => {}
+}
+void!(Self);
+struct __ where Self:;
+fn __(_: Self) {}
+"#,
+ expect_file!["./test_data/highlight_keywords.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_string_highlighting() {
+ // The format string detection is based on macro-expansion,
+ // thus, we have to copy the macro definition from `std`
+ check_highlighting(
+ r#"
+macro_rules! println {
+ ($($arg:tt)*) => ({
+ $crate::io::_print($crate::format_args_nl!($($arg)*));
+ })
+}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! const_format_args {}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args_nl {}
+
+mod panic {
+ pub macro panic_2015 {
+ () => (
+ $crate::panicking::panic("explicit panic")
+ ),
+ ($msg:literal $(,)?) => (
+ $crate::panicking::panic($msg)
+ ),
+ // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
+ ($msg:expr $(,)?) => (
+ $crate::panicking::panic_str($msg)
+ ),
+ // Special-case the single-argument case for const_panic.
+ ("{}", $arg:expr $(,)?) => (
+ $crate::panicking::panic_display(&$arg)
+ ),
+ ($fmt:expr, $($arg:tt)+) => (
+ $crate::panicking::panic_fmt($crate::const_format_args!($fmt, $($arg)+))
+ ),
+ }
+}
+
+#[rustc_builtin_macro(std_panic)]
+#[macro_export]
+macro_rules! panic {}
+#[rustc_builtin_macro]
+macro_rules! assert {}
+#[rustc_builtin_macro]
+macro_rules! asm {}
+
+macro_rules! toho {
+ () => ($crate::panic!("not yet implemented"));
+ ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", $crate::format_args!($($arg)+)));
+}
+
+fn main() {
+ println!("Hello {{Hello}}");
+ // from https://doc.rust-lang.org/std/fmt/index.html
+ println!("Hello"); // => "Hello"
+ println!("Hello, {}!", "world"); // => "Hello, world!"
+ println!("The number is {}", 1); // => "The number is 1"
+ println!("{:?}", (3, 4)); // => "(3, 4)"
+ println!("{value}", value=4); // => "4"
+ println!("{} {}", 1, 2); // => "1 2"
+ println!("{:04}", 42); // => "0042" with leading zerosV
+ println!("{1} {} {0} {}", 1, 2); // => "2 1 1 2"
+ println!("{argument}", argument = "test"); // => "test"
+ println!("{name} {}", 1, name = 2); // => "2 1"
+ println!("{a} {c} {b}", a="a", b='b', c=3); // => "a 3 b"
+ println!("{{{}}}", 2); // => "{2}"
+ println!("Hello {:5}!", "x");
+ println!("Hello {:1$}!", "x", 5);
+ println!("Hello {1:0$}!", 5, "x");
+ println!("Hello {:width$}!", "x", width = 5);
+ println!("Hello {:<5}!", "x");
+ println!("Hello {:-<5}!", "x");
+ println!("Hello {:^5}!", "x");
+ println!("Hello {:>5}!", "x");
+ println!("Hello {:+}!", 5);
+ println!("{:#x}!", 27);
+ println!("Hello {:05}!", 5);
+ println!("Hello {:05}!", -5);
+ println!("{:#010x}!", 27);
+ println!("Hello {0} is {1:.5}", "x", 0.01);
+ println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
+ println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
+ println!("Hello {} is {:.*}", "x", 5, 0.01);
+ println!("Hello {} is {2:.*}", "x", 5, 0.01);
+ println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
+ println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
+ println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
+ println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
+
+ let _ = "{}"
+ let _ = "{{}}";
+
+ println!("Hello {{}}");
+ println!("{{ Hello");
+ println!("Hello }}");
+ println!("{{Hello}}");
+ println!("{{ Hello }}");
+ println!("{{Hello }}");
+ println!("{{ Hello}}");
+
+ println!(r"Hello, {}!", "world");
+
+ // escape sequences
+ println!("Hello\nWorld");
+ println!("\u{48}\x65\x6C\x6C\x6F World");
+
+ let _ = "\x28\x28\x00\x63\n";
+ let _ = b"\x28\x28\x00\x63\n";
+
+ println!("{\x41}", A = 92);
+ println!("{ничоси}", ничоси = 92);
+
+ println!("{:x?} {} ", thingy, n2);
+ panic!("{}", 0);
+ panic!("more {}", 1);
+ assert!(true, "{}", 1);
+ assert!(true, "{} asdasd", 1);
+ toho!("{}fmt", 0);
+ asm!("mov eax, {0}");
+ format_args!(concat!("{}"), "{}");
+}"#,
+ expect_file!["./test_data/highlight_strings.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_unsafe_highlighting() {
+ check_highlighting(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => {
+ $($tt)*
+ };
+}
+macro_rules! unsafe_deref {
+ () => {
+ *(&() as *const ())
+ };
+}
+static mut MUT_GLOBAL: Struct = Struct { field: 0 };
+static GLOBAL: Struct = Struct { field: 0 };
+unsafe fn unsafe_fn() {}
+
+union Union {
+ a: u32,
+ b: f32,
+}
+
+struct Struct { field: i32 }
+impl Struct {
+ unsafe fn unsafe_method(&self) {}
+}
+
+#[repr(packed)]
+struct Packed {
+ a: u16,
+}
+
+unsafe trait UnsafeTrait {}
+unsafe impl UnsafeTrait for Packed {}
+impl !UnsafeTrait for () {}
+
+fn unsafe_trait_bound<T: UnsafeTrait>(_: T) {}
+
+trait DoTheAutoref {
+ fn calls_autoref(&self);
+}
+
+impl DoTheAutoref for u16 {
+ fn calls_autoref(&self) {}
+}
+
+fn main() {
+ let x = &5 as *const _ as *const usize;
+ let u = Union { b: 0 };
+
+ id! {
+ unsafe { unsafe_deref!() }
+ };
+
+ unsafe {
+ unsafe_deref!();
+ id! { unsafe_deref!() };
+
+ // unsafe fn and method calls
+ unsafe_fn();
+ let b = u.b;
+ match u {
+ Union { b: 0 } => (),
+ Union { a } => (),
+ }
+ Struct { field: 0 }.unsafe_method();
+
+ // unsafe deref
+ *x;
+
+ // unsafe access to a static mut
+ MUT_GLOBAL.field;
+ GLOBAL.field;
+
+ // unsafe ref of packed fields
+ let packed = Packed { a: 0 };
+ let a = &packed.a;
+ let ref a = packed.a;
+ let Packed { ref a } = packed;
+ let Packed { a: ref _a } = packed;
+
+ // unsafe auto ref of packed field
+ packed.a.calls_autoref();
+ }
+}
+"#,
+ expect_file!["./test_data/highlight_unsafe.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_highlight_doc_comment() {
+ check_highlighting(
+ r#"
+//- /main.rs
+//! This is a module to test doc injection.
+//! ```
+//! fn test() {}
+//! ```
+
+mod outline_module;
+
+/// ```
+/// let _ = "early doctests should not go boom";
+/// ```
+struct Foo {
+ bar: bool,
+}
+
+/// This is an impl with a code block.
+///
+/// ```
+/// fn foo() {
+///
+/// }
+/// ```
+impl Foo {
+ /// ```
+ /// let _ = "Call me
+ // KILLER WHALE
+ /// Ishmael.";
+ /// ```
+ pub const bar: bool = true;
+
+ /// Constructs a new `Foo`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// let mut foo: Foo = Foo::new();
+ /// ```
+ pub const fn new() -> Foo {
+ Foo { bar: true }
+ }
+
+ /// `bar` method on `Foo`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use x::y;
+ ///
+ /// let foo = Foo::new();
+ ///
+ /// // calls bar on foo
+ /// assert!(foo.bar());
+ ///
+ /// let bar = foo.bar || Foo::bar;
+ ///
+ /// /* multi-line
+ /// comment */
+ ///
+ /// let multi_line_string = "Foo
+ /// bar\n
+ /// ";
+ ///
+ /// ```
+ ///
+ /// ```rust,no_run
+ /// let foobar = Foo::new().bar();
+ /// ```
+ ///
+ /// ~~~rust,no_run
+ /// // code block with tilde.
+ /// let foobar = Foo::new().bar();
+ /// ~~~
+ ///
+ /// ```
+ /// // functions
+ /// fn foo<T, const X: usize>(arg: i32) {
+ /// let x: T = X;
+ /// }
+ /// ```
+ ///
+ /// ```sh
+ /// echo 1
+ /// ```
+ pub fn foo(&self) -> bool {
+ true
+ }
+}
+
+/// [`Foo`](Foo) is a struct
+/// This function is > [`all_the_links`](all_the_links) <
+/// [`noop`](noop) is a macro below
+/// [`Item`] is a struct in the module [`module`]
+///
+/// [`Item`]: module::Item
+/// [mix_and_match]: ThisShouldntResolve
+pub fn all_the_links() {}
+
+pub mod module {
+ pub struct Item;
+}
+
+/// ```
+/// macro_rules! noop { ($expr:expr) => { $expr }}
+/// noop!(1);
+/// ```
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+/// ```rust
+/// let _ = example(&[1, 2, 3]);
+/// ```
+///
+/// ```
+/// loop {}
+#[cfg_attr(not(feature = "false"), doc = "loop {}")]
+#[doc = "loop {}"]
+/// ```
+///
+#[cfg_attr(feature = "alloc", doc = "```rust")]
+#[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
+/// let _ = example(&alloc::vec![1, 2, 3]);
+/// ```
+pub fn mix_and_match() {}
+
+/**
+It is beyond me why you'd use these when you got ///
+```rust
+let _ = example(&[1, 2, 3]);
+```
+[`block_comments2`] tests these with indentation
+ */
+pub fn block_comments() {}
+
+/**
+ Really, I don't get it
+ ```rust
+ let _ = example(&[1, 2, 3]);
+ ```
+ [`block_comments`] tests these without indentation
+*/
+pub fn block_comments2() {}
+
+//- /outline_module.rs
+//! This is an outline module whose purpose is to test that its inline attribute injection does not
+//! spill into its parent.
+//! ```
+//! fn test() {}
+//! ```
+"#,
+ expect_file!["./test_data/highlight_doctest.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_extern_crate() {
+ check_highlighting(
+ r#"
+//- /main.rs crate:main deps:std,alloc
+extern crate std;
+extern crate alloc as abc;
+//- /std/lib.rs crate:std
+pub struct S;
+//- /alloc/lib.rs crate:alloc
+pub struct A
+"#,
+ expect_file!["./test_data/highlight_extern_crate.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_crate_root() {
+ check_highlighting(
+ r#"
+//- minicore: iterators
+//- /main.rs crate:main deps:foo
+extern crate foo;
+use core::iter;
+
+pub const NINETY_TWO: u8 = 92;
+
+use foo as foooo;
+
+pub(crate) fn main() {
+ let baz = iter::repeat(92);
+}
+
+mod bar {
+ pub(in super) const FORTY_TWO: u8 = 42;
+
+ mod baz {
+ use super::super::NINETY_TWO;
+ use crate::foooo::Point;
+
+ pub(in super::super) const TWENTY_NINE: u8 = 29;
+ }
+}
+//- /foo.rs crate:foo
+struct Point {
+ x: u8,
+ y: u8,
+}
+
+mod inner {
+ pub(super) fn swap(p: crate::Point) -> crate::Point {
+ crate::Point { x: p.y, y: p.x }
+ }
+}
+"#,
+ expect_file!["./test_data/highlight_crate_root.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_default_library() {
+ check_highlighting(
+ r#"
+//- minicore: option, iterators
+use core::iter;
+
+fn main() {
+ let foo = Some(92);
+ let nums = iter::repeat(foo.unwrap());
+}
+"#,
+ expect_file!["./test_data/highlight_default_library.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_associated_function() {
+ check_highlighting(
+ r#"
+fn not_static() {}
+
+struct foo {}
+
+impl foo {
+ pub fn is_static() {}
+ pub fn is_not_static(&self) {}
+}
+
+trait t {
+ fn t_is_static() {}
+ fn t_is_not_static(&self) {}
+}
+
+impl t for foo {
+ pub fn is_static() {}
+ pub fn is_not_static(&self) {}
+}
+"#,
+ expect_file!["./test_data/highlight_assoc_functions.html"],
+ false,
+ )
+}
+
+#[test]
+fn test_injection() {
+ check_highlighting(
+ r##"
+fn fixture(ra_fixture: &str) {}
+
+fn main() {
+ fixture(r#"
+trait Foo {
+ fn foo() {
+ println!("2 + 2 = {}", 4);
+ }
+}"#
+ );
+ fixture(r"
+fn foo() {
+ foo(\$0{
+ 92
+ }\$0)
+}"
+ );
+}
+"##,
+ expect_file!["./test_data/highlight_injection.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_operators() {
+ check_highlighting(
+ r##"
+fn main() {
+ 1 + 1 - 1 * 1 / 1 % 1 | 1 & 1 ! 1 ^ 1 >> 1 << 1;
+ let mut a = 0;
+ a += 1;
+ a -= 1;
+ a *= 1;
+ a /= 1;
+ a %= 1;
+ a |= 1;
+ a &= 1;
+ a ^= 1;
+ a >>= 1;
+ a <<= 1;
+}
+"##,
+ expect_file!["./test_data/highlight_operators.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_mod_hl_injection() {
+ check_highlighting(
+ r##"
+//- /foo.rs
+//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+
+pub struct Struct;
+//- /lib.rs crate:foo
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+"##,
+ expect_file!["./test_data/highlight_module_docs_inline.html"],
+ false,
+ );
+ check_highlighting(
+ r##"
+//- /lib.rs crate:foo
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+//- /foo.rs
+//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+
+pub struct Struct;
+"##,
+ expect_file!["./test_data/highlight_module_docs_outline.html"],
+ false,
+ );
+}
+
+#[test]
+#[cfg_attr(
+ all(unix, not(target_pointer_width = "64")),
+ ignore = "depends on `DefaultHasher` outputs"
+)]
+fn test_rainbow_highlighting() {
+ check_highlighting(
+ r#"
+fn main() {
+ let hello = "hello";
+ let x = hello.to_string();
+ let y = hello.to_string();
+
+ let x = "other color please!";
+ let y = x.to_string();
+}
+
+fn bar() {
+ let mut hello = "hello";
+}
+"#,
+ expect_file!["./test_data/highlight_rainbow.html"],
+ true,
+ );
+}
+
+#[test]
+fn test_ranges() {
+ let (analysis, file_id) = fixture::file(
+ r#"
+#[derive(Clone, Debug)]
+struct Foo {
+ pub x: i32,
+ pub y: i32,
+}
+"#,
+ );
+
+ // The "x"
+ let highlights = &analysis
+ .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) })
+ .unwrap();
+
+ assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public");
+}
+
+#[test]
+fn ranges_sorted() {
+ let (analysis, file_id) = fixture::file(
+ r#"
+#[foo(bar = "bar")]
+macro_rules! test {}
+}"#
+ .trim(),
+ );
+ let _ = analysis.highlight(file_id).unwrap();
+}
+
+/// Highlights the code given by the `ra_fixture` argument, renders the
+/// result as HTML, and compares it with the HTML file given as `snapshot`.
+/// Note that the `snapshot` file is overwritten by the rendered HTML.
+fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) {
+ let (analysis, file_id) = fixture::file(ra_fixture.trim());
+ let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap();
+ expect.assert_eq(actual_html)
+}
+
+#[test]
+fn benchmark_syntax_highlighting_long_struct() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let fixture = bench_fixture::big_struct();
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let hash = {
+ let _pt = bench("syntax highlighting long struct");
+ analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
+ .count()
+ };
+ assert_eq!(hash, 2001);
+}
+
+#[test]
+fn syntax_highlighting_not_quadratic() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let mut al = AssertLinear::default();
+ while al.next_round() {
+ for i in 6..=10 {
+ let n = 1 << i;
+
+ let fixture = bench_fixture::big_struct_n(n);
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let time = Instant::now();
+
+ let hash = analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
+ .count();
+ assert!(hash > n as usize);
+
+ let elapsed = time.elapsed();
+ al.sample(n as f64, elapsed.as_millis() as f64);
+ }
+ }
+}
+
+#[test]
+fn benchmark_syntax_highlighting_parser() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let fixture = bench_fixture::glorious_old_parser();
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let hash = {
+ let _pt = bench("syntax highlighting parser");
+ analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))
+ .count()
+ };
+ assert_eq!(hash, 1609);
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs
new file mode 100644
index 000000000..9003e7cd3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs
@@ -0,0 +1,339 @@
+use ide_db::base_db::{FileId, SourceDatabase};
+use ide_db::RootDatabase;
+use syntax::{
+ AstNode, NodeOrToken, SourceFile, SyntaxKind::STRING, SyntaxToken, TextRange, TextSize,
+};
+
+// Feature: Show Syntax Tree
+//
+// Shows the parse tree of the current file. It exists mostly for debugging
+// rust-analyzer itself.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Show Syntax Tree**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065586-068bdb80-91b1-11eb-9507-fee67f9f45a0.gif[]
+pub(crate) fn syntax_tree(
+ db: &RootDatabase,
+ file_id: FileId,
+ text_range: Option<TextRange>,
+) -> String {
+ let parse = db.parse(file_id);
+ if let Some(text_range) = text_range {
+ let node = match parse.tree().syntax().covering_element(text_range) {
+ NodeOrToken::Node(node) => node,
+ NodeOrToken::Token(token) => {
+ if let Some(tree) = syntax_tree_for_string(&token, text_range) {
+ return tree;
+ }
+ token.parent().unwrap()
+ }
+ };
+
+ format!("{:#?}", node)
+ } else {
+ format!("{:#?}", parse.tree().syntax())
+ }
+}
+
+/// Attempts parsing the selected contents of a string literal
+/// as rust syntax and returns its syntax tree
+fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> {
+ // When the range is inside a string
+ // we'll attempt parsing it as rust syntax
+ // to provide the syntax tree of the contents of the string
+ match token.kind() {
+ STRING => syntax_tree_for_token(token, text_range),
+ _ => None,
+ }
+}
+
+fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
+ // Range of the full node
+ let node_range = node.text_range();
+ let text = node.text().to_string();
+
+ // We start at some point inside the node
+ // Either we have selected the whole string
+ // or our selection is inside it
+ let start = text_range.start() - node_range.start();
+
+ // how many characters we have selected
+ let len = text_range.len();
+
+ let node_len = node_range.len();
+
+ let start = start;
+
+ // We want to cap our length
+ let len = len.min(node_len);
+
+ // Ensure our slice is inside the actual string
+ let end =
+ if start + len < TextSize::of(&text) { start + len } else { TextSize::of(&text) - start };
+
+ let text = &text[TextRange::new(start, end)];
+
+ // Remove possible extra string quotes from the start
+ // and the end of the string
+ let text = text
+ .trim_start_matches('r')
+ .trim_start_matches('#')
+ .trim_start_matches('"')
+ .trim_end_matches('#')
+ .trim_end_matches('"')
+ .trim()
+ // Remove custom markers
+ .replace("$0", "");
+
+ let parsed = SourceFile::parse(&text);
+
+ // If the "file" parsed without errors,
+ // return its syntax
+ if parsed.errors().is_empty() {
+ return Some(format!("{:#?}", parsed.tree().syntax()));
+ }
+
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::expect;
+
+ use crate::fixture;
+
    // Renders the whole-file syntax tree of the fixture and compares it
    // against the inline `expect` snapshot.
    fn check(ra_fixture: &str, expect: expect_test::Expect) {
        let (analysis, file_id) = fixture::file(ra_fixture);
        let syn = analysis.syntax_tree(file_id, None).unwrap();
        expect.assert_eq(&syn)
    }
    // Like `check`, but only renders the tree covering the `$0...$0`
    // selection in the fixture.
    fn check_range(ra_fixture: &str, expect: expect_test::Expect) {
        let (analysis, frange) = fixture::range(ra_fixture);
        let syn = analysis.syntax_tree(frange.file_id, Some(frange.range)).unwrap();
        expect.assert_eq(&syn)
    }
+
+ #[test]
+ fn test_syntax_tree_without_range() {
+ // Basic syntax
+ check(
+ r#"fn foo() {}"#,
+ expect![[r#"
+ SOURCE_FILE@0..11
+ FN@0..11
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..11
+ STMT_LIST@9..11
+ L_CURLY@9..10 "{"
+ R_CURLY@10..11 "}"
+ "#]],
+ );
+
+ check(
+ r#"
+fn test() {
+ assert!("
+ fn foo() {
+ }
+ ", "");
+}"#,
+ expect![[r#"
+ SOURCE_FILE@0..60
+ FN@0..60
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..7
+ IDENT@3..7 "test"
+ PARAM_LIST@7..9
+ L_PAREN@7..8 "("
+ R_PAREN@8..9 ")"
+ WHITESPACE@9..10 " "
+ BLOCK_EXPR@10..60
+ STMT_LIST@10..60
+ L_CURLY@10..11 "{"
+ WHITESPACE@11..16 "\n "
+ EXPR_STMT@16..58
+ MACRO_EXPR@16..57
+ MACRO_CALL@16..57
+ PATH@16..22
+ PATH_SEGMENT@16..22
+ NAME_REF@16..22
+ IDENT@16..22 "assert"
+ BANG@22..23 "!"
+ TOKEN_TREE@23..57
+ L_PAREN@23..24 "("
+ STRING@24..52 "\"\n fn foo() {\n ..."
+ COMMA@52..53 ","
+ WHITESPACE@53..54 " "
+ STRING@54..56 "\"\""
+ R_PAREN@56..57 ")"
+ SEMICOLON@57..58 ";"
+ WHITESPACE@58..59 "\n"
+ R_CURLY@59..60 "}"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_syntax_tree_with_range() {
+ check_range(
+ r#"$0fn foo() {}$0"#,
+ expect![[r#"
+ FN@0..11
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..11
+ STMT_LIST@9..11
+ L_CURLY@9..10 "{"
+ R_CURLY@10..11 "}"
+ "#]],
+ );
+
+ check_range(
+ r#"
+fn test() {
+ $0assert!("
+ fn foo() {
+ }
+ ", "");$0
+}"#,
+ expect![[r#"
+ EXPR_STMT@16..58
+ MACRO_EXPR@16..57
+ MACRO_CALL@16..57
+ PATH@16..22
+ PATH_SEGMENT@16..22
+ NAME_REF@16..22
+ IDENT@16..22 "assert"
+ BANG@22..23 "!"
+ TOKEN_TREE@23..57
+ L_PAREN@23..24 "("
+ STRING@24..52 "\"\n fn foo() {\n ..."
+ COMMA@52..53 ","
+ WHITESPACE@53..54 " "
+ STRING@54..56 "\"\""
+ R_PAREN@56..57 ")"
+ SEMICOLON@57..58 ";"
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_syntax_tree_inside_string() {
+ check_range(
+ r#"fn test() {
+ assert!("
+$0fn foo() {
+}$0
+fn bar() {
+}
+ ", "");
+}"#,
+ expect![[r#"
+ SOURCE_FILE@0..12
+ FN@0..12
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..12
+ STMT_LIST@9..12
+ L_CURLY@9..10 "{"
+ WHITESPACE@10..11 "\n"
+ R_CURLY@11..12 "}"
+ "#]],
+ );
+
+ // With a raw string
+ check_range(
+ r###"fn test() {
+ assert!(r#"
+$0fn foo() {
+}$0
+fn bar() {
+}
+ "#, "");
+}"###,
+ expect![[r#"
+ SOURCE_FILE@0..12
+ FN@0..12
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..12
+ STMT_LIST@9..12
+ L_CURLY@9..10 "{"
+ WHITESPACE@10..11 "\n"
+ R_CURLY@11..12 "}"
+ "#]],
+ );
+
+ // With a raw string
+ check_range(
+ r###"fn test() {
+ assert!(r$0#"
+fn foo() {
+}
+fn bar() {
+}"$0#, "");
+}"###,
+ expect![[r#"
+ SOURCE_FILE@0..25
+ FN@0..12
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..12
+ STMT_LIST@9..12
+ L_CURLY@9..10 "{"
+ WHITESPACE@10..11 "\n"
+ R_CURLY@11..12 "}"
+ WHITESPACE@12..13 "\n"
+ FN@13..25
+ FN_KW@13..15 "fn"
+ WHITESPACE@15..16 " "
+ NAME@16..19
+ IDENT@16..19 "bar"
+ PARAM_LIST@19..21
+ L_PAREN@19..20 "("
+ R_PAREN@20..21 ")"
+ WHITESPACE@21..22 " "
+ BLOCK_EXPR@22..25
+ STMT_LIST@22..25
+ L_CURLY@22..23 "{"
+ WHITESPACE@23..24 "\n"
+ R_CURLY@24..25 "}"
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
new file mode 100644
index 000000000..9118f3c69
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -0,0 +1,1210 @@
+//! This module handles auto-magic editing actions applied together with users
+//! edits. For example, if the user typed
+//!
+//! ```text
+//! foo
+//! .bar()
+//! .baz()
+//! | // <- cursor is here
+//! ```
+//!
+//! and types `.` next, we want to indent the dot.
+//!
+//! Language server executes such typing assists synchronously. That is, they
+//! block user's typing and should be pretty fast for this reason!
+
+mod on_enter;
+
+use ide_db::{
+ base_db::{FilePosition, SourceDatabase},
+ RootDatabase,
+};
+use syntax::{
+ algo::{ancestors_at_offset, find_node_at_offset},
+ ast::{self, edit::IndentLevel, AstToken},
+ AstNode, Parse, SourceFile, SyntaxKind, TextRange, TextSize, T,
+};
+
+use text_edit::{Indel, TextEdit};
+
+use crate::SourceChange;
+
+pub(crate) use on_enter::on_enter;
+
// Don't forget to add new trigger characters to `server_capabilities` in `caps.rs`.
pub(crate) const TRIGGER_CHARS: &str = ".=<>{";

/// A `TextEdit` plus a flag saying whether it contains snippet syntax
/// (`$0` placeholders) that the client must interpret.
struct ExtendedTextEdit {
    edit: TextEdit,
    is_snippet: bool,
}
+
+// Feature: On Typing Assists
+//
+// Some features trigger on typing certain characters:
+//
+// - typing `let =` tries to smartly add `;` if `=` is followed by an existing expression
+// - typing `=` between two expressions adds `;` when in statement position
+// - typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
+// - typing `.` in a chain method call auto-indents
+// - typing `{` in front of an expression inserts a closing `}` after the expression
+// - typing `{` in a use item adds a closing `}` in the right place
+//
+// VS Code::
+//
+// Add the following to `settings.json`:
+// [source,json]
+// ----
+// "editor.formatOnType": true,
+// ----
+//
+// image::https://user-images.githubusercontent.com/48062697/113166163-69758500-923a-11eb-81ee-eb33ec380399.gif[]
+// image::https://user-images.githubusercontent.com/48062697/113171066-105c2000-923f-11eb-87ab-f4a263346567.gif[]
pub(crate) fn on_char_typed(
    db: &RootDatabase,
    position: FilePosition,
    char_typed: char,
) -> Option<SourceChange> {
    // Callers must only invoke this for registered trigger characters.
    if !stdx::always!(TRIGGER_CHARS.contains(char_typed)) {
        return None;
    }
    let file = &db.parse(position.file_id);
    // The typed character is expected to already be present in the file at
    // `position` (the client applies the keystroke before asking us).
    if !stdx::always!(file.tree().syntax().text().char_at(position.offset) == Some(char_typed)) {
        return None;
    }
    let edit = on_char_typed_inner(file, position.offset, char_typed)?;
    // Propagate the snippet flag so the client knows whether `$0` markers
    // in the edit are placeholders rather than literal text.
    let mut sc = SourceChange::from_text_edit(position.file_id, edit.edit);
    sc.is_snippet = edit.is_snippet;
    Some(sc)
}
+
+fn on_char_typed_inner(
+ file: &Parse<SourceFile>,
+ offset: TextSize,
+ char_typed: char,
+) -> Option<ExtendedTextEdit> {
+ if !stdx::always!(TRIGGER_CHARS.contains(char_typed)) {
+ return None;
+ }
+ return match char_typed {
+ '.' => conv(on_dot_typed(&file.tree(), offset)),
+ '=' => conv(on_eq_typed(&file.tree(), offset)),
+ '<' => on_left_angle_typed(&file.tree(), offset),
+ '>' => conv(on_right_angle_typed(&file.tree(), offset)),
+ '{' => conv(on_opening_brace_typed(file, offset)),
+ _ => return None,
+ };
+
+ fn conv(text_edit: Option<TextEdit>) -> Option<ExtendedTextEdit> {
+ Some(ExtendedTextEdit { edit: text_edit?, is_snippet: false })
+ }
+}
+
/// Inserts a closing `}` when the user types an opening `{`, wrapping an existing expression in a
/// block, or a part of a `use` item.
fn on_opening_brace_typed(file: &Parse<SourceFile>, offset: TextSize) -> Option<TextEdit> {
    if !stdx::always!(file.tree().syntax().text().char_at(offset) == Some('{')) {
        return None;
    }

    let brace_token = file.tree().syntax().token_at_offset(offset).right_biased()?;
    if brace_token.kind() != SyntaxKind::L_CURLY {
        return None;
    }

    // Remove the `{` to get a better parse tree, and reparse.
    let range = brace_token.text_range();
    if !stdx::always!(range.len() == TextSize::of('{')) {
        return None;
    }
    let file = file.reparse(&Indel::delete(range));

    // Try the expression case first, then the `use`-path case.
    if let Some(edit) = brace_expr(&file.tree(), offset) {
        return Some(edit);
    }

    if let Some(edit) = brace_use_path(&file.tree(), offset) {
        return Some(edit);
    }

    return None;

    // `use some::$0Path;` -> close the brace right after the covered use-tree.
    fn brace_use_path(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
        let segment: ast::PathSegment = find_node_at_offset(file.syntax(), offset)?;
        if segment.syntax().text_range().start() != offset {
            return None;
        }

        let tree: ast::UseTree = find_node_at_offset(file.syntax(), offset)?;

        // Offsets below are in the *reparsed* file (with the `{` deleted),
        // so shift the insertion point by the width of the typed `{`.
        Some(TextEdit::insert(
            tree.syntax().text_range().end() + TextSize::of("{"),
            "}".to_string(),
        ))
    }

    // `$0expr` -> insert `}` after the outermost expression starting at `offset`.
    fn brace_expr(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
        let mut expr: ast::Expr = find_node_at_offset(file.syntax(), offset)?;
        if expr.syntax().text_range().start() != offset {
            return None;
        }

        // Enclose the outermost expression starting at `offset`
        while let Some(parent) = expr.syntax().parent() {
            if parent.text_range().start() != expr.syntax().text_range().start() {
                break;
            }

            match ast::Expr::cast(parent) {
                Some(parent) => expr = parent,
                None => break,
            }
        }

        // If it's a statement in a block, we don't know how many statements should be included
        if ast::ExprStmt::can_cast(expr.syntax().parent()?.kind()) {
            return None;
        }

        // Insert `}` right after the expression.
        // (Again shifted by the deleted `{`'s width — see `brace_use_path`.)
        Some(TextEdit::insert(
            expr.syntax().text_range().end() + TextSize::of("{"),
            "}".to_string(),
        ))
    }
}
+
/// Returns an edit which should be applied after `=` was typed. Primarily,
/// this works when adding `let =`.
// FIXME: use a snippet completion instead of this hack here.
fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
    if !stdx::always!(file.syntax().text().char_at(offset) == Some('=')) {
        return None;
    }

    // Three independent strategies, tried in order; the first that applies wins.
    if let Some(edit) = let_stmt(file, offset) {
        return Some(edit);
    }
    if let Some(edit) = assign_expr(file, offset) {
        return Some(edit);
    }
    if let Some(edit) = assign_to_eq(file, offset) {
        return Some(edit);
    }

    return None;

    // `a $0= b` in statement position -> append `;` after the RHS.
    fn assign_expr(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
        let binop: ast::BinExpr = find_node_at_offset(file.syntax(), offset)?;
        if !matches!(binop.op_kind(), Some(ast::BinaryOp::Assignment { op: None })) {
            return None;
        }

        // Parent must be `ExprStmt` or `StmtList` for `;` to be valid.
        if let Some(expr_stmt) = ast::ExprStmt::cast(binop.syntax().parent()?) {
            if expr_stmt.semicolon_token().is_some() {
                return None;
            }
        } else {
            if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) {
                return None;
            }
        }

        let expr = binop.rhs()?;
        let expr_range = expr.syntax().text_range();
        // Bail if the cursor is in the middle of the RHS, or if the RHS
        // starts on a later line (the user is probably still typing).
        if expr_range.contains(offset) && offset != expr_range.start() {
            return None;
        }
        if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') {
            return None;
        }
        let offset = expr.syntax().text_range().end();
        Some(TextEdit::insert(offset, ";".to_string()))
    }

    /// `a =$0 b;` removes the semicolon if an expression is valid in this context.
    fn assign_to_eq(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
        let binop: ast::BinExpr = find_node_at_offset(file.syntax(), offset)?;
        if !matches!(binop.op_kind(), Some(ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false })))
        {
            return None;
        }

        let expr_stmt = ast::ExprStmt::cast(binop.syntax().parent()?)?;
        let semi = expr_stmt.semicolon_token()?;

        if expr_stmt.syntax().next_sibling().is_some() {
            // Not the last statement in the list.
            return None;
        }

        Some(TextEdit::delete(semi.text_range()))
    }

    // `let x $0= init` without a trailing `;` -> append `;` after the statement.
    fn let_stmt(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
        let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
        if let_stmt.semicolon_token().is_some() {
            return None;
        }
        let expr = let_stmt.initializer()?;
        let expr_range = expr.syntax().text_range();
        // Same "still typing" guards as in `assign_expr` above.
        if expr_range.contains(offset) && offset != expr_range.start() {
            return None;
        }
        if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') {
            return None;
        }
        let offset = let_stmt.syntax().text_range().end();
        Some(TextEdit::insert(offset, ";".to_string()))
    }
}
+
/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately.
fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
    if !stdx::always!(file.syntax().text().char_at(offset) == Some('.')) {
        return None;
    }
    // The dot must be preceded by whitespace (i.e. typed on a blank line).
    let whitespace =
        file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?;

    // if prior is fn call over multiple lines dont indent
    // or if previous is method call over multiples lines keep that indent
    let current_indent = {
        let text = whitespace.text();
        // Everything after the last newline is the current line's indent.
        let (_prefix, suffix) = text.rsplit_once('\n')?;
        suffix
    };
    let current_indent_len = TextSize::of(current_indent);

    let parent = whitespace.syntax().parent()?;
    // Make sure dot is a part of call chain
    let receiver = if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
        field_expr.expr()?
    } else if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent.clone()) {
        method_call_expr.receiver()?
    } else {
        return None;
    };

    let receiver_is_multiline = receiver.syntax().text().find_char('\n').is_some();
    let target_indent = match (receiver, receiver_is_multiline) {
        // if receiver is multiline field or method call, just take the previous `.` indentation
        (ast::Expr::MethodCallExpr(expr), true) => {
            expr.dot_token().as_ref().map(IndentLevel::from_token)
        }
        (ast::Expr::FieldExpr(expr), true) => {
            expr.dot_token().as_ref().map(IndentLevel::from_token)
        }
        // if receiver is multiline expression, just keeps its indentation
        (_, true) => Some(IndentLevel::from_node(&parent)),
        _ => None,
    };
    let target_indent = match target_indent {
        Some(x) => x,
        // in all other cases, take previous indentation and indent once
        None => IndentLevel::from_node(&parent) + 1,
    }
    .to_string();

    // Already at the desired indentation: no edit needed.
    if current_indent_len == TextSize::of(&target_indent) {
        return None;
    }

    // Replace the current line's indent (everything before the dot) with the target indent.
    Some(TextEdit::replace(TextRange::new(offset - current_indent_len, offset), target_indent))
}
+
/// Add closing `>` for generic arguments/parameters.
fn on_left_angle_typed(file: &SourceFile, offset: TextSize) -> Option<ExtendedTextEdit> {
    let file_text = file.syntax().text();
    if !stdx::always!(file_text.char_at(offset) == Some('<')) {
        return None;
    }

    // Find the next non-whitespace char in the line.
    let mut next_offset = offset + TextSize::of('<');
    while file_text.char_at(next_offset) == Some(' ') {
        next_offset += TextSize::of(' ')
    }
    // A `>` already follows: don't double it up.
    if file_text.char_at(next_offset) == Some('>') {
        return None;
    }

    let range = TextRange::at(offset, TextSize::of('<'));
    // `impl<` always introduces generic params, so close it unconditionally.
    if let Some(t) = file.syntax().token_at_offset(offset).left_biased() {
        if T![impl] == t.kind() {
            return Some(ExtendedTextEdit {
                edit: TextEdit::replace(range, "<$0>".to_string()),
                is_snippet: true,
            });
        }
    }

    // Otherwise only close when we are syntactically inside a generic
    // param/arg list — a bare `<` is more likely a comparison.
    if ancestors_at_offset(file.syntax(), offset)
        .find(|n| {
            ast::GenericParamList::can_cast(n.kind()) || ast::GenericArgList::can_cast(n.kind())
        })
        .is_some()
    {
        // `$0` is a snippet tab stop placing the cursor between the angles.
        return Some(ExtendedTextEdit {
            edit: TextEdit::replace(range, "<$0>".to_string()),
            is_snippet: true,
        });
    }

    None
}
+
+/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }`
+fn on_right_angle_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let file_text = file.syntax().text();
+ if !stdx::always!(file_text.char_at(offset) == Some('>')) {
+ return None;
+ }
+ let after_arrow = offset + TextSize::of('>');
+ if file_text.char_at(after_arrow) != Some('{') {
+ return None;
+ }
+ if find_node_at_offset::<ast::RetType>(file.syntax(), offset).is_none() {
+ return None;
+ }
+
+ Some(TextEdit::insert(after_arrow, " ".to_string()))
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::{assert_eq_text, extract_offset};
+
+ use super::*;
+
    impl ExtendedTextEdit {
        // Test-only convenience: apply the inner edit, ignoring the snippet flag.
        fn apply(&self, text: &mut String) {
            self.edit.apply(text);
        }
    }

    // Simulates typing `char_typed` at the `$0` position in `before`:
    // inserts the character, reparses, and returns the text after any
    // on-typing edit, or `None` if no edit was produced.
    fn do_type_char(char_typed: char, before: &str) -> Option<String> {
        let (offset, mut before) = extract_offset(before);
        let edit = TextEdit::insert(offset, char_typed.to_string());
        edit.apply(&mut before);
        let parse = SourceFile::parse(&before);
        on_char_typed_inner(&parse, offset, char_typed).map(|it| {
            it.apply(&mut before);
            before.to_string()
        })
    }

    // Asserts that typing `char_typed` transforms `ra_fixture_before`
    // into exactly `ra_fixture_after`.
    fn type_char(char_typed: char, ra_fixture_before: &str, ra_fixture_after: &str) {
        let actual = do_type_char(char_typed, ra_fixture_before)
            .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed));

        assert_eq_text!(ra_fixture_after, &actual);
    }

    // Asserts that typing `char_typed` produces no edit at all.
    fn type_char_noop(char_typed: char, ra_fixture_before: &str) {
        let file_change = do_type_char(char_typed, ra_fixture_before);
        assert!(file_change.is_none())
    }
+
+ #[test]
+ fn test_semi_after_let() {
+ // do_check(r"
+ // fn foo() {
+ // let foo =$0
+ // }
+ // ", r"
+ // fn foo() {
+ // let foo =;
+ // }
+ // ");
+ type_char(
+ '=',
+ r#"
+fn foo() {
+ let foo $0 1 + 1
+}
+"#,
+ r#"
+fn foo() {
+ let foo = 1 + 1;
+}
+"#,
+ );
+ // do_check(r"
+ // fn foo() {
+ // let foo =$0
+ // let bar = 1;
+ // }
+ // ", r"
+ // fn foo() {
+ // let foo =;
+ // let bar = 1;
+ // }
+ // ");
+ }
+
+ #[test]
+ fn test_semi_after_assign() {
+ type_char(
+ '=',
+ r#"
+fn f() {
+ i $0 0
+}
+"#,
+ r#"
+fn f() {
+ i = 0;
+}
+"#,
+ );
+ type_char(
+ '=',
+ r#"
+fn f() {
+ i $0 0
+ i
+}
+"#,
+ r#"
+fn f() {
+ i = 0;
+ i
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(x: u8) {
+ if x $0
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(x: u8) {
+ if x $0 {}
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(x: u8) {
+ if x $0 0 {}
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f() {
+ g(i $0 0);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn assign_to_eq() {
+ type_char(
+ '=',
+ r#"
+fn f(a: u8) {
+ a =$0 0;
+}
+"#,
+ r#"
+fn f(a: u8) {
+ a == 0
+}
+"#,
+ );
+ type_char(
+ '=',
+ r#"
+fn f(a: u8) {
+ a $0= 0;
+}
+"#,
+ r#"
+fn f(a: u8) {
+ a == 0
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(a: u8) {
+ let e = a =$0 0;
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(a: u8) {
+ let e = a =$0 0;
+ e
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indents_new_chain_call() {
+ type_char(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ $0
+}
+ "#,
+ r#"
+fn main() {
+ xs.foo()
+ .
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn indents_new_chain_call_with_semi() {
+ type_char(
+ '.',
+ r"
+fn main() {
+ xs.foo()
+ $0;
+}
+ ",
+ r#"
+fn main() {
+ xs.foo()
+ .;
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ $0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn indents_new_chain_call_with_let() {
+ type_char(
+ '.',
+ r#"
+fn main() {
+ let _ = foo
+ $0
+ bar()
+}
+"#,
+ r#"
+fn main() {
+ let _ = foo
+ .
+ bar()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indents_continued_chain_call() {
+ type_char(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ .first()
+ $0
+}
+ "#,
+ r#"
+fn main() {
+ xs.foo()
+ .first()
+ .
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ .first()
+ $0
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_middle_of_chain_call() {
+ type_char(
+ '.',
+ r#"
+fn source_impl() {
+ let var = enum_defvariant_list().unwrap()
+ $0
+ .nth(92)
+ .unwrap();
+}
+ "#,
+ r#"
+fn source_impl() {
+ let var = enum_defvariant_list().unwrap()
+ .
+ .nth(92)
+ .unwrap();
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn source_impl() {
+ let var = enum_defvariant_list().unwrap()
+ $0
+ .nth(92)
+ .unwrap();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn dont_indent_freestanding_dot() {
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+ $0
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+$0
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn adds_space_after_return_type() {
+ type_char(
+ '>',
+ r#"
+fn foo() -$0{ 92 }
+"#,
+ r#"
+fn foo() -> { 92 }
+"#,
+ );
+ }
+
+ #[test]
+ fn adds_closing_brace_for_expr() {
+ type_char(
+ '{',
+ r#"
+fn f() { match () { _ => $0() } }
+ "#,
+ r#"
+fn f() { match () { _ => {()} } }
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+fn f() { $0() }
+ "#,
+ r#"
+fn f() { {()} }
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+fn f() { let x = $0(); }
+ "#,
+ r#"
+fn f() { let x = {()}; }
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+fn f() { let x = $0a.b(); }
+ "#,
+ r#"
+fn f() { let x = {a.b()}; }
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+const S: () = $0();
+fn f() {}
+ "#,
+ r#"
+const S: () = {()};
+fn f() {}
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+const S: () = $0a.b();
+fn f() {}
+ "#,
+ r#"
+const S: () = {a.b()};
+fn f() {}
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+fn f() {
+ match x {
+ 0 => $0(),
+ 1 => (),
+ }
+}
+ "#,
+ r#"
+fn f() {
+ match x {
+ 0 => {()},
+ 1 => (),
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn noop_in_string_literal() {
+ // Regression test for #9351
+ type_char_noop(
+ '{',
+ r##"
+fn check_with(ra_fixture: &str, expect: Expect) {
+ let base = r#"
+enum E { T(), R$0, C }
+use self::E::X;
+const Z: E = E::C;
+mod m {}
+asdasdasdasdasdasda
+sdasdasdasdasdasda
+sdasdasdasdasd
+"#;
+ let actual = completion_list(&format!("{}\n{}", base, ra_fixture));
+ expect.assert_eq(&actual)
+}
+ "##,
+ );
+ }
+
+ #[test]
+ fn noop_in_item_position_with_macro() {
+ type_char_noop('{', r#"$0println!();"#);
+ type_char_noop(
+ '{',
+ r#"
+fn main() $0println!("hello");
+}"#,
+ );
+ }
+
+ #[test]
+ fn adds_closing_brace_for_use_tree() {
+ type_char(
+ '{',
+ r#"
+use some::$0Path;
+ "#,
+ r#"
+use some::{Path};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::{Path, $0Other};
+ "#,
+ r#"
+use some::{Path, {Other}};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::{$0Path, Other};
+ "#,
+ r#"
+use some::{{Path}, Other};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::path::$0to::Item;
+ "#,
+ r#"
+use some::path::{to::Item};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::$0path::to::Item;
+ "#,
+ r#"
+use some::{path::to::Item};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use $0some::path::to::Item;
+ "#,
+ r#"
+use {some::path::to::Item};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::path::$0to::{Item};
+ "#,
+ r#"
+use some::path::{to::{Item}};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use $0Thing as _;
+ "#,
+ r#"
+use {Thing as _};
+ "#,
+ );
+
+ type_char_noop(
+ '{',
+ r#"
+use some::pa$0th::to::Item;
+ "#,
+ );
+ }
+
+ #[test]
+ fn adds_closing_angle_bracket_for_generic_args() {
+ type_char(
+ '<',
+ r#"
+fn foo() {
+ bar::$0
+}
+ "#,
+ r#"
+fn foo() {
+ bar::<$0>
+}
+ "#,
+ );
+
+ type_char(
+ '<',
+ r#"
+fn foo(bar: &[u64]) {
+ bar.iter().collect::$0();
+}
+ "#,
+ r#"
+fn foo(bar: &[u64]) {
+ bar.iter().collect::<$0>();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn adds_closing_angle_bracket_for_generic_params() {
+ type_char(
+ '<',
+ r#"
+fn foo$0() {}
+ "#,
+ r#"
+fn foo<$0>() {}
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+fn foo$0
+ "#,
+ r#"
+fn foo<$0>
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+struct Foo$0 {}
+ "#,
+ r#"
+struct Foo<$0> {}
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+struct Foo$0();
+ "#,
+ r#"
+struct Foo<$0>();
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+struct Foo$0
+ "#,
+ r#"
+struct Foo<$0>
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+enum Foo$0
+ "#,
+ r#"
+enum Foo<$0>
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+trait Foo$0
+ "#,
+ r#"
+trait Foo<$0>
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+type Foo$0 = Bar;
+ "#,
+ r#"
+type Foo<$0> = Bar;
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+impl$0 Foo {}
+ "#,
+ r#"
+impl<$0> Foo {}
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+impl<T> Foo$0 {}
+ "#,
+ r#"
+impl<T> Foo<$0> {}
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+impl Foo$0 {}
+ "#,
+ r#"
+impl Foo<$0> {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn dont_add_closing_angle_bracket_for_comparison() {
+ type_char_noop(
+ '<',
+ r#"
+fn main() {
+ 42$0
+}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn main() {
+ 42 $0
+}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn main() {
+ let foo = 42;
+ foo $0
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn dont_add_closing_angle_bracket_if_it_is_already_there() {
+ type_char_noop(
+ '<',
+ r#"
+fn foo() {
+ bar::$0>
+}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn foo(bar: &[u64]) {
+ bar.iter().collect::$0 >();
+}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn foo$0>() {}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn foo$0>
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+struct Foo$0> {}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+struct Foo$0>();
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+struct Foo$0>
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+enum Foo$0>
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+trait Foo$0>
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+type Foo$0> = Bar;
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+impl$0> Foo {}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+impl<T> Foo$0> {}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+impl Foo$0> {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn regression_629() {
+ type_char_noop(
+ '.',
+ r#"
+fn foo() {
+ CompletionItem::new(
+ CompletionKind::Reference,
+ ctx.source_range(),
+ field.name().to_string(),
+ )
+ .foo()
+ $0
+}
+"#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn foo() {
+ CompletionItem::new(
+ CompletionKind::Reference,
+ ctx.source_range(),
+ field.name().to_string(),
+ )
+ $0
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
new file mode 100644
index 000000000..48c171327
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
@@ -0,0 +1,616 @@
+//! Handles the `Enter` key press. At the moment, this only continues
+//! comments, but should handle indent some time in the future as well.
+
+use ide_db::base_db::{FilePosition, SourceDatabase};
+use ide_db::RootDatabase;
+use syntax::{
+ algo::find_node_at_offset,
+ ast::{self, edit::IndentLevel, AstToken},
+ AstNode, SmolStr, SourceFile,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset,
+};
+
+use text_edit::TextEdit;
+
+// Feature: On Enter
+//
+// rust-analyzer can override kbd:[Enter] key to make it smarter:
+//
+// - kbd:[Enter] inside triple-slash comments automatically inserts `///`
+// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//`
+// - kbd:[Enter] inside `//!` doc comments automatically inserts `//!`
+// - kbd:[Enter] after `{` indents contents and closing `}` of single-line block
+//
+// This action needs to be assigned to shortcut explicitly.
+//
+// Note that, depending on the other installed extensions, this feature can visibly slow down typing.
+// Similarly, if rust-analyzer crashes or stops responding, `Enter` might not work.
+// In that case, you can still press `Shift-Enter` to insert a newline.
+//
+// VS Code::
+//
+// Add the following to `keybindings.json`:
+// [source,json]
+// ----
+// {
+// "key": "Enter",
+// "command": "rust-analyzer.onEnter",
+// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
+// }
+// ----
+//
+// When using the Vim plugin:
+// [source,json]
+// ----
+// {
+// "key": "Enter",
+// "command": "rust-analyzer.onEnter",
+// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust && vim.mode == 'Insert'"
+// }
+// ----
+//
+// image::https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif[]
+pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
+ let parse = db.parse(position.file_id);
+ let file = parse.tree();
+ let token = file.syntax().token_at_offset(position.offset).left_biased()?;
+
+ if let Some(comment) = ast::Comment::cast(token.clone()) {
+ return on_enter_in_comment(&comment, &file, position.offset);
+ }
+
+ if token.kind() == L_CURLY {
+ // Typing enter after the `{` of a block expression, where the `}` is on the same line
+ if let Some(edit) = find_node_at_offset(file.syntax(), position.offset - TextSize::of('{'))
+ .and_then(|block| on_enter_in_block(block, position))
+ {
+ cov_mark::hit!(indent_block_contents);
+ return Some(edit);
+ }
+
+ // Typing enter after the `{` of a use tree list.
+ if let Some(edit) = find_node_at_offset(file.syntax(), position.offset - TextSize::of('{'))
+ .and_then(|list| on_enter_in_use_tree_list(list, position))
+ {
+ cov_mark::hit!(indent_block_contents);
+ return Some(edit);
+ }
+ }
+
+ None
+}
+
+fn on_enter_in_comment(
+ comment: &ast::Comment,
+ file: &ast::SourceFile,
+ offset: TextSize,
+) -> Option<TextEdit> {
+ if comment.kind().shape.is_block() {
+ return None;
+ }
+
+ let prefix = comment.prefix();
+ let comment_range = comment.syntax().text_range();
+ if offset < comment_range.start() + TextSize::of(prefix) {
+ return None;
+ }
+
+ let mut remove_trailing_whitespace = false;
+ // Continuing single-line non-doc comments (like this one :) ) is annoying
+ if prefix == "//" && comment_range.end() == offset {
+ if comment.text().ends_with(' ') {
+ cov_mark::hit!(continues_end_of_line_comment_with_space);
+ remove_trailing_whitespace = true;
+ } else if !followed_by_comment(comment) {
+ return None;
+ }
+ }
+
+ let indent = node_indent(file, comment.syntax())?;
+ let inserted = format!("\n{}{} $0", indent, prefix);
+ let delete = if remove_trailing_whitespace {
+ let trimmed_len = comment.text().trim_end().len() as u32;
+ let trailing_whitespace_len = comment.text().len() as u32 - trimmed_len;
+ TextRange::new(offset - TextSize::from(trailing_whitespace_len), offset)
+ } else {
+ TextRange::empty(offset)
+ };
+ let edit = TextEdit::replace(delete, inserted);
+ Some(edit)
+}
+
+fn on_enter_in_block(block: ast::BlockExpr, position: FilePosition) -> Option<TextEdit> {
+ let contents = block_contents(&block)?;
+
+ if block.syntax().text().contains_char('\n') {
+ return None;
+ }
+
+ let indent = IndentLevel::from_node(block.syntax());
+ let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1));
+ edit.union(TextEdit::insert(contents.text_range().end(), format!("\n{}", indent))).ok()?;
+ Some(edit)
+}
+
+fn on_enter_in_use_tree_list(list: ast::UseTreeList, position: FilePosition) -> Option<TextEdit> {
+ if list.syntax().text().contains_char('\n') {
+ return None;
+ }
+
+ let indent = IndentLevel::from_node(list.syntax());
+ let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1));
+ edit.union(TextEdit::insert(
+ list.r_curly_token()?.text_range().start(),
+ format!("\n{}", indent),
+ ))
+ .ok()?;
+ Some(edit)
+}
+
+fn block_contents(block: &ast::BlockExpr) -> Option<SyntaxNode> {
+ let mut node = block.tail_expr().map(|e| e.syntax().clone());
+
+ for stmt in block.statements() {
+ if node.is_some() {
+ // More than 1 node in the block
+ return None;
+ }
+
+ node = Some(stmt.syntax().clone());
+ }
+
+ node
+}
+
+fn followed_by_comment(comment: &ast::Comment) -> bool {
+ let ws = match comment.syntax().next_token().and_then(ast::Whitespace::cast) {
+ Some(it) => it,
+ None => return false,
+ };
+ if ws.spans_multiple_lines() {
+ return false;
+ }
+ ws.syntax().next_token().and_then(ast::Comment::cast).is_some()
+}
+
+fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
+ let ws = match file.syntax().token_at_offset(token.text_range().start()) {
+ TokenAtOffset::Between(l, r) => {
+ assert!(r == *token);
+ l
+ }
+ TokenAtOffset::Single(n) => {
+ assert!(n == *token);
+ return Some("".into());
+ }
+ TokenAtOffset::None => unreachable!(),
+ };
+ if ws.kind() != WHITESPACE {
+ return None;
+ }
+ let text = ws.text();
+ let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
+ Some(text[pos..].into())
+}
+
+#[cfg(test)]
+mod tests {
+ use stdx::trim_indent;
+ use test_utils::assert_eq_text;
+
+ use crate::fixture;
+
+ fn apply_on_enter(before: &str) -> Option<String> {
+ let (analysis, position) = fixture::position(before);
+ let result = analysis.on_enter(position).unwrap()?;
+
+ let mut actual = analysis.file_text(position.file_id).unwrap().to_string();
+ result.apply(&mut actual);
+ Some(actual)
+ }
+
+ fn do_check(ra_fixture_before: &str, ra_fixture_after: &str) {
+ let ra_fixture_after = &trim_indent(ra_fixture_after);
+ let actual = apply_on_enter(ra_fixture_before).unwrap();
+ assert_eq_text!(ra_fixture_after, &actual);
+ }
+
+ fn do_check_noop(ra_fixture_text: &str) {
+ assert!(apply_on_enter(ra_fixture_text).is_none())
+ }
+
+ #[test]
+ fn continues_doc_comment() {
+ do_check(
+ r"
+/// Some docs$0
+fn foo() {
+}
+",
+ r"
+/// Some docs
+/// $0
+fn foo() {
+}
+",
+ );
+
+ do_check(
+ r"
+impl S {
+ /// Some$0 docs.
+ fn foo() {}
+}
+",
+ r"
+impl S {
+ /// Some
+ /// $0 docs.
+ fn foo() {}
+}
+",
+ );
+
+ do_check(
+ r"
+///$0 Some docs
+fn foo() {
+}
+",
+ r"
+///
+/// $0 Some docs
+fn foo() {
+}
+",
+ );
+ }
+
+ #[test]
+ fn does_not_continue_before_doc_comment() {
+ do_check_noop(r"$0//! docz");
+ }
+
+ #[test]
+ fn continues_another_doc_comment() {
+ do_check(
+ r#"
+fn main() {
+ //! Documentation for$0 on enter
+ let x = 1 + 1;
+}
+"#,
+ r#"
+fn main() {
+ //! Documentation for
+ //! $0 on enter
+ let x = 1 + 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn continues_code_comment_in_the_middle_of_line() {
+ do_check(
+ r"
+fn main() {
+ // Fix$0 me
+ let x = 1 + 1;
+}
+",
+ r"
+fn main() {
+ // Fix
+ // $0 me
+ let x = 1 + 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn continues_code_comment_in_the_middle_several_lines() {
+ do_check(
+ r"
+fn main() {
+ // Fix$0
+ // me
+ let x = 1 + 1;
+}
+",
+ r"
+fn main() {
+ // Fix
+ // $0
+ // me
+ let x = 1 + 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn does_not_continue_end_of_line_comment() {
+ do_check_noop(
+ r"
+fn main() {
+ // Fix me$0
+ let x = 1 + 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn continues_end_of_line_comment_with_space() {
+ cov_mark::check!(continues_end_of_line_comment_with_space);
+ do_check(
+ r#"
+fn main() {
+ // Fix me $0
+ let x = 1 + 1;
+}
+"#,
+ r#"
+fn main() {
+ // Fix me
+ // $0
+ let x = 1 + 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trims_all_trailing_whitespace() {
+ do_check(
+ "
+fn main() {
+ // Fix me \t\t $0
+ let x = 1 + 1;
+}
+",
+ "
+fn main() {
+ // Fix me
+ // $0
+ let x = 1 + 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn indents_fn_body_block() {
+ cov_mark::check!(indent_block_contents);
+ do_check(
+ r#"
+fn f() {$0()}
+ "#,
+ r#"
+fn f() {
+ $0()
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_block_expr() {
+ do_check(
+ r#"
+fn f() {
+ let x = {$0()};
+}
+ "#,
+ r#"
+fn f() {
+ let x = {
+ $0()
+ };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_match_arm() {
+ do_check(
+ r#"
+fn f() {
+ match 6 {
+ 1 => {$0f()},
+ _ => (),
+ }
+}
+ "#,
+ r#"
+fn f() {
+ match 6 {
+ 1 => {
+ $0f()
+ },
+ _ => (),
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_block_with_statement() {
+ do_check(
+ r#"
+fn f() {$0a = b}
+ "#,
+ r#"
+fn f() {
+ $0a = b
+}
+ "#,
+ );
+ do_check(
+ r#"
+fn f() {$0fn f() {}}
+ "#,
+ r#"
+fn f() {
+ $0fn f() {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_nested_blocks() {
+ do_check(
+ r#"
+fn f() {$0{}}
+ "#,
+ r#"
+fn f() {
+ $0{}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_empty_block() {
+ do_check_noop(
+ r#"
+fn f() {$0}
+ "#,
+ );
+ do_check_noop(
+ r#"
+fn f() {{$0}}
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_block_with_too_much_content() {
+ do_check_noop(
+ r#"
+fn f() {$0 a = b; ()}
+ "#,
+ );
+ do_check_noop(
+ r#"
+fn f() {$0 a = b; a = b; }
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_multiline_block() {
+ do_check_noop(
+ r#"
+fn f() {$0
+}
+ "#,
+ );
+ do_check_noop(
+ r#"
+fn f() {$0
+
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_use_tree_list() {
+ do_check(
+ r#"
+use crate::{$0};
+ "#,
+ r#"
+use crate::{
+ $0
+};
+ "#,
+ );
+ do_check(
+ r#"
+use crate::{$0Object, path::to::OtherThing};
+ "#,
+ r#"
+use crate::{
+ $0Object, path::to::OtherThing
+};
+ "#,
+ );
+ do_check(
+ r#"
+use {crate::{$0Object, path::to::OtherThing}};
+ "#,
+ r#"
+use {crate::{
+ $0Object, path::to::OtherThing
+}};
+ "#,
+ );
+ do_check(
+ r#"
+use {
+ crate::{$0Object, path::to::OtherThing}
+};
+ "#,
+ r#"
+use {
+ crate::{
+ $0Object, path::to::OtherThing
+ }
+};
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_use_tree_list_when_not_at_curly_brace() {
+ do_check_noop(
+ r#"
+use path::{Thing$0};
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_use_tree_list_without_curly_braces() {
+ do_check_noop(
+ r#"
+use path::Thing$0;
+ "#,
+ );
+ do_check_noop(
+ r#"
+use path::$0Thing;
+ "#,
+ );
+ do_check_noop(
+ r#"
+use path::Thing$0};
+ "#,
+ );
+ do_check_noop(
+ r#"
+use path::{$0Thing;
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_multiline_use_tree_list() {
+ do_check_noop(
+ r#"
+use path::{$0
+ Thing
+};
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
new file mode 100644
index 000000000..51291a645
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
@@ -0,0 +1,93 @@
+use std::sync::Arc;
+
+use dot::{Id, LabelText};
+use ide_db::{
+ base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
+ FxHashSet, RootDatabase,
+};
+
+// Feature: View Crate Graph
+//
+// Renders the currently loaded crate graph as an SVG graphic. Requires the `dot` tool, which
+// is part of graphviz, to be installed.
+//
+// Only workspace crates are included, no crates.io dependencies or sysroot crates.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: View Crate Graph**
+// |===
+pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
+ let crate_graph = db.crate_graph();
+ let crates_to_render = crate_graph
+ .iter()
+ .filter(|krate| {
+ if full {
+ true
+ } else {
+ // Only render workspace crates
+ let root_id = db.file_source_root(crate_graph[*krate].root_file_id);
+ !db.source_root(root_id).is_library
+ }
+ })
+ .collect();
+ let graph = DotCrateGraph { graph: crate_graph, crates_to_render };
+
+ let mut dot = Vec::new();
+ dot::render(&graph, &mut dot).unwrap();
+ Ok(String::from_utf8(dot).unwrap())
+}
+
+struct DotCrateGraph {
+ graph: Arc<CrateGraph>,
+ crates_to_render: FxHashSet<CrateId>,
+}
+
+type Edge<'a> = (CrateId, &'a Dependency);
+
+impl<'a> dot::GraphWalk<'a, CrateId, Edge<'a>> for DotCrateGraph {
+ fn nodes(&'a self) -> dot::Nodes<'a, CrateId> {
+ self.crates_to_render.iter().copied().collect()
+ }
+
+ fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> {
+ self.crates_to_render
+ .iter()
+ .flat_map(|krate| {
+ self.graph[*krate]
+ .dependencies
+ .iter()
+ .filter(|dep| self.crates_to_render.contains(&dep.crate_id))
+ .map(move |dep| (*krate, dep))
+ })
+ .collect()
+ }
+
+ fn source(&'a self, edge: &Edge<'a>) -> CrateId {
+ edge.0
+ }
+
+ fn target(&'a self, edge: &Edge<'a>) -> CrateId {
+ edge.1.crate_id
+ }
+}
+
+impl<'a> dot::Labeller<'a, CrateId, Edge<'a>> for DotCrateGraph {
+ fn graph_id(&'a self) -> Id<'a> {
+ Id::new("rust_analyzer_crate_graph").unwrap()
+ }
+
+ fn node_id(&'a self, n: &CrateId) -> Id<'a> {
+ Id::new(format!("_{}", n.0)).unwrap()
+ }
+
+ fn node_shape(&'a self, _node: &CrateId) -> Option<LabelText<'a>> {
+ Some(LabelText::LabelStr("box".into()))
+ }
+
+ fn node_label(&'a self, n: &CrateId) -> LabelText<'a> {
+ let name = self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| &*name);
+ LabelText::LabelStr(name.into())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
new file mode 100644
index 000000000..7312afe53
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
@@ -0,0 +1,26 @@
+use hir::{Function, Semantics};
+use ide_db::base_db::FilePosition;
+use ide_db::RootDatabase;
+use syntax::{algo::find_node_at_offset, ast, AstNode};
+
+// Feature: View Hir
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: View Hir**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[]
+pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String {
+ body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_string())
+}
+
+fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+
+ let function = find_node_at_offset::<ast::Fn>(source_file.syntax(), position.offset)?;
+
+ let function: Function = sema.to_def(&function)?;
+ Some(function.debug_hir(db))
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
new file mode 100644
index 000000000..3dc03085d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
@@ -0,0 +1,16 @@
+use hir::db::DefDatabase;
+use ide_db::base_db::FileId;
+use ide_db::RootDatabase;
+
+// Feature: Debug ItemTree
+//
+// Displays the ItemTree of the currently open file, for debugging.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Debug ItemTree**
+// |===
+pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
+ db.file_item_tree(file_id.into()).pretty_print()
+}
diff --git a/src/tools/rust-analyzer/crates/limit/Cargo.toml b/src/tools/rust-analyzer/crates/limit/Cargo.toml
new file mode 100644
index 000000000..893db436d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/limit/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "limit"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[features]
+tracking = []
+default = ["tracking"]
diff --git a/src/tools/rust-analyzer/crates/limit/src/lib.rs b/src/tools/rust-analyzer/crates/limit/src/lib.rs
new file mode 100644
index 000000000..d6a706a7c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/limit/src/lib.rs
@@ -0,0 +1,69 @@
+//! limit defines a struct to enforce limits.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[cfg(feature = "tracking")]
+use std::sync::atomic::AtomicUsize;
+
+/// Represents a struct used to enforce a numerical limit.
+pub struct Limit {
+ upper_bound: usize,
+ #[cfg(feature = "tracking")]
+ max: AtomicUsize,
+}
+
+impl Limit {
+ /// Creates a new limit.
+ #[inline]
+ pub const fn new(upper_bound: usize) -> Self {
+ Self {
+ upper_bound,
+ #[cfg(feature = "tracking")]
+ max: AtomicUsize::new(0),
+ }
+ }
+
+    /// Creates a new limit with tracking enabled (`max` starts at 1, which `check` treats as "tracking on").
+ #[inline]
+ #[cfg(feature = "tracking")]
+ pub const fn new_tracking(upper_bound: usize) -> Self {
+ Self {
+ upper_bound,
+ #[cfg(feature = "tracking")]
+ max: AtomicUsize::new(1),
+ }
+ }
+
+ /// Gets the underlying numeric limit.
+ #[inline]
+ pub const fn inner(&self) -> usize {
+ self.upper_bound
+ }
+
+ /// Checks whether the given value is below the limit.
+ /// Returns `Ok` when `other` is below `self`, and `Err` otherwise.
+ #[inline]
+ pub fn check(&self, other: usize) -> Result<(), ()> {
+ if other > self.upper_bound {
+ Err(())
+ } else {
+ #[cfg(feature = "tracking")]
+ loop {
+ use std::sync::atomic::Ordering;
+ let old_max = self.max.load(Ordering::Relaxed);
+ if other <= old_max || old_max == 0 {
+ break;
+ }
+ if self
+ .max
+ .compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed)
+ .is_ok()
+ {
+ eprintln!("new max: {}", other);
+ }
+ }
+
+ Ok(())
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
new file mode 100644
index 000000000..5ff3448a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "mbe"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+rustc-hash = "1.1.0"
+smallvec = "1.9.0"
+tracing = "0.1.35"
+
+syntax = { path = "../syntax", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
new file mode 100644
index 000000000..ac691578d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -0,0 +1,222 @@
+//! This module adds real-world mbe examples for benchmark tests
+
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SmolStr,
+};
+use test_utils::{bench, bench_fixture, skip_slow_tests};
+
+use crate::{
+ parser::{Op, RepeatKind, Separator},
+ syntax_node_to_token_tree, DeclarativeMacro,
+};
+
+#[test]
+fn benchmark_parse_macro_rules() {
+ if skip_slow_tests() {
+ return;
+ }
+ let rules = macro_rules_fixtures_tt();
+ let hash: usize = {
+ let _pt = bench("mbe parse macro rules");
+ rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it).unwrap().rules.len()).sum()
+ };
+ assert_eq!(hash, 1144);
+}
+
+#[test]
+fn benchmark_expand_macro_rules() {
+ if skip_slow_tests() {
+ return;
+ }
+ let rules = macro_rules_fixtures();
+ let invocations = invocation_fixtures(&rules);
+
+ let hash: usize = {
+ let _pt = bench("mbe expand macro rules");
+ invocations
+ .into_iter()
+ .map(|(id, tt)| {
+ let res = rules[&id].expand(&tt);
+ assert!(res.err.is_none());
+ res.value.token_trees.len()
+ })
+ .sum()
+ };
+ assert_eq!(hash, 69413);
+}
+
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
+ macro_rules_fixtures_tt()
+ .into_iter()
+ .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt).unwrap()))
+ .collect()
+}
+
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
+ let fixture = bench_fixture::numerous_macro_rules();
+ let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
+
+ source_file
+ .syntax()
+ .descendants()
+ .filter_map(ast::MacroRules::cast)
+ .map(|rule| {
+ let id = rule.name().unwrap().to_string();
+ let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
+ (id, def_tt)
+ })
+ .collect()
+}
+
+/// Generate random invocation fixtures from rules
+fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
+ let mut seed = 123456789;
+ let mut res = Vec::new();
+
+ for (name, it) in rules {
+ for rule in &it.rules {
+ // Generate twice
+ for _ in 0..2 {
+                // The inputs are generated by filling the `Op` randomly.
+                // However, some generated cases are ambiguous to expand, for example:
+ // ```rust
+ // macro_rules! m {
+ // ($($t:ident),* as $ty:ident) => {}
+ // }
+ // m!(as u32); // error: local ambiguity: multiple parsing options: built-in NTs ident ('t') or 1 other option.
+ // ```
+ //
+ // So we just skip any error cases and try again
+ let mut try_cnt = 0;
+ loop {
+ let mut subtree = tt::Subtree::default();
+ for op in rule.lhs.iter() {
+ collect_from_op(op, &mut subtree, &mut seed);
+ }
+ if it.expand(&subtree).err.is_none() {
+ res.push((name.clone(), subtree));
+ break;
+ }
+ try_cnt += 1;
+ if try_cnt > 100 {
+ panic!("invocaton fixture {} cannot be generated.\n", name);
+ }
+ }
+ }
+ }
+ }
+ return res;
+
+ fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
+ return match op {
+ Op::Var { kind, .. } => match kind.as_ref().map(|it| it.as_str()) {
+ Some("ident") => parent.token_trees.push(make_ident("foo")),
+ Some("ty") => parent.token_trees.push(make_ident("Foo")),
+ Some("tt") => parent.token_trees.push(make_ident("foo")),
+ Some("vis") => parent.token_trees.push(make_ident("pub")),
+ Some("pat") => parent.token_trees.push(make_ident("foo")),
+ Some("path") => parent.token_trees.push(make_ident("foo")),
+ Some("literal") => parent.token_trees.push(make_literal("1")),
+ Some("expr") => parent.token_trees.push(make_ident("foo")),
+ Some("lifetime") => {
+ parent.token_trees.push(make_punct('\''));
+ parent.token_trees.push(make_ident("a"));
+ }
+ Some("block") => {
+ parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None))
+ }
+ Some("item") => {
+ parent.token_trees.push(make_ident("fn"));
+ parent.token_trees.push(make_ident("foo"));
+ parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+ parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None));
+ }
+ Some("meta") => {
+ parent.token_trees.push(make_ident("foo"));
+ parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+ }
+
+ None => (),
+ Some(kind) => panic!("Unhandled kind {}", kind),
+ },
+ Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
+ Op::Repeat { tokens, kind, separator } => {
+ let max = 10;
+ let cnt = match kind {
+ RepeatKind::ZeroOrMore => rand(seed) % max,
+ RepeatKind::OneOrMore => 1 + rand(seed) % max,
+ RepeatKind::ZeroOrOne => rand(seed) % 2,
+ };
+ for i in 0..cnt {
+ for it in tokens.iter() {
+ collect_from_op(it, parent, seed);
+ }
+ if i + 1 != cnt {
+ if let Some(sep) = separator {
+ match sep {
+ Separator::Literal(it) => {
+ parent.token_trees.push(tt::Leaf::Literal(it.clone()).into())
+ }
+ Separator::Ident(it) => {
+ parent.token_trees.push(tt::Leaf::Ident(it.clone()).into())
+ }
+ Separator::Puncts(puncts) => {
+ for it in puncts {
+ parent.token_trees.push(tt::Leaf::Punct(*it).into())
+ }
+ }
+ };
+ }
+ }
+ }
+ }
+ Op::Subtree { tokens, delimiter } => {
+ let mut subtree = tt::Subtree { delimiter: *delimiter, token_trees: Vec::new() };
+ tokens.iter().for_each(|it| {
+ collect_from_op(it, &mut subtree, seed);
+ });
+ parent.token_trees.push(subtree.into());
+ }
+ Op::Ignore { .. } | Op::Index { .. } => {}
+ };
+
+        // Simple linear congruential generator for deterministic results
+ fn rand(seed: &mut usize) -> usize {
+ let a = 1664525;
+ let c = 1013904223;
+ *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
+ *seed
+ }
+ fn make_ident(ident: &str) -> tt::TokenTree {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
+ .into()
+ }
+ fn make_punct(char: char) -> tt::TokenTree {
+ tt::Leaf::Punct(tt::Punct {
+ id: tt::TokenId::unspecified(),
+ char,
+ spacing: tt::Spacing::Alone,
+ })
+ .into()
+ }
+ fn make_literal(lit: &str) -> tt::TokenTree {
+ tt::Leaf::Literal(tt::Literal {
+ id: tt::TokenId::unspecified(),
+ text: SmolStr::new(lit),
+ })
+ .into()
+ }
+ fn make_subtree(
+ kind: tt::DelimiterKind,
+ token_trees: Option<Vec<tt::TokenTree>>,
+ ) -> tt::TokenTree {
+ tt::Subtree {
+ delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }),
+ token_trees: token_trees.unwrap_or_default(),
+ }
+ .into()
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
new file mode 100644
index 000000000..1e1bfa550
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -0,0 +1,121 @@
+//! This module takes a (parsed) definition of `macro_rules` invocation, a
+//! `tt::TokenTree` representing an argument of macro invocation, and produces a
+//! `tt::TokenTree` for the result of the expansion.
+
+mod matcher;
+mod transcriber;
+
+use rustc_hash::FxHashMap;
+use syntax::SmolStr;
+
+use crate::{ExpandError, ExpandResult};
+
+pub(crate) fn expand_rules(
+ rules: &[crate::Rule],
+ input: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
+ for rule in rules {
+ let new_match = matcher::match_(&rule.lhs, input);
+
+ if new_match.err.is_none() {
+ // If we find a rule that applies without errors, we're done.
+ // Unconditionally returning the transcription here makes the
+ // `test_repeat_bad_var` test fail.
+ let ExpandResult { value, err: transcribe_err } =
+ transcriber::transcribe(&rule.rhs, &new_match.bindings);
+ if transcribe_err.is_none() {
+ return ExpandResult::ok(value);
+ }
+ }
+ // Use the rule if we matched more tokens, or bound variables count
+ if let Some((prev_match, _)) = &match_ {
+ if (new_match.unmatched_tts, -(new_match.bound_count as i32))
+ < (prev_match.unmatched_tts, -(prev_match.bound_count as i32))
+ {
+ match_ = Some((new_match, rule));
+ }
+ } else {
+ match_ = Some((new_match, rule));
+ }
+ }
+ if let Some((match_, rule)) = match_ {
+ // if we got here, there was no match without errors
+ let ExpandResult { value, err: transcribe_err } =
+ transcriber::transcribe(&rule.rhs, &match_.bindings);
+ ExpandResult { value, err: match_.err.or(transcribe_err) }
+ } else {
+ ExpandResult::only_err(ExpandError::NoMatchingRule)
+ }
+}
+
+/// The actual algorithm for expansion is not too hard, but is pretty tricky.
+/// `Bindings` structure is the key to understanding what we are doing here.
+///
+/// On the high level, it stores mapping from meta variables to the bits of
+/// syntax it should be substituted with. For example, if `$e:expr` is matched
+/// with `1 + 1` by macro_rules, the `Binding` will store `$e -> 1 + 1`.
+///
+/// The tricky bit is dealing with repetitions (`$()*`). Consider this example:
+///
+/// ```not_rust
+/// macro_rules! foo {
+/// ($($ i:ident $($ e:expr),*);*) => {
+/// $(fn $ i() { $($ e);*; })*
+/// }
+/// }
+/// foo! { foo 1,2,3; bar 4,5,6 }
+/// ```
+///
+/// Here, the `$i` meta variable is matched first with `foo` and then with
+/// `bar`, and `$e` is matched in turn with `1`, `2`, `3`, `4`, `5`, `6`.
+///
+/// To represent such "multi-mappings", we use a recursive structures: we map
+/// variables not to values, but to *lists* of values or other lists (that is,
+/// to the trees).
+///
+/// For the above example, the bindings would store
+///
+/// ```not_rust
+/// i -> [foo, bar]
+/// e -> [[1, 2, 3], [4, 5, 6]]
+/// ```
+///
+/// We construct `Bindings` in the `match_lhs`. The interesting case is
+/// `TokenTree::Repeat`, where we use `push_nested` to create the desired
+/// nesting structure.
+///
+/// The other side of the puzzle is `expand_subtree`, where we use the bindings
+/// to substitute meta variables in the output template. When expanding, we
+/// maintain a `nesting` stack of indices which tells us which occurrence from
+/// the `Bindings` we should take. We push to the stack when we enter a
+/// repetition.
+///
+/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
+/// `tt::TokenTree`, where the index to select a particular `TokenTree` among
+/// many is not a plain `usize`, but a `&[usize]`.
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+struct Bindings {
+ inner: FxHashMap<SmolStr, Binding>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum Binding {
+ Fragment(Fragment),
+ Nested(Vec<Binding>),
+ Empty,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum Fragment {
+ /// token fragments are just copy-pasted into the output
+ Tokens(tt::TokenTree),
+ /// Expr ast fragments are surrounded with `()` on insertion to preserve
+ /// precedence. Note that this impl is different from the one currently in
+ /// `rustc` -- `rustc` doesn't translate fragments into token trees at all.
+ ///
+    /// At one point in time, we tried to use "fake" delimiters here a-la
+ /// proc-macro delimiter=none. As we later discovered, "none" delimiters are
+ /// tricky to handle in the parser, and rustc doesn't handle those either.
+ Expr(tt::TokenTree),
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
new file mode 100644
index 000000000..5020e9aba
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -0,0 +1,914 @@
+//! An NFA-based parser, which is ported from rustc mbe parsing code
+//!
+//! See <https://github.com/rust-lang/rust/blob/70b18bc2cbac4712020019f5bf57c00905373205/compiler/rustc_expand/src/mbe/macro_parser.rs>
+//! Here is a quick intro to how the parser works, copied from rustc:
+//!
+//! A 'position' is a dot in the middle of a matcher, usually represented as a
+//! dot. For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`.
+//!
+//! The parser walks through the input a character at a time, maintaining a list
+//! of threads consistent with the current position in the input string: `cur_items`.
+//!
+//! As it processes them, it fills up `eof_items` with threads that would be valid if
+//! the macro invocation is now over, `bb_items` with threads that are waiting on
+//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
+//! on a particular token. Most of the logic concerns moving the · through the
+//! repetitions indicated by Kleene stars. The rules for moving the · without
+//! consuming any input are called epsilon transitions. It only advances or calls
+//! out to the real Rust parser when no `cur_items` threads remain.
+//!
+//! Example:
+//!
+//! ```text, ignore
+//! Start parsing a a a a b against [· a $( a )* a b].
+//!
+//! Remaining input: a a a a b
+//! next: [· a $( a )* a b]
+//!
+//! - - - Advance over an a. - - -
+//!
+//! Remaining input: a a a b
+//! cur: [a · $( a )* a b]
+//! Descend/Skip (first item).
+//! next: [a $( · a )* a b] [a $( a )* · a b].
+//!
+//! - - - Advance over an a. - - -
+//!
+//! Remaining input: a a b
+//! cur: [a $( a · )* a b] [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
+//!
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
+//!
+//! Remaining input: a b
+//! cur: [a $( a · )* a b] [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
+//!
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
+//!
+//! Remaining input: b
+//! cur: [a $( a · )* a b] [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
+//!
+//! - - - Advance over a b. - - -
+//!
+//! Remaining input: ''
+//! eof: [a $( a )* a b ·]
+//! ```
+
+use std::rc::Rc;
+
+use smallvec::{smallvec, SmallVec};
+use syntax::SmolStr;
+
+use crate::{
+ expander::{Binding, Bindings, ExpandResult, Fragment},
+ parser::{Op, RepeatKind, Separator},
+ tt_iter::TtIter,
+ ExpandError, MetaTemplate,
+};
+
+impl Bindings {
+ fn push_optional(&mut self, name: &SmolStr) {
+ // FIXME: Do we have a better way to represent an empty token ?
+ // Insert an empty subtree for empty token
+ let tt = tt::Subtree::default().into();
+ self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
+ }
+
+ fn push_empty(&mut self, name: &SmolStr) {
+ self.inner.insert(name.clone(), Binding::Empty);
+ }
+
+ fn bindings(&self) -> impl Iterator<Item = &Binding> {
+ self.inner.values()
+ }
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
+pub(super) struct Match {
+ pub(super) bindings: Bindings,
+ /// We currently just keep the first error and count the rest to compare matches.
+ pub(super) err: Option<ExpandError>,
+ pub(super) err_count: usize,
+ /// How many top-level token trees were left to match.
+ pub(super) unmatched_tts: usize,
+ /// The number of bound variables
+ pub(super) bound_count: usize,
+}
+
+impl Match {
+ fn add_err(&mut self, err: ExpandError) {
+ let prev_err = self.err.take();
+ self.err = prev_err.or(Some(err));
+ self.err_count += 1;
+ }
+}
+
+/// Matching errors are added to the `Match`.
+pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match {
+ let mut res = match_loop(pattern, input);
+ res.bound_count = count(res.bindings.bindings());
+ return res;
+
+ fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
+ bindings
+ .map(|it| match it {
+ Binding::Fragment(_) => 1,
+ Binding::Empty => 1,
+ Binding::Nested(it) => count(it.iter()),
+ })
+ .sum()
+ }
+}
+
+#[derive(Debug, Clone)]
+enum BindingKind {
+ Empty(SmolStr),
+ Optional(SmolStr),
+ Fragment(SmolStr, Fragment),
+ Nested(usize, usize),
+}
+
+#[derive(Debug, Clone)]
+struct BindingsIdx(usize, usize);
+
+#[derive(Debug, Clone)]
+enum LinkNode<T> {
+ Node(T),
+ Parent { idx: usize, len: usize },
+}
+
+#[derive(Default)]
+struct BindingsBuilder {
+ nodes: Vec<Vec<LinkNode<Rc<BindingKind>>>>,
+ nested: Vec<Vec<LinkNode<usize>>>,
+}
+
+impl BindingsBuilder {
+ fn alloc(&mut self) -> BindingsIdx {
+ let idx = self.nodes.len();
+ self.nodes.push(Vec::new());
+ let nidx = self.nested.len();
+ self.nested.push(Vec::new());
+ BindingsIdx(idx, nidx)
+ }
+
+ fn copy(&mut self, bindings: &BindingsIdx) -> BindingsIdx {
+ let idx = copy_parent(bindings.0, &mut self.nodes);
+ let nidx = copy_parent(bindings.1, &mut self.nested);
+ return BindingsIdx(idx, nidx);
+
+ fn copy_parent<T>(idx: usize, target: &mut Vec<Vec<LinkNode<T>>>) -> usize
+ where
+ T: Clone,
+ {
+ let new_idx = target.len();
+ let len = target[idx].len();
+ if len < 4 {
+ target.push(target[idx].clone())
+ } else {
+ target.push(vec![LinkNode::Parent { idx, len }]);
+ }
+ new_idx
+ }
+ }
+
+ fn push_empty(&mut self, idx: &mut BindingsIdx, var: &SmolStr) {
+ self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Empty(var.clone()))));
+ }
+
+ fn push_optional(&mut self, idx: &mut BindingsIdx, var: &SmolStr) {
+ self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
+ }
+
+ fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) {
+ self.nodes[idx.0]
+ .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
+ }
+
+ fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) {
+ let BindingsIdx(idx, nidx) = self.copy(child);
+ self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx))));
+ }
+
+ fn push_default(&mut self, idx: &mut BindingsIdx) {
+ self.nested[idx.1].push(LinkNode::Node(idx.0));
+ let new_idx = self.nodes.len();
+ self.nodes.push(Vec::new());
+ idx.0 = new_idx;
+ }
+
+ fn build(self, idx: &BindingsIdx) -> Bindings {
+ let mut bindings = Bindings::default();
+ self.build_inner(&mut bindings, &self.nodes[idx.0]);
+ bindings
+ }
+
+ fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) {
+ let mut nodes = Vec::new();
+ self.collect_nodes(link_nodes, &mut nodes);
+
+ for cmd in nodes {
+ match &**cmd {
+ BindingKind::Empty(name) => {
+ bindings.push_empty(name);
+ }
+ BindingKind::Optional(name) => {
+ bindings.push_optional(name);
+ }
+ BindingKind::Fragment(name, fragment) => {
+ bindings.inner.insert(name.clone(), Binding::Fragment(fragment.clone()));
+ }
+ BindingKind::Nested(idx, nested_idx) => {
+ let mut nested_nodes = Vec::new();
+ self.collect_nested(*idx, *nested_idx, &mut nested_nodes);
+
+ for (idx, iter) in nested_nodes.into_iter().enumerate() {
+ for (key, value) in &iter.inner {
+ let bindings = bindings
+ .inner
+ .entry(key.clone())
+ .or_insert_with(|| Binding::Nested(Vec::new()));
+
+ if let Binding::Nested(it) = bindings {
+ // insert empty nested bindings before this one
+ while it.len() < idx {
+ it.push(Binding::Nested(Vec::new()));
+ }
+ it.push(value.clone());
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_nested_ref<'a>(
+ &'a self,
+ id: usize,
+ len: usize,
+ nested_refs: &mut Vec<&'a Vec<LinkNode<Rc<BindingKind>>>>,
+ ) {
+ self.nested[id].iter().take(len).for_each(|it| match it {
+ LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),
+ LinkNode::Parent { idx, len } => self.collect_nested_ref(*idx, *len, nested_refs),
+ });
+ }
+
+ fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings>) {
+ let last = &self.nodes[idx];
+ let mut nested_refs = Vec::new();
+ self.nested[nested_idx].iter().for_each(|it| match *it {
+ LinkNode::Node(idx) => nested_refs.push(&self.nodes[idx]),
+ LinkNode::Parent { idx, len } => self.collect_nested_ref(idx, len, &mut nested_refs),
+ });
+ nested_refs.push(last);
+
+ nested_refs.into_iter().for_each(|iter| {
+ let mut child_bindings = Bindings::default();
+ self.build_inner(&mut child_bindings, iter);
+ nested.push(child_bindings)
+ })
+ }
+
+ fn collect_nodes_ref<'a>(
+ &'a self,
+ id: usize,
+ len: usize,
+ nodes: &mut Vec<&'a Rc<BindingKind>>,
+ ) {
+ self.nodes[id].iter().take(len).for_each(|it| match it {
+ LinkNode::Node(it) => nodes.push(it),
+ LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
+ });
+ }
+
+ fn collect_nodes<'a>(
+ &'a self,
+ link_nodes: &'a [LinkNode<Rc<BindingKind>>],
+ nodes: &mut Vec<&'a Rc<BindingKind>>,
+ ) {
+ link_nodes.iter().for_each(|it| match it {
+ LinkNode::Node(it) => nodes.push(it),
+ LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
+ });
+ }
+}
+
+#[derive(Debug, Clone)]
+struct MatchState<'t> {
+ /// The position of the "dot" in this matcher
+ dot: OpDelimitedIter<'t>,
+
+ /// Token subtree stack
+ /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
+ /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
+ /// that where the bottom of the stack is the outermost matcher.
+ stack: SmallVec<[OpDelimitedIter<'t>; 4]>,
+
+ /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
+ /// before we enter the repetition.
+ up: Option<Box<MatchState<'t>>>,
+
+ /// The separator if we are in a repetition.
+ sep: Option<Separator>,
+
+ /// The KleeneOp of this sequence if we are in a repetition.
+ sep_kind: Option<RepeatKind>,
+
+ /// Number of tokens of separator parsed
+ sep_parsed: Option<usize>,
+
+ /// Matched meta variables bindings
+ bindings: BindingsIdx,
+
+ /// Cached result of meta variable parsing
+ meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
+
+ /// Whether an error occurred in this state; it will be "poisoned" to the parent
+ is_error: bool,
+}
+
+/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
+/// produce more items in `next_items`, `eof_items`, and `bb_items`.
+///
+/// For more info about the how this happens, see the module-level doc comments and the inline
+/// comments of this function.
+///
+/// # Parameters
+///
+/// - `src`: the current token of the parser.
+/// - `stack`: the "parent" frames of the token tree
+/// - `res`: the match result to store errors
+/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
+/// successful execution of this function.
+/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
+/// the function `parse`.
+/// - `eof_items`: the set of items that would be valid if this was the EOF.
+/// - `bb_items`: the set of items that are waiting for the black-box parser.
+/// - `error_items`: the set of items in errors, used for error-resilient parsing
+fn match_loop_inner<'t>(
+ src: TtIter<'t>,
+ stack: &[TtIter<'t>],
+ res: &mut Match,
+ bindings_builder: &mut BindingsBuilder,
+ cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
+ bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
+ next_items: &mut Vec<MatchState<'t>>,
+ eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
+ error_items: &mut SmallVec<[MatchState<'t>; 1]>,
+) {
+ macro_rules! try_push {
+ ($items: expr, $it:expr) => {
+ if $it.is_error {
+ error_items.push($it);
+ } else {
+ $items.push($it);
+ }
+ };
+ }
+
+ while let Some(mut item) = cur_items.pop() {
+ while item.dot.is_eof() {
+ match item.stack.pop() {
+ Some(frame) => {
+ item.dot = frame;
+ item.dot.next();
+ }
+ None => break,
+ }
+ }
+ let op = match item.dot.peek() {
+ None => {
+ // We are at or past the end of the matcher of `item`.
+ if item.up.is_some() {
+ if item.sep_parsed.is_none() {
+ // Get the `up` matcher
+ let mut new_pos = *item.up.clone().unwrap();
+ new_pos.bindings = bindings_builder.copy(&new_pos.bindings);
+ // Add matches from this repetition to the `matches` of `up`
+ bindings_builder.push_nested(&mut new_pos.bindings, &item.bindings);
+
+ // Move the "dot" past the repetition in `up`
+ new_pos.dot.next();
+ new_pos.is_error = new_pos.is_error || item.is_error;
+ cur_items.push(new_pos);
+ }
+
+ // Check if we need a separator.
+ // We check the separator one by one
+ let sep_idx = *item.sep_parsed.as_ref().unwrap_or(&0);
+ let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count);
+ if item.sep.is_some() && sep_idx != sep_len {
+ let sep = item.sep.as_ref().unwrap();
+ if src.clone().expect_separator(sep, sep_idx) {
+ item.dot.next();
+ item.sep_parsed = Some(sep_idx + 1);
+ try_push!(next_items, item);
+ }
+ }
+ // We don't need a separator. Move the "dot" back to the beginning of the matcher
+ // and try to match again UNLESS we are only allowed to have _one_ repetition.
+ else if item.sep_kind != Some(RepeatKind::ZeroOrOne) {
+ item.dot = item.dot.reset();
+ item.sep_parsed = None;
+ bindings_builder.push_default(&mut item.bindings);
+ cur_items.push(item);
+ }
+ } else {
+ // If we are not in a repetition, then being at the end of a matcher means that we have
+ // reached the potential end of the input.
+ try_push!(eof_items, item);
+ }
+ continue;
+ }
+ Some(it) => it,
+ };
+
+ // We are in the middle of a matcher.
+ match op {
+ OpDelimited::Op(Op::Repeat { tokens, kind, separator }) => {
+ if matches!(kind, RepeatKind::ZeroOrMore | RepeatKind::ZeroOrOne) {
+ let mut new_item = item.clone();
+ new_item.bindings = bindings_builder.copy(&new_item.bindings);
+ new_item.dot.next();
+ collect_vars(
+ &mut |s| {
+ bindings_builder.push_empty(&mut new_item.bindings, &s);
+ },
+ tokens,
+ );
+ cur_items.push(new_item);
+ }
+ cur_items.push(MatchState {
+ dot: tokens.iter_delimited(None),
+ stack: Default::default(),
+ up: Some(Box::new(item)),
+ sep: separator.clone(),
+ sep_kind: Some(*kind),
+ sep_parsed: None,
+ bindings: bindings_builder.alloc(),
+ meta_result: None,
+ is_error: false,
+ })
+ }
+ OpDelimited::Op(Op::Subtree { tokens, delimiter }) => {
+ if let Ok(subtree) = src.clone().expect_subtree() {
+ if subtree.delimiter_kind() == delimiter.map(|it| it.kind) {
+ item.stack.push(item.dot);
+ item.dot = tokens.iter_delimited(delimiter.as_ref());
+ cur_items.push(item);
+ }
+ }
+ }
+ OpDelimited::Op(Op::Var { kind, name, .. }) => {
+ if let Some(kind) = kind {
+ let mut fork = src.clone();
+ let match_res = match_meta_var(kind.as_str(), &mut fork);
+ match match_res.err {
+ None => {
+ // Some meta variables are optional (e.g. vis)
+ if match_res.value.is_some() {
+ item.meta_result = Some((fork, match_res));
+ try_push!(bb_items, item);
+ } else {
+ bindings_builder.push_optional(&mut item.bindings, name);
+ item.dot.next();
+ cur_items.push(item);
+ }
+ }
+ Some(err) => {
+ res.add_err(err);
+ if let Some(fragment) = match_res.value {
+ bindings_builder.push_fragment(&mut item.bindings, name, fragment);
+ }
+ item.is_error = true;
+ error_items.push(item);
+ }
+ }
+ }
+ }
+ OpDelimited::Op(Op::Leaf(leaf)) => {
+ if let Err(err) = match_leaf(leaf, &mut src.clone()) {
+ res.add_err(err);
+ item.is_error = true;
+ } else {
+ item.dot.next();
+ }
+ try_push!(next_items, item);
+ }
+ OpDelimited::Op(Op::Ignore { .. } | Op::Index { .. }) => {}
+ OpDelimited::Open => {
+ if matches!(src.clone().next(), Some(tt::TokenTree::Subtree(..))) {
+ item.dot.next();
+ try_push!(next_items, item);
+ }
+ }
+ OpDelimited::Close => {
+ let is_delim_closed = src.peek_n(0).is_none() && !stack.is_empty();
+ if is_delim_closed {
+ item.dot.next();
+ try_push!(next_items, item);
+ }
+ }
+ }
+ }
+}
+
+fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
+ let mut src = TtIter::new(src);
+ let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
+ let mut res = Match::default();
+ let mut error_recover_item = None;
+
+ let mut bindings_builder = BindingsBuilder::default();
+
+ let mut cur_items = smallvec![MatchState {
+ dot: pattern.iter_delimited(None),
+ stack: Default::default(),
+ up: None,
+ sep: None,
+ sep_kind: None,
+ sep_parsed: None,
+ bindings: bindings_builder.alloc(),
+ is_error: false,
+ meta_result: None,
+ }];
+
+ let mut next_items = vec![];
+
+ loop {
+ let mut bb_items = SmallVec::new();
+ let mut eof_items = SmallVec::new();
+ let mut error_items = SmallVec::new();
+
+ stdx::always!(next_items.is_empty());
+
+ match_loop_inner(
+ src.clone(),
+ &stack,
+ &mut res,
+ &mut bindings_builder,
+ &mut cur_items,
+ &mut bb_items,
+ &mut next_items,
+ &mut eof_items,
+ &mut error_items,
+ );
+ stdx::always!(cur_items.is_empty());
+
+ if !error_items.is_empty() {
+ error_recover_item = error_items.pop().map(|it| it.bindings);
+ } else if let [state, ..] = &*eof_items {
+ error_recover_item = Some(state.bindings.clone());
+ }
+
+ // We need to do some post processing after the `match_loop_inner`.
+ // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
+ // either the parse is ambiguous (which should never happen) or there is a syntax error.
+ if src.peek_n(0).is_none() && stack.is_empty() {
+ if let [state] = &*eof_items {
+ // remove all errors, because it is the correct answer !
+ res = Match::default();
+ res.bindings = bindings_builder.build(&state.bindings);
+ } else {
+ // Error recovery
+ if let Some(item) = error_recover_item {
+ res.bindings = bindings_builder.build(&item);
+ }
+ res.add_err(ExpandError::UnexpectedToken);
+ }
+ return res;
+ }
+
+ // If there are no possible next positions AND we aren't waiting for the black-box parser,
+ // then there is a syntax error.
+ //
+ // Another possibility is that we need to call out to parse some rust nonterminal
+ // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong.
+ let has_leftover_tokens = (bb_items.is_empty() && next_items.is_empty())
+ || !(bb_items.is_empty() || next_items.is_empty())
+ || bb_items.len() > 1;
+ if has_leftover_tokens {
+ res.unmatched_tts += src.len();
+ while let Some(it) = stack.pop() {
+ src = it;
+ res.unmatched_tts += src.len();
+ }
+ res.add_err(ExpandError::LeftoverTokens);
+
+ if let Some(error_reover_item) = error_recover_item {
+ res.bindings = bindings_builder.build(&error_reover_item);
+ }
+ return res;
+ }
+ // Dump all possible `next_items` into `cur_items` for the next iteration.
+ else if !next_items.is_empty() {
+ // Now process the next token
+ cur_items.extend(next_items.drain(..));
+
+ match src.next() {
+ Some(tt::TokenTree::Subtree(subtree)) => {
+ stack.push(src.clone());
+ src = TtIter::new(subtree);
+ }
+ None => {
+ if let Some(iter) = stack.pop() {
+ src = iter;
+ }
+ }
+ _ => (),
+ }
+ }
+ // Finally, we have the case where we need to call the black-box parser to get some
+ // nonterminal.
+ else {
+ stdx::always!(bb_items.len() == 1);
+ let mut item = bb_items.pop().unwrap();
+
+ if let Some(OpDelimited::Op(Op::Var { name, .. })) = item.dot.peek() {
+ let (iter, match_res) = item.meta_result.take().unwrap();
+ match match_res.value {
+ Some(fragment) => {
+ bindings_builder.push_fragment(&mut item.bindings, name, fragment);
+ }
+ None if match_res.err.is_none() => {
+ bindings_builder.push_optional(&mut item.bindings, name);
+ }
+ None => {}
+ }
+ if let Some(err) = match_res.err {
+ res.add_err(err);
+ }
+ src = iter.clone();
+ item.dot.next();
+ } else {
+ unreachable!()
+ }
+ cur_items.push(item);
+ }
+ stdx::always!(!cur_items.is_empty());
+ }
+}
+
+fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter<'_>) -> Result<(), ExpandError> {
+ let rhs = src
+ .expect_leaf()
+ .map_err(|()| ExpandError::binding_error(format!("expected leaf: `{lhs}`")))?;
+ match (lhs, rhs) {
+ (
+ tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
+ tt::Leaf::Punct(tt::Punct { char: rhs, .. }),
+ ) if lhs == rhs => Ok(()),
+ (
+ tt::Leaf::Ident(tt::Ident { text: lhs, .. }),
+ tt::Leaf::Ident(tt::Ident { text: rhs, .. }),
+ ) if lhs == rhs => Ok(()),
+ (
+ tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
+ tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
+ ) if lhs == rhs => Ok(()),
+ _ => Err(ExpandError::UnexpectedToken),
+ }
+}
+
+fn match_meta_var(kind: &str, input: &mut TtIter<'_>) -> ExpandResult<Option<Fragment>> {
+ let fragment = match kind {
+ "path" => parser::PrefixEntryPoint::Path,
+ "ty" => parser::PrefixEntryPoint::Ty,
+ // FIXME: These two should actually behave differently depending on the edition.
+ //
+ // https://doc.rust-lang.org/edition-guide/rust-2021/or-patterns-macro-rules.html
+ "pat" | "pat_param" => parser::PrefixEntryPoint::Pat,
+ "stmt" => parser::PrefixEntryPoint::Stmt,
+ "block" => parser::PrefixEntryPoint::Block,
+ "meta" => parser::PrefixEntryPoint::MetaItem,
+ "item" => parser::PrefixEntryPoint::Item,
+ "vis" => parser::PrefixEntryPoint::Vis,
+ "expr" => {
+ // `expr` should not match underscores.
+ // HACK: Macro expansion should not be done using "rollback and try another alternative".
+ // rustc [explicitly checks the next token][0].
+ // [0]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576
+ match input.peek_n(0) {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) if it.text == "_" => {
+ return ExpandResult::only_err(ExpandError::NoMatchingRule)
+ }
+ _ => {}
+ };
+ return input
+ .expect_fragment(parser::PrefixEntryPoint::Expr)
+ .map(|tt| tt.map(Fragment::Expr));
+ }
+ _ => {
+ let tt_result = match kind {
+ "ident" => input
+ .expect_ident()
+ .map(|ident| tt::Leaf::from(ident.clone()).into())
+ .map_err(|()| ExpandError::binding_error("expected ident")),
+ "tt" => input
+ .expect_tt()
+ .map_err(|()| ExpandError::binding_error("expected token tree")),
+ "lifetime" => input
+ .expect_lifetime()
+ .map_err(|()| ExpandError::binding_error("expected lifetime")),
+ "literal" => {
+ let neg = input.eat_char('-');
+ input
+ .expect_literal()
+ .map(|literal| {
+ let lit = literal.clone();
+ match neg {
+ None => lit.into(),
+ Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: None,
+ token_trees: vec![neg, lit.into()],
+ }),
+ }
+ })
+ .map_err(|()| ExpandError::binding_error("expected literal"))
+ }
+ _ => Err(ExpandError::UnexpectedToken),
+ };
+ return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
+ }
+ };
+ input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
+}
+
+fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
+ for op in pattern.iter() {
+ match op {
+ Op::Var { name, .. } => collector_fun(name.clone()),
+ Op::Leaf(_) => (),
+ Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens),
+ Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens),
+ Op::Ignore { .. } | Op::Index { .. } => {}
+ }
+ }
+}
+impl MetaTemplate {
+ fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
+ OpDelimitedIter { inner: &self.0, idx: 0, delimited }
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+enum OpDelimited<'a> {
+ Op(&'a Op),
+ Open,
+ Close,
+}
+
+#[derive(Debug, Clone, Copy)]
+struct OpDelimitedIter<'a> {
+ inner: &'a [Op],
+ delimited: Option<&'a tt::Delimiter>,
+ idx: usize,
+}
+
+impl<'a> OpDelimitedIter<'a> {
+ fn is_eof(&self) -> bool {
+ let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 };
+ self.idx >= len
+ }
+
+ fn peek(&self) -> Option<OpDelimited<'a>> {
+ match self.delimited {
+ None => self.inner.get(self.idx).map(OpDelimited::Op),
+ Some(_) => match self.idx {
+ 0 => Some(OpDelimited::Open),
+ i if i == self.inner.len() + 1 => Some(OpDelimited::Close),
+ i => self.inner.get(i - 1).map(OpDelimited::Op),
+ },
+ }
+ }
+
+ fn reset(&self) -> Self {
+ Self { inner: self.inner, idx: 0, delimited: self.delimited }
+ }
+}
+
+impl<'a> Iterator for OpDelimitedIter<'a> {
+ type Item = OpDelimited<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let res = self.peek();
+ self.idx += 1;
+ res
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 };
+ let remain = len.saturating_sub(self.idx);
+ (remain, Some(remain))
+ }
+}
+
+impl<'a> TtIter<'a> {
+ fn expect_separator(&mut self, separator: &Separator, idx: usize) -> bool {
+ let mut fork = self.clone();
+ let ok = match separator {
+ Separator::Ident(lhs) if idx == 0 => match fork.expect_ident_or_underscore() {
+ Ok(rhs) => rhs.text == lhs.text,
+ Err(_) => false,
+ },
+ Separator::Literal(lhs) if idx == 0 => match fork.expect_literal() {
+ Ok(rhs) => match rhs {
+ tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
+ tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
+ tt::Leaf::Punct(_) => false,
+ },
+ Err(_) => false,
+ },
+ Separator::Puncts(lhss) if idx < lhss.len() => match fork.expect_punct() {
+ Ok(rhs) => rhs.char == lhss[idx].char,
+ Err(_) => false,
+ },
+ _ => false,
+ };
+ if ok {
+ *self = fork;
+ }
+ ok
+ }
+
+ fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
+ match self.peek_n(0) {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
+ return self.expect_lifetime();
+ }
+ _ => (),
+ }
+
+ let tt = self.next().ok_or(())?.clone();
+ let punct = match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
+ punct
+ }
+ _ => return Ok(tt),
+ };
+
+ let (second, third) = match (self.peek_n(0), self.peek_n(1)) {
+ (
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))),
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))),
+ ) if p2.spacing == tt::Spacing::Joint => (p2.char, Some(p3.char)),
+ (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2.char, None),
+ _ => return Ok(tt),
+ };
+
+ match (punct.char, second, third) {
+ ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => {
+ let tt2 = self.next().unwrap().clone();
+ let tt3 = self.next().unwrap().clone();
+ Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2, tt3] }.into())
+ }
+ ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
+ | ('-' | '=' | '>', '>', _)
+ | (':', ':', _)
+ | ('.', '.', _)
+ | ('&', '&', _)
+ | ('<', '<', _)
+ | ('|', '|', _) => {
+ let tt2 = self.next().unwrap().clone();
+ Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2] }.into())
+ }
+ _ => Ok(tt),
+ }
+ }
+
+ fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
+ let punct = self.expect_punct()?;
+ if punct.char != '\'' {
+ return Err(());
+ }
+ let ident = self.expect_ident_or_underscore()?;
+
+ Ok(tt::Subtree {
+ delimiter: None,
+ token_trees: vec![
+ tt::Leaf::Punct(*punct).into(),
+ tt::Leaf::Ident(ident.clone()).into(),
+ ],
+ }
+ .into())
+ }
+
+ fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
+ let mut fork = self.clone();
+ match fork.expect_char(c) {
+ Ok(_) => {
+ let tt = self.next().cloned();
+ *self = fork;
+ tt
+ }
+ Err(_) => None,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
new file mode 100644
index 000000000..7bcc84740
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -0,0 +1,272 @@
+//! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like
+//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
+
+use syntax::SmolStr;
+use tt::{Delimiter, Subtree};
+
+use crate::{
+ expander::{Binding, Bindings, Fragment},
+ parser::{Op, RepeatKind, Separator},
+ ExpandError, ExpandResult, MetaTemplate,
+};
+
+impl Bindings {
+ fn contains(&self, name: &str) -> bool {
+ self.inner.contains_key(name)
+ }
+
+ fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> {
+ macro_rules! binding_err {
+ ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
+ }
+
+ let mut b: &Binding =
+ self.inner.get(name).ok_or_else(|| binding_err!("could not find binding `{name}`"))?;
+ for nesting_state in nesting.iter_mut() {
+ nesting_state.hit = true;
+ b = match b {
+ Binding::Fragment(_) => break,
+ Binding::Nested(bs) => bs.get(nesting_state.idx).ok_or_else(|| {
+ nesting_state.at_end = true;
+ binding_err!("could not find nested binding `{name}`")
+ })?,
+ Binding::Empty => {
+ nesting_state.at_end = true;
+ return Err(binding_err!("could not find empty binding `{name}`"));
+ }
+ };
+ }
+ match b {
+ Binding::Fragment(it) => Ok(it),
+ Binding::Nested(_) => {
+ Err(binding_err!("expected simple binding, found nested binding `{name}`"))
+ }
+ Binding::Empty => {
+ Err(binding_err!("expected simple binding, found empty binding `{name}`"))
+ }
+ }
+ }
+}
+
+pub(super) fn transcribe(
+ template: &MetaTemplate,
+ bindings: &Bindings,
+) -> ExpandResult<tt::Subtree> {
+ let mut ctx = ExpandCtx { bindings, nesting: Vec::new() };
+ let mut arena: Vec<tt::TokenTree> = Vec::new();
+ expand_subtree(&mut ctx, template, None, &mut arena)
+}
+
+#[derive(Debug)]
+struct NestingState {
+ idx: usize,
+ /// `hit` is currently necessary to tell `expand_repeat` if it should stop
+ /// because there is no variable in use by the current repetition
+ hit: bool,
+ /// `at_end` is currently necessary to tell `expand_repeat` if it should stop
+ /// because there is no more value available for the current repetition
+ at_end: bool,
+}
+
+#[derive(Debug)]
+struct ExpandCtx<'a> {
+ bindings: &'a Bindings,
+ nesting: Vec<NestingState>,
+}
+
+fn expand_subtree(
+ ctx: &mut ExpandCtx<'_>,
+ template: &MetaTemplate,
+ delimiter: Option<Delimiter>,
+ arena: &mut Vec<tt::TokenTree>,
+) -> ExpandResult<tt::Subtree> {
+ // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
+ let start_elements = arena.len();
+ let mut err = None;
+ for op in template.iter() {
+ match op {
+ Op::Leaf(tt) => arena.push(tt.clone().into()),
+ Op::Subtree { tokens, delimiter } => {
+ let ExpandResult { value: tt, err: e } =
+ expand_subtree(ctx, tokens, *delimiter, arena);
+ err = err.or(e);
+ arena.push(tt.into());
+ }
+ Op::Var { name, id, .. } => {
+ let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
+ err = err.or(e);
+ push_fragment(arena, fragment);
+ }
+ Op::Repeat { tokens: subtree, kind, separator } => {
+ let ExpandResult { value: fragment, err: e } =
+ expand_repeat(ctx, subtree, *kind, separator, arena);
+ err = err.or(e);
+ push_fragment(arena, fragment)
+ }
+ Op::Ignore { name, id } => {
+ // Expand the variable, but ignore the result. This registers the repetition count.
+ expand_var(ctx, name, *id);
+ }
+ Op::Index { depth } => {
+ let index = ctx
+ .nesting
+ .get(ctx.nesting.len() - 1 - (*depth as usize))
+ .map_or(0, |nest| nest.idx);
+ arena.push(
+ tt::Leaf::Literal(tt::Literal {
+ text: index.to_string().into(),
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ }
+ }
+ }
+ // drain the elements added in this instance of expand_subtree
+ let tts = arena.drain(start_elements..).collect();
+ ExpandResult { value: tt::Subtree { delimiter, token_trees: tts }, err }
+}
+
+fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> {
+ // We already handle $crate case in mbe parser
+ debug_assert!(v != "crate");
+
+ if !ctx.bindings.contains(v) {
+ // Note that it is possible to have a `$var` inside a macro which is not bound.
+ // For example:
+ // ```
+ // macro_rules! foo {
+ // ($a:ident, $b:ident, $c:tt) => {
+ // macro_rules! bar {
+ // ($bi:ident) => {
+ // fn $bi() -> u8 {$c}
+ // }
+ // }
+ // }
+ // ```
+ // We just treat it as normal tokens
+ let tt = tt::Subtree {
+ delimiter: None,
+ token_trees: vec![
+ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, id }).into(),
+ tt::Leaf::from(tt::Ident { text: v.clone(), id }).into(),
+ ],
+ }
+ .into();
+ ExpandResult::ok(Fragment::Tokens(tt))
+ } else {
+ ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
+ |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) },
+ |b| ExpandResult::ok(b.clone()),
+ )
+ }
+}
+
+fn expand_repeat(
+ ctx: &mut ExpandCtx<'_>,
+ template: &MetaTemplate,
+ kind: RepeatKind,
+ separator: &Option<Separator>,
+ arena: &mut Vec<tt::TokenTree>,
+) -> ExpandResult<Fragment> {
+ let mut buf: Vec<tt::TokenTree> = Vec::new();
+ ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
+ // Dirty hack to make macro-expansion terminate.
+ // This should be replaced by a proper macro-by-example implementation
+ let limit = 65536;
+ let mut has_seps = 0;
+ let mut counter = 0;
+
+ loop {
+ let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
+ let nesting_state = ctx.nesting.last_mut().unwrap();
+ if nesting_state.at_end || !nesting_state.hit {
+ break;
+ }
+ nesting_state.idx += 1;
+ nesting_state.hit = false;
+
+ counter += 1;
+ if counter == limit {
+ tracing::warn!(
+ "expand_tt in repeat pattern exceed limit => {:#?}\n{:#?}",
+ template,
+ ctx
+ );
+ return ExpandResult {
+ value: Fragment::Tokens(Subtree::default().into()),
+ err: Some(ExpandError::LimitExceeded),
+ };
+ }
+
+ if e.is_some() {
+ continue;
+ }
+
+ t.delimiter = None;
+ push_subtree(&mut buf, t);
+
+ if let Some(sep) = separator {
+ has_seps = match sep {
+ Separator::Ident(ident) => {
+ buf.push(tt::Leaf::from(ident.clone()).into());
+ 1
+ }
+ Separator::Literal(lit) => {
+ buf.push(tt::Leaf::from(lit.clone()).into());
+ 1
+ }
+ Separator::Puncts(puncts) => {
+ for &punct in puncts {
+ buf.push(tt::Leaf::from(punct).into());
+ }
+ puncts.len()
+ }
+ };
+ }
+
+ if RepeatKind::ZeroOrOne == kind {
+ break;
+ }
+ }
+
+ ctx.nesting.pop().unwrap();
+ for _ in 0..has_seps {
+ buf.pop();
+ }
+
+ // Check if it is a single token subtree without any delimiter
+ // e.g {Delimiter:None> ['>'] /Delimiter:None>}
+ let tt = tt::Subtree { delimiter: None, token_trees: buf }.into();
+
+ if RepeatKind::OneOrMore == kind && counter == 0 {
+ return ExpandResult {
+ value: Fragment::Tokens(tt),
+ err: Some(ExpandError::UnexpectedToken),
+ };
+ }
+ ExpandResult::ok(Fragment::Tokens(tt))
+}
+
+fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
+ match fragment {
+ Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
+ Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => {
+ if tt.delimiter.is_none() {
+ tt.delimiter = Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ })
+ }
+ buf.push(tt.into())
+ }
+ Fragment::Tokens(tt) | Fragment::Expr(tt) => buf.push(tt),
+ }
+}
+
+fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
+ match tt.delimiter {
+ None => buf.extend(tt.token_trees),
+ Some(_) => buf.push(tt.into()),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
new file mode 100644
index 000000000..79da84f4a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -0,0 +1,352 @@
+//! `mbe` (short for Macro By Example) crate contains code for handling
+//! `macro_rules` macros. It uses `TokenTree` (from `tt` package) as the
+//! interface, although it contains some code to bridge `SyntaxNode`s and
+//! `TokenTree`s as well!
+//!
+//! The tests for this functionality live in another crate:
+//! `hir_def::macro_expansion_tests::mbe`.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod parser;
+mod expander;
+mod syntax_bridge;
+mod tt_iter;
+mod to_parser_input;
+
+#[cfg(test)]
+mod benchmark;
+mod token_map;
+
+use std::fmt;
+
+use crate::{
+ parser::{MetaTemplate, Op},
+ tt_iter::TtIter,
+};
+
+// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
+pub use ::parser::TopEntryPoint;
+pub use tt::{Delimiter, DelimiterKind, Punct};
+
+pub use crate::{
+ syntax_bridge::{
+ parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+ syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
+ SyntheticTokenId,
+ },
+ token_map::TokenMap,
+};
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ParseError {
+ UnexpectedToken(Box<str>),
+ Expected(Box<str>),
+ InvalidRepeat,
+ RepetitionEmptyTokenTree,
+}
+
+impl ParseError {
+ fn expected(e: &str) -> ParseError {
+ ParseError::Expected(e.into())
+ }
+
+ fn unexpected(e: &str) -> ParseError {
+ ParseError::UnexpectedToken(e.into())
+ }
+}
+
+impl fmt::Display for ParseError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ParseError::UnexpectedToken(it) => f.write_str(it),
+ ParseError::Expected(it) => f.write_str(it),
+ ParseError::InvalidRepeat => f.write_str("invalid repeat"),
+ ParseError::RepetitionEmptyTokenTree => f.write_str("empty token tree in repetition"),
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ExpandError {
+ BindingError(Box<Box<str>>),
+ LeftoverTokens,
+ ConversionError,
+ LimitExceeded,
+ NoMatchingRule,
+ UnexpectedToken,
+}
+
+impl ExpandError {
+ fn binding_error(e: impl Into<Box<str>>) -> ExpandError {
+ ExpandError::BindingError(Box::new(e.into()))
+ }
+}
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
+ ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
+ ExpandError::BindingError(e) => f.write_str(e),
+ ExpandError::ConversionError => f.write_str("could not convert tokens"),
+ ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
+ ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
+ }
+ }
+}
+
+/// This struct contains AST for a single `macro_rules` definition. What might
+/// be very confusing is that AST has almost exactly the same shape as
+/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
+/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct DeclarativeMacro {
+ rules: Vec<Rule>,
+ /// Highest id of the token we have in TokenMap
+ shift: Shift,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+struct Rule {
+ lhs: MetaTemplate,
+ rhs: MetaTemplate,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Shift(u32);
+
+impl Shift {
+ pub fn new(tt: &tt::Subtree) -> Shift {
+ // Note that TokenId starts from zero,
+ // We have to add 1 to prevent duplication.
+ let value = max_id(tt).map_or(0, |it| it + 1);
+ return Shift(value);
+
+ // Find the max token id inside a subtree
+ fn max_id(subtree: &tt::Subtree) -> Option<u32> {
+ let filter = |tt: &_| match tt {
+ tt::TokenTree::Subtree(subtree) => {
+ let tree_id = max_id(subtree);
+ match subtree.delimiter {
+ Some(it) if it.id != tt::TokenId::unspecified() => {
+ Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0)))
+ }
+ _ => tree_id,
+ }
+ }
+ tt::TokenTree::Leaf(leaf) => {
+ let &(tt::Leaf::Ident(tt::Ident { id, .. })
+ | tt::Leaf::Punct(tt::Punct { id, .. })
+ | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf;
+
+ (id != tt::TokenId::unspecified()).then(|| id.0)
+ }
+ };
+ subtree.token_trees.iter().filter_map(filter).max()
+ }
+ }
+
+ /// Shift given TokenTree token id
+ pub fn shift_all(self, tt: &mut tt::Subtree) {
+ for t in &mut tt.token_trees {
+ match t {
+ tt::TokenTree::Leaf(
+ tt::Leaf::Ident(tt::Ident { id, .. })
+ | tt::Leaf::Punct(tt::Punct { id, .. })
+ | tt::Leaf::Literal(tt::Literal { id, .. }),
+ ) => *id = self.shift(*id),
+ tt::TokenTree::Subtree(tt) => {
+ if let Some(it) = tt.delimiter.as_mut() {
+ it.id = self.shift(it.id);
+ }
+ self.shift_all(tt)
+ }
+ }
+ }
+ }
+
+ pub fn shift(self, id: tt::TokenId) -> tt::TokenId {
+ if id == tt::TokenId::unspecified() {
+ id
+ } else {
+ tt::TokenId(id.0 + self.0)
+ }
+ }
+
+ pub fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
+ id.0.checked_sub(self.0).map(tt::TokenId)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub enum Origin {
+ Def,
+ Call,
+}
+
+impl DeclarativeMacro {
+ /// The old, `macro_rules! m {}` flavor.
+ pub fn parse_macro_rules(tt: &tt::Subtree) -> Result<DeclarativeMacro, ParseError> {
+ // Note: this parsing can be implemented using mbe machinery itself, by
+ // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
+ // manually seems easier.
+ let mut src = TtIter::new(tt);
+ let mut rules = Vec::new();
+ while src.len() > 0 {
+ let rule = Rule::parse(&mut src, true)?;
+ rules.push(rule);
+ if let Err(()) = src.expect_char(';') {
+ if src.len() > 0 {
+ return Err(ParseError::expected("expected `;`"));
+ }
+ break;
+ }
+ }
+
+ for Rule { lhs, .. } in &rules {
+ validate(lhs)?;
+ }
+
+ Ok(DeclarativeMacro { rules, shift: Shift::new(tt) })
+ }
+
+ /// The new, unstable `macro m {}` flavor.
+ pub fn parse_macro2(tt: &tt::Subtree) -> Result<DeclarativeMacro, ParseError> {
+ let mut src = TtIter::new(tt);
+ let mut rules = Vec::new();
+
+ if Some(tt::DelimiterKind::Brace) == tt.delimiter_kind() {
+ cov_mark::hit!(parse_macro_def_rules);
+ while src.len() > 0 {
+ let rule = Rule::parse(&mut src, true)?;
+ rules.push(rule);
+ if let Err(()) = src.expect_any_char(&[';', ',']) {
+ if src.len() > 0 {
+ return Err(ParseError::expected("expected `;` or `,` to delimit rules"));
+ }
+ break;
+ }
+ }
+ } else {
+ cov_mark::hit!(parse_macro_def_simple);
+ let rule = Rule::parse(&mut src, false)?;
+ if src.len() != 0 {
+ return Err(ParseError::expected("remaining tokens in macro def"));
+ }
+ rules.push(rule);
+ }
+
+ for Rule { lhs, .. } in &rules {
+ validate(lhs)?;
+ }
+
+ Ok(DeclarativeMacro { rules, shift: Shift::new(tt) })
+ }
+
+ pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+ // apply shift
+ let mut tt = tt.clone();
+ self.shift.shift_all(&mut tt);
+ expander::expand_rules(&self.rules, &tt)
+ }
+
+ pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+ self.shift.shift(id)
+ }
+
+ pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
+ match self.shift.unshift(id) {
+ Some(id) => (id, Origin::Call),
+ None => (id, Origin::Def),
+ }
+ }
+
+ pub fn shift(&self) -> Shift {
+ self.shift
+ }
+}
+
+impl Rule {
+ fn parse(src: &mut TtIter<'_>, expect_arrow: bool) -> Result<Self, ParseError> {
+ let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
+ if expect_arrow {
+ src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
+ src.expect_char('>').map_err(|()| ParseError::expected("expected `>`"))?;
+ }
+ let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
+
+ let lhs = MetaTemplate::parse_pattern(lhs)?;
+ let rhs = MetaTemplate::parse_template(rhs)?;
+
+ Ok(crate::Rule { lhs, rhs })
+ }
+}
+
+fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
+ for op in pattern.iter() {
+ match op {
+ Op::Subtree { tokens, .. } => validate(tokens)?,
+ Op::Repeat { tokens: subtree, separator, .. } => {
+ // Check that no repetition could match an empty token
+ // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
+ let lsh_is_empty_seq = separator.is_none() && subtree.iter().all(|child_op| {
+ match child_op {
+ // vis is optional
+ Op::Var { kind: Some(kind), .. } => kind == "vis",
+ Op::Repeat {
+ kind: parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne,
+ ..
+ } => true,
+ _ => false,
+ }
+ });
+ if lsh_is_empty_seq {
+ return Err(ParseError::RepetitionEmptyTokenTree);
+ }
+ validate(subtree)?
+ }
+ _ => (),
+ }
+ }
+ Ok(())
+}
+
+pub type ExpandResult<T> = ValueResult<T, ExpandError>;
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ValueResult<T, E> {
+ pub value: T,
+ pub err: Option<E>,
+}
+
+impl<T, E> ValueResult<T, E> {
+ pub fn ok(value: T) -> Self {
+ Self { value, err: None }
+ }
+
+ pub fn only_err(err: E) -> Self
+ where
+ T: Default,
+ {
+ Self { value: Default::default(), err: Some(err) }
+ }
+
+ pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ValueResult<U, E> {
+ ValueResult { value: f(self.value), err: self.err }
+ }
+
+ pub fn map_err<E2>(self, f: impl FnOnce(E) -> E2) -> ValueResult<T, E2> {
+ ValueResult { value: self.value, err: self.err.map(f) }
+ }
+
+ pub fn result(self) -> Result<T, E> {
+ self.err.map_or(Ok(self.value), Err)
+ }
+}
+
+impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
+ fn from(result: Result<T, E>) -> Self {
+ result.map_or_else(Self::only_err, Self::ok)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
new file mode 100644
index 000000000..acb4be584
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
@@ -0,0 +1,261 @@
+//! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token
+//! trees.
+
+use smallvec::SmallVec;
+use syntax::SmolStr;
+
+use crate::{tt_iter::TtIter, ParseError};
+
+/// Consider
+///
+/// ```
+/// macro_rules! a_macro {
+/// ($x:expr + $y:expr) => ($y * $x)
+/// }
+/// ```
+///
+/// Stuff to the left of `=>` is a [`MetaTemplate`] pattern (which is matched
+/// with input).
+///
+/// Stuff to the right is a [`MetaTemplate`] template which is used to produce
+/// output.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) struct MetaTemplate(pub(crate) Vec<Op>);
+
+impl MetaTemplate {
+ pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+ MetaTemplate::parse(pattern, Mode::Pattern)
+ }
+
+ pub(crate) fn parse_template(template: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+ MetaTemplate::parse(template, Mode::Template)
+ }
+
+ pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> {
+ self.0.iter()
+ }
+
+ fn parse(tt: &tt::Subtree, mode: Mode) -> Result<MetaTemplate, ParseError> {
+ let mut src = TtIter::new(tt);
+
+ let mut res = Vec::new();
+ while let Some(first) = src.next() {
+ let op = next_op(first, &mut src, mode)?;
+ res.push(op);
+ }
+
+ Ok(MetaTemplate(res))
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum Op {
+ Var { name: SmolStr, kind: Option<SmolStr>, id: tt::TokenId },
+ Ignore { name: SmolStr, id: tt::TokenId },
+ Index { depth: u32 },
+ Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
+ Leaf(tt::Leaf),
+ Subtree { tokens: MetaTemplate, delimiter: Option<tt::Delimiter> },
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum RepeatKind {
+ ZeroOrMore,
+ OneOrMore,
+ ZeroOrOne,
+}
+
+#[derive(Clone, Debug, Eq)]
+pub(crate) enum Separator {
+ Literal(tt::Literal),
+ Ident(tt::Ident),
+ Puncts(SmallVec<[tt::Punct; 3]>),
+}
+
+// Note that when we compare a Separator, we just care about its textual value.
+impl PartialEq for Separator {
+ fn eq(&self, other: &Separator) -> bool {
+ use Separator::*;
+
+ match (self, other) {
+ (Ident(a), Ident(b)) => a.text == b.text,
+ (Literal(a), Literal(b)) => a.text == b.text,
+ (Puncts(a), Puncts(b)) if a.len() == b.len() => {
+ let a_iter = a.iter().map(|a| a.char);
+ let b_iter = b.iter().map(|b| b.char);
+ a_iter.eq(b_iter)
+ }
+ _ => false,
+ }
+ }
+}
+
+impl Separator {
+ pub(crate) fn tt_count(&self) -> usize {
+ match self {
+ Separator::Literal(_) => 1,
+ Separator::Ident(_) => 1,
+ Separator::Puncts(it) => it.len(),
+ }
+ }
+}
+
+#[derive(Clone, Copy)]
+enum Mode {
+ Pattern,
+ Template,
+}
+
+fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result<Op, ParseError> {
+ let res = match first {
+ tt::TokenTree::Leaf(leaf @ tt::Leaf::Punct(tt::Punct { char: '$', .. })) => {
+ // Note that the '$' itself is a valid token inside macro_rules.
+ let second = match src.next() {
+ None => return Ok(Op::Leaf(leaf.clone())),
+ Some(it) => it,
+ };
+ match second {
+ tt::TokenTree::Subtree(subtree) => match subtree.delimiter_kind() {
+ Some(tt::DelimiterKind::Parenthesis) => {
+ let (separator, kind) = parse_repeat(src)?;
+ let tokens = MetaTemplate::parse(subtree, mode)?;
+ Op::Repeat { tokens, separator, kind }
+ }
+ Some(tt::DelimiterKind::Brace) => match mode {
+ Mode::Template => {
+ parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| {
+ ParseError::unexpected("invalid metavariable expression")
+ })?
+ }
+ Mode::Pattern => {
+ return Err(ParseError::unexpected(
+ "`${}` metavariable expressions are not allowed in matchers",
+ ))
+ }
+ },
+ _ => {
+ return Err(ParseError::expected(
+ "expected `$()` repetition or `${}` expression",
+ ))
+ }
+ },
+ tt::TokenTree::Leaf(leaf) => match leaf {
+ tt::Leaf::Ident(ident) if ident.text == "crate" => {
+ // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
+ Op::Leaf(tt::Leaf::from(tt::Ident { text: "$crate".into(), id: ident.id }))
+ }
+ tt::Leaf::Ident(ident) => {
+ let kind = eat_fragment_kind(src, mode)?;
+ let name = ident.text.clone();
+ let id = ident.id;
+ Op::Var { name, kind, id }
+ }
+ tt::Leaf::Literal(lit) if is_boolean_literal(lit) => {
+ let kind = eat_fragment_kind(src, mode)?;
+ let name = lit.text.clone();
+ let id = lit.id;
+ Op::Var { name, kind, id }
+ }
+ tt::Leaf::Punct(punct @ tt::Punct { char: '$', .. }) => match mode {
+ Mode::Pattern => {
+ return Err(ParseError::unexpected(
+ "`$$` is not allowed on the pattern side",
+ ))
+ }
+ Mode::Template => Op::Leaf(tt::Leaf::Punct(*punct)),
+ },
+ tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => {
+ return Err(ParseError::expected("expected ident"))
+ }
+ },
+ }
+ }
+ tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()),
+ tt::TokenTree::Subtree(subtree) => {
+ let tokens = MetaTemplate::parse(subtree, mode)?;
+ Op::Subtree { tokens, delimiter: subtree.delimiter }
+ }
+ };
+ Ok(res)
+}
+
+fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<SmolStr>, ParseError> {
+ if let Mode::Pattern = mode {
+ src.expect_char(':').map_err(|()| ParseError::unexpected("missing fragment specifier"))?;
+ let ident = src
+ .expect_ident()
+ .map_err(|()| ParseError::unexpected("missing fragment specifier"))?;
+ return Ok(Some(ident.text.clone()));
+ };
+ Ok(None)
+}
+
+fn is_boolean_literal(lit: &tt::Literal) -> bool {
+ matches!(lit.text.as_str(), "true" | "false")
+}
+
+fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
+ let mut separator = Separator::Puncts(SmallVec::new());
+ for tt in src {
+ let tt = match tt {
+ tt::TokenTree::Leaf(leaf) => leaf,
+ tt::TokenTree::Subtree(_) => return Err(ParseError::InvalidRepeat),
+ };
+ let has_sep = match &separator {
+ Separator::Puncts(puncts) => !puncts.is_empty(),
+ _ => true,
+ };
+ match tt {
+ tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => {
+ return Err(ParseError::InvalidRepeat)
+ }
+ tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()),
+ tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()),
+ tt::Leaf::Punct(punct) => {
+ let repeat_kind = match punct.char {
+ '*' => RepeatKind::ZeroOrMore,
+ '+' => RepeatKind::OneOrMore,
+ '?' => RepeatKind::ZeroOrOne,
+ _ => match &mut separator {
+ Separator::Puncts(puncts) if puncts.len() != 3 => {
+ puncts.push(*punct);
+ continue;
+ }
+ _ => return Err(ParseError::InvalidRepeat),
+ },
+ };
+ return Ok((has_sep.then(|| separator), repeat_kind));
+ }
+ }
+ }
+ Err(ParseError::InvalidRepeat)
+}
+
+fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
+ let func = src.expect_ident()?;
+ let args = src.expect_subtree()?;
+
+ if args.delimiter_kind() != Some(tt::DelimiterKind::Parenthesis) {
+ return Err(());
+ }
+
+ let mut args = TtIter::new(args);
+
+ let op = match &*func.text {
+ "ignore" => {
+ let ident = args.expect_ident()?;
+ Op::Ignore { name: ident.text.clone(), id: ident.id }
+ }
+ "index" => {
+ let depth = if args.len() == 0 { 0 } else { args.expect_u32_literal()? };
+ Op::Index { depth }
+ }
+ _ => return Err(()),
+ };
+
+ if args.next().is_some() {
+ return Err(());
+ }
+
+ Ok(op)
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
new file mode 100644
index 000000000..aca6ecd42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -0,0 +1,844 @@
+//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
+
+use rustc_hash::FxHashMap;
+use stdx::{always, non_empty_vec::NonEmptyVec};
+use syntax::{
+ ast::{self, make::tokens::doc_comment},
+ AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
+};
+use tt::buffer::{Cursor, TokenBuffer};
+
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
+
+/// Convert the syntax node to a `TokenTree` (what macro
+/// will consume).
+pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
+ let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
+ node,
+ Default::default(),
+ 0,
+ Default::default(),
+ Default::default(),
+ );
+ (subtree, token_map)
+}
+
+/// Convert the syntax node to a `TokenTree` (what macro will consume)
+/// with the censored range excluded.
+pub fn syntax_node_to_token_tree_with_modifications(
+ node: &SyntaxNode,
+ existing_token_map: TokenMap,
+ next_id: u32,
+ replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+) -> (tt::Subtree, TokenMap, u32) {
+ let global_offset = node.text_range().start();
+ let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
+ let subtree = convert_tokens(&mut c);
+ c.id_alloc.map.shrink_to_fit();
+ always!(c.replace.is_empty(), "replace: {:?}", c.replace);
+ always!(c.append.is_empty(), "append: {:?}", c.append);
+ (subtree, c.id_alloc.map, c.id_alloc.next_id)
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct SyntheticTokenId(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct SyntheticToken {
+ pub kind: SyntaxKind,
+ pub text: SmolStr,
+ pub range: TextRange,
+ pub id: SyntheticTokenId,
+}
+
+// The following items are what `rustc` macro can be parsed into:
+// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
+// * Expr(P<ast::Expr>) -> token_tree_to_expr
+// * Pat(P<ast::Pat>) -> token_tree_to_pat
+// * Ty(P<ast::Ty>) -> token_tree_to_ty
+// * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts
+// * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_items
+//
+// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
+// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
+// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
+
+pub fn token_tree_to_syntax_node(
+ tt: &tt::Subtree,
+ entry_point: parser::TopEntryPoint,
+) -> (Parse<SyntaxNode>, TokenMap) {
+ let buffer = match tt {
+ tt::Subtree { delimiter: None, token_trees } => {
+ TokenBuffer::from_tokens(token_trees.as_slice())
+ }
+ _ => TokenBuffer::from_subtree(tt),
+ };
+ let parser_input = to_parser_input(&buffer);
+ let parser_output = entry_point.parse(&parser_input);
+ let mut tree_sink = TtTreeSink::new(buffer.begin());
+ for event in parser_output.iter() {
+ match event {
+ parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
+ tree_sink.token(kind, n_raw_tokens)
+ }
+ parser::Step::Enter { kind } => tree_sink.start_node(kind),
+ parser::Step::Exit => tree_sink.finish_node(),
+ parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
+ }
+ }
+ let (parse, range_map) = tree_sink.finish();
+ (parse, range_map)
+}
+
+/// Convert a string to a `TokenTree`
+pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+ let lexed = parser::LexedStr::new(text);
+ if lexed.errors().next().is_some() {
+ return None;
+ }
+
+ let mut conv = RawConvertor {
+ lexed,
+ pos: 0,
+ id_alloc: TokenIdAlloc {
+ map: Default::default(),
+ global_offset: TextSize::default(),
+ next_id: 0,
+ },
+ };
+
+ let subtree = convert_tokens(&mut conv);
+ Some((subtree, conv.id_alloc.map))
+}
+
+/// Split token tree with separate expr: $($e:expr)SEP*
+pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+ if tt.token_trees.is_empty() {
+ return Vec::new();
+ }
+
+ let mut iter = TtIter::new(tt);
+ let mut res = Vec::new();
+
+ while iter.peek_n(0).is_some() {
+ let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);
+
+ res.push(match expanded.value {
+ None => break,
+ Some(tt @ tt::TokenTree::Leaf(_)) => {
+ tt::Subtree { delimiter: None, token_trees: vec![tt] }
+ }
+ Some(tt::TokenTree::Subtree(tt)) => tt,
+ });
+
+ let mut fork = iter.clone();
+ if fork.expect_char(sep).is_err() {
+ break;
+ }
+ iter = fork;
+ }
+
+ if iter.peek_n(0).is_some() {
+ res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+ }
+
+ res
+}
+
+fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
+ struct StackEntry {
+ subtree: tt::Subtree,
+ idx: usize,
+ open_range: TextRange,
+ }
+
+ let entry = StackEntry {
+ subtree: tt::Subtree { delimiter: None, ..Default::default() },
+ // never used (delimiter is `None`)
+ idx: !0,
+ open_range: TextRange::empty(TextSize::of('.')),
+ };
+ let mut stack = NonEmptyVec::new(entry);
+
+ loop {
+ let StackEntry { subtree, .. } = stack.last_mut();
+ let result = &mut subtree.token_trees;
+ let (token, range) = match conv.bump() {
+ Some(it) => it,
+ None => break,
+ };
+ let synth_id = token.synthetic_id(conv);
+
+ let kind = token.kind(conv);
+ if kind == COMMENT {
+ if let Some(tokens) = conv.convert_doc_comment(&token) {
+ // FIXME: There has to be a better way to do this
+ // Add the comments token id to the converted doc string
+ let id = conv.id_alloc().alloc(range, synth_id);
+ result.extend(tokens.into_iter().map(|mut tt| {
+ if let tt::TokenTree::Subtree(sub) = &mut tt {
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
+ sub.token_trees.get_mut(2)
+ {
+ lit.id = id
+ }
+ }
+ tt
+ }));
+ }
+ continue;
+ }
+ let tt = if kind.is_punct() && kind != UNDERSCORE {
+ if synth_id.is_none() {
+ assert_eq!(range.len(), TextSize::of('.'));
+ }
+
+ if let Some(delim) = subtree.delimiter {
+ let expected = match delim.kind {
+ tt::DelimiterKind::Parenthesis => T![')'],
+ tt::DelimiterKind::Brace => T!['}'],
+ tt::DelimiterKind::Bracket => T![']'],
+ };
+
+ if kind == expected {
+ if let Some(entry) = stack.pop() {
+ conv.id_alloc().close_delim(entry.idx, Some(range));
+ stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+ }
+ continue;
+ }
+ }
+
+ let delim = match kind {
+ T!['('] => Some(tt::DelimiterKind::Parenthesis),
+ T!['{'] => Some(tt::DelimiterKind::Brace),
+ T!['['] => Some(tt::DelimiterKind::Bracket),
+ _ => None,
+ };
+
+ if let Some(kind) = delim {
+ let mut subtree = tt::Subtree::default();
+ let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
+ subtree.delimiter = Some(tt::Delimiter { id, kind });
+ stack.push(StackEntry { subtree, idx, open_range: range });
+ continue;
+ }
+
+ let spacing = match conv.peek().map(|next| next.kind(conv)) {
+ Some(kind)
+ if !kind.is_trivia()
+ && kind.is_punct()
+ && kind != T!['[']
+ && kind != T!['{']
+ && kind != T!['(']
+ && kind != UNDERSCORE =>
+ {
+ tt::Spacing::Joint
+ }
+ _ => tt::Spacing::Alone,
+ };
+ let char = match token.to_char(conv) {
+ Some(c) => c,
+ None => {
+ panic!("Token from lexer must be single char: token = {:#?}", token);
+ }
+ };
+ tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
+ .into()
+ } else {
+ macro_rules! make_leaf {
+ ($i:ident) => {
+ tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
+ .into()
+ };
+ }
+ let leaf: tt::Leaf = match kind {
+ T![true] | T![false] => make_leaf!(Ident),
+ IDENT => make_leaf!(Ident),
+ UNDERSCORE => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ LIFETIME_IDENT => {
+ let char_unit = TextSize::of('\'');
+ let r = TextRange::at(range.start(), char_unit);
+ let apostrophe = tt::Leaf::from(tt::Punct {
+ char: '\'',
+ spacing: tt::Spacing::Joint,
+ id: conv.id_alloc().alloc(r, synth_id),
+ });
+ result.push(apostrophe.into());
+
+ let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
+ let ident = tt::Leaf::from(tt::Ident {
+ text: SmolStr::new(&token.to_text(conv)[1..]),
+ id: conv.id_alloc().alloc(r, synth_id),
+ });
+ result.push(ident.into());
+ continue;
+ }
+ _ => continue,
+ };
+
+ leaf.into()
+ };
+ result.push(tt);
+ }
+
+ // If we get here, we've consumed all input tokens.
+ // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
+ // Merge them so we're left with one.
+ while let Some(entry) = stack.pop() {
+ let parent = stack.last_mut();
+
+ conv.id_alloc().close_delim(entry.idx, None);
+ let leaf: tt::Leaf = tt::Punct {
+ id: conv.id_alloc().alloc(entry.open_range, None),
+ char: match entry.subtree.delimiter.unwrap().kind {
+ tt::DelimiterKind::Parenthesis => '(',
+ tt::DelimiterKind::Brace => '{',
+ tt::DelimiterKind::Bracket => '[',
+ },
+ spacing: tt::Spacing::Alone,
+ }
+ .into();
+ parent.subtree.token_trees.push(leaf.into());
+ parent.subtree.token_trees.extend(entry.subtree.token_trees);
+ }
+
+ let subtree = stack.into_last().subtree;
+ if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
+ first.clone()
+ } else {
+ subtree
+ }
+}
+
+/// Returns the textual content of a doc comment block as a quoted string
+/// That is, strips leading `///` (or `/**`, etc)
+/// and strips the ending `*/`
+/// And then quote the string, which is needed to convert to `tt::Literal`
+fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
+ let prefix_len = comment.prefix().len();
+ let mut text = &comment.text()[prefix_len..];
+
+ // Remove ending "*/"
+ if comment.kind().shape == ast::CommentShape::Block {
+ text = &text[0..text.len() - 2];
+ }
+
+ // Quote the string
+ // Note that `tt::Literal` expect an escaped string
+ let text = format!("\"{}\"", text.escape_debug());
+ text.into()
+}
+
+fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
+ cov_mark::hit!(test_meta_doc_comments);
+ let comment = ast::Comment::cast(token.clone())?;
+ let doc = comment.kind().doc?;
+
+ // Make `doc="\" Comments\""
+ let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
+
+ // Make `#![]`
+ let mut token_trees = Vec::with_capacity(3);
+ token_trees.push(mk_punct('#'));
+ if let ast::CommentPlacement::Inner = doc {
+ token_trees.push(mk_punct('!'));
+ }
+ token_trees.push(tt::TokenTree::from(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::Bracket,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: meta_tkns,
+ }));
+
+ return Some(token_trees);
+
+ // Helper functions
+ fn mk_ident(s: &str) -> tt::TokenTree {
+ tt::TokenTree::from(tt::Leaf::from(tt::Ident {
+ text: s.into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ }
+
+ fn mk_punct(c: char) -> tt::TokenTree {
+ tt::TokenTree::from(tt::Leaf::from(tt::Punct {
+ char: c,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))
+ }
+
+ fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
+ let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };
+
+ tt::TokenTree::from(tt::Leaf::from(lit))
+ }
+}
+
+struct TokenIdAlloc {
+ map: TokenMap,
+ global_offset: TextSize,
+ next_id: u32,
+}
+
+impl TokenIdAlloc {
+ fn alloc(
+ &mut self,
+ absolute_range: TextRange,
+ synthetic_id: Option<SyntheticTokenId>,
+ ) -> tt::TokenId {
+ let relative_range = absolute_range - self.global_offset;
+ let token_id = tt::TokenId(self.next_id);
+ self.next_id += 1;
+ self.map.insert(token_id, relative_range);
+ if let Some(id) = synthetic_id {
+ self.map.insert_synthetic(token_id, id);
+ }
+ token_id
+ }
+
+ fn open_delim(
+ &mut self,
+ open_abs_range: TextRange,
+ synthetic_id: Option<SyntheticTokenId>,
+ ) -> (tt::TokenId, usize) {
+ let token_id = tt::TokenId(self.next_id);
+ self.next_id += 1;
+ let idx = self.map.insert_delim(
+ token_id,
+ open_abs_range - self.global_offset,
+ open_abs_range - self.global_offset,
+ );
+ if let Some(id) = synthetic_id {
+ self.map.insert_synthetic(token_id, id);
+ }
+ (token_id, idx)
+ }
+
+ fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
+ match close_abs_range {
+ None => {
+ self.map.remove_delim(idx);
+ }
+ Some(close) => {
+ self.map.update_close_delim(idx, close - self.global_offset);
+ }
+ }
+ }
+}
+
+/// A raw token (straight from lexer) convertor
+struct RawConvertor<'a> {
+ lexed: parser::LexedStr<'a>,
+ pos: usize,
+ id_alloc: TokenIdAlloc,
+}
+
+trait SrcToken<Ctx>: std::fmt::Debug {
+ fn kind(&self, ctx: &Ctx) -> SyntaxKind;
+
+ fn to_char(&self, ctx: &Ctx) -> Option<char>;
+
+ fn to_text(&self, ctx: &Ctx) -> SmolStr;
+
+ fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
+}
+
+trait TokenConvertor: Sized {
+ type Token: SrcToken<Self>;
+
+ fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
+
+ fn peek(&self) -> Option<Self::Token>;
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc;
+}
+
+impl<'a> SrcToken<RawConvertor<'a>> for usize {
+ fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
+ ctx.lexed.kind(*self)
+ }
+
+ fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
+ ctx.lexed.text(*self).chars().next()
+ }
+
+ fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
+ ctx.lexed.text(*self).into()
+ }
+
+ fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
+ None
+ }
+}
+
+impl<'a> TokenConvertor for RawConvertor<'a> {
+ type Token = usize;
+
+ fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
+ let text = self.lexed.text(token);
+ convert_doc_comment(&doc_comment(text))
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ let token = self.pos;
+ self.pos += 1;
+ let range = self.lexed.text_range(token);
+ let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+
+ Some((token, range))
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ Some(self.pos)
+ }
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+ &mut self.id_alloc
+ }
+}
+
+struct Convertor {
+ id_alloc: TokenIdAlloc,
+ current: Option<SyntaxToken>,
+ current_synthetic: Vec<SyntheticToken>,
+ preorder: PreorderWithTokens,
+ replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ range: TextRange,
+ punct_offset: Option<(SyntaxToken, TextSize)>,
+}
+
+impl Convertor {
+ fn new(
+ node: &SyntaxNode,
+ global_offset: TextSize,
+ existing_token_map: TokenMap,
+ next_id: u32,
+ mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ ) -> Convertor {
+ let range = node.text_range();
+ let mut preorder = node.preorder_with_tokens();
+ // Prime the cursor with the first real token and/or synthetic tokens.
+ let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
+ Convertor {
+ id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
+ current: first,
+ current_synthetic: synthetic,
+ preorder,
+ range,
+ replace,
+ append,
+ punct_offset: None,
+ }
+ }
+
+ /// Advances the preorder traversal to the next token, honoring the
+ /// `replace`/`append` maps. Returns either a real token, or a reversed
+ /// list of synthetic tokens to drain via `pop`, or neither at the end.
+ fn next_token(
+ preorder: &mut PreorderWithTokens,
+ replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
+ while let Some(ev) = preorder.next() {
+ let ele = match ev {
+ WalkEvent::Enter(ele) => ele,
+ WalkEvent::Leave(ele) => {
+ // When leaving an element, emit any tokens appended after it.
+ if let Some(mut v) = append.remove(&ele) {
+ if !v.is_empty() {
+ v.reverse();
+ return (None, v);
+ }
+ }
+ continue;
+ }
+ };
+ // A replaced element's entire subtree is skipped and substituted
+ // by its synthetic tokens.
+ if let Some(mut v) = replace.remove(&ele) {
+ preorder.skip_subtree();
+ if !v.is_empty() {
+ v.reverse();
+ return (None, v);
+ }
+ }
+ match ele {
+ SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+ _ => {}
+ }
+ }
+ (None, Vec::new())
+ }
+}
+
+/// A token as seen by the tree `Convertor`: a real syntax token, a single
+/// character split out of a multi-char punctuation token, or a synthetic one.
+#[derive(Debug)]
+enum SynToken {
+ Ordinary(SyntaxToken),
+ // FIXME is this supposed to be `Punct`?
+ Punch(SyntaxToken, TextSize),
+ Synthetic(SyntheticToken),
+}
+
+impl SynToken {
+ /// The underlying syntax token, if any (`None` for synthetic tokens).
+ fn token(&self) -> Option<&SyntaxToken> {
+ match self {
+ SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
+ SynToken::Synthetic(_) => None,
+ }
+ }
+}
+
+impl SrcToken<Convertor> for SynToken {
+ fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+ match self {
+ SynToken::Ordinary(token) => token.kind(),
+ SynToken::Punch(token, _) => token.kind(),
+ SynToken::Synthetic(token) => token.kind,
+ }
+ }
+ fn to_char(&self, _ctx: &Convertor) -> Option<char> {
+ match self {
+ SynToken::Ordinary(_) => None,
+ // For a split punct, the char at the stored offset within the token text.
+ SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+ // A synthetic token only has a char form if its text is one byte long.
+ SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
+ SynToken::Synthetic(_) => None,
+ }
+ }
+ fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+ match self {
+ SynToken::Ordinary(token) => token.text().into(),
+ SynToken::Punch(token, _) => token.text().into(),
+ SynToken::Synthetic(token) => token.text.clone(),
+ }
+ }
+
+ fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
+ match self {
+ SynToken::Synthetic(token) => Some(token.id),
+ _ => None,
+ }
+ }
+}
+
+impl TokenConvertor for Convertor {
+ type Token = SynToken;
+ fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
+ // Synthetic tokens have no underlying syntax token, hence no doc comment.
+ convert_doc_comment(token.token()?)
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ // 1) If we are in the middle of a multi-char punct, emit its next char.
+ if let Some((punct, offset)) = self.punct_offset.clone() {
+ if usize::from(offset) + 1 < punct.text().len() {
+ // `TextSize::of('.')` is just a readable way to write "1 byte".
+ let offset = offset + TextSize::of('.');
+ let range = punct.text_range();
+ self.punct_offset = Some((punct.clone(), offset));
+ let range = TextRange::at(range.start() + offset, TextSize::of('.'));
+ return Some((SynToken::Punch(punct, offset), range));
+ }
+ }
+
+ // 2) Drain pending synthetic tokens (stored reversed, so `pop` yields
+ // them in source order); refill the cursor when the last one goes.
+ if let Some(synth_token) = self.current_synthetic.pop() {
+ if self.current_synthetic.is_empty() {
+ let (new_current, new_synth) =
+ Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+ self.current = new_current;
+ self.current_synthetic = new_synth;
+ }
+ let range = synth_token.range;
+ return Some((SynToken::Synthetic(synth_token), range));
+ }
+
+ // 3) Emit the next real token, stopping once outside the converted range.
+ let curr = self.current.clone()?;
+ if !&self.range.contains_range(curr.text_range()) {
+ return None;
+ }
+ let (new_current, new_synth) =
+ Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+ self.current = new_current;
+ self.current_synthetic = new_synth;
+ let token = if curr.kind().is_punct() {
+ // Punctuation is emitted one char at a time; remember where we are.
+ self.punct_offset = Some((curr.clone(), 0.into()));
+ let range = curr.text_range();
+ let range = TextRange::at(range.start(), TextSize::of('.'));
+ (SynToken::Punch(curr, 0.into()), range)
+ } else {
+ self.punct_offset = None;
+ let range = curr.text_range();
+ (SynToken::Ordinary(curr), range)
+ };
+
+ Some(token)
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ // Mirrors `bump` step by step, but without mutating any state.
+ if let Some((punct, mut offset)) = self.punct_offset.clone() {
+ offset += TextSize::of('.');
+ if usize::from(offset) < punct.text().len() {
+ return Some(SynToken::Punch(punct, offset));
+ }
+ }
+
+ if let Some(synth_token) = self.current_synthetic.last() {
+ return Some(SynToken::Synthetic(synth_token.clone()));
+ }
+
+ let curr = self.current.clone()?;
+ if !self.range.contains_range(curr.text_range()) {
+ return None;
+ }
+
+ let token = if curr.kind().is_punct() {
+ SynToken::Punch(curr, 0.into())
+ } else {
+ SynToken::Ordinary(curr)
+ };
+ Some(token)
+ }
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+ &mut self.id_alloc
+ }
+}
+
+/// Sink that renders a `tt` token-tree cursor back into text, feeds it to a
+/// `SyntaxTreeBuilder`, and records a `TokenMap` from token ids to text ranges.
+struct TtTreeSink<'a> {
+ // Accumulated text of the token trees forming the current parser token.
+ buf: String,
+ cursor: Cursor<'a>,
+ // Start position of each open delimiter, keyed by its token id.
+ open_delims: FxHashMap<tt::TokenId, TextSize>,
+ // Current position in the rendered text.
+ text_pos: TextSize,
+ inner: SyntaxTreeBuilder,
+ token_map: TokenMap,
+}
+
+impl<'a> TtTreeSink<'a> {
+ fn new(cursor: Cursor<'a>) -> Self {
+ TtTreeSink {
+ buf: String::new(),
+ cursor,
+ open_delims: FxHashMap::default(),
+ text_pos: 0.into(),
+ inner: SyntaxTreeBuilder::default(),
+ token_map: TokenMap::default(),
+ }
+ }
+
+ /// Finishes building, returning the parse result and the id-to-range map.
+ fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
+ self.token_map.shrink_to_fit();
+ (self.inner.finish(), self.token_map)
+ }
+}
+
+/// Returns the textual form of a delimiter: the opening or closing half,
+/// obtained by slicing one byte off either end of the two-char pair string.
+fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
+ let texts = match d {
+ tt::DelimiterKind::Parenthesis => "()",
+ tt::DelimiterKind::Brace => "{}",
+ tt::DelimiterKind::Bracket => "[]",
+ };
+
+ // idx = 0 keeps the first byte (opening), idx = 1 keeps the second (closing).
+ let idx = closing as usize;
+ &texts[idx..texts.len() - (1 - idx)]
+}
+
+impl<'a> TtTreeSink<'a> {
+ /// Consumes `n_tokens` entries from the token-tree cursor and emits them
+ /// as one parser token of `kind`, recording id-to-range mappings along the way.
+ fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
+ if kind == LIFETIME_IDENT {
+ // A lifetime is represented as `'` + ident in the token trees.
+ n_tokens = 2;
+ }
+
+ let mut last = self.cursor;
+ for _ in 0..n_tokens {
+ // Holds the single punct byte so it can be borrowed as a &str below.
+ let tmp: u8;
+ if self.cursor.eof() {
+ break;
+ }
+ last = self.cursor;
+ let text: &str = loop {
+ break match self.cursor.token_tree() {
+ Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
+ // Mark the range if needed
+ let (text, id) = match leaf {
+ tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id),
+ tt::Leaf::Punct(punct) => {
+ assert!(punct.char.is_ascii());
+ tmp = punct.char as u8;
+ (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id)
+ }
+ tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id),
+ };
+ let range = TextRange::at(self.text_pos, TextSize::of(text));
+ self.token_map.insert(id, range);
+ self.cursor = self.cursor.bump();
+ text
+ }
+ Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
+ // Descend into the subtree; emit its opening delimiter (if any)
+ // and remember where it started.
+ self.cursor = self.cursor.subtree().unwrap();
+ match subtree.delimiter {
+ Some(d) => {
+ self.open_delims.insert(d.id, self.text_pos);
+ delim_to_str(d.kind, false)
+ }
+ None => continue,
+ }
+ }
+ None => {
+ // End of a subtree: emit its closing delimiter and record the
+ // one-byte open/close ranges under the delimiter's token id.
+ let parent = self.cursor.end().unwrap();
+ self.cursor = self.cursor.bump();
+ match parent.delimiter {
+ Some(d) => {
+ if let Some(open_delim) = self.open_delims.get(&d.id) {
+ let open_range = TextRange::at(*open_delim, TextSize::of('('));
+ let close_range =
+ TextRange::at(self.text_pos, TextSize::of('('));
+ self.token_map.insert_delim(d.id, open_range, close_range);
+ }
+ delim_to_str(d.kind, true)
+ }
+ None => continue,
+ }
+ }
+ };
+ };
+ self.buf += text;
+ self.text_pos += TextSize::of(text);
+ }
+
+ self.inner.token(kind, self.buf.as_str());
+ self.buf.clear();
+ // Add whitespace between adjoint puncts
+ let next = last.bump();
+ if let (
+ Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
+ Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
+ ) = (last.token_tree(), next.token_tree())
+ {
+ // Note: We always assume the semi-colon would be the last token in
+ // other parts of RA such that we don't add whitespace here.
+ if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
+ self.inner.token(WHITESPACE, " ");
+ self.text_pos += TextSize::of(' ');
+ }
+ }
+ }
+
+ fn start_node(&mut self, kind: SyntaxKind) {
+ self.inner.start_node(kind);
+ }
+
+ fn finish_node(&mut self) {
+ self.inner.finish_node();
+ }
+
+ fn error(&mut self, error: String) {
+ // Errors are attached at the current rendered-text position.
+ self.inner.error(error, self.text_pos)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
new file mode 100644
index 000000000..783c3ca4a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
@@ -0,0 +1,99 @@
+//! Convert macro-by-example tokens which are specific to macro expansion into a
+//! format that works for our parser.
+
+use syntax::{SyntaxKind, SyntaxKind::*, T};
+use tt::buffer::TokenBuffer;
+
+/// Flattens a `TokenBuffer` into the `parser::Input` token-kind stream the
+/// parser consumes, gluing `'` + ident into `LIFETIME_IDENT` and mapping
+/// delimiters to their bracket tokens.
+pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
+ let mut res = parser::Input::default();
+
+ let mut current = buffer.begin();
+
+ while !current.eof() {
+ let cursor = current;
+ let tt = cursor.token_tree();
+
+ // Check if it is lifetime
+ if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt {
+ if punct.char == '\'' {
+ let next = cursor.bump();
+ match next.token_tree() {
+ Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(_ident), _)) => {
+ // `'` followed by an ident forms a single lifetime token.
+ res.push(LIFETIME_IDENT);
+ current = next.bump();
+ continue;
+ }
+ _ => panic!("Next token must be ident : {:#?}", next.token_tree()),
+ }
+ }
+ }
+
+ current = match tt {
+ Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ // Re-lex the literal text (minus any leading `-`) to recover
+ // its precise literal kind.
+ let is_negated = lit.text.starts_with('-');
+ let inner_text = &lit.text[if is_negated { 1 } else { 0 }..];
+
+ let kind = parser::LexedStr::single_token(inner_text)
+ .map(|(kind, _error)| kind)
+ .filter(|kind| {
+ // A leading `-` is only valid on numeric literals.
+ kind.is_literal()
+ && (!is_negated || matches!(kind, FLOAT_NUMBER | INT_NUMBER))
+ })
+ .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &lit));
+
+ res.push(kind);
+ }
+ tt::Leaf::Ident(ident) => match ident.text.as_ref() {
+ "_" => res.push(T![_]),
+ i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
+ _ => match SyntaxKind::from_keyword(&ident.text) {
+ Some(kind) => res.push(kind),
+ None => {
+ // Non-keyword idents are pushed as IDENT, carrying a
+ // contextual-keyword kind when one applies.
+ let contextual_keyword =
+ SyntaxKind::from_contextual_keyword(&ident.text)
+ .unwrap_or(SyntaxKind::IDENT);
+ res.push_ident(contextual_keyword);
+ }
+ },
+ },
+ tt::Leaf::Punct(punct) => {
+ let kind = SyntaxKind::from_char(punct.char)
+ .unwrap_or_else(|| panic!("{:#?} is not a valid punct", punct));
+ res.push(kind);
+ if punct.spacing == tt::Spacing::Joint {
+ // Joint spacing lets the parser glue `>` `>` into `>>` etc.
+ res.was_joint();
+ }
+ }
+ }
+ cursor.bump()
+ }
+ Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
+ // Entering a subtree: emit the opening bracket (if delimited).
+ if let Some(d) = subtree.delimiter_kind() {
+ res.push(match d {
+ tt::DelimiterKind::Parenthesis => T!['('],
+ tt::DelimiterKind::Brace => T!['{'],
+ tt::DelimiterKind::Bracket => T!['['],
+ });
+ }
+ cursor.subtree().unwrap()
+ }
+ None => match cursor.end() {
+ // Leaving a subtree: emit the closing bracket (if delimited).
+ Some(subtree) => {
+ if let Some(d) = subtree.delimiter_kind() {
+ res.push(match d {
+ tt::DelimiterKind::Parenthesis => T![')'],
+ tt::DelimiterKind::Brace => T!['}'],
+ tt::DelimiterKind::Bracket => T![']'],
+ })
+ }
+ cursor.bump()
+ }
+ None => continue,
+ },
+ };
+ }
+
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
new file mode 100644
index 000000000..c923e7a69
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
@@ -0,0 +1,113 @@
+//! Mapping between `TokenId`s and the token's position in macro definitions or inputs.
+
+use std::hash::Hash;
+
+use parser::{SyntaxKind, T};
+use syntax::{TextRange, TextSize};
+
+use crate::syntax_bridge::SyntheticTokenId;
+
+/// Relative range of a mapped token: either a plain token, or a delimiter
+/// pair where one range covers from the opening to the closing delimiter.
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+enum TokenTextRange {
+ Token(TextRange),
+ Delimiter(TextRange),
+}
+
+impl TokenTextRange {
+ /// Projects a delimiter range onto its one-byte opening or closing half,
+ /// chosen by the bracket `kind`; plain token ranges are returned as-is.
+ fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+ match self {
+ TokenTextRange::Token(it) => Some(it),
+ TokenTextRange::Delimiter(it) => match kind {
+ T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+ T!['}'] | T![')'] | T![']'] => {
+ // `TextSize::of('}')` is 1 byte: the last byte of the cover range.
+ Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+ }
+ _ => None,
+ },
+ }
+ }
+}
+
+/// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
+pub struct TokenMap {
+ /// Maps `tt::TokenId` to the *relative* source range.
+ entries: Vec<(tt::TokenId, TokenTextRange)>,
+ /// Pairs of token ids with the `SyntheticTokenId`s they correspond to.
+ pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
+}
+
+impl TokenMap {
+ /// Finds the token id whose recorded range matches `relative_range` exactly
+ /// (for delimiters, either the opening or the closing byte may match).
+ pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+ let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+ TokenTextRange::Token(it) => *it == relative_range,
+ TokenTextRange::Delimiter(it) => {
+ let open = TextRange::at(it.start(), 1.into());
+ let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+ open == relative_range || close == relative_range
+ }
+ })?;
+ Some(token_id)
+ }
+
+ /// All ranges recorded for `token_id`, narrowed by `kind` for delimiters.
+ pub fn ranges_by_token(
+ &self,
+ token_id: tt::TokenId,
+ kind: SyntaxKind,
+ ) -> impl Iterator<Item = TextRange> + '_ {
+ self.entries
+ .iter()
+ .filter(move |&&(tid, _)| tid == token_id)
+ .filter_map(move |(_, range)| range.by_kind(kind))
+ }
+
+ /// Looks up the synthetic-token id recorded for `token_id`, if any.
+ pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
+ self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
+ }
+
+ /// Convenience for the first match of [`Self::ranges_by_token`].
+ pub fn first_range_by_token(
+ &self,
+ token_id: tt::TokenId,
+ kind: SyntaxKind,
+ ) -> Option<TextRange> {
+ self.ranges_by_token(token_id, kind).next()
+ }
+
+ pub(crate) fn shrink_to_fit(&mut self) {
+ self.entries.shrink_to_fit();
+ self.synthetic_entries.shrink_to_fit();
+ }
+
+ pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+ self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+ }
+
+ pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
+ self.synthetic_entries.push((token_id, id));
+ }
+
+ /// Records a delimiter pair as a single covering range; returns the entry
+ /// index so the close side can later be adjusted or removed.
+ pub(crate) fn insert_delim(
+ &mut self,
+ token_id: tt::TokenId,
+ open_relative_range: TextRange,
+ close_relative_range: TextRange,
+ ) -> usize {
+ let res = self.entries.len();
+ let cover = open_relative_range.cover(close_relative_range);
+
+ self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
+ res
+ }
+
+ /// Extends the delimiter entry at `idx` to also cover `close_relative_range`.
+ pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+ let (_, token_text_range) = &mut self.entries[idx];
+ if let TokenTextRange::Delimiter(dim) = token_text_range {
+ let cover = dim.cover(close_relative_range);
+ *token_text_range = TokenTextRange::Delimiter(cover);
+ }
+ }
+
+ pub(crate) fn remove_delim(&mut self, idx: usize) {
+ // FIXME: This could be accidentally quadratic
+ self.entries.remove(idx);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
new file mode 100644
index 000000000..7aceb676c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -0,0 +1,160 @@
+//! A "Parser" structure for token trees. We use this when parsing a declarative
+//! macro definition into a list of patterns and templates.
+
+use syntax::SyntaxKind;
+use tt::buffer::TokenBuffer;
+
+use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
+
+/// Iterator over the direct children of a `tt::Subtree`, with `expect_*`
+/// helpers for pattern-style consumption of the next token tree.
+#[derive(Debug, Clone)]
+pub(crate) struct TtIter<'a> {
+ pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
+}
+
+impl<'a> TtIter<'a> {
+ pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> {
+ TtIter { inner: subtree.token_trees.iter() }
+ }
+
+ /// Consumes the next token tree if it is the punct `char`; `Err` otherwise.
+ pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ()> {
+ match self.next() {
+ Some(&tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. }))) if c == char => {
+ Ok(())
+ }
+ _ => Err(()),
+ }
+ }
+
+ /// Consumes the next token tree if it is a punct in `chars`.
+ pub(crate) fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> {
+ match self.next() {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. })))
+ if chars.contains(c) =>
+ {
+ Ok(())
+ }
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> {
+ match self.next() {
+ Some(tt::TokenTree::Subtree(it)) => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> {
+ match self.next() {
+ Some(tt::TokenTree::Leaf(it)) => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ /// Like [`Self::expect_ident_or_underscore`], but rejects the `_` ident.
+ pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> {
+ match self.expect_leaf()? {
+ tt::Leaf::Ident(it) if it.text != "_" => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> {
+ match self.expect_leaf()? {
+ tt::Leaf::Ident(it) => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ /// Accepts literal leaves, plus the idents `true`/`false` (bool literals).
+ pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
+ let it = self.expect_leaf()?;
+ match it {
+ tt::Leaf::Literal(_) => Ok(it),
+ tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_u32_literal(&mut self) -> Result<u32, ()> {
+ match self.expect_literal()? {
+ tt::Leaf::Literal(lit) => lit.text.parse().map_err(drop),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_punct(&mut self) -> Result<&'a tt::Punct, ()> {
+ match self.expect_leaf()? {
+ tt::Leaf::Punct(it) => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ /// Parses a whole grammar fragment (expr, ty, pat, ...) from the front of
+ /// the iterator by running the real parser over the remaining token trees,
+ /// consuming exactly the trees the parser accepted.
+ pub(crate) fn expect_fragment(
+ &mut self,
+ entry_point: parser::PrefixEntryPoint,
+ ) -> ExpandResult<Option<tt::TokenTree>> {
+ let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
+ let parser_input = to_parser_input(&buffer);
+ let tree_traversal = entry_point.parse(&parser_input);
+
+ // Replay the parser's steps over the buffer to find how far it got.
+ let mut cursor = buffer.begin();
+ let mut error = false;
+ for step in tree_traversal.iter() {
+ match step {
+ parser::Step::Token { kind, mut n_input_tokens } => {
+ if kind == SyntaxKind::LIFETIME_IDENT {
+ n_input_tokens = 2;
+ }
+ for _ in 0..n_input_tokens {
+ cursor = cursor.bump_subtree();
+ }
+ }
+ parser::Step::Enter { .. } | parser::Step::Exit => (),
+ parser::Step::Error { .. } => error = true,
+ }
+ }
+
+ // Stopping inside a subtree means the fragment did not parse cleanly.
+ let err = if error || !cursor.is_root() {
+ Some(ExpandError::binding_error(format!("expected {entry_point:?}")))
+ } else {
+ None
+ };
+
+ // Collect the token trees the parser consumed.
+ let mut curr = buffer.begin();
+ let mut res = vec![];
+
+ if cursor.is_root() {
+ while curr != cursor {
+ if let Some(token) = curr.token_tree() {
+ res.push(token);
+ }
+ curr = curr.bump();
+ }
+ }
+ // Advance `self` past the consumed trees.
+ self.inner = self.inner.as_slice()[res.len()..].iter();
+ let res = match res.len() {
+ 1 => Some(res[0].cloned()),
+ 0 => None,
+ // Multiple trees are wrapped into one delimiter-less subtree.
+ _ => Some(tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: None,
+ token_trees: res.into_iter().map(|it| it.cloned()).collect(),
+ })),
+ };
+ ExpandResult { value: res, err }
+ }
+
+ /// Peeks `n` token trees ahead without consuming anything.
+ pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> {
+ self.inner.as_slice().get(n)
+ }
+}
+
+impl<'a> Iterator for TtIter<'a> {
+ type Item = &'a tt::TokenTree;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+// Length is always exactly known since we iterate over a slice.
+impl<'a> std::iter::ExactSizeIterator for TtIter<'a> {}
diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml
new file mode 100644
index 000000000..a286a6bcd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "parser"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+drop_bomb = "0.1.5"
+rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+sourcegen = { path = "../sourcegen" }
diff --git a/src/tools/rust-analyzer/crates/parser/src/event.rs b/src/tools/rust-analyzer/crates/parser/src/event.rs
new file mode 100644
index 000000000..b0e70e794
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/event.rs
@@ -0,0 +1,133 @@
+//! This module provides a way to construct a `File`.
+//! It is intended to be completely decoupled from the
+//! parser, so as to allow to evolve the tree representation
+//! and the parser algorithm independently.
+//!
+//! The `TreeSink` trait is the bridge between the parser and the
+//! tree builder: the parser produces a stream of events like
+//! `start node`, `finish node`, and `FileBuilder` converts
+//! this stream to a real tree.
+use std::mem;
+
+use crate::{
+ output::Output,
+ SyntaxKind::{self, *},
+};
+
+/// `Parser` produces a flat list of `Event`s.
+/// They are converted to a tree-structure in
+/// a separate pass, via `TreeBuilder`.
+#[derive(Debug)]
+pub(crate) enum Event {
+ /// This event signifies the start of the node.
+ /// It should be either abandoned (in which case the
+ /// `kind` is `TOMBSTONE`, and the event is ignored),
+ /// or completed via a `Finish` event.
+ ///
+ /// All tokens between a `Start` and a `Finish` would
+ /// become the children of the respective node.
+ ///
+ /// For left-recursive syntactic constructs, the parser produces
+ /// a child node before it sees a parent. `forward_parent`
+ /// saves the position of current event's parent.
+ ///
+ /// Consider this path
+ ///
+ /// foo::bar
+ ///
+ /// The events for it would look like this:
+ ///
+ /// ```text
+ /// START(PATH) IDENT('foo') FINISH START(PATH) T![::] IDENT('bar') FINISH
+ /// | /\
+ /// | |
+ /// +------forward-parent------+
+ /// ```
+ ///
+ /// And the tree would look like this
+ ///
+ /// ```text
+ /// +--PATH---------+
+ /// | | |
+ /// | | |
+ /// | '::' 'bar'
+ /// |
+ /// PATH
+ /// |
+ /// 'foo'
+ /// ```
+ ///
+ /// See also `CompletedMarker::precede`.
+ Start {
+ kind: SyntaxKind,
+ /// Relative offset (in events) to this event's parent `Start`, if any.
+ forward_parent: Option<u32>,
+ },
+
+ /// Complete the previous `Start` event
+ Finish,
+
+ /// Produce a single leaf-element.
+ /// `n_raw_tokens` is used to glue complex contextual tokens.
+ /// For example, lexer tokenizes `>>` as `>`, `>`, and
+ /// `n_raw_tokens = 2` is used to produced a single `>>`.
+ Token {
+ kind: SyntaxKind,
+ n_raw_tokens: u8,
+ },
+
+ Error {
+ msg: String,
+ },
+}
+
+impl Event {
+ /// A placeholder `Start` that is ignored during tree construction.
+ pub(crate) fn tombstone() -> Self {
+ Event::Start { kind: TOMBSTONE, forward_parent: None }
+ }
+}
+
+/// Generate the syntax tree with the control of events.
+pub(super) fn process(mut events: Vec<Event>) -> Output {
+ let mut res = Output::default();
+ // Scratch buffer for the chain of forward parents, reused across events.
+ let mut forward_parents = Vec::new();
+
+ for i in 0..events.len() {
+ // Each processed event is replaced with a tombstone so that following
+ // the forward-parent chain never visits it twice.
+ match mem::replace(&mut events[i], Event::tombstone()) {
+ Event::Start { kind, forward_parent } => {
+ // For events[A, B, C], B is A's forward_parent, C is B's forward_parent,
+ // in the normal control flow, the parent-child relation: `A -> B -> C`,
+ // while with the magic forward_parent, it writes: `C <- B <- A`.
+
+ // append `A` into parents.
+ forward_parents.push(kind);
+ let mut idx = i;
+ let mut fp = forward_parent;
+ while let Some(fwd) = fp {
+ idx += fwd as usize;
+ // append `A`'s forward_parent `B`
+ fp = match mem::replace(&mut events[idx], Event::tombstone()) {
+ Event::Start { kind, forward_parent } => {
+ forward_parents.push(kind);
+ forward_parent
+ }
+ _ => unreachable!(),
+ };
+ // append `B`'s forward_parent `C` in the next stage.
+ }
+
+ // Enter the collected parents outermost-first.
+ for kind in forward_parents.drain(..).rev() {
+ if kind != TOMBSTONE {
+ res.enter_node(kind);
+ }
+ }
+ }
+ Event::Finish => res.leave_node(),
+ Event::Token { kind, n_raw_tokens } => {
+ res.token(kind, n_raw_tokens);
+ }
+ Event::Error { msg } => res.error(msg),
+ }
+ }
+
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
new file mode 100644
index 000000000..b74683296
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -0,0 +1,342 @@
+//! This is the actual "grammar" of the Rust language.
+//!
+//! Each function in this module and its children corresponds
+//! to a production of the formal grammar. Submodules roughly
+//! correspond to different *areas* of the grammar. By convention,
+//! each submodule starts with `use super::*` import and exports
+//! "public" productions via `pub(super)`.
+//!
+//! See docs for [`Parser`](super::parser::Parser) to learn about API,
+//! available to the grammar, and see docs for [`Event`](super::event::Event)
+//! to learn how this actually manages to produce parse trees.
+//!
+//! Code in this module also contains inline tests, which start with
+//! `// test name-of-the-test` comment and look like this:
+//!
+//! ```
+//! // test function_with_zero_parameters
+//! // fn foo() {}
+//! ```
+//!
+//! After adding a new inline-test, run `cargo test -p xtask` to
+//! extract it as a standalone text-fixture into
+//! `crates/syntax/test_data/parser/`, and run `cargo test` once to
+//! create the "gold" value.
+//!
+//! Coding convention: rules like `where_clause` always produce either a
+//! node or an error, rules like `opt_where_clause` may produce nothing.
+//! Non-opt rules typically start with `assert!(p.at(FIRST_TOKEN))`, the
+//! caller is responsible for branching on the first token.
+
+mod attributes;
+mod expressions;
+mod items;
+mod params;
+mod paths;
+mod patterns;
+mod generic_args;
+mod generic_params;
+mod types;
+
+use crate::{
+ parser::{CompletedMarker, Marker, Parser},
+ SyntaxKind::{self, *},
+ TokenSet, T,
+};
+
+/// Parser entry points, one per thing the parser can be asked to parse.
+pub(crate) mod entry {
+ use super::*;
+
+ /// Entry points that parse a *prefix* of the input and stop; used via
+ /// `parser::PrefixEntryPoint`.
+ pub(crate) mod prefix {
+ use super::*;
+
+ pub(crate) fn vis(p: &mut Parser<'_>) {
+ let _ = opt_visibility(p, false);
+ }
+
+ pub(crate) fn block(p: &mut Parser<'_>) {
+ expressions::block_expr(p);
+ }
+
+ pub(crate) fn stmt(p: &mut Parser<'_>) {
+ expressions::stmt(p, expressions::Semicolon::Forbidden);
+ }
+
+ pub(crate) fn pat(p: &mut Parser<'_>) {
+ patterns::pattern_single(p);
+ }
+
+ pub(crate) fn ty(p: &mut Parser<'_>) {
+ types::type_(p);
+ }
+ pub(crate) fn expr(p: &mut Parser<'_>) {
+ let _ = expressions::expr(p);
+ }
+ pub(crate) fn path(p: &mut Parser<'_>) {
+ let _ = paths::type_path(p);
+ }
+ pub(crate) fn item(p: &mut Parser<'_>) {
+ items::item_or_macro(p, true);
+ }
+ // Parse a meta item , which excluded [], e.g : #[ MetaItem ]
+ pub(crate) fn meta_item(p: &mut Parser<'_>) {
+ attributes::meta(p);
+ }
+ }
+
+ /// Entry points that must consume the *whole* input; trailing tokens are
+ /// wrapped into an `ERROR` node.
+ pub(crate) mod top {
+ use super::*;
+
+ pub(crate) fn source_file(p: &mut Parser<'_>) {
+ let m = p.start();
+ p.eat(SHEBANG);
+ items::mod_contents(p, false);
+ m.complete(p, SOURCE_FILE);
+ }
+
+ pub(crate) fn macro_stmts(p: &mut Parser<'_>) {
+ let m = p.start();
+
+ while !p.at(EOF) {
+ expressions::stmt(p, expressions::Semicolon::Optional);
+ }
+
+ m.complete(p, MACRO_STMTS);
+ }
+
+ pub(crate) fn macro_items(p: &mut Parser<'_>) {
+ let m = p.start();
+ items::mod_contents(p, false);
+ m.complete(p, MACRO_ITEMS);
+ }
+
+ pub(crate) fn pattern(p: &mut Parser<'_>) {
+ let m = p.start();
+ patterns::pattern_top(p);
+ // If the pattern consumed everything, no wrapper node is needed;
+ // otherwise the leftover tokens become an ERROR node.
+ if p.at(EOF) {
+ m.abandon(p);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
+
+ pub(crate) fn type_(p: &mut Parser<'_>) {
+ let m = p.start();
+ types::type_(p);
+ if p.at(EOF) {
+ m.abandon(p);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
+
+ pub(crate) fn expr(p: &mut Parser<'_>) {
+ let m = p.start();
+ expressions::expr(p);
+ if p.at(EOF) {
+ m.abandon(p);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
+
+ pub(crate) fn meta_item(p: &mut Parser<'_>) {
+ let m = p.start();
+ attributes::meta(p);
+ if p.at(EOF) {
+ m.abandon(p);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
+ }
+}
+
+/// Returns the grammar function able to re-parse a node of kind `node` in
+/// isolation, or `None` if that kind cannot be reparsed incrementally.
+/// `first_child`/`parent` disambiguate kinds whose grammar depends on context.
+pub(crate) fn reparser(
+ node: SyntaxKind,
+ first_child: Option<SyntaxKind>,
+ parent: Option<SyntaxKind>,
+) -> Option<fn(&mut Parser<'_>)> {
+ let res = match node {
+ BLOCK_EXPR => expressions::block_expr,
+ RECORD_FIELD_LIST => items::record_field_list,
+ RECORD_EXPR_FIELD_LIST => items::record_expr_field_list,
+ VARIANT_LIST => items::variant_list,
+ MATCH_ARM_LIST => items::match_arm_list,
+ USE_TREE_LIST => items::use_tree_list,
+ EXTERN_ITEM_LIST => items::extern_item_list,
+ TOKEN_TREE if first_child? == T!['{'] => items::token_tree,
+ ASSOC_ITEM_LIST => match parent? {
+ IMPL | TRAIT => items::assoc_item_list,
+ _ => return None,
+ },
+ ITEM_LIST => items::item_list,
+ _ => return None,
+ };
+ Some(res)
+}
+
+/// Whether an expression is "block-like" (ends in `}` and so can stand as a
+/// statement without a trailing semicolon).
+#[derive(Clone, Copy, PartialEq, Eq)]
+enum BlockLike {
+ Block,
+ NotBlock,
+}
+
+impl BlockLike {
+ fn is_block(self) -> bool {
+ self == BlockLike::Block
+ }
+}
+
+/// Parses an optional `VISIBILITY` node (`pub`, `pub(...)`, bare `crate`).
+/// Returns `true` if a visibility was consumed. `in_tuple_field` suppresses
+/// eating parens that actually belong to a tuple field type.
+fn opt_visibility(p: &mut Parser<'_>, in_tuple_field: bool) -> bool {
+ match p.current() {
+ T![pub] => {
+ let m = p.start();
+ p.bump(T![pub]);
+ if p.at(T!['(']) {
+ match p.nth(1) {
+ // test crate_visibility
+ // pub(crate) struct S;
+ // pub(self) struct S;
+ // pub(super) struct S;
+
+ // test pub_parens_typepath
+ // struct B(pub (super::A));
+ // struct B(pub (crate::A,));
+ T![crate] | T![self] | T![super] | T![ident] if p.nth(2) != T![:] => {
+ // If we are in a tuple struct, then the parens following `pub`
+ // might be an tuple field, not part of the visibility. So in that
+ // case we don't want to consume an identifier.
+
+ // test pub_tuple_field
+ // struct MyStruct(pub (u32, u32));
+ if !(in_tuple_field && matches!(p.nth(1), T![ident])) {
+ p.bump(T!['(']);
+ paths::use_path(p);
+ p.expect(T![')']);
+ }
+ }
+ // test crate_visibility_in
+ // pub(in super::A) struct S;
+ // pub(in crate) struct S;
+ T![in] => {
+ p.bump(T!['(']);
+ p.bump(T![in]);
+ paths::use_path(p);
+ p.expect(T![')']);
+ }
+ _ => (),
+ }
+ }
+ m.complete(p, VISIBILITY);
+ true
+ }
+ // test crate_keyword_vis
+ // crate fn main() { }
+ // struct S { crate field: u32 }
+ // struct T(crate u32);
+ T![crate] => {
+ if p.nth_at(1, T![::]) {
+ // test crate_keyword_path
+ // fn foo() { crate::foo(); }
+ return false;
+ }
+ let m = p.start();
+ p.bump(T![crate]);
+ m.complete(p, VISIBILITY);
+ true
+ }
+ _ => false,
+ }
+}
+
+/// Parses an optional `as name` / `as _` rename (in use trees and extern items).
+fn opt_rename(p: &mut Parser<'_>) {
+ if p.at(T![as]) {
+ let m = p.start();
+ p.bump(T![as]);
+ if !p.eat(T![_]) {
+ name(p);
+ }
+ m.complete(p, RENAME);
+ }
+}
+
+/// Parses an `extern` ABI specifier; the ABI string itself is optional.
+fn abi(p: &mut Parser<'_>) {
+ assert!(p.at(T![extern]));
+ let abi = p.start();
+ p.bump(T![extern]);
+ p.eat(STRING);
+ abi.complete(p, ABI);
+}
+
+/// Parses an optional `-> Type` return type; returns whether one was present.
+fn opt_ret_type(p: &mut Parser<'_>) -> bool {
+ if p.at(T![->]) {
+ let m = p.start();
+ p.bump(T![->]);
+ types::type_no_bounds(p);
+ m.complete(p, RET_TYPE);
+ true
+ } else {
+ false
+ }
+}
+
+/// Parses a `NAME` node, recovering on the given token set if absent.
+fn name_r(p: &mut Parser<'_>, recovery: TokenSet) {
+ if p.at(IDENT) {
+ let m = p.start();
+ p.bump(IDENT);
+ m.complete(p, NAME);
+ } else {
+ p.err_recover("expected a name", recovery);
+ }
+}
+
+/// Parses a `NAME` node with no recovery set.
+fn name(p: &mut Parser<'_>) {
+ name_r(p, TokenSet::EMPTY);
+}
+
+/// Parses a `NAME_REF` node (a use of a name, as opposed to a definition).
+fn name_ref(p: &mut Parser<'_>) {
+ if p.at(IDENT) {
+ let m = p.start();
+ p.bump(IDENT);
+ m.complete(p, NAME_REF);
+ } else {
+ p.err_and_bump("expected identifier");
+ }
+}
+
+/// Parses a `NAME_REF` that may also be an integer (tuple-field access like `x.0`).
+fn name_ref_or_index(p: &mut Parser<'_>) {
+ assert!(p.at(IDENT) || p.at(INT_NUMBER));
+ let m = p.start();
+ p.bump_any();
+ m.complete(p, NAME_REF);
+}
+
+/// Wraps a `LIFETIME_IDENT` token into a `LIFETIME` node.
+fn lifetime(p: &mut Parser<'_>) {
+ assert!(p.at(LIFETIME_IDENT));
+ let m = p.start();
+ p.bump(LIFETIME_IDENT);
+ m.complete(p, LIFETIME);
+}
+
+/// Reports `message` and consumes a whole `{ ... }` block as an `ERROR` node.
+fn error_block(p: &mut Parser<'_>, message: &str) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.error(message);
+ p.bump(T!['{']);
+ expressions::expr_block_contents(p);
+ p.eat(T!['}']);
+ m.complete(p, ERROR);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
new file mode 100644
index 000000000..0cf6a16f8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
@@ -0,0 +1,53 @@
+use super::*;
+
+/// Parses a run of inner attributes (`#![...]`).
+pub(super) fn inner_attrs(p: &mut Parser<'_>) {
+    while p.at(T![#]) && p.nth(1) == T![!] {
+        attr(p, true);
+    }
+}
+
+/// Parses a run of outer attributes (`#[...]`).
+pub(super) fn outer_attrs(p: &mut Parser<'_>) {
+    while p.at(T![#]) {
+        attr(p, false);
+    }
+}
+
+/// Parses a single attribute; `inner` selects `#![...]` vs `#[...]`.
+fn attr(p: &mut Parser<'_>, inner: bool) {
+    assert!(p.at(T![#]));
+
+    let attr = p.start();
+    p.bump(T![#]);
+
+    if inner {
+        p.bump(T![!]);
+    }
+
+    if p.eat(T!['[']) {
+        meta(p);
+
+        if !p.eat(T![']']) {
+            p.error("expected `]`");
+        }
+    } else {
+        p.error("expected `[`");
+    }
+    attr.complete(p, ATTR);
+}
+
+/// Parses the attribute payload (META node): a path, optionally followed
+/// by `= <expr>` (e.g. `doc = "..."`) or by a delimited token tree
+/// (e.g. `derive(Debug)`).
+pub(super) fn meta(p: &mut Parser<'_>) {
+    let meta = p.start();
+    paths::use_path(p);
+
+    match p.current() {
+        T![=] => {
+            p.bump(T![=]);
+            if !expressions::expr(p) {
+                p.error("expected expression");
+            }
+        }
+        T!['('] | T!['['] | T!['{'] => items::token_tree(p),
+        _ => {}
+    }
+
+    meta.complete(p, META);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
new file mode 100644
index 000000000..e7402104e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
@@ -0,0 +1,625 @@
+mod atom;
+
+use super::*;
+
+pub(crate) use self::atom::{block_expr, match_arm_list};
+pub(super) use self::atom::{literal, LITERAL_FIRST};
+
+/// Whether a statement must, may, or must not end with a semicolon.
+#[derive(PartialEq, Eq)]
+pub(super) enum Semicolon {
+    Required,
+    Optional,
+    Forbidden,
+}
+
+// An expression starts with the same tokens as its left-hand side.
+const EXPR_FIRST: TokenSet = LHS_FIRST;
+
+/// Parses a complete expression; returns `true` iff one was parsed.
+pub(super) fn expr(p: &mut Parser<'_>) -> bool {
+    let r = Restrictions { forbid_structs: false, prefer_stmt: false };
+    expr_bp(p, None, r, 1).is_some()
+}
+
+/// Parses an expression in statement position (block-like expressions
+/// such as `{}`/`if`/`match` may terminate the statement without `;`).
+pub(super) fn expr_stmt(
+    p: &mut Parser<'_>,
+    m: Option<Marker>,
+) -> Option<(CompletedMarker, BlockLike)> {
+    let r = Restrictions { forbid_structs: false, prefer_stmt: true };
+    expr_bp(p, m, r, 1)
+}
+
+/// Parses an expression where a struct literal is not allowed, e.g. the
+/// condition of `if`/`while`/`match`, where `S {` must start the block.
+fn expr_no_struct(p: &mut Parser<'_>) {
+    let r = Restrictions { forbid_structs: true, prefer_stmt: false };
+    expr_bp(p, None, r, 1);
+}
+
+/// Parses the expression in `let pattern = expression`.
+/// It needs to be parsed with lower precedence than `&&`, so that
+/// `if let true = true && false` is parsed as `if (let true = true) && (false)`
+/// and not `if let true = (true && false)`.
+fn expr_let(p: &mut Parser<'_>) {
+    let r = Restrictions { forbid_structs: true, prefer_stmt: false };
+    // Minimum binding power 5: one above `&&`'s power of 4 in `current_op`.
+    expr_bp(p, None, r, 5);
+}
+
+/// Parses one statement: a bare `;`, a `let` statement, an item, or an
+/// expression statement. `semicolon` controls whether the trailing `;`
+/// is required, optional, or forbidden for this position.
+pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
+    if p.eat(T![;]) {
+        return;
+    }
+
+    let m = p.start();
+    // test attr_on_expr_stmt
+    // fn foo() {
+    //     #[A] foo();
+    //     #[B] bar!{}
+    //     #[C] #[D] {}
+    //     #[D] return ();
+    // }
+    attributes::outer_attrs(p);
+
+    if p.at(T![let]) {
+        let_stmt(p, m, semicolon);
+        return;
+    }
+
+    // test block_items
+    // fn a() { fn b() {} }
+    // Try an item first; on failure we get the marker back for an expression.
+    let m = match items::opt_item(p, m) {
+        Ok(()) => return,
+        Err(m) => m,
+    };
+
+    if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) {
+        // A trailing expression before `}` (or EOF when semicolons are not
+        // required) is the block's value, not an EXPR_STMT.
+        if !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) {
+            // test no_semi_after_block
+            // fn foo() {
+            //     if true {}
+            //     loop {}
+            //     match () {}
+            //     while true {}
+            //     for _ in () {}
+            //     {}
+            //     {}
+            //     macro_rules! test {
+            //          () => {}
+            //     }
+            //     test!{}
+            // }
+            let m = cm.precede(p);
+            match semicolon {
+                Semicolon::Required => {
+                    // Block-like expressions may omit the `;`; others must have it.
+                    if blocklike.is_block() {
+                        p.eat(T![;]);
+                    } else {
+                        p.expect(T![;]);
+                    }
+                }
+                Semicolon::Optional => {
+                    p.eat(T![;]);
+                }
+                Semicolon::Forbidden => (),
+            }
+            m.complete(p, EXPR_STMT);
+        }
+    }
+
+    // test let_stmt
+    // fn f() { let x: i32 = 92; }
+    // Parses `let <pat> (: <ty>)? (= <expr>)? (else <block>)? ;`.
+    fn let_stmt(p: &mut Parser<'_>, m: Marker, with_semi: Semicolon) {
+        p.bump(T![let]);
+        patterns::pattern(p);
+        if p.at(T![:]) {
+            // test let_stmt_ascription
+            // fn f() { let x: i32; }
+            types::ascription(p);
+        }
+        if p.eat(T![=]) {
+            // test let_stmt_init
+            // fn f() { let x = 92; }
+            expressions::expr(p);
+        }
+
+        if p.at(T![else]) {
+            // test let_else
+            // fn f() { let Some(x) = opt else { return }; }
+
+            let m = p.start();
+            p.bump(T![else]);
+            block_expr(p);
+            m.complete(p, LET_ELSE);
+        }
+
+        match with_semi {
+            Semicolon::Forbidden => (),
+            Semicolon::Optional => {
+                p.eat(T![;]);
+            }
+            Semicolon::Required => {
+                p.expect(T![;]);
+            }
+        }
+        m.complete(p, LET_STMT);
+    }
+}
+
+/// Parses the interior of a block: optional inner attributes followed
+/// by statements until `}` or EOF.
+pub(super) fn expr_block_contents(p: &mut Parser<'_>) {
+    attributes::inner_attrs(p);
+
+    while !p.at(EOF) && !p.at(T!['}']) {
+        // test nocontentexpr
+        // fn foo(){
+        //     ;;;some_expr();;;;{;;;};;;;Ok(())
+        // }
+
+        // test nocontentexpr_after_item
+        // fn simple_function() {
+        //     enum LocalEnum {
+        //         One,
+        //         Two,
+        //     };
+        //     fn f() {};
+        //     struct S {};
+        // }
+        stmt(p, Semicolon::Required);
+    }
+}
+
+/// Context flags threaded through expression parsing.
+#[derive(Clone, Copy)]
+struct Restrictions {
+    // Struct literals are not allowed (e.g. in `if`/`while` conditions).
+    forbid_structs: bool,
+    // In statement position: a leading block-like expression ends the
+    // statement instead of becoming the LHS of a binary expression.
+    prefer_stmt: bool,
+}
+
+/// Binding powers of operators for a Pratt parser.
+///
+/// See <https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html>
+///
+/// Returns `(binding_power, operator_kind)` for the token(s) at the
+/// current position, resolving glued tokens (e.g. `>` + `>` as `>>`).
+#[rustfmt::skip]
+fn current_op(p: &Parser<'_>) -> (u8, SyntaxKind) {
+    // Binding power 0 never satisfies `op_bp >= bp` (bp starts at 1),
+    // so the caller stops; `T![@]` is just a placeholder kind.
+    const NOT_AN_OP: (u8, SyntaxKind) = (0, T![@]);
+    match p.current() {
+        T![|] if p.at(T![||])  => (3,  T![||]),
+        T![|] if p.at(T![|=])  => (1,  T![|=]),
+        T![|]                  => (6,  T![|]),
+        T![>] if p.at(T![>>=]) => (1,  T![>>=]),
+        T![>] if p.at(T![>>])  => (9,  T![>>]),
+        T![>] if p.at(T![>=])  => (5,  T![>=]),
+        T![>]                  => (5,  T![>]),
+        T![=] if p.at(T![=>])  => NOT_AN_OP,
+        T![=] if p.at(T![==])  => (5,  T![==]),
+        T![=]                  => (1,  T![=]),
+        T![<] if p.at(T![<=])  => (5,  T![<=]),
+        T![<] if p.at(T![<<=]) => (1,  T![<<=]),
+        T![<] if p.at(T![<<])  => (9,  T![<<]),
+        T![<]                  => (5,  T![<]),
+        T![+] if p.at(T![+=])  => (1,  T![+=]),
+        T![+]                  => (10, T![+]),
+        T![^] if p.at(T![^=])  => (1,  T![^=]),
+        T![^]                  => (7,  T![^]),
+        T![%] if p.at(T![%=])  => (1,  T![%=]),
+        T![%]                  => (11, T![%]),
+        T![&] if p.at(T![&=])  => (1,  T![&=]),
+        // If you update this, remember to update `expr_let()` too.
+        T![&] if p.at(T![&&])  => (4,  T![&&]),
+        T![&]                  => (8,  T![&]),
+        T![/] if p.at(T![/=])  => (1,  T![/=]),
+        T![/]                  => (11, T![/]),
+        T![*] if p.at(T![*=])  => (1,  T![*=]),
+        T![*]                  => (11, T![*]),
+        T![.] if p.at(T![..=]) => (2,  T![..=]),
+        T![.] if p.at(T![..])  => (2,  T![..]),
+        T![!] if p.at(T![!=])  => (5,  T![!=]),
+        T![-] if p.at(T![-=])  => (1,  T![-=]),
+        T![-]                  => (10, T![-]),
+        T![as]                 => (12, T![as]),
+
+        _                      => NOT_AN_OP
+    }
+}
+
+// Parses expression with binding power of at least bp.
+// This is the Pratt-parser core: parse a LHS, then repeatedly fold in
+// binary/range/cast operators whose binding power is >= `bp`.
+fn expr_bp(
+    p: &mut Parser<'_>,
+    m: Option<Marker>,
+    mut r: Restrictions,
+    bp: u8,
+) -> Option<(CompletedMarker, BlockLike)> {
+    let m = m.unwrap_or_else(|| {
+        let m = p.start();
+        attributes::outer_attrs(p);
+        m
+    });
+    let mut lhs = match lhs(p, r) {
+        Some((lhs, blocklike)) => {
+            let lhs = lhs.extend_to(p, m);
+            if r.prefer_stmt && blocklike.is_block() {
+                // test stmt_bin_expr_ambiguity
+                // fn f() {
+                //     let _ = {1} & 2;
+                //     {1} &2;
+                // }
+                return Some((lhs, BlockLike::Block));
+            }
+            lhs
+        }
+        None => {
+            m.abandon(p);
+            return None;
+        }
+    };
+
+    loop {
+        let is_range = p.at(T![..]) || p.at(T![..=]);
+        let (op_bp, op) = current_op(p);
+        if op_bp < bp {
+            break;
+        }
+        // test as_precedence
+        // fn f() { let _ = &1 as *const i32; }
+        // `as` takes a type, not an expression, so it is handled specially.
+        if p.at(T![as]) {
+            lhs = cast_expr(p, lhs);
+            continue;
+        }
+        let m = lhs.precede(p);
+        p.bump(op);
+
+        // test binop_resets_statementness
+        // fn f() { v = {1}&2; }
+        r = Restrictions { prefer_stmt: false, ..r };
+
+        if is_range {
+            // test postfix_range
+            // fn foo() {
+            //     let x = 1..;
+            //     match 1.. { _ => () };
+            //     match a.b()..S { _ => () };
+            // }
+            let has_trailing_expression =
+                p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{']));
+            if !has_trailing_expression {
+                // no RHS
+                lhs = m.complete(p, RANGE_EXPR);
+                break;
+            }
+        }
+
+        // `op_bp + 1` as the RHS minimum makes operators left-associative.
+        expr_bp(p, None, Restrictions { prefer_stmt: false, ..r }, op_bp + 1);
+        lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
+    }
+    Some((lhs, BlockLike::NotBlock))
+}
+
+// Tokens that can start a left-hand side: any atom, or a prefix operator.
+const LHS_FIRST: TokenSet =
+    atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]]));
+
+/// Parses the left-hand side of an expression: prefix operators
+/// (`&`, `*`, `!`, `-`), a prefix range (`..x`), or an atom followed by
+/// its postfix operators.
+fn lhs(p: &mut Parser<'_>, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
+    let m;
+    let kind = match p.current() {
+        // test ref_expr
+        // fn foo() {
+        //     // reference operator
+        //     let _ = &1;
+        //     let _ = &mut &f();
+        //     let _ = &raw;
+        //     let _ = &raw.0;
+        //     // raw reference operator
+        //     let _ = &raw mut foo;
+        //     let _ = &raw const foo;
+        //     }
+        T![&] => {
+            m = p.start();
+            p.bump(T![&]);
+            // `raw` is contextual: only `&raw mut`/`&raw const` is the raw
+            // reference operator; plain `&raw` is a reference to a variable.
+            if p.at_contextual_kw(T![raw]) && (p.nth_at(1, T![mut]) || p.nth_at(1, T![const])) {
+                p.bump_remap(T![raw]);
+                p.bump_any();
+            } else {
+                p.eat(T![mut]);
+            }
+            REF_EXPR
+        }
+        // test unary_expr
+        // fn foo() {
+        //     **&1;
+        //     !!true;
+        //     --1;
+        // }
+        T![*] | T![!] | T![-] => {
+            m = p.start();
+            p.bump_any();
+            PREFIX_EXPR
+        }
+        _ => {
+            // test full_range_expr
+            // fn foo() { xs[..]; }
+            // Check `..=` before `..` so the longer token wins.
+            for op in [T![..=], T![..]] {
+                if p.at(op) {
+                    m = p.start();
+                    p.bump(op);
+                    if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) {
+                        expr_bp(p, None, r, 2);
+                    }
+                    let cm = m.complete(p, RANGE_EXPR);
+                    return Some((cm, BlockLike::NotBlock));
+                }
+            }
+
+            // test expression_after_block
+            // fn foo() {
+            //     let mut p = F{x: 5};
+            //     {p}.x = 10;
+            // }
+            let (lhs, blocklike) = atom::atom_expr(p, r)?;
+            let (cm, block_like) =
+                postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block()));
+            return Some((cm, block_like));
+        }
+    };
+    // parse the interior of the unary expression
+    // (binding power 255: prefix operators bind tighter than any binary op)
+    expr_bp(p, None, r, 255);
+    let cm = m.complete(p, kind);
+    Some((cm, BlockLike::NotBlock))
+}
+
+/// Parses a chain of postfix operators (calls, indexing, field/method
+/// access, `.await`, `?`) applied to an already-parsed `lhs`.
+fn postfix_expr(
+    p: &mut Parser<'_>,
+    mut lhs: CompletedMarker,
+    // Calls are disallowed if the type is a block and we prefer statements because the call cannot be disambiguated from a tuple
+    // E.g. `while true {break}();` is parsed as
+    // `while true {break}; ();`
+    mut block_like: BlockLike,
+    mut allow_calls: bool,
+) -> (CompletedMarker, BlockLike) {
+    loop {
+        lhs = match p.current() {
+            // test stmt_postfix_expr_ambiguity
+            // fn foo() {
+            //     match () {
+            //         _ => {}
+            //         () => {}
+            //         [] => {}
+            //     }
+            // }
+            T!['('] if allow_calls => call_expr(p, lhs),
+            T!['['] if allow_calls => index_expr(p, lhs),
+            T![.] => match postfix_dot_expr(p, lhs) {
+                Ok(it) => it,
+                Err(it) => {
+                    lhs = it;
+                    break;
+                }
+            },
+            T![?] => try_expr(p, lhs),
+            _ => break,
+        };
+        // Once any postfix operator is applied, the whole expression is no
+        // longer block-like, and further calls are unambiguous.
+        allow_calls = true;
+        block_like = BlockLike::NotBlock;
+    }
+    return (lhs, block_like);
+
+    // Dispatches `lhs.<something>`: method call, `.await`, or field access.
+    // Returns `Err(lhs)` when the dot actually starts a range operator.
+    fn postfix_dot_expr(
+        p: &mut Parser<'_>,
+        lhs: CompletedMarker,
+    ) -> Result<CompletedMarker, CompletedMarker> {
+        assert!(p.at(T![.]));
+        // Method call requires `.ident(` or `.ident::` (turbofish) lookahead.
+        if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) {
+            return Ok(method_call_expr(p, lhs));
+        }
+
+        // test await_expr
+        // fn foo() {
+        //     x.await;
+        //     x.0.await;
+        //     x.0().await?.hello();
+        // }
+        if p.nth(1) == T![await] {
+            let m = lhs.precede(p);
+            p.bump(T![.]);
+            p.bump(T![await]);
+            return Ok(m.complete(p, AWAIT_EXPR));
+        }
+
+        // The `.` is the start of `..`/`..=` — hand back to the Pratt loop.
+        if p.at(T![..=]) || p.at(T![..]) {
+            return Err(lhs);
+        }
+
+        Ok(field_expr(p, lhs))
+    }
+}
+
+// test call_expr
+// fn foo() {
+//     let _ = f();
+//     let _ = f()(1)(1, 2,);
+//     let _ = f(<Foo>::func());
+//     f(<Foo as Trait>::func());
+// }
+/// Parses `lhs(args)`.
+fn call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+    assert!(p.at(T!['(']));
+    let m = lhs.precede(p);
+    arg_list(p);
+    m.complete(p, CALL_EXPR)
+}
+
+// test index_expr
+// fn foo() {
+//     x[1][2];
+// }
+/// Parses `lhs[index]`.
+fn index_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+    assert!(p.at(T!['[']));
+    let m = lhs.precede(p);
+    p.bump(T!['[']);
+    expr(p);
+    p.expect(T![']']);
+    m.complete(p, INDEX_EXPR)
+}
+
+// test method_call_expr
+// fn foo() {
+//     x.foo();
+//     y.bar::<T>(1, 2,);
+// }
+/// Parses `lhs.name::<...>(args)`; the caller has verified the lookahead.
+fn method_call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+    assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])));
+    let m = lhs.precede(p);
+    p.bump_any();
+    name_ref(p);
+    generic_args::opt_generic_arg_list(p, true);
+    if p.at(T!['(']) {
+        arg_list(p);
+    }
+    m.complete(p, METHOD_CALL_EXPR)
+}
+
+// test field_expr
+// fn foo() {
+//     x.foo;
+//     x.0.bar;
+//     x.0();
+// }
+/// Parses `lhs.field` where `field` is a name or a tuple index.
+fn field_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+    assert!(p.at(T![.]));
+    let m = lhs.precede(p);
+    p.bump(T![.]);
+    if p.at(IDENT) || p.at(INT_NUMBER) {
+        name_ref_or_index(p);
+    } else if p.at(FLOAT_NUMBER) {
+        // `x.0.1` lexes `0.1` as one float token; accept it as-is for now.
+        // FIXME: How to recover and instead parse INT + T![.]?
+        p.bump_any();
+    } else {
+        p.error("expected field name or number");
+    }
+    m.complete(p, FIELD_EXPR)
+}
+
+// test try_expr
+// fn foo() {
+//     x?;
+// }
+/// Parses the `?` postfix operator.
+fn try_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+    assert!(p.at(T![?]));
+    let m = lhs.precede(p);
+    p.bump(T![?]);
+    m.complete(p, TRY_EXPR)
+}
+
+// test cast_expr
+// fn foo() {
+//     82 as i32;
+//     81 as i8 + 1;
+//     79 as i16 - 1;
+//     0x36 as u8 <= 0x37;
+// }
+/// Parses `lhs as Type`.
+fn cast_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+    assert!(p.at(T![as]));
+    let m = lhs.precede(p);
+    p.bump(T![as]);
+    // Use type_no_bounds(), because cast expressions are not
+    // allowed to have bounds.
+    types::type_no_bounds(p);
+    m.complete(p, CAST_EXPR)
+}
+
+/// Parses a parenthesized, comma-separated argument list.
+fn arg_list(p: &mut Parser<'_>) {
+    assert!(p.at(T!['(']));
+    let m = p.start();
+    p.bump(T!['(']);
+    while !p.at(T![')']) && !p.at(EOF) {
+        // test arg_with_attr
+        // fn main() {
+        //     foo(#[attr] 92)
+        // }
+        if !expr(p) {
+            break;
+        }
+        if !p.at(T![')']) && !p.expect(T![,]) {
+            break;
+        }
+    }
+    p.eat(T![')']);
+    m.complete(p, ARG_LIST);
+}
+
+// test path_expr
+// fn foo() {
+//     let _ = a;
+//     let _ = a::b;
+//     let _ = ::a::<b>;
+//     let _ = format!();
+// }
+/// Parses an expression starting with a path: a plain path, a struct
+/// literal `Path { ... }` (unless forbidden), or a macro call `path!...`.
+fn path_expr(p: &mut Parser<'_>, r: Restrictions) -> (CompletedMarker, BlockLike) {
+    assert!(paths::is_path_start(p));
+    let m = p.start();
+    paths::expr_path(p);
+    match p.current() {
+        T!['{'] if !r.forbid_structs => {
+            record_expr_field_list(p);
+            (m.complete(p, RECORD_EXPR), BlockLike::NotBlock)
+        }
+        // `!=` would glue here; exclude it so `path != x` stays a comparison.
+        T![!] if !p.at(T![!=]) => {
+            let block_like = items::macro_call_after_excl(p);
+            (m.complete(p, MACRO_CALL).precede(p).complete(p, MACRO_EXPR), block_like)
+        }
+        _ => (m.complete(p, PATH_EXPR), BlockLike::NotBlock),
+    }
+}
+
+// test record_lit
+// fn foo() {
+//     S {};
+//     S { x, y: 32, };
+//     S { x, y: 32, ..Default::default() };
+//     TupleStruct { 0: 1 };
+// }
+/// Parses the `{ field: value, .., ..rest }` part of a struct literal.
+pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
+    assert!(p.at(T!['{']));
+    let m = p.start();
+    p.bump(T!['{']);
+    while !p.at(EOF) && !p.at(T!['}']) {
+        let m = p.start();
+        // test record_literal_field_with_attr
+        // fn main() {
+        //     S { #[cfg(test)] field: 1 }
+        // }
+        attributes::outer_attrs(p);
+
+        match p.current() {
+            IDENT | INT_NUMBER => {
+                // test_err record_literal_before_ellipsis_recovery
+                // fn main() {
+                //     S { field ..S::default() }
+                // }
+                // `field: value` and shorthand `field` both go through
+                // `expr(p)`; the name is only split out when `:` (or the
+                // erroneous `..`) follows.
+                if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) {
+                    name_ref_or_index(p);
+                    p.expect(T![:]);
+                }
+                expr(p);
+                m.complete(p, RECORD_EXPR_FIELD);
+            }
+            T![.] if p.at(T![..]) => {
+                m.abandon(p);
+                p.bump(T![..]);
+
+                // test destructuring_assignment_struct_rest_pattern
+                // fn foo() {
+                //     S { .. } = S {};
+                // }
+
+                // We permit `.. }` on the left-hand side of a destructuring assignment.
+                if !p.at(T!['}']) {
+                    expr(p);
+                }
+            }
+            T!['{'] => {
+                error_block(p, "expected a field");
+                m.abandon(p);
+            }
+            _ => {
+                p.err_and_bump("expected identifier");
+                m.abandon(p);
+            }
+        }
+        if !p.at(T!['}']) {
+            p.expect(T![,]);
+        }
+    }
+    p.expect(T!['}']);
+    m.complete(p, RECORD_EXPR_FIELD_LIST);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
new file mode 100644
index 000000000..99f42a266
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
@@ -0,0 +1,643 @@
+use super::*;
+
+// test expr_literals
+// fn foo() {
+//     let _ = true;
+//     let _ = false;
+//     let _ = 1;
+//     let _ = 2.0;
+//     let _ = b'a';
+//     let _ = 'b';
+//     let _ = "c";
+//     let _ = r"d";
+//     let _ = b"e";
+//     let _ = br"f";
+// }
+// Tokens that can start a literal expression.
+pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
+    T![true],
+    T![false],
+    INT_NUMBER,
+    FLOAT_NUMBER,
+    BYTE,
+    CHAR,
+    STRING,
+    BYTE_STRING,
+]);
+
+/// Parses a single-token literal, or returns `None` if the current
+/// token cannot be one.
+pub(crate) fn literal(p: &mut Parser<'_>) -> Option<CompletedMarker> {
+    if !p.at_ts(LITERAL_FIRST) {
+        return None;
+    }
+    let m = p.start();
+    p.bump_any();
+    Some(m.complete(p, LITERAL))
+}
+
+// E.g. for after the break in `if break {}`, this should not match
+pub(super) const ATOM_EXPR_FIRST: TokenSet =
+    LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
+        T!['('],
+        T!['{'],
+        T!['['],
+        T![|],
+        T![move],
+        T![box],
+        T![if],
+        T![while],
+        T![match],
+        T![unsafe],
+        T![return],
+        T![yield],
+        T![break],
+        T![continue],
+        T![async],
+        T![try],
+        T![const],
+        T![loop],
+        T![for],
+        LIFETIME_IDENT,
+    ]));
+
+// Tokens we do not consume during error recovery inside expressions.
+const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![let]]);
+
+/// Parses an atomic (non-operator) expression: a literal, a path-based
+/// expression, or one of the keyword/bracket forms (blocks, control
+/// flow, closures, labels, ...). Returns the completed node plus whether
+/// it is block-like; `None` if no expression could be parsed.
+pub(super) fn atom_expr(
+    p: &mut Parser<'_>,
+    r: Restrictions,
+) -> Option<(CompletedMarker, BlockLike)> {
+    if let Some(m) = literal(p) {
+        return Some((m, BlockLike::NotBlock));
+    }
+    if paths::is_path_start(p) {
+        return Some(path_expr(p, r));
+    }
+    // One token of lookahead disambiguates most keyword forms below.
+    let la = p.nth(1);
+    let done = match p.current() {
+        T!['('] => tuple_expr(p),
+        T!['['] => array_expr(p),
+        T![if] => if_expr(p),
+        T![let] => let_expr(p),
+        T![_] => {
+            // test destructuring_assignment_wildcard_pat
+            // fn foo() {
+            //     _ = 1;
+            //     Some(_) = None;
+            // }
+            let m = p.start();
+            p.bump(T![_]);
+            m.complete(p, UNDERSCORE_EXPR)
+        }
+        T![loop] => loop_expr(p, None),
+        T![box] => box_expr(p, None),
+        T![while] => while_expr(p, None),
+        T![try] => try_block_expr(p, None),
+        T![match] => match_expr(p),
+        T![return] => return_expr(p),
+        T![yield] => yield_expr(p),
+        T![continue] => continue_expr(p),
+        T![break] => break_expr(p, r),
+
+        // A lifetime followed by `:` is a label on a loop or block.
+        LIFETIME_IDENT if la == T![:] => {
+            let m = p.start();
+            label(p);
+            match p.current() {
+                T![loop] => loop_expr(p, Some(m)),
+                T![for] => for_expr(p, Some(m)),
+                T![while] => while_expr(p, Some(m)),
+                // test labeled_block
+                // fn f() { 'label: {}; }
+                T!['{'] => {
+                    stmt_list(p);
+                    m.complete(p, BLOCK_EXPR)
+                }
+                _ => {
+                    // test_err misplaced_label_err
+                    // fn main() {
+                    //     'loop: impl
+                    // }
+                    p.error("expected a loop");
+                    m.complete(p, ERROR);
+                    return None;
+                }
+            }
+        }
+        // test effect_blocks
+        // fn f() { unsafe { } }
+        // fn f() { const { } }
+        // fn f() { async { } }
+        // fn f() { async move { } }
+        T![const] | T![unsafe] | T![async] if la == T!['{'] => {
+            let m = p.start();
+            p.bump_any();
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
+        T![async] if la == T![move] && p.nth(2) == T!['{'] => {
+            let m = p.start();
+            p.bump(T![async]);
+            p.eat(T![move]);
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
+        T!['{'] => {
+            // test for_range_from
+            // fn foo() {
+            //    for x in 0 .. {
+            //        break;
+            //    }
+            // }
+            let m = p.start();
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
+
+        T![static] | T![async] | T![move] | T![|] => closure_expr(p),
+        // `for<...>` starts a higher-ranked closure, plain `for` a loop.
+        T![for] if la == T![<] => closure_expr(p),
+        T![for] => for_expr(p, None),
+
+        _ => {
+            p.err_recover("expected expression", EXPR_RECOVERY_SET);
+            return None;
+        }
+    };
+    let blocklike = match done.kind() {
+        IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR => BlockLike::Block,
+        _ => BlockLike::NotBlock,
+    };
+    Some((done, blocklike))
+}
+
+// test tuple_expr
+// fn foo() {
+//     ();
+//     (1);
+//     (1,);
+// }
+/// Parses `(...)`: a PAREN_EXPR for exactly one expression with no
+/// trailing comma, otherwise a TUPLE_EXPR.
+fn tuple_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T!['(']));
+    let m = p.start();
+    p.expect(T!['(']);
+
+    let mut saw_comma = false;
+    let mut saw_expr = false;
+    while !p.at(EOF) && !p.at(T![')']) {
+        saw_expr = true;
+
+        // test tuple_attrs
+        // const A: (i64, i64) = (1, #[cfg(test)] 2);
+        if !expr(p) {
+            break;
+        }
+
+        if !p.at(T![')']) {
+            saw_comma = true;
+            p.expect(T![,]);
+        }
+    }
+    p.expect(T![')']);
+    m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR })
+}
+
+// test array_expr
+// fn foo() {
+//     [];
+//     [1];
+//     [1, 2,];
+//     [1; 2];
+// }
+/// Parses `[a, b, c]` or the repeat form `[elem; len]`.
+fn array_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T!['[']));
+    let m = p.start();
+
+    let mut n_exprs = 0u32;
+    let mut has_semi = false;
+
+    p.bump(T!['[']);
+    while !p.at(EOF) && !p.at(T![']']) {
+        n_exprs += 1;
+
+        // test array_attrs
+        // const A: &[i64] = &[1, #[cfg(test)] 2];
+        if !expr(p) {
+            break;
+        }
+
+        // `;` is only meaningful right after the first expression.
+        if n_exprs == 1 && p.eat(T![;]) {
+            has_semi = true;
+            continue;
+        }
+
+        // After `[elem; len]` exactly one more expression is expected.
+        if has_semi || !p.at(T![']']) && !p.expect(T![,]) {
+            break;
+        }
+    }
+    p.expect(T![']']);
+
+    m.complete(p, ARRAY_EXPR)
+}
+
+// test lambda_expr
+// fn foo() {
+//     || ();
+//     || -> i32 { 92 };
+//     |x| x;
+//     move |x: i32,| x;
+//     async || {};
+//     move || {};
+//     async move || {};
+//     static || {};
+//     static move || {};
+//     static async || {};
+//     static async move || {};
+//     for<'a> || {};
+//     for<'a> move || {};
+// }
+/// Parses a closure: optional `for<...>` binder, then any of
+/// `static`/`async`/`move`, a `|params|` list, and a body. A body with
+/// an explicit return type must be a block.
+fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(match p.current() {
+        T![static] | T![async] | T![move] | T![|] => true,
+        T![for] => p.nth(1) == T![<],
+        _ => false,
+    });
+
+    let m = p.start();
+
+    if p.at(T![for]) {
+        types::for_binder(p);
+    }
+
+    p.eat(T![static]);
+    p.eat(T![async]);
+    p.eat(T![move]);
+
+    if !p.at(T![|]) {
+        p.error("expected `|`");
+        return m.complete(p, CLOSURE_EXPR);
+    }
+    params::param_list_closure(p);
+    if opt_ret_type(p) {
+        // test lambda_ret_block
+        // fn main() { || -> i32 { 92 }(); }
+        block_expr(p);
+    } else if p.at_ts(EXPR_FIRST) {
+        expr(p);
+    } else {
+        p.error("expected expression");
+    }
+    m.complete(p, CLOSURE_EXPR)
+}
+
+// test if_expr
+// fn foo() {
+//     if true {};
+//     if true {} else {};
+//     if true {} else if false {} else {};
+//     if S {};
+//     if { true } { } else { };
+// }
+/// Parses `if cond { } (else if ... | else { })?`; `else if` chains are
+/// parsed recursively as nested IF_EXPRs.
+fn if_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T![if]));
+    let m = p.start();
+    p.bump(T![if]);
+    expr_no_struct(p);
+    block_expr(p);
+    if p.at(T![else]) {
+        p.bump(T![else]);
+        if p.at(T![if]) {
+            if_expr(p);
+        } else {
+            block_expr(p);
+        }
+    }
+    m.complete(p, IF_EXPR)
+}
+
+// test label
+// fn foo() {
+//     'a: loop {}
+//     'b: while true {}
+//     'c: for x in () {}
+// }
+/// Parses a `'label:` prefix.
+fn label(p: &mut Parser<'_>) {
+    assert!(p.at(LIFETIME_IDENT) && p.nth(1) == T![:]);
+    let m = p.start();
+    lifetime(p);
+    p.bump_any();
+    m.complete(p, LABEL);
+}
+
+// test loop_expr
+// fn foo() {
+//     loop {};
+// }
+/// Parses `loop { }`; `m`, when given, already contains the label.
+fn loop_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+    assert!(p.at(T![loop]));
+    let m = m.unwrap_or_else(|| p.start());
+    p.bump(T![loop]);
+    block_expr(p);
+    m.complete(p, LOOP_EXPR)
+}
+
+// test while_expr
+// fn foo() {
+//     while true {};
+//     while let Some(x) = it.next() {};
+//     while { true } {};
+// }
+/// Parses `while cond { }`; `m`, when given, already contains the label.
+fn while_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+    assert!(p.at(T![while]));
+    let m = m.unwrap_or_else(|| p.start());
+    p.bump(T![while]);
+    expr_no_struct(p);
+    block_expr(p);
+    m.complete(p, WHILE_EXPR)
+}
+
+// test for_expr
+// fn foo() {
+//     for x in [] {};
+// }
+/// Parses `for pat in iter { }`; `m`, when given, holds the label.
+fn for_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+    assert!(p.at(T![for]));
+    let m = m.unwrap_or_else(|| p.start());
+    p.bump(T![for]);
+    patterns::pattern(p);
+    p.expect(T![in]);
+    expr_no_struct(p);
+    block_expr(p);
+    m.complete(p, FOR_EXPR)
+}
+
+// test let_expr
+// fn foo() {
+//     if let Some(_) = None && true {}
+//     while 1 == 5 && (let None = None) {}
+// }
+/// Parses a `let pat = expr` expression (if-let / while-let / let-chains).
+fn let_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    let m = p.start();
+    p.bump(T![let]);
+    patterns::pattern_top(p);
+    p.expect(T![=]);
+    // The RHS binds looser than `&&` — see `expr_let`.
+    expr_let(p);
+    m.complete(p, LET_EXPR)
+}
+
+// test match_expr
+// fn foo() {
+//     match () { };
+//     match S {};
+//     match { } { _ => () };
+//     match { S {} } {};
+//     }
+/// Parses `match scrutinee { arms }`.
+fn match_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T![match]));
+    let m = p.start();
+    p.bump(T![match]);
+    expr_no_struct(p);
+    if p.at(T!['{']) {
+        match_arm_list(p);
+    } else {
+        p.error("expected `{`");
+    }
+    m.complete(p, MATCH_EXPR)
+}
+
+/// Parses the braced list of match arms, including inner attributes.
+pub(crate) fn match_arm_list(p: &mut Parser<'_>) {
+    assert!(p.at(T!['{']));
+    let m = p.start();
+    p.eat(T!['{']);
+
+    // test match_arms_inner_attribute
+    // fn foo() {
+    //     match () {
+    //         #![doc("Inner attribute")]
+    //         #![doc("Can be")]
+    //         #![doc("Stacked")]
+    //         _ => (),
+    //     }
+    // }
+    attributes::inner_attrs(p);
+
+    while !p.at(EOF) && !p.at(T!['}']) {
+        // A bare `{` here cannot start an arm; swallow the whole block as
+        // an error to keep the arm loop in sync.
+        if p.at(T!['{']) {
+            error_block(p, "expected match arm");
+            continue;
+        }
+        match_arm(p);
+    }
+    p.expect(T!['}']);
+    m.complete(p, MATCH_ARM_LIST);
+}
+
+// test match_arm
+// fn foo() {
+//     match () {
+//         _ => (),
+//         _ if Test > Test{field: 0} => (),
+//         X | Y if Z => (),
+//         | X | Y if Z => (),
+//         | X => (),
+//     };
+// }
+/// Parses one arm: attributes, pattern, optional guard, `=>`, body.
+fn match_arm(p: &mut Parser<'_>) {
+    let m = p.start();
+    // test match_arms_outer_attributes
+    // fn foo() {
+    //     match () {
+    //         #[cfg(feature = "some")]
+    //         _ => (),
+    //         #[cfg(feature = "other")]
+    //         _ => (),
+    //         #[cfg(feature = "many")]
+    //         #[cfg(feature = "attributes")]
+    //         #[cfg(feature = "before")]
+    //         _ => (),
+    //     }
+    // }
+    attributes::outer_attrs(p);
+
+    patterns::pattern_top_r(p, TokenSet::EMPTY);
+    if p.at(T![if]) {
+        match_guard(p);
+    }
+    p.expect(T![=>]);
+    let blocklike = match expr_stmt(p, None) {
+        Some((_, blocklike)) => blocklike,
+        None => BlockLike::NotBlock,
+    };
+
+    // test match_arms_commas
+    // fn foo() {
+    //     match () {
+    //         _ => (),
+    //         _ => {}
+    //         _ => ()
+    //     }
+    // }
+    // A comma is optional after a block-like body and after the last arm.
+    if !p.eat(T![,]) && !blocklike.is_block() && !p.at(T!['}']) {
+        p.error("expected `,`");
+    }
+    m.complete(p, MATCH_ARM);
+}
+
+// test match_guard
+// fn foo() {
+//     match () {
+//         _ if foo => (),
+//         _ if let foo = bar => (),
+//     }
+// }
+/// Parses an `if <expr>` guard on a match arm.
+fn match_guard(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T![if]));
+    let m = p.start();
+    p.bump(T![if]);
+    expr(p);
+    m.complete(p, MATCH_GUARD)
+}
+
+// test block
+// fn a() {}
+// fn b() { let _ = 1; }
+// fn c() { 1; 2; }
+// fn d() { 1; 2 }
+/// Parses a `{ ... }` block expression, reporting an error (without
+/// consuming anything) if no `{` is present.
+pub(crate) fn block_expr(p: &mut Parser<'_>) {
+    if !p.at(T!['{']) {
+        p.error("expected a block");
+        return;
+    }
+    let m = p.start();
+    stmt_list(p);
+    m.complete(p, BLOCK_EXPR);
+}
+
+/// Parses `{ stmts }` into a STMT_LIST node (the inner node of a block).
+fn stmt_list(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T!['{']));
+    let m = p.start();
+    p.bump(T!['{']);
+    expr_block_contents(p);
+    p.expect(T!['}']);
+    m.complete(p, STMT_LIST)
+}
+
+// test return_expr
+// fn foo() {
+//     return;
+//     return 92;
+// }
+/// Parses `return` with an optional value expression.
+fn return_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T![return]));
+    let m = p.start();
+    p.bump(T![return]);
+    if p.at_ts(EXPR_FIRST) {
+        expr(p);
+    }
+    m.complete(p, RETURN_EXPR)
+}
+// test yield_expr
+// fn foo() {
+//     yield;
+//     yield 1;
+// }
+/// Parses `yield` with an optional value expression.
+fn yield_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T![yield]));
+    let m = p.start();
+    p.bump(T![yield]);
+    if p.at_ts(EXPR_FIRST) {
+        expr(p);
+    }
+    m.complete(p, YIELD_EXPR)
+}
+
+// test continue_expr
+// fn foo() {
+//     loop {
+//         continue;
+//         continue 'l;
+//     }
+// }
+/// Parses `continue` with an optional target label.
+fn continue_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T![continue]));
+    let m = p.start();
+    p.bump(T![continue]);
+    if p.at(LIFETIME_IDENT) {
+        lifetime(p);
+    }
+    m.complete(p, CONTINUE_EXPR)
+}
+
+// test break_expr
+// fn foo() {
+//     loop {
+//         break;
+//         break 'l;
+//         break 92;
+//         break 'l 92;
+//     }
+// }
+/// Parses `break` with an optional label and optional value; the value
+/// respects the struct-literal restriction (see test below).
+fn break_expr(p: &mut Parser<'_>, r: Restrictions) -> CompletedMarker {
+    assert!(p.at(T![break]));
+    let m = p.start();
+    p.bump(T![break]);
+    if p.at(LIFETIME_IDENT) {
+        lifetime(p);
+    }
+    // test break_ambiguity
+    // fn foo(){
+    //     if break {}
+    //     while break {}
+    //     for i in break {}
+    //     match break {}
+    // }
+    if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) {
+        expr(p);
+    }
+    m.complete(p, BREAK_EXPR)
+}
+
+// test try_block_expr
+// fn foo() {
+//     let _ = try {};
+// }
+/// Parses a `try { ... }` block, falling back to treating `try!` as an
+/// ordinary macro call for pre-2018 code.
+fn try_block_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+    assert!(p.at(T![try]));
+    let m = m.unwrap_or_else(|| p.start());
+    // Special-case `try!` as macro.
+    // This is a hack until we do proper edition support
+    if p.nth_at(1, T![!]) {
+        // test try_macro_fallback
+        // fn foo() { try!(Ok(())); }
+        // Build the PATH > PATH_SEGMENT > NAME_REF nesting by hand, with
+        // the `try` keyword remapped to an IDENT.
+        let macro_call = p.start();
+        let path = p.start();
+        let path_segment = p.start();
+        let name_ref = p.start();
+        p.bump_remap(IDENT);
+        name_ref.complete(p, NAME_REF);
+        path_segment.complete(p, PATH_SEGMENT);
+        path.complete(p, PATH);
+        let _block_like = items::macro_call_after_excl(p);
+        macro_call.complete(p, MACRO_CALL);
+        return m.complete(p, MACRO_EXPR);
+    }
+
+    p.bump(T![try]);
+    if p.at(T!['{']) {
+        stmt_list(p);
+    } else {
+        p.error("expected a block");
+    }
+    m.complete(p, BLOCK_EXPR)
+}
+
+// test box_expr
+// fn foo() {
+//     let x = box 1i32;
+//     let y = (box 1i32, box 2i32);
+//     let z = Foo(box 1i32, box 2i32);
+// }
+/// Parses the (unstable) `box <expr>` form.
+fn box_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+    assert!(p.at(T![box]));
+    let m = m.unwrap_or_else(|| p.start());
+    p.bump(T![box]);
+    if p.at_ts(EXPR_FIRST) {
+        expr(p);
+    }
+    m.complete(p, BOX_EXPR)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
new file mode 100644
index 000000000..c438943a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
@@ -0,0 +1,131 @@
+use super::*;
+
+/// Parses an optional generic-argument list. With `colon_colon_required`
+/// only the turbofish `::<...>` form is accepted (expression position);
+/// otherwise bare `<...>` works too (type position). `< =` is excluded
+/// so `x <= y` is not misparsed as the start of an argument list.
+pub(super) fn opt_generic_arg_list(p: &mut Parser<'_>, colon_colon_required: bool) {
+    let m;
+    if p.at(T![::]) && p.nth(2) == T![<] {
+        m = p.start();
+        p.bump(T![::]);
+        p.bump(T![<]);
+    } else if !colon_colon_required && p.at(T![<]) && p.nth(1) != T![=] {
+        m = p.start();
+        p.bump(T![<]);
+    } else {
+        return;
+    }
+
+    while !p.at(EOF) && !p.at(T![>]) {
+        generic_arg(p);
+        if !p.at(T![>]) && !p.expect(T![,]) {
+            break;
+        }
+    }
+    p.expect(T![>]);
+    m.complete(p, GENERIC_ARG_LIST);
+}
+
+// test generic_arg
+// type T = S<i32>;
+/// Parses one generic argument: lifetime, const, associated-type
+/// binding/bound, or a type.
+fn generic_arg(p: &mut Parser<'_>) {
+    match p.current() {
+        LIFETIME_IDENT => lifetime_arg(p),
+        T!['{'] | T![true] | T![false] | T![-] => const_arg(p),
+        k if k.is_literal() => const_arg(p),
+        // test associated_type_bounds
+        // fn print_all<T: Iterator<Item, Item::Item, Item::<true>, Item: Display, Item<'a> = Item>>(printables: T) {}
+
+        // test macro_inside_generic_arg
+        // type A = Foo<syn::Token![_]>;
+        // A lone identifier followed by `<`, `=`, or `:` (but not `::`)
+        // may be an associated-type binding (`Item = T`) or bound
+        // (`Item: Trait`); otherwise it is re-packaged as a path type.
+        IDENT if [T![<], T![=], T![:]].contains(&p.nth(1)) && !p.nth_at(1, T![::]) => {
+            let m = p.start();
+            name_ref(p);
+            opt_generic_arg_list(p, false);
+            match p.current() {
+                T![=] => {
+                    p.bump_any();
+                    if types::TYPE_FIRST.contains(p.current()) {
+                        // test assoc_type_eq
+                        // type T = StreamingIterator<Item<'a> = &'a T>;
+                        types::type_(p);
+                    } else {
+                        // test assoc_const_eq
+                        // fn foo<F: Foo<N=3>>() {}
+                        // const TEST: usize = 3;
+                        // fn bar<F: Foo<N={TEST}>>() {}
+                        const_arg(p);
+                    }
+                    m.complete(p, ASSOC_TYPE_ARG);
+                }
+                // test assoc_type_bound
+                // type T = StreamingIterator<Item<'a>: Clone>;
+                T![:] if !p.at(T![::]) => {
+                    generic_params::bounds(p);
+                    m.complete(p, ASSOC_TYPE_ARG);
+                }
+                _ => {
+                    // Not a binding after all: rewrap the parsed name as
+                    // PATH_SEGMENT > PATH > PATH_TYPE > TYPE_ARG.
+                    let m = m.complete(p, PATH_SEGMENT).precede(p).complete(p, PATH);
+                    let m = paths::type_path_for_qualifier(p, m);
+                    m.precede(p).complete(p, PATH_TYPE).precede(p).complete(p, TYPE_ARG);
+                }
+            }
+        }
+        _ => type_arg(p),
+    }
+}
+
+// test lifetime_arg
+// type T = S<'static>;
+/// Parses a lifetime generic argument.
+fn lifetime_arg(p: &mut Parser<'_>) {
+    let m = p.start();
+    lifetime(p);
+    m.complete(p, LIFETIME_ARG);
+}
+
+/// Parses the expression payload of a const generic argument: a block,
+/// a literal, a negated literal, or a path.
+pub(super) fn const_arg_expr(p: &mut Parser<'_>) {
+    // The tests in here are really for `const_arg`, which wraps the content
+    // CONST_ARG.
+    match p.current() {
+        // test const_arg_block
+        // type T = S<{90 + 2}>;
+        T!['{'] => {
+            expressions::block_expr(p);
+        }
+        // test const_arg_literal
+        // type T = S<"hello", 0xdeadbeef>;
+        k if k.is_literal() => {
+            expressions::literal(p);
+        }
+        // test const_arg_bool_literal
+        // type T = S<true>;
+        T![true] | T![false] => {
+            expressions::literal(p);
+        }
+        // test const_arg_negative_number
+        // type T = S<-92>;
+        T![-] => {
+            let lm = p.start();
+            p.bump(T![-]);
+            expressions::literal(p);
+            lm.complete(p, PREFIX_EXPR);
+        }
+        _ => {
+            // This shouldn't be hit by `const_arg`
+            let lm = p.start();
+            paths::use_path(p);
+            lm.complete(p, PATH_EXPR);
+        }
+    }
+}
+
+// test const_arg
+// type T = S<92>;
+/// Parses a const generic argument (CONST_ARG wrapping its expression).
+pub(super) fn const_arg(p: &mut Parser<'_>) {
+    let m = p.start();
+    const_arg_expr(p);
+    m.complete(p, CONST_ARG);
+}
+
+/// Parses a type generic argument.
+fn type_arg(p: &mut Parser<'_>) {
+    let m = p.start();
+    types::type_(p);
+    m.complete(p, TYPE_ARG);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
new file mode 100644
index 000000000..6db28ef13
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
@@ -0,0 +1,242 @@
+use super::*;
+
+pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) {
+ if p.at(T![<]) {
+ generic_param_list(p);
+ }
+}
+
+// test generic_param_list
+// fn f<T: Clone>() {}
+fn generic_param_list(p: &mut Parser<'_>) {
+ assert!(p.at(T![<]));
+ let m = p.start();
+ p.bump(T![<]);
+
+ while !p.at(EOF) && !p.at(T![>]) {
+ generic_param(p);
+ if !p.at(T![>]) && !p.expect(T![,]) {
+ break;
+ }
+ }
+ p.expect(T![>]);
+ m.complete(p, GENERIC_PARAM_LIST);
+}
+
+fn generic_param(p: &mut Parser<'_>) {
+ let m = p.start();
+ // test generic_param_attribute
+ // fn foo<#[lt_attr] 'a, #[t_attr] T>() {}
+ attributes::outer_attrs(p);
+ match p.current() {
+ LIFETIME_IDENT => lifetime_param(p, m),
+ IDENT => type_param(p, m),
+ T![const] => const_param(p, m),
+ _ => {
+ m.abandon(p);
+ p.err_and_bump("expected type parameter");
+ }
+ }
+}
+
+// test lifetime_param
+// fn f<'a: 'b>() {}
+fn lifetime_param(p: &mut Parser<'_>, m: Marker) {
+ assert!(p.at(LIFETIME_IDENT));
+ lifetime(p);
+ if p.at(T![:]) {
+ lifetime_bounds(p);
+ }
+ m.complete(p, LIFETIME_PARAM);
+}
+
+// test type_param
+// fn f<T: Clone>() {}
+fn type_param(p: &mut Parser<'_>, m: Marker) {
+ assert!(p.at(IDENT));
+ name(p);
+ if p.at(T![:]) {
+ bounds(p);
+ }
+ if p.at(T![=]) {
+ // test type_param_default
+ // struct S<T = i32>;
+ p.bump(T![=]);
+ types::type_(p);
+ }
+ m.complete(p, TYPE_PARAM);
+}
+
+// test const_param
+// struct S<const N: u32>;
+fn const_param(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![const]);
+ name(p);
+ if p.at(T![:]) {
+ types::ascription(p);
+ } else {
+ p.error("missing type for const parameter");
+ }
+
+ if p.at(T![=]) {
+ // test const_param_default_literal
+ // struct A<const N: i32 = -1>;
+ p.bump(T![=]);
+
+ // test const_param_default_expression
+ // struct A<const N: i32 = { 1 }>;
+
+ // test const_param_default_path
+ // struct A<const N: i32 = i32::MAX>;
+ generic_args::const_arg_expr(p);
+ }
+
+ m.complete(p, CONST_PARAM);
+}
+
+fn lifetime_bounds(p: &mut Parser<'_>) {
+ assert!(p.at(T![:]));
+ p.bump(T![:]);
+ while p.at(LIFETIME_IDENT) {
+ lifetime(p);
+ if !p.eat(T![+]) {
+ break;
+ }
+ }
+}
+
+// test type_param_bounds
+// struct S<T: 'a + ?Sized + (Copy) + ~const Drop>;
+pub(super) fn bounds(p: &mut Parser<'_>) {
+ assert!(p.at(T![:]));
+ p.bump(T![:]);
+ bounds_without_colon(p);
+}
+
+pub(super) fn bounds_without_colon(p: &mut Parser<'_>) {
+ let m = p.start();
+ bounds_without_colon_m(p, m);
+}
+
+pub(super) fn bounds_without_colon_m(p: &mut Parser<'_>, marker: Marker) -> CompletedMarker {
+ while type_bound(p) {
+ if !p.eat(T![+]) {
+ break;
+ }
+ }
+ marker.complete(p, TYPE_BOUND_LIST)
+}
+
+fn type_bound(p: &mut Parser<'_>) -> bool {
+ let m = p.start();
+ let has_paren = p.eat(T!['(']);
+ match p.current() {
+ LIFETIME_IDENT => lifetime(p),
+ T![for] => types::for_type(p, false),
+ T![?] if p.nth_at(1, T![for]) => {
+ // test question_for_type_trait_bound
+ // fn f<T>() where T: ?for<> Sized {}
+ p.bump_any();
+ types::for_type(p, false)
+ }
+ current => {
+ match current {
+ T![?] => p.bump_any(),
+ T![~] => {
+ p.bump_any();
+ p.expect(T![const]);
+ }
+ _ => (),
+ }
+ if paths::is_use_path_start(p) {
+ types::path_type_(p, false);
+ } else {
+ m.abandon(p);
+ return false;
+ }
+ }
+ }
+ if has_paren {
+ p.expect(T![')']);
+ }
+ m.complete(p, TYPE_BOUND);
+
+ true
+}
+
+// test where_clause
+// fn foo()
+// where
+// 'a: 'b + 'c,
+// T: Clone + Copy + 'static,
+// Iterator::Item: 'a,
+// <T as Iterator>::Item: 'a
+// {}
+pub(super) fn opt_where_clause(p: &mut Parser<'_>) {
+ if !p.at(T![where]) {
+ return;
+ }
+ let m = p.start();
+ p.bump(T![where]);
+
+ while is_where_predicate(p) {
+ where_predicate(p);
+
+ let comma = p.eat(T![,]);
+
+ match p.current() {
+ T!['{'] | T![;] | T![=] => break,
+ _ => (),
+ }
+
+ if !comma {
+ p.error("expected comma");
+ }
+ }
+
+ m.complete(p, WHERE_CLAUSE);
+
+ fn is_where_predicate(p: &mut Parser<'_>) -> bool {
+ match p.current() {
+ LIFETIME_IDENT => true,
+ T![impl] => false,
+ token => types::TYPE_FIRST.contains(token),
+ }
+ }
+}
+
+fn where_predicate(p: &mut Parser<'_>) {
+ let m = p.start();
+ match p.current() {
+ LIFETIME_IDENT => {
+ lifetime(p);
+ if p.at(T![:]) {
+ bounds(p);
+ } else {
+ p.error("expected colon");
+ }
+ }
+ T![impl] => {
+ p.error("expected lifetime or type");
+ }
+ _ => {
+ if p.at(T![for]) {
+ // test where_pred_for
+ // fn for_trait<F>()
+ // where
+ // for<'a> F: Fn(&'a str)
+ // { }
+ types::for_binder(p);
+ }
+
+ types::type_(p);
+
+ if p.at(T![:]) {
+ bounds(p);
+ } else {
+ p.error("expected colon");
+ }
+ }
+ }
+ m.complete(p, WHERE_PRED);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
new file mode 100644
index 000000000..5e0951bf8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
@@ -0,0 +1,465 @@
+mod consts;
+mod adt;
+mod traits;
+mod use_item;
+
+pub(crate) use self::{
+ adt::{record_field_list, variant_list},
+ expressions::{match_arm_list, record_expr_field_list},
+ traits::assoc_item_list,
+ use_item::use_tree_list,
+};
+use super::*;
+
+// test mod_contents
+// fn foo() {}
+// macro_rules! foo {}
+// foo::bar!();
+// super::baz! {}
+// struct S;
+pub(super) fn mod_contents(p: &mut Parser<'_>, stop_on_r_curly: bool) {
+ attributes::inner_attrs(p);
+ while !p.at(EOF) && !(p.at(T!['}']) && stop_on_r_curly) {
+ item_or_macro(p, stop_on_r_curly);
+ }
+}
+
+pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
+ T![fn],
+ T![struct],
+ T![enum],
+ T![impl],
+ T![trait],
+ T![const],
+ T![static],
+ T![let],
+ T![mod],
+ T![pub],
+ T![crate],
+ T![use],
+ T![macro],
+ T![;],
+]);
+
+pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool) {
+ let m = p.start();
+ attributes::outer_attrs(p);
+
+ let m = match opt_item(p, m) {
+ Ok(()) => {
+ if p.at(T![;]) {
+ p.err_and_bump(
+ "expected item, found `;`\n\
+ consider removing this semicolon",
+ );
+ }
+ return;
+ }
+ Err(m) => m,
+ };
+
+ if paths::is_use_path_start(p) {
+ match macro_call(p) {
+ BlockLike::Block => (),
+ BlockLike::NotBlock => {
+ p.expect(T![;]);
+ }
+ }
+ m.complete(p, MACRO_CALL);
+ return;
+ }
+
+ m.abandon(p);
+ match p.current() {
+ T!['{'] => error_block(p, "expected an item"),
+ T!['}'] if !stop_on_r_curly => {
+ let e = p.start();
+ p.error("unmatched `}`");
+ p.bump(T!['}']);
+ e.complete(p, ERROR);
+ }
+ EOF | T!['}'] => p.error("expected an item"),
+ _ => p.err_and_bump("expected an item"),
+ }
+}
+
+/// Try to parse an item, completing `m` in case of success.
+pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
+ // test_err pub_expr
+ // fn foo() { pub 92; }
+ let has_visibility = opt_visibility(p, false);
+
+ let m = match opt_item_without_modifiers(p, m) {
+ Ok(()) => return Ok(()),
+ Err(m) => m,
+ };
+
+ let mut has_mods = false;
+ let mut has_extern = false;
+
+ // modifiers
+ if p.at(T![const]) && p.nth(1) != T!['{'] {
+ p.eat(T![const]);
+ has_mods = true;
+ }
+
+ // test_err async_without_semicolon
+ // fn foo() { let _ = async {} }
+ if p.at(T![async]) && !matches!(p.nth(1), T!['{'] | T![move] | T![|]) {
+ p.eat(T![async]);
+ has_mods = true;
+ }
+
+ // test_err unsafe_block_in_mod
+ // fn foo(){} unsafe { } fn bar(){}
+ if p.at(T![unsafe]) && p.nth(1) != T!['{'] {
+ p.eat(T![unsafe]);
+ has_mods = true;
+ }
+
+ if p.at(T![extern]) {
+ has_extern = true;
+ has_mods = true;
+ abi(p);
+ }
+ if p.at_contextual_kw(T![auto]) && p.nth(1) == T![trait] {
+ p.bump_remap(T![auto]);
+ has_mods = true;
+ }
+
+ // test default_item
+ // default impl T for Foo {}
+ if p.at_contextual_kw(T![default]) {
+ match p.nth(1) {
+ T![fn] | T![type] | T![const] | T![impl] => {
+ p.bump_remap(T![default]);
+ has_mods = true;
+ }
+ // test default_unsafe_item
+ // default unsafe impl T for Foo {
+ // default unsafe fn foo() {}
+ // }
+ T![unsafe] if matches!(p.nth(2), T![impl] | T![fn]) => {
+ p.bump_remap(T![default]);
+ p.bump(T![unsafe]);
+ has_mods = true;
+ }
+ // test default_async_fn
+ // impl T for Foo {
+ // default async fn foo() {}
+ // }
+ T![async] => {
+ let mut maybe_fn = p.nth(2);
+ let is_unsafe = if matches!(maybe_fn, T![unsafe]) {
+ // test default_async_unsafe_fn
+ // impl T for Foo {
+ // default async unsafe fn foo() {}
+ // }
+ maybe_fn = p.nth(3);
+ true
+ } else {
+ false
+ };
+
+ if matches!(maybe_fn, T![fn]) {
+ p.bump_remap(T![default]);
+ p.bump(T![async]);
+ if is_unsafe {
+ p.bump(T![unsafe]);
+ }
+ has_mods = true;
+ }
+ }
+ _ => (),
+ }
+ }
+
+ // test existential_type
+ // existential type Foo: Fn() -> usize;
+ if p.at_contextual_kw(T![existential]) && p.nth(1) == T![type] {
+ p.bump_remap(T![existential]);
+ has_mods = true;
+ }
+
+ // items
+ match p.current() {
+ T![fn] => fn_(p, m),
+
+ T![const] if p.nth(1) != T!['{'] => consts::konst(p, m),
+
+ T![trait] => traits::trait_(p, m),
+ T![impl] => traits::impl_(p, m),
+
+ T![type] => type_alias(p, m),
+
+ // test extern_block
+ // unsafe extern "C" {}
+ // extern {}
+ T!['{'] if has_extern => {
+ extern_item_list(p);
+ m.complete(p, EXTERN_BLOCK);
+ }
+
+ _ if has_visibility || has_mods => {
+ if has_mods {
+ p.error("expected existential, fn, trait or impl");
+ } else {
+ p.error("expected an item");
+ }
+ m.complete(p, ERROR);
+ }
+
+ _ => return Err(m),
+ }
+ Ok(())
+}
+
+fn opt_item_without_modifiers(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
+ let la = p.nth(1);
+ match p.current() {
+ T![extern] if la == T![crate] => extern_crate(p, m),
+ T![use] => use_item::use_(p, m),
+ T![mod] => mod_item(p, m),
+
+ T![type] => type_alias(p, m),
+ T![struct] => adt::strukt(p, m),
+ T![enum] => adt::enum_(p, m),
+ IDENT if p.at_contextual_kw(T![union]) && p.nth(1) == IDENT => adt::union(p, m),
+
+ T![macro] => macro_def(p, m),
+ IDENT if p.at_contextual_kw(T![macro_rules]) && p.nth(1) == BANG => macro_rules(p, m),
+
+ T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m),
+ T![static] if (la == IDENT || la == T![_] || la == T![mut]) => consts::static_(p, m),
+
+ _ => return Err(m),
+ };
+ Ok(())
+}
+
+// test extern_crate
+// extern crate foo;
+fn extern_crate(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![extern]);
+ p.bump(T![crate]);
+
+ if p.at(T![self]) {
+ // test extern_crate_self
+ // extern crate self;
+ let m = p.start();
+ p.bump(T![self]);
+ m.complete(p, NAME_REF);
+ } else {
+ name_ref(p);
+ }
+
+ // test extern_crate_rename
+ // extern crate foo as bar;
+ opt_rename(p);
+ p.expect(T![;]);
+ m.complete(p, EXTERN_CRATE);
+}
+
+// test mod_item
+// mod a;
+pub(crate) fn mod_item(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![mod]);
+ name(p);
+ if p.at(T!['{']) {
+ // test mod_item_curly
+ // mod b { }
+ item_list(p);
+ } else if !p.eat(T![;]) {
+ p.error("expected `;` or `{`");
+ }
+ m.complete(p, MODULE);
+}
+
+// test type_alias
+// type Foo = Bar;
+fn type_alias(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![type]);
+
+ name(p);
+
+ // test type_item_type_params
+ // type Result<T> = ();
+ generic_params::opt_generic_param_list(p);
+
+ if p.at(T![:]) {
+ generic_params::bounds(p);
+ }
+
+ // test type_item_where_clause_deprecated
+ // type Foo where Foo: Copy = ();
+ generic_params::opt_where_clause(p);
+ if p.eat(T![=]) {
+ types::type_(p);
+ }
+
+ // test type_item_where_clause
+ // type Foo = () where Foo: Copy;
+ generic_params::opt_where_clause(p);
+
+ p.expect(T![;]);
+ m.complete(p, TYPE_ALIAS);
+}
+
+pub(crate) fn item_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ mod_contents(p, true);
+ p.expect(T!['}']);
+ m.complete(p, ITEM_LIST);
+}
+
+pub(crate) fn extern_item_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ mod_contents(p, true);
+ p.expect(T!['}']);
+ m.complete(p, EXTERN_ITEM_LIST);
+}
+
+fn macro_rules(p: &mut Parser<'_>, m: Marker) {
+ assert!(p.at_contextual_kw(T![macro_rules]));
+ p.bump_remap(T![macro_rules]);
+ p.expect(T![!]);
+
+ if p.at(IDENT) {
+ name(p);
+ }
+ // Special-case `macro_rules! try`.
+ // This is a hack until we do proper edition support
+
+ // test try_macro_rules
+ // macro_rules! try { () => {} }
+ if p.at(T![try]) {
+ let m = p.start();
+ p.bump_remap(IDENT);
+ m.complete(p, NAME);
+ }
+
+ match p.current() {
+ // test macro_rules_non_brace
+ // macro_rules! m ( ($i:ident) => {} );
+ // macro_rules! m [ ($i:ident) => {} ];
+ T!['['] | T!['('] => {
+ token_tree(p);
+ p.expect(T![;]);
+ }
+ T!['{'] => token_tree(p),
+ _ => p.error("expected `{`, `[`, `(`"),
+ }
+ m.complete(p, MACRO_RULES);
+}
+
+// test macro_def
+// macro m($i:ident) {}
+fn macro_def(p: &mut Parser<'_>, m: Marker) {
+ p.expect(T![macro]);
+ name_r(p, ITEM_RECOVERY_SET);
+ if p.at(T!['{']) {
+ // test macro_def_curly
+ // macro m { ($i:ident) => {} }
+ token_tree(p);
+ } else if p.at(T!['(']) {
+ let m = p.start();
+ token_tree(p);
+ match p.current() {
+ T!['{'] | T!['['] | T!['('] => token_tree(p),
+ _ => p.error("expected `{`, `[`, `(`"),
+ }
+ m.complete(p, TOKEN_TREE);
+ } else {
+ p.error("unmatched `(`");
+ }
+
+ m.complete(p, MACRO_DEF);
+}
+
+// test fn
+// fn foo() {}
+fn fn_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![fn]);
+
+ name_r(p, ITEM_RECOVERY_SET);
+ // test function_type_params
+ // fn foo<T: Clone + Copy>(){}
+ generic_params::opt_generic_param_list(p);
+
+ if p.at(T!['(']) {
+ params::param_list_fn_def(p);
+ } else {
+ p.error("expected function arguments");
+ }
+ // test function_ret_type
+ // fn foo() {}
+ // fn bar() -> () {}
+ opt_ret_type(p);
+
+ // test function_where_clause
+ // fn foo<T>() where T: Copy {}
+ generic_params::opt_where_clause(p);
+
+ if p.at(T![;]) {
+ // test fn_decl
+ // trait T { fn foo(); }
+ p.bump(T![;]);
+ } else {
+ expressions::block_expr(p);
+ }
+ m.complete(p, FN);
+}
+
+fn macro_call(p: &mut Parser<'_>) -> BlockLike {
+ assert!(paths::is_use_path_start(p));
+ paths::use_path(p);
+ macro_call_after_excl(p)
+}
+
+pub(super) fn macro_call_after_excl(p: &mut Parser<'_>) -> BlockLike {
+ p.expect(T![!]);
+
+ match p.current() {
+ T!['{'] => {
+ token_tree(p);
+ BlockLike::Block
+ }
+ T!['('] | T!['['] => {
+ token_tree(p);
+ BlockLike::NotBlock
+ }
+ _ => {
+ p.error("expected `{`, `[`, `(`");
+ BlockLike::NotBlock
+ }
+ }
+}
+
+pub(crate) fn token_tree(p: &mut Parser<'_>) {
+ let closing_paren_kind = match p.current() {
+ T!['{'] => T!['}'],
+ T!['('] => T![')'],
+ T!['['] => T![']'],
+ _ => unreachable!(),
+ };
+ let m = p.start();
+ p.bump_any();
+ while !p.at(EOF) && !p.at(closing_paren_kind) {
+ match p.current() {
+ T!['{'] | T!['('] | T!['['] => token_tree(p),
+ T!['}'] => {
+ p.error("unmatched `}`");
+ m.complete(p, TOKEN_TREE);
+ return;
+ }
+ T![')'] | T![']'] => p.err_and_bump("unmatched brace"),
+ _ => p.bump_any(),
+ }
+ }
+ p.expect(closing_paren_kind);
+ m.complete(p, TOKEN_TREE);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs
new file mode 100644
index 000000000..e7d30516b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs
@@ -0,0 +1,168 @@
+use super::*;
+
+// test struct_item
+// struct S {}
+pub(super) fn strukt(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![struct]);
+ struct_or_union(p, m, true);
+}
+
+// test union_item
+ // union U { i: i32, f: f32 }
+pub(super) fn union(p: &mut Parser<'_>, m: Marker) {
+ assert!(p.at_contextual_kw(T![union]));
+ p.bump_remap(T![union]);
+ struct_or_union(p, m, false);
+}
+
+fn struct_or_union(p: &mut Parser<'_>, m: Marker, is_struct: bool) {
+ name_r(p, ITEM_RECOVERY_SET);
+ generic_params::opt_generic_param_list(p);
+ match p.current() {
+ T![where] => {
+ generic_params::opt_where_clause(p);
+ match p.current() {
+ T![;] => p.bump(T![;]),
+ T!['{'] => record_field_list(p),
+ _ => {
+ //FIXME: special case `(` error message
+ p.error("expected `;` or `{`");
+ }
+ }
+ }
+ T!['{'] => record_field_list(p),
+ // test unit_struct
+ // struct S;
+ T![;] if is_struct => {
+ p.bump(T![;]);
+ }
+ // test tuple_struct
+ // struct S(String, usize);
+ T!['('] if is_struct => {
+ tuple_field_list(p);
+ // test tuple_struct_where
+ // struct S<T>(T) where T: Clone;
+ generic_params::opt_where_clause(p);
+ p.expect(T![;]);
+ }
+ _ => p.error(if is_struct { "expected `;`, `{`, or `(`" } else { "expected `{`" }),
+ }
+ m.complete(p, if is_struct { STRUCT } else { UNION });
+}
+
+pub(super) fn enum_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![enum]);
+ name_r(p, ITEM_RECOVERY_SET);
+ generic_params::opt_generic_param_list(p);
+ generic_params::opt_where_clause(p);
+ if p.at(T!['{']) {
+ variant_list(p);
+ } else {
+ p.error("expected `{`");
+ }
+ m.complete(p, ENUM);
+}
+
+pub(crate) fn variant_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(EOF) && !p.at(T!['}']) {
+ if p.at(T!['{']) {
+ error_block(p, "expected enum variant");
+ continue;
+ }
+ variant(p);
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, VARIANT_LIST);
+
+ fn variant(p: &mut Parser<'_>) {
+ let m = p.start();
+ attributes::outer_attrs(p);
+ if p.at(IDENT) {
+ name(p);
+ match p.current() {
+ T!['{'] => record_field_list(p),
+ T!['('] => tuple_field_list(p),
+ _ => (),
+ }
+
+ // test variant_discriminant
+ // enum E { X(i32) = 10 }
+ if p.eat(T![=]) {
+ expressions::expr(p);
+ }
+ m.complete(p, VARIANT);
+ } else {
+ m.abandon(p);
+ p.err_and_bump("expected enum variant");
+ }
+ }
+}
+
+// test record_field_list
+// struct S { a: i32, b: f32 }
+pub(crate) fn record_field_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(T!['}']) && !p.at(EOF) {
+ if p.at(T!['{']) {
+ error_block(p, "expected field");
+ continue;
+ }
+ record_field(p);
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, RECORD_FIELD_LIST);
+
+ fn record_field(p: &mut Parser<'_>) {
+ let m = p.start();
+ // test record_field_attrs
+ // struct S { #[attr] f: f32 }
+ attributes::outer_attrs(p);
+ opt_visibility(p, false);
+ if p.at(IDENT) {
+ name(p);
+ p.expect(T![:]);
+ types::type_(p);
+ m.complete(p, RECORD_FIELD);
+ } else {
+ m.abandon(p);
+ p.err_and_bump("expected field declaration");
+ }
+ }
+}
+
+fn tuple_field_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['(']));
+ let m = p.start();
+ p.bump(T!['(']);
+ while !p.at(T![')']) && !p.at(EOF) {
+ let m = p.start();
+ // test tuple_field_attrs
+ // struct S (#[attr] f32);
+ attributes::outer_attrs(p);
+ opt_visibility(p, true);
+ if !p.at_ts(types::TYPE_FIRST) {
+ p.error("expected a type");
+ m.complete(p, ERROR);
+ break;
+ }
+ types::type_(p);
+ m.complete(p, TUPLE_FIELD);
+
+ if !p.at(T![')']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T![')']);
+ m.complete(p, TUPLE_FIELD_LIST);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs
new file mode 100644
index 000000000..9549ec9b4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs
@@ -0,0 +1,37 @@
+use super::*;
+
+// test const_item
+// const C: u32 = 92;
+pub(super) fn konst(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![const]);
+ const_or_static(p, m, true);
+}
+
+pub(super) fn static_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![static]);
+ const_or_static(p, m, false);
+}
+
+fn const_or_static(p: &mut Parser<'_>, m: Marker, is_const: bool) {
+ p.eat(T![mut]);
+
+ if is_const && p.eat(T![_]) {
+ // test anonymous_const
+ // const _: u32 = 0;
+ } else {
+ // test_err anonymous_static
+ // static _: i32 = 5;
+ name(p);
+ }
+
+ if p.at(T![:]) {
+ types::ascription(p);
+ } else {
+ p.error("missing type for `const` or `static`");
+ }
+ if p.eat(T![=]) {
+ expressions::expr(p);
+ }
+ p.expect(T![;]);
+ m.complete(p, if is_const { CONST } else { STATIC });
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs
new file mode 100644
index 000000000..c982e2d56
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs
@@ -0,0 +1,140 @@
+use super::*;
+
+// test trait_item
+// trait T { fn new() -> Self; }
+pub(super) fn trait_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![trait]);
+ name_r(p, ITEM_RECOVERY_SET);
+
+ // test trait_item_generic_params
+ // trait X<U: Debug + Display> {}
+ generic_params::opt_generic_param_list(p);
+
+ if p.eat(T![=]) {
+ // test trait_alias
+ // trait Z<U> = T<U>;
+ generic_params::bounds_without_colon(p);
+
+ // test trait_alias_where_clause
+ // trait Z<U> = T<U> where U: Copy;
+ // trait Z<U> = where Self: T<U>;
+ generic_params::opt_where_clause(p);
+ p.expect(T![;]);
+ m.complete(p, TRAIT);
+ return;
+ }
+
+ if p.at(T![:]) {
+ // test trait_item_bounds
+ // trait T: Hash + Clone {}
+ generic_params::bounds(p);
+ }
+
+ // test trait_item_where_clause
+ // trait T where Self: Copy {}
+ generic_params::opt_where_clause(p);
+
+ if p.at(T!['{']) {
+ assoc_item_list(p);
+ } else {
+ p.error("expected `{`");
+ }
+ m.complete(p, TRAIT);
+}
+
+// test impl_item
+// impl S {}
+pub(super) fn impl_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![impl]);
+ if p.at(T![<]) && not_a_qualified_path(p) {
+ generic_params::opt_generic_param_list(p);
+ }
+
+ // test impl_item_const
+ // impl const Send for S {}
+ p.eat(T![const]);
+
+ // FIXME: never type
+ // impl ! {}
+
+ // test impl_item_neg
+ // impl !Send for S {}
+ p.eat(T![!]);
+ impl_type(p);
+ if p.eat(T![for]) {
+ impl_type(p);
+ }
+ generic_params::opt_where_clause(p);
+ if p.at(T!['{']) {
+ assoc_item_list(p);
+ } else {
+ p.error("expected `{`");
+ }
+ m.complete(p, IMPL);
+}
+
+// test assoc_item_list
+// impl F {
+// type A = i32;
+// const B: i32 = 92;
+// fn foo() {}
+// fn bar(&self) {}
+// }
+pub(crate) fn assoc_item_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+
+ let m = p.start();
+ p.bump(T!['{']);
+ // test assoc_item_list_inner_attrs
+ // impl S { #![attr] }
+ attributes::inner_attrs(p);
+
+ while !p.at(EOF) && !p.at(T!['}']) {
+ if p.at(T!['{']) {
+ error_block(p, "expected an item");
+ continue;
+ }
+ item_or_macro(p, true);
+ }
+ p.expect(T!['}']);
+ m.complete(p, ASSOC_ITEM_LIST);
+}
+
+// test impl_type_params
+// impl<const N: u32> Bar<N> {}
+fn not_a_qualified_path(p: &Parser<'_>) -> bool {
+ // There's an ambiguity between generic parameters and qualified paths in impls.
+ // If we see `<` it may start both, so we have to inspect some following tokens.
+ // The following combinations can only start generics,
+ // but not qualified paths (with one exception):
+ // `<` `>` - empty generic parameters
+ // `<` `#` - generic parameters with attributes
+ // `<` `const` - const generic parameters
+ // `<` (LIFETIME_IDENT|IDENT) `>` - single generic parameter
+ // `<` (LIFETIME_IDENT|IDENT) `,` - first generic parameter in a list
+ // `<` (LIFETIME_IDENT|IDENT) `:` - generic parameter with bounds
+ // `<` (LIFETIME_IDENT|IDENT) `=` - generic parameter with a default
+ // The only truly ambiguous case is
+ // `<` IDENT `>` `::` IDENT ...
+ // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
+ // because this is what is almost always expected in practice; qualified paths in impls
+ // (`impl <Type>::AssocTy { ... }`) aren't even allowed by type checker at the moment.
+ if p.nth(1) == T![#] || p.nth(1) == T![>] || p.nth(1) == T![const] {
+ return true;
+ }
+ (p.nth(1) == LIFETIME_IDENT || p.nth(1) == IDENT)
+ && (p.nth(2) == T![>] || p.nth(2) == T![,] || p.nth(2) == T![:] || p.nth(2) == T![=])
+}
+
+// test_err impl_type
+// impl Type {}
+// impl Trait1 for T {}
+// impl impl NotType {}
+// impl Trait2 for impl NotType {}
+pub(crate) fn impl_type(p: &mut Parser<'_>) {
+ if p.at(T![impl]) {
+ p.error("expected trait or type");
+ return;
+ }
+ types::type_(p);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/use_item.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/use_item.rs
new file mode 100644
index 000000000..69880b794
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/use_item.rs
@@ -0,0 +1,93 @@
+use super::*;
+
+// test use_item
+// use std::collections;
+pub(super) fn use_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![use]);
+ use_tree(p, true);
+ p.expect(T![;]);
+ m.complete(p, USE);
+}
+
+// test use_tree
+// use outer::tree::{inner::tree};
+fn use_tree(p: &mut Parser<'_>, top_level: bool) {
+ let m = p.start();
+ match p.current() {
+ // test use_tree_star
+ // use *;
+ // use std::{*};
+ T![*] => p.bump(T![*]),
+ // test use_tree_abs_star
+ // use ::*;
+ // use std::{::*};
+ T![:] if p.at(T![::]) && p.nth(2) == T![*] => {
+ p.bump(T![::]);
+ p.bump(T![*]);
+ }
+ T!['{'] => use_tree_list(p),
+ T![:] if p.at(T![::]) && p.nth(2) == T!['{'] => {
+ p.bump(T![::]);
+ use_tree_list(p);
+ }
+
+ // test use_tree_path
+ // use ::std;
+ // use std::collections;
+ //
+ // use self::m;
+ // use super::m;
+ // use crate::m;
+ _ if paths::is_use_path_start(p) => {
+ paths::use_path(p);
+ match p.current() {
+ // test use_tree_alias
+ // use std as stdlib;
+ // use Trait as _;
+ T![as] => opt_rename(p),
+ T![:] if p.at(T![::]) => {
+ p.bump(T![::]);
+ match p.current() {
+ // test use_tree_path_star
+ // use std::*;
+ T![*] => p.bump(T![*]),
+ // test use_tree_path_use_tree
+ // use std::{collections};
+ T!['{'] => use_tree_list(p),
+ _ => p.error("expected `{` or `*`"),
+ }
+ }
+ _ => (),
+ }
+ }
+ _ => {
+ m.abandon(p);
+ let msg = "expected one of `*`, `::`, `{`, `self`, `super` or an identifier";
+ if top_level {
+ p.err_recover(msg, ITEM_RECOVERY_SET);
+ } else {
+ // if we are parsing a nested tree, we have to eat a token to
+ // maintain balanced `{}`
+ p.err_and_bump(msg);
+ }
+ return;
+ }
+ }
+ m.complete(p, USE_TREE);
+}
+
+// test use_tree_list
+// use {a, b, c};
+pub(crate) fn use_tree_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(EOF) && !p.at(T!['}']) {
+ use_tree(p, false);
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, USE_TREE_LIST);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
new file mode 100644
index 000000000..20e8e95f0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
@@ -0,0 +1,209 @@
+use super::*;
+
+// test param_list
+// fn a() {}
+// fn b(x: i32) {}
+// fn c(x: i32, ) {}
+// fn d(x: i32, y: ()) {}
+pub(super) fn param_list_fn_def(p: &mut Parser<'_>) {
+ list_(p, Flavor::FnDef);
+}
+
+// test param_list_opt_patterns
+// fn foo<F: FnMut(&mut Foo<'a>)>(){}
+pub(super) fn param_list_fn_trait(p: &mut Parser<'_>) {
+ list_(p, Flavor::FnTrait);
+}
+
+pub(super) fn param_list_fn_ptr(p: &mut Parser<'_>) {
+ list_(p, Flavor::FnPointer);
+}
+
+pub(super) fn param_list_closure(p: &mut Parser<'_>) {
+ list_(p, Flavor::Closure);
+}
+
+#[derive(Debug, Clone, Copy)]
+enum Flavor {
+ FnDef, // Includes trait fn params; omitted param idents are not supported
+ FnTrait, // Params for `Fn(...)`/`FnMut(...)`/`FnOnce(...)` annotations
+ FnPointer,
+ Closure,
+}
+
+fn list_(p: &mut Parser<'_>, flavor: Flavor) {
+ use Flavor::*;
+
+ let (bra, ket) = match flavor {
+ Closure => (T![|], T![|]),
+ FnDef | FnTrait | FnPointer => (T!['('], T![')']),
+ };
+
+ let list_marker = p.start();
+ p.bump(bra);
+
+ let mut param_marker = None;
+ if let FnDef = flavor {
+ // test self_param_outer_attr
+ // fn f(#[must_use] self) {}
+ let m = p.start();
+ attributes::outer_attrs(p);
+ match opt_self_param(p, m) {
+ Ok(()) => {}
+ Err(m) => param_marker = Some(m),
+ }
+ }
+
+ while !p.at(EOF) && !p.at(ket) {
+ // test param_outer_arg
+ // fn f(#[attr1] pat: Type) {}
+ let m = match param_marker.take() {
+ Some(m) => m,
+ None => {
+ let m = p.start();
+ attributes::outer_attrs(p);
+ m
+ }
+ };
+
+ if !p.at_ts(PARAM_FIRST) {
+ p.error("expected value parameter");
+ m.abandon(p);
+ break;
+ }
+ param(p, m, flavor);
+ if !p.at(ket) {
+ p.expect(T![,]);
+ }
+ }
+
+ if let Some(m) = param_marker {
+ m.abandon(p);
+ }
+
+ p.expect(ket);
+ list_marker.complete(p, PARAM_LIST);
+}
+
+const PARAM_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST);
+
+fn param(p: &mut Parser<'_>, m: Marker, flavor: Flavor) {
+ match flavor {
+ // test param_list_vararg
+ // extern "C" { fn printf(format: *const i8, ..., _: u8) -> i32; }
+ Flavor::FnDef | Flavor::FnPointer if p.eat(T![...]) => {}
+
+ // test fn_def_param
+ // fn foo(..., (x, y): (i32, i32)) {}
+ Flavor::FnDef => {
+ patterns::pattern(p);
+ if !variadic_param(p) {
+ if p.at(T![:]) {
+ types::ascription(p);
+ } else {
+ // test_err missing_fn_param_type
+ // fn f(x y: i32, z, t: i32) {}
+ p.error("missing type for function parameter");
+ }
+ }
+ }
+ // test value_parameters_no_patterns
+ // type F = Box<Fn(i32, &i32, &i32, ())>;
+ Flavor::FnTrait => {
+ types::type_(p);
+ }
+ // test fn_pointer_param_ident_path
+ // type Foo = fn(Bar::Baz);
+ // type Qux = fn(baz: Bar::Baz);
+
+ // test fn_pointer_unnamed_arg
+ // type Foo = fn(_: bar);
+ Flavor::FnPointer => {
+ if (p.at(IDENT) || p.at(UNDERSCORE)) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) {
+ patterns::pattern_single(p);
+ if !variadic_param(p) {
+ if p.at(T![:]) {
+ types::ascription(p);
+ } else {
+ p.error("missing type for function parameter");
+ }
+ }
+ } else {
+ types::type_(p);
+ }
+ }
+ // test closure_params
+ // fn main() {
+ // let foo = |bar, baz: Baz, qux: Qux::Quux| ();
+ // }
+ Flavor::Closure => {
+ patterns::pattern_single(p);
+ if p.at(T![:]) && !p.at(T![::]) {
+ types::ascription(p);
+ }
+ }
+ }
+ m.complete(p, PARAM);
+}
+
+fn variadic_param(p: &mut Parser<'_>) -> bool {
+ if p.at(T![:]) && p.nth_at(1, T![...]) {
+ p.bump(T![:]);
+ p.bump(T![...]);
+ true
+ } else {
+ false
+ }
+}
+
+// test self_param
+// impl S {
+// fn a(self) {}
+// fn b(&self,) {}
+// fn c(&'a self,) {}
+// fn d(&'a mut self, x: i32) {}
+// fn e(mut self) {}
+// }
+fn opt_self_param(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
+ if p.at(T![self]) || p.at(T![mut]) && p.nth(1) == T![self] {
+ p.eat(T![mut]);
+ self_as_name(p);
+ // test arb_self_types
+ // impl S {
+ // fn a(self: &Self) {}
+ // fn b(mut self: Box<Self>) {}
+ // }
+ if p.at(T![:]) {
+ types::ascription(p);
+ }
+ } else {
+ let la1 = p.nth(1);
+ let la2 = p.nth(2);
+ let la3 = p.nth(3);
+ if !matches!(
+ (p.current(), la1, la2, la3),
+ (T![&], T![self], _, _)
+ | (T![&], T![mut] | LIFETIME_IDENT, T![self], _)
+ | (T![&], LIFETIME_IDENT, T![mut], T![self])
+ ) {
+ return Err(m);
+ }
+ p.bump(T![&]);
+ if p.at(LIFETIME_IDENT) {
+ lifetime(p);
+ }
+ p.eat(T![mut]);
+ self_as_name(p);
+ }
+ m.complete(p, SELF_PARAM);
+ if !p.at(T![')']) {
+ p.expect(T![,]);
+ }
+ Ok(())
+}
+
+fn self_as_name(p: &mut Parser<'_>) {
+ let m = p.start();
+ p.bump(T![self]);
+ m.complete(p, NAME);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs
new file mode 100644
index 000000000..8de5d33a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs
@@ -0,0 +1,132 @@
+use super::*;
+
+pub(super) const PATH_FIRST: TokenSet =
+ TokenSet::new(&[IDENT, T![self], T![super], T![crate], T![Self], T![:], T![<]]);
+
+pub(super) fn is_path_start(p: &Parser<'_>) -> bool {
+ is_use_path_start(p) || p.at(T![<]) || p.at(T![Self])
+}
+
+pub(super) fn is_use_path_start(p: &Parser<'_>) -> bool {
+ match p.current() {
+ IDENT | T![self] | T![super] | T![crate] => true,
+ T![:] if p.at(T![::]) => true,
+ _ => false,
+ }
+}
+
+pub(super) fn use_path(p: &mut Parser<'_>) {
+ path(p, Mode::Use);
+}
+
+pub(crate) fn type_path(p: &mut Parser<'_>) {
+ path(p, Mode::Type);
+}
+
+pub(super) fn expr_path(p: &mut Parser<'_>) {
+ path(p, Mode::Expr);
+}
+
+pub(crate) fn type_path_for_qualifier(
+ p: &mut Parser<'_>,
+ qual: CompletedMarker,
+) -> CompletedMarker {
+ path_for_qualifier(p, Mode::Type, qual)
+}
+
+#[derive(Clone, Copy, Eq, PartialEq)]
+enum Mode {
+ Use,
+ Type,
+ Expr,
+}
+
+fn path(p: &mut Parser<'_>, mode: Mode) {
+ let path = p.start();
+ path_segment(p, mode, true);
+ let qual = path.complete(p, PATH);
+ path_for_qualifier(p, mode, qual);
+}
+
+fn path_for_qualifier(
+ p: &mut Parser<'_>,
+ mode: Mode,
+ mut qual: CompletedMarker,
+) -> CompletedMarker {
+ loop {
+ let use_tree = mode == Mode::Use && matches!(p.nth(2), T![*] | T!['{']);
+ if p.at(T![::]) && !use_tree {
+ let path = qual.precede(p);
+ p.bump(T![::]);
+ path_segment(p, mode, false);
+ let path = path.complete(p, PATH);
+ qual = path;
+ } else {
+ return qual;
+ }
+ }
+}
+
+fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
+ let m = p.start();
+ // test qual_paths
+ // type X = <A as B>::Output;
+ // fn foo() { <usize as Default>::default(); }
+ if first && p.eat(T![<]) {
+ types::type_(p);
+ if p.eat(T![as]) {
+ if is_use_path_start(p) {
+ types::path_type(p);
+ } else {
+ p.error("expected a trait");
+ }
+ }
+ p.expect(T![>]);
+ } else {
+ let mut empty = true;
+ if first {
+ p.eat(T![::]);
+ empty = false;
+ }
+ match p.current() {
+ IDENT => {
+ name_ref(p);
+ opt_path_type_args(p, mode);
+ }
+ // test crate_path
+ // use crate::foo;
+ T![self] | T![super] | T![crate] | T![Self] => {
+ let m = p.start();
+ p.bump_any();
+ m.complete(p, NAME_REF);
+ }
+ _ => {
+ p.err_recover("expected identifier", items::ITEM_RECOVERY_SET);
+ if empty {
+ // test_err empty_segment
+ // use crate::;
+ m.abandon(p);
+ return;
+ }
+ }
+ };
+ }
+ m.complete(p, PATH_SEGMENT);
+}
+
+fn opt_path_type_args(p: &mut Parser<'_>, mode: Mode) {
+ match mode {
+ Mode::Use => {}
+ Mode::Type => {
+ // test path_fn_trait_args
+ // type F = Box<Fn(i32) -> ()>;
+ if p.at(T!['(']) {
+ params::param_list_fn_trait(p);
+ opt_ret_type(p);
+ } else {
+ generic_args::opt_generic_arg_list(p, false);
+ }
+ }
+ Mode::Expr => generic_args::opt_generic_arg_list(p, true),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
new file mode 100644
index 000000000..4cbf10306
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
@@ -0,0 +1,440 @@
+use super::*;
+
+pub(super) const PATTERN_FIRST: TokenSet =
+ expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
+ T![box],
+ T![ref],
+ T![mut],
+ T!['('],
+ T!['['],
+ T![&],
+ T![_],
+ T![-],
+ T![.],
+ ]));
+
+pub(crate) fn pattern(p: &mut Parser<'_>) {
+ pattern_r(p, PAT_RECOVERY_SET);
+}
+
+/// Parses a pattern list separated by pipes `|`.
+pub(super) fn pattern_top(p: &mut Parser<'_>) {
+ pattern_top_r(p, PAT_RECOVERY_SET);
+}
+
+pub(crate) fn pattern_single(p: &mut Parser<'_>) {
+ pattern_single_r(p, PAT_RECOVERY_SET);
+}
+
+/// Parses a pattern list separated by pipes `|`
+/// using the given `recovery_set`.
+pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
+ p.eat(T![|]);
+ pattern_r(p, recovery_set);
+}
+
+/// Parses a pattern list separated by pipes `|`, with no leading `|`, using the
+/// given `recovery_set`.
+
+// test or_pattern
+// fn main() {
+// match () {
+// (_ | _) => (),
+// &(_ | _) => (),
+// (_ | _,) => (),
+// [_ | _,] => (),
+// }
+// }
+fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
+ let m = p.start();
+ pattern_single_r(p, recovery_set);
+
+ if !p.at(T![|]) {
+ m.abandon(p);
+ return;
+ }
+ while p.eat(T![|]) {
+ pattern_single_r(p, recovery_set);
+ }
+ m.complete(p, OR_PAT);
+}
+
+fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
+ if let Some(lhs) = atom_pat(p, recovery_set) {
+ // test range_pat
+ // fn main() {
+ // match 92 {
+ // 0 ... 100 => (),
+ // 101 ..= 200 => (),
+ // 200 .. 301 => (),
+ // 302 .. => (),
+ // }
+ //
+ // match Some(10 as u8) {
+ // Some(0) | None => (),
+ // Some(1..) => ()
+ // }
+ //
+ // match (10 as u8, 5 as u8) {
+ // (0, _) => (),
+ // (1.., _) => ()
+ // }
+ // }
+
+ // FIXME: support half_open_range_patterns (`..=2`),
+ // exclusive_range_pattern (`..5`) with missing lhs
+ for range_op in [T![...], T![..=], T![..]] {
+ if p.at(range_op) {
+ let m = lhs.precede(p);
+ p.bump(range_op);
+
+ // `0 .. =>` or `let 0 .. =` or `Some(0 .. )`
+ // ^ ^ ^
+ if p.at(T![=]) | p.at(T![')']) | p.at(T![,]) {
+ // test half_open_range_pat
+ // fn f() { let 0 .. = 1u32; }
+ } else {
+ atom_pat(p, recovery_set);
+ }
+ m.complete(p, RANGE_PAT);
+ return;
+ }
+ }
+ }
+}
+
+const PAT_RECOVERY_SET: TokenSet =
+ TokenSet::new(&[T![let], T![if], T![while], T![loop], T![match], T![')'], T![,], T![=]]);
+
+fn atom_pat(p: &mut Parser<'_>, recovery_set: TokenSet) -> Option<CompletedMarker> {
+ let m = match p.current() {
+ T![box] => box_pat(p),
+ T![ref] | T![mut] => ident_pat(p, true),
+ T![const] => const_block_pat(p),
+ IDENT => match p.nth(1) {
+ // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro
+ // (T![x]).
+ T!['('] | T!['{'] | T![!] => path_or_macro_pat(p),
+ T![:] if p.nth_at(1, T![::]) => path_or_macro_pat(p),
+ _ => ident_pat(p, true),
+ },
+
+ // test type_path_in_pattern
+ // fn main() { let <_>::Foo = (); }
+ _ if paths::is_path_start(p) => path_or_macro_pat(p),
+ _ if is_literal_pat_start(p) => literal_pat(p),
+
+ T![.] if p.at(T![..]) => rest_pat(p),
+ T![_] => wildcard_pat(p),
+ T![&] => ref_pat(p),
+ T!['('] => tuple_pat(p),
+ T!['['] => slice_pat(p),
+
+ _ => {
+ p.err_recover("expected pattern", recovery_set);
+ return None;
+ }
+ };
+
+ Some(m)
+}
+
+fn is_literal_pat_start(p: &Parser<'_>) -> bool {
+ p.at(T![-]) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER)
+ || p.at_ts(expressions::LITERAL_FIRST)
+}
+
+// test literal_pattern
+// fn main() {
+// match () {
+// -1 => (),
+// 92 => (),
+// 'c' => (),
+// "hello" => (),
+// }
+// }
+fn literal_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(is_literal_pat_start(p));
+ let m = p.start();
+ if p.at(T![-]) {
+ p.bump(T![-]);
+ }
+ expressions::literal(p);
+ m.complete(p, LITERAL_PAT)
+}
+
+// test path_part
+// fn foo() {
+// let foo::Bar = ();
+// let ::Bar = ();
+// let Bar { .. } = ();
+// let Bar(..) = ();
+// }
+fn path_or_macro_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(paths::is_path_start(p));
+ let m = p.start();
+ paths::expr_path(p);
+ let kind = match p.current() {
+ T!['('] => {
+ tuple_pat_fields(p);
+ TUPLE_STRUCT_PAT
+ }
+ T!['{'] => {
+ record_pat_field_list(p);
+ RECORD_PAT
+ }
+ // test marco_pat
+ // fn main() {
+ // let m!(x) = 0;
+ // }
+ T![!] => {
+ items::macro_call_after_excl(p);
+ return m.complete(p, MACRO_CALL).precede(p).complete(p, MACRO_PAT);
+ }
+ _ => PATH_PAT,
+ };
+ m.complete(p, kind)
+}
+
+// test tuple_pat_fields
+// fn foo() {
+// let S() = ();
+// let S(_) = ();
+// let S(_,) = ();
+// let S(_, .. , x) = ();
+// }
+fn tuple_pat_fields(p: &mut Parser<'_>) {
+ assert!(p.at(T!['(']));
+ p.bump(T!['(']);
+ pat_list(p, T![')']);
+ p.expect(T![')']);
+}
+
+// test record_pat_field
+// fn foo() {
+// let S { 0: 1 } = ();
+// let S { x: 1 } = ();
+// let S { #[cfg(any())] x: 1 } = ();
+// }
+fn record_pat_field(p: &mut Parser<'_>) {
+ match p.current() {
+ IDENT | INT_NUMBER if p.nth(1) == T![:] => {
+ name_ref_or_index(p);
+ p.bump(T![:]);
+ pattern(p);
+ }
+ T![box] => {
+ // FIXME: not all box patterns should be allowed
+ box_pat(p);
+ }
+ T![ref] | T![mut] | IDENT => {
+ ident_pat(p, false);
+ }
+ _ => {
+ p.err_and_bump("expected identifier");
+ }
+ }
+}
+
+// test record_pat_field_list
+// fn foo() {
+// let S {} = ();
+// let S { f, ref mut g } = ();
+// let S { h: _, ..} = ();
+// let S { h: _, } = ();
+// let S { #[cfg(any())] .. } = ();
+// }
+fn record_pat_field_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(EOF) && !p.at(T!['}']) {
+ let m = p.start();
+ attributes::outer_attrs(p);
+
+ match p.current() {
+ // A trailing `..` is *not* treated as a REST_PAT.
+ T![.] if p.at(T![..]) => {
+ p.bump(T![..]);
+ m.complete(p, REST_PAT);
+ }
+ T!['{'] => {
+ error_block(p, "expected ident");
+ m.abandon(p);
+ }
+ _ => {
+ record_pat_field(p);
+ m.complete(p, RECORD_PAT_FIELD);
+ }
+ }
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, RECORD_PAT_FIELD_LIST);
+}
+
+// test placeholder_pat
+// fn main() { let _ = (); }
+fn wildcard_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![_]));
+ let m = p.start();
+ p.bump(T![_]);
+ m.complete(p, WILDCARD_PAT)
+}
+
+// test dot_dot_pat
+// fn main() {
+// let .. = ();
+// //
+// // Tuples
+// //
+// let (a, ..) = ();
+// let (a, ..,) = ();
+// let Tuple(a, ..) = ();
+// let Tuple(a, ..,) = ();
+// let (.., ..) = ();
+// let Tuple(.., ..) = ();
+// let (.., a, ..) = ();
+// let Tuple(.., a, ..) = ();
+// //
+// // Slices
+// //
+// let [..] = ();
+// let [head, ..] = ();
+// let [head, tail @ ..] = ();
+// let [head, .., cons] = ();
+// let [head, mid @ .., cons] = ();
+// let [head, .., .., cons] = ();
+// let [head, .., mid, tail @ ..] = ();
+// let [head, .., mid, .., cons] = ();
+// }
+fn rest_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![..]));
+ let m = p.start();
+ p.bump(T![..]);
+ m.complete(p, REST_PAT)
+}
+
+// test ref_pat
+// fn main() {
+// let &a = ();
+// let &mut b = ();
+// }
+fn ref_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![&]));
+ let m = p.start();
+ p.bump(T![&]);
+ p.eat(T![mut]);
+ pattern_single(p);
+ m.complete(p, REF_PAT)
+}
+
+// test tuple_pat
+// fn main() {
+// let (a, b, ..) = ();
+// let (a,) = ();
+// let (..) = ();
+// let () = ();
+// }
+fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T!['(']));
+ let m = p.start();
+ p.bump(T!['(']);
+ let mut has_comma = false;
+ let mut has_pat = false;
+ let mut has_rest = false;
+ while !p.at(EOF) && !p.at(T![')']) {
+ has_pat = true;
+ if !p.at_ts(PATTERN_FIRST) {
+ p.error("expected a pattern");
+ break;
+ }
+ has_rest |= p.at(T![..]);
+
+ pattern(p);
+ if !p.at(T![')']) {
+ has_comma = true;
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T![')']);
+
+ m.complete(p, if !has_comma && !has_rest && has_pat { PAREN_PAT } else { TUPLE_PAT })
+}
+
+// test slice_pat
+// fn main() {
+// let [a, b, ..] = [];
+// }
+fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T!['[']));
+ let m = p.start();
+ p.bump(T!['[']);
+ pat_list(p, T![']']);
+ p.expect(T![']']);
+ m.complete(p, SLICE_PAT)
+}
+
+fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) {
+ while !p.at(EOF) && !p.at(ket) {
+ if !p.at_ts(PATTERN_FIRST) {
+ p.error("expected a pattern");
+ break;
+ }
+
+ pattern(p);
+ if !p.at(ket) {
+ p.expect(T![,]);
+ }
+ }
+}
+
+// test bind_pat
+// fn main() {
+// let a = ();
+// let mut b = ();
+// let ref c = ();
+// let ref mut d = ();
+// let e @ _ = ();
+// let ref mut f @ g @ _ = ();
+// }
+fn ident_pat(p: &mut Parser<'_>, with_at: bool) -> CompletedMarker {
+ assert!(matches!(p.current(), T![ref] | T![mut] | IDENT));
+ let m = p.start();
+ p.eat(T![ref]);
+ p.eat(T![mut]);
+ name_r(p, PAT_RECOVERY_SET);
+ if with_at && p.eat(T![@]) {
+ pattern_single(p);
+ }
+ m.complete(p, IDENT_PAT)
+}
+
+// test box_pat
+// fn main() {
+// let box i = ();
+// let box Outer { box i, j: box Inner(box &x) } = ();
+// let box ref mut i = ();
+// }
+fn box_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![box]));
+ let m = p.start();
+ p.bump(T![box]);
+ pattern_single(p);
+ m.complete(p, BOX_PAT)
+}
+
+// test const_block_pat
+// fn main() {
+// let const { 15 } = ();
+// let const { foo(); bar() } = ();
+// }
+fn const_block_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![const]));
+ let m = p.start();
+ p.bump(T![const]);
+ expressions::block_expr(p);
+ m.complete(p, CONST_BLOCK_PAT)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs
new file mode 100644
index 000000000..5c6e18fee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs
@@ -0,0 +1,352 @@
+use super::*;
+
+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
+ T!['('],
+ T!['['],
+ T![<],
+ T![!],
+ T![*],
+ T![&],
+ T![_],
+ T![fn],
+ T![unsafe],
+ T![extern],
+ T![for],
+ T![impl],
+ T![dyn],
+ T![Self],
+]));
+
+const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
+ T![')'],
+ T![,],
+ // test_err struct_field_recover
+ // struct S { f pub g: () }
+ T![pub],
+]);
+
+pub(crate) fn type_(p: &mut Parser<'_>) {
+ type_with_bounds_cond(p, true);
+}
+
+pub(super) fn type_no_bounds(p: &mut Parser<'_>) {
+ type_with_bounds_cond(p, false);
+}
+
+fn type_with_bounds_cond(p: &mut Parser<'_>, allow_bounds: bool) {
+ match p.current() {
+ T!['('] => paren_or_tuple_type(p),
+ T![!] => never_type(p),
+ T![*] => ptr_type(p),
+ T!['['] => array_or_slice_type(p),
+ T![&] => ref_type(p),
+ T![_] => infer_type(p),
+ T![fn] | T![unsafe] | T![extern] => fn_ptr_type(p),
+ T![for] => for_type(p, allow_bounds),
+ T![impl] => impl_trait_type(p),
+ T![dyn] => dyn_trait_type(p),
+ // Some path types are not allowed to have bounds (no plus)
+ T![<] => path_type_(p, allow_bounds),
+ _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds),
+ _ => {
+ p.err_recover("expected type", TYPE_RECOVERY_SET);
+ }
+ }
+}
+
+pub(super) fn ascription(p: &mut Parser<'_>) {
+ assert!(p.at(T![:]));
+ p.bump(T![:]);
+ if p.at(T![=]) {
+ // recover from `let x: = expr;`, `const X: = expr;` and similar constructs
+ // hopefully no type starts with `=`
+ p.error("missing type");
+ return;
+ }
+ type_(p);
+}
+
+fn paren_or_tuple_type(p: &mut Parser<'_>) {
+ assert!(p.at(T!['(']));
+ let m = p.start();
+ p.bump(T!['(']);
+ let mut n_types: u32 = 0;
+ let mut trailing_comma: bool = false;
+ while !p.at(EOF) && !p.at(T![')']) {
+ n_types += 1;
+ type_(p);
+ if p.eat(T![,]) {
+ trailing_comma = true;
+ } else {
+ trailing_comma = false;
+ break;
+ }
+ }
+ p.expect(T![')']);
+
+ let kind = if n_types == 1 && !trailing_comma {
+ // test paren_type
+ // type T = (i32);
+ PAREN_TYPE
+ } else {
+ // test unit_type
+ // type T = ();
+
+ // test singleton_tuple_type
+ // type T = (i32,);
+ TUPLE_TYPE
+ };
+ m.complete(p, kind);
+}
+
+// test never_type
+// type Never = !;
+fn never_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![!]));
+ let m = p.start();
+ p.bump(T![!]);
+ m.complete(p, NEVER_TYPE);
+}
+
+fn ptr_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![*]));
+ let m = p.start();
+ p.bump(T![*]);
+
+ match p.current() {
+ // test pointer_type_mut
+ // type M = *mut ();
+ // type C = *mut ();
+ T![mut] | T![const] => p.bump_any(),
+ _ => {
+ // test_err pointer_type_no_mutability
+ // type T = *();
+ p.error(
+ "expected mut or const in raw pointer type \
+ (use `*mut T` or `*const T` as appropriate)",
+ );
+ }
+ };
+
+ type_no_bounds(p);
+ m.complete(p, PTR_TYPE);
+}
+
+fn array_or_slice_type(p: &mut Parser<'_>) {
+ assert!(p.at(T!['[']));
+ let m = p.start();
+ p.bump(T!['[']);
+
+ type_(p);
+ let kind = match p.current() {
+ // test slice_type
+ // type T = [()];
+ T![']'] => {
+ p.bump(T![']']);
+ SLICE_TYPE
+ }
+
+ // test array_type
+ // type T = [(); 92];
+ T![;] => {
+ p.bump(T![;]);
+ expressions::expr(p);
+ p.expect(T![']']);
+ ARRAY_TYPE
+ }
+ // test_err array_type_missing_semi
+ // type T = [() 92];
+ _ => {
+ p.error("expected `;` or `]`");
+ SLICE_TYPE
+ }
+ };
+ m.complete(p, kind);
+}
+
+// test reference_type;
+// type A = &();
+// type B = &'static ();
+// type C = &mut ();
+fn ref_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![&]));
+ let m = p.start();
+ p.bump(T![&]);
+ if p.at(LIFETIME_IDENT) {
+ lifetime(p);
+ }
+ p.eat(T![mut]);
+ type_no_bounds(p);
+ m.complete(p, REF_TYPE);
+}
+
+// test placeholder_type
+// type Placeholder = _;
+fn infer_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![_]));
+ let m = p.start();
+ p.bump(T![_]);
+ m.complete(p, INFER_TYPE);
+}
+
+// test fn_pointer_type
+// type A = fn();
+// type B = unsafe fn();
+// type C = unsafe extern "C" fn();
+// type D = extern "C" fn ( u8 , ... ) -> u8;
+fn fn_ptr_type(p: &mut Parser<'_>) {
+ let m = p.start();
+ p.eat(T![unsafe]);
+ if p.at(T![extern]) {
+ abi(p);
+ }
+ // test_err fn_pointer_type_missing_fn
+ // type F = unsafe ();
+ if !p.eat(T![fn]) {
+ m.abandon(p);
+ p.error("expected `fn`");
+ return;
+ }
+ if p.at(T!['(']) {
+ params::param_list_fn_ptr(p);
+ } else {
+ p.error("expected parameters");
+ }
+ // test fn_pointer_type_with_ret
+ // type F = fn() -> ();
+ opt_ret_type(p);
+ m.complete(p, FN_PTR_TYPE);
+}
+
+pub(super) fn for_binder(p: &mut Parser<'_>) {
+ assert!(p.at(T![for]));
+ p.bump(T![for]);
+ if p.at(T![<]) {
+ generic_params::opt_generic_param_list(p);
+ } else {
+ p.error("expected `<`");
+ }
+}
+
+// test for_type
+// type A = for<'a> fn() -> ();
+// type B = for<'a> unsafe extern "C" fn(&'a ()) -> ();
+// type Obj = for<'a> PartialEq<&'a i32>;
+pub(super) fn for_type(p: &mut Parser<'_>, allow_bounds: bool) {
+ assert!(p.at(T![for]));
+ let m = p.start();
+ for_binder(p);
+ match p.current() {
+ T![fn] | T![unsafe] | T![extern] => {}
+ // OK: legacy trait object format
+ _ if paths::is_use_path_start(p) => {}
+ _ => {
+ p.error("expected a function pointer or path");
+ }
+ }
+ type_no_bounds(p);
+ let completed = m.complete(p, FOR_TYPE);
+
+ // test no_dyn_trait_leading_for
+ // type A = for<'a> Test<'a> + Send;
+ if allow_bounds {
+ opt_type_bounds_as_dyn_trait_type(p, completed);
+ }
+}
+
+// test impl_trait_type
+// type A = impl Iterator<Item=Foo<'a>> + 'a;
+fn impl_trait_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![impl]));
+ let m = p.start();
+ p.bump(T![impl]);
+ generic_params::bounds_without_colon(p);
+ m.complete(p, IMPL_TRAIT_TYPE);
+}
+
+// test dyn_trait_type
+// type A = dyn Iterator<Item=Foo<'a>> + 'a;
+fn dyn_trait_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![dyn]));
+ let m = p.start();
+ p.bump(T![dyn]);
+ generic_params::bounds_without_colon(p);
+ m.complete(p, DYN_TRAIT_TYPE);
+}
+
+// test path_type
+// type A = Foo;
+// type B = ::Foo;
+// type C = self::Foo;
+// type D = super::Foo;
+pub(super) fn path_type(p: &mut Parser<'_>) {
+ path_type_(p, true);
+}
+
+// test macro_call_type
+// type A = foo!();
+// type B = crate::foo!();
+fn path_or_macro_type_(p: &mut Parser<'_>, allow_bounds: bool) {
+ assert!(paths::is_path_start(p));
+ let r = p.start();
+ let m = p.start();
+
+ paths::type_path(p);
+
+ let kind = if p.at(T![!]) && !p.at(T![!=]) {
+ items::macro_call_after_excl(p);
+ m.complete(p, MACRO_CALL);
+ MACRO_TYPE
+ } else {
+ m.abandon(p);
+ PATH_TYPE
+ };
+
+ let path = r.complete(p, kind);
+
+ if allow_bounds {
+ opt_type_bounds_as_dyn_trait_type(p, path);
+ }
+}
+
+pub(super) fn path_type_(p: &mut Parser<'_>, allow_bounds: bool) {
+ assert!(paths::is_path_start(p));
+ let m = p.start();
+ paths::type_path(p);
+
+ // test path_type_with_bounds
+ // fn foo() -> Box<T + 'f> {}
+ // fn foo() -> Box<dyn T + 'f> {}
+ let path = m.complete(p, PATH_TYPE);
+ if allow_bounds {
+ opt_type_bounds_as_dyn_trait_type(p, path);
+ }
+}
+
+/// This turns a parsed PATH_TYPE or FOR_TYPE optionally into a DYN_TRAIT_TYPE
+/// with a TYPE_BOUND_LIST
+fn opt_type_bounds_as_dyn_trait_type(p: &mut Parser<'_>, type_marker: CompletedMarker) {
+ assert!(matches!(
+ type_marker.kind(),
+ SyntaxKind::PATH_TYPE | SyntaxKind::FOR_TYPE | SyntaxKind::MACRO_TYPE
+ ));
+ if !p.at(T![+]) {
+ return;
+ }
+
+ // First create a TYPE_BOUND from the completed PATH_TYPE
+ let m = type_marker.precede(p).complete(p, TYPE_BOUND);
+
+ // Next setup a marker for the TYPE_BOUND_LIST
+ let m = m.precede(p);
+
+ // This gets consumed here so it gets properly set
+ // in the TYPE_BOUND_LIST
+ p.eat(T![+]);
+
+ // Parse rest of the bounds into the TYPE_BOUND_LIST
+ let m = generic_params::bounds_without_colon_m(p, m);
+
+ // Finally precede everything with DYN_TRAIT_TYPE
+ m.precede(p).complete(p, DYN_TRAIT_TYPE);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/input.rs b/src/tools/rust-analyzer/crates/parser/src/input.rs
new file mode 100644
index 000000000..9504bd4d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/input.rs
@@ -0,0 +1,88 @@
+//! See [`Input`].
+
+use crate::SyntaxKind;
+
+#[allow(non_camel_case_types)]
+type bits = u64;
+
+/// Input for the parser -- a sequence of tokens.
+///
+/// As of now, parser doesn't have access to the *text* of the tokens, and makes
+/// decisions based solely on their classification. Unlike `LexerToken`, the
+/// `Tokens` doesn't include whitespace and comments. Main input to the parser.
+///
+/// Struct of arrays internally, but this shouldn't really matter.
+#[derive(Default)]
+pub struct Input {
+ kind: Vec<SyntaxKind>,
+ joint: Vec<bits>,
+ contextual_kind: Vec<SyntaxKind>,
+}
+
+/// `pub` impl used by callers to create `Tokens`.
+impl Input {
+ #[inline]
+ pub fn push(&mut self, kind: SyntaxKind) {
+ self.push_impl(kind, SyntaxKind::EOF)
+ }
+ #[inline]
+ pub fn push_ident(&mut self, contextual_kind: SyntaxKind) {
+ self.push_impl(SyntaxKind::IDENT, contextual_kind)
+ }
+ /// Sets jointness for the last token we've pushed.
+ ///
+ /// This is a separate API rather than an argument to the `push` to make it
+ /// convenient both for textual and mbe tokens. With text, you know whether
+ /// the *previous* token was joint, with mbe, you know whether the *current*
+ /// one is joint. This API allows for both styles of usage:
+ ///
+ /// ```
+ /// // In text:
+ /// tokens.was_joint(prev_joint);
+ /// tokens.push(curr);
+ ///
+ /// // In MBE:
+ /// tokens.push(curr);
+ /// tokens.was_joint(curr_joint)
+ /// ```
+ #[inline]
+ pub fn was_joint(&mut self) {
+ let n = self.len() - 1;
+ let (idx, b_idx) = self.bit_index(n);
+ self.joint[idx] |= 1 << b_idx;
+ }
+ #[inline]
+ fn push_impl(&mut self, kind: SyntaxKind, contextual_kind: SyntaxKind) {
+ let idx = self.len();
+ if idx % (bits::BITS as usize) == 0 {
+ self.joint.push(0);
+ }
+ self.kind.push(kind);
+ self.contextual_kind.push(contextual_kind);
+ }
+}
+
+/// pub(crate) impl used by the parser to consume `Tokens`.
+impl Input {
+ pub(crate) fn kind(&self, idx: usize) -> SyntaxKind {
+ self.kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
+ }
+ pub(crate) fn contextual_kind(&self, idx: usize) -> SyntaxKind {
+ self.contextual_kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
+ }
+ pub(crate) fn is_joint(&self, n: usize) -> bool {
+ let (idx, b_idx) = self.bit_index(n);
+ self.joint[idx] & 1 << b_idx != 0
+ }
+}
+
+impl Input {
+ fn bit_index(&self, n: usize) -> (usize, usize) {
+ let idx = n / (bits::BITS as usize);
+ let b_idx = n % (bits::BITS as usize);
+ (idx, b_idx)
+ }
+ fn len(&self) -> usize {
+ self.kind.len()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
new file mode 100644
index 000000000..f4b9988ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
@@ -0,0 +1,300 @@
+//! Lexing `&str` into a sequence of Rust tokens.
+//!
+//! Note that strictly speaking the parser in this crate is not required to work
+//! on tokens which originated from text. Macros, e.g., can synthesize tokens out
+//! of thin air. So, ideally, lexer should be an orthogonal crate. It is however
+//! convenient to include a text-based lexer here!
+//!
+//! Note that these tokens, unlike the tokens we feed into the parser, do
+//! include info about comments and whitespace.
+
+use std::ops;
+
+use crate::{
+ SyntaxKind::{self, *},
+ T,
+};
+
+pub struct LexedStr<'a> {
+ text: &'a str,
+ kind: Vec<SyntaxKind>,
+ start: Vec<u32>,
+ error: Vec<LexError>,
+}
+
+struct LexError {
+ msg: String,
+ token: u32,
+}
+
+impl<'a> LexedStr<'a> {
+ pub fn new(text: &'a str) -> LexedStr<'a> {
+ let mut conv = Converter::new(text);
+ if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
+ conv.res.push(SHEBANG, conv.offset);
+ conv.offset = shebang_len;
+ };
+
+ for token in rustc_lexer::tokenize(&text[conv.offset..]) {
+ let token_text = &text[conv.offset..][..token.len];
+
+ conv.extend_token(&token.kind, token_text);
+ }
+
+ conv.finalize_with_eof()
+ }
+
+ pub fn single_token(text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
+ if text.is_empty() {
+ return None;
+ }
+
+ let token = rustc_lexer::first_token(text);
+ if token.len != text.len() {
+ return None;
+ }
+
+ let mut conv = Converter::new(text);
+ conv.extend_token(&token.kind, text);
+ match &*conv.res.kind {
+ [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg.clone()))),
+ _ => None,
+ }
+ }
+
+ pub fn as_str(&self) -> &str {
+ self.text
+ }
+
+ pub fn len(&self) -> usize {
+ self.kind.len() - 1
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ pub fn kind(&self, i: usize) -> SyntaxKind {
+ assert!(i < self.len());
+ self.kind[i]
+ }
+
+ pub fn text(&self, i: usize) -> &str {
+ self.range_text(i..i + 1)
+ }
+ pub fn range_text(&self, r: ops::Range<usize>) -> &str {
+ assert!(r.start < r.end && r.end <= self.len());
+ let lo = self.start[r.start] as usize;
+ let hi = self.start[r.end] as usize;
+ &self.text[lo..hi]
+ }
+
+ // Naming is hard.
+ pub fn text_range(&self, i: usize) -> ops::Range<usize> {
+ assert!(i < self.len());
+ let lo = self.start[i] as usize;
+ let hi = self.start[i + 1] as usize;
+ lo..hi
+ }
+ pub fn text_start(&self, i: usize) -> usize {
+ assert!(i <= self.len());
+ self.start[i] as usize
+ }
+ pub fn text_len(&self, i: usize) -> usize {
+ assert!(i < self.len());
+ let r = self.text_range(i);
+ r.end - r.start
+ }
+
+ pub fn error(&self, i: usize) -> Option<&str> {
+ assert!(i < self.len());
+ let err = self.error.binary_search_by_key(&(i as u32), |i| i.token).ok()?;
+ Some(self.error[err].msg.as_str())
+ }
+
+ pub fn errors(&self) -> impl Iterator<Item = (usize, &str)> + '_ {
+ self.error.iter().map(|it| (it.token as usize, it.msg.as_str()))
+ }
+
+ fn push(&mut self, kind: SyntaxKind, offset: usize) {
+ self.kind.push(kind);
+ self.start.push(offset as u32);
+ }
+}
+
+struct Converter<'a> {
+ res: LexedStr<'a>,
+ offset: usize,
+}
+
+impl<'a> Converter<'a> {
+ fn new(text: &'a str) -> Self {
+ Self {
+ res: LexedStr { text, kind: Vec::new(), start: Vec::new(), error: Vec::new() },
+ offset: 0,
+ }
+ }
+
+ fn finalize_with_eof(mut self) -> LexedStr<'a> {
+ self.res.push(EOF, self.offset);
+ self.res
+ }
+
+ fn push(&mut self, kind: SyntaxKind, len: usize, err: Option<&str>) {
+ self.res.push(kind, self.offset);
+ self.offset += len;
+
+ if let Some(err) = err {
+ let token = self.res.len() as u32;
+ let msg = err.to_string();
+ self.res.error.push(LexError { msg, token });
+ }
+ }
+
+ fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, token_text: &str) {
+ // A note on an intended tradeoff:
+ // We drop some useful information here (see patterns with double dots `..`)
+ // Storing that info in `SyntaxKind` is not possible due to its layout requirements of
+ // being `u16` that come from `rowan::SyntaxKind`.
+ let mut err = "";
+
+ let syntax_kind = {
+ match kind {
+ rustc_lexer::TokenKind::LineComment { doc_style: _ } => COMMENT,
+ rustc_lexer::TokenKind::BlockComment { doc_style: _, terminated } => {
+ if !terminated {
+ err = "Missing trailing `*/` symbols to terminate the block comment";
+ }
+ COMMENT
+ }
+
+ rustc_lexer::TokenKind::Whitespace => WHITESPACE,
+
+ rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE,
+ rustc_lexer::TokenKind::Ident => {
+ SyntaxKind::from_keyword(token_text).unwrap_or(IDENT)
+ }
+
+ rustc_lexer::TokenKind::RawIdent => IDENT,
+ rustc_lexer::TokenKind::Literal { kind, .. } => {
+ self.extend_literal(token_text.len(), kind);
+ return;
+ }
+
+ rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+ if *starts_with_number {
+ err = "Lifetime name cannot start with a number";
+ }
+ LIFETIME_IDENT
+ }
+
+ rustc_lexer::TokenKind::Semi => T![;],
+ rustc_lexer::TokenKind::Comma => T![,],
+ rustc_lexer::TokenKind::Dot => T![.],
+ rustc_lexer::TokenKind::OpenParen => T!['('],
+ rustc_lexer::TokenKind::CloseParen => T![')'],
+ rustc_lexer::TokenKind::OpenBrace => T!['{'],
+ rustc_lexer::TokenKind::CloseBrace => T!['}'],
+ rustc_lexer::TokenKind::OpenBracket => T!['['],
+ rustc_lexer::TokenKind::CloseBracket => T![']'],
+ rustc_lexer::TokenKind::At => T![@],
+ rustc_lexer::TokenKind::Pound => T![#],
+ rustc_lexer::TokenKind::Tilde => T![~],
+ rustc_lexer::TokenKind::Question => T![?],
+ rustc_lexer::TokenKind::Colon => T![:],
+ rustc_lexer::TokenKind::Dollar => T![$],
+ rustc_lexer::TokenKind::Eq => T![=],
+ rustc_lexer::TokenKind::Bang => T![!],
+ rustc_lexer::TokenKind::Lt => T![<],
+ rustc_lexer::TokenKind::Gt => T![>],
+ rustc_lexer::TokenKind::Minus => T![-],
+ rustc_lexer::TokenKind::And => T![&],
+ rustc_lexer::TokenKind::Or => T![|],
+ rustc_lexer::TokenKind::Plus => T![+],
+ rustc_lexer::TokenKind::Star => T![*],
+ rustc_lexer::TokenKind::Slash => T![/],
+ rustc_lexer::TokenKind::Caret => T![^],
+ rustc_lexer::TokenKind::Percent => T![%],
+ rustc_lexer::TokenKind::Unknown => ERROR,
+ }
+ };
+
+ let err = if err.is_empty() { None } else { Some(err) };
+ self.push(syntax_kind, token_text.len(), err);
+ }
+
+ fn extend_literal(&mut self, len: usize, kind: &rustc_lexer::LiteralKind) {
+ let mut err = "";
+
+ let syntax_kind = match *kind {
+ rustc_lexer::LiteralKind::Int { empty_int, base: _ } => {
+ if empty_int {
+ err = "Missing digits after the integer base prefix";
+ }
+ INT_NUMBER
+ }
+ rustc_lexer::LiteralKind::Float { empty_exponent, base: _ } => {
+ if empty_exponent {
+ err = "Missing digits after the exponent symbol";
+ }
+ FLOAT_NUMBER
+ }
+ rustc_lexer::LiteralKind::Char { terminated } => {
+ if !terminated {
+ err = "Missing trailing `'` symbol to terminate the character literal";
+ }
+ CHAR
+ }
+ rustc_lexer::LiteralKind::Byte { terminated } => {
+ if !terminated {
+ err = "Missing trailing `'` symbol to terminate the byte literal";
+ }
+ BYTE
+ }
+ rustc_lexer::LiteralKind::Str { terminated } => {
+ if !terminated {
+ err = "Missing trailing `\"` symbol to terminate the string literal";
+ }
+ STRING
+ }
+ rustc_lexer::LiteralKind::ByteStr { terminated } => {
+ if !terminated {
+ err = "Missing trailing `\"` symbol to terminate the byte string literal";
+ }
+ BYTE_STRING
+ }
+ rustc_lexer::LiteralKind::RawStr { err: raw_str_err, .. } => {
+ if let Some(raw_str_err) = raw_str_err {
+ err = match raw_str_err {
+ rustc_lexer::RawStrError::InvalidStarter { .. } => "Missing `\"` symbol after `#` symbols to begin the raw string literal",
+ rustc_lexer::RawStrError::NoTerminator { expected, found, .. } => if expected == found {
+ "Missing trailing `\"` to terminate the raw string literal"
+ } else {
+ "Missing trailing `\"` with `#` symbols to terminate the raw string literal"
+ },
+ rustc_lexer::RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols",
+ };
+ };
+ STRING
+ }
+ rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => {
+ if let Some(raw_str_err) = raw_str_err {
+ err = match raw_str_err {
+ rustc_lexer::RawStrError::InvalidStarter { .. } => "Missing `\"` symbol after `#` symbols to begin the raw byte string literal",
+ rustc_lexer::RawStrError::NoTerminator { expected, found, .. } => if expected == found {
+ "Missing trailing `\"` to terminate the raw byte string literal"
+ } else {
+ "Missing trailing `\"` with `#` symbols to terminate the raw byte string literal"
+ },
+ rustc_lexer::RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw byte strings may be delimited by up to 65535 `#` symbols",
+ };
+ };
+
+ BYTE_STRING
+ }
+ };
+
+ let err = if err.is_empty() { None } else { Some(err) };
+ self.push(syntax_kind, len, err);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
new file mode 100644
index 000000000..87be47927
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -0,0 +1,181 @@
+//! The Rust parser.
+//!
+//! NOTE: The crate is undergoing refactors, don't believe everything the docs
+//! say :-)
+//!
+//! The parser doesn't know about concrete representation of tokens and syntax
+//! trees. Abstract [`TokenSource`] and [`TreeSink`] traits are used instead. As
+//! a consequence, this crate does not contain a lexer.
+//!
+//! The [`Parser`] struct from the [`parser`] module is a cursor into the
+//! sequence of tokens. Parsing routines use [`Parser`] to inspect current
+//! state and advance the parsing.
+//!
+//! The actual parsing happens in the [`grammar`] module.
+//!
+//! Tests for this crate live in the `syntax` crate.
+//!
+//! [`Parser`]: crate::parser::Parser
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![allow(rustdoc::private_intra_doc_links)]
+
+mod lexed_str;
+mod token_set;
+mod syntax_kind;
+mod event;
+mod parser;
+mod grammar;
+mod input;
+mod output;
+mod shortcuts;
+
+#[cfg(test)]
+mod tests;
+
+pub(crate) use token_set::TokenSet;
+
+pub use crate::{
+ input::Input,
+ lexed_str::LexedStr,
+ output::{Output, Step},
+ shortcuts::StrStep,
+ syntax_kind::SyntaxKind,
+};
+
+/// Parse the whole of the input as a given syntactic construct.
+///
+/// This covers two main use-cases:
+///
+///   * Parsing a Rust file.
+///   * Parsing a result of macro expansion.
+///
+/// That is, for something like
+///
+/// ```
+/// quick_check! {
+///    fn prop() {}
+/// }
+/// ```
+///
+/// the input to the macro will be parsed with [`PrefixEntryPoint::Item`], and
+/// the result will be [`TopEntryPoint::MacroItems`].
+///
+/// [`TopEntryPoint::parse`] makes a guarantee that
+///   * all input is consumed
+///   * the result is a valid tree (there's one root node)
+#[derive(Debug)]
+pub enum TopEntryPoint {
+    /// A complete Rust source file.
+    SourceFile,
+    /// A sequence of statements (macro expanded in statement position).
+    MacroStmts,
+    /// A sequence of items (macro expanded in item position).
+    MacroItems,
+    /// A single pattern.
+    Pattern,
+    /// A single type.
+    Type,
+    /// A single expression.
+    Expr,
+    /// Edge case -- macros generally don't expand to attributes, with the
+    /// exception of `cfg_attr` which does!
+    MetaItem,
+}
+
+impl TopEntryPoint {
+    pub fn parse(&self, input: &Input) -> Output {
+        // Dispatch table into the grammar; a plain `fn` pointer, so the
+        // match stays a stateless lookup.
+        let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
+            TopEntryPoint::SourceFile => grammar::entry::top::source_file,
+            TopEntryPoint::MacroStmts => grammar::entry::top::macro_stmts,
+            TopEntryPoint::MacroItems => grammar::entry::top::macro_items,
+            TopEntryPoint::Pattern => grammar::entry::top::pattern,
+            TopEntryPoint::Type => grammar::entry::top::type_,
+            TopEntryPoint::Expr => grammar::entry::top::expr,
+            TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
+        };
+        let mut p = parser::Parser::new(input);
+        entry_point(&mut p);
+        let events = p.finish();
+        let res = event::process(events);
+
+        // Debug-only check of the documented guarantee: the output forms a
+        // single well-nested tree, i.e. exactly one root node encloses all
+        // other steps.
+        if cfg!(debug_assertions) {
+            let mut depth = 0;
+            let mut first = true;
+            for step in res.iter() {
+                // Every step except the very first must occur inside an
+                // already-opened node.
+                assert!(depth > 0 || first);
+                first = false;
+                match step {
+                    Step::Enter { .. } => depth += 1,
+                    Step::Exit => depth -= 1,
+                    Step::Token { .. } | Step::Error { .. } => (),
+                }
+            }
+            assert!(!first, "no tree at all");
+        }
+
+        res
+    }
+}
+
+/// Parse a prefix of the input as a given syntactic construct.
+///
+/// This is used by macro-by-example parser to implement things like `$i:item`
+/// and the naming of variants follows the naming of macro fragments.
+///
+/// Note that this is generally non-optional -- the result is intentionally not
+/// `Option<Output>`. The way MBE work, by the time we *try* to parse `$e:expr`
+/// we already commit to expression. In other words, this API by design can't be
+/// used to implement "rollback and try another alternative" logic.
+#[derive(Debug)]
+pub enum PrefixEntryPoint {
+    /// `$x:vis`
+    Vis,
+    /// `$x:block`
+    Block,
+    /// `$x:stmt`
+    Stmt,
+    /// `$x:pat`
+    Pat,
+    /// `$x:ty`
+    Ty,
+    /// `$x:expr`
+    Expr,
+    /// `$x:path`
+    Path,
+    /// `$x:item`
+    Item,
+    /// `$x:meta`
+    MetaItem,
+}
+
+impl PrefixEntryPoint {
+    pub fn parse(&self, input: &Input) -> Output {
+        let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
+            PrefixEntryPoint::Vis => grammar::entry::prefix::vis,
+            PrefixEntryPoint::Block => grammar::entry::prefix::block,
+            PrefixEntryPoint::Stmt => grammar::entry::prefix::stmt,
+            PrefixEntryPoint::Pat => grammar::entry::prefix::pat,
+            PrefixEntryPoint::Ty => grammar::entry::prefix::ty,
+            PrefixEntryPoint::Expr => grammar::entry::prefix::expr,
+            PrefixEntryPoint::Path => grammar::entry::prefix::path,
+            PrefixEntryPoint::Item => grammar::entry::prefix::item,
+            PrefixEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
+        };
+        let mut p = parser::Parser::new(input);
+        entry_point(&mut p);
+        let events = p.finish();
+        // Unlike `TopEntryPoint::parse`, no "all input consumed" check is
+        // made here: a prefix parse deliberately stops partway through.
+        event::process(events)
+    }
+}
+
+/// A parsing function for a specific braced-block.
+pub struct Reparser(fn(&mut parser::Parser<'_>));
+
+impl Reparser {
+    /// If the node is a braced block, return the corresponding `Reparser`.
+    ///
+    /// The kinds of the node's first child and parent are passed along to
+    /// `grammar::reparser`, which picks the appropriate parsing function.
+    pub fn for_node(
+        node: SyntaxKind,
+        first_child: Option<SyntaxKind>,
+        parent: Option<SyntaxKind>,
+    ) -> Option<Reparser> {
+        grammar::reparser(node, first_child, parent).map(Reparser)
+    }
+
+    /// Re-parse given tokens using this `Reparser`.
+    ///
+    /// Tokens must start with `{`, end with `}` and form a valid brace
+    /// sequence.
+    pub fn parse(self, tokens: &Input) -> Output {
+        let Reparser(r) = self;
+        let mut p = parser::Parser::new(tokens);
+        r(&mut p);
+        let events = p.finish();
+        event::process(events)
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/output.rs b/src/tools/rust-analyzer/crates/parser/src/output.rs
new file mode 100644
index 000000000..e9ec9822d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/output.rs
@@ -0,0 +1,77 @@
+//! See [`Output`]
+
+use crate::SyntaxKind;
+
+/// Output of the parser -- a DFS traversal of a concrete syntax tree.
+///
+/// Use the [`Output::iter`] method to iterate over traversal steps and consume
+/// a syntax tree.
+///
+/// In a sense, this is just a sequence of [`SyntaxKind`]-colored parenthesis
+/// interspersed into the original [`crate::Input`]. The output is fundamentally
+/// coordinated with the input and `n_input_tokens` refers to the number of
+/// times [`crate::Input::push`] was called.
+#[derive(Default)]
+pub struct Output {
+    /// 32-bit encoding of events. If LSB is zero, then that's an index into
+    /// the error vector, stored shifted left by one. Otherwise, it's one of
+    /// the three other variants, with data encoded as
+    ///
+    ///     |16 bit kind|8 bit n_input_tokens|4 bit tag|4 bit leftover|
+    ///
+    event: Vec<u32>,
+    /// Error messages, referenced by index from `event`.
+    error: Vec<String>,
+}
+
+/// One step of the tree traversal, decoded from [`Output`]'s packed events.
+#[derive(Debug)]
+pub enum Step<'a> {
+    /// Emit one output token of `kind`, consuming `n_input_tokens` tokens of
+    /// the input.
+    Token { kind: SyntaxKind, n_input_tokens: u8 },
+    /// Open a node of `kind`.
+    Enter { kind: SyntaxKind },
+    /// Close the most recently opened node.
+    Exit,
+    /// Report an error message at the current position.
+    Error { msg: &'a str },
+}
+
+impl Output {
+    /// Decode the packed `event` words back into [`Step`]s.
+    pub fn iter(&self) -> impl Iterator<Item = Step<'_>> {
+        self.event.iter().map(|&event| {
+            // LSB == 0: error event; the remaining bits are an index into
+            // `self.error`.
+            if event & 0b1 == 0 {
+                return Step::Error { msg: self.error[(event as usize) >> 1].as_str() };
+            }
+            // Bits 4..8 distinguish Token (0) / Enter (1) / Exit (2).
+            let tag = ((event & 0x0000_00F0) >> 4) as u8;
+            match tag {
+                0 => {
+                    let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into();
+                    let n_input_tokens = ((event & 0x0000_FF00) >> 8) as u8;
+                    Step::Token { kind, n_input_tokens }
+                }
+                1 => {
+                    let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into();
+                    Step::Enter { kind }
+                }
+                2 => Step::Exit,
+                _ => unreachable!(),
+            }
+        })
+    }
+
+    pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
+        // kind in bits 16..32, token count in bits 8..16, tag 0, LSB 1.
+        let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | (0 << 4) | 1;
+        self.event.push(e)
+    }
+
+    pub(crate) fn enter_node(&mut self, kind: SyntaxKind) {
+        // kind in bits 16..32, tag 1, LSB 1.
+        let e = ((kind as u16 as u32) << 16) | (1 << 4) | 1;
+        self.event.push(e)
+    }
+
+    pub(crate) fn leave_node(&mut self) {
+        // tag 2, LSB 1; no payload.
+        let e = 2 << 4 | 1;
+        self.event.push(e)
+    }
+
+    pub(crate) fn error(&mut self, error: String) {
+        let idx = self.error.len();
+        self.error.push(error);
+        // Error index shifted left by one keeps the LSB at 0, marking the
+        // word as an error event.
+        let e = (idx as u32) << 1;
+        self.event.push(e);
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/parser.rs b/src/tools/rust-analyzer/crates/parser/src/parser.rs
new file mode 100644
index 000000000..48d8350e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/parser.rs
@@ -0,0 +1,340 @@
+//! See [`Parser`].
+
+use std::cell::Cell;
+
+use drop_bomb::DropBomb;
+use limit::Limit;
+
+use crate::{
+ event::Event,
+ input::Input,
+ SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
+ TokenSet, T,
+};
+
+/// `Parser` struct provides the low-level API for
+/// navigating through the stream of tokens and
+/// constructing the parse tree. The actual parsing
+/// happens in the [`grammar`](super::grammar) module.
+///
+/// However, the result of this `Parser` is not a real
+/// tree, but rather a flat stream of events of the form
+/// "start expression, consume number literal,
+/// finish expression". See `Event` docs for more.
+pub(crate) struct Parser<'t> {
+    /// The pre-lexed, trivia-free token stream being parsed.
+    inp: &'t Input,
+    /// Index of the current token in `inp`.
+    pos: usize,
+    /// Flat list of parse events produced so far.
+    events: Vec<Event>,
+    /// Count of lookahead operations; a `Cell` because lookahead methods
+    /// take `&self`. Used to detect a parser stuck in a loop.
+    steps: Cell<u32>,
+}
+
+/// Upper bound on lookahead operations for a single parse; exceeding it
+/// means the parser is looping without making progress (see [`Parser::nth`]).
+static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
+
+impl<'t> Parser<'t> {
+    pub(super) fn new(inp: &'t Input) -> Parser<'t> {
+        Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0) }
+    }
+
+    /// Consume the parser, yielding the flat event stream it produced.
+    pub(crate) fn finish(self) -> Vec<Event> {
+        self.events
+    }
+
+    /// Returns the kind of the current token.
+    /// If parser has already reached the end of input,
+    /// the special `EOF` kind is returned.
+    pub(crate) fn current(&self) -> SyntaxKind {
+        self.nth(0)
+    }
+
+    /// Lookahead operation: returns the kind of the next nth
+    /// token.
+    pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
+        // The grammar only ever needs up to 3 tokens of lookahead.
+        assert!(n <= 3);
+
+        // Every lookahead counts as a "step"; hitting the limit means the
+        // parser is looping without consuming input.
+        let steps = self.steps.get();
+        assert!(PARSER_STEP_LIMIT.check(steps as usize).is_ok(), "the parser seems stuck");
+        self.steps.set(steps + 1);
+
+        self.inp.kind(self.pos + n)
+    }
+
+    /// Checks if the current token is `kind`.
+    pub(crate) fn at(&self, kind: SyntaxKind) -> bool {
+        self.nth_at(0, kind)
+    }
+
+    /// Like `at`, with `n` tokens of lookahead. Composite punctuation
+    /// (`->`, `>>=`, ...) is stored as individual punct tokens in the
+    /// input, so it is recognized here by checking that the pieces are
+    /// adjacent (joint) in the source text.
+    pub(crate) fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
+        match kind {
+            T![-=] => self.at_composite2(n, T![-], T![=]),
+            T![->] => self.at_composite2(n, T![-], T![>]),
+            T![::] => self.at_composite2(n, T![:], T![:]),
+            T![!=] => self.at_composite2(n, T![!], T![=]),
+            T![..] => self.at_composite2(n, T![.], T![.]),
+            T![*=] => self.at_composite2(n, T![*], T![=]),
+            T![/=] => self.at_composite2(n, T![/], T![=]),
+            T![&&] => self.at_composite2(n, T![&], T![&]),
+            T![&=] => self.at_composite2(n, T![&], T![=]),
+            T![%=] => self.at_composite2(n, T![%], T![=]),
+            T![^=] => self.at_composite2(n, T![^], T![=]),
+            T![+=] => self.at_composite2(n, T![+], T![=]),
+            T![<<] => self.at_composite2(n, T![<], T![<]),
+            T![<=] => self.at_composite2(n, T![<], T![=]),
+            T![==] => self.at_composite2(n, T![=], T![=]),
+            T![=>] => self.at_composite2(n, T![=], T![>]),
+            T![>=] => self.at_composite2(n, T![>], T![=]),
+            T![>>] => self.at_composite2(n, T![>], T![>]),
+            T![|=] => self.at_composite2(n, T![|], T![=]),
+            T![||] => self.at_composite2(n, T![|], T![|]),
+
+            T![...] => self.at_composite3(n, T![.], T![.], T![.]),
+            T![..=] => self.at_composite3(n, T![.], T![.], T![=]),
+            T![<<=] => self.at_composite3(n, T![<], T![<], T![=]),
+            T![>>=] => self.at_composite3(n, T![>], T![>], T![=]),
+
+            _ => self.inp.kind(self.pos + n) == kind,
+        }
+    }
+
+    /// Consume the next token if `kind` matches.
+    pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
+        if !self.at(kind) {
+            return false;
+        }
+        // Composite tokens span several raw input tokens; bump them all at
+        // once so `pos` stays in sync with the input.
+        let n_raw_tokens = match kind {
+            T![-=]
+            | T![->]
+            | T![::]
+            | T![!=]
+            | T![..]
+            | T![*=]
+            | T![/=]
+            | T![&&]
+            | T![&=]
+            | T![%=]
+            | T![^=]
+            | T![+=]
+            | T![<<]
+            | T![<=]
+            | T![==]
+            | T![=>]
+            | T![>=]
+            | T![>>]
+            | T![|=]
+            | T![||] => 2,
+
+            T![...] | T![..=] | T![<<=] | T![>>=] => 3,
+            _ => 1,
+        };
+        self.do_bump(kind, n_raw_tokens);
+        true
+    }
+
+    /// Do the two tokens at `n`, `n + 1` form `k1 k2` with nothing between
+    /// them in the source text?
+    fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool {
+        self.inp.kind(self.pos + n) == k1
+            && self.inp.kind(self.pos + n + 1) == k2
+            && self.inp.is_joint(self.pos + n)
+    }
+
+    /// Same as `at_composite2`, for three adjacent tokens.
+    fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool {
+        self.inp.kind(self.pos + n) == k1
+            && self.inp.kind(self.pos + n + 1) == k2
+            && self.inp.kind(self.pos + n + 2) == k3
+            && self.inp.is_joint(self.pos + n)
+            && self.inp.is_joint(self.pos + n + 1)
+    }
+
+    /// Checks if the current token is in `kinds`.
+    pub(crate) fn at_ts(&self, kinds: TokenSet) -> bool {
+        kinds.contains(self.current())
+    }
+
+    /// Checks if the current token is contextual keyword with text `t`.
+    pub(crate) fn at_contextual_kw(&self, kw: SyntaxKind) -> bool {
+        self.inp.contextual_kind(self.pos) == kw
+    }
+
+    /// Starts a new node in the syntax tree. All nodes and tokens
+    /// consumed between the `start` and the corresponding `Marker::complete`
+    /// belong to the same node.
+    pub(crate) fn start(&mut self) -> Marker {
+        // Push a placeholder (tombstone) event; `Marker::complete` patches
+        // in the real kind later.
+        let pos = self.events.len() as u32;
+        self.push_event(Event::tombstone());
+        Marker::new(pos)
+    }
+
+    /// Consume the next token if `kind` matches.
+    pub(crate) fn bump(&mut self, kind: SyntaxKind) {
+        assert!(self.eat(kind));
+    }
+
+    /// Advances the parser by one token
+    pub(crate) fn bump_any(&mut self) {
+        let kind = self.nth(0);
+        if kind == EOF {
+            return;
+        }
+        self.do_bump(kind, 1);
+    }
+
+    /// Advances the parser by one token, remapping its kind.
+    /// This is useful to create contextual keywords from
+    /// identifiers. For example, the lexer creates a `union`
+    /// *identifier* token, but the parser remaps it to the
+    /// `union` keyword, and keyword is what ends up in the
+    /// final tree.
+    pub(crate) fn bump_remap(&mut self, kind: SyntaxKind) {
+        if self.nth(0) == EOF {
+            // FIXME: panic!?
+            return;
+        }
+        self.do_bump(kind, 1);
+    }
+
+    /// Emit error with the `message`
+    /// FIXME: this should be much more fancy and support
+    /// structured errors with spans and notes, like rustc
+    /// does.
+    pub(crate) fn error<T: Into<String>>(&mut self, message: T) {
+        let msg = message.into();
+        self.push_event(Event::Error { msg });
+    }
+
+    /// Consume the next token if it is `kind` or emit an error
+    /// otherwise.
+    pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool {
+        if self.eat(kind) {
+            return true;
+        }
+        self.error(format!("expected {:?}", kind));
+        false
+    }
+
+    /// Create an error node and consume the next token.
+    pub(crate) fn err_and_bump(&mut self, message: &str) {
+        self.err_recover(message, TokenSet::EMPTY);
+    }
+
+    /// Create an error node and consume the next token.
+    pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
+        // Never eat curly braces: they likely delimit an enclosing block,
+        // and swallowing one would derail the rest of the parse.
+        match self.current() {
+            T!['{'] | T!['}'] => {
+                self.error(message);
+                return;
+            }
+            _ => (),
+        }
+
+        // If the current token can start something the caller knows how to
+        // parse, report the error but leave the token for them.
+        if self.at_ts(recovery) {
+            self.error(message);
+            return;
+        }
+
+        // Otherwise swallow exactly one token into an ERROR node.
+        let m = self.start();
+        self.error(message);
+        self.bump_any();
+        m.complete(self, ERROR);
+    }
+
+    fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
+        self.pos += n_raw_tokens as usize;
+        self.push_event(Event::Token { kind, n_raw_tokens });
+    }
+
+    fn push_event(&mut self, event: Event) {
+        self.events.push(event);
+    }
+}
+
+/// See [`Parser::start`].
+pub(crate) struct Marker {
+    /// Index of the placeholder `Start` event in `Parser::events`.
+    pos: u32,
+    /// Panics on drop unless defused -- guards against forgetting to call
+    /// `complete` or `abandon`.
+    bomb: DropBomb,
+}
+
+impl Marker {
+    fn new(pos: u32) -> Marker {
+        Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") }
+    }
+
+    /// Finishes the syntax tree node and assigns `kind` to it,
+    /// and mark the create a `CompletedMarker` for possible future
+    /// operation like `.precede()` to deal with forward_parent.
+    pub(crate) fn complete(mut self, p: &mut Parser<'_>, kind: SyntaxKind) -> CompletedMarker {
+        self.bomb.defuse();
+        let idx = self.pos as usize;
+        // Retroactively fill in the kind of the placeholder `Start` event
+        // that `Parser::start` pushed.
+        match &mut p.events[idx] {
+            Event::Start { kind: slot, .. } => {
+                *slot = kind;
+            }
+            _ => unreachable!(),
+        }
+        p.push_event(Event::Finish);
+        CompletedMarker::new(self.pos, kind)
+    }
+
+    /// Abandons the syntax tree node. All its children
+    /// are attached to its parent instead.
+    pub(crate) fn abandon(mut self, p: &mut Parser<'_>) {
+        self.bomb.defuse();
+        let idx = self.pos as usize;
+        // Cheap case: nothing was parsed since `start`, so the placeholder
+        // can be popped outright. Otherwise it stays as a TOMBSTONE, to be
+        // handled during event processing.
+        if idx == p.events.len() - 1 {
+            match p.events.pop() {
+                Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (),
+                _ => unreachable!(),
+            }
+        }
+    }
+}
+
+/// A finished node, returned by [`Marker::complete`]; allows retroactively
+/// wrapping or extending the node via `precede`/`extend_to`.
+pub(crate) struct CompletedMarker {
+    /// Index of this node's `Start` event in `Parser::events`.
+    pos: u32,
+    kind: SyntaxKind,
+}
+
+impl CompletedMarker {
+    fn new(pos: u32, kind: SyntaxKind) -> Self {
+        CompletedMarker { pos, kind }
+    }
+
+    /// This method allows to create a new node which starts
+    /// *before* the current one. That is, parser could start
+    /// node `A`, then complete it, and then after parsing the
+    /// whole `A`, decide that it should have started some node
+    /// `B` before starting `A`. `precede` allows to do exactly
+    /// that. See also docs about
+    /// [`Event::Start::forward_parent`](crate::event::Event::Start::forward_parent).
+    ///
+    /// Given completed events `[START, FINISH]` and its corresponding
+    /// `CompletedMarker(pos: 0, _)`.
+    /// Append a new `START` events as `[START, FINISH, NEWSTART]`,
+    /// then mark `NEWSTART` as `START`'s parent with saving its relative
+    /// distance to `NEWSTART` into forward_parent(=2 in this case);
+    pub(crate) fn precede(self, p: &mut Parser<'_>) -> Marker {
+        let new_pos = p.start();
+        let idx = self.pos as usize;
+        // Record the *relative* offset of the new parent on this node's
+        // start event.
+        match &mut p.events[idx] {
+            Event::Start { forward_parent, .. } => {
+                *forward_parent = Some(new_pos.pos - self.pos);
+            }
+            _ => unreachable!(),
+        }
+        new_pos
+    }
+
+    /// Extends this completed marker *to the left* up to `m`.
+    pub(crate) fn extend_to(self, p: &mut Parser<'_>, mut m: Marker) -> CompletedMarker {
+        m.bomb.defuse();
+        let idx = m.pos as usize;
+        // Link `m`'s placeholder start to this node via a relative
+        // forward_parent offset.
+        match &mut p.events[idx] {
+            Event::Start { forward_parent, .. } => {
+                *forward_parent = Some(self.pos - m.pos);
+            }
+            _ => unreachable!(),
+        }
+        self
+    }
+
+    pub(crate) fn kind(&self) -> SyntaxKind {
+        self.kind
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
new file mode 100644
index 000000000..4b805fadd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -0,0 +1,215 @@
+//! Shortcuts that span lexer/parser abstraction.
+//!
+//! The way Rust works, the parser doesn't necessarily parse text, and you might
+//! tokenize text without parsing it further. So, it makes sense to keep
+//! abstract token parsing, and string tokenization as completely separate
+//! layers.
+//!
+//! However, often you do parse text into syntax trees and the glue code for
+//! that needs to live somewhere. Rather than putting it to lexer or parser, we
+//! use a separate shortcuts module for that.
+
+use std::mem;
+
+use crate::{
+ LexedStr, Step,
+ SyntaxKind::{self, *},
+};
+
+/// A single step of a text-level tree traversal, as fed to the `sink`
+/// callback of [`LexedStr::intersperse_trivia`].
+#[derive(Debug)]
+pub enum StrStep<'a> {
+    /// Emit a token of `kind` covering `text`.
+    Token { kind: SyntaxKind, text: &'a str },
+    /// Open a node of `kind`.
+    Enter { kind: SyntaxKind },
+    /// Close the most recently opened node.
+    Exit,
+    /// Report an error message at text position `pos`.
+    Error { msg: &'a str, pos: usize },
+}
+
+impl<'a> LexedStr<'a> {
+    /// Convert the lexed tokens into parser [`crate::Input`]: trivia is
+    /// dropped, identifiers record their contextual-keyword kind, and
+    /// adjacent (joint) punctuation is marked so the parser can recognize
+    /// composite tokens like `>>`.
+    pub fn to_input(&self) -> crate::Input {
+        let mut res = crate::Input::default();
+        let mut was_joint = false;
+        for i in 0..self.len() {
+            let kind = self.kind(i);
+            if kind.is_trivia() {
+                // Trivia separates tokens, so the next token is not joint.
+                was_joint = false
+            } else {
+                if kind == SyntaxKind::IDENT {
+                    let token_text = self.text(i);
+                    let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
+                        .unwrap_or(SyntaxKind::IDENT);
+                    res.push_ident(contextual_kw);
+                } else {
+                    // Jointness is only consulted for punctuation (composite
+                    // token recognition in the parser), so it is recorded
+                    // only on this branch.
+                    if was_joint {
+                        res.was_joint();
+                    }
+                    res.push(kind);
+                }
+                was_joint = true;
+            }
+        }
+        res
+    }
+
+    /// NB: only valid to call with Output from Reparser/TopLevelEntry.
+    pub fn intersperse_trivia(
+        &self,
+        output: &crate::Output,
+        sink: &mut dyn FnMut(StrStep<'_>),
+    ) -> bool {
+        let mut builder = Builder { lexed: self, pos: 0, state: State::PendingEnter, sink };
+
+        for event in output.iter() {
+            match event {
+                Step::Token { kind, n_input_tokens: n_raw_tokens } => {
+                    builder.token(kind, n_raw_tokens)
+                }
+                Step::Enter { kind } => builder.enter(kind),
+                Step::Exit => builder.exit(),
+                Step::Error { msg } => {
+                    let text_pos = builder.lexed.text_start(builder.pos);
+                    (builder.sink)(StrStep::Error { msg, pos: text_pos });
+                }
+            }
+        }
+
+        // A well-formed Output ends by closing its root node, so exactly one
+        // exit must still be buffered; flush it, placing any trailing trivia
+        // inside the root.
+        match mem::replace(&mut builder.state, State::Normal) {
+            State::PendingExit => {
+                builder.eat_trivias();
+                (builder.sink)(StrStep::Exit);
+            }
+            State::PendingEnter | State::Normal => unreachable!(),
+        }
+
+        // Report whether the whole input was consumed.
+        let is_eof = builder.pos == builder.lexed.len();
+        is_eof
+    }
+}
+
+/// Glue state for replaying parser [`crate::Output`] over the original
+/// lexed text while re-inserting trivia.
+struct Builder<'a, 'b> {
+    /// The lexed source the output is replayed against.
+    lexed: &'a LexedStr<'a>,
+    /// Index of the next not-yet-emitted lexed token (trivia included).
+    pos: usize,
+    /// One-step buffer over enter/exit, used to decide trivia placement.
+    state: State,
+    sink: &'b mut dyn FnMut(StrStep<'_>),
+}
+
+/// Builder state: exits are delayed one step so that trivia around node
+/// boundaries can be attached to the correct side.
+enum State {
+    /// No node has been entered yet.
+    PendingEnter,
+    Normal,
+    /// An `Exit` is buffered, not yet sent to the sink.
+    PendingExit,
+}
+
+impl Builder<'_, '_> {
+    fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
+        // Flush a buffered exit first: trivia before a token goes after the
+        // closed node, not inside it.
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingEnter => unreachable!(),
+            State::PendingExit => (self.sink)(StrStep::Exit),
+            State::Normal => (),
+        }
+        self.eat_trivias();
+        self.do_token(kind, n_tokens as usize);
+    }
+
+    fn enter(&mut self, kind: SyntaxKind) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingEnter => {
+                (self.sink)(StrStep::Enter { kind });
+                // No need to attach trivias to previous node: there is no
+                // previous node.
+                return;
+            }
+            State::PendingExit => (self.sink)(StrStep::Exit),
+            State::Normal => (),
+        }
+
+        // Of the trivia preceding this node, decide how many "belong" to it
+        // (e.g. its doc comments) and emit those inside the node; the rest
+        // stay outside.
+        let n_trivias =
+            (self.pos..self.lexed.len()).take_while(|&it| self.lexed.kind(it).is_trivia()).count();
+        let leading_trivias = self.pos..self.pos + n_trivias;
+        let n_attached_trivias = n_attached_trivias(
+            kind,
+            leading_trivias.rev().map(|it| (self.lexed.kind(it), self.lexed.text(it))),
+        );
+        self.eat_n_trivias(n_trivias - n_attached_trivias);
+        (self.sink)(StrStep::Enter { kind });
+        self.eat_n_trivias(n_attached_trivias);
+    }
+
+    fn exit(&mut self) {
+        // Buffer the exit; it is flushed by the next event (or at the very
+        // end), after trivia placement has been decided.
+        match mem::replace(&mut self.state, State::PendingExit) {
+            State::PendingEnter => unreachable!(),
+            State::PendingExit => (self.sink)(StrStep::Exit),
+            State::Normal => (),
+        }
+    }
+
+    /// Emit all consecutive trivia tokens starting at the current position.
+    fn eat_trivias(&mut self) {
+        while self.pos < self.lexed.len() {
+            let kind = self.lexed.kind(self.pos);
+            if !kind.is_trivia() {
+                break;
+            }
+            self.do_token(kind, 1);
+        }
+    }
+
+    /// Emit exactly `n` tokens, asserting each is trivia.
+    fn eat_n_trivias(&mut self, n: usize) {
+        for _ in 0..n {
+            let kind = self.lexed.kind(self.pos);
+            assert!(kind.is_trivia());
+            self.do_token(kind, 1);
+        }
+    }
+
+    /// Emit one output token covering `n_tokens` lexed tokens.
+    fn do_token(&mut self, kind: SyntaxKind, n_tokens: usize) {
+        let text = &self.lexed.range_text(self.pos..self.pos + n_tokens);
+        self.pos += n_tokens;
+        (self.sink)(StrStep::Token { kind, text });
+    }
+}
+
+/// Given the trivia tokens immediately preceding a node of `kind`, in
+/// *reverse* source order (closest token first), return how many of them
+/// should be attached to the node (emitted inside it) rather than left in
+/// the enclosing node.
+///
+/// Heuristic: comments directly above item-like nodes belong to the item,
+/// unless separated by a blank line or written as inner (`//!`) comments.
+fn n_attached_trivias<'a>(
+    kind: SyntaxKind,
+    trivias: impl Iterator<Item = (SyntaxKind, &'a str)>,
+) -> usize {
+    match kind {
+        // Only item-like nodes pull their leading comments in.
+        CONST | ENUM | FN | IMPL | MACRO_CALL | MACRO_DEF | MACRO_RULES | MODULE | RECORD_FIELD
+        | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT => {
+            let mut res = 0;
+            let mut trivias = trivias.enumerate().peekable();
+
+            while let Some((i, (kind, text))) = trivias.next() {
+                match kind {
+                    // A blank line normally ends the attached run.
+                    WHITESPACE if text.contains("\n\n") => {
+                        // we check whether the next token is a doc-comment
+                        // and skip the whitespace in this case
+                        if let Some((COMMENT, peek_text)) = trivias.peek().map(|(_, pair)| pair) {
+                            if is_outer(peek_text) {
+                                continue;
+                            }
+                        }
+                        break;
+                    }
+                    COMMENT => {
+                        // Inner comments document the *enclosing* scope and
+                        // never attach to the following item.
+                        if is_inner(text) {
+                            break;
+                        }
+                        // Attach everything up to and including this comment.
+                        res = i + 1;
+                    }
+                    _ => (),
+                }
+            }
+            res
+        }
+        _ => 0,
+    }
+}
+
+/// Is `text` an *outer* doc comment (`///` or `/**`)? The degenerate forms
+/// `////...` and `/***...` are not doc comments and are excluded.
+fn is_outer(text: &str) -> bool {
+    let looks_outer = text.starts_with("///") || text.starts_with("/**");
+    let degenerate = text.starts_with("////") || text.starts_with("/***");
+    looks_outer && !degenerate
+}
+
+/// Is `text` an *inner* doc comment (`//!` or `/*!`)?
+fn is_inner(text: &str) -> bool {
+    ["//!", "/*!"].iter().any(|prefix| text.starts_with(prefix))
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs
new file mode 100644
index 000000000..0483adc77
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs
@@ -0,0 +1,29 @@
+//! Defines [`SyntaxKind`] -- a fieldless enum of all possible syntactic
+//! constructs of the Rust language.
+
+mod generated;
+
+#[allow(unreachable_pub)]
+pub use self::generated::{SyntaxKind, T};
+
+impl From<u16> for SyntaxKind {
+    #[inline]
+    fn from(d: u16) -> SyntaxKind {
+        assert!(d <= (SyntaxKind::__LAST as u16));
+        // SAFETY: `SyntaxKind` is a fieldless `#[repr(u16)]` enum with
+        // consecutive discriminants `0..=__LAST`, and the assert above
+        // guarantees `d` lies within that range.
+        unsafe { std::mem::transmute::<u16, SyntaxKind>(d) }
+    }
+}
+
+impl From<SyntaxKind> for u16 {
+    #[inline]
+    fn from(k: SyntaxKind) -> u16 {
+        // Lossless: `SyntaxKind` is `#[repr(u16)]`.
+        k as u16
+    }
+}
+
+impl SyntaxKind {
+    /// Returns `true` for tokens that carry no syntactic meaning and are
+    /// skipped by the parser: whitespace and comments.
+    #[inline]
+    pub fn is_trivia(self) -> bool {
+        self == SyntaxKind::WHITESPACE || self == SyntaxKind::COMMENT
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
new file mode 100644
index 000000000..628fa745e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
@@ -0,0 +1,390 @@
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+#![allow(bad_style, missing_docs, unreachable_pub)]
+#[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+#[repr(u16)]
+pub enum SyntaxKind {
+ #[doc(hidden)]
+ TOMBSTONE,
+ #[doc(hidden)]
+ EOF,
+ SEMICOLON,
+ COMMA,
+ L_PAREN,
+ R_PAREN,
+ L_CURLY,
+ R_CURLY,
+ L_BRACK,
+ R_BRACK,
+ L_ANGLE,
+ R_ANGLE,
+ AT,
+ POUND,
+ TILDE,
+ QUESTION,
+ DOLLAR,
+ AMP,
+ PIPE,
+ PLUS,
+ STAR,
+ SLASH,
+ CARET,
+ PERCENT,
+ UNDERSCORE,
+ DOT,
+ DOT2,
+ DOT3,
+ DOT2EQ,
+ COLON,
+ COLON2,
+ EQ,
+ EQ2,
+ FAT_ARROW,
+ BANG,
+ NEQ,
+ MINUS,
+ THIN_ARROW,
+ LTEQ,
+ GTEQ,
+ PLUSEQ,
+ MINUSEQ,
+ PIPEEQ,
+ AMPEQ,
+ CARETEQ,
+ SLASHEQ,
+ STAREQ,
+ PERCENTEQ,
+ AMP2,
+ PIPE2,
+ SHL,
+ SHR,
+ SHLEQ,
+ SHREQ,
+ AS_KW,
+ ASYNC_KW,
+ AWAIT_KW,
+ BOX_KW,
+ BREAK_KW,
+ CONST_KW,
+ CONTINUE_KW,
+ CRATE_KW,
+ DYN_KW,
+ ELSE_KW,
+ ENUM_KW,
+ EXTERN_KW,
+ FALSE_KW,
+ FN_KW,
+ FOR_KW,
+ IF_KW,
+ IMPL_KW,
+ IN_KW,
+ LET_KW,
+ LOOP_KW,
+ MACRO_KW,
+ MATCH_KW,
+ MOD_KW,
+ MOVE_KW,
+ MUT_KW,
+ PUB_KW,
+ REF_KW,
+ RETURN_KW,
+ SELF_KW,
+ SELF_TYPE_KW,
+ STATIC_KW,
+ STRUCT_KW,
+ SUPER_KW,
+ TRAIT_KW,
+ TRUE_KW,
+ TRY_KW,
+ TYPE_KW,
+ UNSAFE_KW,
+ USE_KW,
+ WHERE_KW,
+ WHILE_KW,
+ YIELD_KW,
+ AUTO_KW,
+ DEFAULT_KW,
+ EXISTENTIAL_KW,
+ UNION_KW,
+ RAW_KW,
+ MACRO_RULES_KW,
+ INT_NUMBER,
+ FLOAT_NUMBER,
+ CHAR,
+ BYTE,
+ STRING,
+ BYTE_STRING,
+ ERROR,
+ IDENT,
+ WHITESPACE,
+ LIFETIME_IDENT,
+ COMMENT,
+ SHEBANG,
+ SOURCE_FILE,
+ STRUCT,
+ UNION,
+ ENUM,
+ FN,
+ RET_TYPE,
+ EXTERN_CRATE,
+ MODULE,
+ USE,
+ STATIC,
+ CONST,
+ TRAIT,
+ IMPL,
+ TYPE_ALIAS,
+ MACRO_CALL,
+ MACRO_RULES,
+ MACRO_ARM,
+ TOKEN_TREE,
+ MACRO_DEF,
+ PAREN_TYPE,
+ TUPLE_TYPE,
+ MACRO_TYPE,
+ NEVER_TYPE,
+ PATH_TYPE,
+ PTR_TYPE,
+ ARRAY_TYPE,
+ SLICE_TYPE,
+ REF_TYPE,
+ INFER_TYPE,
+ FN_PTR_TYPE,
+ FOR_TYPE,
+ IMPL_TRAIT_TYPE,
+ DYN_TRAIT_TYPE,
+ OR_PAT,
+ PAREN_PAT,
+ REF_PAT,
+ BOX_PAT,
+ IDENT_PAT,
+ WILDCARD_PAT,
+ REST_PAT,
+ PATH_PAT,
+ RECORD_PAT,
+ RECORD_PAT_FIELD_LIST,
+ RECORD_PAT_FIELD,
+ TUPLE_STRUCT_PAT,
+ TUPLE_PAT,
+ SLICE_PAT,
+ RANGE_PAT,
+ LITERAL_PAT,
+ MACRO_PAT,
+ CONST_BLOCK_PAT,
+ TUPLE_EXPR,
+ ARRAY_EXPR,
+ PAREN_EXPR,
+ PATH_EXPR,
+ CLOSURE_EXPR,
+ IF_EXPR,
+ WHILE_EXPR,
+ LOOP_EXPR,
+ FOR_EXPR,
+ CONTINUE_EXPR,
+ BREAK_EXPR,
+ LABEL,
+ BLOCK_EXPR,
+ STMT_LIST,
+ RETURN_EXPR,
+ YIELD_EXPR,
+ LET_EXPR,
+ UNDERSCORE_EXPR,
+ MACRO_EXPR,
+ MATCH_EXPR,
+ MATCH_ARM_LIST,
+ MATCH_ARM,
+ MATCH_GUARD,
+ RECORD_EXPR,
+ RECORD_EXPR_FIELD_LIST,
+ RECORD_EXPR_FIELD,
+ BOX_EXPR,
+ CALL_EXPR,
+ INDEX_EXPR,
+ METHOD_CALL_EXPR,
+ FIELD_EXPR,
+ AWAIT_EXPR,
+ TRY_EXPR,
+ CAST_EXPR,
+ REF_EXPR,
+ PREFIX_EXPR,
+ RANGE_EXPR,
+ BIN_EXPR,
+ EXTERN_BLOCK,
+ EXTERN_ITEM_LIST,
+ VARIANT,
+ RECORD_FIELD_LIST,
+ RECORD_FIELD,
+ TUPLE_FIELD_LIST,
+ TUPLE_FIELD,
+ VARIANT_LIST,
+ ITEM_LIST,
+ ASSOC_ITEM_LIST,
+ ATTR,
+ META,
+ USE_TREE,
+ USE_TREE_LIST,
+ PATH,
+ PATH_SEGMENT,
+ LITERAL,
+ RENAME,
+ VISIBILITY,
+ WHERE_CLAUSE,
+ WHERE_PRED,
+ ABI,
+ NAME,
+ NAME_REF,
+ LET_STMT,
+ LET_ELSE,
+ EXPR_STMT,
+ GENERIC_PARAM_LIST,
+ GENERIC_PARAM,
+ LIFETIME_PARAM,
+ TYPE_PARAM,
+ CONST_PARAM,
+ GENERIC_ARG_LIST,
+ LIFETIME,
+ LIFETIME_ARG,
+ TYPE_ARG,
+ ASSOC_TYPE_ARG,
+ CONST_ARG,
+ PARAM_LIST,
+ PARAM,
+ SELF_PARAM,
+ ARG_LIST,
+ TYPE_BOUND,
+ TYPE_BOUND_LIST,
+ MACRO_ITEMS,
+ MACRO_STMTS,
+ #[doc(hidden)]
+ __LAST,
+}
+use self::SyntaxKind::*;
+impl SyntaxKind {
+ pub fn is_keyword(self) -> bool {
+ match self {
+ AS_KW | ASYNC_KW | AWAIT_KW | BOX_KW | BREAK_KW | CONST_KW | CONTINUE_KW | CRATE_KW
+ | DYN_KW | ELSE_KW | ENUM_KW | EXTERN_KW | FALSE_KW | FN_KW | FOR_KW | IF_KW
+ | IMPL_KW | IN_KW | LET_KW | LOOP_KW | MACRO_KW | MATCH_KW | MOD_KW | MOVE_KW
+ | MUT_KW | PUB_KW | REF_KW | RETURN_KW | SELF_KW | SELF_TYPE_KW | STATIC_KW
+ | STRUCT_KW | SUPER_KW | TRAIT_KW | TRUE_KW | TRY_KW | TYPE_KW | UNSAFE_KW | USE_KW
+ | WHERE_KW | WHILE_KW | YIELD_KW | AUTO_KW | DEFAULT_KW | EXISTENTIAL_KW | UNION_KW
+ | RAW_KW | MACRO_RULES_KW => true,
+ _ => false,
+ }
+ }
+ pub fn is_punct(self) -> bool {
+ match self {
+ SEMICOLON | COMMA | L_PAREN | R_PAREN | L_CURLY | R_CURLY | L_BRACK | R_BRACK
+ | L_ANGLE | R_ANGLE | AT | POUND | TILDE | QUESTION | DOLLAR | AMP | PIPE | PLUS
+ | STAR | SLASH | CARET | PERCENT | UNDERSCORE | DOT | DOT2 | DOT3 | DOT2EQ | COLON
+ | COLON2 | EQ | EQ2 | FAT_ARROW | BANG | NEQ | MINUS | THIN_ARROW | LTEQ | GTEQ
+ | PLUSEQ | MINUSEQ | PIPEEQ | AMPEQ | CARETEQ | SLASHEQ | STAREQ | PERCENTEQ | AMP2
+ | PIPE2 | SHL | SHR | SHLEQ | SHREQ => true,
+ _ => false,
+ }
+ }
+ pub fn is_literal(self) -> bool {
+ match self {
+ INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true,
+ _ => false,
+ }
+ }
+ pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ "as" => AS_KW,
+ "async" => ASYNC_KW,
+ "await" => AWAIT_KW,
+ "box" => BOX_KW,
+ "break" => BREAK_KW,
+ "const" => CONST_KW,
+ "continue" => CONTINUE_KW,
+ "crate" => CRATE_KW,
+ "dyn" => DYN_KW,
+ "else" => ELSE_KW,
+ "enum" => ENUM_KW,
+ "extern" => EXTERN_KW,
+ "false" => FALSE_KW,
+ "fn" => FN_KW,
+ "for" => FOR_KW,
+ "if" => IF_KW,
+ "impl" => IMPL_KW,
+ "in" => IN_KW,
+ "let" => LET_KW,
+ "loop" => LOOP_KW,
+ "macro" => MACRO_KW,
+ "match" => MATCH_KW,
+ "mod" => MOD_KW,
+ "move" => MOVE_KW,
+ "mut" => MUT_KW,
+ "pub" => PUB_KW,
+ "ref" => REF_KW,
+ "return" => RETURN_KW,
+ "self" => SELF_KW,
+ "Self" => SELF_TYPE_KW,
+ "static" => STATIC_KW,
+ "struct" => STRUCT_KW,
+ "super" => SUPER_KW,
+ "trait" => TRAIT_KW,
+ "true" => TRUE_KW,
+ "try" => TRY_KW,
+ "type" => TYPE_KW,
+ "unsafe" => UNSAFE_KW,
+ "use" => USE_KW,
+ "where" => WHERE_KW,
+ "while" => WHILE_KW,
+ "yield" => YIELD_KW,
+ _ => return None,
+ };
+ Some(kw)
+ }
+ pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ "auto" => AUTO_KW,
+ "default" => DEFAULT_KW,
+ "existential" => EXISTENTIAL_KW,
+ "union" => UNION_KW,
+ "raw" => RAW_KW,
+ "macro_rules" => MACRO_RULES_KW,
+ _ => return None,
+ };
+ Some(kw)
+ }
+ pub fn from_char(c: char) -> Option<SyntaxKind> {
+ let tok = match c {
+ ';' => SEMICOLON,
+ ',' => COMMA,
+ '(' => L_PAREN,
+ ')' => R_PAREN,
+ '{' => L_CURLY,
+ '}' => R_CURLY,
+ '[' => L_BRACK,
+ ']' => R_BRACK,
+ '<' => L_ANGLE,
+ '>' => R_ANGLE,
+ '@' => AT,
+ '#' => POUND,
+ '~' => TILDE,
+ '?' => QUESTION,
+ '$' => DOLLAR,
+ '&' => AMP,
+ '|' => PIPE,
+ '+' => PLUS,
+ '*' => STAR,
+ '/' => SLASH,
+ '^' => CARET,
+ '%' => PERCENT,
+ '_' => UNDERSCORE,
+ '.' => DOT,
+ ':' => COLON,
+ '=' => EQ,
+ '!' => BANG,
+ '-' => MINUS,
+ _ => return None,
+ };
+ Some(tok)
+ }
+}
+#[macro_export]
+macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ 
crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; }
+pub use T;
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests.rs
new file mode 100644
index 000000000..735c0b3e4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/tests.rs
@@ -0,0 +1,166 @@
+mod sourcegen_inline_tests;
+mod top_entries;
+mod prefix_entries;
+
+use std::{
+ fmt::Write,
+ fs,
+ path::{Path, PathBuf},
+};
+
+use expect_test::expect_file;
+
+use crate::{LexedStr, TopEntryPoint};
+
+#[test]
+fn lex_ok() {
+ for case in TestCase::list("lexer/ok") {
+ let actual = lex(&case.text);
+ expect_file![case.rast].assert_eq(&actual)
+ }
+}
+
+#[test]
+fn lex_err() {
+ for case in TestCase::list("lexer/err") {
+ let actual = lex(&case.text);
+ expect_file![case.rast].assert_eq(&actual)
+ }
+}
+
+fn lex(text: &str) -> String {
+ let lexed = LexedStr::new(text);
+
+ let mut res = String::new();
+ for i in 0..lexed.len() {
+ let kind = lexed.kind(i);
+ let text = lexed.text(i);
+ let error = lexed.error(i);
+
+ let error = error.map(|err| format!(" error: {}", err)).unwrap_or_default();
+ writeln!(res, "{:?} {:?}{}", kind, text, error).unwrap();
+ }
+ res
+}
+
+#[test]
+fn parse_ok() {
+ for case in TestCase::list("parser/ok") {
+ let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
+ assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+ expect_file![case.rast].assert_eq(&actual);
+ }
+}
+
+#[test]
+fn parse_inline_ok() {
+ for case in TestCase::list("parser/inline/ok") {
+ let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
+ assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+ expect_file![case.rast].assert_eq(&actual);
+ }
+}
+
+#[test]
+fn parse_err() {
+ for case in TestCase::list("parser/err") {
+ let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
+ assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+ expect_file![case.rast].assert_eq(&actual)
+ }
+}
+
+#[test]
+fn parse_inline_err() {
+ for case in TestCase::list("parser/inline/err") {
+ let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
+ assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+ expect_file![case.rast].assert_eq(&actual)
+ }
+}
+
+fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
+ let lexed = LexedStr::new(text);
+ let input = lexed.to_input();
+ let output = entry.parse(&input);
+
+ let mut buf = String::new();
+ let mut errors = Vec::new();
+ let mut indent = String::new();
+ let mut depth = 0;
+ let mut len = 0;
+ lexed.intersperse_trivia(&output, &mut |step| match step {
+ crate::StrStep::Token { kind, text } => {
+ assert!(depth > 0);
+ len += text.len();
+ write!(buf, "{}", indent).unwrap();
+ write!(buf, "{:?} {:?}\n", kind, text).unwrap();
+ }
+ crate::StrStep::Enter { kind } => {
+ assert!(depth > 0 || len == 0);
+ depth += 1;
+ write!(buf, "{}", indent).unwrap();
+ write!(buf, "{:?}\n", kind).unwrap();
+ indent.push_str(" ");
+ }
+ crate::StrStep::Exit => {
+ assert!(depth > 0);
+ depth -= 1;
+ indent.pop();
+ indent.pop();
+ }
+ crate::StrStep::Error { msg, pos } => {
+ assert!(depth > 0);
+ errors.push(format!("error {}: {}\n", pos, msg))
+ }
+ });
+ assert_eq!(
+ len,
+ text.len(),
+ "didn't parse all text.\nParsed:\n{}\n\nAll:\n{}\n",
+ &text[..len],
+ text
+ );
+
+ for (token, msg) in lexed.errors() {
+ let pos = lexed.text_start(token);
+ errors.push(format!("error {}: {}\n", pos, msg));
+ }
+
+ let has_errors = !errors.is_empty();
+ for e in errors {
+ buf.push_str(&e);
+ }
+ (buf, has_errors)
+}
+
+#[derive(PartialEq, Eq, PartialOrd, Ord)]
+struct TestCase {
+ rs: PathBuf,
+ rast: PathBuf,
+ text: String,
+}
+
+impl TestCase {
+ fn list(path: &'static str) -> Vec<TestCase> {
+ let crate_root_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
+ let test_data_dir = crate_root_dir.join("test_data");
+ let dir = test_data_dir.join(path);
+
+ let mut res = Vec::new();
+ let read_dir = fs::read_dir(&dir)
+ .unwrap_or_else(|err| panic!("can't `read_dir` {}: {}", dir.display(), err));
+ for file in read_dir {
+ let file = file.unwrap();
+ let path = file.path();
+ if path.extension().unwrap_or_default() == "rs" {
+ let rs = path;
+ let rast = rs.with_extension("rast");
+ let text = fs::read_to_string(&rs).unwrap();
+ res.push(TestCase { rs, rast, text });
+ }
+ }
+ res.sort();
+ res
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
new file mode 100644
index 000000000..e626b4f27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
@@ -0,0 +1,107 @@
+use crate::{LexedStr, PrefixEntryPoint, Step};
+
+#[test]
+fn vis() {
+ check(PrefixEntryPoint::Vis, "pub(crate) fn foo() {}", "pub(crate)");
+ check(PrefixEntryPoint::Vis, "fn foo() {}", "");
+ check(PrefixEntryPoint::Vis, "pub(fn foo() {}", "pub");
+ check(PrefixEntryPoint::Vis, "pub(crate fn foo() {}", "pub(crate");
+ check(PrefixEntryPoint::Vis, "crate fn foo() {}", "crate");
+}
+
+#[test]
+fn block() {
+ check(PrefixEntryPoint::Block, "{}, 92", "{}");
+ check(PrefixEntryPoint::Block, "{, 92)", "{, 92)");
+ check(PrefixEntryPoint::Block, "()", "");
+}
+
+#[test]
+fn stmt() {
+ check(PrefixEntryPoint::Stmt, "92; fn", "92");
+ check(PrefixEntryPoint::Stmt, "let _ = 92; 1", "let _ = 92");
+ check(PrefixEntryPoint::Stmt, "pub fn f() {} = 92", "pub fn f() {}");
+ check(PrefixEntryPoint::Stmt, "struct S;;", "struct S;");
+ check(PrefixEntryPoint::Stmt, "fn f() {};", "fn f() {}");
+ check(PrefixEntryPoint::Stmt, ";;;", ";");
+ check(PrefixEntryPoint::Stmt, "+", "+");
+ check(PrefixEntryPoint::Stmt, "@", "@");
+ check(PrefixEntryPoint::Stmt, "loop {} - 1", "loop {}");
+}
+
+#[test]
+fn pat() {
+ check(PrefixEntryPoint::Pat, "x y", "x");
+ check(PrefixEntryPoint::Pat, "fn f() {}", "fn");
+ // FIXME: This one is wrong, we should consume only one pattern.
+ check(PrefixEntryPoint::Pat, ".. ..", ".. ..");
+}
+
+#[test]
+fn ty() {
+ check(PrefixEntryPoint::Ty, "fn() foo", "fn()");
+ check(PrefixEntryPoint::Ty, "Clone + Copy + fn", "Clone + Copy +");
+ check(PrefixEntryPoint::Ty, "struct f", "struct");
+}
+
+#[test]
+fn expr() {
+ check(PrefixEntryPoint::Expr, "92 92", "92");
+ check(PrefixEntryPoint::Expr, "+1", "+");
+ check(PrefixEntryPoint::Expr, "-1", "-1");
+ check(PrefixEntryPoint::Expr, "fn foo() {}", "fn");
+ check(PrefixEntryPoint::Expr, "#[attr] ()", "#[attr] ()");
+}
+
+#[test]
+fn path() {
+ check(PrefixEntryPoint::Path, "foo::bar baz", "foo::bar");
+ check(PrefixEntryPoint::Path, "foo::<> baz", "foo::<>");
+ check(PrefixEntryPoint::Path, "foo<> baz", "foo<>");
+ check(PrefixEntryPoint::Path, "Fn() -> i32?", "Fn() -> i32");
+ // FIXME: This shouldn't be accepted as path actually.
+ check(PrefixEntryPoint::Path, "<_>::foo", "<_>::foo");
+}
+
+#[test]
+fn item() {
+ // FIXME: This shouldn't consume the semicolon.
+ check(PrefixEntryPoint::Item, "fn foo() {};", "fn foo() {};");
+ check(PrefixEntryPoint::Item, "#[attr] pub struct S {} 92", "#[attr] pub struct S {}");
+ check(PrefixEntryPoint::Item, "item!{}?", "item!{}");
+ check(PrefixEntryPoint::Item, "????", "?");
+}
+
+#[test]
+fn meta_item() {
+ check(PrefixEntryPoint::MetaItem, "attr, ", "attr");
+ check(PrefixEntryPoint::MetaItem, "attr(some token {stream});", "attr(some token {stream})");
+ check(PrefixEntryPoint::MetaItem, "path::attr = 2 * 2!", "path::attr = 2 * 2");
+}
+
+#[track_caller]
+fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
+ let lexed = LexedStr::new(input);
+ let input = lexed.to_input();
+
+ let mut n_tokens = 0;
+ for step in entry.parse(&input).iter() {
+ match step {
+ Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
+ Step::Enter { .. } | Step::Exit | Step::Error { .. } => (),
+ }
+ }
+
+ let mut i = 0;
+ loop {
+ if n_tokens == 0 {
+ break;
+ }
+ if !lexed.kind(i).is_trivia() {
+ n_tokens -= 1;
+ }
+ i += 1;
+ }
+ let buf = &lexed.as_str()[..lexed.text_start(i)];
+ assert_eq!(buf, prefix);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs
new file mode 100644
index 000000000..7b2b703de
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs
@@ -0,0 +1,123 @@
+//! This module greps parser's code for specially formatted comments and turns
+//! them into tests.
+
+use std::{
+ collections::HashMap,
+ fs, iter,
+ path::{Path, PathBuf},
+};
+
+#[test]
+fn sourcegen_parser_tests() {
+ let grammar_dir = sourcegen::project_root().join(Path::new("crates/parser/src/grammar"));
+ let tests = tests_from_dir(&grammar_dir);
+
+ install_tests(&tests.ok, "crates/parser/test_data/parser/inline/ok");
+ install_tests(&tests.err, "crates/parser/test_data/parser/inline/err");
+
+ fn install_tests(tests: &HashMap<String, Test>, into: &str) {
+ let tests_dir = sourcegen::project_root().join(into);
+ if !tests_dir.is_dir() {
+ fs::create_dir_all(&tests_dir).unwrap();
+ }
+ // The `ok` flag is never read here; it only exists because `existing_tests` must construct `Test` values.
+ let existing = existing_tests(&tests_dir, true);
+ for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
+ panic!("Test is deleted: {}", t);
+ }
+
+ let mut new_idx = existing.len() + 1;
+ for (name, test) in tests {
+ let path = match existing.get(name) {
+ Some((path, _test)) => path.clone(),
+ None => {
+ let file_name = format!("{:04}_{}.rs", new_idx, name);
+ new_idx += 1;
+ tests_dir.join(file_name)
+ }
+ };
+ sourcegen::ensure_file_contents(&path, &test.text);
+ }
+ }
+}
+
+#[derive(Debug)]
+struct Test {
+ name: String,
+ text: String,
+ ok: bool,
+}
+
+#[derive(Default, Debug)]
+struct Tests {
+ ok: HashMap<String, Test>,
+ err: HashMap<String, Test>,
+}
+
+fn collect_tests(s: &str) -> Vec<Test> {
+ let mut res = Vec::new();
+ for comment_block in sourcegen::CommentBlock::extract_untagged(s) {
+ let first_line = &comment_block.contents[0];
+ let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
+ (name.to_string(), true)
+ } else if let Some(name) = first_line.strip_prefix("test_err ") {
+ (name.to_string(), false)
+ } else {
+ continue;
+ };
+ let text: String = comment_block.contents[1..]
+ .iter()
+ .cloned()
+ .chain(iter::once(String::new()))
+ .collect::<Vec<_>>()
+ .join("\n");
+ assert!(!text.trim().is_empty() && text.ends_with('\n'));
+ res.push(Test { name, text, ok })
+ }
+ res
+}
+
+fn tests_from_dir(dir: &Path) -> Tests {
+ let mut res = Tests::default();
+ for entry in sourcegen::list_rust_files(dir) {
+ process_file(&mut res, entry.as_path());
+ }
+ let grammar_rs = dir.parent().unwrap().join("grammar.rs");
+ process_file(&mut res, &grammar_rs);
+ return res;
+
+ fn process_file(res: &mut Tests, path: &Path) {
+ let text = fs::read_to_string(path).unwrap();
+
+ for test in collect_tests(&text) {
+ if test.ok {
+ if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
+ panic!("Duplicate test: {}", old_test.name);
+ }
+ } else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
+ panic!("Duplicate test: {}", old_test.name);
+ }
+ }
+ }
+}
+
+fn existing_tests(dir: &Path, ok: bool) -> HashMap<String, (PathBuf, Test)> {
+ let mut res = HashMap::default();
+ for file in fs::read_dir(dir).unwrap() {
+ let file = file.unwrap();
+ let path = file.path();
+ if path.extension().unwrap_or_default() != "rs" {
+ continue;
+ }
+ let name = {
+ let file_name = path.file_name().unwrap().to_str().unwrap();
+ file_name[5..file_name.len() - 3].to_string()
+ };
+ let text = fs::read_to_string(&path).unwrap();
+ let test = Test { name: name.clone(), text, ok };
+ if let Some(old) = res.insert(name, (path, test)) {
+ println!("Duplicate test: {:?}", old);
+ }
+ }
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs
new file mode 100644
index 000000000..eb640dc7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs
@@ -0,0 +1,312 @@
+use expect_test::expect;
+
+use crate::TopEntryPoint;
+
+#[test]
+fn source_file() {
+ check(
+ TopEntryPoint::SourceFile,
+ "",
+ expect![[r#"
+ SOURCE_FILE
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::SourceFile,
+ "struct S;",
+ expect![[r#"
+ SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::SourceFile,
+ "@error@",
+ expect![[r#"
+ SOURCE_FILE
+ ERROR
+ AT "@"
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "error"
+ ERROR
+ AT "@"
+ error 0: expected an item
+ error 6: expected BANG
+ error 6: expected `{`, `[`, `(`
+ error 6: expected SEMICOLON
+ error 6: expected an item
+ "#]],
+ );
+}
+
+#[test]
+fn macro_stmt() {
+ check(
+ TopEntryPoint::MacroStmts,
+ "",
+ expect![[r#"
+ MACRO_STMTS
+ "#]],
+ );
+ check(
+ TopEntryPoint::MacroStmts,
+ "#!/usr/bin/rust",
+ expect![[r##"
+ MACRO_STMTS
+ ERROR
+ SHEBANG "#!/usr/bin/rust"
+ error 0: expected expression
+ "##]],
+ );
+ check(
+ TopEntryPoint::MacroStmts,
+ "let x = 1 2 struct S;",
+ expect![[r#"
+ MACRO_STMTS
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ "#]],
+ );
+}
+
+#[test]
+fn macro_items() {
+ check(
+ TopEntryPoint::MacroItems,
+ "",
+ expect![[r#"
+ MACRO_ITEMS
+ "#]],
+ );
+ check(
+ TopEntryPoint::MacroItems,
+ "#!/usr/bin/rust",
+ expect![[r##"
+ MACRO_ITEMS
+ ERROR
+ SHEBANG "#!/usr/bin/rust"
+ error 0: expected an item
+ "##]],
+ );
+ check(
+ TopEntryPoint::MacroItems,
+ "struct S; foo!{}",
+ expect![[r#"
+ MACRO_ITEMS
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE " "
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ BANG "!"
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ "#]],
+ );
+}
+
+#[test]
+fn macro_pattern() {
+ check(
+ TopEntryPoint::Pattern,
+ "",
+ expect![[r#"
+ ERROR
+ error 0: expected pattern
+ "#]],
+ );
+ check(
+ TopEntryPoint::Pattern,
+ "Some(_)",
+ expect![[r#"
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::Pattern,
+ "None leftover tokens",
+ expect![[r#"
+ ERROR
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ IDENT "leftover"
+ WHITESPACE " "
+ IDENT "tokens"
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::Pattern,
+ "@err",
+ expect![[r#"
+ ERROR
+ ERROR
+ AT "@"
+ IDENT "err"
+ error 0: expected pattern
+ "#]],
+ );
+}
+
+#[test]
+fn type_() {
+ check(
+ TopEntryPoint::Type,
+ "",
+ expect![[r#"
+ ERROR
+ error 0: expected type
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::Type,
+ "Option<!>",
+ expect![[r#"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Option"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ NEVER_TYPE
+ BANG "!"
+ R_ANGLE ">"
+ "#]],
+ );
+ check(
+ TopEntryPoint::Type,
+ "() () ()",
+ expect![[r#"
+ ERROR
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ L_PAREN "("
+ R_PAREN ")"
+ "#]],
+ );
+ check(
+ TopEntryPoint::Type,
+ "$$$",
+ expect![[r#"
+ ERROR
+ ERROR
+ DOLLAR "$"
+ DOLLAR "$"
+ DOLLAR "$"
+ error 0: expected type
+ "#]],
+ );
+}
+
+#[test]
+fn expr() {
+ check(
+ TopEntryPoint::Expr,
+ "",
+ expect![[r#"
+ ERROR
+ error 0: expected expression
+ "#]],
+ );
+ check(
+ TopEntryPoint::Expr,
+ "2 + 2 == 5",
+ expect![[r#"
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ "#]],
+ );
+ check(
+ TopEntryPoint::Expr,
+ "let _ = 0;",
+ expect![[r#"
+ ERROR
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ "#]],
+ );
+}
+
+#[track_caller]
+fn check(entry: TopEntryPoint, input: &str, expect: expect_test::Expect) {
+ let (parsed, _errors) = super::parse(entry, input);
+ expect.assert_eq(&parsed)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/token_set.rs b/src/tools/rust-analyzer/crates/parser/src/token_set.rs
new file mode 100644
index 000000000..cd4894c1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/token_set.rs
@@ -0,0 +1,42 @@
+//! A bit-set of `SyntaxKind`s.
+
+use crate::SyntaxKind;
+
+/// A bit-set of `SyntaxKind`s
+#[derive(Clone, Copy)]
+pub(crate) struct TokenSet(u128);
+
+impl TokenSet {
+ pub(crate) const EMPTY: TokenSet = TokenSet(0);
+
+ pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
+ let mut res = 0u128;
+ let mut i = 0;
+ while i < kinds.len() {
+ res |= mask(kinds[i]);
+ i += 1;
+ }
+ TokenSet(res)
+ }
+
+ pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
+ TokenSet(self.0 | other.0)
+ }
+
+ pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
+ self.0 & mask(kind) != 0
+ }
+}
+
+const fn mask(kind: SyntaxKind) -> u128 {
+ 1u128 << (kind as usize)
+}
+
+#[test]
+fn token_set_works_for_tokens() {
+ use crate::SyntaxKind::*;
+ let ts = TokenSet::new(&[EOF, SHEBANG]);
+ assert!(ts.contains(EOF));
+ assert!(ts.contains(SHEBANG));
+ assert!(!ts.contains(PLUS));
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rast
new file mode 100644
index 000000000..af03d73ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rast
@@ -0,0 +1,48 @@
+FLOAT_NUMBER "0e" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "0E" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42e+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42e-" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42E+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42E-" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "e"
+PLUS "+"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "e"
+MINUS "-"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "E"
+PLUS "+"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "E"
+MINUS "-"
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42.2e+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2e-" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E-" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42.2e+f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2e-f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E+f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E-f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rs
new file mode 100644
index 000000000..286584c88
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rs
@@ -0,0 +1,22 @@
+0e
+0E
+
+42e+
+42e-
+42E+
+42E-
+
+42.e+
+42.e-
+42.E+
+42.E-
+
+42.2e+
+42.2e-
+42.2E+
+42.2E-
+
+42.2e+f32
+42.2e-f32
+42.2E+f32
+42.2E-f32
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.txt
new file mode 100644
index 000000000..af03d73ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.txt
@@ -0,0 +1,48 @@
+FLOAT_NUMBER "0e" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "0E" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42e+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42e-" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42E+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42E-" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "e"
+PLUS "+"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "e"
+MINUS "-"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "E"
+PLUS "+"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "E"
+MINUS "-"
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42.2e+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2e-" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E-" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42.2e+f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2e-f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E+f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E-f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rast
new file mode 100644
index 000000000..7f7194f45
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rast
@@ -0,0 +1,26 @@
+INT_NUMBER "0b" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0o" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0b_" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0o_" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x_" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0bnoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0onoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0xnoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0xG" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0xg" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0x_g" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x_G" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rs
new file mode 100644
index 000000000..aa2a9fdca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rs
@@ -0,0 +1,17 @@
+0b
+0o
+0x
+
+0b_
+0o_
+0x_
+
+0bnoDigit
+0onoDigit
+0xnoDigit
+
+0xG
+0xg
+
+0x_g
+0x_G
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.txt
new file mode 100644
index 000000000..7f7194f45
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.txt
@@ -0,0 +1,26 @@
+INT_NUMBER "0b" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0o" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0b_" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0o_" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x_" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0bnoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0onoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0xnoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0xG" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0xg" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0x_g" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x_G" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rast
new file mode 100644
index 000000000..e919bf2a4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rast
@@ -0,0 +1,4 @@
+LIFETIME_IDENT "'1" error: Lifetime name cannot start with a number
+WHITESPACE "\n"
+LIFETIME_IDENT "'1lifetime" error: Lifetime name cannot start with a number
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rs
new file mode 100644
index 000000000..a7698a404
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rs
@@ -0,0 +1,2 @@
+'1
+'1lifetime
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.txt
new file mode 100644
index 000000000..e919bf2a4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.txt
@@ -0,0 +1,4 @@
+LIFETIME_IDENT "'1" error: Lifetime name cannot start with a number
+WHITESPACE "\n"
+LIFETIME_IDENT "'1lifetime" error: Lifetime name cannot start with a number
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rast
new file mode 100644
index 000000000..7d2c32976
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rast
@@ -0,0 +1 @@
+COMMENT "/*" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rs
new file mode 100644
index 000000000..22e83649f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rs
@@ -0,0 +1 @@
+/* \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.txt
new file mode 100644
index 000000000..7d2c32976
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.txt
@@ -0,0 +1 @@
+COMMENT "/*" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rast
new file mode 100644
index 000000000..227a20660
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rast
@@ -0,0 +1 @@
+COMMENT "/* comment\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rs
new file mode 100644
index 000000000..c45c2844d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rs
@@ -0,0 +1 @@
+/* comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.txt
new file mode 100644
index 000000000..227a20660
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.txt
@@ -0,0 +1 @@
+COMMENT "/* comment\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rast
new file mode 100644
index 000000000..36944dbb2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rast
@@ -0,0 +1 @@
+BYTE "b'" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rs
new file mode 100644
index 000000000..795dc7e25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rs
@@ -0,0 +1 @@
+b' \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.txt
new file mode 100644
index 000000000..36944dbb2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.txt
@@ -0,0 +1 @@
+BYTE "b'" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rast
new file mode 100644
index 000000000..534a3cadc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rs
new file mode 100644
index 000000000..36f4f4321
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rs
@@ -0,0 +1 @@
+b" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.txt
new file mode 100644
index 000000000..534a3cadc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rast
new file mode 100644
index 000000000..03f61de9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\x7f" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rs
new file mode 100644
index 000000000..836c112c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rs
@@ -0,0 +1 @@
+b"\x7f \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.txt
new file mode 100644
index 000000000..03f61de9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\x7f" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rast
new file mode 100644
index 000000000..e11d49d1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"🦀" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rs
new file mode 100644
index 000000000..3c23a0372
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rs
@@ -0,0 +1 @@
+b"🦀 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.txt
new file mode 100644
index 000000000..e11d49d1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"🦀" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rast
new file mode 100644
index 000000000..4e374b120
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rs
new file mode 100644
index 000000000..cce661538
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rs
@@ -0,0 +1 @@
+b"\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.txt
new file mode 100644
index 000000000..4e374b120
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rast
new file mode 100644
index 000000000..ee1997586
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\\"" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rs
new file mode 100644
index 000000000..f2ff58ba9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rs
@@ -0,0 +1 @@
+b"\" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.txt
new file mode 100644
index 000000000..ee1997586
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\\"" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rast
new file mode 100644
index 000000000..b109d8629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\n" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rs
new file mode 100644
index 000000000..5e680aabb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rs
@@ -0,0 +1 @@
+b"\n \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.txt
new file mode 100644
index 000000000..b109d8629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\n" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rast
new file mode 100644
index 000000000..eaca94fa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\" " error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rs
new file mode 100644
index 000000000..d6898541e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rs
@@ -0,0 +1 @@
+b" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.txt
new file mode 100644
index 000000000..eaca94fa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\" " error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rast
new file mode 100644
index 000000000..3b79f48bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\u{20AA}" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rs
new file mode 100644
index 000000000..1c6df1d00
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rs
@@ -0,0 +1 @@
+b"\u{20AA} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.txt
new file mode 100644
index 000000000..3b79f48bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\u{20AA}" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rast
new file mode 100644
index 000000000..5525376f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rast
@@ -0,0 +1 @@
+BYTE "b'\\x7f" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rs
new file mode 100644
index 000000000..d146a8090
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rs
@@ -0,0 +1 @@
+b'\x7f \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.txt
new file mode 100644
index 000000000..5525376f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.txt
@@ -0,0 +1 @@
+BYTE "b'\\x7f" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rast
new file mode 100644
index 000000000..e7a8be4f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rast
@@ -0,0 +1 @@
+BYTE "b'🦀" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rs
new file mode 100644
index 000000000..c9230dc24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rs
@@ -0,0 +1 @@
+b'🦀 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.txt
new file mode 100644
index 000000000..e7a8be4f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.txt
@@ -0,0 +1 @@
+BYTE "b'🦀" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rast
new file mode 100644
index 000000000..d9937135a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rast
@@ -0,0 +1 @@
+BYTE "b'\\" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rs
new file mode 100644
index 000000000..abffa5037
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rs
@@ -0,0 +1 @@
+b'\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.txt
new file mode 100644
index 000000000..d9937135a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.txt
@@ -0,0 +1 @@
+BYTE "b'\\" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rast
new file mode 100644
index 000000000..c408cdb2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rast
@@ -0,0 +1 @@
+BYTE "b'\\n" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rs
new file mode 100644
index 000000000..4f46836a9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rs
@@ -0,0 +1 @@
+b'\n \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.txt
new file mode 100644
index 000000000..c408cdb2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.txt
@@ -0,0 +1 @@
+BYTE "b'\\n" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rast
new file mode 100644
index 000000000..b331f9560
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rast
@@ -0,0 +1 @@
+BYTE "b'\\'" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rs
new file mode 100644
index 000000000..645b641ee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rs
@@ -0,0 +1 @@
+b'\' \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.txt
new file mode 100644
index 000000000..b331f9560
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.txt
@@ -0,0 +1 @@
+BYTE "b'\\'" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rast
new file mode 100644
index 000000000..80c0e1c00
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rast
@@ -0,0 +1 @@
+BYTE "b' " error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rs
new file mode 100644
index 000000000..93b7f9c87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rs
@@ -0,0 +1 @@
+b' \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.txt
new file mode 100644
index 000000000..80c0e1c00
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.txt
@@ -0,0 +1 @@
+BYTE "b' " error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rast
new file mode 100644
index 000000000..e1c3dc141
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rast
@@ -0,0 +1 @@
+BYTE "b'\\u{20AA}" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rs
new file mode 100644
index 000000000..a3dec7c25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rs
@@ -0,0 +1 @@
+b'\u{20AA} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.txt
new file mode 100644
index 000000000..e1c3dc141
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.txt
@@ -0,0 +1 @@
+BYTE "b'\\u{20AA}" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rast
new file mode 100644
index 000000000..218c7a2d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rast
@@ -0,0 +1 @@
+CHAR "'" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rs
new file mode 100644
index 000000000..ad2823b48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rs
@@ -0,0 +1 @@
+' \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.txt
new file mode 100644
index 000000000..218c7a2d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.txt
@@ -0,0 +1 @@
+CHAR "'" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rast
new file mode 100644
index 000000000..a0d8e1b83
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rast
@@ -0,0 +1 @@
+CHAR "'\\x7f" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rs
new file mode 100644
index 000000000..cf74b4dad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rs
@@ -0,0 +1 @@
+'\x7f \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.txt
new file mode 100644
index 000000000..a0d8e1b83
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.txt
@@ -0,0 +1 @@
+CHAR "'\\x7f" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rast
new file mode 100644
index 000000000..56f19cce0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rast
@@ -0,0 +1 @@
+CHAR "'🦀" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rs
new file mode 100644
index 000000000..e264a4152
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rs
@@ -0,0 +1 @@
+'🦀 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.txt
new file mode 100644
index 000000000..56f19cce0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.txt
@@ -0,0 +1 @@
+CHAR "'🦀" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rast
new file mode 100644
index 000000000..cfa0e0752
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rast
@@ -0,0 +1 @@
+CHAR "'\\" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rs
new file mode 100644
index 000000000..6ba258b10
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rs
@@ -0,0 +1 @@
+'\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.txt
new file mode 100644
index 000000000..cfa0e0752
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.txt
@@ -0,0 +1 @@
+CHAR "'\\" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rast
new file mode 100644
index 000000000..6a42a4e22
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rast
@@ -0,0 +1 @@
+CHAR "'\\n" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rs
new file mode 100644
index 000000000..78bef7e3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rs
@@ -0,0 +1 @@
+'\n \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.txt
new file mode 100644
index 000000000..6a42a4e22
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.txt
@@ -0,0 +1 @@
+CHAR "'\\n" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rast
new file mode 100644
index 000000000..1275f6aa8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rast
@@ -0,0 +1 @@
+CHAR "'\\'" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rs
new file mode 100644
index 000000000..a0e722065
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rs
@@ -0,0 +1 @@
+'\' \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.txt
new file mode 100644
index 000000000..1275f6aa8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.txt
@@ -0,0 +1 @@
+CHAR "'\\'" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rast
new file mode 100644
index 000000000..746c425c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rast
@@ -0,0 +1 @@
+CHAR "' " error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rs
new file mode 100644
index 000000000..309ecfe47
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rs
@@ -0,0 +1 @@
+' \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.txt
new file mode 100644
index 000000000..746c425c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.txt
@@ -0,0 +1 @@
+CHAR "' " error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rast
new file mode 100644
index 000000000..9abd59098
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rast
@@ -0,0 +1 @@
+CHAR "'\\u{20AA}" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rs
new file mode 100644
index 000000000..50be91f68
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rs
@@ -0,0 +1 @@
+'\u{20AA} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.txt
new file mode 100644
index 000000000..9abd59098
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.txt
@@ -0,0 +1 @@
+CHAR "'\\u{20AA}" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rast
new file mode 100644
index 000000000..15ce8905a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rast
@@ -0,0 +1 @@
+COMMENT "/* /* /*\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rs
new file mode 100644
index 000000000..3fcfc9660
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rs
@@ -0,0 +1 @@
+/* /* /*
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.txt
new file mode 100644
index 000000000..15ce8905a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.txt
@@ -0,0 +1 @@
+COMMENT "/* /* /*\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rast
new file mode 100644
index 000000000..e9b74ee7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rast
@@ -0,0 +1 @@
+COMMENT "/** /*! /* comment */ */\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rs
new file mode 100644
index 000000000..26c898f01
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rs
@@ -0,0 +1 @@
+/** /*! /* comment */ */
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.txt
new file mode 100644
index 000000000..e9b74ee7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.txt
@@ -0,0 +1 @@
+COMMENT "/** /*! /* comment */ */\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rast
new file mode 100644
index 000000000..6ec1780c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rs
new file mode 100644
index 000000000..ae5bae622
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rs
@@ -0,0 +1 @@
+br##" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.txt
new file mode 100644
index 000000000..6ec1780c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rast
new file mode 100644
index 000000000..d65f1bb2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\x7f" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rs
new file mode 100644
index 000000000..d50270afe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rs
@@ -0,0 +1 @@
+br##"\x7f \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.txt
new file mode 100644
index 000000000..d65f1bb2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\x7f" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rast
new file mode 100644
index 000000000..0f9e0a165
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"🦀" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rs
new file mode 100644
index 000000000..9ef01207a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rs
@@ -0,0 +1 @@
+br##"🦀 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.txt
new file mode 100644
index 000000000..0f9e0a165
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"🦀" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rast
new file mode 100644
index 000000000..202dcd2d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rs
new file mode 100644
index 000000000..0b3c015d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rs
@@ -0,0 +1 @@
+br##"\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.txt
new file mode 100644
index 000000000..202dcd2d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rast
new file mode 100644
index 000000000..d45485b52
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\n" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rs
new file mode 100644
index 000000000..0d8b0e7ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rs
@@ -0,0 +1 @@
+br##"\n \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.txt
new file mode 100644
index 000000000..d45485b52
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\n" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rast
new file mode 100644
index 000000000..1bfabbc3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\" " error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rs
new file mode 100644
index 000000000..14c602fd2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rs
@@ -0,0 +1 @@
+br##" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.txt
new file mode 100644
index 000000000..1bfabbc3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\" " error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rast
new file mode 100644
index 000000000..104ab8aae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\u{20AA}" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rs
new file mode 100644
index 000000000..90e299a1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rs
@@ -0,0 +1 @@
+br##"\u{20AA} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.txt
new file mode 100644
index 000000000..104ab8aae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\u{20AA}" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rast
new file mode 100644
index 000000000..71b20fd19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rast
@@ -0,0 +1 @@
+STRING "r##\"" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rs
new file mode 100644
index 000000000..557c59b62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rs
@@ -0,0 +1 @@
+r##" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.txt
new file mode 100644
index 000000000..71b20fd19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.txt
@@ -0,0 +1 @@
+STRING "r##\"" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rast
new file mode 100644
index 000000000..dc106dd24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rast
@@ -0,0 +1 @@
+STRING "r##\"\\x7f" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rs
new file mode 100644
index 000000000..5bec883dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rs
@@ -0,0 +1 @@
+r##"\x7f \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.txt
new file mode 100644
index 000000000..dc106dd24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.txt
@@ -0,0 +1 @@
+STRING "r##\"\\x7f" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rast
new file mode 100644
index 000000000..30ee029f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rast
@@ -0,0 +1 @@
+STRING "r##\"🦀" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rs
new file mode 100644
index 000000000..bd046e4bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rs
@@ -0,0 +1 @@
+r##"🦀 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.txt
new file mode 100644
index 000000000..30ee029f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.txt
@@ -0,0 +1 @@
+STRING "r##\"🦀" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rast
new file mode 100644
index 000000000..8a6f6cc43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rast
@@ -0,0 +1 @@
+STRING "r##\"\\" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rs
new file mode 100644
index 000000000..9242077b8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rs
@@ -0,0 +1 @@
+r##"\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.txt
new file mode 100644
index 000000000..8a6f6cc43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.txt
@@ -0,0 +1 @@
+STRING "r##\"\\" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rast
new file mode 100644
index 000000000..f46eff251
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rast
@@ -0,0 +1 @@
+STRING "r##\"\\n" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rs
new file mode 100644
index 000000000..db1c16f2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rs
@@ -0,0 +1 @@
+r##"\n \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.txt
new file mode 100644
index 000000000..f46eff251
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.txt
@@ -0,0 +1 @@
+STRING "r##\"\\n" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rast
new file mode 100644
index 000000000..49b6afea4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rast
@@ -0,0 +1 @@
+STRING "r##\" " error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rs
new file mode 100644
index 000000000..f104bae4f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rs
@@ -0,0 +1 @@
+r##" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.txt
new file mode 100644
index 000000000..49b6afea4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.txt
@@ -0,0 +1 @@
+STRING "r##\" " error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rast
new file mode 100644
index 000000000..d10d6d8e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rast
@@ -0,0 +1 @@
+STRING "r##\"\\u{20AA}" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rs
new file mode 100644
index 000000000..bf05c3913
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rs
@@ -0,0 +1 @@
+r##"\u{20AA} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.txt
new file mode 100644
index 000000000..d10d6d8e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.txt
@@ -0,0 +1 @@
+STRING "r##\"\\u{20AA}" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rast
new file mode 100644
index 000000000..3b89ce0ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rast
@@ -0,0 +1 @@
+STRING "\"" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rs
new file mode 100644
index 000000000..9d68933c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rs
@@ -0,0 +1 @@
+" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.txt
new file mode 100644
index 000000000..3b89ce0ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.txt
@@ -0,0 +1 @@
+STRING "\"" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rast
new file mode 100644
index 000000000..6694cf17a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rast
@@ -0,0 +1 @@
+STRING "\"\\x7f" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rs
new file mode 100644
index 000000000..56186a344
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rs
@@ -0,0 +1 @@
+"\x7f \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.txt
new file mode 100644
index 000000000..6694cf17a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.txt
@@ -0,0 +1 @@
+STRING "\"\\x7f" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rast
new file mode 100644
index 000000000..5f4501c18
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rast
@@ -0,0 +1 @@
+STRING "\"🦀" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rs
new file mode 100644
index 000000000..d439b8d2a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rs
@@ -0,0 +1 @@
+"🦀 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.txt
new file mode 100644
index 000000000..5f4501c18
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.txt
@@ -0,0 +1 @@
+STRING "\"🦀" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rast
new file mode 100644
index 000000000..a8ac565ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rast
@@ -0,0 +1 @@
+STRING "\"\\" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rs
new file mode 100644
index 000000000..00a258400
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rs
@@ -0,0 +1 @@
+"\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.txt
new file mode 100644
index 000000000..a8ac565ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.txt
@@ -0,0 +1 @@
+STRING "\"\\" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rast
new file mode 100644
index 000000000..919183b91
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rast
@@ -0,0 +1 @@
+STRING "\"\\\"" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rs
new file mode 100644
index 000000000..403c2d6dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rs
@@ -0,0 +1 @@
+"\" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.txt
new file mode 100644
index 000000000..919183b91
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.txt
@@ -0,0 +1 @@
+STRING "\"\\\"" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rast
new file mode 100644
index 000000000..39e288af9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rast
@@ -0,0 +1 @@
+STRING "\"\\n" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rs
new file mode 100644
index 000000000..a0c29b8cf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rs
@@ -0,0 +1 @@
+"\n \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.txt
new file mode 100644
index 000000000..39e288af9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.txt
@@ -0,0 +1 @@
+STRING "\"\\n" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rast
new file mode 100644
index 000000000..dcff94d7e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rast
@@ -0,0 +1 @@
+STRING "\" " error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rs
new file mode 100644
index 000000000..72cdc841f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rs
@@ -0,0 +1 @@
+" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.txt
new file mode 100644
index 000000000..dcff94d7e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.txt
@@ -0,0 +1 @@
+STRING "\" " error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rast
new file mode 100644
index 000000000..ac232b530
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rast
@@ -0,0 +1 @@
+STRING "\"\\u{20AA}" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rs
new file mode 100644
index 000000000..ed24095c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rs
@@ -0,0 +1 @@
+"\u{20AA} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.txt
new file mode 100644
index 000000000..ac232b530
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.txt
@@ -0,0 +1 @@
+STRING "\"\\u{20AA}" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rast
new file mode 100644
index 000000000..cf942c92f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##" error: Missing `"` symbol after `#` symbols to begin the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rs
new file mode 100644
index 000000000..7e8cadf4f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rs
@@ -0,0 +1 @@
+br## \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.txt
new file mode 100644
index 000000000..cf942c92f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##" error: Missing `"` symbol after `#` symbols to begin the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rast
new file mode 100644
index 000000000..042769c27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rast
@@ -0,0 +1,9 @@
+BYTE_STRING "br## " error: Missing `"` symbol after `#` symbols to begin the raw byte string literal
+IDENT "I"
+WHITESPACE " "
+IDENT "lack"
+WHITESPACE " "
+IDENT "a"
+WHITESPACE " "
+IDENT "quote"
+BANG "!"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rs
new file mode 100644
index 000000000..d9b55455a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rs
@@ -0,0 +1 @@
+br## I lack a quote! \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.txt
new file mode 100644
index 000000000..042769c27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.txt
@@ -0,0 +1,9 @@
+BYTE_STRING "br## " error: Missing `"` symbol after `#` symbols to begin the raw byte string literal
+IDENT "I"
+WHITESPACE " "
+IDENT "lack"
+WHITESPACE " "
+IDENT "a"
+WHITESPACE " "
+IDENT "quote"
+BANG "!"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rast
new file mode 100644
index 000000000..2f7c7529a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rast
@@ -0,0 +1 @@
+STRING "r##" error: Missing `"` symbol after `#` symbols to begin the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rs
new file mode 100644
index 000000000..eddf8d080
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rs
@@ -0,0 +1 @@
+r## \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.txt
new file mode 100644
index 000000000..2f7c7529a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.txt
@@ -0,0 +1 @@
+STRING "r##" error: Missing `"` symbol after `#` symbols to begin the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rast
new file mode 100644
index 000000000..4a06b0abe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rast
@@ -0,0 +1,9 @@
+STRING "r## " error: Missing `"` symbol after `#` symbols to begin the raw string literal
+IDENT "I"
+WHITESPACE " "
+IDENT "lack"
+WHITESPACE " "
+IDENT "a"
+WHITESPACE " "
+IDENT "quote"
+BANG "!"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rs
new file mode 100644
index 000000000..534668a9b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rs
@@ -0,0 +1 @@
+r## I lack a quote! \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.txt
new file mode 100644
index 000000000..4a06b0abe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.txt
@@ -0,0 +1,9 @@
+STRING "r## " error: Missing `"` symbol after `#` symbols to begin the raw string literal
+IDENT "I"
+WHITESPACE " "
+IDENT "lack"
+WHITESPACE " "
+IDENT "a"
+WHITESPACE " "
+IDENT "quote"
+BANG "!"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rast
new file mode 100644
index 000000000..18bb5cad8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rast
@@ -0,0 +1,6 @@
+COMMENT "/* */"
+WHITESPACE "\n"
+COMMENT "/**/"
+WHITESPACE "\n"
+COMMENT "/* /* */ */"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rs
new file mode 100644
index 000000000..b880a59d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rs
@@ -0,0 +1,3 @@
+/* */
+/**/
+/* /* */ */
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.txt
new file mode 100644
index 000000000..18bb5cad8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.txt
@@ -0,0 +1,6 @@
+COMMENT "/* */"
+WHITESPACE "\n"
+COMMENT "/**/"
+WHITESPACE "\n"
+COMMENT "/* /* */ */"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
new file mode 100644
index 000000000..c848ac368
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
@@ -0,0 +1,22 @@
+BYTE "b''"
+WHITESPACE " "
+BYTE "b'x'"
+WHITESPACE " "
+BYTE_STRING "b\"foo\""
+WHITESPACE " "
+BYTE_STRING "br\"\""
+WHITESPACE "\n"
+BYTE "b''suf"
+WHITESPACE " "
+BYTE_STRING "b\"\"ix"
+WHITESPACE " "
+BYTE_STRING "br\"\"br"
+WHITESPACE "\n"
+BYTE "b'\\n'"
+WHITESPACE " "
+BYTE "b'\\\\'"
+WHITESPACE " "
+BYTE "b'\\''"
+WHITESPACE " "
+BYTE "b'hello'"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
new file mode 100644
index 000000000..b54930f5e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
@@ -0,0 +1,3 @@
+b'' b'x' b"foo" br""
+b''suf b""ix br""br
+b'\n' b'\\' b'\'' b'hello'
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.txt
new file mode 100644
index 000000000..c848ac368
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.txt
@@ -0,0 +1,22 @@
+BYTE "b''"
+WHITESPACE " "
+BYTE "b'x'"
+WHITESPACE " "
+BYTE_STRING "b\"foo\""
+WHITESPACE " "
+BYTE_STRING "br\"\""
+WHITESPACE "\n"
+BYTE "b''suf"
+WHITESPACE " "
+BYTE_STRING "b\"\"ix"
+WHITESPACE " "
+BYTE_STRING "br\"\"br"
+WHITESPACE "\n"
+BYTE "b'\\n'"
+WHITESPACE " "
+BYTE "b'\\\\'"
+WHITESPACE " "
+BYTE "b'\\''"
+WHITESPACE " "
+BYTE "b'hello'"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
new file mode 100644
index 000000000..66e58cc29
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
@@ -0,0 +1,16 @@
+CHAR "'x'"
+WHITESPACE " "
+CHAR "' '"
+WHITESPACE " "
+CHAR "'0'"
+WHITESPACE " "
+CHAR "'hello'"
+WHITESPACE " "
+CHAR "'\\x7f'"
+WHITESPACE " "
+CHAR "'\\n'"
+WHITESPACE " "
+CHAR "'\\\\'"
+WHITESPACE " "
+CHAR "'\\''"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
new file mode 100644
index 000000000..454ee0a5f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
@@ -0,0 +1 @@
+'x' ' ' '0' 'hello' '\x7f' '\n' '\\' '\''
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.txt
new file mode 100644
index 000000000..66e58cc29
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.txt
@@ -0,0 +1,16 @@
+CHAR "'x'"
+WHITESPACE " "
+CHAR "' '"
+WHITESPACE " "
+CHAR "'0'"
+WHITESPACE " "
+CHAR "'hello'"
+WHITESPACE " "
+CHAR "'\\x7f'"
+WHITESPACE " "
+CHAR "'\\n'"
+WHITESPACE " "
+CHAR "'\\\\'"
+WHITESPACE " "
+CHAR "'\\''"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rast
new file mode 100644
index 000000000..7f5ce9de1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rast
@@ -0,0 +1,3 @@
+IDENT "hello"
+WHITESPACE " "
+IDENT "world"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rs
new file mode 100644
index 000000000..95d09f2b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rs
@@ -0,0 +1 @@
+hello world \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.txt
new file mode 100644
index 000000000..7f5ce9de1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.txt
@@ -0,0 +1,3 @@
+IDENT "hello"
+WHITESPACE " "
+IDENT "world"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rast
new file mode 100644
index 000000000..5689644c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rast
@@ -0,0 +1,14 @@
+IDENT "foo"
+WHITESPACE " "
+IDENT "foo_"
+WHITESPACE " "
+IDENT "_foo"
+WHITESPACE " "
+UNDERSCORE "_"
+WHITESPACE " "
+IDENT "__"
+WHITESPACE " "
+IDENT "x"
+WHITESPACE " "
+IDENT "привет"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rs
new file mode 100644
index 000000000..c05c9c009
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rs
@@ -0,0 +1 @@
+foo foo_ _foo _ __ x привет
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.txt
new file mode 100644
index 000000000..5689644c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.txt
@@ -0,0 +1,14 @@
+IDENT "foo"
+WHITESPACE " "
+IDENT "foo_"
+WHITESPACE " "
+IDENT "_foo"
+WHITESPACE " "
+UNDERSCORE "_"
+WHITESPACE " "
+IDENT "__"
+WHITESPACE " "
+IDENT "x"
+WHITESPACE " "
+IDENT "привет"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rast
new file mode 100644
index 000000000..e19b1399a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rast
@@ -0,0 +1,64 @@
+ASYNC_KW "async"
+WHITESPACE " "
+FN_KW "fn"
+WHITESPACE " "
+USE_KW "use"
+WHITESPACE " "
+STRUCT_KW "struct"
+WHITESPACE " "
+TRAIT_KW "trait"
+WHITESPACE " "
+ENUM_KW "enum"
+WHITESPACE " "
+IMPL_KW "impl"
+WHITESPACE " "
+TRUE_KW "true"
+WHITESPACE " "
+FALSE_KW "false"
+WHITESPACE " "
+AS_KW "as"
+WHITESPACE " "
+EXTERN_KW "extern"
+WHITESPACE " "
+CRATE_KW "crate"
+WHITESPACE "\n"
+MOD_KW "mod"
+WHITESPACE " "
+PUB_KW "pub"
+WHITESPACE " "
+SELF_KW "self"
+WHITESPACE " "
+SUPER_KW "super"
+WHITESPACE " "
+IN_KW "in"
+WHITESPACE " "
+WHERE_KW "where"
+WHITESPACE " "
+FOR_KW "for"
+WHITESPACE " "
+LOOP_KW "loop"
+WHITESPACE " "
+WHILE_KW "while"
+WHITESPACE " "
+IF_KW "if"
+WHITESPACE " "
+MATCH_KW "match"
+WHITESPACE " "
+CONST_KW "const"
+WHITESPACE "\n"
+STATIC_KW "static"
+WHITESPACE " "
+MUT_KW "mut"
+WHITESPACE " "
+TYPE_KW "type"
+WHITESPACE " "
+REF_KW "ref"
+WHITESPACE " "
+LET_KW "let"
+WHITESPACE " "
+ELSE_KW "else"
+WHITESPACE " "
+MOVE_KW "move"
+WHITESPACE " "
+RETURN_KW "return"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rs
new file mode 100644
index 000000000..1e91bff4e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rs
@@ -0,0 +1,3 @@
+async fn use struct trait enum impl true false as extern crate
+mod pub self super in where for loop while if match const
+static mut type ref let else move return
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.txt
new file mode 100644
index 000000000..e19b1399a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.txt
@@ -0,0 +1,64 @@
+ASYNC_KW "async"
+WHITESPACE " "
+FN_KW "fn"
+WHITESPACE " "
+USE_KW "use"
+WHITESPACE " "
+STRUCT_KW "struct"
+WHITESPACE " "
+TRAIT_KW "trait"
+WHITESPACE " "
+ENUM_KW "enum"
+WHITESPACE " "
+IMPL_KW "impl"
+WHITESPACE " "
+TRUE_KW "true"
+WHITESPACE " "
+FALSE_KW "false"
+WHITESPACE " "
+AS_KW "as"
+WHITESPACE " "
+EXTERN_KW "extern"
+WHITESPACE " "
+CRATE_KW "crate"
+WHITESPACE "\n"
+MOD_KW "mod"
+WHITESPACE " "
+PUB_KW "pub"
+WHITESPACE " "
+SELF_KW "self"
+WHITESPACE " "
+SUPER_KW "super"
+WHITESPACE " "
+IN_KW "in"
+WHITESPACE " "
+WHERE_KW "where"
+WHITESPACE " "
+FOR_KW "for"
+WHITESPACE " "
+LOOP_KW "loop"
+WHITESPACE " "
+WHILE_KW "while"
+WHITESPACE " "
+IF_KW "if"
+WHITESPACE " "
+MATCH_KW "match"
+WHITESPACE " "
+CONST_KW "const"
+WHITESPACE "\n"
+STATIC_KW "static"
+WHITESPACE " "
+MUT_KW "mut"
+WHITESPACE " "
+TYPE_KW "type"
+WHITESPACE " "
+REF_KW "ref"
+WHITESPACE " "
+LET_KW "let"
+WHITESPACE " "
+ELSE_KW "else"
+WHITESPACE " "
+MOVE_KW "move"
+WHITESPACE " "
+RETURN_KW "return"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rast
new file mode 100644
index 000000000..eeb1e9541
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rast
@@ -0,0 +1,8 @@
+LIFETIME_IDENT "'a"
+WHITESPACE " "
+LIFETIME_IDENT "'foo"
+WHITESPACE " "
+LIFETIME_IDENT "'foo_bar_baz"
+WHITESPACE " "
+LIFETIME_IDENT "'_"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rs
new file mode 100644
index 000000000..b764f1dce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rs
@@ -0,0 +1 @@
+'a 'foo 'foo_bar_baz '_
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.txt
new file mode 100644
index 000000000..eeb1e9541
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.txt
@@ -0,0 +1,8 @@
+LIFETIME_IDENT "'a"
+WHITESPACE " "
+LIFETIME_IDENT "'foo"
+WHITESPACE " "
+LIFETIME_IDENT "'foo_bar_baz"
+WHITESPACE " "
+LIFETIME_IDENT "'_"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rast
new file mode 100644
index 000000000..8d13c3f61
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rast
@@ -0,0 +1,57 @@
+INT_NUMBER "0"
+WHITESPACE " "
+INT_NUMBER "00"
+WHITESPACE " "
+INT_NUMBER "0_"
+WHITESPACE " "
+FLOAT_NUMBER "0."
+WHITESPACE " "
+INT_NUMBER "0z"
+WHITESPACE "\n"
+INT_NUMBER "01790"
+WHITESPACE " "
+INT_NUMBER "0b1790"
+WHITESPACE " "
+INT_NUMBER "0o1790"
+WHITESPACE " "
+INT_NUMBER "0x1790aAbBcCdDeEfF"
+WHITESPACE " "
+INT_NUMBER "001279"
+WHITESPACE " "
+INT_NUMBER "0_1279"
+WHITESPACE " "
+FLOAT_NUMBER "0.1279"
+WHITESPACE " "
+FLOAT_NUMBER "0e1279"
+WHITESPACE " "
+FLOAT_NUMBER "0E1279"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+DOT "."
+INT_NUMBER "2"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+IDENT "foo"
+L_PAREN "("
+R_PAREN ")"
+WHITESPACE "\n"
+FLOAT_NUMBER "0e+1"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+IDENT "e"
+PLUS "+"
+INT_NUMBER "1"
+WHITESPACE "\n"
+FLOAT_NUMBER "0.0E-2"
+WHITESPACE "\n"
+FLOAT_NUMBER "0___0.10000____0000e+111__"
+WHITESPACE "\n"
+INT_NUMBER "1i64"
+WHITESPACE " "
+FLOAT_NUMBER "92.0f32"
+WHITESPACE " "
+INT_NUMBER "11__s"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rs
new file mode 100644
index 000000000..bc761c235
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rs
@@ -0,0 +1,9 @@
+0 00 0_ 0. 0z
+01790 0b1790 0o1790 0x1790aAbBcCdDeEfF 001279 0_1279 0.1279 0e1279 0E1279
+0..2
+0.foo()
+0e+1
+0.e+1
+0.0E-2
+0___0.10000____0000e+111__
+1i64 92.0f32 11__s
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.txt
new file mode 100644
index 000000000..8d13c3f61
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.txt
@@ -0,0 +1,57 @@
+INT_NUMBER "0"
+WHITESPACE " "
+INT_NUMBER "00"
+WHITESPACE " "
+INT_NUMBER "0_"
+WHITESPACE " "
+FLOAT_NUMBER "0."
+WHITESPACE " "
+INT_NUMBER "0z"
+WHITESPACE "\n"
+INT_NUMBER "01790"
+WHITESPACE " "
+INT_NUMBER "0b1790"
+WHITESPACE " "
+INT_NUMBER "0o1790"
+WHITESPACE " "
+INT_NUMBER "0x1790aAbBcCdDeEfF"
+WHITESPACE " "
+INT_NUMBER "001279"
+WHITESPACE " "
+INT_NUMBER "0_1279"
+WHITESPACE " "
+FLOAT_NUMBER "0.1279"
+WHITESPACE " "
+FLOAT_NUMBER "0e1279"
+WHITESPACE " "
+FLOAT_NUMBER "0E1279"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+DOT "."
+INT_NUMBER "2"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+IDENT "foo"
+L_PAREN "("
+R_PAREN ")"
+WHITESPACE "\n"
+FLOAT_NUMBER "0e+1"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+IDENT "e"
+PLUS "+"
+INT_NUMBER "1"
+WHITESPACE "\n"
+FLOAT_NUMBER "0.0E-2"
+WHITESPACE "\n"
+FLOAT_NUMBER "0___0.10000____0000e+111__"
+WHITESPACE "\n"
+INT_NUMBER "1i64"
+WHITESPACE " "
+FLOAT_NUMBER "92.0f32"
+WHITESPACE " "
+INT_NUMBER "11__s"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rast
new file mode 100644
index 000000000..fddad9982
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rast
@@ -0,0 +1,2 @@
+IDENT "r#raw_ident"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rs
new file mode 100644
index 000000000..b40a1b6a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rs
@@ -0,0 +1 @@
+r#raw_ident
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.txt
new file mode 100644
index 000000000..fddad9982
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.txt
@@ -0,0 +1,2 @@
+IDENT "r#raw_ident"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rast
new file mode 100644
index 000000000..13cf733b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rast
@@ -0,0 +1,2 @@
+STRING "r###\"this is a r##\"raw\"## string\"###"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rs
new file mode 100644
index 000000000..e5ed0b693
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rs
@@ -0,0 +1 @@
+r###"this is a r##"raw"## string"###
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.txt
new file mode 100644
index 000000000..13cf733b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.txt
@@ -0,0 +1,2 @@
+STRING "r###\"this is a r##\"raw\"## string\"###"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast
new file mode 100644
index 000000000..a7681e9f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast
@@ -0,0 +1,22 @@
+SHEBANG "#!/usr/bin/env bash"
+WHITESPACE "\n"
+COMMENT "// hello"
+WHITESPACE "\n"
+COMMENT "//! World"
+WHITESPACE "\n"
+COMMENT "//!! Inner line doc"
+WHITESPACE "\n"
+COMMENT "/// Outer line doc"
+WHITESPACE "\n"
+COMMENT "//// Just a comment"
+WHITESPACE "\n\n"
+COMMENT "//"
+WHITESPACE "\n"
+COMMENT "//!"
+WHITESPACE "\n"
+COMMENT "//!!"
+WHITESPACE "\n"
+COMMENT "///"
+WHITESPACE "\n"
+COMMENT "////"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rs
new file mode 100644
index 000000000..4b6653f9c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rs
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+// hello
+//! World
+//!! Inner line doc
+/// Outer line doc
+//// Just a comment
+
+//
+//!
+//!!
+///
+////
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.txt
new file mode 100644
index 000000000..a7681e9f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.txt
@@ -0,0 +1,22 @@
+SHEBANG "#!/usr/bin/env bash"
+WHITESPACE "\n"
+COMMENT "// hello"
+WHITESPACE "\n"
+COMMENT "//! World"
+WHITESPACE "\n"
+COMMENT "//!! Inner line doc"
+WHITESPACE "\n"
+COMMENT "/// Outer line doc"
+WHITESPACE "\n"
+COMMENT "//// Just a comment"
+WHITESPACE "\n\n"
+COMMENT "//"
+WHITESPACE "\n"
+COMMENT "//!"
+WHITESPACE "\n"
+COMMENT "//!!"
+WHITESPACE "\n"
+COMMENT "///"
+WHITESPACE "\n"
+COMMENT "////"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rast
new file mode 100644
index 000000000..ec222591b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rast
@@ -0,0 +1,8 @@
+STRING "\"hello\""
+WHITESPACE " "
+STRING "r\"world\""
+WHITESPACE " "
+STRING "\"\\n\\\"\\\\no escape\""
+WHITESPACE " "
+STRING "\"multi\nline\""
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rs
new file mode 100644
index 000000000..4ddb5bffc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rs
@@ -0,0 +1,2 @@
+"hello" r"world" "\n\"\\no escape" "multi
+line"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.txt
new file mode 100644
index 000000000..ec222591b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.txt
@@ -0,0 +1,8 @@
+STRING "\"hello\""
+WHITESPACE " "
+STRING "r\"world\""
+WHITESPACE " "
+STRING "\"\\n\\\"\\\\no escape\""
+WHITESPACE " "
+STRING "\"multi\nline\""
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rast
new file mode 100644
index 000000000..533ccff9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rast
@@ -0,0 +1,77 @@
+SEMICOLON ";"
+WHITESPACE " "
+COMMA ","
+WHITESPACE " "
+L_PAREN "("
+WHITESPACE " "
+R_PAREN ")"
+WHITESPACE " "
+L_CURLY "{"
+WHITESPACE " "
+R_CURLY "}"
+WHITESPACE " "
+L_BRACK "["
+WHITESPACE " "
+R_BRACK "]"
+WHITESPACE " "
+L_ANGLE "<"
+WHITESPACE " "
+R_ANGLE ">"
+WHITESPACE " "
+AT "@"
+WHITESPACE " "
+POUND "#"
+WHITESPACE " "
+TILDE "~"
+WHITESPACE " "
+QUESTION "?"
+WHITESPACE " "
+DOLLAR "$"
+WHITESPACE " "
+AMP "&"
+WHITESPACE " "
+PIPE "|"
+WHITESPACE " "
+PLUS "+"
+WHITESPACE " "
+STAR "*"
+WHITESPACE " "
+SLASH "/"
+WHITESPACE " "
+CARET "^"
+WHITESPACE " "
+PERCENT "%"
+WHITESPACE "\n"
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+EQ "="
+WHITESPACE "\n"
+COLON ":"
+WHITESPACE " "
+COLON ":"
+COLON ":"
+WHITESPACE "\n"
+EQ "="
+WHITESPACE " "
+EQ "="
+R_ANGLE ">"
+WHITESPACE "\n"
+BANG "!"
+WHITESPACE " "
+BANG "!"
+EQ "="
+WHITESPACE "\n"
+MINUS "-"
+WHITESPACE " "
+MINUS "-"
+R_ANGLE ">"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rs
new file mode 100644
index 000000000..487569b5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rs
@@ -0,0 +1,6 @@
+; , ( ) { } [ ] < > @ # ~ ? $ & | + * / ^ %
+. .. ... ..=
+: ::
+= =>
+! !=
+- ->
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.txt
new file mode 100644
index 000000000..533ccff9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.txt
@@ -0,0 +1,77 @@
+SEMICOLON ";"
+WHITESPACE " "
+COMMA ","
+WHITESPACE " "
+L_PAREN "("
+WHITESPACE " "
+R_PAREN ")"
+WHITESPACE " "
+L_CURLY "{"
+WHITESPACE " "
+R_CURLY "}"
+WHITESPACE " "
+L_BRACK "["
+WHITESPACE " "
+R_BRACK "]"
+WHITESPACE " "
+L_ANGLE "<"
+WHITESPACE " "
+R_ANGLE ">"
+WHITESPACE " "
+AT "@"
+WHITESPACE " "
+POUND "#"
+WHITESPACE " "
+TILDE "~"
+WHITESPACE " "
+QUESTION "?"
+WHITESPACE " "
+DOLLAR "$"
+WHITESPACE " "
+AMP "&"
+WHITESPACE " "
+PIPE "|"
+WHITESPACE " "
+PLUS "+"
+WHITESPACE " "
+STAR "*"
+WHITESPACE " "
+SLASH "/"
+WHITESPACE " "
+CARET "^"
+WHITESPACE " "
+PERCENT "%"
+WHITESPACE "\n"
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+EQ "="
+WHITESPACE "\n"
+COLON ":"
+WHITESPACE " "
+COLON ":"
+COLON ":"
+WHITESPACE "\n"
+EQ "="
+WHITESPACE " "
+EQ "="
+R_ANGLE ">"
+WHITESPACE "\n"
+BANG "!"
+WHITESPACE " "
+BANG "!"
+EQ "="
+WHITESPACE "\n"
+MINUS "-"
+WHITESPACE " "
+MINUS "-"
+R_ANGLE ">"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rast
new file mode 100644
index 000000000..8ccb79e4e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rast
@@ -0,0 +1,12 @@
+IDENT "a"
+WHITESPACE " "
+IDENT "b"
+WHITESPACE " "
+IDENT "c"
+WHITESPACE "\n"
+IDENT "d"
+WHITESPACE "\n\n"
+IDENT "e"
+WHITESPACE "\t"
+IDENT "f"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rs
new file mode 100644
index 000000000..08fce1418
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rs
@@ -0,0 +1,4 @@
+a b c
+d
+
+e f
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.txt
new file mode 100644
index 000000000..8ccb79e4e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.txt
@@ -0,0 +1,12 @@
+IDENT "a"
+WHITESPACE " "
+IDENT "b"
+WHITESPACE " "
+IDENT "c"
+WHITESPACE "\n"
+IDENT "d"
+WHITESPACE "\n\n"
+IDENT "e"
+WHITESPACE "\t"
+IDENT "f"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rast
new file mode 100644
index 000000000..b30328c82
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rast
@@ -0,0 +1,34 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
+error 21: expected COMMA
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rs
new file mode 100644
index 000000000..fe5030d89
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rs
@@ -0,0 +1,4 @@
+struct S {
+ a: u32
+ b: u32
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rast
new file mode 100644
index 000000000..959b87ebb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rast
@@ -0,0 +1,18 @@
+SOURCE_FILE
+ ERROR
+ IF_KW "if"
+ WHITESPACE " "
+ ERROR
+ MATCH_KW "match"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+error 0: expected an item
+error 3: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rs
new file mode 100644
index 000000000..98f23de1f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rs
@@ -0,0 +1,3 @@
+if match
+
+struct S {} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
new file mode 100644
index 000000000..ec6c31510
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ SHEBANG "#!/use/bin/env rusti"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ ERROR
+ SLASH "/"
+ USE
+ USE_KW "use"
+ ERROR
+ SLASH "/"
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bin"
+ ERROR
+ SLASH "/"
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "env"
+ WHITESPACE " "
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "rusti"
+ WHITESPACE "\n"
+error 23: expected `[`
+error 23: expected an item
+error 27: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 28: expected SEMICOLON
+error 31: expected BANG
+error 31: expected `{`, `[`, `(`
+error 31: expected SEMICOLON
+error 31: expected an item
+error 35: expected BANG
+error 35: expected `{`, `[`, `(`
+error 35: expected SEMICOLON
+error 41: expected BANG
+error 41: expected `{`, `[`, `(`
+error 41: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rs
new file mode 100644
index 000000000..48a3a3980
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rs
@@ -0,0 +1,2 @@
+#!/use/bin/env rusti
+#!/use/bin/env rusti
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rast
new file mode 100644
index 000000000..00131bea5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "String"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ ERROR
+ SEMICOLON ";"
+error 39: expected item, found `;`
+consider removing this semicolon
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rs
new file mode 100644
index 000000000..009312270
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rs
@@ -0,0 +1,4 @@
+struct S {
+ a: i32,
+ b: String,
+}; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rast
new file mode 100644
index 000000000..44e192a5f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rast
@@ -0,0 +1,15 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ ERROR
+ INT_NUMBER "92"
+ SEMICOLON ";"
+error 9: expected identifier
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rs
new file mode 100644
index 000000000..060e65d06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rs
@@ -0,0 +1 @@
+use foo::92; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast
new file mode 100644
index 000000000..6ff072e20
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast
@@ -0,0 +1,62 @@
+SOURCE_FILE
+ FN
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "foo"
+ COMMA ","
+ WHITESPACE " "
+ PLUS "+"
+ COMMA ","
+ WHITESPACE " "
+ INT_NUMBER "92"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n\n"
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ TOKEN_TREE
+ L_PAREN "("
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ IDENT "foo"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 53: expected R_PAREN
+error 53: expected `]`
+error 53: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rs
new file mode 100644
index 000000000..de7f81628
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rs
@@ -0,0 +1,8 @@
+#[foo(foo, +, 92)]
+fn foo() {
+}
+
+
+#[foo(
+fn foo() {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rast
new file mode 100644
index 000000000..7a4aa93b2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rast
@@ -0,0 +1,74 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE "\n "
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ ERROR
+ INT_NUMBER "92"
+ WHITESPACE "\n "
+ ERROR
+ PLUS "+"
+ WHITESPACE " "
+ ERROR
+ MINUS "-"
+ WHITESPACE " "
+ ERROR
+ STAR "*"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "z"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f64"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 31: expected field declaration
+error 33: expected COMMA
+error 38: expected field declaration
+error 39: expected COMMA
+error 40: expected field declaration
+error 41: expected COMMA
+error 42: expected field declaration
+error 43: expected COMMA
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rs
new file mode 100644
index 000000000..8069c111b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rs
@@ -0,0 +1,7 @@
+struct S {
+ f: u32,
+ pub 92
+ + - *
+ pub x: u32,
+ z: f64,
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rast
new file mode 100644
index 000000000..5d87ff866
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ ERROR
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ ERROR
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ERROR
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 0: unmatched `}`
+error 14: unmatched `}`
+error 29: unmatched `}`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rs
new file mode 100644
index 000000000..dc869fb78
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rs
@@ -0,0 +1,9 @@
+}
+
+struct S;
+
+}
+
+fn foo(){}
+
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast
new file mode 100644
index 000000000..60b2fe987
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast
@@ -0,0 +1,80 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "baz"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 17: expected BANG
+error 19: expected SEMICOLON
+error 20: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rs
new file mode 100644
index 000000000..9fcac19b5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rs
@@ -0,0 +1,13 @@
+fn foo() {
+}
+
+bar() {
+ if true {
+ 1
+ } else {
+ 2 + 3
+ }
+}
+
+fn baz() {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast
new file mode 100644
index 000000000..a01543217
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast
@@ -0,0 +1,56 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ ERROR
+ INT_NUMBER "90"
+ WHITESPACE " "
+ ERROR
+ PLUS "+"
+ WHITESPACE " "
+ ERROR
+ INT_NUMBER "2"
+ ERROR
+ R_ANGLE ">"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ERROR
+ COLON ":"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 9: expected type parameter
+error 11: expected COMMA
+error 11: expected R_ANGLE
+error 11: expected `;`, `{`, or `(`
+error 12: expected an item
+error 14: expected an item
+error 15: expected an item
+error 17: expected an item
+error 24: expected SEMICOLON
+error 24: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rs
new file mode 100644
index 000000000..0dd30d0bd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rs
@@ -0,0 +1,5 @@
+struct S<90 + 2> {
+ f: u32
+}
+
+struct T;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rast
new file mode 100644
index 000000000..9427ee5c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EXPR_STMT
+ BLOCK_EXPR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 24: expected a block
+error 24: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rs
new file mode 100644
index 000000000..985775282
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rs
@@ -0,0 +1,3 @@
+fn main() {
+ || -> () unsafe { () };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast
new file mode 100644
index 000000000..bd5ec4b7c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast
@@ -0,0 +1,13 @@
+SOURCE_FILE
+ ERROR
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 6: expected existential, fn, trait or impl
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rs
new file mode 100644
index 000000000..c1bd0a2d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rs
@@ -0,0 +1 @@
+extern struct Foo;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0012_broken_lambda.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0012_broken_lambda.rast
new file mode 100644
index 000000000..f31c27633
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0012_broken_lambda.rast
@@ -0,0 +1,387 @@
+SOURCE_FILE@0..389
+ FN@0..389
+ VISIBILITY@0..10
+ PUB_KW@0..3
+ L_PAREN@3..4
+ SUPER_KW@4..9
+ R_PAREN@9..10
+ WHITESPACE@10..11
+ FN_KW@11..13
+ WHITESPACE@13..14
+ NAME@14..21
+ IDENT@14..21 "process"
+ GENERIC_PARAM_LIST@21..38
+ L_ANGLE@21..22
+ LIFETIME_PARAM@22..24
+ LIFETIME@22..24 "'a"
+ COMMA@24..25
+ WHITESPACE@25..26
+ TYPE_PARAM@26..37
+ NAME@26..27
+ IDENT@26..27 "S"
+ COLON@27..28
+ WHITESPACE@28..29
+ PATH@29..37
+ PATH_SEGMENT@29..37
+ NAME_REF@29..33
+ IDENT@29..33 "Sink"
+ GENERIC_ARG_LIST@33..37
+ L_ANGLE@33..34
+ LIFETIME_ARG@34..36
+ LIFETIME@34..36 "'a"
+ R_ANGLE@36..37
+ R_ANGLE@37..38
+ PARAM_LIST@38..93
+ L_PAREN@38..39
+ PARAM@39..54
+ IDENT_PAT@39..46
+ NAME@39..46
+ IDENT@39..46 "builder"
+ COLON@46..47
+ WHITESPACE@47..48
+ REF_TYPE@48..54
+ AMP@48..49
+ MUT_KW@49..52
+ WHITESPACE@52..53
+ PATH_TYPE@53..54
+ PATH@53..54
+ PATH_SEGMENT@53..54
+ NAME_REF@53..54
+ IDENT@53..54 "S"
+ COMMA@54..55
+ WHITESPACE@55..56
+ PARAM@56..72
+ IDENT_PAT@56..62
+ NAME@56..62
+ IDENT@56..62 "tokens"
+ COLON@62..63
+ WHITESPACE@63..64
+ REF_TYPE@64..72
+ AMP@64..65
+ SLICE_TYPE@65..72
+ L_BRACK@65..66
+ PATH_TYPE@66..71
+ PATH@66..71
+ PATH_SEGMENT@66..71
+ NAME_REF@66..71
+ IDENT@66..71 "Token"
+ R_BRACK@71..72
+ COMMA@72..73
+ WHITESPACE@73..74
+ PARAM@74..92
+ IDENT_PAT@74..80
+ NAME@74..80
+ IDENT@74..80 "events"
+ COLON@80..81
+ WHITESPACE@81..82
+ PATH_TYPE@82..92
+ PATH@82..92
+ PATH_SEGMENT@82..92
+ NAME_REF@82..85
+ IDENT@82..85 "Vec"
+ GENERIC_ARG_LIST@85..92
+ L_ANGLE@85..86
+ TYPE_ARG@86..91
+ PATH_TYPE@86..91
+ PATH@86..91
+ PATH_SEGMENT@86..91
+ NAME_REF@86..91
+ IDENT@86..91 "Event"
+ R_ANGLE@91..92
+ R_PAREN@92..93
+ WHITESPACE@93..94
+ BLOCK_EXPR@94..389
+ L_CURLY@94..95
+ WHITESPACE@95..100
+ LET_STMT@100..125
+ LET_KW@100..103
+ WHITESPACE@103..104
+ IDENT_PAT@104..120
+ MUT_KW@104..107
+ WHITESPACE@107..108
+ NAME@108..120
+ IDENT@108..120 "next_tok_idx"
+ WHITESPACE@120..121
+ EQ@121..122
+ WHITESPACE@122..123
+ LITERAL@123..124
+ INT_NUMBER@123..124 "0"
+ SEMICOLON@124..125
+ WHITESPACE@125..130
+ LET_STMT@130..389
+ LET_KW@130..133
+ WHITESPACE@133..134
+ IDENT_PAT@134..140
+ NAME@134..140
+ IDENT@134..140 "eat_ws"
+ WHITESPACE@140..141
+ EQ@141..142
+ WHITESPACE@142..143
+ CLOSURE_EXPR@143..389
+ PARAM_LIST@143..388
+ PIPE@143..144
+ PARAM@144..159
+ IDENT_PAT@144..147
+ NAME@144..147
+ IDENT@144..147 "idx"
+ COLON@147..148
+ WHITESPACE@148..149
+ REF_TYPE@149..159
+ AMP@149..150
+ MUT_KW@150..153
+ WHITESPACE@153..154
+ PATH_TYPE@154..159
+ PATH@154..159
+ PATH_SEGMENT@154..159
+ NAME_REF@154..159
+ IDENT@154..159 "usize"
+ COMMA@159..160
+ WHITESPACE@160..161
+ PARAM@161..167
+ REF_PAT@161..167
+ AMP@161..162
+ MUT_KW@162..165
+ WHITESPACE@165..166
+ err: `expected pattern`
+ ERROR@166..167
+ PIPE@166..167
+ err: `expected COMMA`
+ WHITESPACE@167..168
+ err: `expected pattern`
+ PARAM@168..169
+ ERROR@168..169
+ L_CURLY@168..169
+ err: `expected COMMA`
+ WHITESPACE@169..178
+ err: `expected pattern`
+ PARAM@178..183
+ ERROR@178..183
+ WHILE_KW@178..183
+ err: `expected COMMA`
+ WHITESPACE@183..184
+ err: `expected pattern`
+ PARAM@184..187
+ ERROR@184..187
+ LET_KW@184..187
+ err: `expected COMMA`
+ WHITESPACE@187..188
+ PARAM@188..199
+ TUPLE_STRUCT_PAT@188..199
+ PATH@188..192
+ PATH_SEGMENT@188..192
+ NAME_REF@188..192
+ IDENT@188..192 "Some"
+ L_PAREN@192..193
+ IDENT_PAT@193..198
+ NAME@193..198
+ IDENT@193..198 "token"
+ R_PAREN@198..199
+ err: `expected COMMA`
+ WHITESPACE@199..200
+ err: `expected pattern`
+ PARAM@200..201
+ ERROR@200..201
+ EQ@200..201
+ err: `expected COMMA`
+ WHITESPACE@201..202
+ PARAM@202..208
+ IDENT_PAT@202..208
+ NAME@202..208
+ IDENT@202..208 "tokens"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@208..209
+ ERROR@208..209
+ DOT@208..209
+ err: `expected COMMA`
+ PARAM@209..218
+ TUPLE_STRUCT_PAT@209..218
+ PATH@209..212
+ PATH_SEGMENT@209..212
+ NAME_REF@209..212
+ IDENT@209..212 "get"
+ L_PAREN@212..213
+ err: `expected pattern`
+ ERROR@213..214
+ STAR@213..214
+ err: `expected COMMA`
+ IDENT_PAT@214..217
+ NAME@214..217
+ IDENT@214..217 "idx"
+ R_PAREN@217..218
+ err: `expected COMMA`
+ WHITESPACE@218..219
+ err: `expected pattern`
+ PARAM@219..220
+ ERROR@219..220
+ L_CURLY@219..220
+ err: `expected COMMA`
+ WHITESPACE@220..233
+ err: `expected pattern`
+ PARAM@233..235
+ ERROR@233..235
+ IF_KW@233..235
+ err: `expected COMMA`
+ WHITESPACE@235..236
+ err: `expected pattern`
+ PARAM@236..237
+ ERROR@236..237
+ BANG@236..237
+ err: `expected COMMA`
+ PARAM@237..242
+ IDENT_PAT@237..242
+ NAME@237..242
+ IDENT@237..242 "token"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@242..243
+ ERROR@242..243
+ DOT@242..243
+ err: `expected COMMA`
+ PARAM@243..247
+ IDENT_PAT@243..247
+ NAME@243..247
+ IDENT@243..247 "kind"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@247..248
+ ERROR@247..248
+ DOT@247..248
+ err: `expected COMMA`
+ PARAM@248..259
+ TUPLE_STRUCT_PAT@248..259
+ PATH@248..257
+ PATH_SEGMENT@248..257
+ NAME_REF@248..257
+ IDENT@248..257 "is_trivia"
+ L_PAREN@257..258
+ R_PAREN@258..259
+ err: `expected COMMA`
+ WHITESPACE@259..260
+ err: `expected pattern`
+ PARAM@260..261
+ ERROR@260..261
+ L_CURLY@260..261
+ err: `expected COMMA`
+ WHITESPACE@261..278
+ PARAM@278..283
+ IDENT_PAT@278..283
+ NAME@278..283
+ IDENT@278..283 "break"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@283..284
+ ERROR@283..284
+ SEMICOLON@283..284
+ err: `expected COMMA`
+ WHITESPACE@284..297
+ err: `expected pattern`
+ PARAM@297..298
+ ERROR@297..298
+ R_CURLY@297..298
+ err: `expected COMMA`
+ WHITESPACE@298..311
+ PARAM@311..318
+ IDENT_PAT@311..318
+ NAME@311..318
+ IDENT@311..318 "builder"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@318..319
+ ERROR@318..319
+ DOT@318..319
+ err: `expected COMMA`
+ PARAM@319..346
+ TUPLE_STRUCT_PAT@319..346
+ PATH@319..323
+ PATH_SEGMENT@319..323
+ NAME_REF@319..323
+ IDENT@319..323 "leaf"
+ L_PAREN@323..324
+ IDENT_PAT@324..329
+ NAME@324..329
+ IDENT@324..329 "token"
+ err: `expected COMMA`
+ err: `expected pattern`
+ ERROR@329..330
+ DOT@329..330
+ err: `expected COMMA`
+ IDENT_PAT@330..334
+ NAME@330..334
+ IDENT@330..334 "kind"
+ COMMA@334..335
+ WHITESPACE@335..336
+ IDENT_PAT@336..341
+ NAME@336..341
+ IDENT@336..341 "token"
+ err: `expected COMMA`
+ err: `expected pattern`
+ ERROR@341..342
+ DOT@341..342
+ err: `expected COMMA`
+ IDENT_PAT@342..345
+ NAME@342..345
+ IDENT@342..345 "len"
+ R_PAREN@345..346
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@346..347
+ ERROR@346..347
+ SEMICOLON@346..347
+ err: `expected COMMA`
+ WHITESPACE@347..360
+ err: `expected pattern`
+ PARAM@360..361
+ ERROR@360..361
+ STAR@360..361
+ err: `expected COMMA`
+ PARAM@361..364
+ IDENT_PAT@361..364
+ NAME@361..364
+ IDENT@361..364 "idx"
+ err: `expected COMMA`
+ WHITESPACE@364..365
+ err: `expected pattern`
+ PARAM@365..366
+ ERROR@365..366
+ PLUS@365..366
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@366..367
+ ERROR@366..367
+ EQ@366..367
+ err: `expected COMMA`
+ WHITESPACE@367..368
+ PARAM@368..369
+ LITERAL@368..369
+ INT_NUMBER@368..369 "1"
+ err: `expected COMMA`
+ WHITESPACE@369..378
+ err: `expected pattern`
+ PARAM@378..379
+ ERROR@378..379
+ R_CURLY@378..379
+ err: `expected COMMA`
+ WHITESPACE@379..384
+ err: `expected pattern`
+ PARAM@384..385
+ ERROR@384..385
+ R_CURLY@384..385
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@385..386
+ ERROR@385..386
+ SEMICOLON@385..386
+ err: `expected COMMA`
+ WHITESPACE@386..387
+ err: `expected pattern`
+ PARAM@387..388
+ ERROR@387..388
+ R_CURLY@387..388
+ err: `expected COMMA`
+ err: `expected PIPE`
+ WHITESPACE@388..389
+ err: `expected expression`
+ err: `expected SEMI`
+ err: `expected R_CURLY`
+ ERROR@389..389
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast
new file mode 100644
index 000000000..eec84a0c6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast
@@ -0,0 +1,89 @@
+SOURCE_FILE
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "Cache"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ WHITESPACE "\n "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "RefCell"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "HashMap"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ WHITESPACE "\n "
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "TypeId"
+ COMMA ","
+ WHITESPACE "\n "
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ ERROR
+ AT "@"
+ WHITESPACE " "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Any"
+ ERROR
+ ERROR
+ R_ANGLE ">"
+ ERROR
+ COMMA ","
+ WHITESPACE "\n "
+ ERROR
+ R_ANGLE ">"
+ ERROR
+ R_ANGLE ">"
+ WHITESPACE "\n"
+ ERROR
+ R_PAREN ")"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+error 67: expected type
+error 68: expected COMMA
+error 68: expected R_ANGLE
+error 68: expected COMMA
+error 68: expected R_ANGLE
+error 68: expected COMMA
+error 68: expected R_ANGLE
+error 68: expected COMMA
+error 72: expected COMMA
+error 72: expected a type
+error 72: expected R_PAREN
+error 72: expected SEMICOLON
+error 72: expected an item
+error 73: expected an item
+error 79: expected an item
+error 80: expected an item
+error 82: expected an item
+error 83: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rs
new file mode 100644
index 000000000..20dde3bc3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rs
@@ -0,0 +1,7 @@
+pub struct Cache(
+ RefCell<HashMap<
+ TypeId,
+ Box<@ Any>,
+ >>
+);
+
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rast
new file mode 100644
index 000000000..fd2f9ada3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 19: expected colon
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rs
new file mode 100644
index 000000000..75c1d2f98
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rs
@@ -0,0 +1 @@
+fn foo<T>() where T {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rast
new file mode 100644
index 000000000..8e169320d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ ERROR
+ R_CURLY "}"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 7: expected value parameter
+error 7: expected R_PAREN
+error 7: expected a block
+error 7: unmatched `}`
+error 8: expected an item
+error 10: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rs
new file mode 100644
index 000000000..156e70251
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rs
@@ -0,0 +1,2 @@
+fn foo(}) {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rast
new file mode 100644
index 000000000..c48c35bf8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rast
@@ -0,0 +1,44 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ WHITESPACE "\n "
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE "\n "
+ R_PAREN ")"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 38: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rs
new file mode 100644
index 000000000..9ae857686
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ foo(
+ 1, 2
+ )
+ return 92;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rast
new file mode 100644
index 000000000..807356462
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rast
@@ -0,0 +1,47 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "bar"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 44: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rs
new file mode 100644
index 000000000..17bd49777
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rs
@@ -0,0 +1,4 @@
+fn foo(foo: i32) {
+ let bar = 92;
+ 1 +
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rast
new file mode 100644
index 000000000..6524d8e8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rast
@@ -0,0 +1,134 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnScopes"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "new_scope"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_PAT
+ AMP "&"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "ScopeId"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "res"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ DOT "."
+ NAME_REF
+ IDENT "scopes"
+ DOT "."
+ NAME_REF
+ IDENT "len"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ METHOD_CALL_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ DOT "."
+ NAME_REF
+ IDENT "scopes"
+ DOT "."
+ NAME_REF
+ IDENT "push"
+ ARG_LIST
+ L_PAREN "("
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "ScopeData"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "parent"
+ COLON ":"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "entries"
+ COLON ":"
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "vec"
+ BANG "!"
+ TOKEN_TREE
+ L_BRACK "["
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "set_parent"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 34: expected pattern
+error 34: missing type for function parameter
+error 180: expected function arguments
+error 180: expected a block
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rs
new file mode 100644
index 000000000..fe604006c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rs
@@ -0,0 +1,8 @@
+impl FnScopes {
+ fn new_scope(&) -> ScopeId {
+ let res = self.scopes.len();
+ self.scopes.push(ScopeData { parent: None, entries: vec![] })
+ }
+
+ fn set_parent
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rast
new file mode 100644
index 000000000..7d62e0cc1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rast
@@ -0,0 +1,107 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "11"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "bar"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "baz"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 27: expected SEMICOLON
+error 52: expected pattern
+error 52: expected SEMICOLON
+error 78: expected pattern
+error 78: expected SEMICOLON
+error 101: expected pattern
+error 101: expected SEMICOLON
+error 127: expected pattern
+error 127: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rs
new file mode 100644
index 000000000..5108d5a49
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rs
@@ -0,0 +1,12 @@
+fn foo() {
+ let foo = 11
+ let bar = 1;
+ let
+ let baz = 92;
+ let
+ if true {}
+ let
+ while true {}
+ let
+ loop {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rast
new file mode 100644
index 000000000..56d124cb9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 2: expected a name
+error 2: expected function arguments
+error 2: expected a block
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rs
new file mode 100644
index 000000000..3393b668b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rs
@@ -0,0 +1,3 @@
+fn
+
+fn foo() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rast
new file mode 100644
index 000000000..762840aa2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rast
@@ -0,0 +1,34 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 16: missing type for function parameter
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rs
new file mode 100644
index 000000000..7a6c264f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rs
@@ -0,0 +1,2 @@
+fn foo(x: i32, y) {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast
new file mode 100644
index 000000000..900394bd9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast
@@ -0,0 +1,171 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE " "
+ ERROR
+ AT "@"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ STRUCT
+ STRUCT_KW "struct"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ ERROR
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE " "
+ ERROR
+ AT "@"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ IMPL
+ IMPL_KW "impl"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ DOT "."
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE " "
+ ERROR
+ AT "@"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ ERROR
+ R_BRACK "]"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ TRAIT
+ TRAIT_KW "trait"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 16: expected expression
+error 17: expected R_BRACK
+error 17: expected SEMICOLON
+error 17: expected expression
+error 25: expected a name
+error 26: expected `;`, `{`, or `(`
+error 30: expected pattern
+error 31: expected SEMICOLON
+error 53: expected expression
+error 54: expected SEMICOLON
+error 54: expected expression
+error 60: expected type
+error 60: expected `{`
+error 60: expected expression
+error 65: expected pattern
+error 65: expected SEMICOLON
+error 65: expected expression
+error 92: expected expression
+error 93: expected SEMICOLON
+error 93: expected expression
+error 95: expected expression
+error 96: expected expression
+error 103: expected a name
+error 104: expected `{`
+error 108: expected pattern
+error 108: expected SEMICOLON
+error 108: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rs
new file mode 100644
index 000000000..cd2d493a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rs
@@ -0,0 +1,3 @@
+fn a() { [1, 2, @, struct, let] }
+fn b() { foo(1, 2, @, impl, let) }
+fn c() { foo.bar(1, 2, @, ], trait, let) }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rast
new file mode 100644
index 000000000..4064a7a1f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ BANG "!"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ WHITESPACE "\n "
+ IDENT "bar"
+ COMMA ","
+ WHITESPACE " "
+ STRING "\"baz\""
+ COMMA ","
+ WHITESPACE " "
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ FLOAT_NUMBER "2.0"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ COMMENT "//~ ERROR incorrect close delimiter"
+ WHITESPACE "\n"
+ ERROR
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 49: unmatched `}`
+error 92: unmatched `}`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rs
new file mode 100644
index 000000000..0206d563e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rs
@@ -0,0 +1,5 @@
+fn main() {
+ foo! (
+ bar, "baz", 1, 2.0
+ } //~ ERROR incorrect close delimiter
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast
new file mode 100644
index 000000000..d374f8661
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast
@@ -0,0 +1,327 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ QUESTION "?"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_PAREN ")"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ DYN_TRAIT_TYPE
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PAREN_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ QUESTION "?"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_PAREN ")"
+ ERROR
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PAREN_TYPE
+ L_PAREN "("
+ ERROR
+ QUESTION "?"
+ EXPR_STMT
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ PLUS "+"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ CLOSURE_EXPR
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ L_ANGLE "<"
+ ERROR
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ R_ANGLE ">"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ ERROR
+ COLON ":"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ L_ANGLE "<"
+ TUPLE_EXPR
+ L_PAREN "("
+ CLOSURE_EXPR
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ L_ANGLE "<"
+ ERROR
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ ERROR
+ QUESTION "?"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ R_PAREN ")"
+ R_ANGLE ">"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 88: expected COMMA
+error 88: expected R_ANGLE
+error 121: expected SEMICOLON
+error 121: expected expression
+error 140: expected type
+error 141: expected R_PAREN
+error 141: expected COMMA
+error 141: expected R_ANGLE
+error 141: expected SEMICOLON
+error 146: expected SEMICOLON
+error 146: expected expression
+error 148: expected expression
+error 158: expected `|`
+error 158: expected COMMA
+error 165: expected expression
+error 168: expected expression
+error 179: expected expression
+error 180: expected COMMA
+error 190: expected EQ
+error 190: expected expression
+error 191: expected COMMA
+error 204: expected `|`
+error 204: expected COMMA
+error 211: expected expression
+error 214: expected expression
+error 228: expected expression
+error 229: expected R_PAREN
+error 229: expected COMMA
+error 236: expected expression
+error 237: expected COMMA
+error 237: expected expression
+error 237: expected R_PAREN
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rs
new file mode 100644
index 000000000..6c2e95c02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rs
@@ -0,0 +1,7 @@
+fn f<T: (Copy) + (?Sized) + (for<'a> Trait<'a>)>() {}
+
+fn main() {
+ let _: Box<(Copy) + (?Sized) + (for<'a> Trait<'a>)>;
+ let _: Box<(?Sized) + (for<'a> Trait<'a>) + (Copy)>;
+ let _: Box<(for<'a> Trait<'a>) + (Copy) + (?Sized)>;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rast
new file mode 100644
index 000000000..6b49724ec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rast
@@ -0,0 +1,209 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "Test"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Var1"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Var2"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "String"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Var3"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "abc"
+ COLON ":"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ R_CURLY "}"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ COMMENT "//~ ERROR: expected type, found `{`"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ COMMENT "// recover..."
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "Test2"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Fine"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "Test3"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "StillFine"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "def"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ENUM
+ COMMENT "// fail again"
+ WHITESPACE "\n "
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "Test4"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Nope"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ ERROR
+ ERROR
+ L_CURLY "{"
+ R_CURLY "}"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ COMMENT "//~ ERROR: found `{`"
+ WHITESPACE "\n "
+ COMMENT "//~^ ERROR: found `{`"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ COMMENT "// still recover later"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE " "
+ COMMENT "//~ ERROR: expected pattern"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 95: expected type
+error 95: expected COMMA
+error 96: expected field
+error 98: expected field declaration
+error 371: expected COMMA
+error 372: expected a type
+error 372: expected R_PAREN
+error 372: expected COMMA
+error 372: expected enum variant
+error 374: expected enum variant
+error 494: expected pattern
+error 495: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rs
new file mode 100644
index 000000000..c78abe80a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rs
@@ -0,0 +1,32 @@
+fn main() {
+ enum Test {
+ Var1,
+ Var2(String),
+ Var3 {
+ abc: {}, //~ ERROR: expected type, found `{`
+ },
+ }
+
+ // recover...
+ let a = 1;
+ enum Test2 {
+ Fine,
+ }
+
+ enum Test3 {
+ StillFine {
+ def: i32,
+ },
+ }
+
+ {
+ // fail again
+ enum Test4 {
+ Nope(i32 {}) //~ ERROR: found `{`
+ //~^ ERROR: found `{`
+ }
+ }
+ // still recover later
+ let; //~ ERROR: expected pattern
+ let _ = 0;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rast
new file mode 100644
index 000000000..1068418e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ WHITESPACE "\n"
+ IMPL
+ IMPL_KW "impl"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "OnceCell"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 14: expected trait or type
+error 14: expected `{`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rs
new file mode 100644
index 000000000..829ca1c4b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rs
@@ -0,0 +1,2 @@
+impl<T: Clone>
+impl<T> OnceCell<T> {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rast
new file mode 100644
index 000000000..674c8d536
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 26: expected type
+error 26: expected colon
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rs
new file mode 100644
index 000000000..2792c2084
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rs
@@ -0,0 +1,3 @@
+fn foo()
+ where for<'a>
+{}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rast
new file mode 100644
index 000000000..fb037112f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ DOT "."
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 21: expected field name or number
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rs
new file mode 100644
index 000000000..a7cdc17bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rs
@@ -0,0 +1,3 @@
+fn foo(a: A) {
+ a.
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
new file mode 100644
index 000000000..327bf94a4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
@@ -0,0 +1,205 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ ERROR
+ BANG "!"
+ ARRAY_EXPR
+ L_BRACK "["
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ STRING "\"Not allowed here\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ ERROR
+ BANG "!"
+ ARRAY_EXPR
+ L_BRACK "["
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ STRING "\"Nor here\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ ERROR
+ BANG "!"
+ ARRAY_EXPR
+ L_BRACK "["
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ STRING "\"Nor here\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 52: expected `[`
+error 52: expected pattern
+error 53: expected FAT_ARROW
+error 78: expected `,`
+error 161: expected `[`
+error 161: expected pattern
+error 162: expected FAT_ARROW
+error 232: expected `[`
+error 232: expected pattern
+error 233: expected FAT_ARROW
+error 250: expected `,`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rs
new file mode 100644
index 000000000..06aa47770
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rs
@@ -0,0 +1,20 @@
+fn foo() {
+ match () {
+ _ => (),
+ #![doc("Not allowed here")]
+ _ => (),
+ }
+
+ match () {
+ _ => (),
+ _ => (),
+ #![doc("Nor here")]
+ }
+
+ match () {
+ #[cfg(test)]
+ #![doc("Nor here")]
+ _ => (),
+ _ => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rast
new file mode 100644
index 000000000..b5bc3d84d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rast
@@ -0,0 +1,68 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 80: expected pattern
+error 80: expected FAT_ARROW
+error 80: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rs
new file mode 100644
index 000000000..4635222da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ match () {
+ _ => (),
+ _ => (),
+ #[cfg(test)]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rast
new file mode 100644
index 000000000..7a2ae9103
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ ERROR
+ BOX_KW "box"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ ERROR
+ BOX_KW "box"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ ERROR
+ BOX_KW "box"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+error 24: expected a name
+error 27: expected SEMICOLON
+error 48: expected a name
+error 51: expected SEMICOLON
+error 76: expected a name
+error 79: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rs
new file mode 100644
index 000000000..d3fa2e468
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let ref box i = ();
+ let mut box i = ();
+ let ref mut box i = ();
+}
+
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rast
new file mode 100644
index 000000000..f9287d42e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rast
@@ -0,0 +1,55 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 17: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 17: expected SEMICOLON
+error 37: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 37: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rs
new file mode 100644
index 000000000..4a2668126
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rs
@@ -0,0 +1,5 @@
+use foo::bar;
+use
+use crate::baz;
+use
+fn f() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rast
new file mode 100644
index 000000000..13e76e683
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "error"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Error"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ ERROR
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "io"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 22: expected COMMA
+error 22: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 23: expected COMMA
+error 24: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 27: expected COMMA
+error 35: expected COMMA
+error 35: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 36: expected COMMA
+error 36: expected R_CURLY
+error 36: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rs
new file mode 100644
index 000000000..d521a5bb2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rs
@@ -0,0 +1,2 @@
+use std::{error::Error;
+use std::io;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rast
new file mode 100644
index 000000000..8ca160601
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rast
@@ -0,0 +1,83 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ METHOD_CALL_EXPR
+ METHOD_CALL_EXPR
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ R_BRACK "]"
+ DOT "."
+ NAME_REF
+ IDENT "iter"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ DOT "."
+ NAME_REF
+ IDENT "map"
+ ARG_LIST
+ L_PAREN "("
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "it"
+ PIPE "|"
+ R_PAREN ")"
+ WHITESPACE "\n "
+ DOT "."
+ NAME_REF
+ IDENT "max"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 56: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rs
new file mode 100644
index 000000000..a2f74bd87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rs
@@ -0,0 +1,5 @@
+fn foo() -> i32 {
+ [1, 2, 3].iter()
+ .map(|it|)
+ .max::<i32>();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast
new file mode 100644
index 000000000..9cea337ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast
@@ -0,0 +1,75 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ERROR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ERROR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BLOCK_EXPR
+ TRY_KW "try"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ERROR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'label"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 24: expected existential, fn, trait or impl
+error 41: expected existential, fn, trait or impl
+error 56: expected a block
+error 75: expected a loop
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rs
new file mode 100644
index 000000000..8fa324c1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rs
@@ -0,0 +1,6 @@
+fn main() {
+ { unsafe 92 }
+ { async 92 }
+ { try 92 }
+ { 'label: 92 }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast
new file mode 100644
index 000000000..cb4fb1642
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast
@@ -0,0 +1,256 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "ForRef"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "ForTup"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "ForSlice"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ SLICE_TYPE
+ L_BRACK "["
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "ForForFn"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_for_for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 21: expected a function pointer or path
+error 52: expected a function pointer or path
+error 88: expected a function pointer or path
+error 119: expected a function pointer or path
+error 195: expected a function pointer or path
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rs
new file mode 100644
index 000000000..0e9f8ccb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rs
@@ -0,0 +1,9 @@
+type ForRef = for<'a> &'a u32;
+type ForTup = for<'a> (&'a u32,);
+type ForSlice = for<'a> [u32];
+type ForForFn = for<'a> for<'b> fn(&'a i32, &'b i32);
+fn for_for_for<T>()
+where
+ for<'a> for<'b> for<'c> fn(&'a T, &'b T, &'c T): Copy,
+{
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast
new file mode 100644
index 000000000..96e471a69
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ ERROR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN
+ ASYNC_KW "async"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ CONST
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ ERROR
+ FN_KW "fn"
+ WHITESPACE " "
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 6: expected existential, fn, trait or impl
+error 38: expected a name
+error 40: missing type for `const` or `static`
+error 40: expected SEMICOLON
+error 44: expected BANG
+error 46: expected SEMICOLON
+error 47: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rs
new file mode 100644
index 000000000..731e58013
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rs
@@ -0,0 +1,2 @@
+unsafe async fn foo() {}
+unsafe const fn bar() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rast
new file mode 100644
index 000000000..4b2a74036
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rast
@@ -0,0 +1,15 @@
+SOURCE_FILE
+ ERROR
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ ERROR
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE "\n"
+error 10: expected existential, fn, trait or impl
+error 21: expected existential, fn, trait or impl
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rs
new file mode 100644
index 000000000..db32b98df
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rs
@@ -0,0 +1 @@
+extern "C" extern "C"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rast
new file mode 100644
index 000000000..3a05bfee1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rast
@@ -0,0 +1,123 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "lol"
+ R_ANGLE ">"
+ COLON2 "::"
+ ERROR
+ L_ANGLE "<"
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "nope"
+ SHR ">>"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "lol"
+ R_ANGLE ">"
+ COLON2 "::"
+ ERROR
+ L_ANGLE "<"
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "nope"
+ R_ANGLE ">"
+ WHITESPACE " "
+ ERROR
+ EQ "="
+ WHITESPACE " "
+ EXPR_STMT
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 30: expected identifier
+error 31: expected COMMA
+error 31: expected R_ANGLE
+error 31: expected SEMICOLON
+error 37: expected expression
+error 75: expected identifier
+error 76: expected SEMICOLON
+error 82: expected expression
+error 83: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rs
new file mode 100644
index 000000000..31c12bfff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rs
@@ -0,0 +1,7 @@
+fn f() {
+ S::<Item::<lol>::<nope>>;
+}
+
+fn g() {
+ let _: Item::<lol>::<nope> = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast
new file mode 100644
index 000000000..ed739a7e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast
@@ -0,0 +1,27 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ SLICE_TYPE
+ L_BRACK "["
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ INT_NUMBER "92"
+ ERROR
+ R_BRACK "]"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 12: expected `;` or `]`
+error 12: expected SEMICOLON
+error 13: expected an item
+error 15: expected an item
+error 16: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs
new file mode 100644
index 000000000..a94851443
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs
@@ -0,0 +1 @@
+type T = [() 92];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast
new file mode 100644
index 000000000..56cea4b15
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ERROR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'loop"
+ COLON ":"
+ WHITESPACE " "
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 22: expected a loop
+error 27: expected type
+error 27: expected `{`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs
new file mode 100644
index 000000000..a2164c510
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs
@@ -0,0 +1,3 @@
+fn main() {
+ 'loop: impl
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast
new file mode 100644
index 000000000..354c4135a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast
@@ -0,0 +1,17 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 10: expected mut or const in raw pointer type (use `*mut T` or `*const T` as appropriate)
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs
new file mode 100644
index 000000000..fae705131
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs
@@ -0,0 +1 @@
+type T = *();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast
new file mode 100644
index 000000000..dbeb878a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast
@@ -0,0 +1,79 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Type"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait1"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NotType"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait2"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NotType"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 38: expected trait or type
+error 38: expected `{`
+error 70: expected trait or type
+error 70: expected `{`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs
new file mode 100644
index 000000000..b8c7b65e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs
@@ -0,0 +1,4 @@
+impl Type {}
+impl Trait1 for T {}
+impl impl NotType {}
+impl Trait2 for impl NotType {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast
new file mode 100644
index 000000000..eb0595293
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast
@@ -0,0 +1,23 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ERROR
+ L_PAREN "("
+ ERROR
+ R_PAREN ")"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 15: expected `fn`
+error 15: expected SEMICOLON
+error 16: expected an item
+error 17: expected an item
+error 18: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs
new file mode 100644
index 000000000..f014914ff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs
@@ -0,0 +1 @@
+type F = unsafe ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast
new file mode 100644
index 000000000..77c2b56ad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ ERROR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 11: expected an item
+error 18: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs
new file mode 100644
index 000000000..26141e904
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs
@@ -0,0 +1 @@
+fn foo(){} unsafe { } fn bar(){}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast
new file mode 100644
index 000000000..bf20d5fa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 27: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs
new file mode 100644
index 000000000..9a423248c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs
@@ -0,0 +1 @@
+fn foo() { let _ = async {} }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast
new file mode 100644
index 000000000..0ae9f64e7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ERROR
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 14: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs
new file mode 100644
index 000000000..2976f6862
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs
@@ -0,0 +1 @@
+fn foo() { pub 92; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast
new file mode 100644
index 000000000..823db94f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ STATIC
+ STATIC_KW "static"
+ WHITESPACE " "
+ ERROR
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 7: expected a name
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs
new file mode 100644
index 000000000..df8cecb43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs
@@ -0,0 +1 @@
+static _: i32 = 5;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast
new file mode 100644
index 000000000..f51196004
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "field"
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 25: expected COLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs
new file mode 100644
index 000000000..a4e5b2f69
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs
@@ -0,0 +1,3 @@
+fn main() {
+ S { field ..S::default() }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast
new file mode 100644
index 000000000..458d7f4e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast
@@ -0,0 +1,31 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ WHITESPACE " "
+ RECORD_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ NAME
+ IDENT "g"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 12: expected COLON
+error 12: expected type
+error 12: expected COMMA
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs
new file mode 100644
index 000000000..da32227ad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs
@@ -0,0 +1 @@
+struct S { f pub g: () }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast
new file mode 100644
index 000000000..b03f5ad9f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast
@@ -0,0 +1,14 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 11: expected identifier
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs
new file mode 100644
index 000000000..7510664e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs
@@ -0,0 +1 @@
+use crate::;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast
new file mode 100644
index 000000000..e72df374d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast
@@ -0,0 +1,53 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "z"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "t"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 6: missing type for function parameter
+error 6: expected COMMA
+error 16: missing type for function parameter
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs
new file mode 100644
index 000000000..4a95b9084
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs
@@ -0,0 +1 @@
+fn f(x y: i32, z, t: i32) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast
new file mode 100644
index 000000000..f3b1129f2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "c"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs
new file mode 100644
index 000000000..6fa175f54
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs
@@ -0,0 +1 @@
+use {a, b, c};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast
new file mode 100644
index 000000000..8407e99f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast
@@ -0,0 +1,63 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_trait"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "str"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs
new file mode 100644
index 000000000..423bc105b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs
@@ -0,0 +1,4 @@
+fn for_trait<F>()
+where
+ for<'a> F: Fn(&'a str)
+{ }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast
new file mode 100644
index 000000000..902b06484
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast
@@ -0,0 +1,60 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs
new file mode 100644
index 000000000..93636e926
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs
@@ -0,0 +1 @@
+type F = Box<Fn(i32, &i32, &i32, ())>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast
new file mode 100644
index 000000000..3858e3eed
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs
new file mode 100644
index 000000000..9df40ed39
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs
@@ -0,0 +1 @@
+fn foo<T: Clone + Copy>(){}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast
new file mode 100644
index 000000000..67e282363
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast
@@ -0,0 +1,128 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ NAME
+ SELF_KW "self"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "e"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs
new file mode 100644
index 000000000..80c0a43f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs
@@ -0,0 +1,7 @@
+impl S {
+ fn a(self) {}
+ fn b(&self,) {}
+ fn c(&'a self,) {}
+ fn d(&'a mut self, x: i32) {}
+ fn e(mut self) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast
new file mode 100644
index 000000000..dee860c24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast
@@ -0,0 +1,53 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ QUESTION "?"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ TILDE "~"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Drop"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs
new file mode 100644
index 000000000..5da3083b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs
@@ -0,0 +1 @@
+struct S<T: 'a + ?Sized + (Copy) + ~const Drop>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast
new file mode 100644
index 000000000..4ccda19a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast
@@ -0,0 +1,98 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ PATH_PAT
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ PATH_PAT
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "Bar"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs
new file mode 100644
index 000000000..f6e32c7c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ let foo::Bar = ();
+ let ::Bar = ();
+ let Bar { .. } = ();
+ let Bar(..) = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast
new file mode 100644
index 000000000..ab3b49b0d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs
new file mode 100644
index 000000000..9f078fa48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ loop {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast
new file mode 100644
index 000000000..7a3cd6a0d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs
new file mode 100644
index 000000000..bee5ac845
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs
@@ -0,0 +1,2 @@
+unsafe extern "C" {}
+extern {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast
new file mode 100644
index 000000000..8498724b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast
@@ -0,0 +1,60 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FIELD_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ DOT "."
+ NAME_REF
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs
new file mode 100644
index 000000000..b8da2ddc3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ x.foo;
+ x.0.bar;
+ x.0();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast
new file mode 100644
index 000000000..31c87d1b3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs
new file mode 100644
index 000000000..2d30e8521
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs
@@ -0,0 +1 @@
+type Foo = () where Foo: Copy;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast
new file mode 100644
index 000000000..bfe7ed5b4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "M"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs
new file mode 100644
index 000000000..04b2bb9ba
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs
@@ -0,0 +1,2 @@
+type M = *mut ();
+type C = *mut ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast
new file mode 100644
index 000000000..53dbf3999
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast
@@ -0,0 +1,13 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Never"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ NEVER_TYPE
+ BANG "!"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs
new file mode 100644
index 000000000..de399fcf4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs
@@ -0,0 +1 @@
+type Never = !;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast
new file mode 100644
index 000000000..5d80a57a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CONTINUE_EXPR
+ CONTINUE_KW "continue"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CONTINUE_EXPR
+ CONTINUE_KW "continue"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'l"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs
new file mode 100644
index 000000000..474cc3f0e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ loop {
+ continue;
+ continue 'l;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast
new file mode 100644
index 000000000..2a5c644d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ ARRAY_TYPE
+ L_BRACK "["
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs
new file mode 100644
index 000000000..27eb22f22
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs
@@ -0,0 +1 @@
+type T = [(); 92];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast
new file mode 100644
index 000000000..a0b562629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast
@@ -0,0 +1,76 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ NAME
+ SELF_KW "self"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs
new file mode 100644
index 000000000..6a170d5ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs
@@ -0,0 +1,4 @@
+impl S {
+ fn a(self: &Self) {}
+ fn b(mut self: Box<Self>) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast
new file mode 100644
index 000000000..525b26745
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PREFIX_EXPR
+ STAR "*"
+ PREFIX_EXPR
+ STAR "*"
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PREFIX_EXPR
+ BANG "!"
+ PREFIX_EXPR
+ BANG "!"
+ LITERAL
+ TRUE_KW "true"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PREFIX_EXPR
+ MINUS "-"
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs
new file mode 100644
index 000000000..f1c3f7118
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ **&1;
+ !!true;
+ --1;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast
new file mode 100644
index 000000000..def7373c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast
@@ -0,0 +1,81 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs
new file mode 100644
index 000000000..f10851487
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs
@@ -0,0 +1,6 @@
+impl F {
+ type A = i32;
+ const B: i32 = 92;
+ fn foo() {}
+ fn bar(&self) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast
new file mode 100644
index 000000000..8738292a9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs
new file mode 100644
index 000000000..a790a485f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs
@@ -0,0 +1,3 @@
+pub(crate) struct S;
+pub(self) struct S;
+pub(super) struct S;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast
new file mode 100644
index 000000000..d9db1c34b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast
@@ -0,0 +1,13 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Placeholder"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ INFER_TYPE
+ UNDERSCORE "_"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs
new file mode 100644
index 000000000..7952dbd57
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs
@@ -0,0 +1 @@
+type Placeholder = _;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast
new file mode 100644
index 000000000..235a9d7f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ ARRAY_EXPR
+ L_BRACK "["
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs
new file mode 100644
index 000000000..7955973b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let [a, b, ..] = [];
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast
new file mode 100644
index 000000000..0bcb31524
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast
@@ -0,0 +1,17 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ SLICE_TYPE
+ L_BRACK "["
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs
new file mode 100644
index 000000000..4da1af827
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs
@@ -0,0 +1 @@
+type T = [()];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast
new file mode 100644
index 000000000..3cdaf32b5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast
@@ -0,0 +1,105 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ WHITESPACE " "
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs
new file mode 100644
index 000000000..0dfe63629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ let S() = ();
+ let S(_) = ();
+ let S(_,) = ();
+ let S(_, .. , x) = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast
new file mode 100644
index 000000000..4516fd011
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ REF_PAT
+ AMP "&"
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ REF_PAT
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs
new file mode 100644
index 000000000..de41f5cae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs
@@ -0,0 +1,4 @@
+fn main() {
+ let &a = ();
+ let &mut b = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast
new file mode 100644
index 000000000..c7478da94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ EQ "="
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs
new file mode 100644
index 000000000..54c5a7c46
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs
@@ -0,0 +1 @@
+type A = impl Iterator<Item=Foo<'a>> + 'a;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast
new file mode 100644
index 000000000..d53dde538
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "82"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "81"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "79"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i16"
+ WHITESPACE " "
+ MINUS "-"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "0x36"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ LTEQ "<="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0x37"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs
new file mode 100644
index 000000000..bfe8e4b36
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ 82 as i32;
+ 81 as i8 + 1;
+ 79 as i16 - 1;
+ 0x36 as u8 <= 0x37;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast
new file mode 100644
index 000000000..dcffcb1ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs
new file mode 100644
index 000000000..0131d5e33
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ if let Some(_) = None && true {}
+ while 1 == 5 && (let None = None) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast
new file mode 100644
index 000000000..16c522414
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast
@@ -0,0 +1,87 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "it"
+ DOT "."
+ NAME_REF
+ IDENT "next"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs
new file mode 100644
index 000000000..2f8188160
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ while true {};
+ while let Some(x) = it.next() {};
+ while { true } {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast
new file mode 100644
index 000000000..608b0be16
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast
@@ -0,0 +1,98 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "D"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ PARAM_LIST
+ L_PAREN "("
+ WHITESPACE " "
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ DOT3 "..."
+ WHITESPACE " "
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs
new file mode 100644
index 000000000..9493da83d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs
@@ -0,0 +1,4 @@
+type A = fn();
+type B = unsafe fn();
+type C = unsafe extern "C" fn();
+type D = extern "C" fn ( u8 , ... ) -> u8;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast
new file mode 100644
index 000000000..b5c9d7a8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'static"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs
new file mode 100644
index 000000000..3ac0badab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs
@@ -0,0 +1,3 @@
+type A = &();
+type B = &'static ();
+type C = &mut ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast
new file mode 100644
index 000000000..06c053d0f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast
@@ -0,0 +1,57 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'l"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'l"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs
new file mode 100644
index 000000000..1b4094636
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs
@@ -0,0 +1,8 @@
+fn foo() {
+ loop {
+ break;
+ break 'l;
+ break 92;
+ break 'l 92;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast
new file mode 100644
index 000000000..8c66cfe59
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast
@@ -0,0 +1,79 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Output"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Default"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs
new file mode 100644
index 000000000..d140692e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs
@@ -0,0 +1,2 @@
+type X = <A as B>::Output;
+fn foo() { <usize as Default>::default(); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast
new file mode 100644
index 000000000..9ffc07630
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ INDEX_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "xs"
+ L_BRACK "["
+ RANGE_EXPR
+ DOT2 ".."
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs
new file mode 100644
index 000000000..ae21ad94c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs
@@ -0,0 +1 @@
+fn foo() { xs[..]; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast
new file mode 100644
index 000000000..07b0210e4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast
@@ -0,0 +1,63 @@
+SOURCE_FILE
+ FN
+ VISIBILITY
+ CRATE_KW "crate"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ VISIBILITY
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME
+ IDENT "field"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ CRATE_KW "crate"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs
new file mode 100644
index 000000000..e2b5f2161
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs
@@ -0,0 +1,3 @@
+crate fn main() { }
+struct S { crate field: u32 }
+struct T(crate u32);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast
new file mode 100644
index 000000000..dd7f76eb9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast
@@ -0,0 +1,31 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "new"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs
new file mode 100644
index 000000000..dcd9a7114
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs
@@ -0,0 +1 @@
+trait T { fn new() -> Self; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast
new file mode 100644
index 000000000..19cc8d5ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast
@@ -0,0 +1,148 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ CALL_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "func"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "func"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs
new file mode 100644
index 000000000..ffbf46d6d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ let _ = f();
+ let _ = f()(1)(1, 2,);
+ let _ = f(<Foo>::func());
+ f(<Foo as Trait>::func());
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast
new file mode 100644
index 000000000..2e4b515ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs
new file mode 100644
index 000000000..d9868718c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs
@@ -0,0 +1 @@
+fn a() { fn b() {} }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast
new file mode 100644
index 000000000..e9d93a0d0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnMut"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_PAREN ")"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs
new file mode 100644
index 000000000..9b93442c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs
@@ -0,0 +1 @@
+fn foo<F: FnMut(&mut Foo<'a>)>(){}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast
new file mode 100644
index 000000000..0129955d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast
@@ -0,0 +1,20 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs
new file mode 100644
index 000000000..cb66bad24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs
@@ -0,0 +1 @@
+type T = (i32,);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast
new file mode 100644
index 000000000..a059e124a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast
@@ -0,0 +1,85 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ DYN_TRAIT_TYPE
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'f"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'f"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs
new file mode 100644
index 000000000..4bb0f63b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs
@@ -0,0 +1,2 @@
+fn foo() -> Box<T + 'f> {}
+fn foo() -> Box<dyn T + 'f> {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast
new file mode 100644
index 000000000..f7fac807f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs
new file mode 100644
index 000000000..c9f74f7f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs
@@ -0,0 +1 @@
+trait T { fn foo(); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast
new file mode 100644
index 000000000..b3df31535
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast
@@ -0,0 +1,14 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs
new file mode 100644
index 000000000..c039cf7d3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs
@@ -0,0 +1 @@
+type T = ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast
new file mode 100644
index 000000000..d498d3721
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast
@@ -0,0 +1,72 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "D"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs
new file mode 100644
index 000000000..bf94f32e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs
@@ -0,0 +1,4 @@
+type A = Foo;
+type B = ::Foo;
+type C = self::Foo;
+type D = super::Foo;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast
new file mode 100644
index 000000000..48e123ab1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast
@@ -0,0 +1,97 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "a"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "format"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs
new file mode 100644
index 000000000..333ebabef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ let _ = a;
+ let _ = a::b;
+ let _ = ::a::<b>;
+ let _ = format!();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast
new file mode 100644
index 000000000..639ee0eb7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs
new file mode 100644
index 000000000..d7f0b4382
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs
@@ -0,0 +1 @@
+struct S { #[attr] f: f32 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast
new file mode 100644
index 000000000..c83ea7ade
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast
@@ -0,0 +1,77 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ CHAR "'c'"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ STRING "\"hello\""
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs
new file mode 100644
index 000000000..6dfd67b4c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs
@@ -0,0 +1,8 @@
+fn main() {
+ match () {
+ -1 => (),
+ 92 => (),
+ 'c' => (),
+ "hello" => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast
new file mode 100644
index 000000000..a3cbe457e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast
@@ -0,0 +1,117 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ COMMA ","
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'static"
+ COMMA ","
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs
new file mode 100644
index 000000000..19d7e571b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs
@@ -0,0 +1,7 @@
+fn foo()
+where
+ 'a: 'b + 'c,
+ T: Clone + Copy + 'static,
+ Iterator::Item: 'a,
+ <T as Iterator>::Item: 'a
+{}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast
new file mode 100644
index 000000000..44c967e8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast
@@ -0,0 +1,251 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT3 "..."
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "100"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "101"
+ WHITESPACE " "
+ DOT2EQ "..="
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "200"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "200"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "301"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "302"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "10"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ OR_PAT
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "10"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ COMMA ","
+ WHITESPACE " "
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "5"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_PAT
+ L_PAREN "("
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "0"
+ COMMA ","
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_PAT
+ L_PAREN "("
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs
new file mode 100644
index 000000000..6c586a895
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs
@@ -0,0 +1,18 @@
+fn main() {
+ match 92 {
+ 0 ... 100 => (),
+ 101 ..= 200 => (),
+ 200 .. 301 => (),
+ 302 .. => (),
+ }
+
+ match Some(10 as u8) {
+ Some(0) | None => (),
+ Some(1..) => ()
+ }
+
+ match (10 as u8, 5 as u8) {
+ (0, _) => (),
+ (1.., _) => ()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast
new file mode 100644
index 000000000..94897c2d2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast
@@ -0,0 +1,60 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs
new file mode 100644
index 000000000..1f25d577a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ match () {
+ _ => (),
+ _ => {}
+ _ => ()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast
new file mode 100644
index 000000000..0a660957d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast
@@ -0,0 +1,10 @@
+SOURCE_FILE
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs
new file mode 100644
index 000000000..49af74e1b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs
@@ -0,0 +1 @@
+extern crate foo;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast
new file mode 100644
index 000000000..9997d0ae3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast
@@ -0,0 +1,125 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "32"
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "32"
+ COMMA ","
+ WHITESPACE " "
+ DOT2 ".."
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Default"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "TupleStruct"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ INT_NUMBER "0"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs
new file mode 100644
index 000000000..6285e5549
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ S {};
+ S { x, y: 32, };
+ S { x, y: 32, ..Default::default() };
+ TupleStruct { 0: 1 };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast
new file mode 100644
index 000000000..5f60e03d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast
@@ -0,0 +1,65 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ MACRO_CALL
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ MACRO_CALL
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ BANG "!"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs
new file mode 100644
index 000000000..24a15c5c5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs
@@ -0,0 +1,5 @@
+fn foo() {}
+macro_rules! foo {}
+foo::bar!();
+super::baz! {}
+struct S;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast
new file mode 100644
index 000000000..805052fbc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast
@@ -0,0 +1,23 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ BANG "!"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs
new file mode 100644
index 000000000..a7bd4b048
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs
@@ -0,0 +1 @@
+impl !Send for S {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast
new file mode 100644
index 000000000..e2e964e44
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast
@@ -0,0 +1,126 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ FALSE_KW "false"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs
new file mode 100644
index 000000000..40f227ba3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ if true {};
+ if true {} else {};
+ if true {} else if false {} else {};
+ if S {};
+ if { true } { } else { };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast
new file mode 100644
index 000000000..e37d43aac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ EQ "="
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs
new file mode 100644
index 000000000..c3ecabb99
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs
@@ -0,0 +1 @@
+type A = dyn Iterator<Item=Foo<'a>> + 'a;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast
new file mode 100644
index 000000000..8189cf0a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast
@@ -0,0 +1,152 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Test"
+ WHITESPACE " "
+ R_ANGLE ">"
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Test"
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "field"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_CURLY "}"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ OR_PAT
+ IDENT_PAT
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "Y"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Z"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ PIPE "|"
+ WHITESPACE " "
+ OR_PAT
+ IDENT_PAT
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "Y"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Z"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ PIPE "|"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs
new file mode 100644
index 000000000..9e009e24f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs
@@ -0,0 +1,9 @@
+fn foo() {
+ match () {
+ _ => (),
+ _ if Test > Test{field: 0} => (),
+ X | Y if Z => (),
+ | X | Y if Z => (),
+ | X => (),
+ };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast
new file mode 100644
index 000000000..f71367ae1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs
new file mode 100644
index 000000000..1bbb5930b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs
@@ -0,0 +1 @@
+use crate::foo;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast
new file mode 100644
index 000000000..e387e14d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast
@@ -0,0 +1,53 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ LITERAL
+ INT_NUMBER "1"
+ R_CURLY "}"
+ WHITESPACE " "
+ AMP "&"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ LITERAL
+ INT_NUMBER "1"
+ R_CURLY "}"
+ WHITESPACE " "
+ EXPR_STMT
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs
new file mode 100644
index 000000000..7e8bd87bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs
@@ -0,0 +1,4 @@
+fn f() {
+ let _ = {1} & 2;
+ {1} &2;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast
new file mode 100644
index 000000000..0d6cd390e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs
new file mode 100644
index 000000000..c4021dc10
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ match () { };
+ match S {};
+ match { } { _ => () };
+ match { S {} } {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast
new file mode 100644
index 000000000..62cff1220
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs
new file mode 100644
index 000000000..5733666b6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ return;
+ return 92;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast
new file mode 100644
index 000000000..60ac3b3c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast
@@ -0,0 +1,20 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Result"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs
new file mode 100644
index 000000000..defd110c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs
@@ -0,0 +1 @@
+type Result<T> = ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast
new file mode 100644
index 000000000..950421feb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast
@@ -0,0 +1,63 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_PAT
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_ARM
+ SLICE_PAT
+ L_BRACK "["
+ R_BRACK "]"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs
new file mode 100644
index 000000000..2edd578f9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ match () {
+ _ => {}
+ () => {}
+ [] => {}
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast
new file mode 100644
index 000000000..a23364d15
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs
new file mode 100644
index 000000000..81f44c533
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs
@@ -0,0 +1,4 @@
+fn a() {}
+fn b() { let _ = 1; }
+fn c() { 1; 2; }
+fn d() { 1; 2 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast
new file mode 100644
index 000000000..a000d7e59
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast
@@ -0,0 +1,40 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs
new file mode 100644
index 000000000..f0920b2a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs
@@ -0,0 +1 @@
+fn foo<T>() where T: Copy {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast
new file mode 100644
index 000000000..c3aa8c15d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ TRY_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ QUESTION "?"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs
new file mode 100644
index 000000000..8b74f7bc8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ x?;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast
new file mode 100644
index 000000000..c5da79974
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs
new file mode 100644
index 000000000..04c0344fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs
@@ -0,0 +1 @@
+type Foo = Bar;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast
new file mode 100644
index 000000000..879676309
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast
@@ -0,0 +1,14 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs
new file mode 100644
index 000000000..647799d7c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs
@@ -0,0 +1 @@
+impl S {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast
new file mode 100644
index 000000000..3a59cf7b8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ RANGE_EXPR
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ DOT "."
+ NAME_REF
+ IDENT "b"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ DOT2 ".."
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs
new file mode 100644
index 000000000..e7b7cfc6b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ let x = 1..;
+ match 1.. { _ => () };
+ match a.b()..S { _ => () };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast
new file mode 100644
index 000000000..7600457a9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast
@@ -0,0 +1,117 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Obj"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "PartialEq"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs
new file mode 100644
index 000000000..8ac7b9e10
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs
@@ -0,0 +1,3 @@
+type A = for<'a> fn() -> ();
+type B = for<'a> unsafe extern "C" fn(&'a ()) -> ();
+type Obj = for<'a> PartialEq<&'a i32>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast
new file mode 100644
index 000000000..108b0802c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast
@@ -0,0 +1,139 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ COMMENT "// reference operator"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "raw"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "raw"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ COMMENT "// raw reference operator"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ RAW_KW "raw"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ RAW_KW "raw"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs
new file mode 100644
index 000000000..c5262f446
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs
@@ -0,0 +1,10 @@
+fn foo() {
+ // reference operator
+ let _ = &1;
+ let _ = &mut &f();
+ let _ = &raw;
+ let _ = &raw.0;
+ // raw reference operator
+ let _ = &raw mut foo;
+ let _ = &raw const foo;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast
new file mode 100644
index 000000000..cdbc40fe0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast
@@ -0,0 +1,87 @@
+SOURCE_FILE@0..106
+ STRUCT@0..11
+ STRUCT_KW@0..6 "struct"
+ WHITESPACE@6..7 " "
+ NAME@7..10
+ IDENT@7..10 "Foo"
+ SEMICOLON@10..11 ";"
+ WHITESPACE@11..12 "\n"
+ STRUCT@12..25
+ STRUCT_KW@12..18 "struct"
+ WHITESPACE@18..19 " "
+ NAME@19..22
+ IDENT@19..22 "Foo"
+ WHITESPACE@22..23 " "
+ RECORD_FIELD_LIST@23..25
+ L_CURLY@23..24 "{"
+ R_CURLY@24..25 "}"
+ WHITESPACE@25..26 "\n"
+ STRUCT@26..39
+ STRUCT_KW@26..32 "struct"
+ WHITESPACE@32..33 " "
+ NAME@33..36
+ IDENT@33..36 "Foo"
+ TUPLE_FIELD_LIST@36..38
+ L_PAREN@36..37 "("
+ R_PAREN@37..38 ")"
+ SEMICOLON@38..39 ";"
+ WHITESPACE@39..40 "\n"
+ STRUCT@40..66
+ STRUCT_KW@40..46 "struct"
+ WHITESPACE@46..47 " "
+ NAME@47..50
+ IDENT@47..50 "Foo"
+ TUPLE_FIELD_LIST@50..65
+ L_PAREN@50..51 "("
+ TUPLE_FIELD@51..57
+ PATH_TYPE@51..57
+ PATH@51..57
+ PATH_SEGMENT@51..57
+ NAME_REF@51..57
+ IDENT@51..57 "String"
+ COMMA@57..58 ","
+ WHITESPACE@58..59 " "
+ TUPLE_FIELD@59..64
+ PATH_TYPE@59..64
+ PATH@59..64
+ PATH_SEGMENT@59..64
+ NAME_REF@59..64
+ IDENT@59..64 "usize"
+ R_PAREN@64..65 ")"
+ SEMICOLON@65..66 ";"
+ WHITESPACE@66..67 "\n"
+ STRUCT@67..105
+ STRUCT_KW@67..73 "struct"
+ WHITESPACE@73..74 " "
+ NAME@74..77
+ IDENT@74..77 "Foo"
+ WHITESPACE@77..78 " "
+ RECORD_FIELD_LIST@78..105
+ L_CURLY@78..79 "{"
+ WHITESPACE@79..84 "\n "
+ RECORD_FIELD@84..90
+ NAME@84..85
+ IDENT@84..85 "a"
+ COLON@85..86 ":"
+ WHITESPACE@86..87 " "
+ PATH_TYPE@87..90
+ PATH@87..90
+ PATH_SEGMENT@87..90
+ NAME_REF@87..90
+ IDENT@87..90 "i32"
+ COMMA@90..91 ","
+ WHITESPACE@91..96 "\n "
+ RECORD_FIELD@96..102
+ NAME@96..97
+ IDENT@96..97 "b"
+ COLON@97..98 ":"
+ WHITESPACE@98..99 " "
+ PATH_TYPE@99..102
+ PATH@99..102
+ PATH_SEGMENT@99..102
+ NAME_REF@99..102
+ IDENT@99..102 "f32"
+ COMMA@102..103 ","
+ WHITESPACE@103..104 "\n"
+ R_CURLY@104..105 "}"
+ WHITESPACE@105..106 "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast
new file mode 100644
index 000000000..29995bb75
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast
@@ -0,0 +1,19 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs
new file mode 100644
index 000000000..6e1b25101
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs
@@ -0,0 +1 @@
+type T = (i32);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast
new file mode 100644
index 000000000..403c265ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast
@@ -0,0 +1,136 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ FALSE_KW "false"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ FLOAT_NUMBER "2.0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE "b'a'"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ CHAR "'b'"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "\"c\""
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "r\"d\""
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE_STRING "b\"e\""
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE_STRING "br\"f\""
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs
new file mode 100644
index 000000000..2e11a5a6e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs
@@ -0,0 +1,12 @@
+fn foo() {
+ let _ = true;
+ let _ = false;
+ let _ = 1;
+ let _ = 2.0;
+ let _ = b'a';
+ let _ = 'b';
+ let _ = "c";
+ let _ = r"d";
+ let _ = b"e";
+ let _ = br"f";
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast
new file mode 100644
index 000000000..6687c843f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs
new file mode 100644
index 000000000..d22d8cada
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs
@@ -0,0 +1,2 @@
+fn foo() {}
+fn bar() -> () {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast
new file mode 100644
index 000000000..cbf5e84e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast
@@ -0,0 +1,67 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs
new file mode 100644
index 000000000..560eb05b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs
@@ -0,0 +1,6 @@
+fn foo(){
+ if break {}
+ while break {}
+ for i in break {}
+ match break {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast
new file mode 100644
index 000000000..cf7236f62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs
new file mode 100644
index 000000000..540eacb02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs
@@ -0,0 +1 @@
+struct S<T = i32>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast
new file mode 100644
index 000000000..372c867ae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast
@@ -0,0 +1,23 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs
new file mode 100644
index 000000000..e3ba5e87f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs
@@ -0,0 +1 @@
+type F = fn() -> ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast
new file mode 100644
index 000000000..6969259fc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast
@@ -0,0 +1,34 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ INDEX_EXPR
+ INDEX_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "2"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs
new file mode 100644
index 000000000..b9ba78a6c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ x[1][2];
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast
new file mode 100644
index 000000000..d39c3df2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs
new file mode 100644
index 000000000..4d719c433
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs
@@ -0,0 +1 @@
+fn main() { let _ = (); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast
new file mode 100644
index 000000000..f89cc15e7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast
@@ -0,0 +1,125 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "test"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "test"
+ BANG "!"
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs
new file mode 100644
index 000000000..4919665cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs
@@ -0,0 +1,13 @@
+fn foo() {
+ if true {}
+ loop {}
+ match () {}
+ while true {}
+ for _ in () {}
+ {}
+ {}
+ macro_rules! test {
+ () => {}
+ }
+ test!{}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast
new file mode 100644
index 000000000..d240a52f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast
@@ -0,0 +1,103 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs
new file mode 100644
index 000000000..9d55bedbb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs
@@ -0,0 +1,4 @@
+fn a() {}
+fn b(x: i32) {}
+fn c(x: i32, ) {}
+fn d(x: i32, y: ()) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast
new file mode 100644
index 000000000..6bc3c0fb0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ ARRAY_EXPR
+ L_BRACK "["
+ R_BRACK "]"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs
new file mode 100644
index 000000000..972197d2a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ for x in [] {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast
new file mode 100644
index 000000000..f69ae1d64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast
@@ -0,0 +1,175 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "f"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "g"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ IDENT "h"
+ COLON ":"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ IDENT "h"
+ COLON ":"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ REST_PAT
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "any"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs
new file mode 100644
index 000000000..0bfaae7c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ let S {} = ();
+ let S { f, ref mut g } = ();
+ let S { h: _, ..} = ();
+ let S { h: _, } = ();
+ let S { #[cfg(any())] .. } = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast
new file mode 100644
index 000000000..60395948c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast
@@ -0,0 +1,55 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs
new file mode 100644
index 000000000..4dc1999d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ [];
+ [1];
+ [1, 2,];
+ [1; 2];
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast
new file mode 100644
index 000000000..fd83daf84
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs
new file mode 100644
index 000000000..17ed20e5b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs
@@ -0,0 +1 @@
+type F = Box<Fn(i32) -> ()>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast
new file mode 100644
index 000000000..c25ad8430
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast
@@ -0,0 +1,246 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ PIPE "|"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ PIPE "|"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs
new file mode 100644
index 000000000..75516d258
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs
@@ -0,0 +1,15 @@
+fn foo() {
+ || ();
+ || -> i32 { 92 };
+ |x| x;
+ move |x: i32,| x;
+ async || {};
+ move || {};
+ async move || {};
+ static || {};
+ static move || {};
+ static async || {};
+ static async move || {};
+ for<'a> || {};
+ for<'a> move || {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast
new file mode 100644
index 000000000..dcbcfe123
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast
@@ -0,0 +1,63 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "y"
+ DOT "."
+ NAME_REF
+ IDENT "bar"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_ANGLE ">"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs
new file mode 100644
index 000000000..1a3aa35ae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ x.foo();
+ y.bar::<T>(1, 2,);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast
new file mode 100644
index 000000000..ac5a71703
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PAREN_EXPR
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ TUPLE_EXPR
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs
new file mode 100644
index 000000000..e4f774280
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ ();
+ (1);
+ (1,);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast
new file mode 100644
index 000000000..48d0bde84
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast
@@ -0,0 +1,70 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ LOOP_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ COLON ":"
+ WHITESPACE " "
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FOR_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ COLON ":"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs
new file mode 100644
index 000000000..48e83f263
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ 'a: loop {}
+ 'b: while true {}
+ 'c: for x in () {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast
new file mode 100644
index 000000000..cebe98c43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs
new file mode 100644
index 000000000..ba719879d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let (a, b, ..) = ();
+ let (a,) = ();
+ let (..) = ();
+ let () = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast
new file mode 100644
index 000000000..eb1c32474
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast
@@ -0,0 +1,128 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "e"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "g"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs
new file mode 100644
index 000000000..820a9e72c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs
@@ -0,0 +1,8 @@
+fn main() {
+ let a = ();
+ let mut b = ();
+ let ref c = ();
+ let ref mut d = ();
+ let e @ _ = ();
+ let ref mut f @ g @ _ = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast
new file mode 100644
index 000000000..8bd90a7f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast
@@ -0,0 +1,57 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "some_expr"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ R_CURLY "}"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Ok"
+ ARG_LIST
+ L_PAREN "("
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs
new file mode 100644
index 000000000..bbf09e367
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs
@@ -0,0 +1,3 @@
+fn foo(){
+ ;;;some_expr();;;;{;;;};;;;Ok(())
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast
new file mode 100644
index 000000000..aab774165
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs
new file mode 100644
index 000000000..a602e0018
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs
@@ -0,0 +1 @@
+struct S<T>(T) where T: Clone;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast
new file mode 100644
index 000000000..1699602f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs
new file mode 100644
index 000000000..648ffe565
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs
@@ -0,0 +1 @@
+struct S (#[attr] f32);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast
new file mode 100644
index 000000000..8165cb7d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast
@@ -0,0 +1,46 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MACRO_TYPE
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MACRO_TYPE
+ MACRO_CALL
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs
new file mode 100644
index 000000000..edb470c89
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs
@@ -0,0 +1,2 @@
+type A = foo!();
+type B = crate::foo!();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast
new file mode 100644
index 000000000..96318b521
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast
@@ -0,0 +1,77 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs
new file mode 100644
index 000000000..cfe05ce4e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ match () {
+ _ if foo => (),
+ _ if let foo = bar => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast
new file mode 100644
index 000000000..6fd9f4246
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast
@@ -0,0 +1,84 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Inner attribute\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Can be\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Stacked\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs
new file mode 100644
index 000000000..54a67c9d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs
@@ -0,0 +1,8 @@
+fn foo() {
+ match () {
+ #![doc("Inner attribute")]
+ #![doc("Can be")]
+ #![doc("Stacked")]
+ _ => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast
new file mode 100644
index 000000000..0f7580c1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast
@@ -0,0 +1,151 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"some\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"other\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"many\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"attributes\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"before\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs
new file mode 100644
index 000000000..676db42d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs
@@ -0,0 +1,12 @@
+fn foo() {
+ match () {
+ #[cfg(feature = "some")]
+ _ => (),
+ #[cfg(feature = "other")]
+ _ => (),
+ #[cfg(feature = "many")]
+ #[cfg(feature = "attributes")]
+ #[cfg(feature = "before")]
+ _ => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast
new file mode 100644
index 000000000..338d53995
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast
@@ -0,0 +1,62 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "printf"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "format"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ DOT3 "..."
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs
new file mode 100644
index 000000000..533096cd5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs
@@ -0,0 +1 @@
+extern "C" { fn printf(format: *const i8, ..., _: u8) -> i32; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast
new file mode 100644
index 000000000..8d9b61630
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs
new file mode 100644
index 000000000..0f454d121
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs
@@ -0,0 +1 @@
+fn foo() { crate::foo(); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast
new file mode 100644
index 000000000..a1df70841
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "field"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs
new file mode 100644
index 000000000..a6c7760c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs
@@ -0,0 +1,3 @@
+fn main() {
+ S { #[cfg(test)] field: 1 }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast
new file mode 100644
index 000000000..81b7f2b3c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast
@@ -0,0 +1,105 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_BRACK "]"
+ WHITESPACE " "
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ BANG "!"
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "C"
+ R_BRACK "]"
+ WHITESPACE " "
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "D"
+ R_BRACK "]"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "D"
+ R_BRACK "]"
+ WHITESPACE " "
+ RETURN_KW "return"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs
new file mode 100644
index 000000000..b28c078f9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ #[A] foo();
+ #[B] bar!{}
+ #[C] #[D] {}
+ #[D] return ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast
new file mode 100644
index 000000000..cedaa9045
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ MACRO_PAT
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "m"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs
new file mode 100644
index 000000000..811181d9b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let m!(x) = 0;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast
new file mode 100644
index 000000000..de9d0fc19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs
new file mode 100644
index 000000000..8003999fd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs
@@ -0,0 +1 @@
+fn f() { let x: i32 = 92; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast
new file mode 100644
index 000000000..aec8fbf47
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ TRY_KW "try"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs
new file mode 100644
index 000000000..0f1b41eb6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ let _ = try {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast
new file mode 100644
index 000000000..b73780261
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast
@@ -0,0 +1,31 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ EXISTENTIAL_KW "existential"
+ WHITESPACE " "
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs
new file mode 100644
index 000000000..23baf7145
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs
@@ -0,0 +1 @@
+existential type Foo: Fn() -> usize;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rast
new file mode 100644
index 000000000..b21f37cd8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1i32"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1i32"
+ COMMA ","
+ WHITESPACE " "
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "z"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ ARG_LIST
+ L_PAREN "("
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1i32"
+ COMMA ","
+ WHITESPACE " "
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs
new file mode 100644
index 000000000..fc9923b71
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ let x = box 1i32;
+ let y = (box 1i32, box 2i32);
+ let z = Foo(box 1i32, box 2i32);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast
new file mode 100644
index 000000000..f5ee12fe9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast
@@ -0,0 +1,64 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "simple_function"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "LocalEnum"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "One"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Two"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs
new file mode 100644
index 000000000..eadc7fffb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs
@@ -0,0 +1,8 @@
+fn simple_function() {
+ enum LocalEnum {
+ One,
+ Two,
+ };
+ fn f() {};
+ struct S {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast
new file mode 100644
index 000000000..9d37ada0d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast
@@ -0,0 +1,70 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ AWAIT_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ AWAIT_KW "await"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ AWAIT_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ DOT "."
+ AWAIT_KW "await"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ TRY_EXPR
+ AWAIT_EXPR
+ CALL_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ DOT "."
+ AWAIT_KW "await"
+ QUESTION "?"
+ DOT "."
+ NAME_REF
+ IDENT "hello"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs
new file mode 100644
index 000000000..d2ba89ca6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ x.await;
+ x.0.await;
+ x.0().await?.hello();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast
new file mode 100644
index 000000000..8cbc98c51
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast
@@ -0,0 +1,111 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "print_all"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ CONST_ARG
+ LITERAL
+ TRUE_KW "true"
+ R_ANGLE ">"
+ COMMA ","
+ WHITESPACE " "
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Display"
+ COMMA ","
+ WHITESPACE " "
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "printables"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs
new file mode 100644
index 000000000..0f7a2d160
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs
@@ -0,0 +1 @@
+fn print_all<T: Iterator<Item, Item::Item, Item::<true>, Item: Display, Item<'a> = Item>>(printables: T) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast
new file mode 100644
index 000000000..553ac356d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast
@@ -0,0 +1,66 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "p"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ FIELD_EXPR
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "p"
+ R_CURLY "}"
+ DOT "."
+ NAME_REF
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "10"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs
new file mode 100644
index 000000000..76007e3ee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ let mut p = F{x: 5};
+ {p}.x = 10;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast
new file mode 100644
index 000000000..db583f7d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "must_use"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs
new file mode 100644
index 000000000..35155057a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs
@@ -0,0 +1 @@
+fn f(#[must_use] self) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast
new file mode 100644
index 000000000..c63ea020a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr1"
+ R_BRACK "]"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "pat"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Type"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs
new file mode 100644
index 000000000..c238be791
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs
@@ -0,0 +1 @@
+fn f(#[attr1] pat: Type) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast
new file mode 100644
index 000000000..90cf3101c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs
new file mode 100644
index 000000000..af0d40a7a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ for x in 0 .. {
+ break;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast
new file mode 100644
index 000000000..df22decde
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast
@@ -0,0 +1,111 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Outer"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "i"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ IDENT "j"
+ COLON ":"
+ WHITESPACE " "
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Inner"
+ L_PAREN "("
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ REF_PAT
+ AMP "&"
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs
new file mode 100644
index 000000000..9d458aa1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs
@@ -0,0 +1,5 @@
+fn main() {
+ let box i = ();
+ let box Outer { box i, j: box Inner(box &x) } = ();
+ let box ref mut i = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast
new file mode 100644
index 000000000..4d4011e6b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast
@@ -0,0 +1,456 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ COMMENT "//"
+ WHITESPACE "\n "
+ COMMENT "// Tuples"
+ WHITESPACE "\n "
+ COMMENT "//"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Tuple"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Tuple"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Tuple"
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Tuple"
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ COMMENT "//"
+ WHITESPACE "\n "
+ COMMENT "// Slices"
+ WHITESPACE "\n "
+ COMMENT "//"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "tail"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "cons"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "mid"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "cons"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "cons"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "mid"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "tail"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "mid"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "cons"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs
new file mode 100644
index 000000000..3262f27e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs
@@ -0,0 +1,25 @@
+fn main() {
+ let .. = ();
+ //
+ // Tuples
+ //
+ let (a, ..) = ();
+ let (a, ..,) = ();
+ let Tuple(a, ..) = ();
+ let Tuple(a, ..,) = ();
+ let (.., ..) = ();
+ let Tuple(.., ..) = ();
+ let (.., a, ..) = ();
+ let Tuple(.., a, ..) = ();
+ //
+ // Slices
+ //
+ let [..] = ();
+ let [head, ..] = ();
+ let [head, tail @ ..] = ();
+ let [head, .., cons] = ();
+ let [head, mid @ .., cons] = ();
+ let [head, .., .., cons] = ();
+ let [head, .., mid, tail @ ..] = ();
+ let [head, .., mid, .., cons] = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast
new file mode 100644
index 000000000..f3d2fde46
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast
@@ -0,0 +1,123 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ INT_NUMBER "0"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "any"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs
new file mode 100644
index 000000000..53cfdc22d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ let S { 0: 1 } = ();
+ let S { x: 1 } = ();
+ let S { #[cfg(any())] x: 1 } = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast
new file mode 100644
index 000000000..4079d2a99
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CAST_EXPR
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs
new file mode 100644
index 000000000..70559c5ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs
@@ -0,0 +1 @@
+fn f() { let _ = &1 as *const i32; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast
new file mode 100644
index 000000000..24595a1a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast
@@ -0,0 +1,23 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs
new file mode 100644
index 000000000..8cdb3b703
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs
@@ -0,0 +1 @@
+struct S<const N: u32>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast
new file mode 100644
index 000000000..01de13a90
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast
@@ -0,0 +1,19 @@
+SOURCE_FILE
+ MACRO_DEF
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ TOKEN_TREE
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "i"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs
new file mode 100644
index 000000000..a014ae546
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs
@@ -0,0 +1 @@
+macro m($i:ident) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast
new file mode 100644
index 000000000..6eb8af331
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ SLICE_TYPE
+ L_BRACK "["
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ INT_NUMBER "2"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs
new file mode 100644
index 000000000..2ac310924
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs
@@ -0,0 +1 @@
+const A: &[i64] = &[1, #[cfg(test)] 2];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast
new file mode 100644
index 000000000..24977a22a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "N"
+ R_ANGLE ">"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs
new file mode 100644
index 000000000..cb0a105c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs
@@ -0,0 +1 @@
+impl<const N: u32> Bar<N> {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast
new file mode 100644
index 000000000..a88b3393f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast
@@ -0,0 +1,15 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs
new file mode 100644
index 000000000..8f3b7ef11
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs
@@ -0,0 +1 @@
+fn foo() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast
new file mode 100644
index 000000000..2ef66484a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Z"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs
new file mode 100644
index 000000000..71d76789f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs
@@ -0,0 +1 @@
+trait Z<U> = T<U>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast
new file mode 100644
index 000000000..ae1074c36
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ INT_NUMBER "92"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs
new file mode 100644
index 000000000..5daf1d7b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs
@@ -0,0 +1,3 @@
+fn main() {
+ foo(#[attr] 92)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast
new file mode 100644
index 000000000..2dede8359
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast
@@ -0,0 +1,56 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ COMMA ","
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs
new file mode 100644
index 000000000..d4c163822
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs
@@ -0,0 +1,2 @@
+struct B(pub (super::A));
+struct B(pub (crate::A,));
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast
new file mode 100644
index 000000000..ee8465e6c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast
@@ -0,0 +1,58 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Qux"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "baz"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs
new file mode 100644
index 000000000..80a1701fd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs
@@ -0,0 +1,2 @@
+type Foo = fn(Bar::Baz);
+type Qux = fn(baz: Bar::Baz);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast
new file mode 100644
index 000000000..30a2842e5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ DYN_TRAIT_TYPE
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Test"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs
new file mode 100644
index 000000000..47a71fd19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs
@@ -0,0 +1 @@
+type A = for<'a> Test<'a> + Send;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast
new file mode 100644
index 000000000..39857b23c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ INT_NUMBER "2"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs
new file mode 100644
index 000000000..f84b7ab31
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs
@@ -0,0 +1 @@
+const A: (i64, i64) = (1, #[cfg(test)] 2);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast
new file mode 100644
index 000000000..318eb89de
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast
@@ -0,0 +1,70 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "bar"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "baz"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "qux"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Qux"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Quux"
+ PIPE "|"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs
new file mode 100644
index 000000000..6ca8dd2d6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let foo = |bar, baz: Baz, qux: Qux::Quux| ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast
new file mode 100644
index 000000000..59de2b9f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast
@@ -0,0 +1,79 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ CONST_BLOCK_PAT
+ CONST_KW "const"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "15"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ CONST_BLOCK_PAT
+ CONST_KW "const"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs
new file mode 100644
index 000000000..dce9defac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs
@@ -0,0 +1,4 @@
+fn main() {
+ let const { 15 } = ();
+ let const { foo(); bar() } = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast
new file mode 100644
index 000000000..ce425a1af
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ DOT3 "..."
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs
new file mode 100644
index 000000000..7b4c62658
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs
@@ -0,0 +1 @@
+fn foo(..., (x, y): (i32, i32)) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast
new file mode 100644
index 000000000..6a2046d9e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast
@@ -0,0 +1,112 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ PAREN_PAT
+ L_PAREN "("
+ OR_PAT
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ REF_PAT
+ AMP "&"
+ PAREN_PAT
+ L_PAREN "("
+ OR_PAT
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_PAT
+ L_PAREN "("
+ OR_PAT
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ SLICE_PAT
+ L_BRACK "["
+ OR_PAT
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ R_BRACK "]"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs
new file mode 100644
index 000000000..a26316605
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs
@@ -0,0 +1,8 @@
+fn main() {
+ match () {
+ (_ | _) => (),
+ &(_ | _) => (),
+ (_ | _,) => (),
+ [_ | _,] => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast
new file mode 100644
index 000000000..8a525c6e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs
new file mode 100644
index 000000000..1ebbe5b03
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs
@@ -0,0 +1 @@
+type Foo = fn(_: bar);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast
new file mode 100644
index 000000000..9f0c5a761
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ VARIANT
+ NAME
+ IDENT "X"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "10"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs
new file mode 100644
index 000000000..c8c5c0f17
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs
@@ -0,0 +1 @@
+enum E { X(i32) = 10 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast
new file mode 100644
index 000000000..f667c1972
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "v"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ LITERAL
+ INT_NUMBER "1"
+ R_CURLY "}"
+ AMP "&"
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs
new file mode 100644
index 000000000..e325e4667
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs
@@ -0,0 +1 @@
+fn f() { v = {1}&2; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast
new file mode 100644
index 000000000..93238bd8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs
new file mode 100644
index 000000000..061118d3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs
@@ -0,0 +1 @@
+fn main() { || -> i32 { 92 }(); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast
new file mode 100644
index 000000000..45cd4d2aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast
@@ -0,0 +1,57 @@
+SOURCE_FILE
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "i"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_BRACK "["
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "i"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs
new file mode 100644
index 000000000..6033a28cd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs
@@ -0,0 +1,2 @@
+macro_rules! m ( ($i:ident) => {} );
+macro_rules! m [ ($i:ident) => {} ];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast
new file mode 100644
index 000000000..0adb678fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "try"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "Ok"
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs
new file mode 100644
index 000000000..61a6b46a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs
@@ -0,0 +1 @@
+fn foo() { try!(Ok(())); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast
new file mode 100644
index 000000000..31aa58de2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ YIELD_EXPR
+ YIELD_KW "yield"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ YIELD_EXPR
+ YIELD_KW "yield"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs
new file mode 100644
index 000000000..596e221f7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ yield;
+ yield 1;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast
new file mode 100644
index 000000000..ac45c5695
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ IN_KW "in"
+ WHITESPACE " "
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ IN_KW "in"
+ WHITESPACE " "
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs
new file mode 100644
index 000000000..2856dbd84
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs
@@ -0,0 +1,2 @@
+pub(in super::A) struct S;
+pub(in crate) struct S;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast
new file mode 100644
index 000000000..e6916ae97
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "try"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs
new file mode 100644
index 000000000..2e2ab6e60
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs
@@ -0,0 +1 @@
+macro_rules! try { () => {} }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast
new file mode 100644
index 000000000..f7c7aaabc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs
new file mode 100644
index 000000000..3252d6f36
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs
@@ -0,0 +1 @@
+impl const Send for S {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast
new file mode 100644
index 000000000..181251d4f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BLOCK_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'label"
+ COLON ":"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs
new file mode 100644
index 000000000..18b4ff4b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs
@@ -0,0 +1 @@
+fn f() { 'label: {}; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast
new file mode 100644
index 000000000..7c2f7b34c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs
new file mode 100644
index 000000000..05c20a68f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs
@@ -0,0 +1,3 @@
+impl T for Foo {
+ default async unsafe fn foo() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast
new file mode 100644
index 000000000..06b37e239
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs
new file mode 100644
index 000000000..78c3b4d85
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs
@@ -0,0 +1,3 @@
+impl T for Foo {
+ default async fn foo() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast
new file mode 100644
index 000000000..b180d0b72
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs
new file mode 100644
index 000000000..96340f84a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs
@@ -0,0 +1,3 @@
+default unsafe impl T for Foo {
+ default unsafe fn foo() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast
new file mode 100644
index 000000000..7a8e8cf1d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs
new file mode 100644
index 000000000..a6836cbd5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs
@@ -0,0 +1 @@
+default impl T for Foo {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast
new file mode 100644
index 000000000..297f7575c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ PATH_PAT
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ INFER_TYPE
+ UNDERSCORE "_"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs
new file mode 100644
index 000000000..ebe26834d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs
@@ -0,0 +1 @@
+fn main() { let <_>::Foo = (); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast
new file mode 100644
index 000000000..3d3587a70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1u32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs
new file mode 100644
index 000000000..1360eda05
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs
@@ -0,0 +1 @@
+fn f() { let 0 .. = 1u32; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast
new file mode 100644
index 000000000..5a5aca96f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs
new file mode 100644
index 000000000..fc76e17dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs
@@ -0,0 +1 @@
+extern crate foo as bar;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast
new file mode 100644
index 000000000..edea4245f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast
@@ -0,0 +1,10 @@
+SOURCE_FILE
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ SELF_KW "self"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs
new file mode 100644
index 000000000..c969ed109
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs
@@ -0,0 +1 @@
+extern crate self;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast
new file mode 100644
index 000000000..4d505916c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast
@@ -0,0 +1,8 @@
+SOURCE_FILE
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs
new file mode 100644
index 000000000..f21af614d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs
@@ -0,0 +1 @@
+mod a;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast
new file mode 100644
index 000000000..d5e3f3493
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast
@@ -0,0 +1,12 @@
+SOURCE_FILE
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs
new file mode 100644
index 000000000..16b1b43e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs
@@ -0,0 +1 @@
+mod b { }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast
new file mode 100644
index 000000000..6e5f6c2d2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast
@@ -0,0 +1,25 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "String"
+ COMMA ","
+ WHITESPACE " "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs
new file mode 100644
index 000000000..b4e05717e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs
@@ -0,0 +1 @@
+struct S(String, usize);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast
new file mode 100644
index 000000000..78f968207
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast
@@ -0,0 +1,11 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs
new file mode 100644
index 000000000..5f1a34f49
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs
@@ -0,0 +1 @@
+struct S {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast
new file mode 100644
index 000000000..909983c9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast
@@ -0,0 +1,20 @@
+SOURCE_FILE
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs
new file mode 100644
index 000000000..6d5f5be65
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs
@@ -0,0 +1 @@
+const C: u32 = 92;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast
new file mode 100644
index 000000000..065d7e7e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs
new file mode 100644
index 000000000..a3bd7787d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs
@@ -0,0 +1 @@
+struct S { a: i32, b: f32 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast
new file mode 100644
index 000000000..d81b4ff26
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast
@@ -0,0 +1,19 @@
+SOURCE_FILE
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs
new file mode 100644
index 000000000..c1d5cdfc6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs
@@ -0,0 +1 @@
+const _: u32 = 0;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast
new file mode 100644
index 000000000..5cf305d26
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast
@@ -0,0 +1,27 @@
+SOURCE_FILE
+ MACRO_DEF
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "i"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs
new file mode 100644
index 000000000..5ed0c777d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs
@@ -0,0 +1 @@
+macro m { ($i:ident) => {} }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast
new file mode 100644
index 000000000..af608fc4a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "U"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "i"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs
new file mode 100644
index 000000000..5edf50de3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs
@@ -0,0 +1 @@
+struct U { i: i32, f: f32 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast
new file mode 100644
index 000000000..01f212e71
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "X"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Debug"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Display"
+ R_ANGLE ">"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs
new file mode 100644
index 000000000..4a51926a6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs
@@ -0,0 +1 @@
+trait X<U: Debug + Display> {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast
new file mode 100644
index 000000000..438dea6f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast
@@ -0,0 +1,8 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs
new file mode 100644
index 000000000..28377c276
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs
@@ -0,0 +1 @@
+struct S;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast
new file mode 100644
index 000000000..8662423f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ STAR "*"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs
new file mode 100644
index 000000000..b8c613440
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs
@@ -0,0 +1,2 @@
+use *;
+use std::{*};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast
new file mode 100644
index 000000000..bab831456
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Hash"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs
new file mode 100644
index 000000000..e6ad2b56a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs
@@ -0,0 +1 @@
+trait T: Hash + Clone {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast
new file mode 100644
index 000000000..46cd8ee66
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs
new file mode 100644
index 000000000..52a6a806f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs
@@ -0,0 +1 @@
+trait T where Self: Copy {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast
new file mode 100644
index 000000000..ef0dd6ba1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "stdlib"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ UNDERSCORE "_"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs
new file mode 100644
index 000000000..19a6906a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs
@@ -0,0 +1,2 @@
+use std as stdlib;
+use Trait as _;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast
new file mode 100644
index 000000000..9cb3c8a5c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs
new file mode 100644
index 000000000..915e2c932
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs
@@ -0,0 +1 @@
+impl S { #![attr] }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast
new file mode 100644
index 000000000..4443d9d14
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Z"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ R_ANGLE ">"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Z"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs
new file mode 100644
index 000000000..a90d54b01
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs
@@ -0,0 +1,2 @@
+trait Z<U> = T<U> where U: Copy;
+trait Z<U> = where Self: T<U>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast
new file mode 100644
index 000000000..98231cdc2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "outer"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "tree"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "inner"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "tree"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs
new file mode 100644
index 000000000..3cc394348
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs
@@ -0,0 +1 @@
+use outer::tree::{inner::tree};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast
new file mode 100644
index 000000000..ede22dbaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast
@@ -0,0 +1,72 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "std"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "collections"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "m"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "m"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "m"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs
new file mode 100644
index 000000000..5b22f8852
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs
@@ -0,0 +1,6 @@
+use ::std;
+use std::collections;
+
+use self::m;
+use super::m;
+use crate::m;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast
new file mode 100644
index 000000000..ed3cafae1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast
@@ -0,0 +1,20 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "collections"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs
new file mode 100644
index 000000000..c3086f51a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs
@@ -0,0 +1 @@
+use std::{collections};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast
new file mode 100644
index 000000000..b4dc1f25d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ COLON2 "::"
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ COLON2 "::"
+ STAR "*"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs
new file mode 100644
index 000000000..caae0ba02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs
@@ -0,0 +1,2 @@
+use ::*;
+use std::{::*};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast
new file mode 100644
index 000000000..d255adb5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast
@@ -0,0 +1,13 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs
new file mode 100644
index 000000000..dd601cffe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs
@@ -0,0 +1 @@
+use std::*;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast
new file mode 100644
index 000000000..28a216e87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast
@@ -0,0 +1,46 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "lt_attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "t_attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs
new file mode 100644
index 000000000..0509f81da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs
@@ -0,0 +1 @@
+fn foo<#[lt_attr] 'a, #[t_attr] T>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast
new file mode 100644
index 000000000..25761ed8c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "collections"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs
new file mode 100644
index 000000000..48ac87b14
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs
@@ -0,0 +1 @@
+use std::collections;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast
new file mode 100644
index 000000000..c595031f3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast
@@ -0,0 +1,25 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs
new file mode 100644
index 000000000..2bb38ece8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs
@@ -0,0 +1 @@
+fn f<'a: 'b>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast
new file mode 100644
index 000000000..ea8866da2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "90"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ R_CURLY "}"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs
new file mode 100644
index 000000000..1c279db28
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs
@@ -0,0 +1 @@
+type T = S<{90 + 2}>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast
new file mode 100644
index 000000000..becb77e04
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs
new file mode 100644
index 000000000..b250bc6bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs
@@ -0,0 +1 @@
+fn f<T: Clone>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast
new file mode 100644
index 000000000..1e0300717
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "92"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs
new file mode 100644
index 000000000..8b5e5dbe1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs
@@ -0,0 +1 @@
+type T = S<92>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast
new file mode 100644
index 000000000..becb77e04
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs
new file mode 100644
index 000000000..b250bc6bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs
@@ -0,0 +1 @@
+fn f<T: Clone>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast
new file mode 100644
index 000000000..f2e4e0106
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "StreamingIterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs
new file mode 100644
index 000000000..daae97e4f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs
@@ -0,0 +1 @@
+type T = StreamingIterator<Item<'a>: Clone>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast
new file mode 100644
index 000000000..dbd7ff306
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'static"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs
new file mode 100644
index 000000000..41715aa27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs
@@ -0,0 +1 @@
+type T = S<'static>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast
new file mode 100644
index 000000000..970431840
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "StreamingIterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs
new file mode 100644
index 000000000..359141747
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs
@@ -0,0 +1 @@
+type T = StreamingIterator<Item<'a> = &'a T>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast
new file mode 100644
index 000000000..11002bf98
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "MAX"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs
new file mode 100644
index 000000000..f3da43ca0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs
@@ -0,0 +1 @@
+struct A<const N: i32 = i32::MAX>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast
new file mode 100644
index 000000000..03d414e33
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast
@@ -0,0 +1,27 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ LITERAL
+ STRING "\"hello\""
+ COMMA ","
+ WHITESPACE " "
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "0xdeadbeef"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs
new file mode 100644
index 000000000..7eacada73
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs
@@ -0,0 +1 @@
+type T = S<"hello", 0xdeadbeef>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast
new file mode 100644
index 000000000..5a01f154b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast
@@ -0,0 +1,25 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs
new file mode 100644
index 000000000..f2ccc558b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs
@@ -0,0 +1 @@
+type T = S<i32>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast
new file mode 100644
index 000000000..e504badbd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "92"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs
new file mode 100644
index 000000000..d0a87bdc0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs
@@ -0,0 +1 @@
+type T = S<-92>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast
new file mode 100644
index 000000000..aea23e463
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ LITERAL
+ TRUE_KW "true"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs
new file mode 100644
index 000000000..4b92e2d48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs
@@ -0,0 +1 @@
+type T = S<true>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast
new file mode 100644
index 000000000..1b6399158
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs
new file mode 100644
index 000000000..232c0db41
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs
@@ -0,0 +1 @@
+fn f() { let x = 92; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast
new file mode 100644
index 000000000..ce7f1a35e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "opt"
+ WHITESPACE " "
+ LET_ELSE
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs
new file mode 100644
index 000000000..8303de06f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs
@@ -0,0 +1 @@
+fn f() { let Some(x) = opt else { return }; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast
new file mode 100644
index 000000000..ac8e1d93c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast
@@ -0,0 +1,31 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs
new file mode 100644
index 000000000..a94161dff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs
@@ -0,0 +1 @@
+fn f() { let x: i32; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast
new file mode 100644
index 000000000..88f8a7345
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ MACRO_TYPE
+ MACRO_CALL
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "syn"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Token"
+ BANG "!"
+ TOKEN_TREE
+ L_BRACK "["
+ UNDERSCORE "_"
+ R_BRACK "]"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs
new file mode 100644
index 000000000..8d43a53d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs
@@ -0,0 +1 @@
+type A = Foo<syn::Token![_]>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast
new file mode 100644
index 000000000..a23ddf69f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "MyStruct"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs
new file mode 100644
index 000000000..00d8feba9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs
@@ -0,0 +1 @@
+struct MyStruct(pub (u32, u32));
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast
new file mode 100644
index 000000000..fb8aa5acc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast
@@ -0,0 +1,44 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs
new file mode 100644
index 000000000..22a5b5f3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ S { .. } = S {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast
new file mode 100644
index 000000000..5f53d3451
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs
new file mode 100644
index 000000000..91acfb3a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ _ = 1;
+ Some(_) = None;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast
new file mode 100644
index 000000000..0607ff54f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast
@@ -0,0 +1,34 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs
new file mode 100644
index 000000000..551bde0b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs
@@ -0,0 +1 @@
+struct A<const N: i32 = { 1 }>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast
new file mode 100644
index 000000000..f14080c90
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast
@@ -0,0 +1,95 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ BLOCK_EXPR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ BLOCK_EXPR
+ CONST_KW "const"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs
new file mode 100644
index 000000000..c57d24b2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs
@@ -0,0 +1,4 @@
+fn f() { unsafe { } }
+fn f() { const { } }
+fn f() { async { } }
+fn f() { async move { } }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast
new file mode 100644
index 000000000..7210b7389
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs
new file mode 100644
index 000000000..a602d07f0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs
@@ -0,0 +1 @@
+type Foo where Foo: Copy = ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast
new file mode 100644
index 000000000..fa2733e7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast
@@ -0,0 +1,105 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "N"
+ EQ "="
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "3"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "TEST"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "N"
+ EQ "="
+ CONST_ARG
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "TEST"
+ R_CURLY "}"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs
new file mode 100644
index 000000000..b43c4e36a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs
@@ -0,0 +1,3 @@
+fn foo<F: Foo<N=3>>() {}
+const TEST: usize = 3;
+fn bar<F: Foo<N={TEST}>>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast
new file mode 100644
index 000000000..8e5231365
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs
new file mode 100644
index 000000000..879ecffa7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs
@@ -0,0 +1 @@
+struct A<const N: i32 = -1>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast
new file mode 100644
index 000000000..56e2d1095
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast
@@ -0,0 +1,47 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ QUESTION "?"
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs
new file mode 100644
index 000000000..f80dd90d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs
@@ -0,0 +1 @@
+fn f<T>() where T: ?for<> Sized {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rast
new file mode 100644
index 000000000..40b9ef804
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rast
@@ -0,0 +1 @@
+SOURCE_FILE
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rs
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rast
new file mode 100644
index 000000000..0e9639f23
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_ANGLE ">"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rs
new file mode 100644
index 000000000..512aeb3e7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rs
@@ -0,0 +1,3 @@
+struct S<T: Copy> {
+ f: T,
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rast
new file mode 100644
index 000000000..dd52e5850
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "foo"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rs
new file mode 100644
index 000000000..cc3866d25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rs
@@ -0,0 +1,3 @@
+struct S {
+ foo: u32
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rast
new file mode 100644
index 000000000..698957189
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rast
@@ -0,0 +1,2 @@
+SOURCE_FILE
+ SHEBANG "#!/use/bin/env rusti"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rs
new file mode 100644
index 000000000..53dc9e617
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rs
@@ -0,0 +1 @@
+#!/use/bin/env rusti \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rast
new file mode 100644
index 000000000..756d20e4d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rs
new file mode 100644
index 000000000..03210551c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rs
@@ -0,0 +1,2 @@
+fn foo() {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rast
new file mode 100644
index 000000000..cb63ba80e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rast
@@ -0,0 +1,194 @@
+SOURCE_FILE
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ TRUE_KW "true"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "ident"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "ident"
+ COMMA ","
+ WHITESPACE " "
+ INT_NUMBER "100"
+ COMMA ","
+ WHITESPACE " "
+ TRUE_KW "true"
+ COMMA ","
+ WHITESPACE " "
+ STRING "\"true\""
+ COMMA ","
+ WHITESPACE " "
+ IDENT "ident"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ INT_NUMBER "100"
+ COMMA ","
+ WHITESPACE " "
+ IDENT "ident"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"hello\""
+ COMMA ","
+ WHITESPACE " "
+ IDENT "ident"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "100"
+ R_PAREN ")"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "100"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "enabled"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TRUE_KW "true"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "enabled"
+ TOKEN_TREE
+ L_PAREN "("
+ TRUE_KW "true"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"hello\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "repr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "C"
+ COMMA ","
+ WHITESPACE " "
+ IDENT "align"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ INT_NUMBER "4"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "repr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "C"
+ COMMA ","
+ WHITESPACE " "
+ IDENT "align"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "4"
+ R_PAREN ")"
+ R_PAREN ")"
+ R_BRACK "]"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rs
new file mode 100644
index 000000000..e81f8b1e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rs
@@ -0,0 +1,10 @@
+#![attr]
+#![attr(true)]
+#![attr(ident)]
+#![attr(ident, 100, true, "true", ident = 100, ident = "hello", ident(100))]
+#![attr(100)]
+#![attr(enabled = true)]
+#![enabled(true)]
+#![attr("hello")]
+#![repr(C, align = 4)]
+#![repr(C, align(4))] \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rast
new file mode 100644
index 000000000..8b9259fd6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rast
@@ -0,0 +1,40 @@
+SOURCE_FILE
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ SELF_KW "self"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "baz"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rs
new file mode 100644
index 000000000..ab81a608c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rs
@@ -0,0 +1,3 @@
+extern crate foo;
+extern crate foo as bar;
+extern crate self as baz;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rast
new file mode 100644
index 000000000..adee67181
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rast
@@ -0,0 +1,77 @@
+SOURCE_FILE
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "e"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rs
new file mode 100644
index 000000000..4ff0d9795
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rs
@@ -0,0 +1,12 @@
+mod c {
+ fn foo() {
+ }
+ struct S {}
+}
+
+mod d {
+ #![attr]
+ mod e;
+ mod f {
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rast
new file mode 100644
index 000000000..04a44ef7e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "bar"
+ SEMICOLON ";"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rs
new file mode 100644
index 000000000..05a6aff83
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rs
@@ -0,0 +1,2 @@
+use foo;
+use ::bar; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rast
new file mode 100644
index 000000000..ddadec817
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rs
new file mode 100644
index 000000000..1e71b7a6c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rs
@@ -0,0 +1,2 @@
+use ::foo::bar::baz;
+use foo::bar::baz;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rast
new file mode 100644
index 000000000..dbb9bc54d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rast
@@ -0,0 +1,61 @@
+SOURCE_FILE
+ FN
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Ignore"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ MODULE
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "path"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "\"a.rs\""
+ R_BRACK "]"
+ WHITESPACE "\n"
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rs
new file mode 100644
index 000000000..6f04cb171
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rs
@@ -0,0 +1,6 @@
+#[cfg(test)]
+#[Ignore]
+fn foo() {}
+
+#[path = "a.rs"]
+mod b;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast
new file mode 100644
index 000000000..a95bc2301
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast
@@ -0,0 +1,133 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ MACRO_DEF
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ TOKEN_TREE
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ R_PAREN ")"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ R_PAREN ")"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ IN_KW "in"
+ WHITESPACE " "
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ R_PAREN ")"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "e"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rs
new file mode 100644
index 000000000..129d486fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rs
@@ -0,0 +1,6 @@
+fn a() {}
+pub fn b() {}
+pub macro m($:ident) {}
+pub(crate) fn c() {}
+pub(super) fn d() {}
+pub(in foo::bar::baz) fn e() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rast
new file mode 100644
index 000000000..8a0149cac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rs
new file mode 100644
index 000000000..9d9eb9917
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rs
@@ -0,0 +1,2 @@
+use self::foo;
+use super::super::bar;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rast
new file mode 100644
index 000000000..b37edc365
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rast
@@ -0,0 +1,95 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ COLON2 "::"
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ USE_TREE_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "c"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rs
new file mode 100644
index 000000000..5e4aa3a33
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rs
@@ -0,0 +1,7 @@
+use *;
+use ::*;
+use ::{};
+use {};
+use foo::*;
+use foo::{};
+use ::foo::{a, b, c};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rast
new file mode 100644
index 000000000..ddf8aad6f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rast
@@ -0,0 +1,65 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ STAR "*"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ COLON2 "::"
+ STAR "*"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "x"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs
new file mode 100644
index 000000000..46a0783a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs
@@ -0,0 +1,2 @@
+use foo as bar;
+use foo::{a as b, *, ::*, ::foo as x};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rast
new file mode 100644
index 000000000..eb2724e2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rast
@@ -0,0 +1,93 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "D"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE "\n "
+ RECORD_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "E"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "y"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rs
new file mode 100644
index 000000000..69638350c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rs
@@ -0,0 +1,10 @@
+struct A;
+struct B {}
+struct C();
+
+struct D {
+ a: u32,
+ pub b: u32
+}
+
+struct E(pub x, y,);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rast
new file mode 100644
index 000000000..7c914e254
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ FN
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "a"
+ COMMA ","
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rs
new file mode 100644
index 000000000..fe0a7bb97
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rs
@@ -0,0 +1,2 @@
+#[foo(a,)]
+fn foo() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rast
new file mode 100644
index 000000000..11ebc7efb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rast
@@ -0,0 +1,274 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S1"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S2"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S3"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "u"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S4"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S5"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S6"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S7"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S8"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S9"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S10"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S11"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ WHITESPACE " "
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S12"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ PLUS "+"
+ COMMA ","
+ WHITESPACE " "
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ COMMA ","
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S13"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S14"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S15"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rs
new file mode 100644
index 000000000..88c544923
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rs
@@ -0,0 +1,17 @@
+struct S1<T>;
+struct S2<T>(u32);
+struct S3<T> { u: u32 }
+
+struct S4<>;
+struct S5<'a>;
+struct S6<'a:>;
+struct S7<'a: 'b>;
+struct S8<'a: 'b + >;
+struct S9<'a: 'b + 'c>;
+struct S10<'a,>;
+struct S11<'a, 'b>;
+struct S12<'a: 'b+, 'b: 'c,>;
+
+struct S13<T>;
+struct S14<T, U>;
+struct S15<'a, T, U>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rast
new file mode 100644
index 000000000..dd47e3aa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rast
@@ -0,0 +1,155 @@
+SOURCE_FILE
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E1"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E2"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E3"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "X"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E4"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "X"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E5"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "A"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE "\n "
+ RECORD_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f64"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "D"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "E"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rs
new file mode 100644
index 000000000..7a1afa0e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rs
@@ -0,0 +1,25 @@
+enum E1 {
+}
+
+enum E2<T> {
+}
+
+enum E3 {
+ X
+}
+
+enum E4 {
+ X,
+}
+
+enum E5 {
+ A,
+ B = 92,
+ C {
+ a: u32,
+ pub b: f64,
+ },
+ F {},
+ D(u32,),
+ E(),
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rast
new file mode 100644
index 000000000..043a966ff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rast
@@ -0,0 +1,283 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ TYPE_BOUND_LIST
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "D"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "E"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ WHITESPACE " "
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "G"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "H"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "I"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ TYPE_BOUND_LIST
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ COLON ":"
+ TYPE_BOUND_LIST
+ COMMA ","
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "K"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ COMMA ","
+ WHITESPACE " "
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ SEMICOLON ";"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rs
new file mode 100644
index 000000000..712898978
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rs
@@ -0,0 +1,10 @@
+struct A<T>;
+struct B<T:>;
+struct C<T: 'a>;
+struct D<T: 'a + >;
+struct E<T: 'a + 'd >;
+struct F<T: 'a + 'd + Clone>;
+struct G<T: Clone + Copy>;
+struct H<T: ::Foo + self::Bar + 'a>;
+struct I<T:, U:,>;
+struct K<'a: 'd, 'd: 'a + 'b, T: 'a + 'd + Clone>; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rast
new file mode 100644
index 000000000..ef2fb66dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rs
new file mode 100644
index 000000000..f5fe0e6ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rs
@@ -0,0 +1,5 @@
+extern {
+}
+
+extern "C" {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rast
new file mode 100644
index 000000000..b164e828e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ STATIC
+ STATIC_KW "static"
+ WHITESPACE " "
+ NAME
+ IDENT "FOO"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STATIC
+ STATIC_KW "static"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "BAR"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rs
new file mode 100644
index 000000000..5fb92ce33
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rs
@@ -0,0 +1,2 @@
+static FOO: u32 = 1;
+static mut BAR: i32 = 92;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rast
new file mode 100644
index 000000000..40b9ef804
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rast
@@ -0,0 +1 @@
+SOURCE_FILE
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rs
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rast
new file mode 100644
index 000000000..9c5f5ac64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rs
new file mode 100644
index 000000000..289809809
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rs
@@ -0,0 +1,3 @@
+fn main() {
+ extern fn f() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rast
new file mode 100644
index 000000000..ca9a3df86
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rs
new file mode 100644
index 000000000..7641a3d28
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rs
@@ -0,0 +1,3 @@
+fn main() {
+ const fn f() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rast
new file mode 100644
index 000000000..88ebd1095
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ BLOCK_EXPR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rs
new file mode 100644
index 000000000..f3c5ff938
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rs
@@ -0,0 +1,4 @@
+fn main() {
+ unsafe fn f() {}
+ unsafe { 92 }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rast
new file mode 100644
index 000000000..ae08c0756
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rast
@@ -0,0 +1,186 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "binding_power"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ STAR "*"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ WHITESPACE " "
+ PERCENT "%"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "4"
+ WHITESPACE " "
+ MINUS "-"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "5"
+ WHITESPACE " "
+ SLASH "/"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "6"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ STAR "*"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ SHL "<<"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ AMP "&"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ SHR ">>"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ CARET "^"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ AMP "&"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ CARET "^"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ COMMENT "//1 || 2 && 2;"
+ WHITESPACE "\n "
+ COMMENT "//1 .. 2 || 3;"
+ WHITESPACE "\n "
+ COMMENT "//1 = 2 .. 3;"
+ WHITESPACE "\n "
+ COMMENT "//---&*1 - --2 * 9;"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rs
new file mode 100644
index 000000000..cc9598470
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rs
@@ -0,0 +1,14 @@
+fn binding_power() {
+ let x = 1 + 2 * 3 % 4 - 5 / 6;
+ 1 + 2 * 3;
+ 1 << 2 + 3;
+ 1 & 2 >> 3;
+ 1 ^ 2 & 3;
+ 1 | 2 ^ 3;
+ 1 == 2 | 3;
+ 1 && 2 == 3;
+ //1 || 2 && 2;
+ //1 .. 2 || 3;
+ //1 = 2 .. 3;
+ //---&*1 - --2 * 9;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rast
new file mode 100644
index 000000000..5acc54e71
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rast
@@ -0,0 +1,152 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RANGE_EXPR
+ DOT2 ".."
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ RANGE_EXPR
+ DOT2 ".."
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "z"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ FALSE_KW "false"
+ DOT2 ".."
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n \n "
+ EXPR_STMT
+ RANGE_EXPR
+ DOT2EQ "..="
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ RANGE_EXPR
+ DOT2EQ "..="
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "z"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ FALSE_KW "false"
+ DOT2EQ "..="
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs
new file mode 100644
index 000000000..f9ff444d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs
@@ -0,0 +1,11 @@
+fn foo() {
+ ..1 + 1;
+ ..z = 2;
+ x = false..1 == 1;
+ let x = 1..;
+
+ ..=1 + 1;
+ ..=z = 2;
+ x = false..=1 == 1;
+ let x = 1..;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rast
new file mode 100644
index 000000000..44211c7c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rast
@@ -0,0 +1,64 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ CHAR "'c'u32"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "\"string\"invalid"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE "b'b'_suff"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE_STRING "b\"bs\"invalid"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rs
new file mode 100644
index 000000000..261aad1fb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let _ = 'c'u32;
+ let _ = "string"invalid;
+ let _ = b'b'_suff;
+ let _ = b"bs"invalid;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rast
new file mode 100644
index 000000000..44423581e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rast
@@ -0,0 +1,61 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Runnable"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "handler"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "TraitWithExpr"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "fn_with_expr"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ ARRAY_TYPE
+ L_BRACK "["
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rs
new file mode 100644
index 000000000..ac30843ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rs
@@ -0,0 +1,7 @@
+trait Runnable {
+ fn handler();
+}
+
+trait TraitWithExpr {
+ fn fn_with_expr(x: [i32; 1]);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rast
new file mode 100644
index 000000000..70b527808
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rast
@@ -0,0 +1,973 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "socket"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "domain"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "ty"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "protocol"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bind"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "fd"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addr"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "connect"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "listen"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "backlog"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "getsockname"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address_len"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "getsockopt"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "sockfd"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "level"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "optname"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "optval"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "optlen"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "setsockopt"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "level"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "name"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "value"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "option_len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "getpeername"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address_len"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "sendto"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "buf"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "size_t"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "flags"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addr"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addrlen"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "ssize_t"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "send"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "buf"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "size_t"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "flags"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "ssize_t"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "recvfrom"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "buf"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "size_t"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "flags"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addr"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addrlen"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "ssize_t"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "recv"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "buf"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "size_t"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "flags"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "ssize_t"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rs
new file mode 100644
index 000000000..b33ac273c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rs
@@ -0,0 +1,29 @@
+extern {
+ pub fn socket(domain: ::c_int, ty: ::c_int, protocol: ::c_int) -> ::c_int;
+ pub fn bind(fd: ::c_int, addr: *const sockaddr, len: socklen_t) -> ::c_int;
+ pub fn connect(socket: ::c_int, address: *const sockaddr,
+ len: socklen_t) -> ::c_int;
+ pub fn listen(socket: ::c_int, backlog: ::c_int) -> ::c_int;
+ pub fn getsockname(socket: ::c_int, address: *mut sockaddr,
+ address_len: *mut socklen_t) -> ::c_int;
+ pub fn getsockopt(sockfd: ::c_int,
+ level: ::c_int,
+ optname: ::c_int,
+ optval: *mut ::c_void,
+ optlen: *mut ::socklen_t) -> ::c_int;
+ pub fn setsockopt(socket: ::c_int, level: ::c_int, name: ::c_int,
+ value: *const ::c_void,
+ option_len: socklen_t) -> ::c_int;
+ pub fn getpeername(socket: ::c_int, address: *mut sockaddr,
+ address_len: *mut socklen_t) -> ::c_int;
+ pub fn sendto(socket: ::c_int, buf: *const ::c_void, len: ::size_t,
+ flags: ::c_int, addr: *const sockaddr,
+ addrlen: socklen_t) -> ::ssize_t;
+ pub fn send(socket: ::c_int, buf: *const ::c_void, len: ::size_t,
+ flags: ::c_int) -> ::ssize_t;
+ pub fn recvfrom(socket: ::c_int, buf: *mut ::c_void, len: ::size_t,
+ flags: ::c_int, addr: *mut ::sockaddr,
+ addrlen: *mut ::socklen_t) -> ::ssize_t;
+ pub fn recv(socket: ::c_int, buf: *mut ::c_void, len: ::size_t,
+ flags: ::c_int) -> ::ssize_t;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast
new file mode 100644
index 000000000..86f6af97c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast
@@ -0,0 +1,93 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "test_serialization"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "SER"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "SER"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Serialize"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'de"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Deserialize"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'de"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "PartialEq"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "fmt"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Debug"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rs
new file mode 100644
index 000000000..588170fbe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rs
@@ -0,0 +1,4 @@
+fn test_serialization<SER>()
+where
+ SER: Serialize + for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug,
+{}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rast
new file mode 100644
index 000000000..df1acd6b8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rast
@@ -0,0 +1,223 @@
+SOURCE_FILE
+ FN
+ COMMENT "// format with label break value."
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'empty_block"
+ COLON ":"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ COLON ":"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "do_thing"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "condition_not_met"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "do_next_thing"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "condition_not_met"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "do_last_thing"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "result"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ COLON ":"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ COMMENT "// comment"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ COMMENT "/* comment */"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LITERAL
+ INT_NUMBER "3"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rs
new file mode 100644
index 000000000..728d78137
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rs
@@ -0,0 +1,28 @@
+// format with label break value.
+fn main() {
+ 'empty_block: {}
+
+ 'block: {
+ do_thing();
+ if condition_not_met() {
+ break 'block;
+ }
+ do_next_thing();
+ if condition_not_met() {
+ break 'block;
+ }
+ do_last_thing();
+ }
+
+ let result = 'block: {
+ if foo() {
+ // comment
+ break 'block 1;
+ }
+ if bar() {
+ /* comment */
+ break 'block 2;
+ }
+ 3
+ };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rast
new file mode 100644
index 000000000..2b3b86ebf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "make_query"
+ ARG_LIST
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "module_map"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "module_tree"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rs
new file mode 100644
index 000000000..f1ed30220
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rs
@@ -0,0 +1,3 @@
+fn main() {
+ make_query(crate::module_map::module_tree);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rast
new file mode 100644
index 000000000..318d492ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rast
@@ -0,0 +1,2339 @@
+SOURCE_FILE
+ COMMENT "//! Adapted from a `rustc` test, which can be found at "
+ WHITESPACE "\n"
+ COMMENT "//! https://github.com/rust-lang/rust/blob/6d34ec18c7d7e574553f6347ecf08e1e1c45c13d/src/test/run-pass/weird-exprs.rs."
+ WHITESPACE "\n"
+ COMMENT "//! "
+ WHITESPACE "\n"
+ COMMENT "//! Reported to rust-analyzer in https://github.com/rust-lang/rust-analyzer/issues/290"
+ WHITESPACE "\n\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "non_camel_case_types"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "dead_code"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "unreachable_code"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "unused_parens"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "recursion_limit"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "\"128\""
+ R_BRACK "]"
+ WHITESPACE "\n\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cell"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Cell"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "mem"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "swap"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ COMMENT "// Just a grab bag of stuff that you wouldn't want to actually write."
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "strange"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "funny"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "_x"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "what"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "the"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Cell"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ PREFIX_EXPR
+ BANG "!"
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "get"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "set"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ TRUE_KW "true"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Cell"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "new"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ FALSE_KW "false"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "dont"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "the"
+ ARG_LIST
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ R_PAREN ")"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "dont"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "i"
+ DOT "."
+ IDENT "get"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "zombiejesus"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "notsure"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "_x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "isize"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "_y"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "_z"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ L_ANGLE "<"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ PLUSEQ "+="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_b"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "swap"
+ ARG_LIST
+ L_PAREN "("
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_y"
+ COMMA ","
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_z"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "swap"
+ ARG_LIST
+ L_PAREN "("
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_y"
+ COMMA ","
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_z"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "canttouchthis"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "p"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_PAREN "("
+ TRUE_KW "true"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "p"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_c"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "p"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "println"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"{}\""
+ COMMA ","
+ WHITESPACE " "
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "angrydome"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ PLUSEQ "+="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ CONTINUE_EXPR
+ CONTINUE_KW "continue"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE " "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "panic"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"wat\""
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "evil_lincoln"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_evil"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "println"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"lincoln\""
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "dots"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert_eq"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "String"
+ COLON ":"
+ COLON ":"
+ IDENT "from"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"..................................................\""
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ IDENT "format"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"{:?}\""
+ COMMA ","
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE "\n "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "u8"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "u8"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ NEQ "!="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0u8"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert_eq"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "8u8"
+ COMMA ","
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IDENT "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ IDENT "u8"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MOD_KW "mod"
+ WHITESPACE " "
+ IDENT "u8"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ IDENT "u8"
+ L_ANGLE "<"
+ LIFETIME_IDENT "'u8"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME_IDENT "'u8"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LIFETIME_IDENT "'u8"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "u8"
+ COLON ":"
+ WHITESPACE " "
+ AMP "&"
+ LIFETIME_IDENT "'u8"
+ WHITESPACE " "
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ MINUS "-"
+ R_ANGLE ">"
+ WHITESPACE " "
+ AMP "&"
+ LIFETIME_IDENT "'u8"
+ WHITESPACE " "
+ IDENT "u8"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ STRING "\"u8\""
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ IDENT "u8"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ IDENT "u8"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "u8"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_KW "let"
+ WHITESPACE " "
+ AMP "&"
+ IDENT "u8"
+ COLON ":"
+ WHITESPACE " "
+ AMP "&"
+ IDENT "u8"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ IDENT "u8"
+ COLON ":"
+ COLON ":"
+ IDENT "u8"
+ TOKEN_TREE
+ L_PAREN "("
+ AMP "&"
+ INT_NUMBER "8u8"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ CRATE_KW "crate"
+ COLON ":"
+ COLON ":"
+ IDENT "u8"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "0u8"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ IDENT "u8"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "fishy"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert_eq"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "String"
+ COLON ":"
+ COLON ":"
+ IDENT "from"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"><>\""
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ IDENT "String"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ R_ANGLE ">"
+ COLON ":"
+ COLON ":"
+ IDENT "from"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"><>\""
+ R_PAREN ")"
+ DOT "."
+ IDENT "chars"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ DOT "."
+ IDENT "rev"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ DOT "."
+ IDENT "collect"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ IDENT "String"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "union"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ UNION
+ UNION_KW "union"
+ WHITESPACE " "
+ NAME
+ IDENT "union"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'union"
+ R_ANGLE ">"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "union"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'union"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "union"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'union"
+ R_ANGLE ">"
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "special_characters"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "val"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PREFIX_EXPR
+ BANG "!"
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ CALL_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ TUPLE_PAT
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ COLON ":"
+ TUPLE_TYPE
+ L_PAREN "("
+ INFER_TYPE
+ UNDERSCORE "_"
+ COMMA ","
+ INFER_TYPE
+ UNDERSCORE "_"
+ R_PAREN ")"
+ COMMA ","
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "__"
+ AT "@"
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ PIPE "|"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "__"
+ R_PAREN ")"
+ ARG_LIST
+ L_PAREN "("
+ TUPLE_EXPR
+ L_PAREN "("
+ REF_EXPR
+ AMP "&"
+ PREFIX_EXPR
+ STAR "*"
+ LITERAL
+ STRING "\"\\\\\""
+ COMMA ","
+ LITERAL
+ CHAR "'🤔'"
+ R_PAREN ")"
+ COMMENT "/**/"
+ COMMA ","
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ EQ2 "=="
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ EXPR_STMT
+ REF_EXPR
+ AMP "&"
+ INDEX_EXPR
+ ARRAY_EXPR
+ L_BRACK "["
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ R_BRACK "]"
+ L_BRACK "["
+ RANGE_EXPR
+ DOT2 ".."
+ R_BRACK "]"
+ SEMICOLON ";"
+ R_CURLY "}"
+ R_PAREN ")"
+ COMMENT "//"
+ WHITESPACE "\n "
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ BANG "!"
+ IDENT "val"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "punch_card"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "fmt"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Debug"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "ktulhu"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ EXPR_STMT
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "strange"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "funny"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "what"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "zombiejesus"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "notsure"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "canttouchthis"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "angrydome"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "evil_lincoln"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "dots"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "8u8"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "fishy"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "union"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "special_characters"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "punch_card"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "ktulhu"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rs
new file mode 100644
index 000000000..fb7d706b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rs
@@ -0,0 +1,154 @@
+//! Adapted from a `rustc` test, which can be found at
+//! https://github.com/rust-lang/rust/blob/6d34ec18c7d7e574553f6347ecf08e1e1c45c13d/src/test/run-pass/weird-exprs.rs.
+//!
+//! Reported to rust-analyzer in https://github.com/rust-lang/rust-analyzer/issues/290
+
+#![allow(non_camel_case_types)]
+#![allow(dead_code)]
+#![allow(unreachable_code)]
+#![allow(unused_parens)]
+
+#![recursion_limit = "128"]
+
+use std::cell::Cell;
+use std::mem::swap;
+
+// Just a grab bag of stuff that you wouldn't want to actually write.
+
+fn strange() -> bool { let _x: bool = return true; }
+
+fn funny() {
+ fn f(_x: ()) { }
+ f(return);
+}
+
+fn what() {
+ fn the(x: &Cell<bool>) {
+ return while !x.get() { x.set(true); };
+ }
+ let i = &Cell::new(false);
+ let dont = {||the(i)};
+ dont();
+ assert!((i.get()));
+}
+
+fn zombiejesus() {
+ loop {
+ while (return) {
+ if (return) {
+ match (return) {
+ 1 => {
+ if (return) {
+ return
+ } else {
+ return
+ }
+ }
+ _ => { return }
+ };
+ } else if (return) {
+ return;
+ }
+ }
+ if (return) { break; }
+ }
+}
+
+fn notsure() {
+ let mut _x: isize;
+ let mut _y = (_x = 0) == (_x = 0);
+ let mut _z = (_x = 0) < (_x = 0);
+ let _a = (_x += 0) == (_x = 0);
+ let _b = swap(&mut _y, &mut _z) == swap(&mut _y, &mut _z);
+}
+
+fn canttouchthis() -> usize {
+ fn p() -> bool { true }
+ let _a = (assert!((true)) == (assert!(p())));
+ let _c = (assert!((p())) == ());
+ let _b: bool = (println!("{}", 0) == (return 0));
+}
+
+fn angrydome() {
+ loop { if break { } }
+ let mut i = 0;
+ loop { i += 1; if i == 1 { match (continue) { 1 => { }, _ => panic!("wat") } }
+ break; }
+}
+
+fn evil_lincoln() { let _evil = println!("lincoln"); }
+
+fn dots() {
+ assert_eq!(String::from(".................................................."),
+ format!("{:?}", .. .. .. .. .. .. .. .. .. .. .. .. ..
+ .. .. .. .. .. .. .. .. .. .. .. ..));
+}
+
+fn u8(u8: u8) {
+ if u8 != 0u8 {
+ assert_eq!(8u8, {
+ macro_rules! u8 {
+ (u8) => {
+ mod u8 {
+ pub fn u8<'u8: 'u8 + 'u8>(u8: &'u8 u8) -> &'u8 u8 {
+ "u8";
+ u8
+ }
+ }
+ };
+ }
+
+ u8!(u8);
+ let &u8: &u8 = u8::u8(&8u8);
+ crate::u8(0u8);
+ u8
+ });
+ }
+}
+
+fn fishy() {
+ assert_eq!(String::from("><>"),
+ String::<>::from::<>("><>").chars::<>().rev::<>().collect::<String>());
+}
+
+fn union() {
+ union union<'union> { union: &'union union<'union>, }
+}
+
+fn special_characters() {
+ let val = !((|(..):(_,_),__@_|__)((&*"\\",'🤔')/**/,{})=={&[..=..][..];})//
+ ;
+ assert!(!val);
+}
+
+fn punch_card() -> impl std::fmt::Debug {
+ ..=..=.. .. .. .. .. .. .. .. .. .. .. ..=.. ..
+ ..=.. ..=.. .. .. .. .. .. .. .. .. ..=..=..=..
+ ..=.. ..=.. ..=.. ..=.. .. ..=..=.. .. ..=.. ..
+ ..=..=.. .. ..=.. ..=.. ..=.. .. .. .. ..=.. ..
+ ..=.. ..=.. ..=.. ..=.. .. ..=.. .. .. ..=.. ..
+ ..=.. ..=.. ..=.. ..=.. .. .. ..=.. .. ..=.. ..
+ ..=.. ..=.. .. ..=..=.. ..=..=.. .. .. ..=.. ..
+}
+
+fn ktulhu() {
+ ;;;();;;;;;;;;()
+}
+
+pub fn main() {
+ strange();
+ funny();
+ what();
+ zombiejesus();
+ notsure();
+ canttouchthis();
+ angrydome();
+ evil_lincoln();
+ dots();
+ u8(8u8);
+ fishy();
+ union();
+ special_characters();
+ punch_card();
+ ktulhu();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rast
new file mode 100644
index 000000000..9382020e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rast
@@ -0,0 +1,93 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/311"
+ WHITESPACE "\n\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "S"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "String"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Eq"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ METHOD_CALL_EXPR
+ LITERAL
+ STRING "\"\""
+ DOT "."
+ NAME_REF
+ IDENT "to_owned"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rs
new file mode 100644
index 000000000..f8a085dc7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rs
@@ -0,0 +1,8 @@
+// https://github.com/rust-lang/rust-analyzer/issues/311
+
+pub fn foo<S: Iterator>() -> String
+where
+ <S as Iterator>::Item: Eq,
+{
+ "".to_owned()
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rast
new file mode 100644
index 000000000..b4a3fc629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/357"
+ WHITESPACE "\n\n"
+ COMMENT "//! docs"
+ WHITESPACE "\n"
+ MODULE
+ COMMENT "// non-docs"
+ WHITESPACE "\n"
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rs
new file mode 100644
index 000000000..05f6cf05c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rs
@@ -0,0 +1,5 @@
+// https://github.com/rust-lang/rust-analyzer/issues/357
+
+//! docs
+// non-docs
+mod foo {} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rast
new file mode 100644
index 000000000..e89763042
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "test"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rs
new file mode 100644
index 000000000..8bfc341a5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rs
@@ -0,0 +1 @@
+fn test() where (u64, u64): Foo {} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rast
new file mode 100644
index 000000000..2eeed781c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "r#foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rs
new file mode 100644
index 000000000..8380d1e79
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rs
@@ -0,0 +1,2 @@
+fn r#foo() {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rast
new file mode 100644
index 000000000..ceb918420
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "r#foo"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rs
new file mode 100644
index 000000000..098a60a72
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rs
@@ -0,0 +1,3 @@
+struct S {
+ r#foo: u32
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rast
new file mode 100644
index 000000000..dacf0ce74
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "r#struct"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "r#trait"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "r#struct"
+ WHITESPACE " "
+ STAR "*"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rs
new file mode 100644
index 000000000..d59a6d347
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rs
@@ -0,0 +1 @@
+fn foo() { let r#struct = 92; let r#trait = r#struct * 2; } \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast
new file mode 100644
index 000000000..a536b0e88
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast
@@ -0,0 +1,127 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/596"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "unimplemented"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "baz"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ ARG_LIST
+ L_PAREN "("
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rs
new file mode 100644
index 000000000..09b18982e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rs
@@ -0,0 +1,15 @@
+// https://github.com/rust-lang/rust-analyzer/issues/596
+
+struct Foo;
+
+impl Foo {
+ fn bar() -> bool {
+ unimplemented!()
+ }
+}
+
+fn baz(_: bool) {}
+
+fn main() {
+ baz(<Foo>::bar())
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rast
new file mode 100644
index 000000000..3b02c3f96
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rast
@@ -0,0 +1,110 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/674"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "Repr"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "raw"
+ COLON ":"
+ WHITESPACE " "
+ ARRAY_TYPE
+ L_BRACK "["
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "abc"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ INDEX_EXPR
+ FIELD_EXPR
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Repr"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "raw"
+ COLON ":"
+ WHITESPACE " "
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "0"
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ DOT "."
+ NAME_REF
+ IDENT "raw"
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "0"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Repr"
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "raw"
+ COLON ":"
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "0"
+ R_BRACK "]"
+ R_CURLY "}"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rs
new file mode 100644
index 000000000..961dc8c7d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rs
@@ -0,0 +1,8 @@
+// https://github.com/rust-lang/rust-analyzer/issues/674
+
+struct Repr { raw: [u8; 1] }
+
+fn abc() {
+ Repr { raw: [0] }.raw[0] = 0;
+ Repr{raw:[0]}();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rast
new file mode 100644
index 000000000..f3c20337e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rast
@@ -0,0 +1,77 @@
+SOURCE_FILE
+ FN
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/677"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"backtrace\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "exit_code"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "panic"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "catch_unwind"
+ ARG_LIST
+ L_PAREN "("
+ CLOSURE_EXPR
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "main"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rs
new file mode 100644
index 000000000..7d1524879
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rs
@@ -0,0 +1,5 @@
+// https://github.com/rust-lang/rust-analyzer/issues/677
+fn main() {
+ #[cfg(feature = "backtrace")]
+ let exit_code = panic::catch_unwind(move || main());
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rast
new file mode 100644
index 000000000..bef138071
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rast
@@ -0,0 +1,230 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "inner"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Inner attributes allowed here\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ COMMENT "//! As are ModuleDoc style comments"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Inner attributes are allowed in blocks used as statements\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Being validated is not affected by duplcates\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ COMMENT "//! As are ModuleDoc style comments"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Inner attributes are allowed in blocks when they are the last statement of another block\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ COMMENT "//! As are ModuleDoc style comments"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "outer"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Outer attributes are always allowed\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/689"
+ WHITESPACE "\n"
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Whatever"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "salsa_event"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ NAME
+ SELF_KW "self"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "event_fn"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Event"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "unused_variables"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ COMMENT "// this is `inner_attr` of the block"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rs
new file mode 100644
index 000000000..f16c4566e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rs
@@ -0,0 +1,24 @@
+fn inner() {
+ #![doc("Inner attributes allowed here")]
+ //! As are ModuleDoc style comments
+ {
+ #![doc("Inner attributes are allowed in blocks used as statements")]
+ #![doc("Being validated is not affected by duplcates")]
+ //! As are ModuleDoc style comments
+ };
+ {
+ #![doc("Inner attributes are allowed in blocks when they are the last statement of another block")]
+ //! As are ModuleDoc style comments
+ }
+}
+
+fn outer() {
+ let _ = #[doc("Outer attributes are always allowed")] {};
+}
+
+// https://github.com/rust-lang/rust-analyzer/issues/689
+impl Whatever {
+ fn salsa_event(&self, event_fn: impl Fn() -> Event<Self>) {
+ #![allow(unused_variables)] // this is `inner_attr` of the block
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rast
new file mode 100644
index 000000000..4eb51cfdf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ COMMENT "//! This is a doc comment"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"This is also a doc comment\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rs
new file mode 100644
index 000000000..fe67e2df4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rs
@@ -0,0 +1,4 @@
+extern "C" {
+ //! This is a doc comment
+ #![doc("This is also a doc comment")]
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rast
new file mode 100644
index 000000000..c7eb3687d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rast
@@ -0,0 +1,323 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/972"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ TUPLE_EXPR
+ L_PAREN "("
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ TUPLE_PAT
+ L_PAREN "("
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ ARG_LIST
+ L_PAREN "("
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ L_PAREN "("
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "B"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ COMMA ","
+ WHITESPACE " "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ RANGE_PAT
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "128"
+ DOT2EQ "..="
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "127"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rs
new file mode 100644
index 000000000..13dc46afa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rs
@@ -0,0 +1,27 @@
+// https://github.com/rust-lang/rust-analyzer/issues/972
+
+fn main() {
+ match Some(-1) {
+ Some(-1) => (),
+ _ => (),
+ }
+
+ match Some((-1, -1)) {
+ Some((-1, -1)) => (),
+ _ => (),
+ }
+
+ match A::B(-1, -1) {
+ A::B(-1, -1) => (),
+ _ => (),
+ }
+
+ if let Some(-1) = Some(-1) {
+ }
+}
+
+enum A {
+ B(i8, i8)
+}
+
+fn foo(-128..=127: i8) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rast
new file mode 100644
index 000000000..e0f163b1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rast
@@ -0,0 +1,201 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/pull/983"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "compound_assignment"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ PLUSEQ "+="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ MINUSEQ "-="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ STAREQ "*="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ PERCENTEQ "%="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "4"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ SLASHEQ "/="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ PIPEEQ "|="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "6"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ AMPEQ "&="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "7"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ CARETEQ "^="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "8"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ LTEQ "<="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "9"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ GTEQ ">="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "10"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ SHREQ ">>="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "11"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ SHLEQ "<<="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "12"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rs
new file mode 100644
index 000000000..1a6a9bdf5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rs
@@ -0,0 +1,17 @@
+// https://github.com/rust-lang/rust-analyzer/pull/983
+
+fn compound_assignment() {
+ let mut a = 0;
+ a += 1;
+ a -= 2;
+ a *= 3;
+ a %= 4;
+ a /= 5;
+ a |= 6;
+ a &= 7;
+ a ^= 8;
+ a <= 9;
+ a >= 10;
+ a >>= 11;
+ a <<= 12;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rast
new file mode 100644
index 000000000..f376821e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rs
new file mode 100644
index 000000000..4781b3225
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ async {};
+ async move {};
+}
+
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rast
new file mode 100644
index 000000000..53ddf35cc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rast
@@ -0,0 +1,92 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "future"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Future"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Output"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "12"
+ WHITESPACE " "
+ R_CURLY "}"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rs
new file mode 100644
index 000000000..ec4612cff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rs
@@ -0,0 +1,5 @@
+fn foo(x: impl std::future::Future<Output = i32>) {}
+
+fn main() {
+ foo(async move { 12 })
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rast
new file mode 100644
index 000000000..f8b11e778
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rast
@@ -0,0 +1,548 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g1"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr1"
+ R_BRACK "]"
+ WHITESPACE " "
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr2"
+ R_BRACK "]"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "pat"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Type"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g2"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr1"
+ R_BRACK "]"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "printf"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "format"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ DOT3 "..."
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnMut"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_PAREN ")"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ ATTR
+ POUND "#"
+ WHITESPACE " "
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "must_use"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g1"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g2"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g3"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g4"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g5"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Rc"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs
new file mode 100644
index 000000000..de350d858
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs
@@ -0,0 +1,21 @@
+fn g1(#[attr1] #[attr2] pat: Type) {}
+fn g2(#[attr1] x: u8) {}
+
+extern "C" { fn printf(format: *const i8, #[attr] ...) -> i32; }
+
+fn foo<F: FnMut(#[attr] &mut Foo<'a>)>(){}
+
+trait Foo {
+ fn bar(#[attr] _: u64, # [attr] mut x: i32);
+}
+
+impl S {
+ fn f(#[must_use] self) {}
+ fn g1(#[attr] self) {}
+ fn g2(#[attr] &self) {}
+ fn g3<'a>(#[attr] &mut self) {}
+ fn g4<'a>(#[attr] &'a self) {}
+ fn g5<'a>(#[attr] &'a mut self) {}
+ fn c(#[attr] self: Self) {}
+ fn d(#[attr] self: Rc<Self>) {}
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rast
new file mode 100644
index 000000000..0c9dd432f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rast
@@ -0,0 +1,81 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_x"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ R_CURLY "}"
+ R_PAREN ")"
+ DOT "."
+ NAME_REF
+ IDENT "sum"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_ANGLE ">"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rs
new file mode 100644
index 000000000..b51b19630
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ for _x in 0 .. (0 .. {1 + 2}).sum::<u32>() {
+ break;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast
new file mode 100644
index 000000000..b94d43beb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ MACRO_RULES
+ COMMENT "/// Some docs"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "macro_export"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs
new file mode 100644
index 000000000..b59c23c56
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs
@@ -0,0 +1,5 @@
+/// Some docs
+#[macro_export]
+macro_rules! foo {
+ () => {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rast
new file mode 100644
index 000000000..4e1e31f37
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rast
@@ -0,0 +1,126 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnMut"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Y"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnMut"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Y"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rs
new file mode 100644
index 000000000..0d3f5722a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rs
@@ -0,0 +1,5 @@
+fn a() -> Foo<bar::Baz> {}
+
+fn b(_: impl FnMut(x::Y)) {}
+
+fn c(_: impl FnMut(&x::Y)) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rast
new file mode 100644
index 000000000..684f499df
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ R_PAREN ")"
+ COLON ":"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "X"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs
new file mode 100644
index 000000000..cd204f65e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs
@@ -0,0 +1,5 @@
+type X = ();
+
+fn main() {
+ let ():::X = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rast
new file mode 100644
index 000000000..55ce31275
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rast
@@ -0,0 +1,65 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ CAST_EXPR
+ METHOD_CALL_EXPR
+ LITERAL
+ FLOAT_NUMBER "1.0f32"
+ DOT "."
+ NAME_REF
+ IDENT "floor"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ WHITESPACE " "
+ NEQ "!="
+ WHITESPACE " "
+ CAST_EXPR
+ METHOD_CALL_EXPR
+ LITERAL
+ FLOAT_NUMBER "1.0f32"
+ DOT "."
+ NAME_REF
+ IDENT "floor"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rs
new file mode 100644
index 000000000..6210683ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rs
@@ -0,0 +1,3 @@
+fn main() {
+ if 1.0f32.floor() as i64 != 1.0f32.floor() as i64 {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rast
new file mode 100644
index 000000000..67837e475
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rast
@@ -0,0 +1,59 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rs
new file mode 100644
index 000000000..31c12522f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rs
@@ -0,0 +1,5 @@
+fn foo(x: i32) {}
+
+fn main() {
+ foo(loop {});
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rast
new file mode 100644
index 000000000..683d5070a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rast
@@ -0,0 +1,97 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PREFIX_EXPR
+ STAR "*"
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CAST_EXPR
+ PREFIX_EXPR
+ STAR "*"
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PREFIX_EXPR
+ STAR "*"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ REF_EXPR
+ AMP "&"
+ INDEX_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RANGE_EXPR
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rs
new file mode 100644
index 000000000..100fccc64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ 1 + *&2 + 3;
+ *&1 as u64;
+ *x(1);
+ &x[1];
+ -1..2;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rast
new file mode 100644
index 000000000..79bc7f971
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rast
@@ -0,0 +1,100 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ DOT "."
+ NAME_REF
+ IDENT "into_iter"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rs
new file mode 100644
index 000000000..6e8b718aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rs
@@ -0,0 +1,5 @@
+fn main() {
+ Some(for _ in [1].into_iter() {});
+ Some(loop { break; });
+ Some(while true {});
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rast
new file mode 100644
index 000000000..81fc02b6f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rast
@@ -0,0 +1,56 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RANGE_EXPR
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RANGE_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rs
new file mode 100644
index 000000000..f063ffadb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rs
@@ -0,0 +1,4 @@
+fn main() {
+ 0 as usize ..;
+ 1 + 2 as usize ..;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rast
new file mode 100644
index 000000000..2f56e9041
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rast
@@ -0,0 +1,27 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rs
new file mode 100644
index 000000000..2c4ed11e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rs
@@ -0,0 +1,4 @@
+fn main() {
+ match .. {
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast
new file mode 100644
index 000000000..3915ed750
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast
@@ -0,0 +1,177 @@
+SOURCE_FILE
+ MACRO_DEF
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "parse_use_trees"
+ TOKEN_TREE
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "s"
+ COLON ":"
+ IDENT "expr"
+ R_PAREN ")"
+ COMMA ","
+ STAR "*"
+ WHITESPACE " "
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IDENT "vec"
+ BANG "!"
+ TOKEN_TREE
+ L_BRACK "["
+ WHITESPACE "\n "
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "parse_use_tree"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "s"
+ R_PAREN ")"
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ WHITESPACE "\n "
+ R_BRACK "]"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "test"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "test_use_tree_merge"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MACRO_DEF
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "test_merge"
+ TOKEN_TREE
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_BRACK "["
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "input"
+ COLON ":"
+ IDENT "expr"
+ R_PAREN ")"
+ COMMA ","
+ STAR "*"
+ WHITESPACE " "
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_BRACK "]"
+ COMMA ","
+ WHITESPACE " "
+ TOKEN_TREE
+ L_BRACK "["
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "output"
+ COLON ":"
+ IDENT "expr"
+ R_PAREN ")"
+ COMMA ","
+ STAR "*"
+ WHITESPACE " "
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_BRACK "]"
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IDENT "assert_eq"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ WHITESPACE "\n "
+ IDENT "merge_use_trees"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "parse_use_trees"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "input"
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_PAREN ")"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ IDENT "parse_use_trees"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "output"
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rs
new file mode 100644
index 000000000..781047ba1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rs
@@ -0,0 +1,15 @@
+macro parse_use_trees($($s:expr),* $(,)*) {
+ vec![
+ $(parse_use_tree($s),)*
+ ]
+}
+
+#[test]
+fn test_use_tree_merge() {
+ macro test_merge([$($input:expr),* $(,)*], [$($output:expr),* $(,)*]) {
+ assert_eq!(
+ merge_use_trees(parse_use_trees!($($input,)*)),
+ parse_use_trees!($($output,)*),
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rast
new file mode 100644
index 000000000..a86b21d27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rast
@@ -0,0 +1,198 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f1"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f2"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ R_CURLY "}"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f3"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NewType"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NewType"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f4"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_PAT
+ AMP "&"
+ REF_PAT
+ AMP "&"
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rs
new file mode 100644
index 000000000..3b666af8e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rs
@@ -0,0 +1,7 @@
+trait T {
+ fn f1((a, b): (usize, usize)) {}
+ fn f2(S { a, b }: S) {}
+ fn f3(NewType(a): NewType) {}
+ fn f4(&&a: &&usize) {}
+ fn bar(_: u64, mut x: i32);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rast
new file mode 100644
index 000000000..e36399123
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rast
@@ -0,0 +1,134 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ DOT3 "..."
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ DOT3 "..."
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "never"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "w"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "t"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "f"
+ R_BRACK "]"
+ COLON ":"
+ WHITESPACE " "
+ DOT3 "..."
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rs
new file mode 100644
index 000000000..a16afbaf3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rs
@@ -0,0 +1,5 @@
+extern "C" {
+ fn a(_: *mut u8, ...,);
+ fn b(_: *mut u8, _: ...);
+ fn c(_: *mut u8, #[cfg(never)] [w, t, f]: ...,);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rast
new file mode 100644
index 000000000..18cecc810
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rast
@@ -0,0 +1,166 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f1"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f2"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ R_CURLY "}"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f3"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NewType"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NewType"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f4"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_PAT
+ AMP "&"
+ REF_PAT
+ AMP "&"
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rs
new file mode 100644
index 000000000..b49e872d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rs
@@ -0,0 +1,6 @@
+impl U {
+ fn f1((a, b): (usize, usize)) {}
+ fn f2(S { a, b }: S) {}
+ fn f3(NewType(a): NewType) {}
+ fn f4(&&a: &&usize) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rast
new file mode 100644
index 000000000..3ffcb48f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rast
@@ -0,0 +1,17 @@
+SOURCE_FILE
+ FN
+ COMMENT "/// Example"
+ WHITESPACE "\n\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "test"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rs
new file mode 100644
index 000000000..1fafe216b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rs
@@ -0,0 +1,3 @@
+/// Example
+
+fn test() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast
new file mode 100644
index 000000000..ba7b6042a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast
@@ -0,0 +1,61 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs
new file mode 100644
index 000000000..29f3655e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs
@@ -0,0 +1 @@
+fn f<T>() where T: Fn() -> u8 + Send {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rast
new file mode 100644
index 000000000..a4303098a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rast
@@ -0,0 +1,222 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TYPE_ALIAS
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ CONST
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TYPE_ALIAS
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ CONST
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rs
new file mode 100644
index 000000000..e443e3495
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rs
@@ -0,0 +1,16 @@
+trait T {
+ default type T = Bar;
+ default const f: u8 = 0;
+ default fn foo() {}
+ default unsafe fn bar() {}
+}
+
+impl T for Foo {
+ default type T = Bar;
+ default const f: u8 = 0;
+ default fn foo() {}
+ default unsafe fn bar() {}
+}
+
+default impl T for () {}
+default unsafe impl T for () {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
new file mode 100644
index 000000000..136fce93d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
@@ -0,0 +1,413 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_trait"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "str"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_ref"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Debug"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_parens"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "str"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_slice"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ SLICE_TYPE
+ L_BRACK "["
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ R_BRACK "]"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Eq"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_qpath"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "_t"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_for_fn"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rs
new file mode 100644
index 000000000..9058c4619
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rs
@@ -0,0 +1,30 @@
+fn for_trait<F>()
+where
+ for<'a> F: Fn(&'a str),
+{
+}
+fn for_ref<F>()
+where
+ for<'a> &'a F: Debug,
+{
+}
+fn for_parens<F>()
+where
+ for<'a> (&'a F): Fn(&'a str),
+{
+}
+fn for_slice<F>()
+where
+ for<'a> [&'a F]: Eq,
+{
+}
+fn for_qpath<T>(_t: &T)
+where
+ for<'a> <&'a T as Baz>::Foo: Iterator,
+{
+}
+fn for_for_fn<T>()
+where
+ for<'a> for<'b> fn(&'a T, &'b T): Copy,
+{
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rast
new file mode 100644
index 000000000..41fc5691a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rast
@@ -0,0 +1,238 @@
+SOURCE_FILE
+ FN
+ ASYNC_KW "async"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ ASYNC_KW "async"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ TRAIT
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ TRAIT
+ AUTO_KW "auto"
+ WHITESPACE " "
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ TRAIT
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ AUTO_KW "auto"
+ WHITESPACE " "
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ EXTERN_BLOCK
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C++\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs
new file mode 100644
index 000000000..6d27a082c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs
@@ -0,0 +1,18 @@
+async fn foo() {}
+extern fn foo() {}
+const fn foo() {}
+const unsafe fn foo() {}
+unsafe extern "C" fn foo() {}
+unsafe fn foo() {}
+async unsafe fn foo() {}
+const unsafe fn bar() {}
+
+unsafe trait T {}
+auto trait T {}
+unsafe auto trait T {}
+
+unsafe impl Foo {}
+default impl Foo {}
+unsafe default impl Foo {}
+
+unsafe extern "C++" {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rast
new file mode 100644
index 000000000..9e8f4e197
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rast
@@ -0,0 +1,204 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sync"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sync"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'static"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CAST_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ REF_EXPR
+ AMP "&"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ R_PAREN ")"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ PAREN_TYPE
+ L_PAREN "("
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Add"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Other"
+ COMMA ","
+ WHITESPACE " "
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Output"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Addable"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Other"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rs
new file mode 100644
index 000000000..97eb79c48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rs
@@ -0,0 +1,6 @@
+type Foo<'a> = &'a (dyn Send + Sync);
+type Foo = *const (dyn Send + Sync);
+type Foo = fn() -> (dyn Send + 'static);
+fn main() {
+ let b = (&a) as &(dyn Add<Other, Output = Addable> + Other);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rast
new file mode 100644
index 000000000..3d00b27ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rast
@@ -0,0 +1,59 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ TRY_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "lhs"
+ QUESTION "?"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ AWAIT_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "rhs"
+ DOT "."
+ AWAIT_KW "await"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rs
new file mode 100644
index 000000000..d8b7a3832
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rs
@@ -0,0 +1,3 @@
+fn f() {
+ (#[a] lhs? + #[b] rhs.await)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rast
new file mode 100644
index 000000000..1cafc775c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rast
@@ -0,0 +1,72 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_BRACK "]"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ TRY_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_BRACK "]"
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ QUESTION "?"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ REF_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "C"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rs
new file mode 100644
index 000000000..b4d5204bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ #[A] { #[B] bar!()? }
+ #[C] &()
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rast
new file mode 100644
index 000000000..e8b836dfb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rast
@@ -0,0 +1,352 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "0"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ BIN_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ RANGE_EXPR
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ DOT "."
+ WHITESPACE "\n "
+ NAME_REF
+ IDENT "Ok"
+ ARG_LIST
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ ARRAY_EXPR
+ L_BRACK "["
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ TUPLE_EXPR
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ COMMA ","
+ WHITESPACE " "
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rs
new file mode 100644
index 000000000..9d3e86603
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rs
@@ -0,0 +1,14 @@
+fn foo() {
+ let (mut a, mut b) = (0, 1);
+ (b, a, ..) = (a, b);
+ (_) = ..;
+ struct S { a: i32 }
+ S { .. } = S { ..S::default() };
+ Some(..) = Some(0).
+ Ok(_) = 0;
+ let (a, b);
+ [a, .., b] = [1, .., 2];
+ (_, _) = (a, b);
+ (_) = (a, b);
+ _ = (a, b);
+}
diff --git a/src/tools/rust-analyzer/crates/paths/Cargo.toml b/src/tools/rust-analyzer/crates/paths/Cargo.toml
new file mode 100644
index 000000000..5e83de7d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/paths/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "paths"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+# Adding this dep sadly puts a lot of rust-analyzer crates after the
+# serde-derive crate. Even though we don't activate the derive feature here,
+# someone else in the crate graph certainly does!
+# serde = "1"
diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs
new file mode 100644
index 000000000..025093f4a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs
@@ -0,0 +1,299 @@
+//! Thin wrappers around `std::path`, distinguishing between absolute and
+//! relative paths.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ borrow::Borrow,
+ ffi::OsStr,
+ ops,
+ path::{Component, Path, PathBuf},
+};
+
+/// Wrapper around an absolute [`PathBuf`].
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct AbsPathBuf(PathBuf);
+
+impl From<AbsPathBuf> for PathBuf {
+ fn from(AbsPathBuf(path_buf): AbsPathBuf) -> PathBuf {
+ path_buf
+ }
+}
+
+impl ops::Deref for AbsPathBuf {
+ type Target = AbsPath;
+ fn deref(&self) -> &AbsPath {
+ self.as_path()
+ }
+}
+
+impl AsRef<Path> for AbsPathBuf {
+ fn as_ref(&self) -> &Path {
+ self.0.as_path()
+ }
+}
+
+impl AsRef<AbsPath> for AbsPathBuf {
+ fn as_ref(&self) -> &AbsPath {
+ self.as_path()
+ }
+}
+
+impl Borrow<AbsPath> for AbsPathBuf {
+ fn borrow(&self) -> &AbsPath {
+ self.as_path()
+ }
+}
+
+impl TryFrom<PathBuf> for AbsPathBuf {
+ type Error = PathBuf;
+ fn try_from(path_buf: PathBuf) -> Result<AbsPathBuf, PathBuf> {
+ if !path_buf.is_absolute() {
+ return Err(path_buf);
+ }
+ Ok(AbsPathBuf(path_buf))
+ }
+}
+
+impl TryFrom<&str> for AbsPathBuf {
+ type Error = PathBuf;
+ fn try_from(path: &str) -> Result<AbsPathBuf, PathBuf> {
+ AbsPathBuf::try_from(PathBuf::from(path))
+ }
+}
+
+impl PartialEq<AbsPath> for AbsPathBuf {
+ fn eq(&self, other: &AbsPath) -> bool {
+ self.as_path() == other
+ }
+}
+
+impl AbsPathBuf {
+ /// Wrap the given absolute path in `AbsPathBuf`
+ ///
+ /// # Panics
+ ///
+ /// Panics if `path` is not absolute.
+ pub fn assert(path: PathBuf) -> AbsPathBuf {
+ AbsPathBuf::try_from(path)
+ .unwrap_or_else(|path| panic!("expected absolute path, got {}", path.display()))
+ }
+
+ /// Coerces to an `AbsPath` slice.
+ ///
+ /// Equivalent of [`PathBuf::as_path`] for `AbsPathBuf`.
+ pub fn as_path(&self) -> &AbsPath {
+ AbsPath::assert(self.0.as_path())
+ }
+
+ /// Equivalent of [`PathBuf::pop`] for `AbsPathBuf`.
+ ///
+ /// Note that this won't remove the root component, so `self` will still be
+ /// absolute.
+ pub fn pop(&mut self) -> bool {
+ self.0.pop()
+ }
+}
+
+/// Wrapper around an absolute [`Path`].
+#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[repr(transparent)]
+pub struct AbsPath(Path);
+
+impl AsRef<Path> for AbsPath {
+ fn as_ref(&self) -> &Path {
+ &self.0
+ }
+}
+
+impl<'a> TryFrom<&'a Path> for &'a AbsPath {
+ type Error = &'a Path;
+ fn try_from(path: &'a Path) -> Result<&'a AbsPath, &'a Path> {
+ if !path.is_absolute() {
+ return Err(path);
+ }
+ Ok(AbsPath::assert(path))
+ }
+}
+
+impl AbsPath {
+ /// Wrap the given absolute path in `AbsPath`
+ ///
+ /// # Panics
+ ///
+ /// Panics if `path` is not absolute.
+ pub fn assert(path: &Path) -> &AbsPath {
+ assert!(path.is_absolute());
+ unsafe { &*(path as *const Path as *const AbsPath) }
+ }
+
+ /// Equivalent of [`Path::parent`] for `AbsPath`.
+ pub fn parent(&self) -> Option<&AbsPath> {
+ self.0.parent().map(AbsPath::assert)
+ }
+
+ /// Equivalent of [`Path::join`] for `AbsPath`.
+ pub fn join(&self, path: impl AsRef<Path>) -> AbsPathBuf {
+ self.as_ref().join(path).try_into().unwrap()
+ }
+
+ /// Normalize the given path:
+ /// - Removes repeated separators: `/a//b` becomes `/a/b`
+ /// - Removes occurrences of `.` and resolves `..`.
+ /// - Removes trailing slashes: `/a/b/` becomes `/a/b`.
+ ///
+ /// # Example
+ /// ```
+ /// # use paths::AbsPathBuf;
+ /// let abs_path_buf = AbsPathBuf::assert("/a/../../b/.//c//".into());
+ /// let normalized = abs_path_buf.normalize();
+ /// assert_eq!(normalized, AbsPathBuf::assert("/b/c".into()));
+ /// ```
+ pub fn normalize(&self) -> AbsPathBuf {
+ AbsPathBuf(normalize_path(&self.0))
+ }
+
+ /// Equivalent of [`Path::to_path_buf`] for `AbsPath`.
+ pub fn to_path_buf(&self) -> AbsPathBuf {
+ AbsPathBuf::try_from(self.0.to_path_buf()).unwrap()
+ }
+
+ /// Equivalent of [`Path::strip_prefix`] for `AbsPath`.
+ ///
+ /// Returns a relative path.
+ pub fn strip_prefix(&self, base: &AbsPath) -> Option<&RelPath> {
+ self.0.strip_prefix(base).ok().map(RelPath::new_unchecked)
+ }
+ pub fn starts_with(&self, base: &AbsPath) -> bool {
+ self.0.starts_with(&base.0)
+ }
+ pub fn ends_with(&self, suffix: &RelPath) -> bool {
+ self.0.ends_with(&suffix.0)
+ }
+
+ // region:delegate-methods
+
+ // Note that we deliberately don't implement `Deref<Target = Path>` here.
+ //
+ // The problem with `Path` is that it directly exposes convenience IO-ing
+ // methods. For example, `Path::exists` delegates to `fs::metadata`.
+ //
+ // For `AbsPath`, we want to make sure that this is a POD type, and that all
+ // IO goes via `fs`. That way, it becomes easier to mock IO when we need it.
+
+ pub fn file_name(&self) -> Option<&OsStr> {
+ self.0.file_name()
+ }
+ pub fn extension(&self) -> Option<&OsStr> {
+ self.0.extension()
+ }
+ pub fn file_stem(&self) -> Option<&OsStr> {
+ self.0.file_stem()
+ }
+ pub fn as_os_str(&self) -> &OsStr {
+ self.0.as_os_str()
+ }
+ pub fn display(&self) -> std::path::Display<'_> {
+ self.0.display()
+ }
+ #[deprecated(note = "use std::fs::metadata().is_ok() instead")]
+ pub fn exists(&self) -> bool {
+ self.0.exists()
+ }
+ // endregion:delegate-methods
+}
+
+/// Wrapper around a relative [`PathBuf`].
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct RelPathBuf(PathBuf);
+
+impl From<RelPathBuf> for PathBuf {
+ fn from(RelPathBuf(path_buf): RelPathBuf) -> PathBuf {
+ path_buf
+ }
+}
+
+impl ops::Deref for RelPathBuf {
+ type Target = RelPath;
+ fn deref(&self) -> &RelPath {
+ self.as_path()
+ }
+}
+
+impl AsRef<Path> for RelPathBuf {
+ fn as_ref(&self) -> &Path {
+ self.0.as_path()
+ }
+}
+
+impl TryFrom<PathBuf> for RelPathBuf {
+ type Error = PathBuf;
+ fn try_from(path_buf: PathBuf) -> Result<RelPathBuf, PathBuf> {
+ if !path_buf.is_relative() {
+ return Err(path_buf);
+ }
+ Ok(RelPathBuf(path_buf))
+ }
+}
+
+impl TryFrom<&str> for RelPathBuf {
+ type Error = PathBuf;
+ fn try_from(path: &str) -> Result<RelPathBuf, PathBuf> {
+ RelPathBuf::try_from(PathBuf::from(path))
+ }
+}
+
+impl RelPathBuf {
+ /// Coerces to a `RelPath` slice.
+ ///
+ /// Equivalent of [`PathBuf::as_path`] for `RelPathBuf`.
+ pub fn as_path(&self) -> &RelPath {
+ RelPath::new_unchecked(self.0.as_path())
+ }
+}
+
+/// Wrapper around a relative [`Path`].
+#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[repr(transparent)]
+pub struct RelPath(Path);
+
+impl AsRef<Path> for RelPath {
+ fn as_ref(&self) -> &Path {
+ &self.0
+ }
+}
+
+impl RelPath {
+ /// Creates a new `RelPath` from `path`, without checking if it is relative.
+ pub fn new_unchecked(path: &Path) -> &RelPath {
+ unsafe { &*(path as *const Path as *const RelPath) }
+ }
+}
+
+/// Taken from <https://github.com/rust-lang/cargo/blob/79c769c3d7b4c2cf6a93781575b7f592ef974255/src/cargo/util/paths.rs#L60-L85>
+fn normalize_path(path: &Path) -> PathBuf {
+ let mut components = path.components().peekable();
+ let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() {
+ components.next();
+ PathBuf::from(c.as_os_str())
+ } else {
+ PathBuf::new()
+ };
+
+ for component in components {
+ match component {
+ Component::Prefix(..) => unreachable!(),
+ Component::RootDir => {
+ ret.push(component.as_os_str());
+ }
+ Component::CurDir => {}
+ Component::ParentDir => {
+ ret.pop();
+ }
+ Component::Normal(c) => {
+ ret.push(c);
+ }
+ }
+ }
+ ret
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
new file mode 100644
index 000000000..85a1c13fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -0,0 +1,31 @@
+[package]
+name = "proc-macro-api"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+object = { version = "0.29.0", default-features = false, features = [
+ "std",
+ "read_core",
+ "elf",
+ "macho",
+ "pe",
+] }
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = { version = "1.0.81", features = ["unbounded_depth"] }
+tracing = "0.1.35"
+memmap2 = "0.5.4"
+snap = "1.0.5"
+
+paths = { path = "../paths", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+# Intentionally *not* depend on anything salsa-related
+# base-db = { path = "../base-db", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
new file mode 100644
index 000000000..d7010e825
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -0,0 +1,181 @@
+//! Client-side Proc-Macro crate
+//!
+//! We separate proc-macro expanding logic to an extern program to allow
+//! different implementations (e.g. wasm or dylib loading). And this crate
+//! is used to provide basic infrastructure for communication between two
+//! processes: Client (RA itself), Server (the external program)
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub mod msg;
+mod process;
+mod version;
+
+use paths::AbsPathBuf;
+use std::{
+ ffi::OsStr,
+ fmt, io,
+ sync::{Arc, Mutex},
+};
+
+use serde::{Deserialize, Serialize};
+use tt::Subtree;
+
+use crate::{
+ msg::{ExpandMacro, FlatTree, PanicMessage},
+ process::ProcMacroProcessSrv,
+};
+
+pub use version::{read_dylib_info, read_version, RustCInfo};
+
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
+pub enum ProcMacroKind {
+ CustomDerive,
+ FuncLike,
+ Attr,
+}
+
+/// A handle to an external process which load dylibs with macros (.so or .dll)
+/// and runs actual macro expansion functions.
+#[derive(Debug)]
+pub struct ProcMacroServer {
+ /// Currently, the proc macro process expands all procedural macros sequentially.
+ ///
+ /// That means that concurrent salsa requests may block each other when expanding proc macros,
+ /// which is unfortunate, but simple and good enough for the time being.
+ ///
+ /// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here.
+ process: Arc<Mutex<ProcMacroProcessSrv>>,
+}
+
+pub struct MacroDylib {
+ path: AbsPathBuf,
+}
+
+impl MacroDylib {
+ // FIXME: this is buggy due to TOCTOU, we should check the version in the
+ // macro process instead.
+ pub fn new(path: AbsPathBuf) -> io::Result<MacroDylib> {
+ let _p = profile::span("MacroDylib::new");
+
+ let info = version::read_dylib_info(&path)?;
+ if info.version.0 < 1 || info.version.1 < 47 {
+ let msg = format!("proc-macro {} built by {:#?} is not supported by Rust Analyzer, please update your rust version.", path.display(), info);
+ return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
+ }
+
+ Ok(MacroDylib { path })
+ }
+}
+
+/// A handle to a specific macro (a `#[proc_macro]` annotated function).
+///
+/// It exists withing a context of a specific [`ProcMacroProcess`] -- currently
+/// we share a single expander process for all macros.
+#[derive(Debug, Clone)]
+pub struct ProcMacro {
+ process: Arc<Mutex<ProcMacroProcessSrv>>,
+ dylib_path: AbsPathBuf,
+ name: String,
+ kind: ProcMacroKind,
+}
+
+impl Eq for ProcMacro {}
+impl PartialEq for ProcMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.name == other.name
+ && self.kind == other.kind
+ && self.dylib_path == other.dylib_path
+ && Arc::ptr_eq(&self.process, &other.process)
+ }
+}
+
+pub struct ServerError {
+ pub message: String,
+ pub io: Option<io::Error>,
+}
+
+impl fmt::Display for ServerError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.message.fmt(f)?;
+ if let Some(io) = &self.io {
+ f.write_str(": ")?;
+ io.fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+pub struct MacroPanic {
+ pub message: String,
+}
+
+impl ProcMacroServer {
+ /// Spawns an external process as the proc macro server and returns a client connected to it.
+ pub fn spawn(
+ process_path: AbsPathBuf,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+ ) -> io::Result<ProcMacroServer> {
+ let process = ProcMacroProcessSrv::run(process_path, args)?;
+ Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) })
+ }
+
+ pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
+ let _p = profile::span("ProcMacroClient::by_dylib_path");
+ let macros =
+ self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;
+
+ match macros {
+ Ok(macros) => Ok(macros
+ .into_iter()
+ .map(|(name, kind)| ProcMacro {
+ process: self.process.clone(),
+ name,
+ kind,
+ dylib_path: dylib.path.clone(),
+ })
+ .collect()),
+ Err(message) => Err(ServerError { message, io: None }),
+ }
+ }
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+
+ pub fn kind(&self) -> ProcMacroKind {
+ self.kind
+ }
+
+ pub fn expand(
+ &self,
+ subtree: &Subtree,
+ attr: Option<&Subtree>,
+ env: Vec<(String, String)>,
+ ) -> Result<Result<Subtree, PanicMessage>, ServerError> {
+ let current_dir = env
+ .iter()
+ .find(|(name, _)| name == "CARGO_MANIFEST_DIR")
+ .map(|(_, value)| value.clone());
+
+ let task = ExpandMacro {
+ macro_body: FlatTree::new(subtree),
+ macro_name: self.name.to_string(),
+ attributes: attr.map(FlatTree::new),
+ lib: self.dylib_path.to_path_buf().into(),
+ env,
+ current_dir,
+ };
+
+ let request = msg::Request::ExpandMacro(task);
+ let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
+ match response {
+ msg::Response::ExpandMacro(it) => Ok(it.map(FlatTree::to_subtree)),
+ msg::Response::ListMacros { .. } => {
+ Err(ServerError { message: "unexpected response".to_string(), io: None })
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
new file mode 100644
index 000000000..f9c2b9fda
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
@@ -0,0 +1,154 @@
+//! Defines messages for cross-process message passing based on `ndjson` wire protocol
+pub(crate) mod flat;
+
+use std::{
+ io::{self, BufRead, Write},
+ path::PathBuf,
+};
+
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+
+use crate::ProcMacroKind;
+
+pub use crate::msg::flat::FlatTree;
+
+/// Client -> server request over the ndjson wire protocol.
+#[derive(Debug, Serialize, Deserialize)]
+pub enum Request {
+    /// List the proc macros exported by the dylib at `dylib_path`.
+    ListMacros { dylib_path: PathBuf },
+    /// Expand a single macro invocation.
+    ExpandMacro(ExpandMacro),
+}
+
+/// Server -> client response; variants mirror [`Request`].
+#[derive(Debug, Serialize, Deserialize)]
+pub enum Response {
+    /// `Ok`: (name, kind) pairs; `Err`: server-side error message.
+    ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
+    /// Expansion result, or the panic message if the macro panicked.
+    ExpandMacro(Result<FlatTree, PanicMessage>),
+}
+
+/// Panic message captured from a proc macro that panicked while expanding.
+#[derive(Debug, Serialize, Deserialize)]
+pub struct PanicMessage(pub String);
+
+/// Payload of a [`Request::ExpandMacro`]: everything the server needs to
+/// expand one macro invocation.
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpandMacro {
+    /// Argument of macro call.
+    ///
+    /// In custom derive this will be a struct or enum; in attribute-like macro - underlying
+    /// item; in function-like macro - the macro body.
+    pub macro_body: FlatTree,
+
+    /// Name of macro to expand.
+    ///
+    /// In custom derive this is the name of the derived trait (`Serialize`, `Getters`, etc.).
+    /// In attribute-like and function-like macros - single name of macro itself (`show_streams`).
+    pub macro_name: String,
+
+    /// Possible attributes for the attribute-like macros.
+    pub attributes: Option<FlatTree>,
+
+    /// Path of the dylib containing the macro.
+    pub lib: PathBuf,
+
+    /// Environment variables to set during macro expansion.
+    pub env: Vec<(String, String)>,
+
+    /// Working directory to expand in, if known (see `ProcMacro::expand`).
+    pub current_dir: Option<String>,
+}
+
+/// A value that can travel over the ndjson wire: one JSON object per line.
+pub trait Message: Serialize + DeserializeOwned {
+    /// Reads the next message; `Ok(None)` signals that the peer closed the
+    /// stream. `buf` is a scratch buffer reused across calls.
+    fn read(inp: &mut impl BufRead, buf: &mut String) -> io::Result<Option<Self>> {
+        Ok(match read_json(inp, buf)? {
+            None => None,
+            Some(text) => {
+                let mut deserializer = serde_json::Deserializer::from_str(text);
+                // Note that some proc-macro generate very deep syntax tree
+                // We have to disable the current limit of serde here
+                deserializer.disable_recursion_limit();
+                Some(Self::deserialize(&mut deserializer)?)
+            }
+        })
+    }
+    /// Serializes `self` as a single JSON line and flushes it.
+    fn write(self, out: &mut impl Write) -> io::Result<()> {
+        let text = serde_json::to_string(&self)?;
+        write_json(out, &text)
+    }
+}
+
+impl Message for Request {}
+impl Message for Response {}
+
+/// Reads one JSON line from `inp` into `buf`.
+///
+/// Returns `Ok(None)` when `buf` ends up empty after stripping the trailing
+/// newline — i.e. on EOF (read_line wrote nothing) or an empty line.
+/// Non-JSON lines (anything not starting with `{`) are logged and skipped.
+fn read_json<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result<Option<&'a String>> {
+    loop {
+        buf.clear();
+
+        inp.read_line(buf)?;
+        buf.pop(); // Remove trailing '\n'
+
+        if buf.is_empty() {
+            return Ok(None);
+        }
+
+        // Some ill behaved macro try to use stdout for debugging
+        // We ignore it here
+        if !buf.starts_with('{') {
+            tracing::error!("proc-macro tried to print : {}", buf);
+            continue;
+        }
+
+        return Ok(Some(buf));
+    }
+}
+
+/// Writes `msg` as one newline-terminated line and flushes, so the peer
+/// (blocked in `read_json`) sees it immediately.
+fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
+    tracing::debug!("> {}", msg);
+    out.write_all(msg.as_bytes())?;
+    out.write_all(b"\n")?;
+    out.flush()?;
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use tt::*;
+
+    /// Builds a small token tree exercising every leaf kind (ident, literal,
+    /// punct) plus a nested brace subtree.
+    fn fixture_token_tree() -> Subtree {
+        let mut subtree = Subtree::default();
+        subtree
+            .token_trees
+            .push(TokenTree::Leaf(Ident { text: "struct".into(), id: TokenId(0) }.into()));
+        subtree
+            .token_trees
+            .push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into()));
+        subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
+            text: "Foo".into(),
+            id: TokenId::unspecified(),
+        })));
+        subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
+            char: '@',
+            id: TokenId::unspecified(),
+            spacing: Spacing::Joint,
+        })));
+        subtree.token_trees.push(TokenTree::Subtree(Subtree {
+            delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
+            token_trees: vec![],
+        }));
+        subtree
+    }
+
+    /// Round-trips an ExpandMacro task through JSON and checks the token
+    /// tree survives FlatTree encoding + serialization unchanged.
+    #[test]
+    fn test_proc_macro_rpc_works() {
+        let tt = fixture_token_tree();
+        let task = ExpandMacro {
+            macro_body: FlatTree::new(&tt),
+            macro_name: Default::default(),
+            attributes: None,
+            lib: std::env::current_dir().unwrap(),
+            env: Default::default(),
+            current_dir: Default::default(),
+        };
+
+        let json = serde_json::to_string(&task).unwrap();
+        // println!("{}", json);
+        let back: ExpandMacro = serde_json::from_str(&json).unwrap();
+
+        assert_eq!(tt, back.macro_body.to_subtree());
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
new file mode 100644
index 000000000..8437444e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
@@ -0,0 +1,328 @@
+//! Serialization-friendly representation of `tt::Subtree`.
+//!
+//! It is possible to serialize `Subtree` as is, as a tree, but using
+//! arbitrary-nested trees in JSON is problematic, as they can cause the JSON
+//! parser to overflow the stack.
+//!
+//! Additionally, such implementation would be pretty verbose, and we do care
+//! about performance here a bit.
+//!
+//! So what this module does is dumping a `tt::Subtree` into a bunch of flat
+//! array of numbers. See the test in the parent module to get an example
+//! output.
+//!
+//! ```json
+//! {
+//! // Array of subtrees, each subtree is represented by 4 numbers:
+//! // id of delimiter, delimiter kind, index of first child in `token_tree`,
+//! // index of last child in `token_tree`
+//! "subtree":[4294967295,0,0,5,2,2,5,5],
+//! // 2 ints per literal: [token id, index into `text`]
+//! "literal":[4294967295,1],
+//! // 3 ints per punct: [token id, char, spacing]
+//! "punct":[4294967295,64,1],
+//! // 2 ints per ident: [token id, index into `text`]
+//! "ident": [0,0,1,1],
+//! // children of all subtrees, concatenated. Each child is represented as `index << 2 | tag`
+//! // where tag denotes one of subtree, literal, punct or ident.
+//! "token_tree":[3,7,1,4],
+//! // Strings shared by idents and literals
+//! "text": ["struct","Foo"]
+//! }
+//! ```
+//!
+//! We probably should replace most of the code here with bincode someday, but,
+//! as we don't have bincode in Cargo.toml yet, lets stick with serde_json for
+//! the time being.
+
+use std::{
+ collections::{HashMap, VecDeque},
+ convert::TryInto,
+};
+
+use serde::{Deserialize, Serialize};
+use tt::TokenId;
+
+/// Flattened, serialization-friendly form of `tt::Subtree`; see the module
+/// docs for the exact integer layout of each array.
+#[derive(Serialize, Deserialize, Debug)]
+pub struct FlatTree {
+    subtree: Vec<u32>,    // 4 ints per subtree
+    literal: Vec<u32>,    // 2 ints per literal
+    punct: Vec<u32>,      // 3 ints per punct
+    ident: Vec<u32>,      // 2 ints per ident
+    token_tree: Vec<u32>, // children of all subtrees, `index << 2 | tag`
+    text: Vec<String>,    // interned strings shared by idents and literals
+}
+
+/// In-memory form of one subtree entry before/after integer encoding.
+struct SubtreeRepr {
+    id: tt::TokenId,
+    kind: Option<tt::DelimiterKind>,
+    // [first, last) range into `token_tree` for this subtree's children.
+    tt: [u32; 2],
+}
+
+/// One literal: token id + index into the shared `text` table.
+struct LiteralRepr {
+    id: tt::TokenId,
+    text: u32,
+}
+
+/// One punctuation token.
+struct PunctRepr {
+    id: tt::TokenId,
+    char: char,
+    spacing: tt::Spacing,
+}
+
+/// One identifier: token id + index into the shared `text` table.
+struct IdentRepr {
+    id: tt::TokenId,
+    text: u32,
+}
+
+impl FlatTree {
+    /// Flattens `subtree` into integer arrays (see module docs for layout).
+    pub fn new(subtree: &tt::Subtree) -> FlatTree {
+        let mut w = Writer {
+            string_table: HashMap::new(),
+            work: VecDeque::new(),
+
+            subtree: Vec::new(),
+            literal: Vec::new(),
+            punct: Vec::new(),
+            ident: Vec::new(),
+            token_tree: Vec::new(),
+            text: Vec::new(),
+        };
+        w.write(subtree);
+
+        return FlatTree {
+            subtree: write_vec(w.subtree, SubtreeRepr::write),
+            literal: write_vec(w.literal, LiteralRepr::write),
+            punct: write_vec(w.punct, PunctRepr::write),
+            ident: write_vec(w.ident, IdentRepr::write),
+            token_tree: w.token_tree,
+            text: w.text,
+        };
+
+        // Flattens each repr's fixed-size [u32; N] encoding into one vec.
+        fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
+            xs.into_iter().flat_map(f).collect()
+        }
+    }
+
+    /// Inverse of [`FlatTree::new`]: rebuilds the `tt::Subtree`.
+    pub fn to_subtree(self) -> tt::Subtree {
+        return Reader {
+            subtree: read_vec(self.subtree, SubtreeRepr::read),
+            literal: read_vec(self.literal, LiteralRepr::read),
+            punct: read_vec(self.punct, PunctRepr::read),
+            ident: read_vec(self.ident, IdentRepr::read),
+            token_tree: self.token_tree,
+            text: self.text,
+        }
+        .read();
+
+        // Splits the flat vec back into fixed-size chunks; panics on
+        // malformed input whose length is not a multiple of N.
+        fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
+            let mut chunks = xs.chunks_exact(N);
+            let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
+            assert!(chunks.remainder().is_empty());
+            res
+        }
+    }
+}
+
+impl SubtreeRepr {
+    /// Encodes as [id, delimiter kind, first child, last child];
+    /// kind 0 = no delimiter, 1 = `()`, 2 = `{}`, 3 = `[]`.
+    fn write(self) -> [u32; 4] {
+        let kind = match self.kind {
+            None => 0,
+            Some(tt::DelimiterKind::Parenthesis) => 1,
+            Some(tt::DelimiterKind::Brace) => 2,
+            Some(tt::DelimiterKind::Bracket) => 3,
+        };
+        [self.id.0, kind, self.tt[0], self.tt[1]]
+    }
+    /// Inverse of `write`; panics on an unknown kind tag.
+    fn read([id, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
+        let kind = match kind {
+            0 => None,
+            1 => Some(tt::DelimiterKind::Parenthesis),
+            2 => Some(tt::DelimiterKind::Brace),
+            3 => Some(tt::DelimiterKind::Bracket),
+            other => panic!("bad kind {}", other),
+        };
+        SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] }
+    }
+}
+
+impl LiteralRepr {
+    /// Encodes as [token id, index into `text`].
+    fn write(self) -> [u32; 2] {
+        [self.id.0, self.text]
+    }
+    fn read([id, text]: [u32; 2]) -> LiteralRepr {
+        LiteralRepr { id: TokenId(id), text }
+    }
+}
+
+impl PunctRepr {
+    /// Encodes as [token id, char as u32, spacing (0 = Alone, 1 = Joint)].
+    fn write(self) -> [u32; 3] {
+        let spacing = match self.spacing {
+            tt::Spacing::Alone => 0,
+            tt::Spacing::Joint => 1,
+        };
+        [self.id.0, self.char as u32, spacing]
+    }
+    /// Inverse of `write`; panics on a bad spacing tag or invalid char.
+    fn read([id, char, spacing]: [u32; 3]) -> PunctRepr {
+        let spacing = match spacing {
+            0 => tt::Spacing::Alone,
+            1 => tt::Spacing::Joint,
+            other => panic!("bad spacing {}", other),
+        };
+        PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing }
+    }
+}
+
+impl IdentRepr {
+    /// Encodes as [token id, index into `text`].
+    fn write(self) -> [u32; 2] {
+        [self.id.0, self.text]
+    }
+    fn read(data: [u32; 2]) -> IdentRepr {
+        IdentRepr { id: TokenId(data[0]), text: data[1] }
+    }
+}
+
+/// Accumulates the flattened representation while walking a `tt::Subtree`.
+struct Writer<'a> {
+    // Breadth-first work queue: (index into `subtree`, subtree to emit).
+    work: VecDeque<(usize, &'a tt::Subtree)>,
+    // Deduplicates strings: text -> index into `text`.
+    string_table: HashMap<&'a str, u32>,
+
+    subtree: Vec<SubtreeRepr>,
+    literal: Vec<LiteralRepr>,
+    punct: Vec<PunctRepr>,
+    ident: Vec<IdentRepr>,
+    token_tree: Vec<u32>,
+    text: Vec<String>,
+}
+
+impl<'a> Writer<'a> {
+    /// Flattens `root` breadth-first; parents always get smaller `subtree`
+    /// indices than their children (the Reader relies on this ordering).
+    fn write(&mut self, root: &'a tt::Subtree) {
+        self.enqueue(root);
+        while let Some((idx, subtree)) = self.work.pop_front() {
+            self.subtree(idx, subtree);
+        }
+    }
+
+    /// Emits the children of one subtree into `token_tree` and records
+    /// their [first, last) range on the subtree's repr at `idx`.
+    fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) {
+        let mut first_tt = self.token_tree.len();
+        let n_tt = subtree.token_trees.len();
+        // Reserve the child slots up front (filled with !0 sentinels) so
+        // nested subtrees enqueued below don't interleave with this range.
+        self.token_tree.resize(first_tt + n_tt, !0);
+
+        self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];
+
+        for child in &subtree.token_trees {
+            // Each child is encoded as `index << 2 | tag`:
+            // 00 = subtree, 01 = literal, 10 = punct, 11 = ident.
+            let idx_tag = match child {
+                tt::TokenTree::Subtree(it) => {
+                    let idx = self.enqueue(it);
+                    idx << 2 | 0b00
+                }
+                tt::TokenTree::Leaf(leaf) => match leaf {
+                    tt::Leaf::Literal(lit) => {
+                        let idx = self.literal.len() as u32;
+                        let text = self.intern(&lit.text);
+                        self.literal.push(LiteralRepr { id: lit.id, text });
+                        idx << 2 | 0b01
+                    }
+                    tt::Leaf::Punct(punct) => {
+                        let idx = self.punct.len() as u32;
+                        self.punct.push(PunctRepr {
+                            char: punct.char,
+                            spacing: punct.spacing,
+                            id: punct.id,
+                        });
+                        idx << 2 | 0b10
+                    }
+                    tt::Leaf::Ident(ident) => {
+                        let idx = self.ident.len() as u32;
+                        let text = self.intern(&ident.text);
+                        self.ident.push(IdentRepr { id: ident.id, text });
+                        idx << 2 | 0b11
+                    }
+                },
+            };
+            self.token_tree[first_tt] = idx_tag;
+            first_tt += 1;
+        }
+    }
+
+    /// Allocates a `subtree` slot (range filled in later by `subtree()`)
+    /// and queues the subtree for processing; returns its index.
+    fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
+        let idx = self.subtree.len();
+        let delimiter_id = subtree.delimiter.map_or(TokenId::unspecified(), |it| it.id);
+        let delimiter_kind = subtree.delimiter.map(|it| it.kind);
+        self.subtree.push(SubtreeRepr { id: delimiter_id, kind: delimiter_kind, tt: [!0, !0] });
+        self.work.push_back((idx, subtree));
+        idx as u32
+    }
+
+    /// Interns `text`, returning its index in the shared string table.
+    pub(crate) fn intern(&mut self, text: &'a str) -> u32 {
+        let table = &mut self.text;
+        *self.string_table.entry(text).or_insert_with(|| {
+            let idx = table.len();
+            table.push(text.to_string());
+            idx as u32
+        })
+    }
+}
+
+/// Decoded flat arrays, ready to be reassembled into a `tt::Subtree`.
+struct Reader {
+    subtree: Vec<SubtreeRepr>,
+    literal: Vec<LiteralRepr>,
+    punct: Vec<PunctRepr>,
+    ident: Vec<IdentRepr>,
+    token_tree: Vec<u32>,
+    text: Vec<String>,
+}
+
+impl Reader {
+    /// Rebuilds the tree. Subtrees were written breadth-first, so children
+    /// have larger indices than parents; iterating in reverse guarantees
+    /// every child is materialized before the parent that consumes it.
+    pub(crate) fn read(self) -> tt::Subtree {
+        let mut res: Vec<Option<tt::Subtree>> = vec![None; self.subtree.len()];
+        for i in (0..self.subtree.len()).rev() {
+            let repr = &self.subtree[i];
+            let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
+            let s = tt::Subtree {
+                delimiter: repr.kind.map(|kind| tt::Delimiter { id: repr.id, kind }),
+                token_trees: token_trees
+                    .iter()
+                    .copied()
+                    .map(|idx_tag| {
+                        // Decode `index << 2 | tag` (see Writer::subtree).
+                        let tag = idx_tag & 0b11;
+                        let idx = (idx_tag >> 2) as usize;
+                        match tag {
+                            // XXX: we iterate subtrees in reverse to guarantee
+                            // that this unwrap doesn't fire.
+                            0b00 => res[idx].take().unwrap().into(),
+                            0b01 => {
+                                let repr = &self.literal[idx];
+                                tt::Leaf::Literal(tt::Literal {
+                                    text: self.text[repr.text as usize].as_str().into(),
+                                    id: repr.id,
+                                })
+                                .into()
+                            }
+                            0b10 => {
+                                let repr = &self.punct[idx];
+                                tt::Leaf::Punct(tt::Punct {
+                                    char: repr.char,
+                                    spacing: repr.spacing,
+                                    id: repr.id,
+                                })
+                                .into()
+                            }
+                            0b11 => {
+                                let repr = &self.ident[idx];
+                                tt::Leaf::Ident(tt::Ident {
+                                    text: self.text[repr.text as usize].as_str().into(),
+                                    id: repr.id,
+                                })
+                                .into()
+                            }
+                            other => panic!("bad tag: {}", other),
+                        }
+                    })
+                    .collect(),
+            };
+            res[i] = Some(s);
+        }
+
+        // Index 0 is always the root (enqueued first by the Writer).
+        res[0].take().unwrap()
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
new file mode 100644
index 000000000..c4018d3b3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -0,0 +1,107 @@
+//! Handle process life-time and message passing for proc-macro client
+
+use std::{
+ ffi::{OsStr, OsString},
+ io::{self, BufRead, BufReader, Write},
+ process::{Child, ChildStdin, ChildStdout, Command, Stdio},
+};
+
+use paths::{AbsPath, AbsPathBuf};
+use stdx::JodChild;
+
+use crate::{
+ msg::{Message, Request, Response},
+ ProcMacroKind, ServerError,
+};
+
+/// A running proc-macro server child process plus its stdio channel.
+#[derive(Debug)]
+pub(crate) struct ProcMacroProcessSrv {
+    // Kept alive so the child is terminated when this struct drops.
+    _process: Process,
+    stdin: ChildStdin,
+    stdout: BufReader<ChildStdout>,
+}
+
+impl ProcMacroProcessSrv {
+    /// Spawns the server binary at `process_path` with `args` and wires up
+    /// its stdin/stdout for the ndjson protocol.
+    pub(crate) fn run(
+        process_path: AbsPathBuf,
+        args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+    ) -> io::Result<ProcMacroProcessSrv> {
+        let mut process = Process::run(process_path, args)?;
+        let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
+
+        let srv = ProcMacroProcessSrv { _process: process, stdin, stdout };
+
+        Ok(srv)
+    }
+
+    /// Asks the server which proc macros `dylib_path` exports.
+    /// Outer `Err` = transport failure; inner `Err` = server-side message.
+    pub(crate) fn find_proc_macros(
+        &mut self,
+        dylib_path: &AbsPath,
+    ) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
+        let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() };
+
+        let response = self.send_task(request)?;
+
+        match response {
+            Response::ListMacros(it) => Ok(it),
+            // Response kind must match the request kind.
+            Response::ExpandMacro { .. } => {
+                Err(ServerError { message: "unexpected response".to_string(), io: None })
+            }
+        }
+    }
+
+    /// Sends one request and blocks until its response arrives.
+    pub(crate) fn send_task(&mut self, req: Request) -> Result<Response, ServerError> {
+        let mut buf = String::new();
+        send_request(&mut self.stdin, &mut self.stdout, req, &mut buf)
+    }
+}
+
+/// Wrapper around the spawned child; `JodChild` kills it on drop
+/// ("join on drop"), so the server can't outlive the client.
+#[derive(Debug)]
+struct Process {
+    child: JodChild,
+}
+
+impl Process {
+    /// Spawns the server executable with piped stdin/stdout.
+    fn run(
+        path: AbsPathBuf,
+        args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+    ) -> io::Result<Process> {
+        let args: Vec<OsString> = args.into_iter().map(|s| s.as_ref().into()).collect();
+        let child = JodChild(mk_child(&path, &args)?);
+        Ok(Process { child })
+    }
+
+    /// Takes ownership of the child's stdio handles; returns `None` if they
+    /// were already taken (this can only succeed once).
+    fn stdio(&mut self) -> Option<(ChildStdin, BufReader<ChildStdout>)> {
+        let stdin = self.child.stdin.take()?;
+        let stdout = self.child.stdout.take()?;
+        let read = BufReader::new(stdout);
+
+        Some((stdin, read))
+    }
+}
+
+/// Spawns the server process: stdin/stdout piped for the protocol, stderr
+/// inherited so server diagnostics land in our own stderr. The env var is
+/// the opt-in gate checked by the server binary's `main`.
+fn mk_child(
+    path: &AbsPath,
+    args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+) -> io::Result<Child> {
+    Command::new(path.as_os_str())
+        .args(args)
+        .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable")
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .stderr(Stdio::inherit())
+        .spawn()
+}
+
+/// One synchronous round trip: write `req`, then block reading the
+/// response. `Ok(None)` from the reader means the server exited.
+fn send_request(
+    mut writer: &mut impl Write,
+    mut reader: &mut impl BufRead,
+    req: Request,
+    buf: &mut String,
+) -> Result<Response, ServerError> {
+    req.write(&mut writer)
+        .map_err(|err| ServerError { message: "failed to write request".into(), io: Some(err) })?;
+    let res = Response::read(&mut reader, buf)
+        .map_err(|err| ServerError { message: "failed to read response".into(), io: Some(err) })?;
+    res.ok_or_else(|| ServerError { message: "server exited".into(), io: None })
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
new file mode 100644
index 000000000..030531b80
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
@@ -0,0 +1,151 @@
+//! Reading proc-macro rustc version information from binary data
+
+use std::{
+ fs::File,
+ io::{self, Read},
+};
+
+use memmap2::Mmap;
+use object::read::{File as BinaryFile, Object, ObjectSection};
+use paths::AbsPath;
+use snap::read::FrameDecoder as SnapDecoder;
+
+/// Parsed fields of a `rustc --version`-style string found in a dylib.
+#[derive(Debug)]
+pub struct RustCInfo {
+    /// (major, minor, patch).
+    pub version: (usize, usize, usize),
+    /// Release channel suffix, e.g. "nightly"; empty for stable.
+    pub channel: String,
+    pub commit: Option<String>,
+    pub date: Option<String>,
+    // something like "rustc 1.58.1 (db9d1b20b 2022-01-20)"
+    pub version_string: String,
+}
+
+/// Reads and parses the rustc version string embedded in the dylib at
+/// `dylib_path` (e.g. `rustc 1.58.1 (db9d1b20b 2022-01-20)`).
+pub fn read_dylib_info(dylib_path: &AbsPath) -> io::Result<RustCInfo> {
+    // Shorthand for an InvalidData error with a literal message.
+    macro_rules! err {
+        ($e:literal) => {
+            io::Error::new(io::ErrorKind::InvalidData, $e)
+        };
+    }
+
+    let ver_str = read_version(dylib_path)?;
+    let mut items = ver_str.split_whitespace();
+    let tag = items.next().ok_or_else(|| err!("version format error"))?;
+    if tag != "rustc" {
+        return Err(err!("version format error (No rustc tag)"));
+    }
+
+    // "1.58.1" or "1.60.0-nightly": version, then optional channel.
+    let version_part = items.next().ok_or_else(|| err!("no version string"))?;
+    let mut version_parts = version_part.split('-');
+    let version = version_parts.next().ok_or_else(|| err!("no version"))?;
+    let channel = version_parts.next().unwrap_or_default().to_string();
+
+    // Next token is "(<commit>" — strip the opening parenthesis.
+    let commit = match items.next() {
+        Some(commit) => {
+            match commit.len() {
+                0 => None,
+                _ => Some(commit[1..].to_string() /* remove ( */),
+            }
+        }
+        None => None,
+    };
+    // Next token is "<date>)" — drop the trailing ")" (and one more byte;
+    // NOTE(review): `len - 2` removes two chars, presumably ")" plus a
+    // stray character — confirm against actual rustc output).
+    let date = match items.next() {
+        Some(date) => {
+            match date.len() {
+                0 => None,
+                _ => Some(date[0..date.len() - 2].to_string() /* remove ) */),
+            }
+        }
+        None => None,
+    };
+
+    let version_numbers = version
+        .split('.')
+        .map(|it| it.parse::<usize>())
+        .collect::<Result<Vec<_>, _>>()
+        .map_err(|_| err!("version number error"))?;
+
+    if version_numbers.len() != 3 {
+        return Err(err!("version number format error"));
+    }
+    let version = (version_numbers[0], version_numbers[1], version_numbers[2]);
+
+    Ok(RustCInfo { version, channel, commit, date, version_string: ver_str })
+}
+
+/// This is used inside read_version() to locate the ".rustc" section
+/// from a proc macro crate's binary file. Any parse/lookup failure is
+/// mapped to an `InvalidData` I/O error.
+fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'a [u8]> {
+    BinaryFile::parse(dylib_binary)
+        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?
+        .section_by_name(section_name)
+        .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "section read error"))?
+        .data()
+        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
+}
+
+/// Reads the rustc version string out of a proc macro crate's binary file.
+///
+/// A proc macro crate binary's ".rustc" section has following byte layout:
+/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes
+/// * ff060000 734e6150 is followed, it's the snappy format magic bytes,
+///   means bytes from here(including this sequence) are compressed in
+///   snappy compression format. Version info is inside here, so decompress
+///   this.
+/// The bytes you get after decompressing the snappy format portion has
+/// following layout:
+/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes(again)
+/// * [crate root bytes] next 4 bytes is to store crate root position,
+///   according to rustc's source code comment
+/// * [length byte] next 1 byte tells us how many bytes we should read next
+///   for the version string's utf8 bytes
+/// * [version string bytes encoded in utf8] <- GET THIS BOI
+/// * [some more bytes that we don't really care but about still there] :-)
+/// Check this issue for more about the bytes layout:
+/// <https://github.com/rust-lang/rust-analyzer/issues/6174>
+pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
+    let dylib_file = File::open(dylib_path)?;
+    // SAFETY: standard memmap caveat — UB if the file is modified while
+    // mapped; accepted here as for other readers of on-disk artifacts.
+    let dylib_mmaped = unsafe { Mmap::map(&dylib_file) }?;
+
+    let dot_rustc = read_section(&dylib_mmaped, ".rustc")?;
+
+    // check if magic is valid
+    if &dot_rustc[0..4] != b"rust" {
+        return Err(io::Error::new(
+            io::ErrorKind::InvalidData,
+            format!("unknown metadata magic, expected `rust`, found `{:?}`", &dot_rustc[0..4]),
+        ));
+    }
+    let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
+    // Last supported version is:
+    // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632
+    match version {
+        5 | 6 => {}
+        _ => {
+            return Err(io::Error::new(
+                io::ErrorKind::InvalidData,
+                format!("unsupported metadata version {}", version),
+            ));
+        }
+    }
+
+    let snappy_portion = &dot_rustc[8..];
+
+    let mut snappy_decoder = SnapDecoder::new(snappy_portion);
+
+    // the bytes before version string bytes, so this basically is:
+    // 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
+    // 4 bytes for [crate root bytes]
+    // 1 byte for length of version string
+    // so 13 bytes in total, and we should check the 13th byte
+    // to know the length
+    let mut bytes_before_version = [0u8; 13];
+    snappy_decoder.read_exact(&mut bytes_before_version)?;
+    let length = bytes_before_version[12];
+
+    let mut version_string_utf8 = vec![0u8; length as usize];
+    snappy_decoder.read_exact(&mut version_string_utf8)?;
+    let version_string = String::from_utf8(version_string_utf8);
+    version_string.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
new file mode 100644
index 000000000..9d0da5dee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "proc-macro-srv-cli"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[dependencies]
+proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" }
+
+[features]
+sysroot-abi = ["proc-macro-srv/sysroot-abi"]
+
+[[bin]]
+name = "rust-analyzer-proc-macro-srv"
+path = "src/main.rs"
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
new file mode 100644
index 000000000..ac9fa9f5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
@@ -0,0 +1,19 @@
+//! A standalone binary for `proc-macro-srv`.
+
+use proc_macro_srv::cli;
+
+/// Entry point: refuses to run unless the caller (normally rust-analyzer
+/// itself, see `mk_child` on the client side) opted in via the magic env
+/// var, then hands control to the server loop.
+fn main() -> std::io::Result<()> {
+    let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE");
+    match v.as_deref() {
+        Ok("this is unstable") => {
+            // very well, if you must
+        }
+        _ => {
+            eprintln!("If you're rust-analyzer, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE='this is unstable'.");
+            eprintln!("If not, you probably shouldn't use this tool. But do what you want: I'm an error message, not a cop.");
+            // Distinctive exit code for "refused to run without the gate".
+            std::process::exit(122);
+        }
+    }
+
+    cli::run()
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
new file mode 100644
index 000000000..5746eac0b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
@@ -0,0 +1,36 @@
+[package]
+name = "proc-macro-srv"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+object = { version = "0.29.0", default-features = false, features = [
+ "std",
+ "read_core",
+ "elf",
+ "macho",
+ "pe",
+] }
+libloading = "0.7.3"
+memmap2 = "0.5.4"
+
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
+proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+crossbeam = "0.8.1"
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+# used as proc macro test targets
+proc-macro-test = { path = "../proc-macro-test" }
+
+[features]
+sysroot-abi = []
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs
new file mode 100644
index 000000000..a8c732f31
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs
@@ -0,0 +1,25 @@
+//! Determine rustc version `proc-macro-srv` (and thus the sysroot ABI) is
+//! build with and make it accessible at runtime for ABI selection.
+
+use std::{env, fs::File, io::Write, path::PathBuf, process::Command};
+
+/// Build script: captures `$RUSTC --version` and generates
+/// `$OUT_DIR/rustc_version.rs` defining `RUSTC_VERSION_STRING`, so the
+/// crate can select a matching ABI at runtime.
+fn main() {
+    let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+    path.push("rustc_version.rs");
+    let mut f = File::create(&path).unwrap();
+
+    let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set");
+    let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run");
+    let version_string = std::str::from_utf8(&output.stdout[..])
+        .expect("rustc --version output must be UTF-8")
+        .trim();
+
+    // `{version_string:?}` quotes/escapes the string as a Rust literal.
+    write!(
+        f,
+        "
+    #[allow(dead_code)]
+    pub(crate) const RUSTC_VERSION_STRING: &str = {version_string:?};
+    "
+    )
+    .unwrap();
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
new file mode 100644
index 000000000..1c91ac0fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
@@ -0,0 +1,104 @@
+//! Macro ABI for version 1.58 of rustc
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+/// The macros exported by one dylib, loaded through the 1.58 bridge ABI.
+pub(crate) struct Abi {
+    exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+/// Converts the bridge's panic payload into the crate-level PanicMessage.
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+    fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+        Self { message: p.as_str().map(|s| s.to_string()) }
+    }
+}
+
+impl Abi {
+    /// Looks up the macro registry symbol `symbol_name` in `lib`.
+    ///
+    /// # Safety
+    /// The caller must guarantee the symbol really has type
+    /// `&&[proc_macro::bridge::client::ProcMacro]` for *this* ABI version;
+    /// a mismatch is undefined behavior.
+    pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+        let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+            lib.get(symbol_name.as_bytes())?;
+        Ok(Self { exported_macros: macros.to_vec() })
+    }
+
+    /// Expands the macro named `macro_name` against `macro_body` (plus the
+    /// optional attribute input), running the client in-process on the same
+    /// thread. Returns the expansion or the macro's panic message.
+    pub fn expand(
+        &self,
+        macro_name: &str,
+        macro_body: &tt::Subtree,
+        attributes: Option<&tt::Subtree>,
+    ) -> Result<tt::Subtree, PanicMessage> {
+        let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
+
+        let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
+            ra_server::TokenStream::with_subtree(attr.clone())
+        });
+
+        // Linear scan over exported macros; dispatch on kind + name.
+        for proc_macro in &self.exported_macros {
+            match proc_macro {
+                proc_macro::bridge::client::ProcMacro::CustomDerive {
+                    trait_name, client, ..
+                } if *trait_name == macro_name => {
+                    let res = client.run(
+                        &proc_macro::bridge::server::SameThread,
+                        ra_server::RustAnalyzer::default(),
+                        parsed_body,
+                        true,
+                    );
+                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+                }
+                proc_macro::bridge::client::ProcMacro::Bang { name, client }
+                    if *name == macro_name =>
+                {
+                    let res = client.run(
+                        &proc_macro::bridge::server::SameThread,
+                        ra_server::RustAnalyzer::default(),
+                        parsed_body,
+                        true,
+                    );
+                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+                }
+                proc_macro::bridge::client::ProcMacro::Attr { name, client }
+                    if *name == macro_name =>
+                {
+                    // Attribute macros take two streams: attr args + item.
+                    let res = client.run(
+                        &proc_macro::bridge::server::SameThread,
+                        ra_server::RustAnalyzer::default(),
+                        parsed_attributes,
+                        parsed_body,
+                        true,
+                    );
+                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+                }
+                _ => continue,
+            }
+        }
+
+        // No exported macro matched the requested name/kind.
+        Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+    }
+
+    /// Lists (name, kind) for every macro this dylib exports.
+    pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+        self.exported_macros
+            .iter()
+            .map(|proc_macro| match proc_macro {
+                proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+                    (trait_name.to_string(), ProcMacroKind::CustomDerive)
+                }
+                proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+                    (name.to_string(), ProcMacroKind::FuncLike)
+                }
+                proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+                    (name.to_string(), ProcMacroKind::Attr)
+                }
+            })
+            .collect()
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..d82669d3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs
@@ -0,0 +1,143 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+/// FFI-safe growable buffer: a Vec-like (ptr, len, capacity) triple plus
+/// function pointers so that (re)allocation and deallocation always happen
+/// in the crate that created the buffer (set up in `From<Vec<T>>` below).
+#[repr(C)]
+pub struct Buffer<T: Copy> {
+    data: *mut T,
+    len: usize,
+    capacity: usize,
+    reserve: extern "C" fn(Buffer<T>, usize) -> Buffer<T>,
+    drop: extern "C" fn(Buffer<T>),
+}
+
+// SAFETY: Buffer owns its allocation like a Vec<T> (see From<Vec<T>>), so
+// it is Sync/Send exactly when T is.
+unsafe impl<T: Copy + Sync> Sync for Buffer<T> {}
+unsafe impl<T: Copy + Send> Send for Buffer<T> {}
+
+impl<T: Copy> Default for Buffer<T> {
+    /// An empty buffer backed by an empty Vec.
+    fn default() -> Self {
+        Self::from(vec![])
+    }
+}
+
+impl<T: Copy> Deref for Buffer<T> {
+    type Target = [T];
+    fn deref(&self) -> &[T] {
+        // SAFETY: data/len describe the initialized prefix of the owned
+        // allocation, as maintained by push/extend below.
+        unsafe { slice::from_raw_parts(self.data as *const T, self.len) }
+    }
+}
+
+impl<T: Copy> DerefMut for Buffer<T> {
+    fn deref_mut(&mut self) -> &mut [T] {
+        // SAFETY: same invariant as Deref; &mut self gives unique access.
+        unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+    }
+}
+
+impl<T: Copy> Buffer<T> {
+    pub(super) fn new() -> Self {
+        Self::default()
+    }
+
+    /// Logically empties the buffer; capacity is retained.
+    pub(super) fn clear(&mut self) {
+        self.len = 0;
+    }
+
+    /// Moves the buffer out, leaving an empty default in its place.
+    pub(super) fn take(&mut self) -> Self {
+        mem::take(self)
+    }
+
+    // We have the array method separate from extending from a slice. This is
+    // because in the case of small arrays, codegen can be more efficient
+    // (avoiding a memmove call). With extend_from_slice, LLVM at least
+    // currently is not able to make that optimization.
+    pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
+        if xs.len() > (self.capacity - self.len) {
+            // Grow through the stored callback so reallocation happens on
+            // the side that owns the allocation.
+            let b = self.take();
+            *self = (b.reserve)(b, xs.len());
+        }
+        // SAFETY: reserve guaranteed capacity >= len + xs.len(); source and
+        // destination cannot overlap (xs is a borrowed input array).
+        unsafe {
+            xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+            self.len += xs.len();
+        }
+    }
+
+    pub(super) fn extend_from_slice(&mut self, xs: &[T]) {
+        if xs.len() > (self.capacity - self.len) {
+            let b = self.take();
+            *self = (b.reserve)(b, xs.len());
+        }
+        // SAFETY: as in extend_from_array.
+        unsafe {
+            xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+            self.len += xs.len();
+        }
+    }
+
+    pub(super) fn push(&mut self, v: T) {
+        // The code here is taken from Vec::push, and we know that reserve()
+        // will panic if we're exceeding isize::MAX bytes and so there's no need
+        // to check for overflow.
+        if self.len == self.capacity {
+            let b = self.take();
+            *self = (b.reserve)(b, 1);
+        }
+        // SAFETY: reserve guaranteed room for at least one more element.
+        unsafe {
+            *self.data.add(self.len) = v;
+            self.len += 1;
+        }
+    }
+}
+
+/// Byte buffers act as infallible sinks: writes always succeed by growing.
+impl Write for Buffer<u8> {
+    fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+        self.extend_from_slice(xs);
+        Ok(xs.len())
+    }
+
+    fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+        self.extend_from_slice(xs);
+        Ok(())
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        Ok(())
+    }
+}
+
+impl<T: Copy> Drop for Buffer<T> {
+    fn drop(&mut self) {
+        // Free through the stored callback so deallocation happens in the
+        // crate that allocated (take() leaves a fresh empty buffer behind,
+        // whose own drop is a no-op free of an empty Vec).
+        let b = self.take();
+        (b.drop)(b);
+    }
+}
+
+impl<T: Copy> From<Vec<T>> for Buffer<T> {
+    /// Takes ownership of `v`'s allocation (`mem::forget` prevents Vec's
+    /// own drop) and installs reserve/drop callbacks that round-trip
+    /// through Vec on this side of the FFI boundary.
+    fn from(mut v: Vec<T>) -> Self {
+        let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+        mem::forget(v);
+
+        // This utility function is nested in here because it can *only*
+        // be safely called on `Buffer`s created by *this* `proc_macro`.
+        fn to_vec<T: Copy>(b: Buffer<T>) -> Vec<T> {
+            unsafe {
+                let Buffer { data, len, capacity, .. } = b;
+                // Forget the Buffer so its Drop doesn't also free `data`.
+                mem::forget(b);
+                Vec::from_raw_parts(data, len, capacity)
+            }
+        }
+
+        extern "C" fn reserve<T: Copy>(b: Buffer<T>, additional: usize) -> Buffer<T> {
+            let mut v = to_vec(b);
+            v.reserve(additional);
+            Buffer::from(v)
+        }
+
+        extern "C" fn drop<T: Copy>(b: Buffer<T>) {
+            mem::drop(to_vec(b));
+        }
+
+        Buffer { data, len, capacity, reserve, drop }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..ed0e91da3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs
@@ -0,0 +1,485 @@
+//! Client-side types.
+
+use super::*;
+
+// Generates, for each listed type: the client-side newtype over a raw
+// `Handle`, the `HandleCounters`/`HandleStore` that back it on the server,
+// and the `Encode`/`Decode` impls that move handles across the wire.
+// 'owned' types have move semantics (and an explicit RPC `drop`);
+// 'interned' types are deduplicated `Copy` ids.
+macro_rules! define_handles {
+    (
+        'owned: $($oty:ident,)*
+        'interned: $($ity:ident,)*
+    ) => {
+        #[repr(C)]
+        #[allow(non_snake_case)]
+        pub struct HandleCounters {
+            $($oty: AtomicUsize,)*
+            $($ity: AtomicUsize,)*
+        }
+
+        impl HandleCounters {
+            // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+            // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+            extern "C" fn get() -> &'static Self {
+                static COUNTERS: HandleCounters = HandleCounters {
+                    $($oty: AtomicUsize::new(1),)*
+                    $($ity: AtomicUsize::new(1),)*
+                };
+                &COUNTERS
+            }
+        }
+
+        // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+        #[repr(C)]
+        #[allow(non_snake_case)]
+        pub(super) struct HandleStore<S: server::Types> {
+            $($oty: handle::OwnedStore<S::$oty>,)*
+            $($ity: handle::InternedStore<S::$ity>,)*
+        }
+
+        impl<S: server::Types> HandleStore<S> {
+            pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+                HandleStore {
+                    $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+                    $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+                }
+            }
+        }
+
+        $(
+            #[repr(C)]
+            pub(crate) struct $oty(handle::Handle);
+
+            // Forward `Drop::drop` to the inherent `drop` method.
+            impl Drop for $oty {
+                fn drop(&mut self) {
+                    $oty(self.0).drop();
+                }
+            }
+
+            impl<S> Encode<S> for $oty {
+                fn encode(self, w: &mut Writer, s: &mut S) {
+                    // Ownership of the handle transfers with the message;
+                    // forgetting `self` skips the RPC `drop` above.
+                    let handle = self.0;
+                    mem::forget(self);
+                    handle.encode(w, s);
+                }
+            }
+
+            // Decoding an owned handle on the server *takes* the value out
+            // of the store (single use).
+            impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+                for Marked<S::$oty, $oty>
+            {
+                fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+                    s.$oty.take(handle::Handle::decode(r, &mut ()))
+                }
+            }
+
+            impl<S> Encode<S> for &$oty {
+                fn encode(self, w: &mut Writer, s: &mut S) {
+                    self.0.encode(w, s);
+                }
+            }
+
+            // `&T` arguments decode to a shared borrow of the stored value.
+            impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+                for &'s Marked<S::$oty, $oty>
+            {
+                fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+                    &s.$oty[handle::Handle::decode(r, &mut ())]
+                }
+            }
+
+            impl<S> Encode<S> for &mut $oty {
+                fn encode(self, w: &mut Writer, s: &mut S) {
+                    self.0.encode(w, s);
+                }
+            }
+
+            // `&mut T` arguments decode to an exclusive borrow of the stored value.
+            impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+                for &'s mut Marked<S::$oty, $oty>
+            {
+                fn decode(
+                    r: &mut Reader<'_>,
+                    s: &'s mut HandleStore<server::MarkedTypes<S>>
+                ) -> Self {
+                    &mut s.$oty[handle::Handle::decode(r, &mut ())]
+                }
+            }
+
+            // Encoding a server value allocates a fresh handle for it.
+            impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+                for Marked<S::$oty, $oty>
+            {
+                fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+                    s.$oty.alloc(self).encode(w, s);
+                }
+            }
+
+            impl<S> DecodeMut<'_, '_, S> for $oty {
+                fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+                    $oty(handle::Handle::decode(r, s))
+                }
+            }
+        )*
+
+        $(
+            #[repr(C)]
+            #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+            pub(crate) struct $ity(handle::Handle);
+
+            impl<S> Encode<S> for $ity {
+                fn encode(self, w: &mut Writer, s: &mut S) {
+                    self.0.encode(w, s);
+                }
+            }
+
+            // Interned handles are copied out of the store, never removed.
+            impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+                for Marked<S::$ity, $ity>
+            {
+                fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+                    s.$ity.copy(handle::Handle::decode(r, &mut ()))
+                }
+            }
+
+            impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+                for Marked<S::$ity, $ity>
+            {
+                fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+                    s.$ity.alloc(self).encode(w, s);
+                }
+            }
+
+            impl<S> DecodeMut<'_, '_, S> for $ity {
+                fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+                    $ity(handle::Handle::decode(r, s))
+                }
+            }
+        )*
+    }
+}
+// Concrete handle types used by the client. 'owned' ones have identity and
+// explicit cleanup; 'interned' ones are deduplicated `Copy` ids.
+define_handles! {
+    'owned:
+    FreeFunctions,
+    TokenStream,
+    TokenStreamBuilder,
+    TokenStreamIter,
+    Group,
+    Literal,
+    SourceFile,
+    MultiSpan,
+    Diagnostic,
+
+    'interned:
+    Punct,
+    Ident,
+    Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special "modes" could be listed for types in with_api
+// instead of pattern matching on methods, here and in server decl.
+
+// Not infinite recursion: `self.clone()` here resolves to the *inherent*
+// `clone` method generated by `define_client_side!` (an RPC asking the
+// server to clone the handle's value), because inherent methods take
+// precedence over trait methods. The same applies to every `Clone` impl
+// below.
+impl Clone for TokenStream {
+    fn clone(&self) -> Self {
+        self.clone()
+    }
+}
+
+impl Clone for TokenStreamIter {
+    fn clone(&self) -> Self {
+        self.clone()
+    }
+}
+
+impl Clone for Group {
+    fn clone(&self) -> Self {
+        self.clone()
+    }
+}
+
+impl Clone for Literal {
+    fn clone(&self) -> Self {
+        self.clone()
+    }
+}
+
+impl fmt::Debug for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Each field below is fetched from the server via its own RPC call
+        // (`debug_kind`, `symbol`, `suffix`, `span` in `with_api!`).
+        f.debug_struct("Literal")
+            // format the kind without quotes, as in `kind: Float`
+            .field("kind", &format_args!("{}", &self.debug_kind()))
+            .field("symbol", &self.symbol())
+            // format `Some("...")` on one line even in {:#?} mode
+            .field("suffix", &format_args!("{:?}", &self.suffix()))
+            .field("span", &self.span())
+            .finish()
+    }
+}
+
+impl Clone for SourceFile {
+    fn clone(&self) -> Self {
+        self.clone()
+    }
+}
+
+impl fmt::Debug for Span {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Rendering is delegated to the server's `Span::debug` RPC.
+        f.write_str(&self.debug())
+    }
+}
+
+// For every `with_api!` type, generates inherent client-side methods that
+// proxy each API call over the bridge: encode a method tag plus arguments,
+// hand the buffer to the server's `dispatch` closure, then decode the
+// returned `Result`, resuming any panic the server captured.
+macro_rules! define_client_side {
+    ($($name:ident {
+        $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+    }),* $(,)?) => {
+        $(impl $name {
+            $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+                Bridge::with(|bridge| {
+                    // Reuse the bridge's scratch buffer for the request.
+                    let mut b = bridge.cached_buffer.take();
+
+                    b.clear();
+                    api_tags::Method::$name(api_tags::$name::$method).encode(&mut b, &mut ());
+                    reverse_encode!(b; $($arg),*);
+
+                    b = bridge.dispatch.call(b);
+
+                    let r = Result::<_, PanicMessage>::decode(&mut &b[..], &mut ());
+
+                    // Return the (possibly reallocated) buffer for reuse.
+                    bridge.cached_buffer = b;
+
+                    r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+                })
+            })*
+        })*
+    }
+}
+with_api!(self, self, define_client_side);
+
+/// Per-thread state of the client's connection to a server.
+enum BridgeState<'a> {
+    /// No server is currently connected to this client.
+    NotConnected,
+
+    /// A server is connected and available for requests.
+    Connected(Bridge<'a>),
+
+    /// Access to the bridge is being exclusively acquired
+    /// (e.g., during `BridgeState::with`).
+    InUse,
+}
+
+// Marker type implementing `scoped_cell::ApplyL`, so the thread-local cell
+// below can store a `BridgeState<'a>` for any (scoped) lifetime `'a`.
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+    type Out = BridgeState<'a>;
+}
+
+thread_local! {
+    // Where the active `Bridge` lives while `Bridge::enter` is on the stack.
+    static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+        scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+    /// Take exclusive control of the thread-local
+    /// `BridgeState`, and pass it to `f`, mutably.
+    /// The state will be restored after `f` exits, even
+    /// by panic, including modifications made to it by `f`.
+    ///
+    /// N.B., while `f` is running, the thread-local state
+    /// is `BridgeState::InUse`.
+    fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+        BRIDGE_STATE.with(|state| {
+            // `replace` swaps `InUse` in and hands the previous state to the
+            // closure, restoring it when the closure returns.
+            state.replace(BridgeState::InUse, |mut state| {
+                // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+                f(&mut *state)
+            })
+        })
+    }
+}
+
+impl Bridge<'_> {
+    /// Whether a server is attached to the current thread
+    /// (`Connected` or already `InUse` both count).
+    pub(crate) fn is_available() -> bool {
+        BridgeState::with(|state| match state {
+            BridgeState::Connected(_) | BridgeState::InUse => true,
+            BridgeState::NotConnected => false,
+        })
+    }
+
+    /// Installs `self` as the thread's active bridge for the duration of `f`.
+    fn enter<R>(self, f: impl FnOnce() -> R) -> R {
+        let force_show_panics = self.force_show_panics;
+        // Hide the default panic output within `proc_macro` expansions.
+        // NB. the server can't do this because it may use a different libstd.
+        static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+        HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+            // NOTE(review): the hook is installed once per process and
+            // captures `force_show_panics` from the *first* bridge entered;
+            // later bridges' flags are not seen by it — confirm intended.
+            let prev = panic::take_hook();
+            panic::set_hook(Box::new(move |info| {
+                // Only print panics while no expansion is in progress,
+                // unless the server asked for them via `force_show_panics`.
+                let show = BridgeState::with(|state| match state {
+                    BridgeState::NotConnected => true,
+                    BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+                });
+                if show {
+                    prev(info)
+                }
+            }));
+        });
+
+        BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f))
+    }
+
+    /// Runs `f` with the thread's active bridge; panics if there is none,
+    /// or if the bridge is already borrowed by an enclosing `with`.
+    fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+        BridgeState::with(|state| match state {
+            BridgeState::NotConnected => {
+                panic!("procedural macro API is used outside of a procedural macro");
+            }
+            BridgeState::InUse => {
+                panic!("procedural macro API is used while it's already in use");
+            }
+            BridgeState::Connected(bridge) => f(bridge),
+        })
+    }
+}
+
+/// A client-side "global object" (usually a function pointer),
+/// which may be using a different `proc_macro` from the one
+/// used by the server, but can be interacted with compatibly.
+///
+/// N.B., `F` must have FFI-friendly memory layout (e.g., a pointer).
+/// The call ABI of function pointers used for `F` doesn't
+/// need to match between server and client, since it's only
+/// passed between them and (eventually) called by the client.
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub struct Client<F> {
+    // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+    // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+    pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+    // Entry point the server invokes; wraps `f` via `run_client`
+    // (see `expand1`/`expand2` below).
+    pub(super) run: extern "C" fn(Bridge<'_>, F) -> Buffer<u8>,
+    // The macro's actual expansion function.
+    pub(super) f: F,
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+    mut bridge: Bridge<'_>,
+    f: impl FnOnce(A) -> R,
+) -> Buffer<u8> {
+    // The initial `cached_buffer` contains the input.
+    let mut b = bridge.cached_buffer.take();
+
+    panic::catch_unwind(panic::AssertUnwindSafe(|| {
+        bridge.enter(|| {
+            let reader = &mut &b[..];
+            let input = A::decode(reader, &mut ());
+
+            // Put the `cached_buffer` back in the `Bridge`, for requests.
+            Bridge::with(|bridge| bridge.cached_buffer = b.take());
+
+            let output = f(input);
+
+            // Take the `cached_buffer` back out, for the output value.
+            b = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+            // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+            // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+            // having handles outside the `bridge.enter(|| ...)` scope, and
+            // to catch panics that could happen while encoding the success.
+            //
+            // Note that panics should be impossible beyond this point, but
+            // this is defensively trying to avoid any accidental panicking
+            // reaching the `extern "C"` (which should `abort` but might not
+            // at the moment, so this is also potentially preventing UB).
+            b.clear();
+            Ok::<_, ()>(output).encode(&mut b, &mut ());
+        })
+    }))
+    .map_err(PanicMessage::from)
+    .unwrap_or_else(|e| {
+        // The client panicked: report the panic payload as the response.
+        b.clear();
+        Err::<(), _>(e).encode(&mut b, &mut ());
+    });
+    // Either way, `b` now holds the encoded `Result` the server will decode.
+    b
+}
+
+impl Client<fn(super::super::TokenStream) -> super::super::TokenStream> {
+    /// Wraps a 1-argument expansion function (function-like and derive
+    /// macros) into an ABI-safe `Client`.
+    pub fn expand1(f: fn(super::super::TokenStream) -> super::super::TokenStream) -> Self {
+        extern "C" fn run(
+            bridge: Bridge<'_>,
+            f: impl FnOnce(super::super::TokenStream) -> super::super::TokenStream,
+        ) -> Buffer<u8> {
+            // Wrap the decoded handle in the public `TokenStream` type and
+            // unwrap the result back into its handle for encoding.
+            run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+        }
+        Client { get_handle_counters: HandleCounters::get, run, f }
+    }
+}
+
+impl Client<fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream> {
+    /// Wraps a 2-argument expansion function (attribute macros) into an
+    /// ABI-safe `Client`.
+    pub fn expand2(
+        f: fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+    ) -> Self {
+        extern "C" fn run(
+            bridge: Bridge<'_>,
+            f: impl FnOnce(
+                super::super::TokenStream,
+                super::super::TokenStream,
+            ) -> super::super::TokenStream,
+        ) -> Buffer<u8> {
+            // The two input streams arrive as a single decoded tuple.
+            run_client(bridge, |(input, input2)| {
+                f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+            })
+        }
+        Client { get_handle_counters: HandleCounters::get, run, f }
+    }
+}
+
+/// A single macro exposed by a proc-macro crate, bundling its name(s) with
+/// the ABI-safe `Client` that runs its expansion function.
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+    /// `#[derive(...)]` macro, identified by the trait name it derives.
+    CustomDerive {
+        trait_name: &'static str,
+        attributes: &'static [&'static str],
+        client: Client<fn(super::super::TokenStream) -> super::super::TokenStream>,
+    },
+
+    /// Attribute macro (takes the attribute args and the annotated item).
+    Attr {
+        name: &'static str,
+        client: Client<
+            fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+        >,
+    },
+
+    /// Function-like (`name!(...)`) macro.
+    Bang {
+        name: &'static str,
+        client: Client<fn(super::super::TokenStream) -> super::super::TokenStream>,
+    },
+}
+
+impl ProcMacro {
+    /// The name this macro is exposed under (for custom derives, the
+    /// derived trait's name).
+    pub fn name(&self) -> &'static str {
+        match self {
+            ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+            ProcMacro::Attr { name, .. } | ProcMacro::Bang { name, .. } => name,
+        }
+    }
+
+    /// Packages a derive expansion function as a `CustomDerive` entry.
+    pub fn custom_derive(
+        trait_name: &'static str,
+        attributes: &'static [&'static str],
+        expand: fn(super::super::TokenStream) -> super::super::TokenStream,
+    ) -> Self {
+        let client = Client::expand1(expand);
+        ProcMacro::CustomDerive { trait_name, attributes, client }
+    }
+
+    /// Packages an attribute expansion function as an `Attr` entry.
+    pub fn attr(
+        name: &'static str,
+        expand: fn(
+            super::super::TokenStream,
+            super::super::TokenStream,
+        ) -> super::super::TokenStream,
+    ) -> Self {
+        let client = Client::expand2(expand);
+        ProcMacro::Attr { name, client }
+    }
+
+    /// Packages a function-like expansion function as a `Bang` entry.
+    pub fn bang(
+        name: &'static str,
+        expand: fn(super::super::TokenStream) -> super::super::TokenStream,
+    ) -> Self {
+        let client = Client::expand1(expand);
+        ProcMacro::Bang { name, client }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..5be71cc3d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs
@@ -0,0 +1,24 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+    // Type-erased trampoline; only sound to call with the matching `env`.
+    call: unsafe extern "C" fn(&mut Env, A) -> R,
+    // Really a `&'a mut F` for some `F: FnMut(A) -> R`, erased to `Env`.
+    env: &'a mut Env,
+}
+
+// Opaque stand-in for the erased closure environment.
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+    fn from(f: &'a mut F) -> Self {
+        // Monomorphic trampoline that casts `env` back to the concrete `F`.
+        unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: &mut Env, arg: A) -> R {
+            (*(env as *mut _ as *mut F))(arg)
+        }
+        Closure { call: call::<A, R, F>, env: unsafe { &mut *(f as *mut _ as *mut Env) } }
+    }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+    pub fn call(&mut self, arg: A) -> R {
+        // Sound because `call` and `env` were paired up in `From<&mut F>`.
+        unsafe { (self.call)(self.env, arg) }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..bcbb86812
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs
@@ -0,0 +1,70 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::Hash;
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+// Integer id used to refer to a server-side value across the ABI boundary;
+// nonzero so a counter wrap to 0 is detectable (see `OwnedStore::alloc`).
+pub(super) type Handle = NonZeroU32;
+
+// Storage for 'owned' handles: each handle maps to exactly one live value,
+// inserted by `alloc` and removed by `take`.
+pub(super) struct OwnedStore<T: 'static> {
+    // Mints fresh ids; lives in the client's static `HandleCounters`.
+    counter: &'static AtomicUsize,
+    data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+    pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+        // Ensure the handle counter isn't 0, which would panic later,
+        // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+        assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+        OwnedStore { counter, data: BTreeMap::new() }
+    }
+}
+
+impl<T> OwnedStore<T> {
+    /// Stores `x` under a freshly minted handle.
+    pub(super) fn alloc(&mut self, x: T) -> Handle {
+        let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+        // NOTE(review): `counter as u32` truncates on 64-bit targets; only a
+        // wrap to exactly 0 is caught by the `expect` below (other collisions
+        // would hit the insert assert) — confirm this matches upstream.
+        let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+        // A fresh id must never collide with a live one.
+        assert!(self.data.insert(handle, x).is_none());
+        handle
+    }
+
+    /// Removes and returns the value for `h` (owned handles are single-use).
+    pub(super) fn take(&mut self, h: Handle) -> T {
+        self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+    }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+    type Output = T;
+    /// Shared access to the value behind `h`; panics on a stale handle.
+    fn index(&self, h: Handle) -> &T {
+        match self.data.get(&h) {
+            Some(value) => value,
+            None => panic!("use-after-free in `proc_macro` handle"),
+        }
+    }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+    /// Exclusive access to the value behind `h`; panics on a stale handle.
+    fn index_mut(&mut self, h: Handle) -> &mut T {
+        match self.data.get_mut(&h) {
+            Some(value) => value,
+            None => panic!("use-after-free in `proc_macro` handle"),
+        }
+    }
+}
+
+// Deduplicating store for `Copy` values: equal values share one handle.
+pub(super) struct InternedStore<T: 'static> {
+    owned: OwnedStore<T>,
+    // Reverse map from a value to its already-allocated handle.
+    interner: HashMap<T, Handle>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+    /// Creates an empty interning store driven by the shared `counter`.
+    pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+        let owned = OwnedStore::new(counter);
+        InternedStore { owned, interner: HashMap::new() }
+    }
+
+    /// Returns the existing handle for `x`, allocating one on first sight.
+    pub(super) fn alloc(&mut self, x: T) -> Handle {
+        let store = &mut self.owned;
+        let entry = self.interner.entry(x);
+        *entry.or_insert_with(|| store.alloc(x))
+    }
+
+    /// Copies the interned value out by handle (values are `Copy`).
+    pub(super) fn copy(&mut self, h: Handle) -> T {
+        self.owned[h]
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..b7968c529
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs
@@ -0,0 +1,429 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the arguments names
+/// and argument/return types, to enable several different usecases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+    ($S:ident, $self:ident, $m:ident) => {
+        $m! {
+            FreeFunctions {
+                fn drop($self: $S::FreeFunctions);
+                fn track_env_var(var: &str, value: Option<&str>);
+                fn track_path(path: &str);
+            },
+            TokenStream {
+                fn drop($self: $S::TokenStream);
+                fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+                fn new() -> $S::TokenStream;
+                fn is_empty($self: &$S::TokenStream) -> bool;
+                fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+                fn from_str(src: &str) -> $S::TokenStream;
+                fn to_string($self: &$S::TokenStream) -> String;
+                fn from_token_tree(
+                    tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
+                ) -> $S::TokenStream;
+                fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
+            },
+            TokenStreamBuilder {
+                fn drop($self: $S::TokenStreamBuilder);
+                fn new() -> $S::TokenStreamBuilder;
+                fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
+                fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
+            },
+            TokenStreamIter {
+                fn drop($self: $S::TokenStreamIter);
+                fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
+                fn next(
+                    $self: &mut $S::TokenStreamIter,
+                ) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+            },
+            Group {
+                fn drop($self: $S::Group);
+                fn clone($self: &$S::Group) -> $S::Group;
+                fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
+                fn delimiter($self: &$S::Group) -> Delimiter;
+                fn stream($self: &$S::Group) -> $S::TokenStream;
+                fn span($self: &$S::Group) -> $S::Span;
+                fn span_open($self: &$S::Group) -> $S::Span;
+                fn span_close($self: &$S::Group) -> $S::Span;
+                fn set_span($self: &mut $S::Group, span: $S::Span);
+            },
+            // NB. the interned types (`Punct`, `Ident`, `Span` — see
+            // `define_handles!` in client.rs) are `Copy` handles, so their
+            // methods take `$self` by value.
+            Punct {
+                fn new(ch: char, spacing: Spacing) -> $S::Punct;
+                fn as_char($self: $S::Punct) -> char;
+                fn spacing($self: $S::Punct) -> Spacing;
+                fn span($self: $S::Punct) -> $S::Span;
+                fn with_span($self: $S::Punct, span: $S::Span) -> $S::Punct;
+            },
+            Ident {
+                fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+                fn span($self: $S::Ident) -> $S::Span;
+                fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+            },
+            Literal {
+                fn drop($self: $S::Literal);
+                fn clone($self: &$S::Literal) -> $S::Literal;
+                fn from_str(s: &str) -> Result<$S::Literal, ()>;
+                fn to_string($self: &$S::Literal) -> String;
+                fn debug_kind($self: &$S::Literal) -> String;
+                fn symbol($self: &$S::Literal) -> String;
+                fn suffix($self: &$S::Literal) -> Option<String>;
+                fn integer(n: &str) -> $S::Literal;
+                fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+                fn float(n: &str) -> $S::Literal;
+                fn f32(n: &str) -> $S::Literal;
+                fn f64(n: &str) -> $S::Literal;
+                fn string(string: &str) -> $S::Literal;
+                fn character(ch: char) -> $S::Literal;
+                fn byte_string(bytes: &[u8]) -> $S::Literal;
+                fn span($self: &$S::Literal) -> $S::Span;
+                fn set_span($self: &mut $S::Literal, span: $S::Span);
+                fn subspan(
+                    $self: &$S::Literal,
+                    start: Bound<usize>,
+                    end: Bound<usize>,
+                ) -> Option<$S::Span>;
+            },
+            SourceFile {
+                fn drop($self: $S::SourceFile);
+                fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+                fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+                fn path($self: &$S::SourceFile) -> String;
+                fn is_real($self: &$S::SourceFile) -> bool;
+            },
+            MultiSpan {
+                fn drop($self: $S::MultiSpan);
+                fn new() -> $S::MultiSpan;
+                fn push($self: &mut $S::MultiSpan, span: $S::Span);
+            },
+            Diagnostic {
+                fn drop($self: $S::Diagnostic);
+                fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+                fn sub(
+                    $self: &mut $S::Diagnostic,
+                    level: Level,
+                    msg: &str,
+                    span: $S::MultiSpan,
+                );
+                fn emit($self: $S::Diagnostic);
+            },
+            Span {
+                fn debug($self: $S::Span) -> String;
+                fn def_site() -> $S::Span;
+                fn call_site() -> $S::Span;
+                fn mixed_site() -> $S::Span;
+                fn source_file($self: $S::Span) -> $S::SourceFile;
+                fn parent($self: $S::Span) -> Option<$S::Span>;
+                fn source($self: $S::Span) -> $S::Span;
+                fn start($self: $S::Span) -> LineColumn;
+                fn end($self: $S::Span) -> LineColumn;
+                fn before($self: $S::Span) -> $S::Span;
+                fn after($self: $S::Span) -> $S::Span;
+                fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+                fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+                fn source_text($self: $S::Span) -> Option<String>;
+                fn save_span($self: $S::Span) -> usize;
+                fn recover_proc_macro_span(id: usize) -> $S::Span;
+            },
+        }
+    };
+}
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_encode {
+    ($writer:ident;) => {};
+    ($writer:ident; $first:ident $(, $rest:ident)*) => {
+        // Recurse first, encode on the way back out: last argument first.
+        reverse_encode!($writer; $($rest),*);
+        $first.encode(&mut $writer, &mut ());
+    }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+    ($reader:ident, $s:ident;) => {};
+    ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+        // Mirrors `reverse_encode`: the last-encoded argument decodes first.
+        reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+        let $first = <$first_ty>::decode(&mut $reader, $s);
+    }
+}
+
+// Submodule layout: `unsafe` is confined to the low-level plumbing
+// (`buffer`, `closure`, `scoped_cell`); everything layered on top of it
+// forbids unsafe code outright.
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// An active connection between a server and a client.
+/// The server creates the bridge (`Bridge::run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run`
+/// field of `client::Client`. The client holds its copy of the `Bridge`
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct Bridge<'a> {
+    /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+    /// used for making requests, but also for passing input to client.
+    cached_buffer: Buffer<u8>,
+
+    /// Server-side function that the client uses to make requests.
+    dispatch: closure::Closure<'a, Buffer<u8>, Buffer<u8>>,
+
+    /// If `true`, always invoke the default panic hook
+    /// (i.e. show client panics even while connected).
+    force_show_panics: bool,
+}
+
+// Compact tags identifying which API method a request invokes; one enum per
+// API type (variants are its method names) plus a `Method` enum over all of
+// them. A tag is encoded at the start of every request buffer (see
+// `define_client_side!` in client.rs).
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+    use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+    macro_rules! declare_tags {
+        ($($name:ident {
+            $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+        }),* $(,)?) => {
+            $(
+                pub(super) enum $name {
+                    $($method),*
+                }
+                rpc_encode_decode!(enum $name { $($method),* });
+            )*
+
+
+            pub(super) enum Method {
+                $($name($name)),*
+            }
+            rpc_encode_decode!(enum Method { $($name(m)),* });
+        }
+    }
+    with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+    type Unmarked;
+    fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+    type Unmarked;
+    fn unmark(self) -> Self::Unmarked;
+}
+
+/// Zero-cost wrapper pairing a value with a phantom "marker" type `M`.
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+    value: T,
+    // Only the type is used for dispatch; no `M` value is ever stored.
+    _marker: marker::PhantomData<M>,
+}
+
+// Marking/unmarking a `Marked` is a plain wrap/unwrap, also lifted
+// through shared and exclusive references.
+impl<T, M> Mark for Marked<T, M> {
+    type Unmarked = T;
+    fn mark(unmarked: Self::Unmarked) -> Self {
+        Marked { value: unmarked, _marker: marker::PhantomData }
+    }
+}
+impl<T, M> Unmark for Marked<T, M> {
+    type Unmarked = T;
+    fn unmark(self) -> Self::Unmarked {
+        self.value
+    }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+    type Unmarked = &'a T;
+    fn unmark(self) -> Self::Unmarked {
+        &self.value
+    }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+    type Unmarked = &'a mut T;
+    fn unmark(self) -> Self::Unmarked {
+        &mut self.value
+    }
+}
+
+// (Un)marking lifts pointwise through `Option` and `Result`.
+impl<T: Mark> Mark for Option<T> {
+    type Unmarked = Option<T::Unmarked>;
+    fn mark(unmarked: Self::Unmarked) -> Self {
+        unmarked.map(T::mark)
+    }
+}
+impl<T: Unmark> Unmark for Option<T> {
+    type Unmarked = Option<T::Unmarked>;
+    fn unmark(self) -> Self::Unmarked {
+        self.map(T::unmark)
+    }
+}
+
+impl<T: Mark, E: Mark> Mark for Result<T, E> {
+    type Unmarked = Result<T::Unmarked, E::Unmarked>;
+    fn mark(unmarked: Self::Unmarked) -> Self {
+        unmarked.map(T::mark).map_err(E::mark)
+    }
+}
+impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
+    type Unmarked = Result<T::Unmarked, E::Unmarked>;
+    fn unmark(self) -> Self::Unmarked {
+        self.map(T::unmark).map_err(E::unmark)
+    }
+}
+
+// Primitive and std types cross the bridge as themselves, so marking and
+// unmarking them is the identity.
+macro_rules! mark_noop {
+    ($($ty:ty),* $(,)?) => {
+        $(
+            impl Mark for $ty {
+                type Unmarked = Self;
+                fn mark(unmarked: Self::Unmarked) -> Self {
+                    unmarked
+                }
+            }
+            impl Unmark for $ty {
+                type Unmarked = Self;
+                fn unmark(self) -> Self::Unmarked {
+                    self
+                }
+            }
+        )*
+    }
+}
+mark_noop! {
+    (),
+    bool,
+    char,
+    &'_ [u8],
+    &'_ str,
+    String,
+    usize,
+    Delimiter,
+    Level,
+    LineColumn,
+    Spacing,
+    Bound<usize>,
+}
+
+// Wire encodings for the plain data types shared between client and server.
+rpc_encode_decode!(
+    enum Delimiter {
+        Parenthesis,
+        Brace,
+        Bracket,
+        None,
+    }
+);
+rpc_encode_decode!(
+    enum Level {
+        Error,
+        Warning,
+        Note,
+        Help,
+    }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+    enum Spacing {
+        Alone,
+        Joint,
+    }
+);
+
+/// Token tree generic over the four payload types, so the same shape works
+/// for both the server's marked types and the client's handle types.
+#[derive(Clone)]
+pub enum TokenTree<G, P, I, L> {
+    Group(G),
+    Punct(P),
+    Ident(I),
+    Literal(L),
+}
+
+// Marking/unmarking maps over whichever payload variant is present.
+impl<G: Mark, P: Mark, I: Mark, L: Mark> Mark for TokenTree<G, P, I, L> {
+    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
+    fn mark(unmarked: Self::Unmarked) -> Self {
+        match unmarked {
+            TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)),
+            TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)),
+            TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)),
+            TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)),
+        }
+    }
+}
+impl<G: Unmark, P: Unmark, I: Unmark, L: Unmark> Unmark for TokenTree<G, P, I, L> {
+    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
+    fn unmark(self) -> Self::Unmarked {
+        match self {
+            TokenTree::Group(tt) => TokenTree::Group(tt.unmark()),
+            TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()),
+            TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()),
+            TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()),
+        }
+    }
+}
+
+rpc_encode_decode!(
+    enum TokenTree<G, P, I, L> {
+        Group(tt),
+        Punct(tt),
+        Ident(tt),
+        Literal(tt),
+    }
+);
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..d50564d01
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs
@@ -0,0 +1,305 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::ops::Bound;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer<u8>;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident { $($field:ident),* $(,)? }) => {
+ impl<S> Encode<S> for $name {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $name {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+rpc_encode_decode!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+rpc_encode_decode!(
+ enum Option<T> {
+ None,
+ Some(x),
+ }
+);
+
+rpc_encode_decode!(
+ enum Result<T, E> {
+ Ok(x),
+ Err(e),
+ }
+);
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..b0c2e5b9c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+ /// The old value will be restored after `f` exits, even
+ /// on panic, including any modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+ /// even if `f` had panicked.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..06a197913
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs
@@ -0,0 +1,352 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+/// Declare an associated item of one of the traits below, optionally
+/// adjusting it (i.e., adding bounds to types and default bodies to methods).
+macro_rules! associated_item {
+ (type FreeFunctions) =>
+ (type FreeFunctions: 'static;);
+ (type TokenStream) =>
+ (type TokenStream: 'static + Clone;);
+ (type TokenStreamBuilder) =>
+ (type TokenStreamBuilder: 'static;);
+ (type TokenStreamIter) =>
+ (type TokenStreamIter: 'static + Clone;);
+ (type Group) =>
+ (type Group: 'static + Clone;);
+ (type Punct) =>
+ (type Punct: 'static + Copy + Eq + Hash;);
+ (type Ident) =>
+ (type Ident: 'static + Copy + Eq + Hash;);
+ (type Literal) =>
+ (type Literal: 'static + Clone;);
+ (type SourceFile) =>
+ (type SourceFile: 'static + Clone;);
+ (type MultiSpan) =>
+ (type MultiSpan: 'static;);
+ (type Diagnostic) =>
+ (type Diagnostic: 'static;);
+ (type Span) =>
+ (type Span: 'static + Copy + Eq + Hash;);
+ (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+ (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+ (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+ (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+ ($($item:tt)*) => ($($item)*;)
+}
+
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ pub trait Types {
+ $(associated_item!(type $name);)*
+ }
+
+ $(pub trait $name: Types {
+ $(associated_item!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+with_api!(Self, self_, declare_server_traits);
+
+pub(super) struct MarkedTypes<S: Types>(S);
+
+macro_rules! define_mark_types_impls {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ impl<S: Types> Types for MarkedTypes<S> {
+ $(type $name = Marked<S::$name, client::$name>;)*
+ }
+
+ $(impl<S: $name> $name for MarkedTypes<S> {
+ $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+ <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+ })*
+ })*
+ }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+struct Dispatcher<S: Types> {
+ handle_store: HandleStore<S>,
+ server: S,
+}
+
+macro_rules! define_dispatcher_impl {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+ pub trait DispatcherTrait {
+ // HACK(eddyb) these are here to allow `Self::$name` to work below.
+ $(type $name;)*
+ fn dispatch(&mut self, b: Buffer<u8>) -> Buffer<u8>;
+ }
+
+ impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+ $(type $name = <MarkedTypes<S> as Types>::$name;)*
+ fn dispatch(&mut self, mut b: Buffer<u8>) -> Buffer<u8> {
+ let Dispatcher { handle_store, server } = self;
+
+ let mut reader = &b[..];
+ match api_tags::Method::decode(&mut reader, &mut ()) {
+ $(api_tags::Method::$name(m) => match m {
+ $(api_tags::$name::$method => {
+ let mut call_method = || {
+ reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+ $name::$method(server, $($arg),*)
+ };
+ // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+ // If client and server happen to use the same `libstd`,
+ // `catch_unwind` asserts that the panic counter was 0,
+ // even when the closure passed to it didn't panic.
+ let r = if thread::panicking() {
+ Ok(call_method())
+ } else {
+ panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+ .map_err(PanicMessage::from)
+ };
+
+ b.clear();
+ r.encode(&mut b, handle_store);
+ })*
+ }),*
+ }
+ b
+ }
+ }
+ }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+pub trait ExecutionStrategy {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8>;
+}
+
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ let mut dispatch = |b| dispatcher.dispatch(b);
+
+ run_client(
+ Bridge { cached_buffer: input, dispatch: (&mut dispatch).into(), force_show_panics },
+ client_data,
+ )
+ }
+}
+
+// NOTE(eddyb) Two implementations are provided, the second one is a bit
+// faster but neither is anywhere near as fast as same-thread execution.
+
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ use std::sync::mpsc::channel;
+
+ let (req_tx, req_rx) = channel();
+ let (res_tx, res_rx) = channel();
+
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ req_tx.send(b).unwrap();
+ res_rx.recv().unwrap()
+ };
+
+ run_client(
+ Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ },
+ client_data,
+ )
+ });
+
+ for b in req_rx {
+ res_tx.send(dispatcher.dispatch(b)).unwrap();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ use std::sync::{Arc, Mutex};
+
+ enum State<T> {
+ Req(T),
+ Res(T),
+ }
+
+ let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+ let server_thread = thread::current();
+ let state2 = state.clone();
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ *state2.lock().unwrap() = State::Req(b);
+ server_thread.unpark();
+ loop {
+ thread::park();
+ if let State::Res(b) = &mut *state2.lock().unwrap() {
+ break b.take();
+ }
+ }
+ };
+
+ let r = run_client(
+ Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ },
+ client_data,
+ );
+
+ // Wake up the server so it can exit the dispatch loop.
+ drop(state2);
+ server_thread.unpark();
+
+ r
+ });
+
+ // Check whether `state2` was dropped, to know when to stop.
+ while Arc::get_mut(&mut state).is_none() {
+ thread::park();
+ let mut b = match &mut *state.lock().unwrap() {
+ State::Req(b) => b.take(),
+ _ => continue,
+ };
+ b = dispatcher.dispatch(b.take());
+ *state.lock().unwrap() = State::Res(b);
+ join_handle.thread().unpark();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+fn run_server<
+ S: Server,
+ I: Encode<HandleStore<MarkedTypes<S>>>,
+ O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+ D: Copy + Send + 'static,
+>(
+ strategy: &impl ExecutionStrategy,
+ handle_counters: &'static client::HandleCounters,
+ server: S,
+ input: I,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+ let mut dispatcher =
+ Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+ let mut b = Buffer::new();
+ input.encode(&mut b, &mut dispatcher.handle_store);
+
+ b = strategy.run_bridge_and_client(
+ &mut dispatcher,
+ b,
+ run_client,
+ client_data,
+ force_show_panics,
+ );
+
+ Result::decode(&mut &b[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<fn(super::super::TokenStream) -> super::super::TokenStream> {
+ pub fn run<S: Server>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage> {
+ let client::Client { get_handle_counters, run, f } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ run,
+ f,
+ force_show_panics,
+ )
+ .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+ }
+}
+
+impl
+ client::Client<
+ fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+ >
+{
+ pub fn run<S: Server>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ input2: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage> {
+ let client::Client { get_handle_counters, run, f } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ (
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+ ),
+ run,
+ f,
+ force_show_panics,
+ )
+ .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..cda239f87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copy from <https://github.com/rust-lang/rust/blob/6050e523bae6de61de4e060facc43dc512adaccd/src/libproc_macro/diagnostic.rs>
+//! modified to remove unstable features.
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+ /// An error.
+ Error,
+ /// A warning.
+ Warning,
+ /// A note.
+ Note,
+ /// A help message.
+ Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+ /// Converts `self` into a `Vec<Span>`.
+ fn into_spans(self) -> Vec<Span>;
+}
+
+impl MultiSpan for Span {
+ fn into_spans(self) -> Vec<Span> {
+ vec![self]
+ }
+}
+
+impl MultiSpan for Vec<Span> {
+ fn into_spans(self) -> Vec<Span> {
+ self
+ }
+}
+
+impl<'a> MultiSpan for &'a [Span] {
+ fn into_spans(self) -> Vec<Span> {
+ self.to_vec()
+ }
+}
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+macro_rules! diagnostic_child_methods {
+ ($spanned:ident, $regular:ident, $level:expr) => {
+ #[doc = concat!("Adds a new child diagnostics message to `self` with the [`",
+ stringify!($level), "`] level, and the given `spans` and `message`.")]
+ pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ self.children.push(Diagnostic::spanned(spans, $level, message));
+ self
+ }
+
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `message`.")]
+ pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+ self.children.push(Diagnostic::new($level, message));
+ self
+ }
+ };
+}
+
+/// Iterator over the children diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+ type Item = &'a Diagnostic;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
+ /// the given set of `spans`.
+ pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+ }
+
+ diagnostic_child_methods!(span_error, error, Level::Error);
+ diagnostic_child_methods!(span_warning, warning, Level::Warning);
+ diagnostic_child_methods!(span_note, note, Level::Note);
+ diagnostic_child_methods!(span_help, help, Level::Help);
+
+ /// Returns the diagnostic `level` for `self`.
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Sets the level in `self` to `level`.
+ pub fn set_level(&mut self, level: Level) {
+ self.level = level;
+ }
+
+ /// Returns the message in `self`.
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// Sets the message in `self` to `message`.
+ pub fn set_message<T: Into<String>>(&mut self, message: T) {
+ self.message = message.into();
+ }
+
+ /// Returns the `Span`s in `self`.
+ pub fn spans(&self) -> &[Span] {
+ &self.spans
+ }
+
+ /// Sets the `Span`s in `self` to `spans`.
+ pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+ self.spans = spans.into_spans();
+ }
+
+ /// Returns an iterator over the children diagnostics of `self`.
+ pub fn children(&self) -> Children<'_> {
+ Children(self.children.iter())
+ }
+
+ /// Emit the diagnostic.
+ pub fn emit(self) {
+ fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+ let mut multi_span = super::bridge::client::MultiSpan::new();
+ for span in spans {
+ multi_span.push(span.0);
+ }
+ multi_span
+ }
+
+ let mut diag = super::bridge::client::Diagnostic::new(
+ self.level,
+ &self.message[..],
+ to_internal(self.spans),
+ );
+ for c in self.children {
+ diag.sub(c.level, &c.message[..], to_internal(c.spans));
+ }
+ diag.emit();
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs
new file mode 100644
index 000000000..4a07f2277
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs
@@ -0,0 +1,1056 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
/// Determines whether proc_macro has been made accessible to the currently
/// running program.
///
/// The proc_macro crate is only intended for use inside the implementation of
/// procedural macros. All the functions in this crate panic if invoked from
/// outside of a procedural macro, such as from a build script or unit test or
/// ordinary Rust binary.
///
/// With consideration for Rust libraries that are designed to support both
/// macro and non-macro use cases, `proc_macro::is_available()` provides a
/// non-panicking way to detect whether the infrastructure required to use the
/// API of proc_macro is presently available. Returns true if invoked from
/// inside of a procedural macro, false if invoked from any other binary.
pub fn is_available() -> bool {
    // Delegates to the bridge, which tracks whether a proc-macro context is attached.
    bridge::Bridge::is_available()
}
+
/// The main type provided by this crate, representing an abstract stream of
/// tokens, or, more specifically, a sequence of token trees.
/// The type provides interfaces for iterating over those token trees and, conversely,
/// collecting a number of token trees into one stream.
///
/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
/// and `#[proc_macro_derive]` definitions.
// Thin newtype over the bridge-side token stream handle.
#[derive(Clone)]
pub struct TokenStream(bridge::client::TokenStream);
+
/// Error returned from `TokenStream::from_str`.
///
/// A unit struct: lexing failures carry no further detail.
#[non_exhaustive]
#[derive(Debug)]
pub struct LexError;

impl fmt::Display for LexError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "cannot parse string into token stream")
    }
}

impl error::Error for LexError {}
+
/// Error returned from `TokenStream::expand_expr`.
///
/// A unit struct: expansion failures carry no further detail.
#[non_exhaustive]
#[derive(Debug)]
pub struct ExpandError;

impl fmt::Display for ExpandError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "macro expansion failed")
    }
}

impl error::Error for ExpandError {}
+
+impl TokenStream {
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> TokenStream {
+ TokenStream(bridge::client::TokenStream::new())
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+
+ /// Parses this `TokenStream` as an expression and attempts to expand any
+ /// macros within it. Returns the expanded `TokenStream`.
+ ///
+ /// Currently only expressions expanding to literals will succeed, although
+ /// this may be relaxed in the future.
+ ///
+ /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+ /// report an error, failing compilation, and/or return an `Err(..)`. The
+ /// specific behavior for any error condition, and what conditions are
+ /// considered errors, is unspecified and may change in the future.
+ pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+ match bridge::client::TokenStream::expand_expr(&self.0) {
+ Ok(stream) => Ok(TokenStream(stream)),
+ Err(_) => Err(ExpandError),
+ }
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters not existing in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ Ok(TokenStream(bridge::client::TokenStream::from_str(src)))
+ }
+}
+
/// Prints the token stream as a string that is supposed to be losslessly convertible back
/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters and negative numeric literals.
impl fmt::Display for TokenStream {
    // NOTE(review): `self.to_string()` resolves to the `ToString` blanket impl,
    // which calls back into this very `Display::fmt` — apparent infinite
    // recursion unless an inherent `TokenStream::to_string` is defined
    // elsewhere in this file. TODO: confirm against the full file.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.to_string())
    }
}

/// Prints token in a form convenient for debugging.
impl fmt::Debug for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Renders as `TokenStream [tree, tree, ...]` by iterating a clone.
        f.write_str("TokenStream ")?;
        f.debug_list().entries(self.clone()).finish()
    }
}
+
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+pub use quote::{quote, quote_span};
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream(bridge::client::TokenStream::from_token_tree(match tree {
+ TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+ TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+ TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+ TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+ }))
+ }
+}
+
/// Collects a number of token trees into a single stream.
impl iter::FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
        // Each tree becomes a single-token stream; the streams are then
        // concatenated via `FromIterator<TokenStream>`.
        trees.into_iter().map(TokenStream::from).collect()
    }
}

/// A "flattening" operation on token streams, collects token trees
/// from multiple token streams into a single stream.
impl iter::FromIterator<TokenStream> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
        // The bridge builder concatenates the raw streams in order.
        let mut builder = bridge::client::TokenStreamBuilder::new();
        streams.into_iter().for_each(|stream| builder.push(stream.0));
        TokenStream(builder.build())
    }
}

impl Extend<TokenTree> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
        // Delegates to `Extend<TokenStream>` by wrapping each tree in a stream.
        self.extend(trees.into_iter().map(TokenStream::from));
    }
}

impl Extend<TokenStream> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
        // FIXME(eddyb) Use an optimized implementation if/when possible.
        // Rebuilds the stream: the current contents are moved out via
        // `mem::replace`, chained with the new streams, and re-collected.
        *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
    }
}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter(bridge::client::TokenStreamIter);
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.0.next().map(|tree| match tree {
+ bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+ bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+ bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+ bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+ })
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter(self.0.into_iter())
+ }
+ }
+}
+
+/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
+/// For example, `quote!(a + b)` will produce an expression, that, when evaluated, constructs
+/// the `TokenStream` `[Ident("a"), Punct('+', Alone), Ident("b")]`.
+///
+/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
+/// To quote `$` itself, use `$$`.
+//pub macro quote($($t:tt)*) {
+//[> compiler built-in <]
+//}
+
+#[doc(hidden)]
+mod quote;
+
/// A region of source code, along with macro expansion information.
// Thin `Copy` wrapper over the bridge-side span handle.
#[derive(Copy, Clone)]
pub struct Span(bridge::client::Span);

// Generates a `Span` convenience method (e.g. `span.error("msg")`) that builds
// a `Diagnostic` at the given level, spanned to `self`.
macro_rules! diagnostic_method {
    ($name:ident, $level:expr) => {
        /// Creates a new `Diagnostic` with the given `message` at the span
        /// `self`.
        pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
            Diagnostic::spanned(self, $level, message)
        }
    };
}
+
impl Span {
    /// A span that resolves at the macro definition site.
    pub fn def_site() -> Span {
        Span(bridge::client::Span::def_site())
    }

    /// The span of the invocation of the current procedural macro.
    /// Identifiers created with this span will be resolved as if they were written
    /// directly at the macro call location (call-site hygiene) and other code
    /// at the macro call site will be able to refer to them as well.
    pub fn call_site() -> Span {
        Span(bridge::client::Span::call_site())
    }

    /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
    /// definition site (local variables, labels, `$crate`) and sometimes at the macro
    /// call site (everything else).
    /// The span location is taken from the call-site.
    pub fn mixed_site() -> Span {
        Span(bridge::client::Span::mixed_site())
    }

    /// The original source file into which this span points.
    pub fn source_file(&self) -> SourceFile {
        SourceFile(self.0.source_file())
    }

    /// The `Span` for the tokens in the previous macro expansion from which
    /// `self` was generated from, if any.
    pub fn parent(&self) -> Option<Span> {
        self.0.parent().map(Span)
    }

    /// The span for the origin source code that `self` was generated from. If
    /// this `Span` wasn't generated from other macro expansions then the return
    /// value is the same as `*self`.
    pub fn source(&self) -> Span {
        Span(self.0.source())
    }

    /// Gets the starting line/column in the source file for this span.
    // NOTE(review): the `add_1_to_column` suggests the bridge reports
    // 0-indexed columns while `LineColumn` documents 1-indexed ones — confirm
    // against the bridge implementation.
    pub fn start(&self) -> LineColumn {
        self.0.start().add_1_to_column()
    }

    /// Gets the ending line/column in the source file for this span.
    pub fn end(&self) -> LineColumn {
        self.0.end().add_1_to_column()
    }

    /// Creates an empty span pointing to directly before this span.
    pub fn before(&self) -> Span {
        Span(self.0.before())
    }

    /// Creates an empty span pointing to directly after this span.
    pub fn after(&self) -> Span {
        Span(self.0.after())
    }

    /// Creates a new span encompassing `self` and `other`.
    ///
    /// Returns `None` if `self` and `other` are from different files.
    pub fn join(&self, other: Span) -> Option<Span> {
        self.0.join(other.0).map(Span)
    }

    /// Creates a new span with the same line/column information as `self` but
    /// that resolves symbols as though it were at `other`.
    pub fn resolved_at(&self, other: Span) -> Span {
        Span(self.0.resolved_at(other.0))
    }

    /// Creates a new span with the same name resolution behavior as `self` but
    /// with the line/column information of `other`.
    pub fn located_at(&self, other: Span) -> Span {
        // `located_at` is `resolved_at` with receiver and argument swapped.
        other.resolved_at(*self)
    }

    /// Compares two spans to see if they're equal.
    pub fn eq(&self, other: &Span) -> bool {
        self.0 == other.0
    }

    /// Returns the source text behind a span. This preserves the original source
    /// code, including spaces and comments. It only returns a result if the span
    /// corresponds to real source code.
    ///
    /// Note: The observable result of a macro should only rely on the tokens and
    /// not on this source text. The result of this function is a best effort to
    /// be used for diagnostics only.
    pub fn source_text(&self) -> Option<String> {
        self.0.source_text()
    }

    // Used by the implementation of `Span::quote`
    #[doc(hidden)]
    pub fn save_span(&self) -> usize {
        self.0.save_span()
    }

    // Used by the implementation of `Span::quote`
    #[doc(hidden)]
    pub fn recover_proc_macro_span(id: usize) -> Span {
        Span(bridge::client::Span::recover_proc_macro_span(id))
    }

    // Convenience constructors for `Diagnostic`s spanned at `self`.
    diagnostic_method!(error, Level::Error);
    diagnostic_method!(warning, Level::Warning);
    diagnostic_method!(note, Level::Note);
    diagnostic_method!(help, Level::Help);
}
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
/// A line-column pair representing the start or end of a `Span`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LineColumn {
    /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
    pub line: usize,
    /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
    /// file on which the span starts or ends (inclusive).
    pub column: usize,
}

impl LineColumn {
    /// Returns a copy with the column incremented by one; used by
    /// `Span::start`/`Span::end` when translating bridge positions.
    fn add_1_to_column(self) -> Self {
        LineColumn { column: self.column + 1, ..self }
    }
}

/// Orders positions lexicographically: first by line, then by column.
impl Ord for LineColumn {
    fn cmp(&self, other: &Self) -> Ordering {
        (self.line, self.column).cmp(&(other.line, other.column))
    }
}

impl PartialOrd for LineColumn {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
+
/// The source file of a given `Span`.
#[derive(Clone)]
pub struct SourceFile(bridge::client::SourceFile);

impl SourceFile {
    /// Gets the path to this source file.
    ///
    /// ### Note
    /// If the code span associated with this `SourceFile` was generated by an external
    /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check.
    ///
    /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
    /// the command line, the path as given might not actually be valid.
    ///
    /// [`is_real`]: Self::is_real
    pub fn path(&self) -> PathBuf {
        PathBuf::from(self.0.path())
    }

    /// Returns `true` if this source file is a real source file, and not generated by an external
    /// macro's expansion.
    pub fn is_real(&self) -> bool {
        // This is a hack until intercrate spans are implemented and we can have real source files
        // for spans generated in external macros.
        // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
        self.0.is_real()
    }
}

impl fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SourceFile")
            .field("path", &self.path())
            .field("is_real", &self.is_real())
            .finish()
    }
}

/// Two `SourceFile`s compare equal when their bridge-side handles do.
impl PartialEq for SourceFile {
    fn eq(&self, other: &Self) -> bool {
        self.0.eq(&other.0)
    }
}

impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Ident(ref t) => t.span(),
+ TokenTree::Punct(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Ident(ref mut t) => t.set_span(span),
+ TokenTree::Punct(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+/// Prints token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Ident(ref tt) => tt.fmt(f),
+ TokenTree::Punct(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> TokenTree {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> TokenTree {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
/// Prints the token tree as a string that is supposed to be losslessly convertible back
/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters and negative numeric literals.
impl fmt::Display for TokenTree {
    // NOTE(review): `self.to_string()` resolves to the `ToString` blanket impl,
    // which calls back into this `Display::fmt` — apparent infinite recursion
    // unless an inherent `TokenTree::to_string` exists elsewhere. TODO: confirm.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.to_string())
    }
}
+
/// A delimited token stream.
///
/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
// Thin wrapper over the bridge-side group handle.
#[derive(Clone)]
pub struct Group(bridge::client::Group);

/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Delimiter {
    /// `( ... )`
    Parenthesis,
    /// `{ ... }`
    Brace,
    /// `[ ... ]`
    Bracket,
    /// `Ø ... Ø`
    /// An implicit delimiter, that may, for example, appear around tokens coming from a
    /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
    /// `$var * 3` where `$var` is `1 + 2`.
    /// Implicit delimiters might not survive roundtrip of a token stream through a string.
    None,
}
+
// All methods delegate to the bridge-side `Group` handle.
impl Group {
    /// Creates a new `Group` with the given delimiter and token stream.
    ///
    /// This constructor will set the span for this group to
    /// `Span::call_site()`. To change the span you can use the `set_span`
    /// method below.
    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
        Group(bridge::client::Group::new(delimiter, stream.0))
    }

    /// Returns the delimiter of this `Group`
    pub fn delimiter(&self) -> Delimiter {
        self.0.delimiter()
    }

    /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
    ///
    /// Note that the returned token stream does not include the delimiter
    /// returned above.
    pub fn stream(&self) -> TokenStream {
        TokenStream(self.0.stream())
    }

    /// Returns the span for the delimiters of this token stream, spanning the
    /// entire `Group`.
    ///
    /// ```text
    /// pub fn span(&self) -> Span {
    ///            ^^^^^^^
    /// ```
    pub fn span(&self) -> Span {
        Span(self.0.span())
    }

    /// Returns the span pointing to the opening delimiter of this group.
    ///
    /// ```text
    /// pub fn span_open(&self) -> Span {
    ///                 ^
    /// ```
    pub fn span_open(&self) -> Span {
        Span(self.0.span_open())
    }

    /// Returns the span pointing to the closing delimiter of this group.
    ///
    /// ```text
    /// pub fn span_close(&self) -> Span {
    ///                        ^
    /// ```
    pub fn span_close(&self) -> Span {
        Span(self.0.span_close())
    }

    /// Configures the span for this `Group`'s delimiters, but not its internal
    /// tokens.
    ///
    /// This method will **not** set the span of all the internal tokens spanned
    /// by this group, but rather it will only set the span of the delimiter
    /// tokens at the level of the `Group`.
    pub fn set_span(&mut self, span: Span) {
        self.0.set_span(span.0);
    }
}
+
/// Prints the group as a string that should be losslessly convertible back
/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
impl fmt::Display for Group {
    // NOTE(review): `self.to_string()` goes through the `ToString` blanket
    // impl, which calls this very `fmt` — apparent infinite recursion unless
    // an inherent `Group::to_string` exists elsewhere. TODO: confirm.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.to_string())
    }
}

impl fmt::Debug for Group {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Group")
            .field("delimiter", &self.delimiter())
            .field("stream", &self.stream())
            .field("span", &self.span())
            .finish()
    }
}
+
/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
///
/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
/// forms of `Spacing` returned.
// Thin wrapper over the bridge-side punct handle.
#[derive(Clone)]
pub struct Punct(bridge::client::Punct);

/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
/// by a different token or whitespace ([`Spacing::Alone`]).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Spacing {
    /// A `Punct` is not immediately followed by another `Punct`.
    /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
    Alone,
    /// A `Punct` is immediately followed by another `Punct`.
    /// E.g. `+` is `Joint` in `+=` and `++`.
    ///
    /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
    Joint,
}
+
impl Punct {
    /// Creates a new `Punct` from the given character and spacing.
    /// The `ch` argument must be a valid punctuation character permitted by the language,
    /// otherwise the function will panic.
    ///
    /// The returned `Punct` will have the default span of `Span::call_site()`
    /// which can be further configured with the `set_span` method below.
    pub fn new(ch: char, spacing: Spacing) -> Punct {
        Punct(bridge::client::Punct::new(ch, spacing))
    }

    /// Returns the value of this punctuation character as `char`.
    pub fn as_char(&self) -> char {
        self.0.as_char()
    }

    /// Returns the spacing of this punctuation character, indicating whether it's immediately
    /// followed by another `Punct` in the token stream, so they can potentially be combined into
    /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
    /// (`Alone`) so the operator has certainly ended.
    pub fn spacing(&self) -> Spacing {
        self.0.spacing()
    }

    /// Returns the span for this punctuation character.
    pub fn span(&self) -> Span {
        Span(self.0.span())
    }

    /// Configure the span for this punctuation character.
    pub fn set_span(&mut self, span: Span) {
        // Replaces the inner bridge punct with a copy carrying the new span.
        self.0 = self.0.with_span(span.0);
    }
}
+
/// Prints the punctuation character as a string that should be losslessly convertible
/// back into the same character.
impl fmt::Display for Punct {
    // NOTE(review): `self.to_string()` resolves to the `ToString` blanket impl,
    // which calls back into this `Display::fmt` — apparent infinite recursion
    // unless an inherent `Punct::to_string` exists elsewhere. TODO: confirm.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.to_string())
    }
}

impl fmt::Debug for Punct {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Punct")
            .field("ch", &self.as_char())
            .field("spacing", &self.spacing())
            .field("span", &self.span())
            .finish()
    }
}
+
+impl PartialEq<char> for Punct {
+ fn eq(&self, rhs: &char) -> bool {
+ self.as_char() == *rhs
+ }
+}
+
+impl PartialEq<Punct> for char {
+ fn eq(&self, rhs: &Punct) -> bool {
+ *self == rhs.as_char()
+ }
+}
+
/// An identifier (`ident`).
// Thin wrapper over the bridge-side ident handle.
#[derive(Clone)]
pub struct Ident(bridge::client::Ident);

impl Ident {
    /// Creates a new `Ident` with the given `string` as well as the specified
    /// `span`.
    /// The `string` argument must be a valid identifier permitted by the
    /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
    ///
    /// Note that `span`, currently in rustc, configures the hygiene information
    /// for this identifier.
    ///
    /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
    /// meaning that identifiers created with this span will be resolved as if they were written
    /// directly at the location of the macro call, and other code at the macro call site will be
    /// able to refer to them as well.
    ///
    /// Later spans like `Span::def_site()` will allow to opt-in to "definition-site" hygiene
    /// meaning that identifiers created with this span will be resolved at the location of the
    /// macro definition and other code at the macro call site will not be able to refer to them.
    ///
    /// Due to the current importance of hygiene this constructor, unlike other
    /// tokens, requires a `Span` to be specified at construction.
    pub fn new(string: &str, span: Span) -> Ident {
        // `false`: not a raw identifier.
        Ident(bridge::client::Ident::new(string, span.0, false))
    }

    /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
    /// The `string` argument must be a valid identifier permitted by the language
    /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
    /// (e.g. `self`, `super`) are not supported, and will cause a panic.
    pub fn new_raw(string: &str, span: Span) -> Ident {
        Ident(bridge::client::Ident::new(string, span.0, true))
    }

    /// Returns the span of this `Ident`, encompassing the entire string returned
    /// by [`to_string`](Self::to_string).
    pub fn span(&self) -> Span {
        Span(self.0.span())
    }

    /// Configures the span of this `Ident`, possibly changing its hygiene context.
    pub fn set_span(&mut self, span: Span) {
        // Replaces the inner bridge ident with a copy carrying the new span.
        self.0 = self.0.with_span(span.0);
    }
}
+
/// Prints the identifier as a string that should be losslessly convertible
/// back into the same identifier.
impl fmt::Display for Ident {
    // NOTE(review): `self.to_string()` resolves to the `ToString` blanket impl,
    // which calls back into this `Display::fmt` — apparent infinite recursion
    // unless an inherent `Ident::to_string` exists elsewhere. TODO: confirm.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.to_string())
    }
}

impl fmt::Debug for Ident {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Ident")
            .field("ident", &self.to_string())
            .field("span", &self.span())
            .finish()
    }
}
+
/// A literal string (`"hello"`), byte string (`b"hello"`),
/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
// Thin wrapper over the bridge-side literal handle.
#[derive(Clone)]
pub struct Literal(bridge::client::Literal);

// Generates one suffixed integer constructor (e.g. `u8_suffixed`) per entry.
macro_rules! suffixed_int_literals {
    ($($name:ident => $kind:ident,)*) => ($(
        /// Creates a new suffixed integer literal with the specified value.
        ///
        /// This function will create an integer like `1u32` where the integer
        /// value specified is the first part of the token and the integral is
        /// also suffixed at the end.
        /// Literals created from negative numbers might not survive round-trips through
        /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
        ///
        /// Literals created through this method have the `Span::call_site()`
        /// span by default, which can be configured with the `set_span` method
        /// below.
        pub fn $name(n: $kind) -> Literal {
            Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
        }
    )*)
}

// Generates one unsuffixed integer constructor (e.g. `u8_unsuffixed`) per entry.
macro_rules! unsuffixed_int_literals {
    ($($name:ident => $kind:ident,)*) => ($(
        /// Creates a new unsuffixed integer literal with the specified value.
        ///
        /// This function will create an integer like `1` where the integer
        /// value specified is the first part of the token. No suffix is
        /// specified on this token, meaning that invocations like
        /// `Literal::i8_unsuffixed(1)` are equivalent to
        /// `Literal::u32_unsuffixed(1)`.
        /// Literals created from negative numbers might not survive round-trips through
        /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
        ///
        /// Literals created through this method have the `Span::call_site()`
        /// span by default, which can be configured with the `set_span` method
        /// below.
        pub fn $name(n: $kind) -> Literal {
            Literal(bridge::client::Literal::integer(&n.to_string()))
        }
    )*)
}
+
impl Literal {
    suffixed_int_literals! {
        u8_suffixed => u8,
        u16_suffixed => u16,
        u32_suffixed => u32,
        u64_suffixed => u64,
        u128_suffixed => u128,
        usize_suffixed => usize,
        i8_suffixed => i8,
        i16_suffixed => i16,
        i32_suffixed => i32,
        i64_suffixed => i64,
        i128_suffixed => i128,
        isize_suffixed => isize,
    }

    unsuffixed_int_literals! {
        u8_unsuffixed => u8,
        u16_unsuffixed => u16,
        u32_unsuffixed => u32,
        u64_unsuffixed => u64,
        u128_unsuffixed => u128,
        usize_unsuffixed => usize,
        i8_unsuffixed => i8,
        i16_unsuffixed => i16,
        i32_unsuffixed => i32,
        i64_unsuffixed => i64,
        i128_unsuffixed => i128,
        isize_unsuffixed => isize,
    }

    /// Creates a new unsuffixed floating-point literal.
    ///
    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
    /// the float's value is emitted directly into the token but no suffix is
    /// used, so it may be inferred to be a `f64` later in the compiler.
    /// Literals created from negative numbers might not survive round-trips through
    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
    ///
    /// # Panics
    ///
    /// This function requires that the specified float is finite, for
    /// example if it is infinity or NaN this function will panic.
    pub fn f32_unsuffixed(n: f32) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        let mut repr = n.to_string();
        // Ensure the token reads as a float, not an integer (e.g. `1` -> `1.0`).
        if !repr.contains('.') {
            repr.push_str(".0");
        }
        Literal(bridge::client::Literal::float(&repr))
    }

    /// Creates a new suffixed floating-point literal.
    ///
    /// This constructor will create a literal like `1.0f32` where the value
    /// specified is the preceding part of the token and `f32` is the suffix of
    /// the token. This token will always be inferred to be an `f32` in the
    /// compiler.
    /// Literals created from negative numbers might not survive round-trips through
    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
    ///
    /// # Panics
    ///
    /// This function requires that the specified float is finite, for
    /// example if it is infinity or NaN this function will panic.
    pub fn f32_suffixed(n: f32) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal(bridge::client::Literal::f32(&n.to_string()))
    }

    /// Creates a new unsuffixed floating-point literal.
    ///
    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
    /// the float's value is emitted directly into the token but no suffix is
    /// used, so it may be inferred to be a `f64` later in the compiler.
    /// Literals created from negative numbers might not survive round-trips through
    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
    ///
    /// # Panics
    ///
    /// This function requires that the specified float is finite, for
    /// example if it is infinity or NaN this function will panic.
    pub fn f64_unsuffixed(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        let mut repr = n.to_string();
        // Ensure the token reads as a float, not an integer (e.g. `1` -> `1.0`).
        if !repr.contains('.') {
            repr.push_str(".0");
        }
        Literal(bridge::client::Literal::float(&repr))
    }

    /// Creates a new suffixed floating-point literal.
    ///
    /// This constructor will create a literal like `1.0f64` where the value
    /// specified is the preceding part of the token and `f64` is the suffix of
    /// the token. This token will always be inferred to be an `f64` in the
    /// compiler.
    /// Literals created from negative numbers might not survive round-trips through
    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
    ///
    /// # Panics
    ///
    /// This function requires that the specified float is finite, for
    /// example if it is infinity or NaN this function will panic.
    pub fn f64_suffixed(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal(bridge::client::Literal::f64(&n.to_string()))
    }

    /// String literal.
    pub fn string(string: &str) -> Literal {
        Literal(bridge::client::Literal::string(string))
    }

    /// Character literal.
    pub fn character(ch: char) -> Literal {
        Literal(bridge::client::Literal::character(ch))
    }

    /// Byte string literal.
    pub fn byte_string(bytes: &[u8]) -> Literal {
        Literal(bridge::client::Literal::byte_string(bytes))
    }

    /// Returns the span encompassing this literal.
    pub fn span(&self) -> Span {
        Span(self.0.span())
    }

    /// Configures the span associated for this literal.
    pub fn set_span(&mut self, span: Span) {
        self.0.set_span(span.0);
    }

    /// Returns a `Span` that is a subset of `self.span()` containing only the
    /// source bytes in range `range`. Returns `None` if the would-be trimmed
    /// span is outside the bounds of `self`.
    // FIXME(SergioBenitez): check that the byte range starts and ends at a
    // UTF-8 boundary of the source. otherwise, it's likely that a panic will
    // occur elsewhere when the source text is printed.
    // FIXME(SergioBenitez): there is no way for the user to know what
    // `self.span()` actually maps to, so this method can currently only be
    // called blindly. For example, `to_string()` for the character 'c' returns
    // "'\u{63}'"; there is no way for the user to know whether the source text
    // was 'c' or whether it was '\u{63}'.
    pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
        self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
    }
}
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<Self, LexError> {
+ match bridge::client::Literal::from_str(src) {
+ Ok(literal) => Ok(Literal(literal)),
+ Err(()) => Err(LexError),
+ }
+ }
+}
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+ use std::env::{self, VarError};
+ use std::ffi::OsStr;
+
+ /// Retrieve an environment variable and add it to build dependency info.
+ /// Build system executing the compiler will know that the variable was accessed during
+ /// compilation, and will be able to rerun the build when the value of that variable changes.
+ /// Besides the dependency tracking this function should be equivalent to `env::var` from the
+ /// standard library, except that the argument must be UTF-8.
+ pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+ let key: &str = key.as_ref();
+ let value = env::var(key);
+ super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
+ value
+ }
+}
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+ /// Track a file explicitly.
+ ///
+ /// Commonly used for tracking asset preprocessing.
+ pub fn path<P: AsRef<str>>(path: P) {
+ let path: &str = path.as_ref();
+ super::bridge::client::FreeFunctions::track_path(path);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs
new file mode 100644
index 000000000..b539ab9c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs
@@ -0,0 +1,140 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+macro_rules! quote_tt {
+ (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+ ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+ (,) => { Punct::new(',', Spacing::Alone) };
+ (.) => { Punct::new('.', Spacing::Alone) };
+ (:) => { Punct::new(':', Spacing::Alone) };
+ (;) => { Punct::new(';', Spacing::Alone) };
+ (!) => { Punct::new('!', Spacing::Alone) };
+ (<) => { Punct::new('<', Spacing::Alone) };
+ (>) => { Punct::new('>', Spacing::Alone) };
+ (&) => { Punct::new('&', Spacing::Alone) };
+ (=) => { Punct::new('=', Spacing::Alone) };
+ ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+macro_rules! quote_ts {
+ ((@ $($t:tt)*)) => { $($t)* };
+ (::) => {
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)),
+ TokenTree::from(Punct::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
+ };
+ ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
+macro_rules! quote {
+ () => { TokenStream::new() };
+ ($($t:tt)*) => {
+ [
+ $(TokenStream::from(quote_ts!($t)),)*
+ ].iter().cloned().collect::<TokenStream>()
+ };
+}
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+ if stream.is_empty() {
+ return quote!(crate::TokenStream::new());
+ }
+ let proc_macro_crate = quote!(crate);
+ let mut after_dollar = false;
+ let tokens = stream
+ .into_iter()
+ .filter_map(|tree| {
+ if after_dollar {
+ after_dollar = false;
+ match tree {
+ TokenTree::Ident(_) => {
+ return Some(quote!(Into::<crate::TokenStream>::into(
+ Clone::clone(&(@ tree))),));
+ }
+ TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+ _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ }
+ } else if let TokenTree::Punct(ref tt) = tree {
+ if tt.as_char() == '$' {
+ after_dollar = true;
+ return None;
+ }
+ }
+
+ Some(quote!(crate::TokenStream::from((@ match tree {
+ TokenTree::Punct(tt) => quote!(crate::TokenTree::Punct(crate::Punct::new(
+ (@ TokenTree::from(Literal::character(tt.as_char()))),
+ (@ match tt.spacing() {
+ Spacing::Alone => quote!(crate::Spacing::Alone),
+ Spacing::Joint => quote!(crate::Spacing::Joint),
+ }),
+ ))),
+ TokenTree::Group(tt) => quote!(crate::TokenTree::Group(crate::Group::new(
+ (@ match tt.delimiter() {
+ Delimiter::Parenthesis => quote!(crate::Delimiter::Parenthesis),
+ Delimiter::Brace => quote!(crate::Delimiter::Brace),
+ Delimiter::Bracket => quote!(crate::Delimiter::Bracket),
+ Delimiter::None => quote!(crate::Delimiter::None),
+ }),
+ (@ quote(tt.stream())),
+ ))),
+ TokenTree::Ident(tt) => quote!(crate::TokenTree::Ident(crate::Ident::new(
+ (@ TokenTree::from(Literal::string(&tt.to_string()))),
+ (@ quote_span(proc_macro_crate.clone(), tt.span())),
+ ))),
+ TokenTree::Literal(tt) => quote!(crate::TokenTree::Literal({
+ let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+ .parse::<crate::TokenStream>()
+ .unwrap()
+ .into_iter();
+ if let (Some(crate::TokenTree::Literal(mut lit)), None) =
+ (iter.next(), iter.next())
+ {
+ lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+ lit
+ } else {
+ unreachable!()
+ }
+ }))
+ })),))
+ })
+ .collect::<TokenStream>();
+
+ if after_dollar {
+ panic!("unexpected trailing `$` in `quote!`");
+ }
+
+ quote!([(@ tokens)].iter().cloned().collect::<crate::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+ let id = span.save_span();
+ quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
new file mode 100644
index 000000000..ebdfca00d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
@@ -0,0 +1,819 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet is using proc-macro2 as backend,
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream { token_trees: Default::default() }
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+ // The rustc server's `Ident` type has to be `Copy`
+// We use a stub here for bypassing
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+ idents: HashMap<IdentData, u32>,
+ ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+ fn intern(&mut self, data: &IdentData) -> u32 {
+ if let Some(index) = self.idents.get(data) {
+ return *index;
+ }
+
+ let index = self.idents.len() as u32;
+ self.ident_data.push(data.clone());
+ self.idents.insert(data.clone(), index);
+ index
+ }
+
+ fn get(&self, index: u32) -> &IdentData {
+ &self.ident_data[index as usize]
+ }
+
+ #[allow(unused)]
+ fn get_mut(&mut self, index: u32) -> &mut IdentData {
+ self.ident_data.get_mut(index as usize).expect("Should be consistent")
+ }
+}
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = super::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ fn build(self) -> TokenStream {
+ self.acc
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+ trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ ident_interner: IdentInterner,
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type TokenStreamBuilder = TokenStreamBuilder;
+ type TokenStreamIter = TokenStreamIter;
+ type Group = Group;
+ type Punct = Punct;
+ type Ident = IdentId;
+ type Literal = Literal;
+ type SourceFile = SourceFile;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn new(&mut self) -> Self::TokenStream {
+ Self::TokenStream::new()
+ }
+
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let leaf = tt::Leaf::from(p);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
+ let trees: Vec<TokenTree> = stream.into_iter().collect();
+ TokenStreamIter { trees: trees.into_iter() }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+}
+
+impl server::TokenStreamBuilder for RustAnalyzer {
+ fn new(&mut self) -> Self::TokenStreamBuilder {
+ Self::TokenStreamBuilder::new()
+ }
+ fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
+ builder.push(stream)
+ }
+ fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+ builder.build()
+ }
+}
+
+impl server::TokenStreamIter for RustAnalyzer {
+ fn next(
+ &mut self,
+ iter: &mut Self::TokenStreamIter,
+ ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+ iter.trees.next().map(|tree| match tree {
+ TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
+ TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
+ TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+ })
+ }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
+
+impl server::Group for RustAnalyzer {
+ fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
+ Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
+ }
+ fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
+ delim_to_external(group.delimiter)
+ }
+
+ // NOTE: the returned stream does not include the delimiter
+ fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+ TokenStream { token_trees: group.token_trees.clone() }
+ }
+
+ fn span(&mut self, group: &Self::Group) -> Self::Span {
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+ if let Some(delim) = &mut group.delimiter {
+ delim.id = span;
+ }
+ }
+
+ fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+}
+
+impl server::Punct for RustAnalyzer {
+ fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
+ tt::Punct {
+ char: ch,
+ spacing: spacing_to_internal(spacing),
+ id: tt::TokenId::unspecified(),
+ }
+ }
+ fn as_char(&mut self, punct: Self::Punct) -> char {
+ punct.char
+ }
+ fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
+ spacing_to_external(punct.spacing)
+ }
+ fn span(&mut self, punct: Self::Punct) -> Self::Span {
+ punct.id
+ }
+ fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+ tt::Punct { id: span, ..punct }
+ }
+}
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn def_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn call_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+ /// Recent feature, not yet in the proc_macro
+ ///
+ /// See PR:
+ /// https://github.com/rust-lang/rust/pull/55780
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn mixed_site(&mut self) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+ // TODO
+ other.push(span)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
new file mode 100644
index 000000000..76e89e319
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
@@ -0,0 +1,105 @@
+//! Macro ABI for version 1.63 of rustc
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub use ra_server::TokenStream;
+
+/// Handle to the proc macros exported by a loaded macro dylib that was
+/// compiled against the 1.63 bridge ABI.
+pub(crate) struct Abi {
+ // Entry points read out of the dylib's registration symbol in `from_lib`.
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ // `as_str()` is `None` for non-string panic payloads, in which case
+ // the crate-level message is simply absent.
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ /// Reads the macro-registration symbol named `symbol_name` out of `lib`
+ /// and records the exported entry points.
+ ///
+ /// # Safety
+ /// The caller must guarantee that the symbol in `lib` really is a
+ /// `&&[proc_macro::bridge::client::ProcMacro]` laid out for this exact
+ /// bridge ABI version; a mismatched type or ABI is undefined behavior.
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ /// Expands the macro named `macro_name` with `macro_body` as the item
+ /// input and, for attribute macros, `attributes` as the attribute input.
+ ///
+ /// Scans the exported macros for a name match of the appropriate kind and
+ /// runs its client on the current thread (`SameThread`); returns the
+ /// expanded subtree, or the panic message if the macro panicked.
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = TokenStream::with_subtree(macro_body.clone());
+
+ // Attribute input defaults to an empty stream when none was given.
+ let parsed_attributes =
+ attributes.map_or(TokenStream::new(), |attr| TokenStream::with_subtree(attr.clone()));
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ // Derive macros are matched by their trait name.
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ // Attribute macros take both the attribute and the item stream.
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ // No exported macro matched the requested name.
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ /// Lists the exported macros as `(name, kind)` pairs.
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..48030f8d8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs
@@ -0,0 +1,156 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+// A C-ABI-safe growable byte buffer. The data/len/capacity fields mirror the
+// raw parts of a `Vec<u8>` (see `From<Vec<u8>>` below); growth and teardown
+// go through the `reserve`/`drop` function pointers installed by whichever
+// side created the buffer, so (re)allocation always happens in the code that
+// owns the allocator.
+#[repr(C)]
+pub struct Buffer {
+ data: *mut u8,
+ len: usize,
+ capacity: usize,
+ reserve: extern "C" fn(Buffer, usize) -> Buffer,
+ drop: extern "C" fn(Buffer),
+}
+
+// SAFETY: NOTE(review): inherited from upstream rustc — assumes the buffer
+// uniquely owns its allocation and the `reserve`/`drop` callbacks are safe
+// to invoke from any thread.
+unsafe impl Sync for Buffer {}
+unsafe impl Send for Buffer {}
+
+impl Default for Buffer {
+ #[inline]
+ fn default() -> Self {
+ // An empty `vec![]` has no allocation, so the default buffer is free.
+ Self::from(vec![])
+ }
+}
+
+impl Deref for Buffer {
+ type Target = [u8];
+ #[inline]
+ fn deref(&self) -> &[u8] {
+ // SAFETY: `data`/`len` always describe an initialized byte region;
+ // the invariant is maintained by the mutating methods below.
+ unsafe { slice::from_raw_parts(self.data as *const u8, self.len) }
+ }
+}
+
+impl DerefMut for Buffer {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut [u8] {
+ // SAFETY: as above, plus `&mut self` guarantees unique access.
+ unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+ }
+}
+
+impl Buffer {
+ #[inline]
+ pub(super) fn new() -> Self {
+ Self::default()
+ }
+
+ // Logically empties the buffer while keeping the allocation for reuse.
+ #[inline]
+ pub(super) fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ // Moves the buffer out by value, leaving an empty default buffer behind.
+ #[inline]
+ pub(super) fn take(&mut self) -> Self {
+ mem::take(self)
+ }
+
+ // We have the array method separate from extending from a slice. This is
+ // because in the case of small arrays, codegen can be more efficient
+ // (avoiding a memmove call). With extend_from_slice, LLVM at least
+ // currently is not able to make that optimization.
+ #[inline]
+ pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[u8; N]) {
+ // Grow through the creator-supplied `reserve` callback when needed.
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ // SAFETY: the reserve above guarantees at least `xs.len()` spare
+ // capacity, and `xs` cannot alias the buffer (it is borrowed while
+ // `self` is mutably borrowed).
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn extend_from_slice(&mut self, xs: &[u8]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ // SAFETY: same argument as in `extend_from_array`.
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn push(&mut self, v: u8) {
+ // The code here is taken from Vec::push, and we know that reserve()
+ // will panic if we're exceeding isize::MAX bytes and so there's no need
+ // to check for overflow.
+ if self.len == self.capacity {
+ let b = self.take();
+ *self = (b.reserve)(b, 1);
+ }
+ // SAFETY: capacity was just ensured; writes exactly one byte at `len`.
+ unsafe {
+ *self.data.add(self.len) = v;
+ self.len += 1;
+ }
+ }
+}
+
+// Infallible `Write` into the in-memory buffer: every call accepts the whole
+// input and `flush` is a no-op.
+impl Write for Buffer {
+ #[inline]
+ fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(xs);
+ Ok(xs.len())
+ }
+
+ #[inline]
+ fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+ self.extend_from_slice(xs);
+ Ok(())
+ }
+
+ #[inline]
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl Drop for Buffer {
+ #[inline]
+ fn drop(&mut self) {
+ // Move the raw parts into an owned `Buffer` (leaving an allocation-free
+ // default behind) and hand it to the creator-supplied `drop` callback,
+ // so the allocation is freed by the allocator that created it.
+ let b = self.take();
+ (b.drop)(b);
+ }
+}
+
+impl From<Vec<u8>> for Buffer {
+ fn from(mut v: Vec<u8>) -> Self {
+ let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+ // Ownership of the allocation transfers to the returned `Buffer`.
+ mem::forget(v);
+
+ // This utility function is nested in here because it can *only*
+ // be safely called on `Buffer`s created by *this* `proc_macro`.
+ fn to_vec(b: Buffer) -> Vec<u8> {
+ unsafe {
+ let Buffer { data, len, capacity, .. } = b;
+ // Skip `Buffer::drop`; the reconstructed Vec now owns the data.
+ mem::forget(b);
+ Vec::from_raw_parts(data, len, capacity)
+ }
+ }
+
+ extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer {
+ let mut v = to_vec(b);
+ v.reserve(additional);
+ Buffer::from(v)
+ }
+
+ extern "C" fn drop(b: Buffer) {
+ mem::drop(to_vec(b));
+ }
+
+ Buffer { data, len, capacity, reserve, drop }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..102027d14
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
@@ -0,0 +1,510 @@
+//! Client-side types.
+
+use super::*;
+
+use std::marker::PhantomData;
+
+// Generates, for each listed type, a `#[repr(C)]` handle newtype plus the
+// `Encode`/`Decode` impls that move it across the bridge:
+// - 'owned handles have move semantics (backed by a server-side
+//   `handle::OwnedStore`; decoding on the server takes the value out, and
+//   `Drop` forwards to an inherent `drop` method to release it), while
+// - 'interned handles are `Copy` and deduplicated via `handle::InternedStore`.
+// Also generates the shared `HandleCounters` (one atomic per type) and the
+// server-side `HandleStore`.
+macro_rules! define_handles {
+ (
+ 'owned: $($oty:ident,)*
+ 'interned: $($ity:ident,)*
+ ) => {
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub struct HandleCounters {
+ $($oty: AtomicUsize,)*
+ $($ity: AtomicUsize,)*
+ }
+
+ impl HandleCounters {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ extern "C" fn get() -> &'static Self {
+ // Counters start at 1 because handles are `NonZeroU32`.
+ static COUNTERS: HandleCounters = HandleCounters {
+ $($oty: AtomicUsize::new(1),)*
+ $($ity: AtomicUsize::new(1),)*
+ };
+ &COUNTERS
+ }
+ }
+
+ // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub(super) struct HandleStore<S: server::Types> {
+ $($oty: handle::OwnedStore<S::$oty>,)*
+ $($ity: handle::InternedStore<S::$ity>,)*
+ }
+
+ impl<S: server::Types> HandleStore<S> {
+ pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+ HandleStore {
+ $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+ $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+ }
+ }
+ }
+
+ // ---- 'owned handle types ----
+ $(
+ #[repr(C)]
+ pub(crate) struct $oty {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ // Forward `Drop::drop` to the inherent `drop` method.
+ impl Drop for $oty {
+ fn drop(&mut self) {
+ $oty {
+ handle: self.handle,
+ _marker: PhantomData,
+ }.drop();
+ }
+ }
+
+ // Client-side encode consumes the handle (move semantics): the
+ // handle id is sent and `Drop` is suppressed via `mem::forget`.
+ impl<S> Encode<S> for $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ let handle = self.handle;
+ mem::forget(self);
+ handle.encode(w, s);
+ }
+ }
+
+ // Server-side decode of an owned handle removes the value from
+ // the store (use-after-free is caught by the store's expect).
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$oty.take(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S> Encode<S> for &$oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+ &s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S> Encode<S> for &mut $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s mut Marked<S::$oty, $oty>
+ {
+ fn decode(
+ r: &mut Reader<'_>,
+ s: &'s mut HandleStore<server::MarkedTypes<S>>
+ ) -> Self {
+ &mut s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ // Server-side encode allocates a fresh handle for the value.
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$oty.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $oty {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $oty {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+
+ // ---- 'interned handle types (Copy, deduplicated) ----
+ $(
+ #[repr(C)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ pub(crate) struct $ity {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ impl<S> Encode<S> for $ity {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$ity.copy(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$ity.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ity {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $ity {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+ }
+}
+// The concrete handle types used by this ABI's API surface.
+define_handles! {
+ 'owned:
+ FreeFunctions,
+ TokenStream,
+ Group,
+ Literal,
+ SourceFile,
+ MultiSpan,
+ Diagnostic,
+
+ 'interned:
+ Punct,
+ Ident,
+ Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special "modes" could be listed of types in with_api
+// instead of pattern matching on methods, here and in server decl.
+
+// NOTE(review): the `self.clone()` calls below are not infinite recursion:
+// `define_client_side!` (expanded via `with_api!` at the bottom of this file)
+// generates *inherent* methods with these names on the handle types, and
+// inherent methods take precedence over trait methods during method
+// resolution. Each trait impl therefore forwards to the RPC-backed inherent
+// method — assumes `with_api!` declares `clone` for these types (it does in
+// upstream rustc; confirm against the `with_api!` body).
+impl Clone for TokenStream {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Group {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Literal {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Literal")
+ // format the kind without quotes, as in `kind: Float`
+ .field("kind", &format_args!("{}", &self.debug_kind()))
+ .field("symbol", &self.symbol())
+ // format `Some("...")` on one line even in {:#?} mode
+ .field("suffix", &format_args!("{:?}", &self.suffix()))
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl Clone for SourceFile {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Delegates to the server-provided debug representation.
+ f.write_str(&self.debug())
+ }
+}
+
+// For every API method listed by `with_api!`, generates an inherent method on
+// the client-side handle type that performs one RPC round-trip: serialize the
+// method tag and arguments into the bridge's cached buffer, dispatch to the
+// server, decode the `Result`, and resume any server-side panic locally.
+macro_rules! define_client_side {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(impl $name {
+ $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+ Bridge::with(|bridge| {
+ // Reuse the bridge's buffer to avoid per-call allocations.
+ let mut buf = bridge.cached_buffer.take();
+
+ buf.clear();
+ api_tags::Method::$name(api_tags::$name::$method).encode(&mut buf, &mut ());
+ reverse_encode!(buf; $($arg),*);
+
+ buf = bridge.dispatch.call(buf);
+
+ let r = Result::<_, PanicMessage>::decode(&mut &buf[..], &mut ());
+
+ // Return the buffer for the next call.
+ bridge.cached_buffer = buf;
+
+ // A server-side panic is re-raised on the client thread.
+ r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+ })
+ })*
+ })*
+ }
+}
+with_api!(self, self, define_client_side);
+
+enum BridgeState<'a> {
+ /// No server is currently connected to this client.
+ NotConnected,
+
+ /// A server is connected and available for requests.
+ Connected(Bridge<'a>),
+
+ /// Access to the bridge is being exclusively acquired
+ /// (e.g., during `BridgeState::with`).
+ InUse,
+}
+
+// Marker type used to give the thread-local `ScopedCell` a lifetime-generic
+// payload (`BridgeState<'a>`) via the `ApplyL` trick.
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+ type Out = BridgeState<'a>;
+}
+
+// Per-thread connection state; `Bridge::enter` swaps this to `Connected`
+// for the duration of an expansion.
+thread_local! {
+ static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+ scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+ /// Take exclusive control of the thread-local
+ /// `BridgeState`, and pass it to `f`, mutably.
+ /// The state will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ ///
+ /// N.B., while `f` is running, the thread-local state
+ /// is `BridgeState::InUse`.
+ fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+ BRIDGE_STATE.with(|state| {
+ state.replace(BridgeState::InUse, |mut state| {
+ // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+ f(&mut *state)
+ })
+ })
+ }
+}
+
+impl Bridge<'_> {
+ /// Whether a proc-macro expansion is in progress on this thread.
+ pub(crate) fn is_available() -> bool {
+ BridgeState::with(|state| match state {
+ BridgeState::Connected(_) | BridgeState::InUse => true,
+ BridgeState::NotConnected => false,
+ })
+ }
+
+ /// Marks this thread as connected to `self` while `f` runs.
+ fn enter<R>(self, f: impl FnOnce() -> R) -> R {
+ let force_show_panics = self.force_show_panics;
+ // Hide the default panic output within `proc_macro` expansions.
+ // NB. the server can't do this because it may use a different libstd.
+ // NOTE(review): the hook captures `force_show_panics` from the first
+ // expansion in this process; a later bridge with a different value
+ // will not update it (matches upstream rustc behavior).
+ static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+ HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ let show = BridgeState::with(|state| match state {
+ BridgeState::NotConnected => true,
+ BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+ });
+ if show {
+ prev(info)
+ }
+ }));
+ });
+
+ BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f))
+ }
+
+ /// Accesses the connected bridge; panics if there is none, or if it is
+ /// already being accessed (re-entrant use).
+ fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+ BridgeState::with(|state| match state {
+ BridgeState::NotConnected => {
+ panic!("procedural macro API is used outside of a procedural macro");
+ }
+ BridgeState::InUse => {
+ panic!("procedural macro API is used while it's already in use");
+ }
+ BridgeState::Connected(bridge) => f(bridge),
+ })
+ }
+}
+
+/// A client-side RPC entry-point, which may be using a different `proc_macro`
+/// from the one used by the server, but can be invoked compatibly.
+///
+/// Note that the (phantom) `I` ("input") and `O` ("output") type parameters
+/// decorate the `Client<I, O>` with the RPC "interface" of the entry-point, but
+/// do not themselves participate in ABI, at all, only facilitate type-checking.
+///
+/// E.g. `Client<TokenStream, TokenStream>` is the common proc macro interface,
+/// used for `#[proc_macro] fn foo(input: TokenStream) -> TokenStream`,
+/// indicating that the RPC input and output will be serialized token streams,
+/// and forcing the use of APIs that take/return `S::TokenStream`, server-side.
+#[repr(C)]
+pub struct Client<I, O> {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+
+ // C-ABI entry point: given a connected bridge carrying the serialized
+ // input, returns a buffer holding the serialized `Result` output.
+ pub(super) run: extern "C" fn(Bridge<'_>) -> Buffer,
+
+ pub(super) _marker: PhantomData<fn(I) -> O>,
+}
+
+// `Client` is just two fn pointers, so it is freely copyable.
+impl<I, O> Copy for Client<I, O> {}
+impl<I, O> Clone for Client<I, O> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+ mut bridge: Bridge<'_>,
+ f: impl FnOnce(A) -> R,
+) -> Buffer {
+ // The initial `cached_buffer` contains the input.
+ let mut buf = bridge.cached_buffer.take();
+
+ panic::catch_unwind(panic::AssertUnwindSafe(|| {
+ bridge.enter(|| {
+ let reader = &mut &buf[..];
+ let input = A::decode(reader, &mut ());
+
+ // Put the `cached_buffer` back in the `Bridge`, for requests.
+ Bridge::with(|bridge| bridge.cached_buffer = buf.take());
+
+ let output = f(input);
+
+ // Take the `cached_buffer` back out, for the output value.
+ buf = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+ // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+ // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+ // having handles outside the `bridge.enter(|| ...)` scope, and
+ // to catch panics that could happen while encoding the success.
+ //
+ // Note that panics should be impossible beyond this point, but
+ // this is defensively trying to avoid any accidental panicking
+ // reaching the `extern "C"` (which should `abort` but might not
+ // at the moment, so this is also potentially preventing UB).
+ buf.clear();
+ Ok::<_, ()>(output).encode(&mut buf, &mut ());
+ })
+ }))
+ .map_err(PanicMessage::from)
+ .unwrap_or_else(|e| {
+ // The macro panicked: encode the panic message as the `Err` payload.
+ buf.clear();
+ Err::<(), _>(e).encode(&mut buf, &mut ());
+ });
+ buf
+}
+
+impl Client<super::super::TokenStream, super::super::TokenStream> {
+ /// Builds the client for a one-stream macro (function-like or derive:
+ /// `TokenStream -> TokenStream`), reifying the capture-free closure `f`
+ /// into a C-ABI entry point via `selfless_reify`.
+ pub const fn expand1(
+ f: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl Client<(super::super::TokenStream, super::super::TokenStream), super::super::TokenStream> {
+ /// Builds the client for a two-stream macro (attribute:
+ /// `(attr, item) -> TokenStream`).
+ pub const fn expand2(
+ f: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |(input, input2)| {
+ f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+ })
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+/// A single exported proc-macro entry point, as stored in the macro crate's
+/// generated registration static. `#[repr(C)]` (and built from `Copy` fn
+/// pointers) so it can be read across the dylib boundary.
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+ CustomDerive {
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+
+ Attr {
+ name: &'static str,
+ client: Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >,
+ },
+
+ Bang {
+ name: &'static str,
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+}
+
+impl ProcMacro {
+ /// The name the macro is invoked by (the trait name, for derives).
+ pub fn name(&self) -> &'static str {
+ match self {
+ ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+ ProcMacro::Attr { name, .. } => name,
+ ProcMacro::Bang { name, .. } => name,
+ }
+ }
+
+ /// Registers a derive macro with its helper attributes.
+ pub const fn custom_derive(
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
+ }
+
+ /// Registers an attribute macro.
+ pub const fn attr(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ ProcMacro::Attr { name, client: Client::expand2(expand) }
+ }
+
+ /// Registers a function-like macro.
+ pub const fn bang(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::Bang { name, client: Client::expand1(expand) }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..d371ae3ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs
@@ -0,0 +1,32 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+use std::marker::PhantomData;
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+ // Type-erased trampoline plus the erased pointer to the real closure.
+ call: unsafe extern "C" fn(*mut Env, A) -> R,
+ env: *mut Env,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ //
+ // The `'a` lifetime parameter represents the lifetime of `Env`.
+ _marker: PhantomData<*mut &'a mut ()>,
+}
+
+// Opaque stand-in for the erased closure type behind `env`.
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+ fn from(f: &'a mut F) -> Self {
+ // SAFETY (of callers of `call`): `env` must be the `*mut F` this
+ // constructor erased; the struct's `'a` keeps the borrow of `f` live
+ // for as long as the `Closure` exists.
+ unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: *mut Env, arg: A) -> R {
+ (*(env as *mut _ as *mut F))(arg)
+ }
+ Closure { call: call::<A, R, F>, env: f as *mut _ as *mut Env, _marker: PhantomData }
+ }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+ pub fn call(&mut self, arg: A) -> R {
+ // SAFETY: `call` and `env` were paired by `From<&'a mut F>` above,
+ // and the borrow they erase is still live (tracked by `'a`).
+ unsafe { (self.call)(self.env, arg) }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..c219a9465
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
@@ -0,0 +1,89 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::{BuildHasher, Hash};
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+// Non-zero so the counters feeding `alloc` must start at 1 (asserted below)
+// and `Option<Handle>` gets the niche optimization for free.
+pub(super) type Handle = NonZeroU32;
+
+/// A store that associates values of type `T` with numeric handles. A value can
+/// be looked up using its handle.
+pub(super) struct OwnedStore<T: 'static> {
+ // Shared, process-wide counter so handle ids are unique across stores
+ // created from the same counter.
+ counter: &'static AtomicUsize,
+ data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ // Ensure the handle counter isn't 0, which would panic later,
+ // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+ assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+ OwnedStore { counter, data: BTreeMap::new() }
+ }
+}
+
+impl<T> OwnedStore<T> {
+ // Stores `x` and returns a freshly allocated handle for it.
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+ let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+ // Fresh counter values can never collide with live handles.
+ assert!(self.data.insert(handle, x).is_none());
+ handle
+ }
+
+ // Removes and returns the value; the handle must not be used again.
+ pub(super) fn take(&mut self, h: Handle) -> T {
+ self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+ type Output = T;
+ fn index(&self, h: Handle) -> &T {
+ self.data.get(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+ fn index_mut(&mut self, h: Handle) -> &mut T {
+ self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+// HACK(eddyb) deterministic `std::collections::hash_map::RandomState` replacement
+// that doesn't require adding any dependencies to `proc_macro` (like `rustc-hash`).
+#[derive(Clone)]
+struct NonRandomState;
+
+impl BuildHasher for NonRandomState {
+ type Hasher = std::collections::hash_map::DefaultHasher;
+ #[inline]
+ fn build_hasher(&self) -> Self::Hasher {
+ Self::Hasher::new()
+ }
+}
+
+/// Like `OwnedStore`, but avoids storing any value more than once.
+pub(super) struct InternedStore<T: 'static> {
+ owned: OwnedStore<T>,
+ // Maps a value back to its existing handle, if any.
+ interner: HashMap<T, Handle, NonRandomState>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ InternedStore {
+ owned: OwnedStore::new(counter),
+ interner: HashMap::with_hasher(NonRandomState),
+ }
+ }
+
+ // Returns the existing handle for `x`, allocating one only on first use.
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let owned = &mut self.owned;
+ *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
+ }
+
+ // Interned values are `Copy`, so lookup hands out a copy and the entry
+ // stays in the store (no move-out, unlike `OwnedStore::take`).
+ pub(super) fn copy(&mut self, h: Handle) -> T {
+ self.owned[h]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..4967da493
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
@@ -0,0 +1,451 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the arguments names
+/// and argument/return types, to enable several different usecases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ FreeFunctions {
+ fn drop($self: $S::FreeFunctions);
+ fn track_env_var(var: &str, value: Option<&str>);
+ fn track_path(path: &str);
+ },
+ TokenStream {
+ fn drop($self: $S::TokenStream);
+ fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+ fn is_empty($self: &$S::TokenStream) -> bool;
+ fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+ fn from_str(src: &str) -> $S::TokenStream;
+ fn to_string($self: &$S::TokenStream) -> String;
+ fn from_token_tree(
+ tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
+ ) -> $S::TokenStream;
+ fn concat_trees(
+ base: Option<$S::TokenStream>,
+ trees: Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>,
+ ) -> $S::TokenStream;
+ fn concat_streams(
+ base: Option<$S::TokenStream>,
+ streams: Vec<$S::TokenStream>,
+ ) -> $S::TokenStream;
+ fn into_trees(
+ $self: $S::TokenStream
+ ) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+ },
+ Group {
+ fn drop($self: $S::Group);
+ fn clone($self: &$S::Group) -> $S::Group;
+ fn new(delimiter: Delimiter, stream: Option<$S::TokenStream>) -> $S::Group;
+ fn delimiter($self: &$S::Group) -> Delimiter;
+ fn stream($self: &$S::Group) -> $S::TokenStream;
+ fn span($self: &$S::Group) -> $S::Span;
+ fn span_open($self: &$S::Group) -> $S::Span;
+ fn span_close($self: &$S::Group) -> $S::Span;
+ fn set_span($self: &mut $S::Group, span: $S::Span);
+ },
+ Punct {
+ fn new(ch: char, spacing: Spacing) -> $S::Punct;
+ fn as_char($self: $S::Punct) -> char;
+ fn spacing($self: $S::Punct) -> Spacing;
+ fn span($self: $S::Punct) -> $S::Span;
+ fn with_span($self: $S::Punct, span: $S::Span) -> $S::Punct;
+ },
+ Ident {
+ fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+ fn span($self: $S::Ident) -> $S::Span;
+ fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+ },
+ Literal {
+ fn drop($self: $S::Literal);
+ fn clone($self: &$S::Literal) -> $S::Literal;
+ fn from_str(s: &str) -> Result<$S::Literal, ()>;
+ fn to_string($self: &$S::Literal) -> String;
+ fn debug_kind($self: &$S::Literal) -> String;
+ fn symbol($self: &$S::Literal) -> String;
+ fn suffix($self: &$S::Literal) -> Option<String>;
+ fn integer(n: &str) -> $S::Literal;
+ fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+ fn float(n: &str) -> $S::Literal;
+ fn f32(n: &str) -> $S::Literal;
+ fn f64(n: &str) -> $S::Literal;
+ fn string(string: &str) -> $S::Literal;
+ fn character(ch: char) -> $S::Literal;
+ fn byte_string(bytes: &[u8]) -> $S::Literal;
+ fn span($self: &$S::Literal) -> $S::Span;
+ fn set_span($self: &mut $S::Literal, span: $S::Span);
+ fn subspan(
+ $self: &$S::Literal,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<$S::Span>;
+ },
+ SourceFile {
+ fn drop($self: $S::SourceFile);
+ fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+ fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+ fn path($self: &$S::SourceFile) -> String;
+ fn is_real($self: &$S::SourceFile) -> bool;
+ },
+ MultiSpan {
+ fn drop($self: $S::MultiSpan);
+ fn new() -> $S::MultiSpan;
+ fn push($self: &mut $S::MultiSpan, span: $S::Span);
+ },
+ Diagnostic {
+ fn drop($self: $S::Diagnostic);
+ fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+ fn sub(
+ $self: &mut $S::Diagnostic,
+ level: Level,
+ msg: &str,
+ span: $S::MultiSpan,
+ );
+ fn emit($self: $S::Diagnostic);
+ },
+ Span {
+ fn debug($self: $S::Span) -> String;
+ fn def_site() -> $S::Span;
+ fn call_site() -> $S::Span;
+ fn mixed_site() -> $S::Span;
+ fn source_file($self: $S::Span) -> $S::SourceFile;
+ fn parent($self: $S::Span) -> Option<$S::Span>;
+ fn source($self: $S::Span) -> $S::Span;
+ fn start($self: $S::Span) -> LineColumn;
+ fn end($self: $S::Span) -> LineColumn;
+ fn before($self: $S::Span) -> $S::Span;
+ fn after($self: $S::Span) -> $S::Span;
+ fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+ fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
+ fn save_span($self: $S::Span) -> usize;
+ fn recover_proc_macro_span(id: usize) -> $S::Span;
+ },
+ }
+ };
+}
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to match the ordering in `reverse_decode`.
+macro_rules! reverse_encode {
+ ($writer:ident;) => {};
+ ($writer:ident; $first:ident $(, $rest:ident)*) => {
+ reverse_encode!($writer; $($rest),*);
+ $first.encode(&mut $writer, &mut ());
+ }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+ ($reader:ident, $s:ident;) => {};
+ ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+ reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+ let $first = <$first_ty>::decode(&mut $reader, $s);
+ }
+}
+
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[allow(unsafe_code)]
+mod selfless_reify;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// An active connection between a server and a client.
+/// The server creates the bridge (`Bridge::run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run`
+/// field of `client::Client`. The client holds its copy of the `Bridge`
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct Bridge<'a> {
+ /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+ /// used for making requests, but also for passing input to client.
+ cached_buffer: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+ /// If 'true', always invoke the default panic hook
+ force_show_panics: bool,
+
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ _marker: marker::PhantomData<*mut ()>,
+}
+
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+ use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+ macro_rules! declare_tags {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(
+ pub(super) enum $name {
+ $($method),*
+ }
+ rpc_encode_decode!(enum $name { $($method),* });
+ )*
+
+ pub(super) enum Method {
+ $($name($name)),*
+ }
+ rpc_encode_decode!(enum Method { $($name(m)),* });
+ }
+ }
+ with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+ type Unmarked;
+ fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+ type Unmarked;
+ fn unmark(self) -> Self::Unmarked;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+ value: T,
+ _marker: marker::PhantomData<M>,
+}
+
+impl<T, M> Mark for Marked<T, M> {
+ type Unmarked = T;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ Marked { value: unmarked, _marker: marker::PhantomData }
+ }
+}
+impl<T, M> Unmark for Marked<T, M> {
+ type Unmarked = T;
+ fn unmark(self) -> Self::Unmarked {
+ self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+ type Unmarked = &'a T;
+ fn unmark(self) -> Self::Unmarked {
+ &self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+ type Unmarked = &'a mut T;
+ fn unmark(self) -> Self::Unmarked {
+ &mut self.value
+ }
+}
+
+impl<T: Mark> Mark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ // Should be a no-op due to std's in-place collect optimizations.
+ unmarked.into_iter().map(T::mark).collect()
+ }
+}
+impl<T: Unmark> Unmark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ // Should be a no-op due to std's in-place collect optimizations.
+ self.into_iter().map(T::unmark).collect()
+ }
+}
+
+macro_rules! mark_noop {
+ ($($ty:ty),* $(,)?) => {
+ $(
+ impl Mark for $ty {
+ type Unmarked = Self;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked
+ }
+ }
+ impl Unmark for $ty {
+ type Unmarked = Self;
+ fn unmark(self) -> Self::Unmarked {
+ self
+ }
+ }
+ )*
+ }
+}
+mark_noop! {
+ (),
+ bool,
+ char,
+ &'_ [u8],
+ &'_ str,
+ String,
+ usize,
+ Delimiter,
+ Level,
+ LineColumn,
+ Spacing,
+}
+
+rpc_encode_decode!(
+ enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+ }
+);
+rpc_encode_decode!(
+ enum Level {
+ Error,
+ Warning,
+ Note,
+ Help,
+ }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+ enum Spacing {
+ Alone,
+ Joint,
+ }
+);
+
+macro_rules! mark_compound {
+ (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ match unmarked {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Mark::mark($field)))?
+ })*
+ }
+ }
+ }
+
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ match self {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Unmark::unmark($field)))?
+ })*
+ }
+ }
+ }
+ }
+}
+
+macro_rules! compound_traits {
+ ($($t:tt)*) => {
+ rpc_encode_decode!($($t)*);
+ mark_compound!($($t)*);
+ };
+}
+
+compound_traits!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+compound_traits!(
+ enum Option<T> {
+ Some(t),
+ None,
+ }
+);
+
+compound_traits!(
+ enum Result<T, E> {
+ Ok(t),
+ Err(e),
+ }
+);
+
+#[derive(Clone)]
+pub enum TokenTree<G, P, I, L> {
+ Group(G),
+ Punct(P),
+ Ident(I),
+ Literal(L),
+}
+
+compound_traits!(
+ enum TokenTree<G, P, I, L> {
+ Group(tt),
+ Punct(tt),
+ Ident(tt),
+ Literal(tt),
+ }
+);
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..e9d7a46c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
@@ -0,0 +1,304 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ for x in self {
+ x.encode(w, s);
+ }
+ }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let mut vec = Vec::with_capacity(len);
+ for _ in 0..len {
+ vec.push(T::decode(r, s));
+ }
+ vec
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..2cde1f65a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub const fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+ /// The old value will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+ /// even if `f` had panicked.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs
new file mode 100644
index 000000000..4ee4bb87c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs
@@ -0,0 +1,83 @@
+//! Abstraction for creating `fn` pointers from any callable that *effectively*
+//! has the equivalent of implementing `Default`, even if the compiler neither
+//! provides `Default` nor allows reifying closures (i.e. creating `fn` pointers)
+//! other than those with absolutely no captures.
+//!
+//! More specifically, for a closure-like type to be "effectively `Default`":
+//! * it must be a ZST (zero-sized type): no information contained within, so
+//! that `Default`'s return value (if it were implemented) is unambiguous
+//! * it must be `Copy`: no captured "unique ZST tokens" or any other similar
+//! types that would make duplicating values at will unsound
+//! * combined with the ZST requirement, this confers a kind of "telecopy"
+//! ability: similar to `Copy`, but without keeping the value around, and
+//! instead "reconstructing" it (a noop given it's a ZST) when needed
+//! * it must be *provably* inhabited: no captured uninhabited types or any
+//! other types that cannot be constructed by the user of this abstraction
+//! * the proof is a value of the closure-like type itself, in a sense the
+//! "seed" for the "telecopy" process made possible by ZST + `Copy`
+//! * this requirement is the only reason an abstraction limited to a specific
+//! usecase is required: ZST + `Copy` can be checked with *at worst* a panic
+//! at the "attempted `::default()` call" time, but that doesn't guarantee
+//! that the value can be soundly created, and attempting to use the typical
+//! "proof ZST token" approach leads yet again to having a ZST + `Copy` type
+//! that is not proof of anything without a value (i.e. isomorphic to a
+//! newtype of the type it's trying to prove the inhabitation of)
+//!
+//! A more flexible (and safer) solution to the general problem could exist once
+//! `const`-generic parameters can have type parameters in their types:
+//!
+//! ```rust,ignore (needs future const-generics)
+//! extern "C" fn ffi_wrapper<
+//! A, R,
+//! F: Fn(A) -> R,
+//! const f: F, // <-- this `const`-generic is not yet allowed
+//! >(arg: A) -> R {
+//! f(arg)
+//! }
+//! ```
+
+use std::mem;
+
+// FIXME(eddyb) this could be `trait` impls except for the `const fn` requirement.
+macro_rules! define_reify_functions {
+ ($(
+ fn $name:ident $(<$($param:ident),*>)?
+ for $(extern $abi:tt)? fn($($arg:ident: $arg_ty:ty),*) -> $ret_ty:ty;
+ )+) => {
+ $(pub const fn $name<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >(f: F) -> $(extern $abi)? fn($($arg_ty),*) -> $ret_ty {
+ // FIXME(eddyb) describe the `F` type (e.g. via `type_name::<F>`) once panic
+ // formatting becomes possible in `const fn`.
+ assert!(mem::size_of::<F>() == 0, "selfless_reify: closure must be zero-sized");
+
+ $(extern $abi)? fn wrapper<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >($($arg: $arg_ty),*) -> $ret_ty {
+ let f = unsafe {
+ // SAFETY: `F` satisfies all criteria for "out of thin air"
+ // reconstructability (see module-level doc comment).
+ mem::MaybeUninit::<F>::uninit().assume_init()
+ };
+ f($($arg),*)
+ }
+ let _f_proof = f;
+ wrapper::<
+ $($($param,)*)?
+ F
+ >
+ })+
+ }
+}
+
+define_reify_functions! {
+ fn _reify_to_extern_c_fn_unary<A, R> for extern "C" fn(arg: A) -> R;
+
+ // HACK(eddyb) this abstraction is used with `for<'a> fn(Bridge<'a>) -> T`
+ // but that doesn't work with just `reify_to_extern_c_fn_unary` because of
+ // the `fn` pointer type being "higher-ranked" (i.e. the `for<'a>` binder).
+ // FIXME(eddyb) try to remove the lifetime from `Bridge`, that'd help.
+ fn reify_to_extern_c_fn_hrt_bridge<R> for extern "C" fn(bridge: super::Bridge<'_>) -> R;
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..0fb3c6985
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
@@ -0,0 +1,332 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+pub trait Types {
+ type FreeFunctions: 'static;
+ type TokenStream: 'static + Clone;
+ type Group: 'static + Clone;
+ type Punct: 'static + Copy + Eq + Hash;
+ type Ident: 'static + Copy + Eq + Hash;
+ type Literal: 'static + Clone;
+ type SourceFile: 'static + Clone;
+ type MultiSpan: 'static;
+ type Diagnostic: 'static;
+ type Span: 'static + Copy + Eq + Hash;
+}
+
+/// Declare an associated fn of one of the traits below, adding necessary
+/// default bodies.
+macro_rules! associated_fn {
+ (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+ (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+
+ (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+ (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+
+ ($($item:tt)*) => ($($item)*;)
+}
+
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ $(pub trait $name: Types {
+ $(associated_fn!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+with_api!(Self, self_, declare_server_traits);
+
+pub(super) struct MarkedTypes<S: Types>(S);
+
+macro_rules! define_mark_types_impls {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ impl<S: Types> Types for MarkedTypes<S> {
+ $(type $name = Marked<S::$name, client::$name>;)*
+ }
+
+ $(impl<S: $name> $name for MarkedTypes<S> {
+ $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+ <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+ })*
+ })*
+ }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+struct Dispatcher<S: Types> {
+ handle_store: HandleStore<S>,
+ server: S,
+}
+
+macro_rules! define_dispatcher_impl {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+ pub trait DispatcherTrait {
+ // HACK(eddyb) these are here to allow `Self::$name` to work below.
+ $(type $name;)*
+ fn dispatch(&mut self, buf: Buffer) -> Buffer;
+ }
+
+ impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+ $(type $name = <MarkedTypes<S> as Types>::$name;)*
+ fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
+ let Dispatcher { handle_store, server } = self;
+
+ let mut reader = &buf[..];
+ match api_tags::Method::decode(&mut reader, &mut ()) {
+ $(api_tags::Method::$name(m) => match m {
+ $(api_tags::$name::$method => {
+ let mut call_method = || {
+ reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+ $name::$method(server, $($arg),*)
+ };
+ // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+ // If client and server happen to use the same `libstd`,
+ // `catch_unwind` asserts that the panic counter was 0,
+ // even when the closure passed to it didn't panic.
+ let r = if thread::panicking() {
+ Ok(call_method())
+ } else {
+ panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+ .map_err(PanicMessage::from)
+ };
+
+ buf.clear();
+ r.encode(&mut buf, handle_store);
+ })*
+ }),*
+ }
+ buf
+ }
+ }
+ }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+pub trait ExecutionStrategy {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer;
+}
+
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ let mut dispatch = |buf| dispatcher.dispatch(buf);
+
+ run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ }
+}
+
+// NOTE(eddyb) Two implementations are provided, the second one is a bit
+// faster but neither is anywhere near as fast as same-thread execution.
+
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::mpsc::channel;
+
+ let (req_tx, req_rx) = channel();
+ let (res_tx, res_rx) = channel();
+
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |buf| {
+ req_tx.send(buf).unwrap();
+ res_rx.recv().unwrap()
+ };
+
+ run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ });
+
+ for b in req_rx {
+ res_tx.send(dispatcher.dispatch(b)).unwrap();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::{Arc, Mutex};
+
+ enum State<T> {
+ Req(T),
+ Res(T),
+ }
+
+ let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+ let server_thread = thread::current();
+ let state2 = state.clone();
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ *state2.lock().unwrap() = State::Req(b);
+ server_thread.unpark();
+ loop {
+ thread::park();
+ if let State::Res(b) = &mut *state2.lock().unwrap() {
+ break b.take();
+ }
+ }
+ };
+
+ let r = run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ });
+
+ // Wake up the server so it can exit the dispatch loop.
+ drop(state2);
+ server_thread.unpark();
+
+ r
+ });
+
+ // Check whether `state2` was dropped, to know when to stop.
+ while Arc::get_mut(&mut state).is_none() {
+ thread::park();
+ let mut b = match &mut *state.lock().unwrap() {
+ State::Req(b) => b.take(),
+ _ => continue,
+ };
+ b = dispatcher.dispatch(b.take());
+ *state.lock().unwrap() = State::Res(b);
+ join_handle.thread().unpark();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+fn run_server<
+ S: Server,
+ I: Encode<HandleStore<MarkedTypes<S>>>,
+ O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+>(
+ strategy: &impl ExecutionStrategy,
+ handle_counters: &'static client::HandleCounters,
+ server: S,
+ input: I,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+ let mut dispatcher =
+ Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+ let mut buf = Buffer::new();
+ input.encode(&mut buf, &mut dispatcher.handle_store);
+
+ buf = strategy.run_bridge_and_client(&mut dispatcher, buf, run_client, force_show_panics);
+
+ Result::decode(&mut &buf[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<super::super::TokenStream, super::super::TokenStream> {
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
+
+impl
+ client::Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >
+{
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ input2: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ (
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+ ),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..3fade2dc4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copy from <https://github.com/rust-lang/rust/blob/e45d9973b2665897a768312e971b82cc62633103/src/libproc_macro/diagnostic.rs>
+//! augmented with removing unstable features
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+ /// An error.
+ Error,
+ /// A warning.
+ Warning,
+ /// A note.
+ Note,
+ /// A help message.
+ Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+ /// Converts `self` into a `Vec<Span>`.
+ fn into_spans(self) -> Vec<Span>;
+}
+
+impl MultiSpan for Span {
+ fn into_spans(self) -> Vec<Span> {
+ vec![self]
+ }
+}
+
+impl MultiSpan for Vec<Span> {
+ fn into_spans(self) -> Vec<Span> {
+ self
+ }
+}
+
+impl<'a> MultiSpan for &'a [Span] {
+ fn into_spans(self) -> Vec<Span> {
+ self.to_vec()
+ }
+}
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+macro_rules! diagnostic_child_methods {
+ ($spanned:ident, $regular:ident, $level:expr) => {
+ #[doc = concat!("Adds a new child diagnostics message to `self` with the [`",
+ stringify!($level), "`] level, and the given `spans` and `message`.")]
+ pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ self.children.push(Diagnostic::spanned(spans, $level, message));
+ self
+ }
+
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `message`.")]
+ pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+ self.children.push(Diagnostic::new($level, message));
+ self
+ }
+ };
+}
+
+/// Iterator over the children diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+ type Item = &'a Diagnostic;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
+ /// the given set of `spans`.
+ pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+ }
+
+ diagnostic_child_methods!(span_error, error, Level::Error);
+ diagnostic_child_methods!(span_warning, warning, Level::Warning);
+ diagnostic_child_methods!(span_note, note, Level::Note);
+ diagnostic_child_methods!(span_help, help, Level::Help);
+
+ /// Returns the diagnostic `level` for `self`.
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Sets the level in `self` to `level`.
+ pub fn set_level(&mut self, level: Level) {
+ self.level = level;
+ }
+
+ /// Returns the message in `self`.
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// Sets the message in `self` to `message`.
+ pub fn set_message<T: Into<String>>(&mut self, message: T) {
+ self.message = message.into();
+ }
+
+ /// Returns the `Span`s in `self`.
+ pub fn spans(&self) -> &[Span] {
+ &self.spans
+ }
+
+ /// Sets the `Span`s in `self` to `spans`.
+ pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+ self.spans = spans.into_spans();
+ }
+
+ /// Returns an iterator over the children diagnostics of `self`.
+ pub fn children(&self) -> Children<'_> {
+ Children(self.children.iter())
+ }
+
+ /// Emit the diagnostic.
+ pub fn emit(self) {
+ fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+ let mut multi_span = super::bridge::client::MultiSpan::new();
+ for span in spans {
+ multi_span.push(span.0);
+ }
+ multi_span
+ }
+
+ let mut diag = super::bridge::client::Diagnostic::new(
+ self.level,
+ &self.message[..],
+ to_internal(self.spans),
+ );
+ for c in self.children {
+ diag.sub(c.level, &c.message[..], to_internal(c.spans));
+ }
+ diag.emit();
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
new file mode 100644
index 000000000..c50a16bf4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
@@ -0,0 +1,1106 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
+//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
+/// Determines whether proc_macro has been made accessible to the currently
+/// running program.
+///
+/// The proc_macro crate is only intended for use inside the implementation of
+/// procedural macros. All the functions in this crate panic if invoked from
+/// outside of a procedural macro, such as from a build script or unit test or
+/// ordinary Rust binary.
+///
+/// With consideration for Rust libraries that are designed to support both
+/// macro and non-macro use cases, `proc_macro::is_available()` provides a
+/// non-panicking way to detect whether the infrastructure required to use the
+/// API of proc_macro is presently available. Returns true if invoked from
+/// inside of a procedural macro, false if invoked from any other binary.
+pub fn is_available() -> bool {
+ bridge::Bridge::is_available()
+}
+
+/// The main type provided by this crate, representing an abstract stream of
+/// tokens, or, more specifically, a sequence of token trees.
+/// The type provides interfaces for iterating over those token trees and, conversely,
+/// collecting a number of token trees into one stream.
+///
+/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
+/// and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+pub struct TokenStream(Option<bridge::client::TokenStream>);
+
+/// Error returned from `TokenStream::from_str`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct LexError;
+
+impl fmt::Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
+ }
+}
+
+impl error::Error for LexError {}
+
+/// Error returned from `TokenStream::expand_expr`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct ExpandError;
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("macro expansion failed")
+ }
+}
+
+impl error::Error for ExpandError {}
+
+impl TokenStream {
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> TokenStream {
+ TokenStream(None)
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
+ }
+
+ /// Parses this `TokenStream` as an expression and attempts to expand any
+ /// macros within it. Returns the expanded `TokenStream`.
+ ///
+ /// Currently only expressions expanding to literals will succeed, although
+ /// this may be relaxed in the future.
+ ///
+ /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+ /// report an error, failing compilation, and/or return an `Err(..)`. The
+ /// specific behavior for any error condition, and what conditions are
+ /// considered errors, is unspecified and may change in the future.
+ pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+ let stream = self.0.as_ref().ok_or(ExpandError)?;
+ match bridge::client::TokenStream::expand_expr(stream) {
+ Ok(stream) => Ok(TokenStream(Some(stream))),
+ Err(_) => Err(ExpandError),
+ }
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters not existing in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
+ }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly convertible back
+/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// Prints token in a form convenient for debugging.
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+pub use quote::{quote, quote_span};
+
+fn tree_to_bridge_tree(
+ tree: TokenTree,
+) -> bridge::TokenTree<
+ bridge::client::Group,
+ bridge::client::Punct,
+ bridge::client::Ident,
+ bridge::client::Literal,
+> {
+ match tree {
+ TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+ TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+ TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+ TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
+ }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ConcatStreamsHelper {
+ streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ConcatStreamsHelper {
+ fn new(capacity: usize) -> Self {
+ ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ if let Some(stream) = stream.0 {
+ self.streams.push(stream);
+ }
+ }
+
+ fn build(mut self) -> TokenStream {
+ if self.streams.len() <= 1 {
+ TokenStream(self.streams.pop())
+ } else {
+ TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+ }
+ }
+
+ fn append_to(mut self, stream: &mut TokenStream) {
+ if self.streams.is_empty() {
+ return;
+ }
+ let base = stream.0.take();
+ if base.is_none() && self.streams.len() == 1 {
+ stream.0 = self.streams.pop();
+ } else {
+ stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+ }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let iter = streams.into_iter();
+ let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+ iter.for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ // FIXME(eddyb) Use an optimized implementation if/when possible.
+ *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+ }
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter(
+ std::vec::IntoIter<
+ bridge::TokenTree<
+ bridge::client::Group,
+ bridge::client::Punct,
+ bridge::client::Ident,
+ bridge::client::Literal,
+ >,
+ >,
+ );
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.0.next().map(|tree| match tree {
+ bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+ bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+ bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+ bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+ })
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
+ }
+ }
+}
+
+#[doc(hidden)]
+mod quote;
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span(bridge::client::Span);
+
+macro_rules! diagnostic_method {
+ ($name:ident, $level:expr) => {
+ /// Creates a new `Diagnostic` with the given `message` at the span
+ /// `self`.
+ pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
+ Diagnostic::spanned(self, $level, message)
+ }
+ };
+}
+
+impl Span {
+ /// A span that resolves at the macro definition site.
+ pub fn def_site() -> Span {
+ Span(bridge::client::Span::def_site())
+ }
+
+ /// The span of the invocation of the current procedural macro.
+ /// Identifiers created with this span will be resolved as if they were written
+ /// directly at the macro call location (call-site hygiene) and other code
+ /// at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span(bridge::client::Span::call_site())
+ }
+
+ /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
+ /// definition site (local variables, labels, `$crate`) and sometimes at the macro
+ /// call site (everything else).
+ /// The span location is taken from the call-site.
+ pub fn mixed_site() -> Span {
+ Span(bridge::client::Span::mixed_site())
+ }
+
+ /// The original source file into which this span points.
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile(self.0.source_file())
+ }
+
+ /// The `Span` for the tokens in the previous macro expansion from which
+ /// `self` was generated from, if any.
+ pub fn parent(&self) -> Option<Span> {
+ self.0.parent().map(Span)
+ }
+
+ /// The span for the origin source code that `self` was generated from. If
+ /// this `Span` wasn't generated from other macro expansions then the return
+ /// value is the same as `*self`.
+ pub fn source(&self) -> Span {
+ Span(self.0.source())
+ }
+
+ /// Gets the starting line/column in the source file for this span.
+ pub fn start(&self) -> LineColumn {
+ self.0.start().add_1_to_column()
+ }
+
+ /// Gets the ending line/column in the source file for this span.
+ pub fn end(&self) -> LineColumn {
+ self.0.end().add_1_to_column()
+ }
+
+ /// Creates an empty span pointing to directly before this span.
+ pub fn before(&self) -> Span {
+ Span(self.0.before())
+ }
+
+ /// Creates an empty span pointing to directly after this span.
+ pub fn after(&self) -> Span {
+ Span(self.0.after())
+ }
+
+ /// Creates a new span encompassing `self` and `other`.
+ ///
+ /// Returns `None` if `self` and `other` are from different files.
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.0.join(other.0).map(Span)
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span(self.0.resolved_at(other.0))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ pub fn located_at(&self, other: Span) -> Span {
+ other.resolved_at(*self)
+ }
+
+ /// Compares two spans to see if they're equal.
+ pub fn eq(&self, other: &Span) -> bool {
+ self.0 == other.0
+ }
+
+ /// Returns the source text behind a span. This preserves the original source
+ /// code, including spaces and comments. It only returns a result if the span
+ /// corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens and
+ /// not on this source text. The result of this function is a best effort to
+ /// be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.0.source_text()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn save_span(&self) -> usize {
+ self.0.save_span()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn recover_proc_macro_span(id: usize) -> Span {
+ Span(bridge::client::Span::recover_proc_macro_span(id))
+ }
+
+ diagnostic_method!(error, Level::Error);
+ diagnostic_method!(warning, Level::Warning);
+ diagnostic_method!(note, Level::Note);
+ diagnostic_method!(help, Level::Help);
+}
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// A line-column pair representing the start or end of a `Span`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
+ pub line: usize,
+ /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
+ /// file on which the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl LineColumn {
+ fn add_1_to_column(self) -> Self {
+ LineColumn { line: self.line, column: self.column + 1 }
+ }
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line.cmp(&other.line).then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// The source file of a given `Span`.
+#[derive(Clone)]
+pub struct SourceFile(bridge::client::SourceFile);
+
+impl SourceFile {
+ /// Gets the path to this source file.
+ ///
+ /// ### Note
+ /// If the code span associated with this `SourceFile` was generated by an external
+ /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check.
+ ///
+ /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
+ /// the command line, the path as given might not actually be valid.
+ ///
+ /// [`is_real`]: Self::is_real
+ pub fn path(&self) -> PathBuf {
+ PathBuf::from(self.0.path())
+ }
+
+ /// Returns `true` if this source file is a real source file, and not generated by an external
+ /// macro's expansion.
+ pub fn is_real(&self) -> bool {
+ // This is a hack until intercrate spans are implemented and we can have real source files
+ // for spans generated in external macros.
+ // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
+ self.0.is_real()
+ }
+}
+
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+impl PartialEq for SourceFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.eq(&other.0)
+ }
+}
+
+impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Ident(ref t) => t.span(),
+ TokenTree::Punct(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Ident(ref mut t) => t.set_span(span),
+ TokenTree::Punct(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+/// Prints token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Ident(ref tt) => tt.fmt(f),
+ TokenTree::Punct(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> TokenTree {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> TokenTree {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly convertible back
+/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
+#[derive(Clone)]
+pub struct Group(bridge::client::Group);
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Delimiter {
+ /// `( ... )`
+ Parenthesis,
+ /// `{ ... }`
+ Brace,
+ /// `[ ... ]`
+ Bracket,
+ /// `Ø ... Ø`
+ /// An invisible delimiter, that may, for example, appear around tokens coming from a
+ /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+ /// `$var * 3` where `$var` is `1 + 2`.
+ /// Invisible delimiters might not survive roundtrip of a token stream through a string.
+ None,
+}
+
+impl Group {
+ /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group(bridge::client::Group::new(delimiter, stream.0))
+ }
+
+ /// Returns the delimiter of this `Group`
+ pub fn delimiter(&self) -> Delimiter {
+ self.0.delimiter()
+ }
+
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
+ pub fn stream(&self) -> TokenStream {
+ TokenStream(Some(self.0.stream()))
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_open(&self) -> Span {
+ Span(self.0.span_open())
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_close(&self) -> Span {
+ Span(self.0.span_close())
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+}
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Group")
+ .field("delimiter", &self.delimiter())
+ .field("stream", &self.stream())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
+///
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
+/// forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct(bridge::client::Punct);
+
+/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
+/// by a different token or whitespace ([`Spacing::Alone`]).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Spacing {
+ /// A `Punct` is not immediately followed by another `Punct`.
+ /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
+ Alone,
+ /// A `Punct` is immediately followed by another `Punct`.
+ /// E.g. `+` is `Joint` in `+=` and `++`.
+ ///
+ /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
+ Joint,
+}
+
+impl Punct {
+ /// Creates a new `Punct` from the given character and spacing.
+ /// The `ch` argument must be a valid punctuation character permitted by the language,
+ /// otherwise the function will panic.
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct(bridge::client::Punct::new(ch, spacing))
+ }
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+ self.0.as_char()
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether it's immediately
+ /// followed by another `Punct` in the token stream, so they can potentially be combined into
+ /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
+ /// (`Alone`) so the operator has certainly ended.
+ pub fn spacing(&self) -> Spacing {
+ self.0.spacing()
+ }
+
+ /// Returns the span for this punctuation character.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the punctuation character as a string that should be losslessly convertible
+/// back into the same character.
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Punct")
+ .field("ch", &self.as_char())
+ .field("spacing", &self.spacing())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl PartialEq<char> for Punct {
+ fn eq(&self, rhs: &char) -> bool {
+ self.as_char() == *rhs
+ }
+}
+
+impl PartialEq<Punct> for char {
+ fn eq(&self, rhs: &Punct) -> bool {
+ *self == rhs.as_char()
+ }
+}
+
+/// An identifier (`ident`).
+#[derive(Clone)]
+pub struct Ident(bridge::client::Ident);
+
+impl Ident {
+ /// Creates a new `Ident` with the given `string` as well as the specified
+ /// `span`.
+ /// The `string` argument must be a valid identifier permitted by the
+ /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier.
+ ///
+ /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
+ /// meaning that identifiers created with this span will be resolved as if they were written
+ /// directly at the location of the macro call, and other code at the macro call site will be
+ /// able to refer to them as well.
+ ///
+ /// Later spans like `Span::def_site()` will allow to opt-in to "definition-site" hygiene
+ /// meaning that identifiers created with this span will be resolved at the location of the
+ /// macro definition and other code at the macro call site will not be able to refer to them.
+ ///
+ /// Due to the current importance of hygiene this constructor, unlike other
+ /// tokens, requires a `Span` to be specified at construction.
+ pub fn new(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, false))
+ }
+
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+ /// The `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
+ /// (e.g. `self`, `super`) are not supported, and will cause a panic.
+ pub fn new_raw(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, true))
+ }
+
+ /// Returns the span of this `Ident`, encompassing the entire string returned
+ /// by [`to_string`](Self::to_string).
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span of this `Ident`, possibly changing its hygiene context.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the identifier as a string that should be losslessly convertible
+/// back into the same identifier.
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Ident")
+ .field("ident", &self.to_string())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`),
+/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
+/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
+#[derive(Clone)]
+pub struct Literal(bridge::client::Literal);
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the integral is
+ /// also suffixed at the end.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::integer(&n.to_string()))
+ }
+ )*)
+}
+
+impl Literal {
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_unsuffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_suffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f32(&n.to_string()))
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_unsuffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_suffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f64(&n.to_string()))
+ }
+
+ /// String literal.
+ pub fn string(string: &str) -> Literal {
+ Literal(bridge::client::Literal::string(string))
+ }
+
+ /// Character literal.
+ pub fn character(ch: char) -> Literal {
+ Literal(bridge::client::Literal::character(ch))
+ }
+
+ /// Byte string literal.
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ Literal(bridge::client::Literal::byte_string(bytes))
+ }
+
+ /// Returns the span encompassing this literal.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span associated for this literal.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+
+ /// Returns a `Span` that is a subset of `self.span()` containing only the
+ /// source bytes in range `range`. Returns `None` if the would-be trimmed
+ /// span is outside the bounds of `self`.
+ // FIXME(SergioBenitez): check that the byte range starts and ends at a
+ // UTF-8 boundary of the source. otherwise, it's likely that a panic will
+ // occur elsewhere when the source text is printed.
+ // FIXME(SergioBenitez): there is no way for the user to know what
+ // `self.span()` actually maps to, so this method can currently only be
+ // called blindly. For example, `to_string()` for the character 'c' returns
+ // "'\u{63}'"; there is no way for the user to know whether the source text
+ // was 'c' or whether it was '\u{63}'.
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
+ }
+}
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<Self, LexError> {
+ match bridge::client::Literal::from_str(src) {
+ Ok(literal) => Ok(Literal(literal)),
+ Err(()) => Err(LexError),
+ }
+ }
+}
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+ use std::env::{self, VarError};
+ use std::ffi::OsStr;
+
+ /// Retrieve an environment variable and add it to build dependency info.
+ /// Build system executing the compiler will know that the variable was accessed during
+ /// compilation, and will be able to rerun the build when the value of that variable changes.
+ /// Besides the dependency tracking this function should be equivalent to `env::var` from the
+ /// standard library, except that the argument must be UTF-8.
+ pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+ let key: &str = key.as_ref();
+ let value = env::var(key);
+ super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
+ value
+ }
+}
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+ /// Track a file explicitly.
+ ///
+ /// Commonly used for tracking asset preprocessing.
+ pub fn path<P: AsRef<str>>(path: P) {
+ let path: &str = path.as_ref();
+ super::bridge::client::FreeFunctions::track_path(path);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs
new file mode 100644
index 000000000..39309faa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs
@@ -0,0 +1,139 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+macro_rules! quote_tt {
+ (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+ ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+ (,) => { Punct::new(',', Spacing::Alone) };
+ (.) => { Punct::new('.', Spacing::Alone) };
+ (;) => { Punct::new(';', Spacing::Alone) };
+ (!) => { Punct::new('!', Spacing::Alone) };
+ (<) => { Punct::new('<', Spacing::Alone) };
+ (>) => { Punct::new('>', Spacing::Alone) };
+ (&) => { Punct::new('&', Spacing::Alone) };
+ (=) => { Punct::new('=', Spacing::Alone) };
+ ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+macro_rules! quote_ts {
+ ((@ $($t:tt)*)) => { $($t)* };
+ (::) => {
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)),
+ TokenTree::from(Punct::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
+ };
+ ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
+macro_rules! quote {
+ () => { TokenStream::new() };
+ ($($t:tt)*) => {
+ [
+ $(TokenStream::from(quote_ts!($t)),)*
+ ].iter().cloned().collect::<TokenStream>()
+ };
+}
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+ if stream.is_empty() {
+ return quote!(super::TokenStream::new());
+ }
+ let proc_macro_crate = quote!(crate);
+ let mut after_dollar = false;
+ let tokens = stream
+ .into_iter()
+ .filter_map(|tree| {
+ if after_dollar {
+ after_dollar = false;
+ match tree {
+ TokenTree::Ident(_) => {
+ return Some(quote!(Into::<super::TokenStream>::into(
+ Clone::clone(&(@ tree))),));
+ }
+ TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+ _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ }
+ } else if let TokenTree::Punct(ref tt) = tree {
+ if tt.as_char() == '$' {
+ after_dollar = true;
+ return None;
+ }
+ }
+
+ Some(quote!(super::TokenStream::from((@ match tree {
+ TokenTree::Punct(tt) => quote!(super::TokenTree::Punct(super::Punct::new(
+ (@ TokenTree::from(Literal::character(tt.as_char()))),
+ (@ match tt.spacing() {
+ Spacing::Alone => quote!(super::Spacing::Alone),
+ Spacing::Joint => quote!(super::Spacing::Joint),
+ }),
+ ))),
+ TokenTree::Group(tt) => quote!(super::TokenTree::Group(super::Group::new(
+ (@ match tt.delimiter() {
+ Delimiter::Parenthesis => quote!(super::Delimiter::Parenthesis),
+ Delimiter::Brace => quote!(super::Delimiter::Brace),
+ Delimiter::Bracket => quote!(super::Delimiter::Bracket),
+ Delimiter::None => quote!(super::Delimiter::None),
+ }),
+ (@ quote(tt.stream())),
+ ))),
+ TokenTree::Ident(tt) => quote!(super::TokenTree::Ident(super::Ident::new(
+ (@ TokenTree::from(Literal::string(&tt.to_string()))),
+ (@ quote_span(proc_macro_crate.clone(), tt.span())),
+ ))),
+ TokenTree::Literal(tt) => quote!(super::TokenTree::Literal({
+ let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+ .parse::<super::TokenStream>()
+ .unwrap()
+ .into_iter();
+ if let (Some(super::TokenTree::Literal(mut lit)), None) =
+ (iter.next(), iter.next())
+ {
+ lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+ lit
+ } else {
+ unreachable!()
+ }
+ }))
+ })),))
+ })
+ .collect::<TokenStream>();
+
+ if after_dollar {
+ panic!("unexpected trailing `$` in `quote!`");
+ }
+
+ quote!([(@ tokens)].iter().cloned().collect::<super::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+ let id = span.save_span();
+ quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
new file mode 100644
index 000000000..05a565fbf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
@@ -0,0 +1,834 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet is using proc-macro2 as backend,
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream::default()
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+// Rustc Server Ident has to be `Copyable`
+// We use a stub here for bypassing
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+ idents: HashMap<IdentData, u32>,
+ ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+ fn intern(&mut self, data: &IdentData) -> u32 {
+ if let Some(index) = self.idents.get(data) {
+ return *index;
+ }
+
+ let index = self.idents.len() as u32;
+ self.ident_data.push(data.clone());
+ self.idents.insert(data.clone(), index);
+ index
+ }
+
+ fn get(&self, index: u32) -> &IdentData {
+ &self.ident_data[index as usize]
+ }
+
+ #[allow(unused)]
+ fn get_mut(&mut self, index: u32) -> &mut IdentData {
+ self.ident_data.get_mut(index as usize).expect("Should be consistent")
+ }
+}
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = super::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ fn build(self) -> TokenStream {
+ self.acc
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+ trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ ident_interner: IdentInterner,
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type Group = Group;
+ type Punct = Punct;
+ type Ident = IdentId;
+ type Literal = Literal;
+ type SourceFile = SourceFile;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let leaf = tt::Leaf::from(p);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(subtree),
+ })
+ .collect()
+ }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
+
+impl server::Group for RustAnalyzer {
+ fn new(
+ &mut self,
+ delimiter: bridge::Delimiter,
+ stream: Option<Self::TokenStream>,
+ ) -> Self::Group {
+ Self::Group {
+ delimiter: delim_to_internal(delimiter),
+ token_trees: stream.unwrap_or_default().token_trees,
+ }
+ }
+ fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
+ delim_to_external(group.delimiter)
+ }
+
+ // NOTE: the returned stream does not include the group's delimiter
+ fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+ TokenStream { token_trees: group.token_trees.clone() }
+ }
+
+ fn span(&mut self, group: &Self::Group) -> Self::Span {
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+ if let Some(delim) = &mut group.delimiter {
+ delim.id = span;
+ }
+ }
+
+ fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+}
+
+impl server::Punct for RustAnalyzer {
+ fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
+ tt::Punct {
+ char: ch,
+ spacing: spacing_to_internal(spacing),
+ id: tt::TokenId::unspecified(),
+ }
+ }
+ fn as_char(&mut self, punct: Self::Punct) -> char {
+ punct.char
+ }
+ fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
+ spacing_to_external(punct.spacing)
+ }
+ fn span(&mut self, punct: Self::Punct) -> Self::Span {
+ punct.id
+ }
+ fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+ tt::Punct { id: span, ..punct }
+ }
+}
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn def_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn call_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+ /// Recent feature, not yet in the proc_macro
+ ///
+ /// See PR:
+ /// https://github.com/rust-lang/rust/pull/55780
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn mixed_site(&mut self) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+ //TODP
+ other.push(span)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs
new file mode 100644
index 000000000..9d56f0eaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs
@@ -0,0 +1,105 @@
+//! Proc macro ABI.
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub use ra_server::TokenStream;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes =
+ attributes.map_or(TokenStream::new(), |attr| TokenStream::with_subtree(attr.clone()));
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..48030f8d8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs
@@ -0,0 +1,156 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+#[repr(C)]
+pub struct Buffer {
+ data: *mut u8,
+ len: usize,
+ capacity: usize,
+ reserve: extern "C" fn(Buffer, usize) -> Buffer,
+ drop: extern "C" fn(Buffer),
+}
+
+unsafe impl Sync for Buffer {}
+unsafe impl Send for Buffer {}
+
+impl Default for Buffer {
+ #[inline]
+ fn default() -> Self {
+ Self::from(vec![])
+ }
+}
+
+impl Deref for Buffer {
+ type Target = [u8];
+ #[inline]
+ fn deref(&self) -> &[u8] {
+ unsafe { slice::from_raw_parts(self.data as *const u8, self.len) }
+ }
+}
+
+impl DerefMut for Buffer {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut [u8] {
+ unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+ }
+}
+
+impl Buffer {
+ #[inline]
+ pub(super) fn new() -> Self {
+ Self::default()
+ }
+
+ #[inline]
+ pub(super) fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ #[inline]
+ pub(super) fn take(&mut self) -> Self {
+ mem::take(self)
+ }
+
+ // We have the array method separate from extending from a slice. This is
+ // because in the case of small arrays, codegen can be more efficient
+ // (avoiding a memmove call). With extend_from_slice, LLVM at least
+ // currently is not able to make that optimization.
+ #[inline]
+ pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[u8; N]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn extend_from_slice(&mut self, xs: &[u8]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn push(&mut self, v: u8) {
+ // The code here is taken from Vec::push, and we know that reserve()
+ // will panic if we're exceeding isize::MAX bytes and so there's no need
+ // to check for overflow.
+ if self.len == self.capacity {
+ let b = self.take();
+ *self = (b.reserve)(b, 1);
+ }
+ unsafe {
+ *self.data.add(self.len) = v;
+ self.len += 1;
+ }
+ }
+}
+
+impl Write for Buffer {
+ #[inline]
+ fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(xs);
+ Ok(xs.len())
+ }
+
+ #[inline]
+ fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+ self.extend_from_slice(xs);
+ Ok(())
+ }
+
+ #[inline]
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl Drop for Buffer {
+ #[inline]
+ fn drop(&mut self) {
+ let b = self.take();
+ (b.drop)(b);
+ }
+}
+
+impl From<Vec<u8>> for Buffer {
+ fn from(mut v: Vec<u8>) -> Self {
+ let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+ mem::forget(v);
+
+ // This utility function is nested in here because it can *only*
+ // be safely called on `Buffer`s created by *this* `proc_macro`.
+ fn to_vec(b: Buffer) -> Vec<u8> {
+ unsafe {
+ let Buffer { data, len, capacity, .. } = b;
+ mem::forget(b);
+ Vec::from_raw_parts(data, len, capacity)
+ }
+ }
+
+ extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer {
+ let mut v = to_vec(b);
+ v.reserve(additional);
+ Buffer::from(v)
+ }
+
+ extern "C" fn drop(b: Buffer) {
+ mem::drop(to_vec(b));
+ }
+
+ Buffer { data, len, capacity, reserve, drop }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..22bda8ba5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs
@@ -0,0 +1,529 @@
+//! Client-side types.
+
+use super::*;
+
+use std::marker::PhantomData;
+
+macro_rules! define_handles {
+ (
+ 'owned: $($oty:ident,)*
+ 'interned: $($ity:ident,)*
+ ) => {
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub struct HandleCounters {
+ $($oty: AtomicUsize,)*
+ $($ity: AtomicUsize,)*
+ }
+
+ impl HandleCounters {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ extern "C" fn get() -> &'static Self {
+ static COUNTERS: HandleCounters = HandleCounters {
+ $($oty: AtomicUsize::new(1),)*
+ $($ity: AtomicUsize::new(1),)*
+ };
+ &COUNTERS
+ }
+ }
+
+ // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub(super) struct HandleStore<S: server::Types> {
+ $($oty: handle::OwnedStore<S::$oty>,)*
+ $($ity: handle::InternedStore<S::$ity>,)*
+ }
+
+ impl<S: server::Types> HandleStore<S> {
+ pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+ HandleStore {
+ $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+ $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+ }
+ }
+ }
+
+ $(
+ #[repr(C)]
+ pub(crate) struct $oty {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ // Forward `Drop::drop` to the inherent `drop` method.
+ impl Drop for $oty {
+ fn drop(&mut self) {
+ $oty {
+ handle: self.handle,
+ _marker: PhantomData,
+ }.drop();
+ }
+ }
+
+ impl<S> Encode<S> for $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ let handle = self.handle;
+ mem::forget(self);
+ handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$oty.take(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S> Encode<S> for &$oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+ &s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S> Encode<S> for &mut $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s mut Marked<S::$oty, $oty>
+ {
+ fn decode(
+ r: &mut Reader<'_>,
+ s: &'s mut HandleStore<server::MarkedTypes<S>>
+ ) -> Self {
+ &mut s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$oty.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $oty {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $oty {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+
+ $(
+ #[repr(C)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ pub(crate) struct $ity {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ impl<S> Encode<S> for $ity {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$ity.copy(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$ity.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ity {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $ity {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+ }
+}
+define_handles! {
+ 'owned:
+ FreeFunctions,
+ TokenStream,
+ Literal,
+ SourceFile,
+ MultiSpan,
+ Diagnostic,
+
+ 'interned:
+ Ident,
+ Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special "modes" could be listed of types in with_api
+// instead of pattern matching on methods, here and in server decl.
+
+impl Clone for TokenStream {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Literal {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Literal")
+ // format the kind without quotes, as in `kind: Float`
+ .field("kind", &format_args!("{}", &self.debug_kind()))
+ .field("symbol", &self.symbol())
+ // format `Some("...")` on one line even in {:#?} mode
+ .field("suffix", &format_args!("{:?}", &self.suffix()))
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl Clone for SourceFile {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Span {
+ pub(crate) fn def_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.def_site)
+ }
+
+ pub(crate) fn call_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.call_site)
+ }
+
+ pub(crate) fn mixed_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.mixed_site)
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.debug())
+ }
+}
+
+macro_rules! define_client_side {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(impl $name {
+ $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+ Bridge::with(|bridge| {
+ let mut buf = bridge.cached_buffer.take();
+
+ buf.clear();
+ api_tags::Method::$name(api_tags::$name::$method).encode(&mut buf, &mut ());
+ reverse_encode!(buf; $($arg),*);
+
+ buf = bridge.dispatch.call(buf);
+
+ let r = Result::<_, PanicMessage>::decode(&mut &buf[..], &mut ());
+
+ bridge.cached_buffer = buf;
+
+ r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+ })
+ })*
+ })*
+ }
+}
+with_api!(self, self, define_client_side);
+
+struct Bridge<'a> {
+ /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+ /// used for making requests.
+ cached_buffer: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+ /// Provided globals for this macro expansion.
+ globals: ExpnGlobals<Span>,
+}
+
+enum BridgeState<'a> {
+ /// No server is currently connected to this client.
+ NotConnected,
+
+ /// A server is connected and available for requests.
+ Connected(Bridge<'a>),
+
+ /// Access to the bridge is being exclusively acquired
+ /// (e.g., during `BridgeState::with`).
+ InUse,
+}
+
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+ type Out = BridgeState<'a>;
+}
+
+thread_local! {
+ static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+ scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+ /// Take exclusive control of the thread-local
+ /// `BridgeState`, and pass it to `f`, mutably.
+ /// The state will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ ///
+ /// N.B., while `f` is running, the thread-local state
+ /// is `BridgeState::InUse`.
+ fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+ BRIDGE_STATE.with(|state| {
+ state.replace(BridgeState::InUse, |mut state| {
+ // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+ f(&mut *state)
+ })
+ })
+ }
+}
+
+impl Bridge<'_> {
+ fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+ BridgeState::with(|state| match state {
+ BridgeState::NotConnected => {
+ panic!("procedural macro API is used outside of a procedural macro");
+ }
+ BridgeState::InUse => {
+ panic!("procedural macro API is used while it's already in use");
+ }
+ BridgeState::Connected(bridge) => f(bridge),
+ })
+ }
+}
+
+pub(crate) fn is_available() -> bool {
+ BridgeState::with(|state| match state {
+ BridgeState::Connected(_) | BridgeState::InUse => true,
+ BridgeState::NotConnected => false,
+ })
+}
+
+/// A client-side RPC entry-point, which may be using a different `proc_macro`
+/// from the one used by the server, but can be invoked compatibly.
+///
+/// Note that the (phantom) `I` ("input") and `O` ("output") type parameters
+/// decorate the `Client<I, O>` with the RPC "interface" of the entry-point, but
+/// do not themselves participate in ABI, at all, only facilitate type-checking.
+///
+/// E.g. `Client<TokenStream, TokenStream>` is the common proc macro interface,
+/// used for `#[proc_macro] fn foo(input: TokenStream) -> TokenStream`,
+/// indicating that the RPC input and output will be serialized token streams,
+/// and forcing the use of APIs that take/return `S::TokenStream`, server-side.
+#[repr(C)]
+pub struct Client<I, O> {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+
+ pub(super) run: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+
+ pub(super) _marker: PhantomData<fn(I) -> O>,
+}
+
+impl<I, O> Copy for Client<I, O> {}
+impl<I, O> Clone for Client<I, O> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+fn maybe_install_panic_hook(force_show_panics: bool) {
+ // Hide the default panic output within `proc_macro` expansions.
+ // NB. the server can't do this because it may use a different libstd.
+ static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+ HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ let show = BridgeState::with(|state| match state {
+ BridgeState::NotConnected => true,
+ BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+ });
+ if show {
+ prev(info)
+ }
+ }));
+ });
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+ config: BridgeConfig<'_>,
+ f: impl FnOnce(A) -> R,
+) -> Buffer {
+ let BridgeConfig { input: mut buf, dispatch, force_show_panics, .. } = config;
+
+ panic::catch_unwind(panic::AssertUnwindSafe(|| {
+ maybe_install_panic_hook(force_show_panics);
+
+ let reader = &mut &buf[..];
+ let (globals, input) = <(ExpnGlobals<Span>, A)>::decode(reader, &mut ());
+
+ // Put the buffer we used for input back in the `Bridge` for requests.
+ let new_state =
+ BridgeState::Connected(Bridge { cached_buffer: buf.take(), dispatch, globals });
+
+ BRIDGE_STATE.with(|state| {
+ state.set(new_state, || {
+ let output = f(input);
+
+ // Take the `cached_buffer` back out, for the output value.
+ buf = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+ // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+ // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+ // having handles outside the `bridge.enter(|| ...)` scope, and
+ // to catch panics that could happen while encoding the success.
+ //
+ // Note that panics should be impossible beyond this point, but
+ // this is defensively trying to avoid any accidental panicking
+ // reaching the `extern "C"` (which should `abort` but might not
+ // at the moment, so this is also potentially preventing UB).
+ buf.clear();
+ Ok::<_, ()>(output).encode(&mut buf, &mut ());
+ })
+ })
+ }))
+ .map_err(PanicMessage::from)
+ .unwrap_or_else(|e| {
+ buf.clear();
+ Err::<(), _>(e).encode(&mut buf, &mut ());
+ });
+ buf
+}
+
+impl Client<super::super::TokenStream, super::super::TokenStream> {
+ pub const fn expand1(
+ f: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl Client<(super::super::TokenStream, super::super::TokenStream), super::super::TokenStream> {
+ pub const fn expand2(
+ f: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |(input, input2)| {
+ f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+ })
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+ CustomDerive {
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+
+ Attr {
+ name: &'static str,
+ client: Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >,
+ },
+
+ Bang {
+ name: &'static str,
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &'static str {
+ match self {
+ ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+ ProcMacro::Attr { name, .. } => name,
+ ProcMacro::Bang { name, .. } => name,
+ }
+ }
+
+ pub const fn custom_derive(
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
+ }
+
+ pub const fn attr(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ ProcMacro::Attr { name, client: Client::expand2(expand) }
+ }
+
+ pub const fn bang(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::Bang { name, client: Client::expand1(expand) }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..d371ae3ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs
@@ -0,0 +1,32 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+use std::marker::PhantomData;
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+ call: unsafe extern "C" fn(*mut Env, A) -> R,
+ env: *mut Env,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ //
+ // The `'a` lifetime parameter represents the lifetime of `Env`.
+ _marker: PhantomData<*mut &'a mut ()>,
+}
+
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+ fn from(f: &'a mut F) -> Self {
+ unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: *mut Env, arg: A) -> R {
+ (*(env as *mut _ as *mut F))(arg)
+ }
+ Closure { call: call::<A, R, F>, env: f as *mut _ as *mut Env, _marker: PhantomData }
+ }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+ pub fn call(&mut self, arg: A) -> R {
+ unsafe { (self.call)(self.env, arg) }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..c219a9465
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs
@@ -0,0 +1,89 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::{BuildHasher, Hash};
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+pub(super) type Handle = NonZeroU32;
+
+/// A store that associates values of type `T` with numeric handles. A value can
+/// be looked up using its handle.
+pub(super) struct OwnedStore<T: 'static> {
+ counter: &'static AtomicUsize,
+ data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ // Ensure the handle counter isn't 0, which would panic later,
+ // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+ assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+ OwnedStore { counter, data: BTreeMap::new() }
+ }
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+ let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+ assert!(self.data.insert(handle, x).is_none());
+ handle
+ }
+
+ pub(super) fn take(&mut self, h: Handle) -> T {
+ self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+ type Output = T;
+ fn index(&self, h: Handle) -> &T {
+ self.data.get(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+ fn index_mut(&mut self, h: Handle) -> &mut T {
+ self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+// HACK(eddyb) deterministic `std::collections::hash_map::RandomState` replacement
+// that doesn't require adding any dependencies to `proc_macro` (like `rustc-hash`).
+#[derive(Clone)]
+struct NonRandomState;
+
+impl BuildHasher for NonRandomState {
+ type Hasher = std::collections::hash_map::DefaultHasher;
+ #[inline]
+ fn build_hasher(&self) -> Self::Hasher {
+ Self::Hasher::new()
+ }
+}
+
+/// Like `OwnedStore`, but avoids storing any value more than once.
+pub(super) struct InternedStore<T: 'static> {
+ owned: OwnedStore<T>,
+ interner: HashMap<T, Handle, NonRandomState>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ InternedStore {
+ owned: OwnedStore::new(counter),
+ interner: HashMap::with_hasher(NonRandomState),
+ }
+ }
+
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let owned = &mut self.owned;
+ *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
+ }
+
+ pub(super) fn copy(&mut self, h: Handle) -> T {
+ self.owned[h]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..ffd440793
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs
@@ -0,0 +1,493 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the arguments names
+/// and argument/return types, to enable several different usecases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ FreeFunctions {
+ fn drop($self: $S::FreeFunctions);
+ fn track_env_var(var: &str, value: Option<&str>);
+ fn track_path(path: &str);
+ },
+ TokenStream {
+ fn drop($self: $S::TokenStream);
+ fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+ fn is_empty($self: &$S::TokenStream) -> bool;
+ fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+ fn from_str(src: &str) -> $S::TokenStream;
+ fn to_string($self: &$S::TokenStream) -> String;
+ fn from_token_tree(
+ tree: TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>,
+ ) -> $S::TokenStream;
+ fn concat_trees(
+ base: Option<$S::TokenStream>,
+ trees: Vec<TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>>,
+ ) -> $S::TokenStream;
+ fn concat_streams(
+ base: Option<$S::TokenStream>,
+ streams: Vec<$S::TokenStream>,
+ ) -> $S::TokenStream;
+ fn into_trees(
+ $self: $S::TokenStream
+ ) -> Vec<TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>>;
+ },
+ Ident {
+ fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+ fn span($self: $S::Ident) -> $S::Span;
+ fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+ },
+ Literal {
+ fn drop($self: $S::Literal);
+ fn clone($self: &$S::Literal) -> $S::Literal;
+ fn from_str(s: &str) -> Result<$S::Literal, ()>;
+ fn to_string($self: &$S::Literal) -> String;
+ fn debug_kind($self: &$S::Literal) -> String;
+ fn symbol($self: &$S::Literal) -> String;
+ fn suffix($self: &$S::Literal) -> Option<String>;
+ fn integer(n: &str) -> $S::Literal;
+ fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+ fn float(n: &str) -> $S::Literal;
+ fn f32(n: &str) -> $S::Literal;
+ fn f64(n: &str) -> $S::Literal;
+ fn string(string: &str) -> $S::Literal;
+ fn character(ch: char) -> $S::Literal;
+ fn byte_string(bytes: &[u8]) -> $S::Literal;
+ fn span($self: &$S::Literal) -> $S::Span;
+ fn set_span($self: &mut $S::Literal, span: $S::Span);
+ fn subspan(
+ $self: &$S::Literal,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<$S::Span>;
+ },
+ SourceFile {
+ fn drop($self: $S::SourceFile);
+ fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+ fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+ fn path($self: &$S::SourceFile) -> String;
+ fn is_real($self: &$S::SourceFile) -> bool;
+ },
+ MultiSpan {
+ fn drop($self: $S::MultiSpan);
+ fn new() -> $S::MultiSpan;
+ fn push($self: &mut $S::MultiSpan, span: $S::Span);
+ },
+ Diagnostic {
+ fn drop($self: $S::Diagnostic);
+ fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+ fn sub(
+ $self: &mut $S::Diagnostic,
+ level: Level,
+ msg: &str,
+ span: $S::MultiSpan,
+ );
+ fn emit($self: $S::Diagnostic);
+ },
+ Span {
+ fn debug($self: $S::Span) -> String;
+ fn source_file($self: $S::Span) -> $S::SourceFile;
+ fn parent($self: $S::Span) -> Option<$S::Span>;
+ fn source($self: $S::Span) -> $S::Span;
+ fn start($self: $S::Span) -> LineColumn;
+ fn end($self: $S::Span) -> LineColumn;
+ fn before($self: $S::Span) -> $S::Span;
+ fn after($self: $S::Span) -> $S::Span;
+ fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+ fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
+ fn save_span($self: $S::Span) -> usize;
+ fn recover_proc_macro_span(id: usize) -> $S::Span;
+ },
+ }
+ };
+}
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to match the ordering in `reverse_decode`.
+macro_rules! reverse_encode {
+ ($writer:ident;) => {};
+ ($writer:ident; $first:ident $(, $rest:ident)*) => {
+ reverse_encode!($writer; $($rest),*);
+ $first.encode(&mut $writer, &mut ());
+ }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+ ($reader:ident, $s:ident;) => {};
+ ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+ reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+ let $first = <$first_ty>::decode(&mut $reader, $s);
+ }
+}
+
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[allow(unsafe_code)]
+mod selfless_reify;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// Configuration for establishing an active connection between a server and a
+/// client. The server creates the bridge config (`run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run` field
+/// of `client::Client`. The client constructs a local `Bridge` from the config
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct BridgeConfig<'a> {
+ /// Buffer used to pass initial input to the client.
+ input: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+ /// If 'true', always invoke the default panic hook
+ force_show_panics: bool,
+
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ _marker: marker::PhantomData<*mut ()>,
+}
+
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+ use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+ macro_rules! declare_tags {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(
+ pub(super) enum $name {
+ $($method),*
+ }
+ rpc_encode_decode!(enum $name { $($method),* });
+ )*
+
+ pub(super) enum Method {
+ $($name($name)),*
+ }
+ rpc_encode_decode!(enum Method { $($name(m)),* });
+ }
+ }
+ with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+ type Unmarked;
+ fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+ type Unmarked;
+ fn unmark(self) -> Self::Unmarked;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+ value: T,
+ _marker: marker::PhantomData<M>,
+}
+
+impl<T, M> Mark for Marked<T, M> {
+ type Unmarked = T;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ Marked { value: unmarked, _marker: marker::PhantomData }
+ }
+}
+impl<T, M> Unmark for Marked<T, M> {
+ type Unmarked = T;
+ fn unmark(self) -> Self::Unmarked {
+ self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+ type Unmarked = &'a T;
+ fn unmark(self) -> Self::Unmarked {
+ &self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+ type Unmarked = &'a mut T;
+ fn unmark(self) -> Self::Unmarked {
+ &mut self.value
+ }
+}
+
+impl<T: Mark> Mark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ // Should be a no-op due to std's in-place collect optimizations.
+ unmarked.into_iter().map(T::mark).collect()
+ }
+}
+impl<T: Unmark> Unmark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ // Should be a no-op due to std's in-place collect optimizations.
+ self.into_iter().map(T::unmark).collect()
+ }
+}
+
+macro_rules! mark_noop {
+ ($($ty:ty),* $(,)?) => {
+ $(
+ impl Mark for $ty {
+ type Unmarked = Self;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked
+ }
+ }
+ impl Unmark for $ty {
+ type Unmarked = Self;
+ fn unmark(self) -> Self::Unmarked {
+ self
+ }
+ }
+ )*
+ }
+}
+mark_noop! {
+ (),
+ bool,
+ char,
+ &'_ [u8],
+ &'_ str,
+ String,
+ u8,
+ usize,
+ Delimiter,
+ Level,
+ LineColumn,
+ Spacing,
+}
+
+rpc_encode_decode!(
+ enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+ }
+);
+rpc_encode_decode!(
+ enum Level {
+ Error,
+ Warning,
+ Note,
+ Help,
+ }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+ enum Spacing {
+ Alone,
+ Joint,
+ }
+);
+
+macro_rules! mark_compound {
+ (struct $name:ident <$($T:ident),+> { $($field:ident),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ $name {
+ $($field: Mark::mark(unmarked.$field)),*
+ }
+ }
+ }
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ $name {
+ $($field: Unmark::unmark(self.$field)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ match unmarked {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Mark::mark($field)))?
+ })*
+ }
+ }
+ }
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ match self {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Unmark::unmark($field)))?
+ })*
+ }
+ }
+ }
+ }
+}
+
+macro_rules! compound_traits {
+ ($($t:tt)*) => {
+ rpc_encode_decode!($($t)*);
+ mark_compound!($($t)*);
+ };
+}
+
+compound_traits!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+compound_traits!(
+ enum Option<T> {
+ Some(t),
+ None,
+ }
+);
+
+compound_traits!(
+ enum Result<T, E> {
+ Ok(t),
+ Err(e),
+ }
+);
+
+#[derive(Copy, Clone)]
+pub struct DelimSpan<Span> {
+ pub open: Span,
+ pub close: Span,
+ pub entire: Span,
+}
+
+impl<Span: Copy> DelimSpan<Span> {
+ pub fn from_single(span: Span) -> Self {
+ DelimSpan { open: span, close: span, entire: span }
+ }
+}
+
+compound_traits!(struct DelimSpan<Span> { open, close, entire });
+
+#[derive(Clone)]
+pub struct Group<TokenStream, Span> {
+ pub delimiter: Delimiter,
+ pub stream: Option<TokenStream>,
+ pub span: DelimSpan<Span>,
+}
+
+compound_traits!(struct Group<TokenStream, Span> { delimiter, stream, span });
+
+#[derive(Clone)]
+pub struct Punct<Span> {
+ pub ch: u8,
+ pub joint: bool,
+ pub span: Span,
+}
+
+compound_traits!(struct Punct<Span> { ch, joint, span });
+
+#[derive(Clone)]
+pub enum TokenTree<TokenStream, Span, Ident, Literal> {
+ Group(Group<TokenStream, Span>),
+ Punct(Punct<Span>),
+ Ident(Ident),
+ Literal(Literal),
+}
+
+compound_traits!(
+ enum TokenTree<TokenStream, Span, Ident, Literal> {
+ Group(tt),
+ Punct(tt),
+ Ident(tt),
+ Literal(tt),
+ }
+);
+
+/// Globals provided alongside the initial inputs for a macro expansion.
+/// Provides values such as spans which are used frequently to avoid RPC.
+#[derive(Clone)]
+pub struct ExpnGlobals<Span> {
+ pub def_site: Span,
+ pub call_site: Span,
+ pub mixed_site: Span,
+}
+
+compound_traits!(
+ struct ExpnGlobals<Span> { def_site, call_site, mixed_site }
+);
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..e9d7a46c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs
@@ -0,0 +1,304 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ for x in self {
+ x.encode(w, s);
+ }
+ }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let mut vec = Vec::with_capacity(len);
+ for _ in 0..len {
+ vec.push(T::decode(r, s));
+ }
+ vec
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..2cde1f65a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub const fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+ /// The old value will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+ /// even if `f` had panicked.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs
new file mode 100644
index 000000000..907ad256e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs
@@ -0,0 +1,84 @@
+//! Abstraction for creating `fn` pointers from any callable that *effectively*
+//! has the equivalent of implementing `Default`, even if the compiler neither
+//! provides `Default` nor allows reifying closures (i.e. creating `fn` pointers)
+//! other than those with absolutely no captures.
+//!
+//! More specifically, for a closure-like type to be "effectively `Default`":
+//! * it must be a ZST (zero-sized type): no information contained within, so
+//! that `Default`'s return value (if it were implemented) is unambiguous
+//! * it must be `Copy`: no captured "unique ZST tokens" or any other similar
+//! types that would make duplicating values at will unsound
+//! * combined with the ZST requirement, this confers a kind of "telecopy"
+//! ability: similar to `Copy`, but without keeping the value around, and
+//! instead "reconstructing" it (a noop given it's a ZST) when needed
+//! * it must be *provably* inhabited: no captured uninhabited types or any
+//! other types that cannot be constructed by the user of this abstraction
+//! * the proof is a value of the closure-like type itself, in a sense the
+//! "seed" for the "telecopy" process made possible by ZST + `Copy`
+//! * this requirement is the only reason an abstraction limited to a specific
+//! usecase is required: ZST + `Copy` can be checked with *at worst* a panic
+//! at the "attempted `::default()` call" time, but that doesn't guarantee
+//! that the value can be soundly created, and attempting to use the typical
+//! "proof ZST token" approach leads yet again to having a ZST + `Copy` type
+//! that is not proof of anything without a value (i.e. isomorphic to a
+//! newtype of the type it's trying to prove the inhabitation of)
+//!
+//! A more flexible (and safer) solution to the general problem could exist once
+//! `const`-generic parameters can have type parameters in their types:
+//!
+//! ```rust,ignore (needs future const-generics)
+//! extern "C" fn ffi_wrapper<
+//! A, R,
+//! F: Fn(A) -> R,
+//! const f: F, // <-- this `const`-generic is not yet allowed
+//! >(arg: A) -> R {
+//! f(arg)
+//! }
+//! ```
+
+use std::mem;
+
+// FIXME(eddyb) this could be `trait` impls except for the `const fn` requirement.
+macro_rules! define_reify_functions {
+ ($(
+ fn $name:ident $(<$($param:ident),*>)?
+ for $(extern $abi:tt)? fn($($arg:ident: $arg_ty:ty),*) -> $ret_ty:ty;
+ )+) => {
+ $(pub const fn $name<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >(f: F) -> $(extern $abi)? fn($($arg_ty),*) -> $ret_ty {
+ // FIXME(eddyb) describe the `F` type (e.g. via `type_name::<F>`) once panic
+ // formatting becomes possible in `const fn`.
+ assert!(mem::size_of::<F>() == 0, "selfless_reify: closure must be zero-sized");
+
+ $(extern $abi)? fn wrapper<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >($($arg: $arg_ty),*) -> $ret_ty {
+ let f = unsafe {
+ // SAFETY: `F` satisfies all criteria for "out of thin air"
+ // reconstructability (see module-level doc comment).
+ mem::MaybeUninit::<F>::uninit().assume_init()
+ };
+ f($($arg),*)
+ }
+ let _f_proof = f;
+ wrapper::<
+ $($($param,)*)?
+ F
+ >
+ })+
+ }
+}
+
+define_reify_functions! {
+ fn _reify_to_extern_c_fn_unary<A, R> for extern "C" fn(arg: A) -> R;
+
+ // HACK(eddyb) this abstraction is used with `for<'a> fn(BridgeConfig<'a>)
+ // -> T` but that doesn't work with just `reify_to_extern_c_fn_unary`
+ // because of the `fn` pointer type being "higher-ranked" (i.e. the
+ // `for<'a>` binder).
+ // FIXME(eddyb) try to remove the lifetime from `BridgeConfig`, that'd help.
+ fn reify_to_extern_c_fn_hrt_bridge<R> for extern "C" fn(bridge: super::BridgeConfig<'_>) -> R;
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..6e7a8d8c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs
@@ -0,0 +1,339 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+/// Associated types a proc-macro server implementation must provide; each is
+/// the server-side value behind the corresponding client-side handle type.
+pub trait Types {
+    type FreeFunctions: 'static;
+    type TokenStream: 'static + Clone;
+    type Ident: 'static + Copy + Eq + Hash;
+    type Literal: 'static + Clone;
+    type SourceFile: 'static + Clone;
+    type MultiSpan: 'static;
+    type Diagnostic: 'static;
+    type Span: 'static + Copy + Eq + Hash;
+}
+
+/// Declare an associated fn of one of the traits below, adding necessary
+/// default bodies.
+macro_rules! associated_fn {
+    // `drop` methods get a default body that simply drops the owned value.
+    (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+        (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+
+    // `clone` methods get a default body delegating to `Clone::clone`.
+    (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+        (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+
+    // Anything else stays a required method (note the added trailing `;`).
+    ($($item:tt)*) => ($($item)*;)
+}
+
+/// Generates one server trait per API "namespace" (as listed in `with_api!`),
+/// plus the top-level `Server` trait that combines them all.
+macro_rules! declare_server_traits {
+    ($($name:ident {
+        $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+    }),* $(,)?) => {
+        $(pub trait $name: Types {
+            $(associated_fn!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+        })*
+
+        pub trait Server: Types $(+ $name)* {
+            fn globals(&mut self) -> ExpnGlobals<Self::Span>;
+        }
+    }
+}
+with_api!(Self, self_, declare_server_traits);
+
+/// Newtype over a server `S` whose associated `Types` are wrapped in
+/// `Marked<_, _>`, so client- and server-side values cannot be mixed up.
+pub(super) struct MarkedTypes<S: Types>(S);
+
+impl<S: Server> Server for MarkedTypes<S> {
+    fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+        <_>::mark(Server::globals(&mut self.0))
+    }
+}
+
+/// Implements `Types` and every per-namespace trait for `MarkedTypes<S>` by
+/// unmarking arguments, delegating to `S`, and marking the result.
+macro_rules! define_mark_types_impls {
+    ($($name:ident {
+        $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+    }),* $(,)?) => {
+        impl<S: Types> Types for MarkedTypes<S> {
+            $(type $name = Marked<S::$name, client::$name>;)*
+        }
+
+        $(impl<S: $name> $name for MarkedTypes<S> {
+            $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+                <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+            })*
+        })*
+    }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+/// Pairs the concrete server with the `HandleStore` that maps the client's
+/// integer handles to the server's actual values during dispatch.
+struct Dispatcher<S: Types> {
+    handle_store: HandleStore<S>,
+    server: S,
+}
+
+/// Generates `DispatcherTrait` and its impl: decode one serialized method
+/// call from a `Buffer`, run it on the server, and encode the result (or the
+/// panic it produced) back into the same buffer.
+macro_rules! define_dispatcher_impl {
+    ($($name:ident {
+        $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+    }),* $(,)?) => {
+        // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+        pub trait DispatcherTrait {
+            // HACK(eddyb) these are here to allow `Self::$name` to work below.
+            $(type $name;)*
+            fn dispatch(&mut self, buf: Buffer) -> Buffer;
+        }
+
+        impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+            $(type $name = <MarkedTypes<S> as Types>::$name;)*
+            fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
+                let Dispatcher { handle_store, server } = self;
+
+                // Decode the method tag first; its arguments are decoded
+                // inside the matching arm below.
+                let mut reader = &buf[..];
+                match api_tags::Method::decode(&mut reader, &mut ()) {
+                    $(api_tags::Method::$name(m) => match m {
+                        $(api_tags::$name::$method => {
+                            let mut call_method = || {
+                                reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+                                $name::$method(server, $($arg),*)
+                            };
+                            // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+                            // If client and server happen to use the same `libstd`,
+                            // `catch_unwind` asserts that the panic counter was 0,
+                            // even when the closure passed to it didn't panic.
+                            let r = if thread::panicking() {
+                                Ok(call_method())
+                            } else {
+                                panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+                                    .map_err(PanicMessage::from)
+                            };
+
+                            // The request buffer is reused for the response.
+                            buf.clear();
+                            r.encode(&mut buf, handle_store);
+                        })*
+                    }),*
+                }
+                buf
+            }
+        }
+    }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+/// How the client (the proc-macro itself) runs relative to the server's
+/// dispatch loop: on the same thread, or on a separate one.
+pub trait ExecutionStrategy {
+    fn run_bridge_and_client(
+        &self,
+        dispatcher: &mut impl DispatcherTrait,
+        input: Buffer,
+        run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+        force_show_panics: bool,
+    ) -> Buffer;
+}
+
+/// Runs the client on the current thread; every bridged call dispatches
+/// directly into the server with no cross-thread handoff.
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+    fn run_bridge_and_client(
+        &self,
+        dispatcher: &mut impl DispatcherTrait,
+        input: Buffer,
+        run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+        force_show_panics: bool,
+    ) -> Buffer {
+        let mut dispatch = |buf| dispatcher.dispatch(buf);
+
+        run_client(BridgeConfig {
+            input,
+            dispatch: (&mut dispatch).into(),
+            force_show_panics,
+            _marker: marker::PhantomData,
+        })
+    }
+}
+
+// NOTE(eddyb) Two implementations are provided, the second one is a bit
+// faster but neither is anywhere near as fast as same-thread execution.
+
+/// Runs the client on a freshly spawned thread; each bridged call travels to
+/// the server thread over one mpsc channel and its reply returns over a
+/// second one.
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+    fn run_bridge_and_client(
+        &self,
+        dispatcher: &mut impl DispatcherTrait,
+        input: Buffer,
+        run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+        force_show_panics: bool,
+    ) -> Buffer {
+        use std::sync::mpsc::channel;
+
+        let (req_tx, req_rx) = channel();
+        let (res_tx, res_rx) = channel();
+
+        let join_handle = thread::spawn(move || {
+            let mut dispatch = |buf| {
+                req_tx.send(buf).unwrap();
+                res_rx.recv().unwrap()
+            };
+
+            run_client(BridgeConfig {
+                input,
+                dispatch: (&mut dispatch).into(),
+                force_show_panics,
+                _marker: marker::PhantomData,
+            })
+        });
+
+        // Serve requests until the client thread finishes and drops `req_tx`,
+        // which terminates this iteration.
+        for b in req_rx {
+            res_tx.send(dispatcher.dispatch(b)).unwrap();
+        }
+
+        join_handle.join().unwrap()
+    }
+}
+
+/// Cross-thread execution using a single shared request/response slot plus
+/// `thread::park`/`unpark` signalling instead of channels.
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+    fn run_bridge_and_client(
+        &self,
+        dispatcher: &mut impl DispatcherTrait,
+        input: Buffer,
+        run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+        force_show_panics: bool,
+    ) -> Buffer {
+        use std::sync::{Arc, Mutex};
+
+        // The one shared slot alternates between a pending request
+        // (client -> server) and its response (server -> client).
+        enum State<T> {
+            Req(T),
+            Res(T),
+        }
+
+        let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+        let server_thread = thread::current();
+        let state2 = state.clone();
+        let join_handle = thread::spawn(move || {
+            let mut dispatch = |b| {
+                // Publish the request, wake the server, then park until the
+                // slot holds a response again. `park` can wake spuriously,
+                // hence the loop that re-checks the state.
+                *state2.lock().unwrap() = State::Req(b);
+                server_thread.unpark();
+                loop {
+                    thread::park();
+                    if let State::Res(b) = &mut *state2.lock().unwrap() {
+                        break b.take();
+                    }
+                }
+            };
+
+            let r = run_client(BridgeConfig {
+                input,
+                dispatch: (&mut dispatch).into(),
+                force_show_panics,
+                _marker: marker::PhantomData,
+            });
+
+            // Wake up the server so it can exit the dispatch loop.
+            drop(state2);
+            server_thread.unpark();
+
+            r
+        });
+
+        // Check whether `state2` was dropped, to know when to stop.
+        while Arc::get_mut(&mut state).is_none() {
+            thread::park();
+            let mut b = match &mut *state.lock().unwrap() {
+                State::Req(b) => b.take(),
+                _ => continue,
+            };
+            b = dispatcher.dispatch(b.take());
+            *state.lock().unwrap() = State::Res(b);
+            join_handle.thread().unpark();
+        }
+
+        join_handle.join().unwrap()
+    }
+}
+
+/// Drives one complete proc-macro invocation end-to-end: encodes the expansion
+/// globals plus `input` into a buffer, lets `strategy` run the client against
+/// this server's dispatcher, then decodes the client's final `Result`
+/// (output on success, `PanicMessage` on panic) from the returned buffer.
+fn run_server<
+    S: Server,
+    I: Encode<HandleStore<MarkedTypes<S>>>,
+    O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+>(
+    strategy: &impl ExecutionStrategy,
+    handle_counters: &'static client::HandleCounters,
+    server: S,
+    input: I,
+    run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+    force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+    let mut dispatcher =
+        Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+    let globals = dispatcher.server.globals();
+
+    let mut buf = Buffer::new();
+    (globals, input).encode(&mut buf, &mut dispatcher.handle_store);
+
+    buf = strategy.run_bridge_and_client(&mut dispatcher, buf, run_client, force_show_panics);
+
+    Result::decode(&mut &buf[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<super::super::TokenStream, super::super::TokenStream> {
+    /// Runs a client that takes a single `TokenStream` input and produces one
+    /// output stream, using the given server and execution strategy.
+    pub fn run<S>(
+        &self,
+        strategy: &impl ExecutionStrategy,
+        server: S,
+        input: S::TokenStream,
+        force_show_panics: bool,
+    ) -> Result<S::TokenStream, PanicMessage>
+    where
+        S: Server,
+        S::TokenStream: Default,
+    {
+        let client::Client { get_handle_counters, run, _marker } = *self;
+        run_server(
+            strategy,
+            get_handle_counters(),
+            server,
+            <MarkedTypes<S> as Types>::TokenStream::mark(input),
+            run,
+            force_show_panics,
+        )
+        // An absent output stream is mapped to the default (empty) stream.
+        .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+    }
+}
+
+impl
+    client::Client<
+        (super::super::TokenStream, super::super::TokenStream),
+        super::super::TokenStream,
+    >
+{
+    /// Same as above, but for clients taking two `TokenStream` inputs
+    /// (encoded as a tuple) and producing one output stream.
+    pub fn run<S>(
+        &self,
+        strategy: &impl ExecutionStrategy,
+        server: S,
+        input: S::TokenStream,
+        input2: S::TokenStream,
+        force_show_panics: bool,
+    ) -> Result<S::TokenStream, PanicMessage>
+    where
+        S: Server,
+        S::TokenStream: Default,
+    {
+        let client::Client { get_handle_counters, run, _marker } = *self;
+        run_server(
+            strategy,
+            get_handle_counters(),
+            server,
+            (
+                <MarkedTypes<S> as Types>::TokenStream::mark(input),
+                <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+            ),
+            run,
+            force_show_panics,
+        )
+        .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..3fade2dc4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copy from <https://github.com/rust-lang/rust/blob/e45d9973b2665897a768312e971b82cc62633103/src/libproc_macro/diagnostic.rs>
+//! augmented with removing unstable features
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+    /// An error.
+    Error,
+    /// A warning.
+    Warning,
+    /// A note.
+    Note,
+    /// A help message.
+    Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+    /// Converts `self` into a `Vec<Span>`.
+    fn into_spans(self) -> Vec<Span>;
+}
+
+// A single span converts to a one-element set.
+impl MultiSpan for Span {
+    fn into_spans(self) -> Vec<Span> {
+        vec![self]
+    }
+}
+
+// An owned vector of spans is already the target representation.
+impl MultiSpan for Vec<Span> {
+    fn into_spans(self) -> Vec<Span> {
+        self
+    }
+}
+
+// A borrowed slice is copied into an owned vector.
+impl<'a> MultiSpan for &'a [Span] {
+    fn into_spans(self) -> Vec<Span> {
+        self.to_vec()
+    }
+}
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+    // Severity of this diagnostic.
+    level: Level,
+    // Primary message text.
+    message: String,
+    // Spans the message points at (may be empty).
+    spans: Vec<Span>,
+    // Sub-diagnostics attached to this one.
+    children: Vec<Diagnostic>,
+}
+
+/// Generates a pair of builder methods that append a child diagnostic of a
+/// fixed level: `$spanned` (with spans) and `$regular` (message only).
+macro_rules! diagnostic_child_methods {
+    ($spanned:ident, $regular:ident, $level:expr) => {
+        #[doc = concat!("Adds a new child diagnostics message to `self` with the [`",
+            stringify!($level), "`] level, and the given `spans` and `message`.")]
+        pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+        where
+            S: MultiSpan,
+            T: Into<String>,
+        {
+            self.children.push(Diagnostic::spanned(spans, $level, message));
+            self
+        }
+
+        #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+            stringify!($level), "`] level, and the given `message`.")]
+        pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+            self.children.push(Diagnostic::new($level, message));
+            self
+        }
+    };
+}
+
+/// Iterator over the children diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+    type Item = &'a Diagnostic;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.0.next()
+    }
+}
+
+impl Diagnostic {
+    /// Creates a new diagnostic with the given `level` and `message`.
+    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+    }
+
+    /// Creates a new diagnostic with the given `level` and `message` pointing to
+    /// the given set of `spans`.
+    pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+    where
+        S: MultiSpan,
+        T: Into<String>,
+    {
+        Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+    }
+
+    // Child-appending builders: `span_error`/`error`, `span_warning`/`warning`, etc.
+    diagnostic_child_methods!(span_error, error, Level::Error);
+    diagnostic_child_methods!(span_warning, warning, Level::Warning);
+    diagnostic_child_methods!(span_note, note, Level::Note);
+    diagnostic_child_methods!(span_help, help, Level::Help);
+
+    /// Returns the diagnostic `level` for `self`.
+    pub fn level(&self) -> Level {
+        self.level
+    }
+
+    /// Sets the level in `self` to `level`.
+    pub fn set_level(&mut self, level: Level) {
+        self.level = level;
+    }
+
+    /// Returns the message in `self`.
+    pub fn message(&self) -> &str {
+        &self.message
+    }
+
+    /// Sets the message in `self` to `message`.
+    pub fn set_message<T: Into<String>>(&mut self, message: T) {
+        self.message = message.into();
+    }
+
+    /// Returns the `Span`s in `self`.
+    pub fn spans(&self) -> &[Span] {
+        &self.spans
+    }
+
+    /// Sets the `Span`s in `self` to `spans`.
+    pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+        self.spans = spans.into_spans();
+    }
+
+    /// Returns an iterator over the children diagnostics of `self`.
+    pub fn children(&self) -> Children<'_> {
+        Children(self.children.iter())
+    }
+
+    /// Emit the diagnostic.
+    pub fn emit(self) {
+        // Converts public `Span`s into the bridge's `MultiSpan` handle.
+        fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+            let mut multi_span = super::bridge::client::MultiSpan::new();
+            for span in spans {
+                multi_span.push(span.0);
+            }
+            multi_span
+        }
+
+        // Build the bridge-side diagnostic, attach all children as
+        // sub-diagnostics, then emit once over the bridge.
+        let mut diag = super::bridge::client::Diagnostic::new(
+            self.level,
+            &self.message[..],
+            to_internal(self.spans),
+        );
+        for c in self.children {
+            diag.sub(c.level, &c.message[..], to_internal(c.spans));
+        }
+        diag.emit();
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs
new file mode 100644
index 000000000..be62c73ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs
@@ -0,0 +1,1125 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
+//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
+/// Determines whether proc_macro has been made accessible to the currently
+/// running program.
+///
+/// The proc_macro crate is only intended for use inside the implementation of
+/// procedural macros. All the functions in this crate panic if invoked from
+/// outside of a procedural macro, such as from a build script or unit test or
+/// ordinary Rust binary.
+///
+/// With consideration for Rust libraries that are designed to support both
+/// macro and non-macro use cases, `proc_macro::is_available()` provides a
+/// non-panicking way to detect whether the infrastructure required to use the
+/// API of proc_macro is presently available. Returns true if invoked from
+/// inside of a procedural macro, false if invoked from any other binary.
+pub fn is_available() -> bool {
+    // Simply asks the client side of the bridge; see `bridge::client`.
+    bridge::client::is_available()
+}
+
+/// The main type provided by this crate, representing an abstract stream of
+/// tokens, or, more specifically, a sequence of token trees.
+/// The type provides interfaces for iterating over those token trees and, conversely,
+/// collecting a number of token trees into one stream.
+///
+/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
+/// and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+// `None` represents the empty stream (see `TokenStream::new`/`is_empty`).
+pub struct TokenStream(Option<bridge::client::TokenStream>);
+
+/// Error returned from `TokenStream::from_str`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct LexError;
+
+impl fmt::Display for LexError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("cannot parse string into token stream")
+    }
+}
+
+impl error::Error for LexError {}
+
+/// Error returned from `TokenStream::expand_expr`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct ExpandError;
+
+impl fmt::Display for ExpandError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("macro expansion failed")
+    }
+}
+
+impl error::Error for ExpandError {}
+
+impl TokenStream {
+    /// Returns an empty `TokenStream` containing no token trees.
+    pub fn new() -> TokenStream {
+        // The empty stream is represented as `None`, avoiding a bridge call.
+        TokenStream(None)
+    }
+
+    /// Checks if this `TokenStream` is empty.
+    pub fn is_empty(&self) -> bool {
+        self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
+    }
+
+    /// Parses this `TokenStream` as an expression and attempts to expand any
+    /// macros within it. Returns the expanded `TokenStream`.
+    ///
+    /// Currently only expressions expanding to literals will succeed, although
+    /// this may be relaxed in the future.
+    ///
+    /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+    /// report an error, failing compilation, and/or return an `Err(..)`. The
+    /// specific behavior for any error condition, and what conditions are
+    /// considered errors, is unspecified and may change in the future.
+    pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+        // An empty (`None`) stream cannot be expanded as an expression.
+        let stream = self.0.as_ref().ok_or(ExpandError)?;
+        match bridge::client::TokenStream::expand_expr(stream) {
+            Ok(stream) => Ok(TokenStream(Some(stream))),
+            Err(_) => Err(ExpandError),
+        }
+    }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters not existing in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
+impl FromStr for TokenStream {
+    type Err = LexError;
+
+    fn from_str(src: &str) -> Result<TokenStream, LexError> {
+        // Always `Ok` here: per the note above, lex failures currently
+        // surface as panics rather than `LexError`.
+        Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
+    }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly convertible back
+/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // NOTE(review): no inherent `to_string` is visible in this file, so
+        // this appears to resolve to the `ToString` blanket impl, which calls
+        // back into `Display::fmt` — i.e. it looks infinitely recursive.
+        // TODO confirm whether an inherent `to_string` exists later in this
+        // module before relying on this impl.
+        f.write_str(&self.to_string())
+    }
+}
+
+/// Prints token in a form convenient for debugging.
+impl fmt::Debug for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("TokenStream ")?;
+        // Relies on `TokenStream: IntoIterator` (see `token_stream::IntoIter`).
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl Default for TokenStream {
+    fn default() -> Self {
+        TokenStream::new()
+    }
+}
+
+pub use quote::{quote, quote_span};
+
+/// Converts a public `TokenTree` into the bridge's `TokenTree` representation
+/// by unwrapping each variant's newtype wrapper.
+fn tree_to_bridge_tree(
+    tree: TokenTree,
+) -> bridge::TokenTree<
+    bridge::client::TokenStream,
+    bridge::client::Span,
+    bridge::client::Ident,
+    bridge::client::Literal,
+> {
+    match tree {
+        TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+        TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+        TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+        TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+    }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
+    }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ConcatStreamsHelper {
+    streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ConcatStreamsHelper {
+    fn new(capacity: usize) -> Self {
+        ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
+    }
+
+    fn push(&mut self, stream: TokenStream) {
+        // Empty streams (`None`) contribute nothing and are skipped.
+        if let Some(stream) = stream.0 {
+            self.streams.push(stream);
+        }
+    }
+
+    fn build(mut self) -> TokenStream {
+        // Zero or one streams need no bridge call to concatenate.
+        if self.streams.len() <= 1 {
+            TokenStream(self.streams.pop())
+        } else {
+            TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+        }
+    }
+
+    fn append_to(mut self, stream: &mut TokenStream) {
+        if self.streams.is_empty() {
+            return;
+        }
+        let base = stream.0.take();
+        // Single new stream onto an empty base: reuse it without a bridge call.
+        if base.is_none() && self.streams.len() == 1 {
+            stream.0 = self.streams.pop();
+        } else {
+            stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+        }
+    }
+}
+
+/// Collects a number of token trees into a single stream.
+impl iter::FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+        // Each tree becomes a one-tree stream; those are then concatenated.
+        trees.into_iter().map(TokenStream::from).collect()
+    }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl iter::FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        let iter = streams.into_iter();
+        let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+        iter.for_each(|stream| builder.push(stream));
+        builder.build()
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+        // Delegates to `Extend<TokenStream>` below.
+        self.extend(trees.into_iter().map(TokenStream::from));
+    }
+}
+
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        // FIXME(eddyb) Use an optimized implementation if/when possible.
+        *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+    }
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+    use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+    /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+    /// and returns whole groups as token trees.
+    #[derive(Clone)]
+    pub struct IntoIter(
+        std::vec::IntoIter<
+            bridge::TokenTree<
+                bridge::client::TokenStream,
+                bridge::client::Span,
+                bridge::client::Ident,
+                bridge::client::Literal,
+            >,
+        >,
+    );
+
+    impl Iterator for IntoIter {
+        type Item = TokenTree;
+
+        fn next(&mut self) -> Option<TokenTree> {
+            // Rewrap each bridge tree in its public newtype.
+            self.0.next().map(|tree| match tree {
+                bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+                bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+                bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+                bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+            })
+        }
+    }
+
+    impl IntoIterator for TokenStream {
+        type Item = TokenTree;
+        type IntoIter = IntoIter;
+
+        fn into_iter(self) -> IntoIter {
+            // An empty (`None`) stream yields an empty `Vec` of trees.
+            IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
+        }
+    }
+}
+
+#[doc(hidden)]
+mod quote;
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span(bridge::client::Span);
+
+/// Generates a `Diagnostic` constructor method (`error`, `warning`, ...) with
+/// a fixed level, spanned at `self`.
+macro_rules! diagnostic_method {
+    ($name:ident, $level:expr) => {
+        /// Creates a new `Diagnostic` with the given `message` at the span
+        /// `self`.
+        pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
+            Diagnostic::spanned(self, $level, message)
+        }
+    };
+}
+
+impl Span {
+    /// A span that resolves at the macro definition site.
+    pub fn def_site() -> Span {
+        Span(bridge::client::Span::def_site())
+    }
+
+    /// The span of the invocation of the current procedural macro.
+    /// Identifiers created with this span will be resolved as if they were written
+    /// directly at the macro call location (call-site hygiene) and other code
+    /// at the macro call site will be able to refer to them as well.
+    pub fn call_site() -> Span {
+        Span(bridge::client::Span::call_site())
+    }
+
+    /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
+    /// definition site (local variables, labels, `$crate`) and sometimes at the macro
+    /// call site (everything else).
+    /// The span location is taken from the call-site.
+    pub fn mixed_site() -> Span {
+        Span(bridge::client::Span::mixed_site())
+    }
+
+    /// The original source file into which this span points.
+    pub fn source_file(&self) -> SourceFile {
+        SourceFile(self.0.source_file())
+    }
+
+    /// The `Span` for the tokens in the previous macro expansion from which
+    /// `self` was generated from, if any.
+    pub fn parent(&self) -> Option<Span> {
+        self.0.parent().map(Span)
+    }
+
+    /// The span for the origin source code that `self` was generated from. If
+    /// this `Span` wasn't generated from other macro expansions then the return
+    /// value is the same as `*self`.
+    pub fn source(&self) -> Span {
+        Span(self.0.source())
+    }
+
+    /// Gets the starting line/column in the source file for this span.
+    pub fn start(&self) -> LineColumn {
+        // `add_1_to_column` converts to the 1-indexed column documented on
+        // `LineColumn`.
+        self.0.start().add_1_to_column()
+    }
+
+    /// Gets the ending line/column in the source file for this span.
+    pub fn end(&self) -> LineColumn {
+        self.0.end().add_1_to_column()
+    }
+
+    /// Creates an empty span pointing to directly before this span.
+    pub fn before(&self) -> Span {
+        Span(self.0.before())
+    }
+
+    /// Creates an empty span pointing to directly after this span.
+    pub fn after(&self) -> Span {
+        Span(self.0.after())
+    }
+
+    /// Creates a new span encompassing `self` and `other`.
+    ///
+    /// Returns `None` if `self` and `other` are from different files.
+    pub fn join(&self, other: Span) -> Option<Span> {
+        self.0.join(other.0).map(Span)
+    }
+
+    /// Creates a new span with the same line/column information as `self` but
+    /// that resolves symbols as though it were at `other`.
+    pub fn resolved_at(&self, other: Span) -> Span {
+        Span(self.0.resolved_at(other.0))
+    }
+
+    /// Creates a new span with the same name resolution behavior as `self` but
+    /// with the line/column information of `other`.
+    pub fn located_at(&self, other: Span) -> Span {
+        // The mirror image of `resolved_at`, with the two roles swapped.
+        other.resolved_at(*self)
+    }
+
+    /// Compares to spans to see if they're equal.
+    pub fn eq(&self, other: &Span) -> bool {
+        self.0 == other.0
+    }
+
+    /// Returns the source text behind a span. This preserves the original source
+    /// code, including spaces and comments. It only returns a result if the span
+    /// corresponds to real source code.
+    ///
+    /// Note: The observable result of a macro should only rely on the tokens and
+    /// not on this source text. The result of this function is a best effort to
+    /// be used for diagnostics only.
+    pub fn source_text(&self) -> Option<String> {
+        self.0.source_text()
+    }
+
+    // Used by the implementation of `Span::quote`
+    #[doc(hidden)]
+    pub fn save_span(&self) -> usize {
+        self.0.save_span()
+    }
+
+    // Used by the implementation of `Span::quote`
+    #[doc(hidden)]
+    pub fn recover_proc_macro_span(id: usize) -> Span {
+        Span(bridge::client::Span::recover_proc_macro_span(id))
+    }
+
+    // `Diagnostic` constructors at this span, one per `Level`.
+    diagnostic_method!(error, Level::Error);
+    diagnostic_method!(warning, Level::Warning);
+    diagnostic_method!(note, Level::Note);
+    diagnostic_method!(help, Level::Help);
+}
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+/// A line-column pair representing the start or end of a `Span`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+    /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
+    pub line: usize,
+    /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
+    /// file on which the span starts or ends (inclusive).
+    pub column: usize,
+}
+
+impl LineColumn {
+    // Shifts `column` to the 1-indexed form documented above (the raw value
+    // is presumably 0-indexed coming over the bridge — confirm there).
+    fn add_1_to_column(self) -> Self {
+        LineColumn { line: self.line, column: self.column + 1 }
+    }
+}
+
+impl Ord for LineColumn {
+    // Order by line first, then by column within the line.
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.line.cmp(&other.line).then(self.column.cmp(&other.column))
+    }
+}
+
+impl PartialOrd for LineColumn {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+/// The source file of a given `Span`.
+#[derive(Clone)]
+pub struct SourceFile(bridge::client::SourceFile);
+
+impl SourceFile {
+    /// Gets the path to this source file.
+    ///
+    /// ### Note
+    /// If the code span associated with this `SourceFile` was generated by an external macro, this
+    /// might not be an actual path on the filesystem. Use [`is_real`] to check.
+    ///
+    /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
+    /// the command line, the path as given might not actually be valid.
+    ///
+    /// [`is_real`]: Self::is_real
+    pub fn path(&self) -> PathBuf {
+        PathBuf::from(self.0.path())
+    }
+
+    /// Returns `true` if this source file is a real source file, and not generated by an external
+    /// macro's expansion.
+    pub fn is_real(&self) -> bool {
+        // This is a hack until intercrate spans are implemented and we can have real source files
+        // for spans generated in external macros.
+        // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
+        self.0.is_real()
+    }
+}
+
+impl fmt::Debug for SourceFile {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SourceFile")
+            .field("path", &self.path())
+            .field("is_real", &self.is_real())
+            .finish()
+    }
+}
+
+// Equality delegates to the bridge handle, not to path comparison.
+impl PartialEq for SourceFile {
+    fn eq(&self, other: &Self) -> bool {
+        self.0.eq(&other.0)
+    }
+}
+
+impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+    /// A token stream surrounded by bracket delimiters.
+    Group(Group),
+    /// An identifier.
+    Ident(Ident),
+    /// A single punctuation character (`+`, `,`, `$`, etc.).
+    Punct(Punct),
+    /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+    Literal(Literal),
+}
+
+// Span accessors simply dispatch to the wrapped token type.
+impl TokenTree {
+    /// Returns the span of this tree, delegating to the `span` method of
+    /// the contained token or a delimited stream.
+    pub fn span(&self) -> Span {
+        match *self {
+            TokenTree::Group(ref t) => t.span(),
+            TokenTree::Ident(ref t) => t.span(),
+            TokenTree::Punct(ref t) => t.span(),
+            TokenTree::Literal(ref t) => t.span(),
+        }
+    }
+
+    /// Configures the span for *only this token*.
+    ///
+    /// Note that if this token is a `Group` then this method will not configure
+    /// the span of each of the internal tokens, this will simply delegate to
+    /// the `set_span` method of each variant.
+    pub fn set_span(&mut self, span: Span) {
+        match *self {
+            TokenTree::Group(ref mut t) => t.set_span(span),
+            TokenTree::Ident(ref mut t) => t.set_span(span),
+            TokenTree::Punct(ref mut t) => t.set_span(span),
+            TokenTree::Literal(ref mut t) => t.set_span(span),
+        }
+    }
+}
+
+/// Prints token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Each of these has the name in the struct type in the derived debug,
+        // so don't bother with an extra layer of indirection
+        match *self {
+            TokenTree::Group(ref tt) => tt.fmt(f),
+            TokenTree::Ident(ref tt) => tt.fmt(f),
+            TokenTree::Punct(ref tt) => tt.fmt(f),
+            TokenTree::Literal(ref tt) => tt.fmt(f),
+        }
+    }
+}
+
+// One `From` impl per variant, so each token type converts into a tree.
+impl From<Group> for TokenTree {
+    fn from(g: Group) -> TokenTree {
+        TokenTree::Group(g)
+    }
+}
+
+impl From<Ident> for TokenTree {
+    fn from(g: Ident) -> TokenTree {
+        TokenTree::Ident(g)
+    }
+}
+
+impl From<Punct> for TokenTree {
+    fn from(g: Punct) -> TokenTree {
+        TokenTree::Punct(g)
+    }
+}
+
+impl From<Literal> for TokenTree {
+    fn from(g: Literal) -> TokenTree {
+        TokenTree::Literal(g)
+    }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly convertible back
+/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenTree {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // NOTE(review): no inherent `to_string` is visible in this file, so
+        // `self.to_string()` appears to resolve to the `ToString` blanket
+        // impl, which calls back into `Display::fmt` — i.e. it looks
+        // infinitely recursive. TODO confirm whether an inherent `to_string`
+        // exists later in this module before relying on this impl.
+        f.write_str(&self.to_string())
+    }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
+// Thin newtype over the bridge's generic `Group`, instantiated with the
+// client-side `TokenStream` and `Span` handle types.
+#[derive(Clone)]
+pub struct Group(bridge::Group<bridge::client::TokenStream, bridge::client::Span>);
+
+/// Describes how a sequence of token trees is delimited.
+// Stored verbatim in `bridge::Group::delimiter` (see `Group::new` below).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Delimiter {
+    /// `( ... )`
+    Parenthesis,
+    /// `{ ... }`
+    Brace,
+    /// `[ ... ]`
+    Bracket,
+    /// `Ø ... Ø`
+    /// An invisible delimiter, that may, for example, appear around tokens coming from a
+    /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+    /// `$var * 3` where `$var` is `1 + 2`.
+    /// Invisible delimiters might not survive roundtrip of a token stream through a string.
+    None,
+}
+
+impl Group {
+    /// Creates a new `Group` with the given delimiter and token stream.
+    ///
+    /// The new group is spanned at `Span::call_site()`; use `set_span` to
+    /// change that afterwards.
+    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+        let span = bridge::DelimSpan::from_single(Span::call_site().0);
+        Group(bridge::Group { delimiter, stream: stream.0, span })
+    }
+
+    /// Returns the delimiter of this `Group`.
+    pub fn delimiter(&self) -> Delimiter {
+        self.0.delimiter
+    }
+
+    /// Returns the `TokenStream` of tokens delimited by this `Group`,
+    /// excluding the delimiters themselves.
+    pub fn stream(&self) -> TokenStream {
+        let inner = self.0.stream.clone();
+        TokenStream(inner)
+    }
+
+    /// Returns the span for the delimiters of this token stream, covering the
+    /// entire `Group`.
+    ///
+    /// ```text
+    /// pub fn span(&self) -> Span {
+    ///        ^^^^^^^
+    /// ```
+    pub fn span(&self) -> Span {
+        Span(self.0.span.entire)
+    }
+
+    /// Returns the span pointing to the opening delimiter of this group.
+    ///
+    /// ```text
+    /// pub fn span_open(&self) -> Span {
+    ///        ^
+    /// ```
+    pub fn span_open(&self) -> Span {
+        Span(self.0.span.open)
+    }
+
+    /// Returns the span pointing to the closing delimiter of this group.
+    ///
+    /// ```text
+    /// pub fn span_close(&self) -> Span {
+    ///        ^
+    /// ```
+    pub fn span_close(&self) -> Span {
+        Span(self.0.span.close)
+    }
+
+    /// Configures the span of this `Group`'s delimiters; the tokens inside
+    /// the group keep their own spans.
+    pub fn set_span(&mut self, span: Span) {
+        self.0.span = bridge::DelimSpan::from_single(span.0);
+    }
+}
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl fmt::Display for Group {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // The previous body was `f.write_str(&self.to_string())`, which
+        // resolves to the blanket `ToString`-for-`Display` impl and thus
+        // recursed straight back into this method. Print the delimiters
+        // around the inner stream's own `Display` instead.
+        let (open, close) = match self.delimiter() {
+            Delimiter::Parenthesis => ("(", ")"),
+            Delimiter::Brace => ("{ ", " }"),
+            Delimiter::Bracket => ("[", "]"),
+            Delimiter::None => ("", ""),
+        };
+        f.write_str(open)?;
+        fmt::Display::fmt(&self.stream(), f)?;
+        f.write_str(close)
+    }
+}
+
+impl fmt::Debug for Group {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Print through the public accessors so the bridge internals stay
+        // out of the debug output.
+        let mut dbg = f.debug_struct("Group");
+        dbg.field("delimiter", &self.delimiter());
+        dbg.field("stream", &self.stream());
+        dbg.field("span", &self.span());
+        dbg.finish()
+    }
+}
+
+/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
+///
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
+/// forms of `Spacing` returned.
+// Newtype over the bridge's `Punct`, which stores the character as a `u8`
+// (see `Punct::new`) plus a `joint` flag and a span.
+#[derive(Clone)]
+pub struct Punct(bridge::Punct<bridge::client::Span>);
+
+/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
+/// by a different token or whitespace ([`Spacing::Alone`]).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Spacing {
+    /// A `Punct` is not immediately followed by another `Punct`.
+    /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
+    Alone,
+    /// A `Punct` is immediately followed by another `Punct`.
+    /// E.g. `+` is `Joint` in `+=` and `++`.
+    ///
+    /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
+    Joint,
+}
+
+impl Punct {
+    /// Creates a new `Punct` from the given character and spacing.
+    ///
+    /// Panics unless `ch` is one of the punctuation characters permitted by
+    /// the language. The new token is spanned at `Span::call_site()`; use
+    /// `set_span` to change that.
+    pub fn new(ch: char, spacing: Spacing) -> Punct {
+        const LEGAL_CHARS: &[char] = &[
+            '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
+            ':', '#', '$', '?', '\'',
+        ];
+        if !LEGAL_CHARS.contains(&ch) {
+            panic!("unsupported character `{:?}`", ch);
+        }
+        let joint = matches!(spacing, Spacing::Joint);
+        // All legal characters are ASCII, so the `u8` narrowing is lossless.
+        Punct(bridge::Punct { ch: ch as u8, joint, span: Span::call_site().0 })
+    }
+
+    /// Returns the value of this punctuation character as `char`.
+    pub fn as_char(&self) -> char {
+        char::from(self.0.ch)
+    }
+
+    /// Returns the spacing of this punctuation character: `Joint` if it is
+    /// immediately followed by another `Punct` (so the two may form a
+    /// multi-character operator), `Alone` otherwise.
+    pub fn spacing(&self) -> Spacing {
+        match self.0.joint {
+            true => Spacing::Joint,
+            false => Spacing::Alone,
+        }
+    }
+
+    /// Returns the span for this punctuation character.
+    pub fn span(&self) -> Span {
+        Span(self.0.span)
+    }
+
+    /// Configures the span for this punctuation character.
+    pub fn set_span(&mut self, span: Span) {
+        self.0.span = span.0;
+    }
+}
+
+/// Prints the punctuation character as a string that should be losslessly convertible
+/// back into the same character.
+impl fmt::Display for Punct {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // The previous body called `self.to_string()`, which goes through the
+        // blanket `ToString`-for-`Display` impl and recursed back into this
+        // method forever. A `Punct` displays as exactly its character.
+        write!(f, "{}", self.as_char())
+    }
+}
+
+impl fmt::Debug for Punct {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Expose the decoded accessor values rather than the raw bridge data.
+        let mut dbg = f.debug_struct("Punct");
+        dbg.field("ch", &self.as_char());
+        dbg.field("spacing", &self.spacing());
+        dbg.field("span", &self.span());
+        dbg.finish()
+    }
+}
+
+/// Allows comparing a `Punct` directly against a `char`.
+impl PartialEq<char> for Punct {
+    fn eq(&self, rhs: &char) -> bool {
+        *rhs == self.as_char()
+    }
+}
+
+/// Allows comparing a `char` directly against a `Punct`.
+impl PartialEq<Punct> for char {
+    fn eq(&self, rhs: &Punct) -> bool {
+        rhs.as_char() == *self
+    }
+}
+
+/// An identifier (`ident`).
+// Newtype over the bridge's client-side `Ident` handle; the actual text is
+// interned on the server side (see `IdentInterner` in `ra_server`).
+#[derive(Clone)]
+pub struct Ident(bridge::client::Ident);
+
+impl Ident {
+    /// Creates a new `Ident` with the given `string` and `span`.
+    ///
+    /// The `string` argument must be a valid identifier permitted by the
+    /// language (including keywords, e.g. `self` or `fn`); otherwise the
+    /// function will panic.
+    ///
+    /// Note that `span`, currently in rustc, configures the hygiene
+    /// information for this identifier: `Span::call_site()` identifiers
+    /// resolve as if written directly at the macro call site (and other code
+    /// at the call site can refer to them), while a later `Span::def_site()`
+    /// would resolve them at the macro definition instead. Because hygiene
+    /// currently matters, this constructor — unlike the other token
+    /// constructors — requires an explicit `Span`.
+    pub fn new(string: &str, span: Span) -> Ident {
+        let is_raw = false;
+        Ident(bridge::client::Ident::new(string, span.0, is_raw))
+    }
+
+    /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+    ///
+    /// The `string` argument must be a valid identifier permitted by the
+    /// language (including keywords, e.g. `fn`). Keywords which are usable in
+    /// path segments (e.g. `self`, `super`) are not supported, and will cause
+    /// a panic.
+    pub fn new_raw(string: &str, span: Span) -> Ident {
+        let is_raw = true;
+        Ident(bridge::client::Ident::new(string, span.0, is_raw))
+    }
+
+    /// Returns the span of this `Ident`, encompassing the entire string returned
+    /// by [`to_string`](Self::to_string).
+    pub fn span(&self) -> Span {
+        Span(self.0.span())
+    }
+
+    /// Configures the span of this `Ident`, possibly changing its hygiene context.
+    pub fn set_span(&mut self, span: Span) {
+        self.0 = self.0.with_span(span.0);
+    }
+}
+
+/// Prints the identifier as a string that should be losslessly convertible
+/// back into the same identifier.
+impl fmt::Display for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // NOTE(review): `Ident` has no inherent `to_string` anywhere in this
+        // file, so `self.to_string()` resolves to the blanket
+        // `impl<T: Display> ToString for T`, which calls this very method —
+        // apparent infinite recursion. Upstream rustc relies on a specialized
+        // `ToString` impl that this stable copy cannot have. TODO: confirm
+        // and fix (the ident text is only reachable through the bridge).
+        f.write_str(&self.to_string())
+    }
+}
+
+impl fmt::Debug for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // NOTE(review): `self.to_string()` dispatches through the blanket
+        // `ToString` impl to `Ident`'s `fmt::Display`, whose visible body in
+        // this file itself calls `to_string()` — so this looks like it would
+        // recurse unless an inherent/specialized `to_string` exists
+        // elsewhere. TODO: verify.
+        f.debug_struct("Ident")
+            .field("ident", &self.to_string())
+            .field("span", &self.span())
+            .finish()
+    }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`),
+/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
+/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
+// Newtype over the bridge's client-side `Literal` handle.
+#[derive(Clone)]
+pub struct Literal(bridge::client::Literal);
+
+// Invoked below inside `impl Literal` to generate one suffixed-integer
+// constructor per integer type (e.g. `u8_suffixed`).
+macro_rules! suffixed_int_literals {
+    ($($name:ident => $kind:ident,)*) => ($(
+        /// Creates a new suffixed integer literal with the specified value.
+        ///
+        /// This function will create an integer like `1u32` where the integer
+        /// value specified is the first part of the token and the integral is
+        /// also suffixed at the end.
+        /// Literals created from negative numbers might not survive round-trips through
+        /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+        ///
+        /// Literals created through this method have the `Span::call_site()`
+        /// span by default, which can be configured with the `set_span` method
+        /// below.
+        pub fn $name(n: $kind) -> Literal {
+            Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
+        }
+    )*)
+}
+
+// Invoked below inside `impl Literal` to generate one unsuffixed-integer
+// constructor per integer type (e.g. `u8_unsuffixed`).
+macro_rules! unsuffixed_int_literals {
+    ($($name:ident => $kind:ident,)*) => ($(
+        /// Creates a new unsuffixed integer literal with the specified value.
+        ///
+        /// This function will create an integer like `1` where the integer
+        /// value specified is the first part of the token. No suffix is
+        /// specified on this token, meaning that invocations like
+        /// `Literal::i8_unsuffixed(1)` are equivalent to
+        /// `Literal::u32_unsuffixed(1)`.
+        /// Literals created from negative numbers might not survive round-trips through
+        /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+        ///
+        /// Literals created through this method have the `Span::call_site()`
+        /// span by default, which can be configured with the `set_span` method
+        /// below.
+        pub fn $name(n: $kind) -> Literal {
+            Literal(bridge::client::Literal::integer(&n.to_string()))
+        }
+    )*)
+}
+
+impl Literal {
+    // Generated constructors: `u8_suffixed(1)` → `1u8`, etc.
+    suffixed_int_literals! {
+        u8_suffixed => u8,
+        u16_suffixed => u16,
+        u32_suffixed => u32,
+        u64_suffixed => u64,
+        u128_suffixed => u128,
+        usize_suffixed => usize,
+        i8_suffixed => i8,
+        i16_suffixed => i16,
+        i32_suffixed => i32,
+        i64_suffixed => i64,
+        i128_suffixed => i128,
+        isize_suffixed => isize,
+    }
+
+    // Generated constructors: `u8_unsuffixed(1)` → `1`, etc.
+    unsuffixed_int_literals! {
+        u8_unsuffixed => u8,
+        u16_unsuffixed => u16,
+        u32_unsuffixed => u32,
+        u64_unsuffixed => u64,
+        u128_unsuffixed => u128,
+        usize_unsuffixed => usize,
+        i8_unsuffixed => i8,
+        i16_unsuffixed => i16,
+        i32_unsuffixed => i32,
+        i64_unsuffixed => i64,
+        i128_unsuffixed => i128,
+        isize_unsuffixed => isize,
+    }
+
+    /// Creates a new unsuffixed floating-point literal.
+    ///
+    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+    /// the float's value is emitted directly into the token but no suffix is
+    /// used, so it may be inferred to be a `f64` later in the compiler.
+    /// Literals created from negative numbers might not survive round-trips through
+    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+    ///
+    /// # Panics
+    ///
+    /// This function requires that the specified float is finite, for
+    /// example if it is infinity or NaN this function will panic.
+    pub fn f32_unsuffixed(n: f32) -> Literal {
+        if !n.is_finite() {
+            panic!("Invalid float literal {n}");
+        }
+        let mut repr = n.to_string();
+        // `Display` for whole floats omits the fraction ("1" not "1.0");
+        // append ".0" so the token lexes as a float, not an integer.
+        if !repr.contains('.') {
+            repr.push_str(".0");
+        }
+        Literal(bridge::client::Literal::float(&repr))
+    }
+
+    /// Creates a new suffixed floating-point literal.
+    ///
+    /// This constructor will create a literal like `1.0f32` where the value
+    /// specified is the preceding part of the token and `f32` is the suffix of
+    /// the token. This token will always be inferred to be an `f32` in the
+    /// compiler.
+    /// Literals created from negative numbers might not survive round-trips through
+    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+    ///
+    /// # Panics
+    ///
+    /// This function requires that the specified float is finite, for
+    /// example if it is infinity or NaN this function will panic.
+    pub fn f32_suffixed(n: f32) -> Literal {
+        if !n.is_finite() {
+            panic!("Invalid float literal {n}");
+        }
+        Literal(bridge::client::Literal::f32(&n.to_string()))
+    }
+
+    /// Creates a new unsuffixed floating-point literal.
+    ///
+    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+    /// the float's value is emitted directly into the token but no suffix is
+    /// used, so it may be inferred to be a `f64` later in the compiler.
+    /// Literals created from negative numbers might not survive round-trips through
+    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+    ///
+    /// # Panics
+    ///
+    /// This function requires that the specified float is finite, for
+    /// example if it is infinity or NaN this function will panic.
+    pub fn f64_unsuffixed(n: f64) -> Literal {
+        if !n.is_finite() {
+            panic!("Invalid float literal {n}");
+        }
+        let mut repr = n.to_string();
+        // See `f32_unsuffixed`: keep the token a float literal.
+        if !repr.contains('.') {
+            repr.push_str(".0");
+        }
+        Literal(bridge::client::Literal::float(&repr))
+    }
+
+    /// Creates a new suffixed floating-point literal.
+    ///
+    /// This constructor will create a literal like `1.0f64` where the value
+    /// specified is the preceding part of the token and `f64` is the suffix of
+    /// the token. This token will always be inferred to be an `f64` in the
+    /// compiler.
+    /// Literals created from negative numbers might not survive round-trips through
+    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+    ///
+    /// # Panics
+    ///
+    /// This function requires that the specified float is finite, for
+    /// example if it is infinity or NaN this function will panic.
+    pub fn f64_suffixed(n: f64) -> Literal {
+        if !n.is_finite() {
+            panic!("Invalid float literal {n}");
+        }
+        Literal(bridge::client::Literal::f64(&n.to_string()))
+    }
+
+    /// String literal.
+    pub fn string(string: &str) -> Literal {
+        Literal(bridge::client::Literal::string(string))
+    }
+
+    /// Character literal.
+    pub fn character(ch: char) -> Literal {
+        Literal(bridge::client::Literal::character(ch))
+    }
+
+    /// Byte string literal.
+    pub fn byte_string(bytes: &[u8]) -> Literal {
+        Literal(bridge::client::Literal::byte_string(bytes))
+    }
+
+    /// Returns the span encompassing this literal.
+    pub fn span(&self) -> Span {
+        Span(self.0.span())
+    }
+
+    /// Configures the span associated for this literal.
+    pub fn set_span(&mut self, span: Span) {
+        self.0.set_span(span.0);
+    }
+
+    /// Returns a `Span` that is a subset of `self.span()` containing only the
+    /// source bytes in range `range`. Returns `None` if the would-be trimmed
+    /// span is outside the bounds of `self`.
+    // FIXME(SergioBenitez): check that the byte range starts and ends at a
+    // UTF-8 boundary of the source. otherwise, it's likely that a panic will
+    // occur elsewhere when the source text is printed.
+    // FIXME(SergioBenitez): there is no way for the user to know what
+    // `self.span()` actually maps to, so this method can currently only be
+    // called blindly. For example, `to_string()` for the character 'c' returns
+    // "'\u{63}'"; there is no way for the user to know whether the source text
+    // was 'c' or whether it was '\u{63}'.
+    pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+        self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
+    }
+}
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+impl FromStr for Literal {
+    type Err = LexError;
+
+    fn from_str(src: &str) -> Result<Self, LexError> {
+        bridge::client::Literal::from_str(src).map(Literal).map_err(|()| LexError)
+    }
+}
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // NOTE(review): the wrapper `Literal` has no inherent `to_string` in
+        // this file, so `self.to_string()` resolves to the blanket
+        // `ToString`-for-`Display` impl, which calls this very method —
+        // apparent infinite recursion unless an inherent/specialized
+        // `to_string` exists elsewhere (upstream rustc has one via
+        // specialization). TODO: confirm and fix.
+        f.write_str(&self.to_string())
+    }
+}
+
+impl fmt::Debug for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Forward to the bridge handle's own `Debug`.
+        fmt::Debug::fmt(&self.0, f)
+    }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+    use std::env::{self, VarError};
+    use std::ffi::OsStr;
+
+    /// Retrieves the environment variable `key` and records the access in the
+    /// build dependency info, so a build system driving the compiler knows the
+    /// variable was read and can rerun the build when its value changes.
+    /// Apart from the tracking, this behaves like `env::var`, except that the
+    /// argument must be UTF-8.
+    pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+        let key: &str = key.as_ref();
+        let result = env::var(key);
+        super::bridge::client::FreeFunctions::track_env_var(key, result.as_deref().ok());
+        result
+    }
+}
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+    /// Registers `path` as a build dependency of the current compilation.
+    ///
+    /// Commonly used for tracking asset preprocessing.
+    pub fn path<P: AsRef<str>>(path: P) {
+        super::bridge::client::FreeFunctions::track_path(path.as_ref());
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs
new file mode 100644
index 000000000..39309faa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs
@@ -0,0 +1,139 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+// Maps a single `tt` to an expression constructing the equivalent
+// `proc_macro` token: delimited groups recurse through `quote!`, bare
+// punctuation becomes `Punct`, and identifiers become `Ident`s spanned at
+// `Span::def_site()`.
+macro_rules! quote_tt {
+    (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+    ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+    ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+    (,) => { Punct::new(',', Spacing::Alone) };
+    (.) => { Punct::new('.', Spacing::Alone) };
+    (;) => { Punct::new(';', Spacing::Alone) };
+    (!) => { Punct::new('!', Spacing::Alone) };
+    (<) => { Punct::new('<', Spacing::Alone) };
+    (>) => { Punct::new('>', Spacing::Alone) };
+    (&) => { Punct::new('&', Spacing::Alone) };
+    (=) => { Punct::new('=', Spacing::Alone) };
+    ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+// Maps a `tt` to a `TokenStream`-convertible expression. `(@ expr)` splices
+// the expression unquoted; `::` needs special handling because it is two
+// joint `Punct`s; everything else defers to `quote_tt!`.
+macro_rules! quote_ts {
+    ((@ $($t:tt)*)) => { $($t)* };
+    (::) => {
+        [
+            TokenTree::from(Punct::new(':', Spacing::Joint)),
+            TokenTree::from(Punct::new(':', Spacing::Alone)),
+        ].iter()
+            .cloned()
+            .map(|mut x| {
+                // Re-span both colons at the definition site, matching the
+                // idents produced by `quote_tt!`.
+                x.set_span(Span::def_site());
+                x
+            })
+            .collect::<TokenStream>()
+    };
+    ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
+macro_rules! quote {
+    () => { TokenStream::new() };
+    ($($t:tt)*) => {
+        // Each token becomes its own single-element stream; collecting
+        // concatenates them into one stream.
+        [
+            $(TokenStream::from(quote_ts!($t)),)*
+        ].iter().cloned().collect::<TokenStream>()
+    };
+}
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+    if stream.is_empty() {
+        return quote!(super::TokenStream::new());
+    }
+    // Tokens that recover spans refer back to the caller's `proc_macro`.
+    let proc_macro_crate = quote!(crate);
+    // One-token lookbehind: set when a `$` was just consumed, so the next
+    // token is either an ident to interpolate or a second `$` (escape).
+    let mut after_dollar = false;
+    let tokens = stream
+        .into_iter()
+        .filter_map(|tree| {
+            if after_dollar {
+                after_dollar = false;
+                match tree {
+                    // `$ident` — splice the interpolated variable's stream
+                    // into the output instead of quoting the ident itself.
+                    TokenTree::Ident(_) => {
+                        return Some(quote!(Into::<super::TokenStream>::into(
+                        Clone::clone(&(@ tree))),));
+                    }
+                    // `$$` — fall through and quote a literal `$`.
+                    TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+                    _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+                }
+            } else if let TokenTree::Punct(ref tt) = tree {
+                if tt.as_char() == '$' {
+                    // Remember the `$` and emit nothing for it yet.
+                    after_dollar = true;
+                    return None;
+                }
+            }
+
+            // Ordinary token: emit code that reconstructs an equivalent
+            // token at expansion time, recovering spans via `quote_span`.
+            Some(quote!(super::TokenStream::from((@ match tree {
+                TokenTree::Punct(tt) => quote!(super::TokenTree::Punct(super::Punct::new(
+                    (@ TokenTree::from(Literal::character(tt.as_char()))),
+                    (@ match tt.spacing() {
+                        Spacing::Alone => quote!(super::Spacing::Alone),
+                        Spacing::Joint => quote!(super::Spacing::Joint),
+                    }),
+                ))),
+                TokenTree::Group(tt) => quote!(super::TokenTree::Group(super::Group::new(
+                    (@ match tt.delimiter() {
+                        Delimiter::Parenthesis => quote!(super::Delimiter::Parenthesis),
+                        Delimiter::Brace => quote!(super::Delimiter::Brace),
+                        Delimiter::Bracket => quote!(super::Delimiter::Bracket),
+                        Delimiter::None => quote!(super::Delimiter::None),
+                    }),
+                    // Recursively quote the group's contents.
+                    (@ quote(tt.stream())),
+                ))),
+                TokenTree::Ident(tt) => quote!(super::TokenTree::Ident(super::Ident::new(
+                    (@ TokenTree::from(Literal::string(&tt.to_string()))),
+                    (@ quote_span(proc_macro_crate.clone(), tt.span())),
+                ))),
+                // Literals round-trip through their string form: parse the
+                // stringified literal back, then re-apply the original span.
+                TokenTree::Literal(tt) => quote!(super::TokenTree::Literal({
+                    let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+                        .parse::<super::TokenStream>()
+                        .unwrap()
+                        .into_iter();
+                    if let (Some(super::TokenTree::Literal(mut lit)), None) =
+                        (iter.next(), iter.next())
+                    {
+                        lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+                        lit
+                    } else {
+                        unreachable!()
+                    }
+                }))
+            })),))
+        })
+        .collect::<TokenStream>();
+
+    if after_dollar {
+        panic!("unexpected trailing `$` in `quote!`");
+    }
+
+    quote!([(@ tokens)].iter().cloned().collect::<super::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+// The span is saved server-side (`save_span`) and the emitted code recovers
+// it at expansion time via `<proc_macro_crate>::Span::recover_proc_macro_span`.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+    let id = span.save_span();
+    quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs
new file mode 100644
index 000000000..7e8e67856
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs
@@ -0,0 +1,792 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet is using proc-macro2 as backend,
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+// Map the proc-macro server vocabulary onto rust-analyzer's `tt` crate types.
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+// Spans are plain token ids; no real span info is implemented yet (see the
+// FIXME at the top of this file).
+type Span = tt::TokenId;
+
+/// Server-side token stream: a flat list of `tt` token trees.
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+    pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+    /// Creates an empty token stream.
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Builds a stream from a `tt::Subtree`.
+    ///
+    /// A delimited subtree becomes a single element of the stream, while an
+    /// undelimited one contributes its children directly.
+    pub fn with_subtree(subtree: tt::Subtree) -> Self {
+        let token_trees = if subtree.delimiter.is_none() {
+            subtree.token_trees
+        } else {
+            vec![TokenTree::Subtree(subtree)]
+        };
+        TokenStream { token_trees }
+    }
+
+    /// Converts the stream back into a single undelimited `tt::Subtree`.
+    pub fn into_subtree(self) -> tt::Subtree {
+        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+    }
+
+    /// Returns `true` if the stream holds no token trees.
+    pub fn is_empty(&self) -> bool {
+        self.token_trees.is_empty()
+    }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        let token_trees = vec![tree];
+        TokenStream { token_trees }
+    }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+        let mut stream = TokenStream::new();
+        // `Extend<TokenTree>` routes each tree through the same
+        // singleton-stream path as collecting streams would.
+        stream.extend(trees);
+        stream
+    }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        let mut builder = TokenStreamBuilder::new();
+        for stream in streams {
+            builder.push(stream);
+        }
+        builder.build()
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+        // Wrap each tree in a singleton stream so the stream-level `extend`
+        // can apply its flattening rules uniformly.
+        for tree in trees {
+            self.extend(std::iter::once(TokenStream::from(tree)));
+        }
+    }
+}
+
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        for stream in streams {
+            for tree in stream {
+                // Undelimited subtrees are transparent: splice their
+                // children in; everything else is appended as-is.
+                match tree {
+                    tt::TokenTree::Subtree(tt::Subtree { delimiter: None, token_trees }) => {
+                        self.token_trees.extend(token_trees);
+                    }
+                    other => self.token_trees.push(other),
+                }
+            }
+        }
+    }
+}
+
+/// Placeholder source-file handle; no file information is tracked yet.
+#[derive(Clone)]
+pub struct SourceFile {
+    // FIXME stub
+}
+
+// Re-use the level/position types from the vendored `proc_macro` module.
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+    level: Level,
+    message: String,
+    spans: Vec<Span>,
+    children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+    /// Creates a new diagnostic with the given `level` and `message`, with no
+    /// spans attached and no child diagnostics.
+    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+        Diagnostic {
+            level,
+            message: message.into(),
+            spans: Vec::new(),
+            children: Vec::new(),
+        }
+    }
+}
+
+// The rustc server API requires its `Ident` handle type to be `Copy`, so
+// idents are interned and passed around as small `u32` ids instead.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+// Owned ident payload stored in the interner.
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+// Bidirectional intern table: `idents` maps data -> index, `ident_data`
+// maps index -> data; the two are kept in lockstep by `intern`.
+#[derive(Default)]
+struct IdentInterner {
+    idents: HashMap<IdentData, u32>,
+    ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+    /// Returns the stable index for `data`, interning it on first use.
+    fn intern(&mut self, data: &IdentData) -> u32 {
+        if let Some(index) = self.idents.get(data).copied() {
+            return index;
+        }
+
+        let index = self.idents.len() as u32;
+        self.idents.insert(data.clone(), index);
+        self.ident_data.push(data.clone());
+        index
+    }
+
+    /// Looks up previously interned ident data by its index.
+    fn get(&self, index: u32) -> &IdentData {
+        &self.ident_data[index as usize]
+    }
+
+    #[allow(unused)]
+    fn get_mut(&mut self, index: u32) -> &mut IdentData {
+        self.ident_data.get_mut(index as usize).expect("Should be consistent")
+    }
+}
+
+// Accumulator used to concatenate streams; see `FromIterator<TokenStream>`.
+pub struct TokenStreamBuilder {
+    acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+    use std::str::FromStr;
+
+    use super::{TokenStream, TokenTree};
+
+    /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+    /// and returns whole groups as token trees.
+    impl IntoIterator for TokenStream {
+        type Item = TokenTree;
+        type IntoIter = super::IntoIter<TokenTree>;
+
+        fn into_iter(self) -> Self::IntoIter {
+            self.token_trees.into_iter()
+        }
+    }
+
+    type LexError = String;
+
+    /// Attempts to break the string into tokens and parse those tokens into a token stream.
+    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+    /// or characters not existing in the language.
+    /// All tokens in the parsed stream get `Span::call_site()` spans.
+    ///
+    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+    /// change these errors into `LexError`s later.
+    impl FromStr for TokenStream {
+        type Err = LexError;
+
+        fn from_str(src: &str) -> Result<TokenStream, LexError> {
+            let (subtree, _token_map) =
+                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+            // The token map produced by `mbe` is discarded, so the ids it
+            // assigned are meaningless — scrub them to "unspecified".
+            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+            Ok(TokenStream::with_subtree(subtree))
+        }
+    }
+
+    // Implement `Display` rather than `ToString` directly: the blanket
+    // `impl<T: Display> ToString for T` still provides `.to_string()` for
+    // all existing callers, and `Display` is the idiomatic trait to define.
+    impl std::fmt::Display for TokenStream {
+        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+            f.write_str(&tt::pretty(&self.token_trees))
+        }
+    }
+
+    /// Clears the delimiter id and recursively scrubs all contained token ids.
+    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+        tt::Subtree {
+            delimiter: subtree
+                .delimiter
+                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            token_trees: subtree
+                .token_trees
+                .into_iter()
+                .map(token_tree_replace_token_ids_with_unspecified)
+                .collect(),
+        }
+    }
+
+    /// Scrubs the ids of a single token tree (recursing into subtrees).
+    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+        match tt {
+            tt::TokenTree::Leaf(leaf) => {
+                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+            }
+            tt::TokenTree::Subtree(subtree) => {
+                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+            }
+        }
+    }
+
+    /// Scrubs the id of a single leaf token.
+    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+        match leaf {
+            tt::Leaf::Literal(lit) => {
+                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+            }
+            tt::Leaf::Punct(punct) => {
+                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+            }
+            tt::Leaf::Ident(ident) => {
+                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+            }
+        }
+    }
+}
+
+impl TokenStreamBuilder {
+    /// Starts with an empty accumulator stream.
+    fn new() -> TokenStreamBuilder {
+        let acc = TokenStream::new();
+        TokenStreamBuilder { acc }
+    }
+
+    /// Appends all token trees of `stream` to the accumulator.
+    fn push(&mut self, stream: TokenStream) {
+        self.acc.extend(stream.into_iter())
+    }
+
+    /// Consumes the builder, yielding the concatenated stream.
+    fn build(self) -> TokenStream {
+        self.acc
+    }
+}
+
+// Stateless marker for the server's free-function namespace.
+pub struct FreeFunctions;
+
+// Owning iterator over a stream's token trees.
+#[derive(Clone)]
+pub struct TokenStreamIter {
+    trees: IntoIter<TokenTree>,
+}
+
+/// The rust-analyzer proc-macro server state.
+#[derive(Default)]
+pub struct RustAnalyzer {
+    // Idents are interned because the bridge requires the `Ident` handle
+    // type to be `Copy` (see `IdentId` above).
+    ident_interner: IdentInterner,
+    // FIXME: store span information here.
+}
+
+// Binds the bridge's abstract handle types to rust-analyzer's concrete ones.
+impl server::Types for RustAnalyzer {
+    type FreeFunctions = FreeFunctions;
+    type TokenStream = TokenStream;
+    // Interned handle; resolved through `IdentInterner`.
+    type Ident = IdentId;
+    type Literal = Literal;
+    type SourceFile = SourceFile;
+    type Diagnostic = Diagnostic;
+    // `Span` is `tt::TokenId` — no real span data yet (see file-top FIXME).
+    type Span = Span;
+    type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+    // Deliberate no-op: env-var tracking is not implemented yet.
+    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+        // FIXME: track env var accesses
+        // https://github.com/rust-lang/rust/pull/71858
+    }
+    // Deliberate no-op: path tracking is not implemented yet.
+    fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = Group {
+ delimiter: delim_to_internal(group.delimiter),
+ token_trees: match group.stream {
+ Some(stream) => stream.into_iter().collect(),
+ None => Vec::new(),
+ },
+ };
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+ id: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: punct.char as u8,
+ joint: punct.spacing == Spacing::Joint,
+ span: punct.id,
+ })
+ }
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+ delimiter: delim_to_external(subtree.delimiter),
+ stream: if subtree.token_trees.is_empty() {
+ None
+ } else {
+ Some(subtree.token_trees.into_iter().collect())
+ },
+ span: bridge::DelimSpan::from_single(
+ subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
+ ),
+ }),
+ })
+ .collect()
+ }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ // (child/sub-diagnostics are currently dropped)
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+ /// Recent feature, not yet in the proc_macro
+ ///
+ /// See PR:
+ /// <https://github.com/rust-lang/rust/pull/55780>
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+ // FIXME handle span
+ other.push(span)
+ }
+}
+
+impl server::Server for RustAnalyzer {
+ fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+ bridge::ExpnGlobals {
+ def_site: Span::unspecified(),
+ call_site: Span::unspecified(),
+ mixed_site: Span::unspecified(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
new file mode 100644
index 000000000..44712f419
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
@@ -0,0 +1,102 @@
+//! Proc macro ABI
+
+extern crate proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
+ ra_server::TokenStream::with_subtree(attr.clone())
+ });
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
new file mode 100644
index 000000000..46882845a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
@@ -0,0 +1,518 @@
+//! proc-macro server implementation
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet is using proc-macro2 as backend,
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::{
+ self,
+ bridge::{self, server},
+};
+
+mod token_stream;
+pub use token_stream::TokenStream;
+use token_stream::TokenStreamBuilder;
+
+mod symbol;
+pub use symbol::*;
+
+use std::{iter::FromIterator, ops::Bound};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type SourceFile = SourceFile;
+ type MultiSpan = Vec<Span>;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type Symbol = Symbol;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+
+ fn literal_from_str(
+ &mut self,
+ s: &str,
+ ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ // FIXME: keep track of LitKind and Suffix
+ Ok(bridge::Literal {
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(s),
+ suffix: None,
+ span: tt::TokenId::unspecified(),
+ })
+ }
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = Group {
+ delimiter: delim_to_internal(group.delimiter),
+ token_trees: match group.stream {
+ Some(stream) => stream.into_iter().collect(),
+ None => Vec::new(),
+ },
+ };
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(ident) => {
+ // FIXME: handle raw idents
+ let text = ident.sym.text();
+ let ident: tt::Ident = tt::Ident { text, id: ident.span };
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let literal = LiteralFormatter(literal);
+ let text = literal
+ .with_stringify_parts(|parts| tt::SmolStr::from_iter(parts.iter().copied()));
+
+ let literal = tt::Literal { text, id: literal.0.span };
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+ id: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(&ident.text),
+ // FIXME: handle raw idents
+ is_raw: false,
+ span: ident.id,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ bridge::TokenTree::Literal(bridge::Literal {
+ // FIXME: handle literal kinds
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(&lit.text),
+ // FIXME: handle suffixes
+ suffix: None,
+ span: lit.id,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: punct.char as u8,
+ joint: punct.spacing == Spacing::Joint,
+ span: punct.id,
+ })
+ }
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+ delimiter: delim_to_external(subtree.delimiter),
+ stream: if subtree.token_trees.is_empty() {
+ None
+ } else {
+ Some(subtree.token_trees.into_iter().collect())
+ },
+ span: bridge::DelimSpan::from_single(
+ subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
+ ),
+ }),
+ })
+ .collect()
+ }
+}
+
+fn delim_to_internal(d: proc_macro::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
+ proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ proc_macro::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> proc_macro::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket,
+ None => proc_macro::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing {
+ match spacing {
+ proc_macro::Spacing::Alone => Spacing::Alone,
+ proc_macro::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
+ match spacing {
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ // (child/sub-diagnostics are currently dropped)
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+ /// Recent feature, not yet in the proc_macro
+ ///
+ /// See PR:
+ /// <https://github.com/rust-lang/rust/pull/55780>
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn subspan(
+ &mut self,
+ span: Self::Span,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // Just return the span again, because some macros will unwrap the result.
+ Some(span)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+ // FIXME handle span
+ other.push(span)
+ }
+}
+
+impl server::Symbol for RustAnalyzer {
+ fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
+ // FIXME: nfc-normalize and validate idents
+ Ok(<Self as server::Server>::intern_symbol(string))
+ }
+}
+
+impl server::Server for RustAnalyzer {
+ fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+ bridge::ExpnGlobals {
+ def_site: Span::unspecified(),
+ call_site: Span::unspecified(),
+ mixed_site: Span::unspecified(),
+ }
+ }
+
+ fn intern_symbol(ident: &str) -> Self::Symbol {
+ Symbol::intern(&tt::SmolStr::from(ident))
+ }
+
+ fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+ f(symbol.text().as_str())
+ }
+}
+
+struct LiteralFormatter(bridge::Literal<tt::TokenId, Symbol>);
+
+impl LiteralFormatter {
+ /// Invokes the callback with a `&[&str]` consisting of each part of the
+ /// literal's representation. This is done to allow the `ToString` and
+ /// `Display` implementations to borrow references to symbol values, and
+ /// both be optimized to reduce overhead.
+ fn with_stringify_parts<R>(&self, f: impl FnOnce(&[&str]) -> R) -> R {
+ /// Returns a string containing exactly `num` '#' characters.
+ /// Uses a 256-character source string literal which is always safe to
+ /// index with a `u8` index.
+ fn get_hashes_str(num: u8) -> &'static str {
+ const HASHES: &str = "\
+ ################################################################\
+ ################################################################\
+ ################################################################\
+ ################################################################\
+ ";
+ const _: () = assert!(HASHES.len() == 256);
+ &HASHES[..num as usize]
+ }
+
+ self.with_symbol_and_suffix(|symbol, suffix| match self.0.kind {
+ bridge::LitKind::Byte => f(&["b'", symbol, "'", suffix]),
+ bridge::LitKind::Char => f(&["'", symbol, "'", suffix]),
+ bridge::LitKind::Str => f(&["\"", symbol, "\"", suffix]),
+ bridge::LitKind::StrRaw(n) => {
+ let hashes = get_hashes_str(n);
+ f(&["r", hashes, "\"", symbol, "\"", hashes, suffix])
+ }
+ bridge::LitKind::ByteStr => f(&["b\"", symbol, "\"", suffix]),
+ bridge::LitKind::ByteStrRaw(n) => {
+ let hashes = get_hashes_str(n);
+ f(&["br", hashes, "\"", symbol, "\"", hashes, suffix])
+ }
+ _ => f(&[symbol, suffix]),
+ })
+ }
+
+ fn with_symbol_and_suffix<R>(&self, f: impl FnOnce(&str, &str) -> R) -> R {
+ let symbol = self.0.symbol.text();
+ let suffix = self.0.suffix.map(|s| s.text()).unwrap_or_default();
+ f(symbol.as_str(), suffix.as_str())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs
new file mode 100644
index 000000000..51dfba2ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs
@@ -0,0 +1,46 @@
+//! Symbol interner for proc-macro-srv
+
+use std::{cell::RefCell, collections::HashMap};
+use tt::SmolStr;
+
+thread_local! {
+ static SYMBOL_INTERNER: RefCell<SymbolInterner> = Default::default();
+}
+
+// ID for an interned symbol.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct Symbol(u32);
+
+impl Symbol {
+ pub fn intern(data: &str) -> Symbol {
+ SYMBOL_INTERNER.with(|i| i.borrow_mut().intern(data))
+ }
+
+ pub fn text(&self) -> SmolStr {
+ SYMBOL_INTERNER.with(|i| i.borrow().get(self).clone())
+ }
+}
+
+#[derive(Default)]
+struct SymbolInterner {
+ idents: HashMap<SmolStr, u32>,
+ ident_data: Vec<SmolStr>,
+}
+
+impl SymbolInterner {
+ fn intern(&mut self, data: &str) -> Symbol {
+ if let Some(index) = self.idents.get(data) {
+ return Symbol(*index);
+ }
+
+ let index = self.idents.len() as u32;
+ let data = SmolStr::from(data);
+ self.ident_data.push(data.clone());
+ self.idents.insert(data, index);
+ Symbol(index)
+ }
+
+ fn get(&self, sym: &Symbol) -> &SmolStr {
+ &self.ident_data[sym.0 as usize]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
new file mode 100644
index 000000000..113bb52c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
@@ -0,0 +1,179 @@
+//! TokenStream implementation used by sysroot ABI
+
+use tt::TokenTree;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream::default()
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = std::vec::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ pub(super) fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ pub(super) fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ pub(super) fn build(self) -> TokenStream {
+ self.acc
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
new file mode 100644
index 000000000..bcf3f1184
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
@@ -0,0 +1,155 @@
+//! Procedural macros are implemented by compiling the macro providing crate
+//! to a dynamic library with a particular ABI which the compiler uses to expand
+//! macros. Unfortunately this ABI is not specified and can change from version
+//! to version of the compiler. To support this we copy the ABI from the rust
+//! compiler into submodules of this module (e.g proc_macro_srv::abis::abi_1_47).
+//!
+//! All of these ABIs are subsumed in the `Abi` enum, which exposes a simple
+//! interface the rest of rust analyzer can use to talk to the macro
+//! provider.
+//!
+//! # Adding a new ABI
+//!
+//! To add a new ABI you'll need to copy the source of the target proc_macro
+//! crate from the source tree of the Rust compiler into this directory tree.
+//! Then you'll need to modify it
+//! - Remove any feature! or other things which won't compile on stable
+//! - change any absolute imports to relative imports within the ABI tree
+//!
+//! Then you'll need to add a branch to the `Abi` enum and an implementation of
+//! `Abi::expand`, `Abi::list_macros` and `Abi::from_lib` for the new ABI. See
+//! `proc_macro_srv/src/abis/abi_1_47/mod.rs` for an example. Finally you'll
+//! need to update the conditionals in `Abi::from_lib` to return your new ABI
+//! for the relevant versions of the rust compiler
+//!
+
+mod abi_1_58;
+mod abi_1_63;
+mod abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+mod abi_sysroot;
+
+// see `build.rs`
+include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
+
+// Used by `test/utils.rs`
+#[cfg(test)]
+pub(crate) use abi_1_64::TokenStream as TestTokenStream;
+
+use super::dylib::LoadProcMacroDylibError;
+pub(crate) use abi_1_58::Abi as Abi_1_58;
+pub(crate) use abi_1_63::Abi as Abi_1_63;
+pub(crate) use abi_1_64::Abi as Abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
+use libloading::Library;
+use proc_macro_api::{ProcMacroKind, RustCInfo};
+
+pub struct PanicMessage {
+ message: Option<String>,
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<String> {
+ self.message.clone()
+ }
+}
+
+pub(crate) enum Abi {
+ Abi1_58(Abi_1_58),
+ Abi1_63(Abi_1_63),
+ Abi1_64(Abi_1_64),
+ #[cfg(feature = "sysroot-abi")]
+ AbiSysroot(Abi_Sysroot),
+}
+
+impl Abi {
+ /// Load a new ABI.
+ ///
+ /// # Arguments
+ ///
+ /// *`lib` - The dynamic library containing the macro implementations
+ /// *`symbol_name` - The symbol name the macros can be found attributes
+ /// *`info` - RustCInfo about the compiler that was used to compile the
+ /// macro crate. This is the information we use to figure out
+ /// which ABI to return
+ pub fn from_lib(
+ lib: &Library,
+ symbol_name: String,
+ info: RustCInfo,
+ ) -> Result<Abi, LoadProcMacroDylibError> {
+ // the sysroot ABI relies on `extern proc_macro` with unstable features,
+ // instead of a snapshot of the proc macro bridge's source code. it's only
+ // enabled if we have an exact version match.
+ #[cfg(feature = "sysroot-abi")]
+ {
+ if info.version_string == RUSTC_VERSION_STRING {
+ let inner = unsafe { Abi_Sysroot::from_lib(lib, symbol_name) }?;
+ return Ok(Abi::AbiSysroot(inner));
+ }
+
+ // if we reached this point, versions didn't match. in testing, we
+ // want that to panic - this could mean that the format of `rustc
+ // --version` no longer matches the format of the version string
+ // stored in the `.rustc` section, and we want to catch that in-tree
+ // with `x.py test`
+ #[cfg(test)]
+ {
+ let allow_mismatch = std::env::var("PROC_MACRO_SRV_ALLOW_SYSROOT_MISMATCH");
+ if let Ok("1") = allow_mismatch.as_deref() {
+ // only used by rust-analyzer developers, when working on the
+ // sysroot ABI from the rust-analyzer repository - which should
+ // only happen pre-subtree. this can be removed later.
+ } else {
+ panic!(
+ "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}",
+ info.version_string, RUSTC_VERSION_STRING
+ );
+ }
+ }
+ }
+
+ // FIXME: this should use exclusive ranges when they're stable
+ // https://github.com/rust-lang/rust/issues/37854
+ match (info.version.0, info.version.1) {
+ (1, 58..=62) => {
+ let inner = unsafe { Abi_1_58::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_58(inner))
+ }
+ (1, 63) => {
+ let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_63(inner))
+ }
+ (1, 64..) => {
+ let inner = unsafe { Abi_1_64::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_64(inner))
+ }
+ _ => Err(LoadProcMacroDylibError::UnsupportedABI),
+ }
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ match self {
+ Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes),
+ Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes),
+ Self::Abi1_64(abi) => abi.expand(macro_name, macro_body, attributes),
+ #[cfg(feature = "sysroot-abi")]
+ Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes),
+ }
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ match self {
+ Self::Abi1_58(abi) => abi.list_macros(),
+ Self::Abi1_63(abi) => abi.list_macros(),
+ Self::Abi1_64(abi) => abi.list_macros(),
+ #[cfg(feature = "sysroot-abi")]
+ Self::AbiSysroot(abi) => abi.list_macros(),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
new file mode 100644
index 000000000..f1e131c13
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
@@ -0,0 +1,31 @@
+//! Driver for proc macro server
+use std::io;
+
+use proc_macro_api::msg::{self, Message};
+
+use crate::ProcMacroSrv;
+
+pub fn run() -> io::Result<()> {
+ let mut srv = ProcMacroSrv::default();
+ let mut buf = String::new();
+
+ while let Some(req) = read_request(&mut buf)? {
+ let res = match req {
+ msg::Request::ListMacros { dylib_path } => {
+ msg::Response::ListMacros(srv.list_macros(&dylib_path))
+ }
+ msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)),
+ };
+ write_response(res)?
+ }
+
+ Ok(())
+}
+
+fn read_request(buf: &mut String) -> io::Result<Option<msg::Request>> {
+ msg::Request::read(&mut io::stdin().lock(), buf)
+}
+
+fn write_response(msg: msg::Response) -> io::Result<()> {
+ msg.write(&mut io::stdout().lock())
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
new file mode 100644
index 000000000..2b6c070fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
@@ -0,0 +1,199 @@
+//! Handles dynamic library loading for proc macro
+
+use std::{
+ convert::TryInto,
+ fmt,
+ fs::File,
+ io,
+ path::{Path, PathBuf},
+};
+
+use libloading::Library;
+use memmap2::Mmap;
+use object::Object;
+use paths::AbsPath;
+use proc_macro_api::{read_dylib_info, ProcMacroKind};
+
+use super::abis::Abi;
+
+const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
+
+fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
+ io::Error::new(io::ErrorKind::InvalidData, e)
+}
+
+fn is_derive_registrar_symbol(symbol: &str) -> bool {
+ symbol.contains(NEW_REGISTRAR_SYMBOL)
+}
+
+fn find_registrar_symbol(file: &Path) -> io::Result<Option<String>> {
+ let file = File::open(file)?;
+ let buffer = unsafe { Mmap::map(&file)? };
+
+ Ok(object::File::parse(&*buffer)
+ .map_err(invalid_data_err)?
+ .exports()
+ .map_err(invalid_data_err)?
+ .into_iter()
+ .map(|export| export.name())
+ .filter_map(|sym| String::from_utf8(sym.into()).ok())
+ .find(|sym| is_derive_registrar_symbol(sym))
+ .map(|sym| {
+ // From MacOS docs:
+ // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html
+ // Unlike other dyld API's, the symbol name passed to dlsym() must NOT be
+ // prepended with an underscore.
+ if cfg!(target_os = "macos") && sym.starts_with('_') {
+ sym[1..].to_owned()
+ } else {
+ sym
+ }
+ }))
+}
+
+/// Loads dynamic library in platform dependent manner.
+///
+/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample)
+/// and [here](https://github.com/rust-lang/rust/issues/60593).
+///
+/// Usage of RTLD_DEEPBIND
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1)
+///
+/// It seems that on Windows that behaviour is default, so we do nothing in that case.
+#[cfg(windows)]
+fn load_library(file: &Path) -> Result<Library, libloading::Error> {
+ unsafe { Library::new(file) }
+}
+
+#[cfg(unix)]
+fn load_library(file: &Path) -> Result<Library, libloading::Error> {
+ use libloading::os::unix::Library as UnixLibrary;
+ use std::os::raw::c_int;
+
+ const RTLD_NOW: c_int = 0x00002;
+ const RTLD_DEEPBIND: c_int = 0x00008;
+
+ unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) }
+}
+
+#[derive(Debug)]
+pub enum LoadProcMacroDylibError {
+ Io(io::Error),
+ LibLoading(libloading::Error),
+ UnsupportedABI,
+}
+
+impl fmt::Display for LoadProcMacroDylibError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::Io(e) => e.fmt(f),
+ Self::UnsupportedABI => write!(f, "unsupported ABI version"),
+ Self::LibLoading(e) => e.fmt(f),
+ }
+ }
+}
+
+impl From<io::Error> for LoadProcMacroDylibError {
+ fn from(e: io::Error) -> Self {
+ LoadProcMacroDylibError::Io(e)
+ }
+}
+
+impl From<libloading::Error> for LoadProcMacroDylibError {
+ fn from(e: libloading::Error) -> Self {
+ LoadProcMacroDylibError::LibLoading(e)
+ }
+}
+
+struct ProcMacroLibraryLibloading {
+ // Hold on to the library so it doesn't unload
+ _lib: Library,
+ abi: Abi,
+}
+
+impl ProcMacroLibraryLibloading {
+ fn open(file: &Path) -> Result<Self, LoadProcMacroDylibError> {
+ let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| {
+ invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display()))
+ })?;
+
+ let abs_file: &AbsPath = file.try_into().map_err(|_| {
+ invalid_data_err(format!("expected an absolute path, got {}", file.display()))
+ })?;
+ let version_info = read_dylib_info(abs_file)?;
+
+ let lib = load_library(file).map_err(invalid_data_err)?;
+ let abi = Abi::from_lib(&lib, symbol_name, version_info)?;
+ Ok(ProcMacroLibraryLibloading { _lib: lib, abi })
+ }
+}
+
+pub struct Expander {
+ inner: ProcMacroLibraryLibloading,
+}
+
+impl Expander {
+ pub fn new(lib: &Path) -> Result<Expander, LoadProcMacroDylibError> {
+ // Some libraries for dynamic loading require canonicalized path even when it is
+ // already absolute
+ let lib = lib.canonicalize()?;
+
+ let lib = ensure_file_with_lock_free_access(&lib)?;
+
+ let library = ProcMacroLibraryLibloading::open(lib.as_ref())?;
+
+ Ok(Expander { inner: library })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, String> {
+ let result = self.inner.abi.expand(macro_name, macro_body, attributes);
+ result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.inner.abi.list_macros()
+ }
+}
+
+/// Copy the dylib to temp directory to prevent locking in Windows
+#[cfg(windows)]
+fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
+ use std::collections::hash_map::RandomState;
+ use std::ffi::OsString;
+ use std::hash::{BuildHasher, Hasher};
+
+ if std::env::var("RA_DONT_COPY_PROC_MACRO_DLL").is_ok() {
+ return Ok(path.to_path_buf());
+ }
+
+ let mut to = std::env::temp_dir();
+
+ let file_name = path.file_name().ok_or_else(|| {
+ io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!("File path is invalid: {}", path.display()),
+ )
+ })?;
+
+ // Generate a unique number by abusing `HashMap`'s hasher.
+ // Maybe this will also "inspire" a libs team member to finally put `rand` in libstd.
+ let t = RandomState::new().build_hasher().finish();
+
+ let mut unique_name = OsString::from(t.to_string());
+ unique_name.push(file_name);
+
+ to.push(unique_name);
+ std::fs::copy(path, &to).unwrap();
+ Ok(to)
+}
+
+#[cfg(unix)]
+fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
+ Ok(path.to_path_buf())
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
new file mode 100644
index 000000000..4c205b9ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -0,0 +1,160 @@
+//! RA Proc Macro Server
+//!
+//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code.
+//! The general idea here is based on <https://github.com/fedochet/rust-proc-macro-expander>.
+//!
+//! But we adapt it to better fit RA needs:
+//!
+//! * We use `tt` for proc-macro `TokenStream` server, it is easier to manipulate and interact with
+//! RA than `proc-macro2` token stream.
+//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
+//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![cfg_attr(
+ feature = "sysroot-abi",
+ feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)
+)]
+#![allow(unreachable_pub)]
+
+mod dylib;
+mod abis;
+
+use std::{
+ collections::{hash_map::Entry, HashMap},
+ env,
+ ffi::OsString,
+ fs,
+ path::{Path, PathBuf},
+ time::SystemTime,
+};
+
+use proc_macro_api::{
+ msg::{ExpandMacro, FlatTree, PanicMessage},
+ ProcMacroKind,
+};
+
+#[derive(Default)]
+pub(crate) struct ProcMacroSrv {
+ expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
+}
+
+const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
+
+impl ProcMacroSrv {
+ pub fn expand(&mut self, task: ExpandMacro) -> Result<FlatTree, PanicMessage> {
+ let expander = self.expander(task.lib.as_ref()).map_err(|err| {
+ debug_assert!(false, "should list macros before asking to expand");
+ PanicMessage(format!("failed to load macro: {}", err))
+ })?;
+
+ let prev_env = EnvSnapshot::new();
+ for (k, v) in &task.env {
+ env::set_var(k, v);
+ }
+ let prev_working_dir = match task.current_dir {
+ Some(dir) => {
+ let prev_working_dir = std::env::current_dir().ok();
+ if let Err(err) = std::env::set_current_dir(&dir) {
+ eprintln!("Failed to set the current working dir to {}. Error: {:?}", dir, err)
+ }
+ prev_working_dir
+ }
+ None => None,
+ };
+
+ let macro_body = task.macro_body.to_subtree();
+ let attributes = task.attributes.map(|it| it.to_subtree());
+ // FIXME: replace this with std's scoped threads once they stabilize
+ // (then remove dependency on crossbeam)
+ let result = crossbeam::scope(|s| {
+ let res = match s
+ .builder()
+ .stack_size(EXPANDER_STACK_SIZE)
+ .name(task.macro_name.clone())
+ .spawn(|_| {
+ expander
+ .expand(&task.macro_name, &macro_body, attributes.as_ref())
+ .map(|it| FlatTree::new(&it))
+ }) {
+ Ok(handle) => handle.join(),
+ Err(e) => std::panic::resume_unwind(Box::new(e)),
+ };
+
+ match res {
+ Ok(res) => res,
+ Err(e) => std::panic::resume_unwind(e),
+ }
+ });
+ let result = match result {
+ Ok(result) => result,
+ Err(e) => std::panic::resume_unwind(e),
+ };
+
+ prev_env.rollback();
+
+ if let Some(dir) = prev_working_dir {
+ if let Err(err) = std::env::set_current_dir(&dir) {
+ eprintln!(
+ "Failed to set the current working dir to {}. Error: {:?}",
+ dir.display(),
+ err
+ )
+ }
+ }
+
+ result.map_err(PanicMessage)
+ }
+
+ pub(crate) fn list_macros(
+ &mut self,
+ dylib_path: &Path,
+ ) -> Result<Vec<(String, ProcMacroKind)>, String> {
+ let expander = self.expander(dylib_path)?;
+ Ok(expander.list_macros())
+ }
+
+ fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> {
+ let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| {
+ format!("Failed to get file metadata for {}: {:?}", path.display(), err)
+ })?;
+
+ Ok(match self.expanders.entry((path.to_path_buf(), time)) {
+ Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| {
+ format!("Cannot create expander for {}: {:?}", path.display(), err)
+ })?),
+ Entry::Occupied(e) => e.into_mut(),
+ })
+ }
+}
+
+struct EnvSnapshot {
+ vars: HashMap<OsString, OsString>,
+}
+
+impl EnvSnapshot {
+ fn new() -> EnvSnapshot {
+ EnvSnapshot { vars: env::vars_os().collect() }
+ }
+
+ fn rollback(self) {
+ let mut old_vars = self.vars;
+ for (name, value) in env::vars_os() {
+ let old_value = old_vars.remove(&name);
+ if old_value != Some(value) {
+ match old_value {
+ None => env::remove_var(name),
+ Some(old_value) => env::set_var(name, old_value),
+ }
+ }
+ }
+ for (name, old_value) in old_vars {
+ env::set_var(name, old_value)
+ }
+ }
+}
+
+pub mod cli;
+
+#[cfg(test)]
+mod tests;
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
new file mode 100644
index 000000000..07222907f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
@@ -0,0 +1,166 @@
+//! proc-macro tests
+
+#[macro_use]
+mod utils;
+use expect_test::expect;
+use paths::AbsPathBuf;
+use utils::*;
+
+#[test]
+fn test_derive_empty() {
+ assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]);
+}
+
+#[test]
+fn test_derive_error() {
+ assert_expand(
+ "DeriveError",
+ r#"struct S;"#,
+ expect![[r##"
+ SUBTREE $
+ IDENT compile_error 4294967295
+ PUNCH ! [alone] 4294967295
+ SUBTREE () 4294967295
+ LITERAL "#[derive(DeriveError)] struct S ;" 4294967295
+ PUNCH ; [alone] 4294967295"##]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_noop() {
+ assert_expand(
+ "fn_like_noop",
+ r#"ident, 0, 1, []"#,
+ expect![[r#"
+ SUBTREE $
+ IDENT ident 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 0 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 1 4294967295
+ PUNCH , [alone] 4294967295
+ SUBTREE [] 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_ident_subtree() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r#"ident, []"#,
+ expect![[r#"
+ SUBTREE $
+ IDENT ident 4294967295
+ PUNCH , [alone] 4294967295
+ SUBTREE [] 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_raw_ident() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ "r#async",
+ expect![[r#"
+ SUBTREE $
+ IDENT async 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_mk_literals() {
+ assert_expand(
+ "fn_like_mk_literals",
+ r#""#,
+ expect![[r#"
+ SUBTREE $
+ LITERAL b"byte_string" 4294967295
+ LITERAL 'c' 4294967295
+ LITERAL "string" 4294967295
+ LITERAL 3.14f64 4294967295
+ LITERAL 3.14 4294967295
+ LITERAL 123i64 4294967295
+ LITERAL 123 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_mk_idents() {
+ // FIXME: this test is wrong: raw should be 'r#raw' but ABIs 1.64 and below
+ // simply ignore `is_raw` when implementing the `Ident` interface.
+ assert_expand(
+ "fn_like_mk_idents",
+ r#""#,
+ expect![[r#"
+ SUBTREE $
+ IDENT standard 4294967295
+ IDENT raw 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_literals() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
+ expect![[r#"
+ SUBTREE $
+ LITERAL 1u16 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 2_u32 4294967295
+ PUNCH , [alone] 4294967295
+ PUNCH - [alone] 4294967295
+ LITERAL 4i64 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 3.14f32 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL "hello bridge" 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_attr_macro() {
+ // Corresponds to
+ // #[proc_macro_test::attr_error(some arguments)]
+ // mod m {}
+ assert_expand_attr(
+ "attr_error",
+ r#"mod m {}"#,
+ r#"some arguments"#,
+ expect![[r##"
+ SUBTREE $
+ IDENT compile_error 4294967295
+ PUNCH ! [alone] 4294967295
+ SUBTREE () 4294967295
+ LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295
+ PUNCH ; [alone] 4294967295"##]],
+ );
+}
+
+/// Tests that we find and classify all proc macros correctly.
+#[test]
+fn list_test_macros() {
+ let res = list().join("\n");
+
+ expect![[r#"
+ fn_like_noop [FuncLike]
+ fn_like_panic [FuncLike]
+ fn_like_error [FuncLike]
+ fn_like_clone_tokens [FuncLike]
+ fn_like_mk_literals [FuncLike]
+ fn_like_mk_idents [FuncLike]
+ attr_noop [Attr]
+ attr_panic [Attr]
+ attr_error [Attr]
+ DeriveEmpty [CustomDerive]
+ DerivePanic [CustomDerive]
+ DeriveError [CustomDerive]"#]]
+ .assert_eq(&res);
+}
+
+#[test]
+fn test_version_check() {
+ let path = AbsPathBuf::assert(fixtures::proc_macro_test_dylib_path());
+ let info = proc_macro_api::read_dylib_info(&path).unwrap();
+ assert!(info.version.1 >= 50);
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
new file mode 100644
index 000000000..f881fe868
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
@@ -0,0 +1,47 @@
+//! utils used in proc-macro tests
+
+use crate::dylib;
+use crate::ProcMacroSrv;
+use expect_test::Expect;
+use std::str::FromStr;
+
+pub mod fixtures {
+ pub fn proc_macro_test_dylib_path() -> std::path::PathBuf {
+ proc_macro_test::PROC_MACRO_TEST_LOCATION.into()
+ }
+}
+
+fn parse_string(code: &str) -> Option<crate::abis::TestTokenStream> {
+ // This is a bit strange. We need to parse a string into a token stream into
+ // order to create a tt::SubTree from it in fixtures. `into_subtree` is
+ // implemented by all the ABIs we have so we arbitrarily choose one ABI to
+ // write a `parse_string` function for and use that. The tests don't really
+ // care which ABI we're using as the `into_subtree` function isn't part of
+ // the ABI and shouldn't change between ABI versions.
+ crate::abis::TestTokenStream::from_str(code).ok()
+}
+
+pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
+ assert_expand_impl(macro_name, ra_fixture, None, expect);
+}
+
+pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, expect: Expect) {
+ assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect);
+}
+
+fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+ let path = fixtures::proc_macro_test_dylib_path();
+ let expander = dylib::Expander::new(&path).unwrap();
+ let fixture = parse_string(input).unwrap();
+ let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
+
+ let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
+ expect.assert_eq(&format!("{:?}", res));
+}
+
+pub(crate) fn list() -> Vec<String> {
+ let dylib_path = fixtures::proc_macro_test_dylib_path();
+ let mut srv = ProcMacroSrv::default();
+ let res = srv.list_macros(&dylib_path).unwrap();
+ res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
new file mode 100644
index 000000000..684477191
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "proc-macro-test"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+publish = false
+
+[lib]
+doctest = false
+
+[build-dependencies]
+proc-macro-test-impl = { path = "imp", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+cargo_metadata = "0.15.0"
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/build.rs b/src/tools/rust-analyzer/crates/proc-macro-test/build.rs
new file mode 100644
index 000000000..a80c96261
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/build.rs
@@ -0,0 +1,106 @@
+//! This will build the proc macro in `imp`, and copy the resulting dylib artifact into the
+//! `OUT_DIR`.
+//!
+//! `proc-macro-test` itself contains only a path to that artifact.
+//!
+//! The `PROC_MACRO_TEST_TOOLCHAIN` environment variable can be exported to use
+//! a specific rustup toolchain: this allows testing against older ABIs (e.g.
+//! 1.58) and future ABIs (stage1, nightly)
+
+use std::{
+ env, fs,
+ path::{Path, PathBuf},
+ process::Command,
+};
+
+use cargo_metadata::Message;
+
+/// Builds `proc-macro-test-impl` in an isolated staging directory under
+/// `OUT_DIR` and records the path of the produced proc-macro dylib in
+/// `$OUT_DIR/proc_macro_test_location.txt`, which `src/lib.rs` embeds via
+/// `include_str!`.
+fn main() {
+    println!("cargo:rerun-if-changed=imp");
+    println!("cargo:rerun-if-env-changed=PROC_MACRO_TEST_TOOLCHAIN");
+
+    let out_dir = env::var_os("OUT_DIR").unwrap();
+    let out_dir = Path::new(&out_dir);
+
+    let name = "proc-macro-test-impl";
+    let version = "0.0.0";
+
+    let imp_dir = std::env::current_dir().unwrap().join("imp");
+
+    // Stage a copy of the `imp` crate inside OUT_DIR so the nested cargo
+    // invocation below never touches (or locks) the source tree.
+    let staging_dir = out_dir.join("proc-macro-test-imp-staging");
+    // this'll error out if the staging dir didn't previously exist. using
+    // `std::fs::exists` would suffer from TOCTOU so just do our best to
+    // wipe it and ignore errors.
+    let _ = std::fs::remove_dir_all(&staging_dir);
+
+    println!("Creating {}", staging_dir.display());
+    std::fs::create_dir_all(&staging_dir).unwrap();
+
+    let src_dir = staging_dir.join("src");
+    println!("Creating {}", src_dir.display());
+    std::fs::create_dir_all(src_dir).unwrap();
+
+    // Copy only the files the nested build needs: the manifest and lib.rs.
+    for item_els in [&["Cargo.toml"][..], &["src", "lib.rs"]] {
+        let mut src = imp_dir.clone();
+        let mut dst = staging_dir.clone();
+        for el in item_els {
+            src.push(el);
+            dst.push(el);
+        }
+        println!("Copying {} to {}", src.display(), dst.display());
+        std::fs::copy(src, dst).unwrap();
+    }
+
+    let target_dir = out_dir.join("target");
+
+    let mut cmd = if let Ok(toolchain) = std::env::var("PROC_MACRO_TEST_TOOLCHAIN") {
+        // leverage rustup to find user-specific toolchain
+        let mut cmd = Command::new("cargo");
+        cmd.arg(format!("+{toolchain}"));
+        cmd
+    } else {
+        Command::new(toolchain::cargo())
+    };
+
+    cmd.current_dir(&staging_dir)
+        .args(&["build", "-p", "proc-macro-test-impl", "--message-format", "json"])
+        // Explicitly override the target directory to avoid using the same one which the parent
+        // cargo is using, or we'll deadlock.
+        // This can happen when `CARGO_TARGET_DIR` is set or global config forces all cargo
+        // instances to use the same target directory.
+        .arg("--target-dir")
+        .arg(&target_dir);
+
+    println!("Running {:?}", cmd);
+
+    let output = cmd.output().unwrap();
+    if !output.status.success() {
+        println!("proc-macro-test-impl failed to build");
+        println!("============ stdout ============");
+        println!("{}", String::from_utf8_lossy(&output.stdout));
+        println!("============ stderr ============");
+        println!("{}", String::from_utf8_lossy(&output.stderr));
+        panic!("proc-macro-test-impl failed to build");
+    }
+
+    // Scan cargo's JSON messages for the proc-macro artifact that belongs to
+    // `proc-macro-test-impl` and remember where its dylib landed.
+    let mut artifact_path = None;
+    for message in Message::parse_stream(output.stdout.as_slice()) {
+        match message.unwrap() {
+            Message::CompilerArtifact(artifact) => {
+                if artifact.target.kind.contains(&"proc-macro".to_string()) {
+                    let repr = format!("{} {}", name, version);
+                    if artifact.package_id.repr.starts_with(&repr) {
+                        artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
+                    }
+                }
+            }
+            _ => (), // Unknown message
+        }
+    }
+
+    // This file is under `target_dir` and is already under `OUT_DIR`.
+    let artifact_path = artifact_path.expect("no dylib for proc-macro-test-impl found");
+
+    let info_path = out_dir.join("proc_macro_test_location.txt");
+    fs::write(info_path, artifact_path.to_str().unwrap()).unwrap();
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml
new file mode 100644
index 000000000..2d1fc3c5c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "proc-macro-test-impl"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+publish = false
+
+[lib]
+doctest = false
+proc-macro = true
+
+[workspace]
+
+[dependencies]
+# this crate should not have any dependencies, since it uses its own workspace,
+# and its own `Cargo.lock`
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
new file mode 100644
index 000000000..feeacdb64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
@@ -0,0 +1,114 @@
+//! Exports a few trivial procedural macros for testing.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
+
+#[proc_macro]
+pub fn fn_like_noop(args: TokenStream) -> TokenStream {
+ args
+}
+
+#[proc_macro]
+pub fn fn_like_panic(args: TokenStream) -> TokenStream {
+ panic!("fn_like_panic!({})", args);
+}
+
+#[proc_macro]
+pub fn fn_like_error(args: TokenStream) -> TokenStream {
+ format!("compile_error!(\"fn_like_error!({})\");", args).parse().unwrap()
+}
+
+#[proc_macro]
+pub fn fn_like_clone_tokens(args: TokenStream) -> TokenStream {
+ clone_stream(args)
+}
+
+/// Emits one literal of each constructible kind, to exercise literal
+/// round-tripping through the proc-macro bridge.
+#[proc_macro]
+pub fn fn_like_mk_literals(_args: TokenStream) -> TokenStream {
+    let trees: Vec<TokenTree> = vec![
+        TokenTree::from(Literal::byte_string(b"byte_string")),
+        TokenTree::from(Literal::character('c')),
+        TokenTree::from(Literal::string("string")),
+        // as of 2022-07-21, there's no method on `Literal` to build a raw
+        // string or a raw byte string
+        TokenTree::from(Literal::f64_suffixed(3.14)),
+        TokenTree::from(Literal::f64_unsuffixed(3.14)),
+        TokenTree::from(Literal::i64_suffixed(123)),
+        TokenTree::from(Literal::i64_unsuffixed(123)),
+    ];
+    TokenStream::from_iter(trees)
+}
+
+#[proc_macro]
+pub fn fn_like_mk_idents(_args: TokenStream) -> TokenStream {
+ let trees: Vec<TokenTree> = vec![
+ TokenTree::from(Ident::new("standard", Span::call_site())),
+ TokenTree::from(Ident::new_raw("raw", Span::call_site())),
+ ];
+ TokenStream::from_iter(trees)
+}
+
+#[proc_macro_attribute]
+pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+
+#[proc_macro_attribute]
+pub fn attr_panic(args: TokenStream, item: TokenStream) -> TokenStream {
+ panic!("#[attr_panic {}] {}", args, item);
+}
+
+#[proc_macro_attribute]
+pub fn attr_error(args: TokenStream, item: TokenStream) -> TokenStream {
+ format!("compile_error!(\"#[attr_error({})] {}\");", args, item).parse().unwrap()
+}
+
+#[proc_macro_derive(DeriveEmpty)]
+pub fn derive_empty(_item: TokenStream) -> TokenStream {
+ TokenStream::new()
+}
+
+#[proc_macro_derive(DerivePanic)]
+pub fn derive_panic(item: TokenStream) -> TokenStream {
+ panic!("#[derive(DerivePanic)] {}", item);
+}
+
+#[proc_macro_derive(DeriveError)]
+pub fn derive_error(item: TokenStream) -> TokenStream {
+ format!("compile_error!(\"#[derive(DeriveError)] {}\");", item).parse().unwrap()
+}
+
+/// Rebuilds every tree in `ts` via `clone_tree`, so the output goes through
+/// the bridge's constructor paths instead of being a plain clone.
+fn clone_stream(ts: TokenStream) -> TokenStream {
+    ts.into_iter().map(clone_tree).collect()
+}
+
+/// Reconstructs a single token tree from scratch while preserving spans, so
+/// each variant's constructor (Group/Ident/Punct/Literal) is exercised.
+fn clone_tree(t: TokenTree) -> TokenTree {
+    match t {
+        TokenTree::Group(orig) => {
+            let mut new = Group::new(orig.delimiter(), clone_stream(orig.stream()));
+            new.set_span(orig.span());
+            TokenTree::Group(new)
+        }
+        TokenTree::Ident(orig) => {
+            // Raw identifiers stringify as `r#name`; strip the prefix and
+            // rebuild via `new_raw` so rawness survives the round trip.
+            let s = orig.to_string();
+            if let Some(rest) = s.strip_prefix("r#") {
+                TokenTree::Ident(Ident::new_raw(rest, orig.span()))
+            } else {
+                TokenTree::Ident(Ident::new(&s, orig.span()))
+            }
+        }
+        TokenTree::Punct(orig) => {
+            let mut new = Punct::new(orig.as_char(), orig.spacing());
+            new.set_span(orig.span());
+            TokenTree::Punct(new)
+        }
+        TokenTree::Literal(orig) => {
+            // this goes through `literal_from_str` as of 2022-07-18, cf.
+            // https://github.com/rust-lang/rust/commit/b34c79f8f1ef4d0149ad4bf77e1759c07a9a01a8
+            let mut new: Literal = orig.to_string().parse().unwrap();
+            new.set_span(orig.span());
+            TokenTree::Literal(new)
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
new file mode 100644
index 000000000..6d57bc81e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
@@ -0,0 +1,6 @@
+//! Exports a few trivial procedural macros for testing.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub static PROC_MACRO_TEST_LOCATION: &str =
+ include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt"));
diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml
new file mode 100644
index 000000000..0b78a45a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml
@@ -0,0 +1,31 @@
+[package]
+name = "profile"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+once_cell = "1.12.0"
+cfg-if = "1.0.0"
+libc = "0.2.126"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+countme = { version = "3.0.1", features = ["enable"] }
+jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
+
+[target.'cfg(target_os = "linux")'.dependencies]
+perf-event = "0.4.7"
+
+[target.'cfg(windows)'.dependencies]
+winapi = { version = "0.3.9", features = ["processthreadsapi", "psapi"] }
+
+[features]
+cpu_profiler = []
+jemalloc = ["jemalloc-ctl"]
+
+# Uncomment to enable for the whole crate graph
+# default = [ "cpu_profiler" ]
diff --git a/src/tools/rust-analyzer/crates/profile/src/google_cpu_profiler.rs b/src/tools/rust-analyzer/crates/profile/src/google_cpu_profiler.rs
new file mode 100644
index 000000000..cae6caeaa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/google_cpu_profiler.rs
@@ -0,0 +1,44 @@
+//! https://github.com/gperftools/gperftools
+
+use std::{
+ ffi::CString,
+ os::raw::c_char,
+ path::Path,
+ sync::atomic::{AtomicUsize, Ordering},
+};
+
+#[link(name = "profiler")]
+#[allow(non_snake_case)]
+extern "C" {
+ fn ProfilerStart(fname: *const c_char) -> i32;
+ fn ProfilerStop();
+}
+
+const OFF: usize = 0;
+const ON: usize = 1;
+const PENDING: usize = 2;
+
+/// Atomically advances the profiler state machine (OFF/ON/PENDING) from
+/// `current` to `new`; returns `false` when the state was not `current`.
+fn transition(current: usize, new: usize) -> bool {
+    static STATE: AtomicUsize = AtomicUsize::new(OFF);
+
+    STATE.compare_exchange(current, new, Ordering::SeqCst, Ordering::SeqCst).is_ok()
+}
+
+pub(crate) fn start(path: &Path) {
+ if !transition(OFF, PENDING) {
+ panic!("profiler already started");
+ }
+ let path = CString::new(path.display().to_string()).unwrap();
+ if unsafe { ProfilerStart(path.as_ptr()) } == 0 {
+ panic!("profiler failed to start")
+ }
+ assert!(transition(PENDING, ON));
+}
+
+pub(crate) fn stop() {
+ if !transition(ON, PENDING) {
+ panic!("profiler is not started")
+ }
+ unsafe { ProfilerStop() };
+ assert!(transition(PENDING, OFF));
+}
diff --git a/src/tools/rust-analyzer/crates/profile/src/hprof.rs b/src/tools/rust-analyzer/crates/profile/src/hprof.rs
new file mode 100644
index 000000000..b562c193e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/hprof.rs
@@ -0,0 +1,326 @@
+//! Simple hierarchical profiler
+use std::{
+ cell::RefCell,
+ collections::{BTreeMap, HashSet},
+ env, fmt,
+ io::{stderr, Write},
+ sync::{
+ atomic::{AtomicBool, Ordering},
+ RwLock,
+ },
+ time::{Duration, Instant},
+};
+
+use once_cell::sync::Lazy;
+
+use crate::tree::{Idx, Tree};
+
+/// Filtering syntax
+/// env RA_PROFILE=* // dump everything
+/// env RA_PROFILE=foo|bar|baz // enabled only selected entries
+/// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
+pub fn init() {
+ countme::enable(env::var("RA_COUNT").is_ok());
+ let spec = env::var("RA_PROFILE").unwrap_or_default();
+ init_from(&spec);
+}
+
+pub fn init_from(spec: &str) {
+ let filter = if spec.is_empty() { Filter::disabled() } else { Filter::from_spec(spec) };
+ filter.install();
+}
+
+type Label = &'static str;
+
+/// This function starts a profiling scope in the current execution stack with a given description.
+/// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop.
+/// It supports nested profiling scopes in case when this function is invoked multiple times at the execution stack.
+/// In this case the profiling information will be nested at the output.
+/// Profiling information is being printed in the stderr.
+///
+/// # Example
+/// ```
+/// profile::init_from("profile1|profile2@2");
+/// profiling_function1();
+///
+/// fn profiling_function1() {
+/// let _p = profile::span("profile1");
+/// profiling_function2();
+/// }
+///
+/// fn profiling_function2() {
+/// let _p = profile::span("profile2");
+/// }
+/// ```
+/// This will print in the stderr the following:
+/// ```text
+///  0ms - profile1
+///      0ms - profile2
+/// ```
+#[inline]
+pub fn span(label: Label) -> ProfileSpan {
+ debug_assert!(!label.is_empty());
+
+ let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
+ if enabled && with_profile_stack(|stack| stack.push(label)) {
+ ProfileSpan(Some(ProfilerImpl { label, detail: None }))
+ } else {
+ ProfileSpan(None)
+ }
+}
+
+#[inline]
+pub fn heartbeat_span() -> HeartbeatSpan {
+ let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
+ HeartbeatSpan::new(enabled)
+}
+
+#[inline]
+pub fn heartbeat() {
+ let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
+ if enabled {
+ with_profile_stack(|it| it.heartbeat(1));
+ }
+}
+
+pub struct ProfileSpan(Option<ProfilerImpl>);
+
+struct ProfilerImpl {
+ label: Label,
+ detail: Option<String>,
+}
+
+impl ProfileSpan {
+ pub fn detail(mut self, detail: impl FnOnce() -> String) -> ProfileSpan {
+ if let Some(profiler) = &mut self.0 {
+ profiler.detail = Some(detail());
+ }
+ self
+ }
+}
+
+impl Drop for ProfilerImpl {
+ #[inline]
+ fn drop(&mut self) {
+ with_profile_stack(|it| it.pop(self.label, self.detail.take()));
+ }
+}
+
+pub struct HeartbeatSpan {
+ enabled: bool,
+}
+
+impl HeartbeatSpan {
+ #[inline]
+ pub fn new(enabled: bool) -> Self {
+ if enabled {
+ with_profile_stack(|it| it.heartbeats(true));
+ }
+ Self { enabled }
+ }
+}
+
+impl Drop for HeartbeatSpan {
+ fn drop(&mut self) {
+ if self.enabled {
+ with_profile_stack(|it| it.heartbeats(false));
+ }
+ }
+}
+
+static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false);
+static FILTER: Lazy<RwLock<Filter>> = Lazy::new(Default::default);
+
+fn with_profile_stack<T>(f: impl FnOnce(&mut ProfileStack) -> T) -> T {
+ thread_local!(static STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new()));
+ STACK.with(|it| f(&mut *it.borrow_mut()))
+}
+
+#[derive(Default, Clone, Debug)]
+struct Filter {
+ depth: usize,
+ allowed: HashSet<String>,
+ longer_than: Duration,
+ heartbeat_longer_than: Duration,
+ version: usize,
+}
+
+impl Filter {
+ fn disabled() -> Filter {
+ Filter::default()
+ }
+
+    /// Parses a spec of the form `labels[@depth][>millis]`, e.g. `foo|bar@3>10`.
+    /// A label list of `*` means "no label filtering".
+    fn from_spec(mut spec: &str) -> Filter {
+        // Trailing `>N`: only report spans that take longer than N milliseconds.
+        let longer_than = if let Some(idx) = spec.rfind('>') {
+            let longer_than = spec[idx + 1..].parse().expect("invalid profile longer_than");
+            spec = &spec[..idx];
+            Duration::from_millis(longer_than)
+        } else {
+            Duration::new(0, 0)
+        };
+        let heartbeat_longer_than = longer_than;
+
+        // `@N`: cap the nesting depth that gets recorded (default effectively unbounded).
+        let depth = if let Some(idx) = spec.rfind('@') {
+            let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth");
+            spec = &spec[..idx];
+            depth
+        } else {
+            999
+        };
+        let allowed =
+            if spec == "*" { HashSet::new() } else { spec.split('|').map(String::from).collect() };
+        Filter { depth, allowed, longer_than, heartbeat_longer_than, version: 0 }
+    }
+
+ fn install(mut self) {
+ PROFILING_ENABLED.store(self.depth > 0, Ordering::SeqCst);
+ let mut old = FILTER.write().unwrap();
+ self.version = old.version + 1;
+ *old = self;
+ }
+}
+
+struct ProfileStack {
+ frames: Vec<Frame>,
+ filter: Filter,
+ messages: Tree<Message>,
+ heartbeats: bool,
+}
+
+struct Frame {
+ t: Instant,
+ heartbeats: u32,
+}
+
+#[derive(Default)]
+struct Message {
+ duration: Duration,
+ label: Label,
+ detail: Option<String>,
+}
+
+impl ProfileStack {
+ fn new() -> ProfileStack {
+ ProfileStack {
+ frames: Vec::new(),
+ messages: Tree::default(),
+ filter: Default::default(),
+ heartbeats: false,
+ }
+ }
+
+    /// Enters a new profiling frame for `label`; returns `false` (recording
+    /// nothing) when the label is filtered out or the depth limit is exceeded.
+    fn push(&mut self, label: Label) -> bool {
+        if self.frames.is_empty() {
+            // Re-read the global filter only at the start of a top-level span,
+            // and only when its version changed, keeping the hot path cheap.
+            if let Ok(f) = FILTER.try_read() {
+                if f.version > self.filter.version {
+                    self.filter = f.clone();
+                }
+            };
+        }
+        if self.frames.len() > self.filter.depth {
+            return false;
+        }
+        let allowed = &self.filter.allowed;
+        // An empty allow-list (spec `*`) permits every label.
+        if self.frames.is_empty() && !allowed.is_empty() && !allowed.contains(label) {
+            return false;
+        }
+
+        self.frames.push(Frame { t: Instant::now(), heartbeats: 0 });
+        self.messages.start();
+        true
+    }
+
+    /// Leaves the current frame, records its duration, and — once the stack is
+    /// empty again — prints the collected tree if it exceeded the filter's
+    /// `longer_than` threshold.
+    fn pop(&mut self, label: Label, detail: Option<String>) {
+        let frame = self.frames.pop().unwrap();
+        let duration = frame.t.elapsed();
+
+        if self.heartbeats {
+            // Propagate this frame's heartbeats to the parent, and warn when
+            // the average time per heartbeat exceeds the configured threshold.
+            self.heartbeat(frame.heartbeats);
+            let avg_span = duration / (frame.heartbeats + 1);
+            if avg_span > self.filter.heartbeat_longer_than {
+                eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration);
+            }
+        }
+
+        self.messages.finish(Message { duration, label, detail });
+        if self.frames.is_empty() {
+            let longer_than = self.filter.longer_than;
+            // Convert to millis for comparison to avoid problems with rounding
+            // (otherwise we could print `0ms` despite user's `>0` filter when
+            // `duration` is just a few nanos).
+            if duration.as_millis() > longer_than.as_millis() {
+                if let Some(root) = self.messages.root() {
+                    print(&self.messages, root, 0, longer_than, &mut stderr().lock());
+                }
+            }
+            self.messages.clear();
+        }
+    }
+
+ fn heartbeats(&mut self, yes: bool) {
+ self.heartbeats = yes;
+ }
+ fn heartbeat(&mut self, n: u32) {
+ if let Some(frame) = self.frames.last_mut() {
+ frame.heartbeats += n;
+ }
+ }
+}
+
+/// Recursively writes the profile tree rooted at `curr` to `out`, indenting by
+/// `level`. Children whose duration does not exceed `longer_than` are folded
+/// into per-label "(N calls)" summary lines instead of being printed one by one.
+fn print(
+    tree: &Tree<Message>,
+    curr: Idx<Message>,
+    level: u32,
+    longer_than: Duration,
+    out: &mut impl Write,
+) {
+    let current_indent = "    ".repeat(level as usize);
+    let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {}", it)).unwrap_or_default();
+    writeln!(
+        out,
+        "{}{} - {}{}",
+        current_indent,
+        ms(tree[curr].duration),
+        tree[curr].label,
+        detail,
+    )
+    .expect("printing profiling info");
+
+    let mut accounted_for = Duration::default();
+    let mut short_children = BTreeMap::new(); // Use `BTreeMap` to get deterministic output.
+    for child in tree.children(curr) {
+        accounted_for += tree[child].duration;
+
+        if tree[child].duration.as_millis() > longer_than.as_millis() {
+            print(tree, child, level + 1, longer_than, out);
+        } else {
+            let (total_duration, cnt) =
+                short_children.entry(tree[child].label).or_insert((Duration::default(), 0));
+            *total_duration += tree[child].duration;
+            *cnt += 1;
+        }
+    }
+
+    for (child_msg, (duration, count)) in &short_children {
+        writeln!(out, "    {}{} - {} ({} calls)", current_indent, ms(*duration), child_msg, count)
+            .expect("printing profiling info");
+    }
+
+    // Any time the children do not explain is reported as `???`.
+    let unaccounted = tree[curr].duration - accounted_for;
+    if tree.children(curr).next().is_some() && unaccounted > longer_than {
+        writeln!(out, "    {}{} - ???", current_indent, ms(unaccounted))
+            .expect("printing profiling info");
+    }
+}
+
+#[allow(non_camel_case_types)]
+struct ms(Duration);
+
+impl fmt::Display for ms {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.0.as_millis() {
+ 0 => f.write_str(" 0 "),
+ n => write!(f, "{:5}ms", n),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/profile/src/lib.rs b/src/tools/rust-analyzer/crates/profile/src/lib.rs
new file mode 100644
index 000000000..00f7952e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/lib.rs
@@ -0,0 +1,130 @@
+//! A collection of tools for profiling rust-analyzer.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod stop_watch;
+mod memory_usage;
+#[cfg(feature = "cpu_profiler")]
+mod google_cpu_profiler;
+mod hprof;
+mod tree;
+
+use std::cell::RefCell;
+
+pub use crate::{
+ hprof::{heartbeat, heartbeat_span, init, init_from, span},
+ memory_usage::{Bytes, MemoryUsage},
+ stop_watch::{StopWatch, StopWatchSpan},
+};
+
+pub use countme;
+/// Include `_c: Count<Self>` field in important structs to count them.
+///
+/// To view the counts, run with `RA_COUNT=1`. The overhead of disabled count is
+/// almost zero.
+pub use countme::Count;
+
+thread_local!(static IN_SCOPE: RefCell<bool> = RefCell::new(false));
+
+/// Allows checking whether the current code is within some dynamic scope; this
+/// can be useful during debugging to figure out why a function is called.
+pub struct Scope {
+ prev: bool,
+}
+
+impl Scope {
+ #[must_use]
+ pub fn enter() -> Scope {
+ let prev = IN_SCOPE.with(|slot| std::mem::replace(&mut *slot.borrow_mut(), true));
+ Scope { prev }
+ }
+ pub fn is_active() -> bool {
+ IN_SCOPE.with(|slot| *slot.borrow())
+ }
+}
+
+impl Drop for Scope {
+ fn drop(&mut self) {
+ IN_SCOPE.with(|slot| *slot.borrow_mut() = self.prev);
+ }
+}
+
+/// A wrapper around google_cpu_profiler.
+///
+/// Usage:
+/// 1. Install gperftools (<https://github.com/gperftools/gperftools>), probably packaged with your Linux distro.
+/// 2. Build with `cpu_profiler` feature.
+/// 3. Run the code, the *raw* output would be in the `./out.profile` file.
+/// 4. Install pprof for visualization (<https://github.com/google/pprof>).
+/// 5. Bump sampling frequency to once per ms: `export CPUPROFILE_FREQUENCY=1000`
+/// 6. Use something like `pprof -svg target/release/rust-analyzer ./out.profile` to see the results.
+///
+/// For example, here's how I run profiling on NixOS:
+///
+/// ```bash
+/// $ bat -p shell.nix
+/// with import <nixpkgs> {};
+/// mkShell {
+/// buildInputs = [ gperftools ];
+/// shellHook = ''
+/// export LD_LIBRARY_PATH="${gperftools}/lib:"
+/// '';
+/// }
+/// $ set -x CPUPROFILE_FREQUENCY 1000
+/// $ nix-shell --run 'cargo test --release --package rust-analyzer --lib -- benchmarks::benchmark_integrated_highlighting --exact --nocapture'
+/// $ pprof -svg target/release/deps/rust_analyzer-8739592dc93d63cb crates/rust-analyzer/out.profile > profile.svg
+/// ```
+///
+/// See this diff for how to profile completions:
+///
+/// <https://github.com/rust-lang/rust-analyzer/pull/5306>
+#[derive(Debug)]
+pub struct CpuSpan {
+ _private: (),
+}
+
+#[must_use]
+pub fn cpu_span() -> CpuSpan {
+ #[cfg(feature = "cpu_profiler")]
+ {
+ google_cpu_profiler::start("./out.profile".as_ref())
+ }
+
+ #[cfg(not(feature = "cpu_profiler"))]
+ {
+ eprintln!(
+ r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."#
+ );
+ }
+
+ CpuSpan { _private: () }
+}
+
+impl Drop for CpuSpan {
+ fn drop(&mut self) {
+ #[cfg(feature = "cpu_profiler")]
+ {
+ google_cpu_profiler::stop();
+ let profile_data = std::env::current_dir().unwrap().join("out.profile");
+ eprintln!("Profile data saved to:\n\n {}\n", profile_data.display());
+ let mut cmd = std::process::Command::new("pprof");
+ cmd.arg("-svg").arg(std::env::current_exe().unwrap()).arg(&profile_data);
+ let out = cmd.output();
+
+ match out {
+ Ok(out) if out.status.success() => {
+ let svg = profile_data.with_extension("svg");
+ std::fs::write(&svg, &out.stdout).unwrap();
+ eprintln!("Profile rendered to:\n\n {}\n", svg.display());
+ }
+ _ => {
+ eprintln!("Failed to run:\n\n {:?}\n", cmd);
+ }
+ }
+ }
+ }
+}
+
+pub fn memory_usage() -> MemoryUsage {
+ MemoryUsage::now()
+}
diff --git a/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs b/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs
new file mode 100644
index 000000000..ee882b4cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs
@@ -0,0 +1,127 @@
+//! Like [`std::time::Instant`], but for memory.
+//!
+//! Measures the total size of all currently allocated objects.
+use std::fmt;
+
+use cfg_if::cfg_if;
+
+#[derive(Copy, Clone)]
+pub struct MemoryUsage {
+ pub allocated: Bytes,
+}
+
+impl fmt::Display for MemoryUsage {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.allocated.fmt(f)
+ }
+}
+
+impl std::ops::Sub for MemoryUsage {
+ type Output = MemoryUsage;
+ fn sub(self, rhs: MemoryUsage) -> MemoryUsage {
+ MemoryUsage { allocated: self.allocated - rhs.allocated }
+ }
+}
+
+impl MemoryUsage {
+ pub fn now() -> MemoryUsage {
+ cfg_if! {
+ if #[cfg(all(feature = "jemalloc", not(target_env = "msvc")))] {
+ jemalloc_ctl::epoch::advance().unwrap();
+ MemoryUsage {
+ allocated: Bytes(jemalloc_ctl::stats::allocated::read().unwrap() as isize),
+ }
+ } else if #[cfg(all(target_os = "linux", target_env = "gnu"))] {
+ memusage_linux()
+ } else if #[cfg(windows)] {
+ // There doesn't seem to be an API for determining heap usage, so we try to
+ // approximate that by using the Commit Charge value.
+
+ use winapi::um::processthreadsapi::*;
+ use winapi::um::psapi::*;
+ use std::mem::{MaybeUninit, size_of};
+
+ let proc = unsafe { GetCurrentProcess() };
+ let mut mem_counters = MaybeUninit::uninit();
+ let cb = size_of::<PROCESS_MEMORY_COUNTERS>();
+ let ret = unsafe { GetProcessMemoryInfo(proc, mem_counters.as_mut_ptr(), cb as u32) };
+ assert!(ret != 0);
+
+ let usage = unsafe { mem_counters.assume_init().PagefileUsage };
+ MemoryUsage { allocated: Bytes(usage as isize) }
+ } else {
+ MemoryUsage { allocated: Bytes(0) }
+ }
+ }
+ }
+}
+
+#[cfg(all(target_os = "linux", target_env = "gnu", not(feature = "jemalloc")))]
+fn memusage_linux() -> MemoryUsage {
+    // Linux/glibc has 2 APIs for allocator introspection that we can use: mallinfo and mallinfo2.
+    // mallinfo uses `int` fields and cannot handle memory usage exceeding 2 GB.
+    // mallinfo2 is very recent, so its presence needs to be detected at runtime.
+    // Both are abysmally slow.
+
+    use std::ffi::CStr;
+    use std::sync::atomic::{AtomicUsize, Ordering};
+
+    // `1` is the "not yet resolved" sentinel; after the first call this holds
+    // either the `mallinfo2` symbol address or 0 (symbol absent).
+    static MALLINFO2: AtomicUsize = AtomicUsize::new(1);
+
+    let mut mallinfo2 = MALLINFO2.load(Ordering::Relaxed);
+    if mallinfo2 == 1 {
+        let cstr = CStr::from_bytes_with_nul(b"mallinfo2\0").unwrap();
+        mallinfo2 = unsafe { libc::dlsym(libc::RTLD_DEFAULT, cstr.as_ptr()) } as usize;
+        // NB: races don't matter here, since they'll always store the same value
+        MALLINFO2.store(mallinfo2, Ordering::Relaxed);
+    }
+
+    if mallinfo2 == 0 {
+        // mallinfo2 does not exist, use mallinfo.
+        let alloc = unsafe { libc::mallinfo() }.uordblks as isize;
+        MemoryUsage { allocated: Bytes(alloc) }
+    } else {
+        // SAFETY-relevant: `mallinfo2` is a non-null symbol address obtained
+        // from dlsym, transmuted to the matching glibc function signature.
+        let mallinfo2: fn() -> libc::mallinfo2 = unsafe { std::mem::transmute(mallinfo2) };
+        let alloc = mallinfo2().uordblks as isize;
+        MemoryUsage { allocated: Bytes(alloc) }
+    }
+}
+
+#[derive(Default, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
+pub struct Bytes(isize);
+
+impl Bytes {
+ pub fn megabytes(self) -> isize {
+ self.0 / 1024 / 1024
+ }
+}
+
+impl fmt::Display for Bytes {
+    /// Renders with a `b`/`kb`/`mb` suffix, stepping up a unit whenever the
+    /// magnitude exceeds 4096 of the current unit.
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let bytes = self.0;
+        let mut value = bytes;
+        let mut suffix = "b";
+        if value.abs() > 4096 {
+            value /= 1024;
+            suffix = "kb";
+            if value.abs() > 4096 {
+                value /= 1024;
+                suffix = "mb";
+            }
+        }
+        f.pad(&format!("{}{}", value, suffix))
+    }
+}
+
+impl std::ops::AddAssign<usize> for Bytes {
+ fn add_assign(&mut self, x: usize) {
+ self.0 += x as isize;
+ }
+}
+
+impl std::ops::Sub for Bytes {
+ type Output = Bytes;
+ fn sub(self, rhs: Bytes) -> Bytes {
+ Bytes(self.0 - rhs.0)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs b/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs
new file mode 100644
index 000000000..625832848
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs
@@ -0,0 +1,101 @@
+//! Like `std::time::Instant`, but also measures memory & CPU cycles.
+use std::{
+ fmt,
+ time::{Duration, Instant},
+};
+
+use crate::MemoryUsage;
+
+pub struct StopWatch {
+ time: Instant,
+ #[cfg(target_os = "linux")]
+ counter: Option<perf_event::Counter>,
+ memory: Option<MemoryUsage>,
+}
+
+pub struct StopWatchSpan {
+ pub time: Duration,
+ pub instructions: Option<u64>,
+ pub memory: Option<MemoryUsage>,
+}
+
+impl StopWatch {
+ pub fn start() -> StopWatch {
+ #[cfg(target_os = "linux")]
+ let counter = {
+ // When debugging rust-analyzer using rr, the perf-related syscalls cause it to abort.
+ // We allow disabling perf by setting the env var `RA_DISABLE_PERF`.
+
+ use once_cell::sync::Lazy;
+ static PERF_ENABLED: Lazy<bool> =
+ Lazy::new(|| std::env::var_os("RA_DISABLE_PERF").is_none());
+
+ if *PERF_ENABLED {
+ let mut counter = perf_event::Builder::new()
+ .build()
+ .map_err(|err| eprintln!("Failed to create perf counter: {}", err))
+ .ok();
+ if let Some(counter) = &mut counter {
+ if let Err(err) = counter.enable() {
+ eprintln!("Failed to start perf counter: {}", err)
+ }
+ }
+ counter
+ } else {
+ None
+ }
+ };
+ let time = Instant::now();
+ StopWatch {
+ time,
+ #[cfg(target_os = "linux")]
+ counter,
+ memory: None,
+ }
+ }
+ pub fn memory(mut self, yes: bool) -> StopWatch {
+ if yes {
+ self.memory = Some(MemoryUsage::now());
+ }
+ self
+ }
+ pub fn elapsed(&mut self) -> StopWatchSpan {
+ let time = self.time.elapsed();
+
+ #[cfg(target_os = "linux")]
+ let instructions = self.counter.as_mut().and_then(|it| {
+ it.read().map_err(|err| eprintln!("Failed to read perf counter: {}", err)).ok()
+ });
+ #[cfg(not(target_os = "linux"))]
+ let instructions = None;
+
+ let memory = self.memory.map(|it| MemoryUsage::now() - it);
+ StopWatchSpan { time, instructions, memory }
+ }
+}
+
+impl fmt::Display for StopWatchSpan {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{:.2?}", self.time)?;
+ if let Some(mut instructions) = self.instructions {
+ let mut prefix = "";
+ if instructions > 10000 {
+ instructions /= 1000;
+ prefix = "k";
+ }
+ if instructions > 10000 {
+ instructions /= 1000;
+ prefix = "m";
+ }
+ if instructions > 10000 {
+ instructions /= 1000;
+ prefix = "g";
+ }
+ write!(f, ", {}{}instr", instructions, prefix)?;
+ }
+ if let Some(memory) = self.memory {
+ write!(f, ", {}", memory)?;
+ }
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/profile/src/tree.rs b/src/tools/rust-analyzer/crates/profile/src/tree.rs
new file mode 100644
index 000000000..62f0c30b5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/tree.rs
@@ -0,0 +1,84 @@
+//! A simple tree implementation which tries to not allocate all over the place.
+use std::ops;
+
+use la_arena::Arena;
+
+#[derive(Default)]
+pub(crate) struct Tree<T> {
+ nodes: Arena<Node<T>>,
+ current_path: Vec<(Idx<T>, Option<Idx<T>>)>,
+}
+
+pub(crate) type Idx<T> = la_arena::Idx<Node<T>>;
+
+impl<T> Tree<T> {
+    /// Opens a new child node at the current insertion point (or a root node
+    /// when the path is empty); its payload is filled in later by `finish`.
+    pub(crate) fn start(&mut self)
+    where
+        T: Default,
+    {
+        let me = self.nodes.alloc(Node::new(T::default()));
+        if let Some((parent, last_child)) = self.current_path.last_mut() {
+            // Link `me` either as the next sibling of the previous child, or
+            // as the parent's first child if there is none yet.
+            let slot = match *last_child {
+                Some(last_child) => &mut self.nodes[last_child].next_sibling,
+                None => &mut self.nodes[*parent].first_child,
+            };
+            let prev = slot.replace(me);
+            assert!(prev.is_none());
+            *last_child = Some(me);
+        }
+
+        self.current_path.push((me, None));
+    }
+
+ pub(crate) fn finish(&mut self, data: T) {
+ let (me, _last_child) = self.current_path.pop().unwrap();
+ self.nodes[me].data = data;
+ }
+
+ pub(crate) fn root(&self) -> Option<Idx<T>> {
+ self.nodes.iter().next().map(|(idx, _)| idx)
+ }
+
+ pub(crate) fn children(&self, idx: Idx<T>) -> impl Iterator<Item = Idx<T>> + '_ {
+ NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child }
+ }
+ pub(crate) fn clear(&mut self) {
+ self.nodes.clear();
+ self.current_path.clear();
+ }
+}
+
+impl<T> ops::Index<Idx<T>> for Tree<T> {
+ type Output = T;
+ fn index(&self, index: Idx<T>) -> &T {
+ &self.nodes[index].data
+ }
+}
+
+pub(crate) struct Node<T> {
+ data: T,
+ first_child: Option<Idx<T>>,
+ next_sibling: Option<Idx<T>>,
+}
+
+impl<T> Node<T> {
+ fn new(data: T) -> Node<T> {
+ Node { data, first_child: None, next_sibling: None }
+ }
+}
+
+struct NodeIter<'a, T> {
+ nodes: &'a Arena<Node<T>>,
+ next: Option<Idx<T>>,
+}
+
+impl<'a, T> Iterator for NodeIter<'a, T> {
+ type Item = Idx<T>;
+
+ fn next(&mut self) -> Option<Idx<T>> {
+ self.next.map(|next| {
+ self.next = self.nodes[next].next_sibling;
+ next
+ })
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
new file mode 100644
index 000000000..bc75d6faa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "project-model"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+tracing = "0.1.35"
+rustc-hash = "1.1.0"
+cargo_metadata = "0.15.0"
+semver = "1.0.10"
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = "1.0.81"
+anyhow = "1.0.57"
+expect-test = "1.4.0"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+
+cfg = { path = "../cfg", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
new file mode 100644
index 000000000..ee7f8339a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
@@ -0,0 +1,238 @@
+//! Workspace information we get from cargo consists of two pieces. The first is
+//! the output of `cargo metadata`. The second is the output of running
+//! `build.rs` files (`OUT_DIR` env var, extra cfg flags) and compiling proc
+//! macro.
+//!
+//! This module implements this second part. We use "build script" terminology
+//! here, but it covers procedural macros as well.
+
+use std::{cell::RefCell, io, path::PathBuf, process::Command};
+
+use cargo_metadata::{camino::Utf8Path, Message};
+use la_arena::ArenaMap;
+use paths::AbsPathBuf;
+use rustc_hash::FxHashMap;
+use serde::Deserialize;
+
+use crate::{cfg_flag::CfgFlag, CargoConfig, CargoWorkspace, Package};
+
+/// Collected results of running the workspace's build scripts (and compiling
+/// its proc macros) — see the module docs above.
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+pub struct WorkspaceBuildScripts {
+    // Per-package output; `None` when the package produced no build-script data.
+    outputs: ArenaMap<Package, Option<BuildScriptOutput>>,
+    // Accumulated diagnostics/stderr when the build-script run failed.
+    error: Option<String>,
+}
+
+/// Everything a single package's build script (or proc-macro compilation)
+/// contributes to the crate graph.
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub(crate) struct BuildScriptOutput {
+    /// List of config flags defined by this package's build script.
+    pub(crate) cfgs: Vec<CfgFlag>,
+    /// List of cargo-related environment variables with their value.
+    ///
+    /// If the package has a build script which defines environment variables,
+    /// they can also be found here.
+    pub(crate) envs: Vec<(String, String)>,
+    /// Directory where a build script might place its output.
+    pub(crate) out_dir: Option<AbsPathBuf>,
+    /// Path to the proc-macro library file if this package exposes proc-macros.
+    pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+}
+
+impl WorkspaceBuildScripts {
+    /// Builds the command used to gather build-script output: the
+    /// user-configured override if any, otherwise a JSON-message
+    /// `cargo check` across the whole workspace.
+    fn build_command(config: &CargoConfig) -> Command {
+        // An explicitly configured command replaces the default entirely.
+        if let Some([program, args @ ..]) = config.run_build_script_command.as_deref() {
+            let mut cmd = Command::new(program);
+            cmd.args(args);
+            return cmd;
+        }
+
+        let mut cmd = Command::new(toolchain::cargo());
+        cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);
+
+        // --all-targets includes tests, benches and examples in addition to the
+        // default lib and bins. This is an independent concept from the --targets
+        // flag below.
+        cmd.arg("--all-targets");
+
+        if let Some(target) = &config.target {
+            cmd.args(&["--target", target]);
+        }
+
+        if config.all_features {
+            cmd.arg("--all-features");
+        } else {
+            if config.no_default_features {
+                cmd.arg("--no-default-features");
+            }
+            match config.features.as_slice() {
+                [] => {}
+                features => {
+                    cmd.arg("--features");
+                    cmd.arg(features.join(" "));
+                }
+            }
+        }
+
+        cmd
+    }
+
+    /// Runs the `cargo check` (or user-configured) command for `workspace`,
+    /// streaming its JSON messages to collect per-package build-script output:
+    /// cfgs, environment variables, `OUT_DIR` and proc-macro dylib paths.
+    ///
+    /// Compiler diagnostics and stderr are accumulated; a failing command is
+    /// recorded in the returned value's `error` field rather than returned as
+    /// an `Err` (only spawn/IO failures error out).
+    pub(crate) fn run(
+        config: &CargoConfig,
+        workspace: &CargoWorkspace,
+        progress: &dyn Fn(String),
+    ) -> io::Result<WorkspaceBuildScripts> {
+        let mut cmd = Self::build_command(config);
+
+        if config.wrap_rustc_in_build_scripts {
+            // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
+            // that to compile only proc macros and build scripts during the initial
+            // `cargo check`.
+            let myself = std::env::current_exe()?;
+            cmd.env("RUSTC_WRAPPER", myself);
+            cmd.env("RA_RUSTC_WRAPPER", "1");
+        }
+
+        cmd.current_dir(workspace.workspace_root());
+
+        let mut res = WorkspaceBuildScripts::default();
+        let outputs = &mut res.outputs;
+        // NB: Cargo.toml could have been modified between `cargo metadata` and
+        // `cargo check`. We shouldn't assume that package ids we see here are
+        // exactly those from `config`.
+        let mut by_id: FxHashMap<String, Package> = FxHashMap::default();
+        for package in workspace.packages() {
+            outputs.insert(package, None);
+            by_id.insert(workspace[package].id.clone(), package);
+        }
+
+        // Errors from cargo's stderr and from compiler diagnostics are
+        // accumulated here; RefCell because both stream callbacks push into it.
+        let errors = RefCell::new(String::new());
+        let push_err = |err: &str| {
+            let mut e = errors.borrow_mut();
+            e.push_str(err);
+            e.push('\n');
+        };
+
+        tracing::info!("Running build scripts: {:?}", cmd);
+        let output = stdx::process::spawn_with_streaming_output(
+            cmd,
+            &mut |line| {
+                // Copy-pasted from existing cargo_metadata. It seems like we
+                // should be using serde_stacker here?
+                let mut deserializer = serde_json::Deserializer::from_str(line);
+                deserializer.disable_recursion_limit();
+                let message = Message::deserialize(&mut deserializer)
+                    .unwrap_or_else(|_| Message::TextLine(line.to_string()));
+
+                match message {
+                    Message::BuildScriptExecuted(message) => {
+                        let package = match by_id.get(&message.package_id.repr) {
+                            Some(&it) => it,
+                            None => return,
+                        };
+                        let cfgs = {
+                            let mut acc = Vec::new();
+                            for cfg in message.cfgs {
+                                match cfg.parse::<CfgFlag>() {
+                                    Ok(it) => acc.push(it),
+                                    Err(err) => {
+                                        push_err(&format!(
+                                            "invalid cfg from cargo-metadata: {}",
+                                            err
+                                        ));
+                                        return;
+                                    }
+                                };
+                            }
+                            acc
+                        };
+                        // cargo_metadata crate returns default (empty) path for
+                        // older cargos, which is not absolute, so work around that.
+                        let out_dir = message.out_dir.into_os_string();
+                        if !out_dir.is_empty() {
+                            let data = outputs[package].get_or_insert_with(Default::default);
+                            data.out_dir = Some(AbsPathBuf::assert(PathBuf::from(out_dir)));
+                            data.cfgs = cfgs;
+                        }
+                        if !message.env.is_empty() {
+                            outputs[package].get_or_insert_with(Default::default).envs =
+                                message.env;
+                        }
+                    }
+                    Message::CompilerArtifact(message) => {
+                        let package = match by_id.get(&message.package_id.repr) {
+                            Some(it) => *it,
+                            None => return,
+                        };
+
+                        progress(format!("metadata {}", message.target.name));
+
+                        if message.target.kind.iter().any(|k| k == "proc-macro") {
+                            // Skip rmeta file
+                            if let Some(filename) =
+                                message.filenames.iter().find(|name| is_dylib(name))
+                            {
+                                let filename = AbsPathBuf::assert(PathBuf::from(&filename));
+                                outputs[package]
+                                    .get_or_insert_with(Default::default)
+                                    .proc_macro_dylib_path = Some(filename);
+                            }
+                        }
+                    }
+                    Message::CompilerMessage(message) => {
+                        progress(message.target.name);
+
+                        if let Some(diag) = message.message.rendered.as_deref() {
+                            push_err(diag);
+                        }
+                    }
+                    Message::BuildFinished(_) => {}
+                    Message::TextLine(_) => {}
+                    _ => {}
+                }
+            },
+            &mut |line| {
+                push_err(line);
+            },
+        )?;
+
+        // Log what we collected and expose OUT_DIR through the package env.
+        for package in workspace.packages() {
+            if let Some(package_build_data) = &mut outputs[package] {
+                tracing::info!(
+                    "{}: {:?}",
+                    workspace[package].manifest.parent().display(),
+                    package_build_data,
+                );
+                // inject_cargo_env(package, package_build_data);
+                if let Some(out_dir) = &package_build_data.out_dir {
+                    // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
+                    if let Some(out_dir) = out_dir.as_os_str().to_str().map(|s| s.to_owned()) {
+                        package_build_data.envs.push(("OUT_DIR".to_string(), out_dir));
+                    }
+                }
+            }
+        }
+
+        let mut errors = errors.into_inner();
+        if !output.status.success() {
+            if errors.is_empty() {
+                errors = "cargo check failed".to_string();
+            }
+            res.error = Some(errors);
+        }
+
+        Ok(res)
+    }
+
+    /// Accumulated diagnostics if the build-script run failed; `None` on success.
+    pub fn error(&self) -> Option<&str> {
+        match &self.error {
+            Some(it) => Some(it.as_str()),
+            None => None,
+        }
+    }
+
+    /// Returns the build-script output recorded for `idx`, if any.
+    pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> {
+        self.outputs.get(idx).and_then(|output| output.as_ref())
+    }
+}
+
+// FIXME: Find a better way to know if it is a dylib.
+fn is_dylib(path: &Utf8Path) -> bool {
+    path.extension()
+        .map(|ext| ext.to_string().to_lowercase())
+        .map_or(false, |ext| matches!(ext.as_str(), "dll" | "dylib" | "so"))
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
new file mode 100644
index 000000000..597880c2c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -0,0 +1,504 @@
+//! See [`CargoWorkspace`].
+
+use std::iter;
+use std::path::PathBuf;
+use std::{ops, process::Command};
+
+use anyhow::{Context, Result};
+use base_db::Edition;
+use cargo_metadata::{CargoOpt, MetadataCommand};
+use la_arena::{Arena, Idx};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
+use serde::Deserialize;
+use serde_json::from_value;
+
+use crate::CfgOverrides;
+use crate::{utf8_stdout, ManifestPath};
+
+/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
+/// workspace. It pretty closely mirrors `cargo metadata` output.
+///
+/// Note that internally, rust analyzer uses a different structure:
+/// `CrateGraph`. `CrateGraph` is lower-level: it knows only about the crates,
+/// while this knows about `Packages` & `Targets`: purely cargo-related
+/// concepts.
+///
+/// We use absolute paths here, `cargo metadata` guarantees to always produce
+/// abs paths.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct CargoWorkspace {
+    // All packages from the `cargo metadata` output, arena-allocated.
+    packages: Arena<PackageData>,
+    // All targets of all packages, arena-allocated.
+    targets: Arena<TargetData>,
+    // Absolute path of the workspace root directory.
+    workspace_root: AbsPathBuf,
+}
+
+impl ops::Index<Package> for CargoWorkspace {
+    type Output = PackageData;
+
+    /// Looks up a package's data by its arena index.
+    fn index(&self, index: Package) -> &PackageData {
+        let CargoWorkspace { packages, .. } = self;
+        &packages[index]
+    }
+}
+
+impl ops::Index<Target> for CargoWorkspace {
+    type Output = TargetData;
+
+    /// Looks up a target's data by its arena index.
+    fn index(&self, index: Target) -> &TargetData {
+        let CargoWorkspace { targets, .. } = self;
+        &targets[index]
+    }
+}
+
+/// Describes how to set the rustc source directory (used by the
+/// `rustc_source` option of [`CargoConfig`] for `rustc_private` work).
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum RustcSource {
+    /// Explicit path for the rustc source directory.
+    Path(AbsPathBuf),
+    /// Try to automatically detect where the rustc source directory is.
+    Discover,
+}
+
+/// Crates to disable `#[cfg(test)]` on.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum UnsetTestCrates {
+    /// Keep `#[cfg(test)]` enabled everywhere.
+    None,
+    /// Disable `#[cfg(test)]` only for the named crates.
+    Only(Vec<String>),
+    /// Disable `#[cfg(test)]` for every crate.
+    All,
+}
+
+impl Default for UnsetTestCrates {
+    /// By default, no crate has `#[cfg(test)]` disabled.
+    fn default() -> Self {
+        UnsetTestCrates::None
+    }
+}
+
+#[derive(Default, Clone, Debug, PartialEq, Eq)]
+pub struct CargoConfig {
+    /// Do not activate the `default` feature.
+    pub no_default_features: bool,
+
+    /// Activate all available features
+    pub all_features: bool,
+
+    /// List of features to activate.
+    /// This will be ignored if `cargo_all_features` is true.
+    pub features: Vec<String>,
+
+    /// rustc target
+    pub target: Option<String>,
+
+    /// Don't load sysroot crates (`std`, `core` & friends). Might be useful
+    /// when debugging isolated issues.
+    pub no_sysroot: bool,
+
+    /// rustc private crate source
+    pub rustc_source: Option<RustcSource>,
+
+    /// crates to disable `#[cfg(test)]` on
+    pub unset_test_crates: UnsetTestCrates,
+
+    /// Set `RUSTC_WRAPPER=<rust-analyzer>` when running build scripts so that
+    /// only proc macros and build scripts are actually compiled (see
+    /// `WorkspaceBuildScripts::run`).
+    pub wrap_rustc_in_build_scripts: bool,
+
+    /// Overrides the command used for running build scripts; when `None`, a
+    /// default `cargo check` invocation is used.
+    pub run_build_script_command: Option<Vec<String>>,
+}
+
+impl CargoConfig {
+    /// Translates the `unset_test_crates` setting into concrete cfg overrides
+    /// that strip the `test` cfg atom from the selected crates.
+    pub fn cfg_overrides(&self) -> CfgOverrides {
+        // The diff that removes `cfg(test)`; built lazily per use site.
+        let disable_test =
+            || cfg::CfgDiff::new(Vec::new(), vec![cfg::CfgAtom::Flag("test".into())]).unwrap();
+        match &self.unset_test_crates {
+            UnsetTestCrates::None => CfgOverrides::Selective(iter::empty().collect()),
+            UnsetTestCrates::Only(crates) => CfgOverrides::Selective(
+                crates.iter().cloned().zip(iter::repeat_with(disable_test)).collect(),
+            ),
+            UnsetTestCrates::All => CfgOverrides::Wildcard(disable_test()),
+        }
+    }
+}
+
+/// Index of a [`PackageData`] within a [`CargoWorkspace`].
+pub type Package = Idx<PackageData>;
+
+/// Index of a [`TargetData`] within a [`CargoWorkspace`].
+pub type Target = Idx<TargetData>;
+
+/// Information associated with a cargo crate
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct PackageData {
+    /// Version given in the `Cargo.toml`
+    pub version: semver::Version,
+    /// Name as given in the `Cargo.toml`
+    pub name: String,
+    /// Repository as given in the `Cargo.toml`
+    pub repository: Option<String>,
+    /// Path containing the `Cargo.toml`
+    pub manifest: ManifestPath,
+    /// Targets provided by the crate (lib, bin, example, test, ...)
+    pub targets: Vec<Target>,
+    /// Does this package come from the local filesystem (and is editable)?
+    pub is_local: bool,
+    /// Whether this package is a member of the workspace
+    pub is_member: bool,
+    /// List of packages this package depends on
+    pub dependencies: Vec<PackageDependency>,
+    /// Rust edition for this package
+    pub edition: Edition,
+    /// Features provided by the crate, mapped to the features required by that feature.
+    pub features: FxHashMap<String, Vec<String>>,
+    /// List of features enabled on this package
+    pub active_features: Vec<String>,
+    /// String representation of package id
+    pub id: String,
+    /// The contents of [package.metadata.rust-analyzer]
+    pub metadata: RustAnalyzerPackageMetaData,
+}
+
+/// Deserialized contents of `[package.metadata.rust-analyzer]` in `Cargo.toml`.
+#[derive(Deserialize, Default, Debug, Clone, Eq, PartialEq)]
+pub struct RustAnalyzerPackageMetaData {
+    // Presumably marks packages that build against rustc's private crates
+    // (together with `CargoConfig::rustc_source`) — confirm against consumers.
+    pub rustc_private: bool,
+}
+
+/// An edge in the package dependency graph.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct PackageDependency {
+    /// The package depended upon.
+    pub pkg: Package,
+    /// Name of the dependency as it appears in cargo's resolve graph.
+    pub name: String,
+    /// Which targets of the depending package see this dependency.
+    pub kind: DepKind,
+}
+
+/// How a dependency is visible to the targets of the depending package.
+#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
+pub enum DepKind {
+    /// Available to the library, binary, and dev targets in the package (but not the build script).
+    Normal,
+    /// Available only to test and bench targets (and the library target, when built with `cfg(test)`).
+    Dev,
+    /// Available only to the build script target.
+    Build,
+}
+
+impl DepKind {
+    /// Yields the distinct dependency kinds in `list`, sorted and deduplicated.
+    /// An empty list counts as a single `Normal` dependency; unknown kinds are
+    /// silently skipped.
+    fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> + '_ {
+        let mut dep_kinds: Vec<_> = list
+            .iter()
+            .filter_map(|info| match info.kind {
+                cargo_metadata::DependencyKind::Normal => Some(Self::Normal),
+                cargo_metadata::DependencyKind::Development => Some(Self::Dev),
+                cargo_metadata::DependencyKind::Build => Some(Self::Build),
+                cargo_metadata::DependencyKind::Unknown => None,
+            })
+            .collect();
+        if list.is_empty() {
+            dep_kinds.push(Self::Normal);
+        }
+        dep_kinds.sort_unstable();
+        dep_kinds.dedup();
+        dep_kinds.into_iter()
+    }
+}
+
+/// Information associated with a package's target
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct TargetData {
+    /// Package that provided this target
+    pub package: Package,
+    /// Name as given in the `Cargo.toml` or generated from the file name
+    pub name: String,
+    /// Path to the main source file of the target (cargo's `src_path`)
+    pub root: AbsPathBuf,
+    /// Kind of target
+    pub kind: TargetKind,
+    /// Is this target a proc-macro
+    pub is_proc_macro: bool,
+    /// Required features of the target without which it won't build
+    pub required_features: Vec<String>,
+}
+
+/// Kind of a cargo build target.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum TargetKind {
+    Bin,
+    /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...).
+    Lib,
+    Example,
+    Test,
+    Bench,
+    /// A `build.rs` (`custom-build`) target.
+    BuildScript,
+    /// None of the kind strings were recognised.
+    Other,
+}
+
+impl TargetKind {
+    /// Maps cargo's target `kind` strings to a [`TargetKind`], taking the
+    /// first recognised entry and falling back to `Other`.
+    fn new(kinds: &[String]) -> TargetKind {
+        kinds
+            .iter()
+            .find_map(|kind| match kind.as_str() {
+                "bin" => Some(TargetKind::Bin),
+                "test" => Some(TargetKind::Test),
+                "bench" => Some(TargetKind::Bench),
+                "example" => Some(TargetKind::Example),
+                "custom-build" => Some(TargetKind::BuildScript),
+                "proc-macro" => Some(TargetKind::Lib),
+                other if other.contains("lib") => Some(TargetKind::Lib),
+                _ => None,
+            })
+            .unwrap_or(TargetKind::Other)
+    }
+}
+
+// Deserialise helper for the `[package.metadata]` table in cargo metadata;
+// only the `rust-analyzer` sub-table is of interest.
+#[derive(Deserialize, Default)]
+struct PackageMetadata {
+    #[serde(rename = "rust-analyzer")]
+    rust_analyzer: Option<RustAnalyzerPackageMetaData>,
+}
+
+impl CargoWorkspace {
+    /// Shells out to `cargo metadata` for `cargo_toml`, honouring the feature
+    /// and target settings in `config`.
+    ///
+    /// The target triple is resolved from, in order: the explicit `config`
+    /// value, `.cargo/config`'s `build.target`, then the host triple reported
+    /// by `rustc -vV`.
+    pub fn fetch_metadata(
+        cargo_toml: &ManifestPath,
+        current_dir: &AbsPath,
+        config: &CargoConfig,
+        progress: &dyn Fn(String),
+    ) -> Result<cargo_metadata::Metadata> {
+        let target = config
+            .target
+            .clone()
+            .or_else(|| cargo_config_build_target(cargo_toml))
+            .or_else(|| rustc_discover_host_triple(cargo_toml));
+
+        let mut meta = MetadataCommand::new();
+        meta.cargo_path(toolchain::cargo());
+        meta.manifest_path(cargo_toml.to_path_buf());
+        if config.all_features {
+            meta.features(CargoOpt::AllFeatures);
+        } else {
+            if config.no_default_features {
+                // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
+                // https://github.com/oli-obk/cargo_metadata/issues/79
+                meta.features(CargoOpt::NoDefaultFeatures);
+            }
+            if !config.features.is_empty() {
+                meta.features(CargoOpt::SomeFeatures(config.features.clone()));
+            }
+        }
+        meta.current_dir(current_dir.as_os_str());
+
+        if let Some(target) = target {
+            // Resolve the dependency graph for this specific platform only.
+            meta.other_options(vec![String::from("--filter-platform"), target]);
+        }
+
+        // FIXME: Fetching metadata is a slow process, as it might require
+        // calling crates.io. We should be reporting progress here, but it's
+        // unclear whether cargo itself supports it.
+        progress("metadata".to_string());
+
+        let meta =
+            meta.exec().with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))?;
+
+        Ok(meta)
+    }
+
+    /// Builds a [`CargoWorkspace`] from raw `cargo metadata` output: packages
+    /// and targets are allocated into arenas, and dependency edges plus active
+    /// features are wired up from the resolve graph.
+    ///
+    /// Panics if `meta` was produced without dependency resolution
+    /// (i.e. with `--no-deps`).
+    pub fn new(mut meta: cargo_metadata::Metadata) -> CargoWorkspace {
+        let mut pkg_by_id = FxHashMap::default();
+        let mut packages = Arena::default();
+        let mut targets = Arena::default();
+
+        let ws_members = &meta.workspace_members;
+
+        // Sort so that iteration order (and thus arena indices) is deterministic.
+        meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
+        for meta_pkg in &meta.packages {
+            let cargo_metadata::Package {
+                id,
+                edition,
+                name,
+                manifest_path,
+                version,
+                metadata,
+                repository,
+                ..
+            } = meta_pkg;
+            let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default();
+            let edition = match edition {
+                cargo_metadata::Edition::E2015 => Edition::Edition2015,
+                cargo_metadata::Edition::E2018 => Edition::Edition2018,
+                cargo_metadata::Edition::E2021 => Edition::Edition2021,
+                _ => {
+                    tracing::error!("Unsupported edition `{:?}`", edition);
+                    Edition::CURRENT
+                }
+            };
+            // We treat packages without source as "local" packages. That includes all members of
+            // the current workspace, as well as any path dependency outside the workspace.
+            let is_local = meta_pkg.source.is_none();
+            let is_member = ws_members.contains(id);
+
+            let pkg = packages.alloc(PackageData {
+                id: id.repr.clone(),
+                name: name.clone(),
+                version: version.clone(),
+                manifest: AbsPathBuf::assert(PathBuf::from(&manifest_path)).try_into().unwrap(),
+                targets: Vec::new(),
+                is_local,
+                is_member,
+                edition,
+                repository: repository.clone(),
+                dependencies: Vec::new(),
+                features: meta_pkg.features.clone().into_iter().collect(),
+                active_features: Vec::new(),
+                metadata: meta.rust_analyzer.unwrap_or_default(),
+            });
+            let pkg_data = &mut packages[pkg];
+            pkg_by_id.insert(id, pkg);
+            for meta_tgt in &meta_pkg.targets {
+                let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"];
+                let tgt = targets.alloc(TargetData {
+                    package: pkg,
+                    name: meta_tgt.name.clone(),
+                    root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)),
+                    kind: TargetKind::new(meta_tgt.kind.as_slice()),
+                    is_proc_macro,
+                    required_features: meta_tgt.required_features.clone(),
+                });
+                pkg_data.targets.push(tgt);
+            }
+        }
+        let resolve = meta.resolve.expect("metadata executed with deps");
+        for mut node in resolve.nodes {
+            let source = match pkg_by_id.get(&node.id) {
+                Some(&src) => src,
+                // FIXME: replace this and a similar branch below with `.unwrap`, once
+                // https://github.com/rust-lang/cargo/issues/7841
+                // is fixed and hits stable (around 1.43-ish, probably?).
+                None => {
+                    tracing::error!("Node id do not match in cargo metadata, ignoring {}", node.id);
+                    continue;
+                }
+            };
+            node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg));
+            for (dep_node, kind) in node
+                .deps
+                .iter()
+                .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)))
+            {
+                let pkg = match pkg_by_id.get(&dep_node.pkg) {
+                    Some(&pkg) => pkg,
+                    None => {
+                        tracing::error!(
+                            "Dep node id do not match in cargo metadata, ignoring {}",
+                            dep_node.pkg
+                        );
+                        continue;
+                    }
+                };
+                let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind };
+                packages[source].dependencies.push(dep);
+            }
+            packages[source].active_features.extend(node.features);
+        }
+
+        let workspace_root =
+            AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string()));
+
+        CargoWorkspace { packages, targets, workspace_root }
+    }
+
+    /// Iterates over the indices of all packages in this workspace.
+    pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
+        self.packages.iter().map(|entry| entry.0)
+    }
+
+    /// Finds the workspace-member target whose crate root file is `root`.
+    pub fn target_by_root(&self, root: &AbsPath) -> Option<Target> {
+        for pkg in self.packages() {
+            if !self[pkg].is_member {
+                continue;
+            }
+            if let Some(&tgt) = self[pkg].targets.iter().find(|&&it| &self[it].root == root) {
+                return Some(tgt);
+            }
+        }
+        None
+    }
+
+    /// The root directory of this workspace.
+    pub fn workspace_root(&self) -> &AbsPath {
+        &self.workspace_root
+    }
+
+    /// Spelling of the `-p` flag for `package`: the bare name when it is
+    /// unambiguous in this workspace, otherwise `name:version`.
+    pub fn package_flag(&self, package: &PackageData) -> String {
+        match self.is_unique(&*package.name) {
+            true => package.name.clone(),
+            false => format!("{}:{}", package.name, package.version),
+        }
+    }
+
+    /// Returns the manifests of the packages in this workspace that depend on
+    /// the package whose manifest is `manifest_path`.
+    ///
+    /// When the package belongs to this workspace but nothing depends on it,
+    /// falls back to the workspace-root `Cargo.toml`. Returns `None` when the
+    /// package is not part of this workspace at all.
+    pub fn parent_manifests(&self, manifest_path: &ManifestPath) -> Option<Vec<ManifestPath>> {
+        let mut found = false;
+        let parent_manifests = self
+            .packages()
+            .filter_map(|pkg| {
+                if !found && &self[pkg].manifest == manifest_path {
+                    found = true
+                }
+                self[pkg].dependencies.iter().find_map(|dep| {
+                    if &self[dep.pkg].manifest == manifest_path {
+                        return Some(self[pkg].manifest.clone());
+                    }
+                    None
+                })
+            })
+            .collect::<Vec<ManifestPath>>();
+
+        // Some packages have this package as a dependency: return their manifests.
+        if !parent_manifests.is_empty() {
+            return Some(parent_manifests);
+        }
+
+        // This package is inside the cargo workspace: fall back to the workspace root.
+        if found {
+            return Some(vec![
+                ManifestPath::try_from(self.workspace_root().join("Cargo.toml")).ok()?
+            ]);
+        }
+
+        // Not in this workspace.
+        None
+    }
+
+    /// Whether exactly one package in this workspace bears `name`.
+    fn is_unique(&self, name: &str) -> bool {
+        let matching = self.packages.iter().filter(|(_, pkg)| pkg.name == name);
+        matching.count() == 1
+    }
+}
+
+/// Asks `rustc -vV` for the host target triple; returns `None` (with a log
+/// entry) if rustc fails to run or does not report a host line.
+fn rustc_discover_host_triple(cargo_toml: &ManifestPath) -> Option<String> {
+    let mut rustc = Command::new(toolchain::rustc());
+    rustc.current_dir(cargo_toml.parent()).arg("-vV");
+    tracing::debug!("Discovering host platform by {:?}", rustc);
+    let stdout = match utf8_stdout(rustc) {
+        Ok(stdout) => stdout,
+        Err(e) => {
+            tracing::warn!("Failed to discover host platform: {}", e);
+            return None;
+        }
+    };
+    match stdout.lines().find_map(|l| l.strip_prefix("host: ")) {
+        Some(host) => Some(host.to_string()),
+        None => {
+            // If we fail to resolve the host platform, it's not the end of the world.
+            tracing::info!("rustc -vV did not report host platform, got:\n{}", stdout);
+            None
+        }
+    }
+}
+
+/// Reads `build.target` from the cargo config via the unstable
+/// `cargo config get` subcommand; `None` on any failure.
+fn cargo_config_build_target(cargo_toml: &ManifestPath) -> Option<String> {
+    let mut cargo_config = Command::new(toolchain::cargo());
+    cargo_config
+        .current_dir(cargo_toml.parent())
+        .args(&["-Z", "unstable-options", "config", "get", "build.target"])
+        .env("RUSTC_BOOTSTRAP", "1");
+    // if successful we receive `build.target = "target-triple"`
+    tracing::debug!("Discovering cargo config target by {:?}", cargo_config);
+    let stdout = utf8_stdout(cargo_config).ok()?;
+    let triple = stdout.strip_prefix("build.target = \"")?.strip_suffix('"')?;
+    Some(triple.to_owned())
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cfg_flag.rs b/src/tools/rust-analyzer/crates/project-model/src/cfg_flag.rs
new file mode 100644
index 000000000..f3dd8f513
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/cfg_flag.rs
@@ -0,0 +1,63 @@
+//! Parsing of CfgFlags as command line arguments, as in
+//!
+//! rustc main.rs --cfg foo --cfg 'feature="bar"'
+use std::{fmt, str::FromStr};
+
+use cfg::CfgOptions;
+
+/// A single `--cfg` flag: either a bare atom (`--cfg foo`) or a key/value
+/// pair (`--cfg feature="bar"`).
+#[derive(Clone, Eq, PartialEq, Debug)]
+pub enum CfgFlag {
+    Atom(String),
+    KeyValue { key: String, value: String },
+}
+
+impl FromStr for CfgFlag {
+    type Err = String;
+
+    /// Parses `foo` into an atom and `key="value"` into a key/value flag;
+    /// the value part must be wrapped in double quotes.
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let (key, value) = match s.split_once('=') {
+            None => return Ok(CfgFlag::Atom(s.into())),
+            Some(it) => it,
+        };
+        if !(value.starts_with('"') && value.ends_with('"')) {
+            return Err(format!("Invalid cfg ({:?}), value should be in quotes", s));
+        }
+        let value = value[1..value.len() - 1].to_string();
+        Ok(CfgFlag::KeyValue { key: key.to_string(), value })
+    }
+}
+
+impl<'de> serde::Deserialize<'de> for CfgFlag {
+    /// Deserializes a cfg flag from its string form, reusing [`FromStr`].
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let raw = String::deserialize(deserializer)?;
+        raw.parse().map_err(serde::de::Error::custom)
+    }
+}
+
+impl Extend<CfgFlag> for CfgOptions {
+    /// Inserts each flag into the options, splitting atoms from key/values.
+    fn extend<T: IntoIterator<Item = CfgFlag>>(&mut self, iter: T) {
+        iter.into_iter().for_each(|flag| match flag {
+            CfgFlag::Atom(it) => self.insert_atom(it.into()),
+            CfgFlag::KeyValue { key, value } => self.insert_key_value(key.into(), value.into()),
+        });
+    }
+}
+
+impl fmt::Display for CfgFlag {
+    /// Renders the flag back to its command-line form: `foo` or `key=value`.
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            CfgFlag::Atom(atom) => write!(f, "{}", atom),
+            CfgFlag::KeyValue { key, value } => write!(f, "{}={}", key, value),
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
new file mode 100644
index 000000000..e3f83084a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -0,0 +1,159 @@
+//! In rust-analyzer, we maintain a strict separation between pure abstract
+//! semantic project model and a concrete model of a particular build system.
+//!
+//! Pure model is represented by the [`base_db::CrateGraph`] from another crate.
+//!
+//! In this crate, we are concerned with "real world" project models.
+//!
+//! Specifically, here we have a representation for a Cargo project
+//! ([`CargoWorkspace`]) and for manually specified layout ([`ProjectJson`]).
+//!
+//! Roughly, the things we do here are:
+//!
+//! * Project discovery (where's the relevant Cargo.toml for the current dir).
+//! * Custom build steps (`build.rs` code generation and compilation of
+//! procedural macros).
+//! * Lowering of concrete model to a [`base_db::CrateGraph`]
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod manifest_path;
+mod cargo_workspace;
+mod cfg_flag;
+mod project_json;
+mod sysroot;
+mod workspace;
+mod rustc_cfg;
+mod build_scripts;
+
+#[cfg(test)]
+mod tests;
+
+use std::{
+ fs::{self, read_dir, ReadDir},
+ io,
+ process::Command,
+};
+
+use anyhow::{bail, format_err, Context, Result};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashSet;
+
+pub use crate::{
+ build_scripts::WorkspaceBuildScripts,
+ cargo_workspace::{
+ CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, RustcSource, Target,
+ TargetData, TargetKind, UnsetTestCrates,
+ },
+ manifest_path::ManifestPath,
+ project_json::{ProjectJson, ProjectJsonData},
+ sysroot::Sysroot,
+ workspace::{CfgOverrides, PackageRoot, ProjectWorkspace},
+};
+
+/// Path to a file that defines a project: either a manually specified layout
+/// (`rust-project.json`) or a Cargo manifest (`Cargo.toml`).
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub enum ProjectManifest {
+    ProjectJson(ManifestPath),
+    CargoToml(ManifestPath),
+}
+
+impl ProjectManifest {
+    /// Classifies `path` as a `rust-project.json` or a `Cargo.toml` manifest,
+    /// rejecting any other file name (or a path with no parent directory).
+    pub fn from_manifest_file(path: AbsPathBuf) -> Result<ProjectManifest> {
+        let path = ManifestPath::try_from(path)
+            .map_err(|path| format_err!("bad manifest path: {}", path.display()))?;
+        match path.file_name().unwrap_or_default() {
+            name if name == "rust-project.json" => Ok(ProjectManifest::ProjectJson(path)),
+            name if name == "Cargo.toml" => Ok(ProjectManifest::CargoToml(path)),
+            _ => {
+                bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display())
+            }
+        }
+    }
+
+    /// Discovers manifests under `path` and requires exactly one result,
+    /// erroring on zero or multiple candidates.
+    pub fn discover_single(path: &AbsPath) -> Result<ProjectManifest> {
+        let mut candidates = ProjectManifest::discover(path)?;
+        let res = candidates.pop().ok_or_else(|| format_err!("no projects"))?;
+        if !candidates.is_empty() {
+            bail!("more than one project")
+        }
+        Ok(res)
+    }
+
+    /// Discovers the project manifests relevant to `path`.
+    ///
+    /// A `rust-project.json` in `path` or any ancestor takes precedence;
+    /// otherwise we look for a `Cargo.toml` in `path` or its ancestors, and as
+    /// a last resort in the immediate child directories of `path`.
+    pub fn discover(path: &AbsPath) -> io::Result<Vec<ProjectManifest>> {
+        if let Some(project_json) = find_in_parent_dirs(path, "rust-project.json") {
+            return Ok(vec![ProjectManifest::ProjectJson(project_json)]);
+        }
+        return find_cargo_toml(path)
+            .map(|paths| paths.into_iter().map(ProjectManifest::CargoToml).collect());
+
+        // Ancestor search first, then the one-level child-directory fallback.
+        fn find_cargo_toml(path: &AbsPath) -> io::Result<Vec<ManifestPath>> {
+            match find_in_parent_dirs(path, "Cargo.toml") {
+                Some(it) => Ok(vec![it]),
+                None => Ok(find_cargo_toml_in_child_dir(read_dir(path)?)),
+            }
+        }
+
+        // Walks from `path` up through its ancestors, returning the first
+        // directory that contains `target_file_name`.
+        fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option<ManifestPath> {
+            // `path` may itself already be the manifest file.
+            if path.file_name().unwrap_or_default() == target_file_name {
+                if let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) {
+                    return Some(manifest);
+                }
+            }
+
+            let mut curr = Some(path);
+
+            while let Some(path) = curr {
+                let candidate = path.join(target_file_name);
+                if fs::metadata(&candidate).is_ok() {
+                    if let Ok(manifest) = ManifestPath::try_from(candidate) {
+                        return Some(manifest);
+                    }
+                }
+                curr = path.parent();
+            }
+
+            None
+        }
+
+        fn find_cargo_toml_in_child_dir(entities: ReadDir) -> Vec<ManifestPath> {
+            // Only one level down to avoid cycles the easy way and stop a runaway scan with large projects
+            entities
+                .filter_map(Result::ok)
+                .map(|it| it.path().join("Cargo.toml"))
+                .filter(|it| it.exists())
+                .map(AbsPathBuf::assert)
+                .filter_map(|it| it.try_into().ok())
+                .collect()
+        }
+    }
+
+    /// Discovers manifests for every path, deduplicates the results, and
+    /// returns them in sorted order. Paths that fail discovery are skipped.
+    pub fn discover_all(paths: &[AbsPathBuf]) -> Vec<ProjectManifest> {
+        let mut seen = FxHashSet::default();
+        for path in paths {
+            if let Ok(manifests) = ProjectManifest::discover(path.as_ref()) {
+                seen.extend(manifests);
+            }
+        }
+        let mut res: Vec<_> = seen.into_iter().collect();
+        res.sort();
+        res
+    }
+}
+
+/// Runs `cmd` and returns its trimmed stdout; errors if the command fails to
+/// spawn, exits unsuccessfully (including any captured stderr in the message),
+/// or produces non-UTF-8 stdout.
+fn utf8_stdout(mut cmd: Command) -> Result<String> {
+    let output = cmd.output().with_context(|| format!("{:?} failed", cmd))?;
+    if !output.status.success() {
+        // Non-UTF-8 stderr is treated the same as empty stderr.
+        let stderr = String::from_utf8(output.stderr).unwrap_or_default();
+        if stderr.is_empty() {
+            bail!("{:?} failed, {}", cmd, output.status);
+        }
+        bail!("{:?} failed, {}\nstderr:\n{}", cmd, output.status, stderr);
+    }
+    Ok(String::from_utf8(output.stdout)?.trim().to_string())
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
new file mode 100644
index 000000000..4910fd3d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
@@ -0,0 +1,51 @@
+//! See [`ManifestPath`].
+use std::{ops, path::Path};
+
+use paths::{AbsPath, AbsPathBuf};
+
+/// More or less [`AbsPathBuf`] with non-None parent.
+///
+/// We use it to store path to Cargo.toml, as we frequently use the parent dir
+/// as a working directory to spawn various commands, and it's nice to not have
+/// to `.unwrap()` everywhere.
+///
+/// This could have been named `AbsNonRootPathBuf`, as we don't enforce that
+/// this stores manifest files in particular, but we only use this for manifests
+/// at the moment in practice.
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ManifestPath {
+    // Invariant: `file.parent()` is always `Some` — enforced by the `TryFrom`
+    // constructor below.
+    file: AbsPathBuf,
+}
+
+impl TryFrom<AbsPathBuf> for ManifestPath {
+    type Error = AbsPathBuf;
+
+    /// Accepts only paths with a parent directory (i.e. not the filesystem
+    /// root); hands the original path back on failure.
+    fn try_from(file: AbsPathBuf) -> Result<Self, Self::Error> {
+        if file.parent().is_some() {
+            Ok(ManifestPath { file })
+        } else {
+            Err(file)
+        }
+    }
+}
+
+impl ManifestPath {
+    // Shadow `parent` from `Deref`.
+    /// Parent directory of the manifest; infallible thanks to the type's
+    /// non-root invariant (see `TryFrom` above).
+    pub fn parent(&self) -> &AbsPath {
+        self.file.parent().unwrap()
+    }
+}
+
+// Lets a `ManifestPath` be used wherever an `&AbsPath` is expected.
+impl ops::Deref for ManifestPath {
+    type Target = AbsPath;
+
+    fn deref(&self) -> &Self::Target {
+        &*self.file
+    }
+}
+
+// Allows passing a `ManifestPath` to std APIs taking `impl AsRef<Path>`.
+impl AsRef<Path> for ManifestPath {
+    fn as_ref(&self) -> &Path {
+        self.file.as_ref()
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
new file mode 100644
index 000000000..63d1d0ace
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
@@ -0,0 +1,198 @@
+//! `rust-project.json` file format.
+//!
+//! This format is spiritually a serialization of [`base_db::CrateGraph`]. The
+//! idea here is that people who do not use Cargo can instead teach their build
+//! system to generate `rust-project.json` which can be ingested by
+//! rust-analyzer.
+
+use std::path::PathBuf;
+
+use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
+use serde::{de, Deserialize};
+
+use crate::cfg_flag::CfgFlag;
+
+/// Roots and crates that compose this Rust project.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct ProjectJson {
+ /// e.g. `path/to/sysroot`
+ pub(crate) sysroot: Option<AbsPathBuf>,
+ /// e.g. `path/to/sysroot/lib/rustlib/src/rust`
+ pub(crate) sysroot_src: Option<AbsPathBuf>,
+ project_root: AbsPathBuf,
+ crates: Vec<Crate>,
+}
+
+/// A crate points to the root module of a crate and lists the dependencies of the crate. This is
+/// useful in creating the crate graph.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct Crate {
+ pub(crate) display_name: Option<CrateDisplayName>,
+ pub(crate) root_module: AbsPathBuf,
+ pub(crate) edition: Edition,
+ pub(crate) version: Option<String>,
+ pub(crate) deps: Vec<Dependency>,
+ pub(crate) cfg: Vec<CfgFlag>,
+ pub(crate) target: Option<String>,
+ pub(crate) env: FxHashMap<String, String>,
+ pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+ pub(crate) is_workspace_member: bool,
+ pub(crate) include: Vec<AbsPathBuf>,
+ pub(crate) exclude: Vec<AbsPathBuf>,
+ pub(crate) is_proc_macro: bool,
+ pub(crate) repository: Option<String>,
+}
+
+impl ProjectJson {
+ /// Create a new ProjectJson instance.
+ ///
+ /// # Arguments
+ ///
+ /// * `base` - The path to the workspace root (i.e. the folder containing `rust-project.json`)
+ /// * `data` - The parsed contents of `rust-project.json`, or project json that's passed via
+ /// configuration.
+ pub fn new(base: &AbsPath, data: ProjectJsonData) -> ProjectJson {
+ ProjectJson {
+ sysroot: data.sysroot.map(|it| base.join(it)),
+ sysroot_src: data.sysroot_src.map(|it| base.join(it)),
+ project_root: base.to_path_buf(),
+ crates: data
+ .crates
+ .into_iter()
+ .map(|crate_data| {
+ let is_workspace_member = crate_data.is_workspace_member.unwrap_or_else(|| {
+ crate_data.root_module.is_relative()
+ && !crate_data.root_module.starts_with("..")
+ || crate_data.root_module.starts_with(base)
+ });
+ let root_module = base.join(crate_data.root_module).normalize();
+ let (include, exclude) = match crate_data.source {
+ Some(src) => {
+ let absolutize = |dirs: Vec<PathBuf>| {
+ dirs.into_iter()
+ .map(|it| base.join(it).normalize())
+ .collect::<Vec<_>>()
+ };
+ (absolutize(src.include_dirs), absolutize(src.exclude_dirs))
+ }
+ None => (vec![root_module.parent().unwrap().to_path_buf()], Vec::new()),
+ };
+
+ Crate {
+ display_name: crate_data
+ .display_name
+ .map(CrateDisplayName::from_canonical_name),
+ root_module,
+ edition: crate_data.edition.into(),
+ version: crate_data.version.as_ref().map(ToString::to_string),
+ deps: crate_data
+ .deps
+ .into_iter()
+ .map(|dep_data| {
+ Dependency::new(dep_data.name, CrateId(dep_data.krate as u32))
+ })
+ .collect::<Vec<_>>(),
+ cfg: crate_data.cfg,
+ target: crate_data.target,
+ env: crate_data.env,
+ proc_macro_dylib_path: crate_data
+ .proc_macro_dylib_path
+ .map(|it| base.join(it)),
+ is_workspace_member,
+ include,
+ exclude,
+ is_proc_macro: crate_data.is_proc_macro,
+ repository: crate_data.repository,
+ }
+ })
+ .collect::<Vec<_>>(),
+ }
+ }
+ /// Returns the number of crates in the project.
+ pub fn n_crates(&self) -> usize {
+ self.crates.len()
+ }
+ /// Returns an iterator over the crates in the project.
+ pub fn crates(&self) -> impl Iterator<Item = (CrateId, &Crate)> + '_ {
+ self.crates.iter().enumerate().map(|(idx, krate)| (CrateId(idx as u32), krate))
+ }
+ /// Returns the path to the project's root folder.
+ pub fn path(&self) -> &AbsPath {
+ &self.project_root
+ }
+}
+
+#[derive(Deserialize, Debug, Clone)]
+pub struct ProjectJsonData {
+ sysroot: Option<PathBuf>,
+ sysroot_src: Option<PathBuf>,
+ crates: Vec<CrateData>,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+struct CrateData {
+ display_name: Option<String>,
+ root_module: PathBuf,
+ edition: EditionData,
+ #[serde(default)]
+ version: Option<semver::Version>,
+ deps: Vec<DepData>,
+ #[serde(default)]
+ cfg: Vec<CfgFlag>,
+ target: Option<String>,
+ #[serde(default)]
+ env: FxHashMap<String, String>,
+ proc_macro_dylib_path: Option<PathBuf>,
+ is_workspace_member: Option<bool>,
+ source: Option<CrateSource>,
+ #[serde(default)]
+ is_proc_macro: bool,
+ #[serde(default)]
+ repository: Option<String>,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename = "edition")]
+enum EditionData {
+ #[serde(rename = "2015")]
+ Edition2015,
+ #[serde(rename = "2018")]
+ Edition2018,
+ #[serde(rename = "2021")]
+ Edition2021,
+}
+
+impl From<EditionData> for Edition {
+ fn from(data: EditionData) -> Self {
+ match data {
+ EditionData::Edition2015 => Edition::Edition2015,
+ EditionData::Edition2018 => Edition::Edition2018,
+ EditionData::Edition2021 => Edition::Edition2021,
+ }
+ }
+}
+
+#[derive(Deserialize, Debug, Clone)]
+struct DepData {
+ /// Identifies a crate by position in the crates array.
+ #[serde(rename = "crate")]
+ krate: usize,
+ #[serde(deserialize_with = "deserialize_crate_name")]
+ name: CrateName,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+struct CrateSource {
+ include_dirs: Vec<PathBuf>,
+ exclude_dirs: Vec<PathBuf>,
+}
+
+fn deserialize_crate_name<'de, D>(de: D) -> Result<CrateName, D::Error>
+where
+ D: de::Deserializer<'de>,
+{
+ let name = String::deserialize(de)?;
+ CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {:?}", err)))
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
new file mode 100644
index 000000000..17e244d06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
@@ -0,0 +1,60 @@
+//! Runs `rustc --print cfg` to get built-in cfg flags.
+
+use std::process::Command;
+
+use anyhow::Result;
+
+use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath};
+
+pub(crate) fn get(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Vec<CfgFlag> {
+ let _p = profile::span("rustc_cfg::get");
+ let mut res = Vec::with_capacity(6 * 2 + 1);
+
+ // Some nightly-only cfgs, which are required for stdlib
+ res.push(CfgFlag::Atom("target_thread_local".into()));
+ for ty in ["8", "16", "32", "64", "cas", "ptr"] {
+ for key in ["target_has_atomic", "target_has_atomic_load_store"] {
+ res.push(CfgFlag::KeyValue { key: key.to_string(), value: ty.into() });
+ }
+ }
+
+ match get_rust_cfgs(cargo_toml, target) {
+ Ok(rustc_cfgs) => {
+ tracing::debug!(
+ "rustc cfgs found: {:?}",
+ rustc_cfgs
+ .lines()
+ .map(|it| it.parse::<CfgFlag>().map(|it| it.to_string()))
+ .collect::<Vec<_>>()
+ );
+ res.extend(rustc_cfgs.lines().filter_map(|it| it.parse().ok()));
+ }
+ Err(e) => tracing::error!("failed to get rustc cfgs: {e:?}"),
+ }
+
+ res
+}
+
+fn get_rust_cfgs(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Result<String> {
+ if let Some(cargo_toml) = cargo_toml {
+ let mut cargo_config = Command::new(toolchain::cargo());
+ cargo_config
+ .current_dir(cargo_toml.parent())
+ .args(&["-Z", "unstable-options", "rustc", "--print", "cfg"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ if let Some(target) = target {
+ cargo_config.args(&["--target", target]);
+ }
+ match utf8_stdout(cargo_config) {
+ Ok(it) => return Ok(it),
+ Err(e) => tracing::debug!("{e:?}: falling back to querying rustc for cfgs"),
+ }
+ }
+ // using unstable cargo features failed, fall back to using plain rustc
+ let mut cmd = Command::new(toolchain::rustc());
+ cmd.args(&["--print", "cfg", "-O"]);
+ if let Some(target) = target {
+ cmd.args(&["--target", target]);
+ }
+ utf8_stdout(cmd)
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
new file mode 100644
index 000000000..362bb0f5e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
@@ -0,0 +1,232 @@
+//! Loads "sysroot" crate.
+//!
+//! One confusing point here is that normally sysroot is a bunch of `.rlib`s,
+//! but we can't process `.rlib` and need source code instead. The source code
+//! is typically installed with `rustup component add rust-src` command.
+
+use std::{env, fs, iter, ops, path::PathBuf, process::Command};
+
+use anyhow::{format_err, Result};
+use la_arena::{Arena, Idx};
+use paths::{AbsPath, AbsPathBuf};
+
+use crate::{utf8_stdout, ManifestPath};
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Sysroot {
+ root: AbsPathBuf,
+ src_root: AbsPathBuf,
+ crates: Arena<SysrootCrateData>,
+}
+
+pub(crate) type SysrootCrate = Idx<SysrootCrateData>;
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct SysrootCrateData {
+ pub name: String,
+ pub root: ManifestPath,
+ pub deps: Vec<SysrootCrate>,
+}
+
+impl ops::Index<SysrootCrate> for Sysroot {
+ type Output = SysrootCrateData;
+ fn index(&self, index: SysrootCrate) -> &SysrootCrateData {
+ &self.crates[index]
+ }
+}
+
+impl Sysroot {
+ /// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/`
+ /// subfolders live, like:
+ /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu`
+ pub fn root(&self) -> &AbsPath {
+ &self.root
+ }
+
+ /// Returns the sysroot "source" directory, where stdlib sources are located, like:
+ /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library`
+ pub fn src_root(&self) -> &AbsPath {
+ &self.src_root
+ }
+
+ pub fn public_deps(&self) -> impl Iterator<Item = (&'static str, SysrootCrate, bool)> + '_ {
+ // core is added as a dependency before std in order to
+ // mimic rustc's dependency order
+ ["core", "alloc", "std"]
+ .into_iter()
+ .zip(iter::repeat(true))
+ .chain(iter::once(("test", false)))
+ .filter_map(move |(name, prelude)| Some((name, self.by_name(name)?, prelude)))
+ }
+
+ pub fn proc_macro(&self) -> Option<SysrootCrate> {
+ self.by_name("proc_macro")
+ }
+
+ pub fn crates<'a>(&'a self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + 'a {
+ self.crates.iter().map(|(id, _data)| id)
+ }
+
+ pub fn discover(dir: &AbsPath) -> Result<Sysroot> {
+ tracing::debug!("Discovering sysroot for {}", dir.display());
+ let sysroot_dir = discover_sysroot_dir(dir)?;
+ let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir, dir)?;
+ let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
+ Ok(res)
+ }
+
+ pub fn discover_rustc(cargo_toml: &ManifestPath) -> Option<ManifestPath> {
+ tracing::debug!("Discovering rustc source for {}", cargo_toml.display());
+ let current_dir = cargo_toml.parent();
+ discover_sysroot_dir(current_dir).ok().and_then(|sysroot_dir| get_rustc_src(&sysroot_dir))
+ }
+
+ pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result<Sysroot> {
+ let mut sysroot =
+ Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() };
+
+ for path in SYSROOT_CRATES.trim().lines() {
+ let name = path.split('/').last().unwrap();
+ let root = [format!("{}/src/lib.rs", path), format!("lib{}/lib.rs", path)]
+ .into_iter()
+ .map(|it| sysroot.src_root.join(it))
+ .filter_map(|it| ManifestPath::try_from(it).ok())
+ .find(|it| fs::metadata(it).is_ok());
+
+ if let Some(root) = root {
+ sysroot.crates.alloc(SysrootCrateData {
+ name: name.into(),
+ root,
+ deps: Vec::new(),
+ });
+ }
+ }
+
+ if let Some(std) = sysroot.by_name("std") {
+ for dep in STD_DEPS.trim().lines() {
+ if let Some(dep) = sysroot.by_name(dep) {
+ sysroot.crates[std].deps.push(dep)
+ }
+ }
+ }
+
+ if let Some(alloc) = sysroot.by_name("alloc") {
+ if let Some(core) = sysroot.by_name("core") {
+ sysroot.crates[alloc].deps.push(core);
+ }
+ }
+
+ if let Some(proc_macro) = sysroot.by_name("proc_macro") {
+ if let Some(std) = sysroot.by_name("std") {
+ sysroot.crates[proc_macro].deps.push(std);
+ }
+ }
+
+ if sysroot.by_name("core").is_none() {
+ let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
+ " (`RUST_SRC_PATH` might be incorrect, try unsetting it)"
+ } else {
+ ""
+ };
+ anyhow::bail!(
+ "could not find libcore in sysroot path `{}`{}",
+ sysroot.src_root.as_path().display(),
+ var_note,
+ );
+ }
+
+ Ok(sysroot)
+ }
+
+ fn by_name(&self, name: &str) -> Option<SysrootCrate> {
+ let (id, _data) = self.crates.iter().find(|(_id, data)| data.name == name)?;
+ Some(id)
+ }
+}
+
+fn discover_sysroot_dir(current_dir: &AbsPath) -> Result<AbsPathBuf> {
+ let mut rustc = Command::new(toolchain::rustc());
+ rustc.current_dir(current_dir).args(&["--print", "sysroot"]);
+ tracing::debug!("Discovering sysroot by {:?}", rustc);
+ let stdout = utf8_stdout(rustc)?;
+ Ok(AbsPathBuf::assert(PathBuf::from(stdout)))
+}
+
+fn discover_sysroot_src_dir(
+ sysroot_path: &AbsPathBuf,
+ current_dir: &AbsPath,
+) -> Result<AbsPathBuf> {
+ if let Ok(path) = env::var("RUST_SRC_PATH") {
+ let path = AbsPathBuf::try_from(path.as_str())
+ .map_err(|path| format_err!("RUST_SRC_PATH must be absolute: {}", path.display()))?;
+ let core = path.join("core");
+ if fs::metadata(&core).is_ok() {
+ tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
+ return Ok(path);
+ }
+ tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
+ }
+
+ get_rust_src(sysroot_path)
+ .or_else(|| {
+ let mut rustup = Command::new(toolchain::rustup());
+ rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
+ utf8_stdout(rustup).ok()?;
+ get_rust_src(sysroot_path)
+ })
+ .ok_or_else(|| {
+ format_err!(
+ "\
+can't load standard library from sysroot
+{}
+(discovered via `rustc --print sysroot`)
+try installing the Rust source the same way you installed rustc",
+ sysroot_path.display(),
+ )
+ })
+}
+
+fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
+ let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml");
+ let rustc_src = ManifestPath::try_from(rustc_src).ok()?;
+ tracing::debug!("Checking for rustc source code: {}", rustc_src.display());
+ if fs::metadata(&rustc_src).is_ok() {
+ Some(rustc_src)
+ } else {
+ None
+ }
+}
+
+fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
+ let rust_src = sysroot_path.join("lib/rustlib/src/rust/library");
+ tracing::debug!("Checking sysroot: {}", rust_src.display());
+ if fs::metadata(&rust_src).is_ok() {
+ Some(rust_src)
+ } else {
+ None
+ }
+}
+
+const SYSROOT_CRATES: &str = "
+alloc
+core
+panic_abort
+panic_unwind
+proc_macro
+profiler_builtins
+std
+stdarch/crates/std_detect
+term
+test
+unwind";
+
+const STD_DEPS: &str = "
+alloc
+core
+panic_abort
+panic_unwind
+profiler_builtins
+std_detect
+term
+test
+unwind";
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
new file mode 100644
index 000000000..e304a59c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
@@ -0,0 +1,1820 @@
+use std::{
+ ops::Deref,
+ path::{Path, PathBuf},
+};
+
+use base_db::{CrateGraph, FileId};
+use cfg::{CfgAtom, CfgDiff};
+use expect_test::{expect, Expect};
+use paths::{AbsPath, AbsPathBuf};
+use serde::de::DeserializeOwned;
+
+use crate::{
+ CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
+ WorkspaceBuildScripts,
+};
+
+fn load_cargo(file: &str) -> CrateGraph {
+ load_cargo_with_overrides(file, CfgOverrides::default())
+}
+
+fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGraph {
+ let meta = get_test_json_file(file);
+ let cargo_workspace = CargoWorkspace::new(meta);
+ let project_workspace = ProjectWorkspace::Cargo {
+ cargo: cargo_workspace,
+ build_scripts: WorkspaceBuildScripts::default(),
+ sysroot: None,
+ rustc: None,
+ rustc_cfg: Vec::new(),
+ cfg_overrides,
+ };
+ to_crate_graph(project_workspace)
+}
+
+fn load_rust_project(file: &str) -> CrateGraph {
+ let data = get_test_json_file(file);
+ let project = rooted_project_json(data);
+ let sysroot = Some(get_fake_sysroot());
+ let project_workspace = ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new() };
+ to_crate_graph(project_workspace)
+}
+
+fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
+ let file = get_test_path(file);
+ let data = std::fs::read_to_string(file).unwrap();
+ let mut json = data.parse::<serde_json::Value>().unwrap();
+ fixup_paths(&mut json);
+ return serde_json::from_value(json).unwrap();
+
+ fn fixup_paths(val: &mut serde_json::Value) {
+ match val {
+ serde_json::Value::String(s) => replace_root(s, true),
+ serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
+ serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
+ serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
+ }
+ }
+ }
+}
+
+fn replace_root(s: &mut String, direction: bool) {
+ if direction {
+ let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
+ *s = s.replace("$ROOT$", root)
+ } else {
+ let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
+ *s = s.replace(root, "$ROOT$")
+ }
+}
+
+fn get_test_path(file: &str) -> PathBuf {
+ let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ base.join("test_data").join(file)
+}
+
+fn get_fake_sysroot() -> Sysroot {
+ let sysroot_path = get_test_path("fake-sysroot");
+ // there's no `libexec/` directory with a `proc-macro-srv` binary in that
+ // fake sysroot, so we give them both the same path:
+ let sysroot_dir = AbsPathBuf::assert(sysroot_path);
+ let sysroot_src_dir = sysroot_dir.clone();
+ Sysroot::load(sysroot_dir, sysroot_src_dir).unwrap()
+}
+
+fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
+ let mut root = "$ROOT$".to_string();
+ replace_root(&mut root, true);
+ let path = Path::new(&root);
+ let base = AbsPath::assert(path);
+ ProjectJson::new(base, data)
+}
+
+fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph {
+ project_workspace.to_crate_graph(&mut |_, _| Ok(Vec::new()), &mut {
+ let mut counter = 0;
+ move |_path| {
+ counter += 1;
+ Some(FileId(counter))
+ }
+ })
+}
+
+fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) {
+ let mut crate_graph = format!("{:#?}", crate_graph);
+ replace_root(&mut crate_graph, false);
+ expect.assert_eq(&crate_graph);
+}
+
+#[test]
+fn cargo_hello_world_project_model_with_wildcard_overrides() {
+ let cfg_overrides = CfgOverrides::Wildcard(
+ CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
+ );
+ let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn cargo_hello_world_project_model_with_selective_overrides() {
+ let cfg_overrides = {
+ CfgOverrides::Selective(
+ std::iter::once((
+ "libc".to_owned(),
+ CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
+ ))
+ .collect(),
+ )
+ };
+ let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn cargo_hello_world_project_model() {
+ let crate_graph = load_cargo("hello-world-metadata.json");
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn rust_project_hello_world_project_model() {
+ let crate_graph = load_rust_project("hello-world-project.json");
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "alloc",
+ ),
+ canonical_name: "alloc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Alloc,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 10,
+ ): CrateData {
+ root_file_id: FileId(
+ 11,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "unwind",
+ ),
+ canonical_name: "unwind",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 7,
+ ): CrateData {
+ root_file_id: FileId(
+ 8,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "std_detect",
+ ),
+ canonical_name: "std_detect",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "proc_macro",
+ ),
+ canonical_name: "proc_macro",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 6,
+ ),
+ name: CrateName(
+ "std",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "core",
+ ),
+ canonical_name: "core",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Core,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 11,
+ ): CrateData {
+ root_file_id: FileId(
+ 12,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello_world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "alloc",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 6,
+ ),
+ name: CrateName(
+ "std",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 9,
+ ),
+ name: CrateName(
+ "test",
+ ),
+ prelude: false,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro dylib present",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 8,
+ ): CrateData {
+ root_file_id: FileId(
+ 9,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "term",
+ ),
+ canonical_name: "term",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 5,
+ ): CrateData {
+ root_file_id: FileId(
+ 6,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "profiler_builtins",
+ ),
+ canonical_name: "profiler_builtins",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "panic_abort",
+ ),
+ canonical_name: "panic_abort",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 9,
+ ): CrateData {
+ root_file_id: FileId(
+ 10,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "test",
+ ),
+ canonical_name: "test",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Test,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 6,
+ ): CrateData {
+ root_file_id: FileId(
+ 7,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "std",
+ ),
+ canonical_name: "std",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "alloc",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 2,
+ ),
+ name: CrateName(
+ "panic_abort",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 3,
+ ),
+ name: CrateName(
+ "panic_unwind",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 5,
+ ),
+ name: CrateName(
+ "profiler_builtins",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 7,
+ ),
+ name: CrateName(
+ "std_detect",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 8,
+ ),
+ name: CrateName(
+ "term",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 9,
+ ),
+ name: CrateName(
+ "test",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 10,
+ ),
+ name: CrateName(
+ "unwind",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Std,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "panic_unwind",
+ ),
+ canonical_name: "panic_unwind",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ );
+}
+
+#[test]
+fn rust_project_is_proc_macro_has_proc_macro_dep() {
+    let crate_graph = load_rust_project("is-proc-macro-project.json");
+    // Since the project only defines one crate (outside the sysroot crates),
+    // it should be the one with the biggest Id.
+    let crate_id = crate_graph.iter().max().unwrap();
+    let crate_data = &crate_graph[crate_id];
+    // Assert that the project crate with `is_proc_macro` has a dependency
+    // on the proc_macro sysroot crate.
+    // (`unwrap` panics, failing the test, if no such dependency exists.)
+    crate_data.dependencies.iter().find(|&dep| dep.name.deref() == "proc_macro").unwrap();
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
new file mode 100644
index 000000000..b144006b4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -0,0 +1,1032 @@
+//! Handles lowering of build-system specific workspace information (`cargo
+//! metadata` or `rust-project.json`) into representation stored in the salsa
+//! database -- `CrateGraph`.
+
+use std::{collections::VecDeque, fmt, fs, process::Command};
+
+use anyhow::{format_err, Context, Result};
+use base_db::{
+ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
+ FileId, LangCrateOrigin, ProcMacroLoadResult,
+};
+use cfg::{CfgDiff, CfgOptions};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::always;
+
+use crate::{
+ build_scripts::BuildScriptOutput,
+ cargo_workspace::{DepKind, PackageData, RustcSource},
+ cfg_flag::CfgFlag,
+ rustc_cfg,
+ sysroot::SysrootCrate,
+ utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, ProjectJson, ProjectManifest, Sysroot,
+ TargetKind, WorkspaceBuildScripts,
+};
+
+/// A set of cfg-overrides per crate.
+///
+/// `Wildcard(..)` is useful e.g. for disabling `#[cfg(test)]` on all crates,
+/// without having to first obtain a list of all crates.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum CfgOverrides {
+    /// A single global set of overrides matching all crates.
+    Wildcard(CfgDiff),
+    /// A set of overrides matching specific crates.
+    ///
+    /// Keyed by package name; see `cargo_to_crate_graph`, which looks entries
+    /// up via `cargo[pkg].name`.
+    Selective(FxHashMap<String, CfgDiff>),
+}
+
+impl Default for CfgOverrides {
+    fn default() -> Self {
+        // No overrides by default: an empty selective set matches no crates.
+        Self::Selective(FxHashMap::default())
+    }
+}
+
+impl CfgOverrides {
+    /// Number of override entries: a wildcard counts as a single entry,
+    /// otherwise the number of crate-specific overrides.
+    pub fn len(&self) -> usize {
+        match self {
+            CfgOverrides::Wildcard(_) => 1,
+            CfgOverrides::Selective(hash_map) => hash_map.len(),
+        }
+    }
+}
+
+/// `PackageRoot` describes a package root folder.
+/// Which may be an external dependency, or a member of
+/// the current workspace.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct PackageRoot {
+    /// Is from the local filesystem and may be edited
+    pub is_local: bool,
+    /// Directories whose contents belong to this package root.
+    pub include: Vec<AbsPathBuf>,
+    /// Subpaths of `include` directories to ignore (e.g. `.git/`, `target/`;
+    /// see `ProjectWorkspace::to_roots`).
+    pub exclude: Vec<AbsPathBuf>,
+}
+
+#[derive(Clone, Eq, PartialEq)]
+pub enum ProjectWorkspace {
+    /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
+    Cargo {
+        cargo: CargoWorkspace,
+        /// Build-script outputs collected for the workspace's packages; empty
+        /// until `run_build_scripts`/`set_build_scripts` have been used.
+        build_scripts: WorkspaceBuildScripts,
+        sysroot: Option<Sysroot>,
+        /// The `rustc` compiler sources as a Cargo workspace, when configured
+        /// via `config.rustc_source` (see `ProjectWorkspace::load`).
+        rustc: Option<CargoWorkspace>,
+        /// Holds cfg flags for the current target. We get those by running
+        /// `rustc --print cfg`.
+        ///
+        /// FIXME: make this a per-crate map, as, eg, build.rs might have a
+        /// different target.
+        rustc_cfg: Vec<CfgFlag>,
+        /// User-configured cfg additions/removals, global or per crate.
+        cfg_overrides: CfgOverrides,
+    },
+    /// Project workspace was manually specified using a `rust-project.json` file.
+    Json { project: ProjectJson, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
+
+    // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
+    // That's not the end user experience we should strive for.
+    // Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working.
+    // That needs some changes on the salsa-level though.
+    // In particular, we should split the unified CrateGraph (which currently has maximal durability) into proper crate graph, and a set of ad hoc roots (with minimal durability).
+    // Then, we need to hide the graph behind the queries such that most queries look only at the proper crate graph, and fall back to ad hoc roots only if there's no results.
+    // After this, we should be able to tweak the logic in reload.rs to add newly opened files, which don't belong to any existing crates, to the set of the detached files.
+    /// Project with a set of disjoint files, not belonging to any particular workspace.
+    /// Backed by basic sysroot crates for basic completion and highlighting.
+    DetachedFiles { files: Vec<AbsPathBuf>, sysroot: Sysroot, rustc_cfg: Vec<CfgFlag> },
+}
+
+impl fmt::Debug for ProjectWorkspace {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Make sure this isn't too verbose: print only sizes and flags rather
+        // than dumping the workspace contents.
+        match self {
+            ProjectWorkspace::Cargo {
+                cargo,
+                build_scripts: _,
+                sysroot,
+                rustc,
+                rustc_cfg,
+                cfg_overrides,
+            } => f
+                .debug_struct("Cargo")
+                .field("root", &cargo.workspace_root().file_name())
+                .field("n_packages", &cargo.packages().len())
+                .field("sysroot", &sysroot.is_some())
+                .field(
+                    "n_rustc_compiler_crates",
+                    &rustc.as_ref().map_or(0, |rc| rc.packages().len()),
+                )
+                .field("n_rustc_cfg", &rustc_cfg.len())
+                .field("n_cfg_overrides", &cfg_overrides.len())
+                .finish(),
+            ProjectWorkspace::Json { project, sysroot, rustc_cfg } => {
+                let mut debug_struct = f.debug_struct("Json");
+                debug_struct.field("n_crates", &project.n_crates());
+                if let Some(sysroot) = sysroot {
+                    debug_struct.field("n_sysroot_crates", &sysroot.crates().len());
+                }
+                debug_struct.field("n_rustc_cfg", &rustc_cfg.len());
+                debug_struct.finish()
+            }
+            ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f
+                .debug_struct("DetachedFiles")
+                .field("n_files", &files.len())
+                .field("n_sysroot_crates", &sysroot.crates().len())
+                .field("n_rustc_cfg", &rustc_cfg.len())
+                .finish(),
+        }
+    }
+}
+
+impl ProjectWorkspace {
+    /// Discovers and loads a workspace from `manifest`.
+    ///
+    /// For a `rust-project.json` manifest the file is read and deserialized;
+    /// for a `Cargo.toml` this runs `cargo metadata` (reporting progress via
+    /// `progress`) and additionally discovers the sysroot and, if configured,
+    /// the `rustc` sources.
+    pub fn load(
+        manifest: ProjectManifest,
+        config: &CargoConfig,
+        progress: &dyn Fn(String),
+    ) -> Result<ProjectWorkspace> {
+        let res = match manifest {
+            ProjectManifest::ProjectJson(project_json) => {
+                let file = fs::read_to_string(&project_json).with_context(|| {
+                    format!("Failed to read json file {}", project_json.display())
+                })?;
+                let data = serde_json::from_str(&file).with_context(|| {
+                    format!("Failed to deserialize json file {}", project_json.display())
+                })?;
+                let project_location = project_json.parent().to_path_buf();
+                let project_json = ProjectJson::new(&project_location, data);
+                ProjectWorkspace::load_inline(project_json, config.target.as_deref())?
+            }
+            ProjectManifest::CargoToml(cargo_toml) => {
+                // Fetched eagerly so the metadata error below can report which
+                // cargo version was used.
+                let cargo_version = utf8_stdout({
+                    let mut cmd = Command::new(toolchain::cargo());
+                    cmd.arg("--version");
+                    cmd
+                })?;
+
+                let meta = CargoWorkspace::fetch_metadata(
+                    &cargo_toml,
+                    cargo_toml.parent(),
+                    config,
+                    progress,
+                )
+                .with_context(|| {
+                    format!(
+                        "Failed to read Cargo metadata from Cargo.toml file {}, {}",
+                        cargo_toml.display(),
+                        cargo_version
+                    )
+                })?;
+                let cargo = CargoWorkspace::new(meta);
+
+                let sysroot = if config.no_sysroot {
+                    None
+                } else {
+                    Some(Sysroot::discover(cargo_toml.parent()).with_context(|| {
+                        format!(
+                            "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
+                            cargo_toml.display()
+                        )
+                    })?)
+                };
+
+                let rustc_dir = match &config.rustc_source {
+                    Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
+                    Some(RustcSource::Discover) => Sysroot::discover_rustc(&cargo_toml),
+                    None => None,
+                };
+
+                let rustc = match rustc_dir {
+                    Some(rustc_dir) => Some({
+                        let meta = CargoWorkspace::fetch_metadata(
+                            &rustc_dir,
+                            cargo_toml.parent(),
+                            config,
+                            progress,
+                        )
+                        .with_context(|| {
+                            "Failed to read Cargo metadata for Rust sources".to_string()
+                        })?;
+                        CargoWorkspace::new(meta)
+                    }),
+                    None => None,
+                };
+
+                let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref());
+
+                let cfg_overrides = config.cfg_overrides();
+                ProjectWorkspace::Cargo {
+                    cargo,
+                    // Build scripts run separately; results are patched in
+                    // later via `set_build_scripts`.
+                    build_scripts: WorkspaceBuildScripts::default(),
+                    sysroot,
+                    rustc,
+                    rustc_cfg,
+                    cfg_overrides,
+                }
+            }
+        };
+
+        Ok(res)
+    }
+
+    /// Builds a `ProjectWorkspace::Json` from an already-parsed `ProjectJson`.
+    ///
+    /// If only one of `sysroot`/`sysroot_src` is given, the other is guessed
+    /// assuming a rustup-style sysroot layout.
+    pub fn load_inline(
+        project_json: ProjectJson,
+        target: Option<&str>,
+    ) -> Result<ProjectWorkspace> {
+        let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
+            (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?),
+            (Some(sysroot), None) => {
+                // assume sysroot is structured like rustup's and guess `sysroot_src`
+                let sysroot_src =
+                    sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
+
+                Some(Sysroot::load(sysroot, sysroot_src)?)
+            }
+            (None, Some(sysroot_src)) => {
+                // assume sysroot is structured like rustup's and guess `sysroot`:
+                // `sysroot_src` would be `$sysroot/lib/rustlib/src/rust/library`
+                // (see the arm above), so strip the last five path components.
+                let mut sysroot = sysroot_src.clone();
+                for _ in 0..5 {
+                    sysroot.pop();
+                }
+                Some(Sysroot::load(sysroot, sysroot_src)?)
+            }
+            (None, None) => None,
+        };
+
+        let rustc_cfg = rustc_cfg::get(None, target);
+        Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
+    }
+
+    /// Builds a `DetachedFiles` workspace, discovering the sysroot relative to
+    /// the first file's parent directory. Errors if `detached_files` is empty.
+    pub fn load_detached_files(detached_files: Vec<AbsPathBuf>) -> Result<ProjectWorkspace> {
+        let sysroot = Sysroot::discover(
+            detached_files
+                .first()
+                .and_then(|it| it.parent())
+                .ok_or_else(|| format_err!("No detached files to load"))?,
+        )?;
+        let rustc_cfg = rustc_cfg::get(None, None);
+        Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
+    }
+
+    /// Runs build scripts for a Cargo workspace and returns their collected
+    /// outputs; `Json` and `DetachedFiles` workspaces have none and yield the
+    /// default (empty) value.
+    pub fn run_build_scripts(
+        &self,
+        config: &CargoConfig,
+        progress: &dyn Fn(String),
+    ) -> Result<WorkspaceBuildScripts> {
+        match self {
+            ProjectWorkspace::Cargo { cargo, .. } => {
+                WorkspaceBuildScripts::run(config, cargo, progress).with_context(|| {
+                    format!("Failed to run build scripts for {}", &cargo.workspace_root().display())
+                })
+            }
+            ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
+                Ok(WorkspaceBuildScripts::default())
+            }
+        }
+    }
+
+    /// Stores previously collected build-script outputs on a Cargo workspace.
+    pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) {
+        match self {
+            ProjectWorkspace::Cargo { build_scripts, .. } => *build_scripts = bs,
+            _ => {
+                // Non-Cargo workspaces never run build scripts, so only the
+                // default (empty) value is expected here.
+                always!(bs == WorkspaceBuildScripts::default());
+            }
+        }
+    }
+
+    /// Returns the roots for the current `ProjectWorkspace`
+    /// The return type contains the path and whether or not
+    /// the root is a member of the current workspace
+    pub fn to_roots(&self) -> Vec<PackageRoot> {
+        match self {
+            ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
+                .crates()
+                .map(|(_, krate)| PackageRoot {
+                    is_local: krate.is_workspace_member,
+                    include: krate.include.clone(),
+                    exclude: krate.exclude.clone(),
+                })
+                // Deduplicate roots via an intermediate set.
+                .collect::<FxHashSet<_>>()
+                .into_iter()
+                .chain(sysroot.as_ref().into_iter().flat_map(|sysroot| {
+                    sysroot.crates().map(move |krate| PackageRoot {
+                        is_local: false,
+                        include: vec![sysroot[krate].root.parent().to_path_buf()],
+                        exclude: Vec::new(),
+                    })
+                }))
+                .collect::<Vec<_>>(),
+            ProjectWorkspace::Cargo {
+                cargo,
+                sysroot,
+                rustc,
+                rustc_cfg: _,
+                cfg_overrides: _,
+                build_scripts,
+            } => {
+                cargo
+                    .packages()
+                    .map(|pkg| {
+                        let is_local = cargo[pkg].is_local;
+                        let pkg_root = cargo[pkg].manifest.parent().to_path_buf();
+
+                        let mut include = vec![pkg_root.clone()];
+                        // Sources generated by the build script (`OUT_DIR`)
+                        // also belong to this package.
+                        let out_dir =
+                            build_scripts.get_output(pkg).and_then(|it| it.out_dir.clone());
+                        include.extend(out_dir);
+
+                        // In case target's path is manually set in Cargo.toml to be
+                        // outside the package root, add its parent as an extra include.
+                        // An example of this situation would look like this:
+                        //
+                        // ```toml
+                        // [lib]
+                        // path = "../../src/lib.rs"
+                        // ```
+                        let extra_targets = cargo[pkg]
+                            .targets
+                            .iter()
+                            .filter(|&&tgt| cargo[tgt].kind == TargetKind::Lib)
+                            .filter_map(|&tgt| cargo[tgt].root.parent())
+                            .map(|tgt| tgt.normalize().to_path_buf())
+                            .filter(|path| !path.starts_with(&pkg_root));
+                        include.extend(extra_targets);
+
+                        let mut exclude = vec![pkg_root.join(".git")];
+                        if is_local {
+                            exclude.push(pkg_root.join("target"));
+                        } else {
+                            // For dependencies, skip the dev-only directories.
+                            exclude.push(pkg_root.join("tests"));
+                            exclude.push(pkg_root.join("examples"));
+                            exclude.push(pkg_root.join("benches"));
+                        }
+                        PackageRoot { is_local, include, exclude }
+                    })
+                    .chain(sysroot.iter().map(|sysroot| PackageRoot {
+                        is_local: false,
+                        include: vec![sysroot.src_root().to_path_buf()],
+                        exclude: Vec::new(),
+                    }))
+                    .chain(rustc.iter().flat_map(|rustc| {
+                        rustc.packages().map(move |krate| PackageRoot {
+                            is_local: false,
+                            include: vec![rustc[krate].manifest.parent().to_path_buf()],
+                            exclude: Vec::new(),
+                        })
+                    }))
+                    .collect()
+            }
+            ProjectWorkspace::DetachedFiles { files, sysroot, .. } => files
+                .iter()
+                .map(|detached_file| PackageRoot {
+                    is_local: true,
+                    include: vec![detached_file.clone()],
+                    exclude: Vec::new(),
+                })
+                .chain(sysroot.crates().map(|krate| PackageRoot {
+                    is_local: false,
+                    include: vec![sysroot[krate].root.parent().to_path_buf()],
+                    exclude: Vec::new(),
+                }))
+                .collect(),
+        }
+    }
+
+    /// Total number of crates/packages across the workspace, its sysroot and
+    /// (for Cargo workspaces) the optional rustc workspace.
+    pub fn n_packages(&self) -> usize {
+        match self {
+            ProjectWorkspace::Json { project, .. } => project.n_crates(),
+            ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => {
+                let rustc_package_len = rustc.as_ref().map_or(0, |it| it.packages().len());
+                let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len());
+                cargo.packages().len() + sysroot_package_len + rustc_package_len
+            }
+            ProjectWorkspace::DetachedFiles { sysroot, files, .. } => {
+                sysroot.crates().len() + files.len()
+            }
+        }
+    }
+
+    /// Lowers this workspace into a `CrateGraph`.
+    ///
+    /// `load` interns file paths into `FileId`s (returning `None` for files
+    /// that cannot be loaded); `load_proc_macro` loads a compiled proc-macro
+    /// dylib given a crate name and the dylib path.
+    pub fn to_crate_graph(
+        &self,
+        load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+        load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+    ) -> CrateGraph {
+        let _p = profile::span("ProjectWorkspace::to_crate_graph");
+
+        let mut crate_graph = match self {
+            ProjectWorkspace::Json { project, sysroot, rustc_cfg } => project_json_to_crate_graph(
+                rustc_cfg.clone(),
+                load_proc_macro,
+                load,
+                project,
+                sysroot,
+            ),
+            ProjectWorkspace::Cargo {
+                cargo,
+                sysroot,
+                rustc,
+                rustc_cfg,
+                cfg_overrides,
+                build_scripts,
+            } => cargo_to_crate_graph(
+                rustc_cfg.clone(),
+                cfg_overrides,
+                load_proc_macro,
+                load,
+                cargo,
+                build_scripts,
+                sysroot.as_ref(),
+                rustc,
+            ),
+            ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
+                detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot)
+            }
+        };
+        // Fix up the std -> cfg-if dependency where applicable; the outcome is
+        // only logged.
+        if crate_graph.patch_cfg_if() {
+            tracing::debug!("Patched std to depend on cfg-if")
+        } else {
+            tracing::debug!("Did not patch std to depend on cfg-if")
+        }
+        crate_graph
+    }
+}
+
+/// Lowers a `rust-project.json` description into a `CrateGraph`, optionally
+/// wiring in sysroot crates.
+fn project_json_to_crate_graph(
+    rustc_cfg: Vec<CfgFlag>,
+    load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+    load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+    project: &ProjectJson,
+    sysroot: &Option<Sysroot>,
+) -> CrateGraph {
+    let mut crate_graph = CrateGraph::default();
+    let sysroot_deps = sysroot
+        .as_ref()
+        .map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load));
+
+    // Caches the result of `rustc_cfg::get` per explicit target triple.
+    let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
+    // Maps the json file's crate ids to the ids assigned by `crate_graph`.
+    let crates: FxHashMap<CrateId, CrateId> = project
+        .crates()
+        .filter_map(|(crate_id, krate)| {
+            // Skip crates whose root module cannot be loaded.
+            let file_path = &krate.root_module;
+            let file_id = load(file_path)?;
+            Some((crate_id, krate, file_id))
+        })
+        .map(|(crate_id, krate, file_id)| {
+            let env = krate.env.clone().into_iter().collect();
+            let proc_macro = match krate.proc_macro_dylib_path.clone() {
+                Some(it) => load_proc_macro(
+                    krate.display_name.as_ref().map(|it| it.canonical_name()).unwrap_or(""),
+                    &it,
+                ),
+                None => Err("no proc macro dylib present".into()),
+            };
+
+            let target_cfgs = match krate.target.as_deref() {
+                Some(target) => {
+                    cfg_cache.entry(target).or_insert_with(|| rustc_cfg::get(None, Some(target)))
+                }
+                None => &rustc_cfg,
+            };
+
+            let mut cfg_options = CfgOptions::default();
+            cfg_options.extend(target_cfgs.iter().chain(krate.cfg.iter()).cloned());
+            (
+                crate_id,
+                crate_graph.add_crate_root(
+                    file_id,
+                    krate.edition,
+                    krate.display_name.clone(),
+                    krate.version.clone(),
+                    cfg_options.clone(),
+                    cfg_options,
+                    env,
+                    proc_macro,
+                    krate.is_proc_macro,
+                    // NOTE: both arms yield `CratesIo`; crates without a
+                    // display name just get no repo attached.
+                    if krate.display_name.is_some() {
+                        CrateOrigin::CratesIo { repo: krate.repository.clone() }
+                    } else {
+                        CrateOrigin::CratesIo { repo: None }
+                    },
+                ),
+            )
+        })
+        .collect();
+
+    // With all crates added, wire up dependency edges, plus the implicit
+    // sysroot dependencies (including `proc_macro` for proc-macro crates).
+    for (from, krate) in project.crates() {
+        if let Some(&from) = crates.get(&from) {
+            if let Some((public_deps, libproc_macro)) = &sysroot_deps {
+                public_deps.add(from, &mut crate_graph);
+                if krate.is_proc_macro {
+                    if let Some(proc_macro) = libproc_macro {
+                        add_dep(
+                            &mut crate_graph,
+                            from,
+                            CrateName::new("proc_macro").unwrap(),
+                            *proc_macro,
+                        );
+                    }
+                }
+            }
+
+            // Dependencies on crates that failed to load are silently dropped.
+            for dep in &krate.deps {
+                if let Some(&to) = crates.get(&dep.crate_id) {
+                    add_dep(&mut crate_graph, from, dep.name.clone(), to)
+                }
+            }
+        }
+    }
+    crate_graph
+}
+
+/// Builds a `CrateGraph` for a Cargo workspace.
+///
+/// Order of operations:
+/// 1. add the sysroot crates (if a `Sysroot` was supplied) so workspace crates
+///    can depend on core/std/proc_macro,
+/// 2. create one crate per (package, target) pair, applying cfg overrides and
+///    build-script output per package,
+/// 3. wire dependency edges between packages (respecting build-script rules),
+/// 4. if any package opted into `rustc_private`, graft the rustc-source
+///    workspace in via `handle_rustc_crates`.
+fn cargo_to_crate_graph(
+ rustc_cfg: Vec<CfgFlag>,
+ override_cfg: &CfgOverrides,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ cargo: &CargoWorkspace,
+ build_scripts: &WorkspaceBuildScripts,
+ sysroot: Option<&Sysroot>,
+ rustc: &Option<CargoWorkspace>,
+) -> CrateGraph {
+ let _p = profile::span("cargo_to_crate_graph");
+ let mut crate_graph = CrateGraph::default();
+ // Without a sysroot there are simply no public sysroot deps / libproc_macro.
+ let (public_deps, libproc_macro) = match sysroot {
+ Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load),
+ None => (SysrootPublicDeps::default(), None),
+ };
+
+ // Base cfg set shared by every package: the target cfgs reported by rustc.
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+
+ let mut pkg_to_lib_crate = FxHashMap::default();
+
+ // Unconditionally enabled; presumably matches cargo's default (dev) profile — TODO confirm.
+ cfg_options.insert_atom("debug_assertions".into());
+
+ let mut pkg_crates = FxHashMap::default();
+ // Does any crate signal to rust-analyzer that they need the rustc_private crates?
+ let mut has_private = false;
+ // Next, create crates for each package, target pair
+ for pkg in cargo.packages() {
+ // Per-package copy so overrides/test cfg don't leak between packages.
+ let mut cfg_options = cfg_options.clone();
+
+ let overrides = match override_cfg {
+ CfgOverrides::Wildcard(cfg_diff) => Some(cfg_diff),
+ CfgOverrides::Selective(cfg_overrides) => cfg_overrides.get(&cargo[pkg].name),
+ };
+
+ // Add test cfg for local crates
+ if cargo[pkg].is_local {
+ cfg_options.insert_atom("test".into());
+ }
+
+ if let Some(overrides) = overrides {
+ // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
+ // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
+ // working on rust-lang/rust as that's the only time it appears outside sysroot).
+ //
+ // A more ideal solution might be to reanalyze crates based on where the cursor is and
+ // figure out the set of cfgs that would have to apply to make it active.
+
+ cfg_options.apply_diff(overrides.clone());
+ };
+
+ has_private |= cargo[pkg].metadata.rustc_private;
+ let mut lib_tgt = None;
+ for &tgt in cargo[pkg].targets.iter() {
+ if cargo[tgt].kind != TargetKind::Lib && !cargo[pkg].is_member {
+ // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't
+ // add any targets except the library target, since those will not work correctly if
+ // they use dev-dependencies.
+ // In fact, they can break quite badly if multiple client workspaces get merged:
+ // https://github.com/rust-lang/rust-analyzer/issues/11300
+ continue;
+ }
+
+ // Targets whose root module can't be loaded into the VFS are silently skipped.
+ if let Some(file_id) = load(&cargo[tgt].root) {
+ let crate_id = add_target_crate_root(
+ &mut crate_graph,
+ &cargo[pkg],
+ build_scripts.get_output(pkg),
+ cfg_options.clone(),
+ &mut |path| load_proc_macro(&cargo[tgt].name, path),
+ file_id,
+ &cargo[tgt].name,
+ cargo[tgt].is_proc_macro,
+ );
+ if cargo[tgt].kind == TargetKind::Lib {
+ lib_tgt = Some((crate_id, cargo[tgt].name.clone()));
+ pkg_to_lib_crate.insert(pkg, crate_id);
+ }
+ // Make `proc_macro` importable; only proc-macro targets get it in their prelude.
+ if let Some(proc_macro) = libproc_macro {
+ add_dep_with_prelude(
+ &mut crate_graph,
+ crate_id,
+ CrateName::new("proc_macro").unwrap(),
+ proc_macro,
+ cargo[tgt].is_proc_macro,
+ );
+ }
+
+ pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, cargo[tgt].kind));
+ }
+ }
+
+ // Set deps to the core, std and to the lib target of the current package
+ for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
+ // Add sysroot deps first so that a lib target named `core` etc. can overwrite them.
+ public_deps.add(*from, &mut crate_graph);
+
+ if let Some((to, name)) = lib_tgt.clone() {
+ if to != *from && *kind != TargetKind::BuildScript {
+ // (build script can not depend on its library target)
+
+ // For root projects with dashes in their name,
+ // cargo metadata does not do any normalization,
+ // so we do it ourselves currently
+ let name = CrateName::normalize_dashes(&name);
+ add_dep(&mut crate_graph, *from, name, to);
+ }
+ }
+ }
+ }
+
+ // Now add a dep edge from all targets of upstream to the lib
+ // target of downstream.
+ for pkg in cargo.packages() {
+ for dep in cargo[pkg].dependencies.iter() {
+ let name = CrateName::new(&dep.name).unwrap();
+ if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
+ for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
+ if dep.kind == DepKind::Build && *kind != TargetKind::BuildScript {
+ // Only build scripts may depend on build dependencies.
+ continue;
+ }
+ if dep.kind != DepKind::Build && *kind == TargetKind::BuildScript {
+ // Build scripts may only depend on build dependencies.
+ continue;
+ }
+
+ add_dep(&mut crate_graph, *from, name.clone(), to)
+ }
+ }
+ }
+ }
+
+ if has_private {
+ // If the user provided a path to rustc sources, we add all the rustc_private crates
+ // and create dependencies on them for the crates which opt-in to that
+ if let Some(rustc_workspace) = rustc {
+ handle_rustc_crates(
+ rustc_workspace,
+ load,
+ &mut crate_graph,
+ &cfg_options,
+ override_cfg,
+ load_proc_macro,
+ &mut pkg_to_lib_crate,
+ &public_deps,
+ cargo,
+ &pkg_crates,
+ build_scripts,
+ );
+ }
+ }
+ crate_graph
+}
+
+/// Builds a `CrateGraph` where each detached file becomes its own crate root,
+/// with the sysroot's public crates (core/std/…) as its only dependencies.
+///
+/// Files that cannot be loaded into the VFS are logged and skipped rather than
+/// failing the whole graph.
+fn detached_files_to_crate_graph(
+ rustc_cfg: Vec<CfgFlag>,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ detached_files: &[AbsPathBuf],
+ sysroot: &Sysroot,
+) -> CrateGraph {
+ let _p = profile::span("detached_files_to_crate_graph");
+ let mut crate_graph = CrateGraph::default();
+ let (public_deps, _libproc_macro) =
+ sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
+
+ // All detached files share the same cfg set: just the rustc target cfgs.
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+
+ for detached_file in detached_files {
+ let file_id = match load(detached_file) {
+ Some(file_id) => file_id,
+ None => {
+ tracing::error!("Failed to load detached file {:?}", detached_file);
+ continue;
+ }
+ };
+ // Crate display name is derived from the file stem; None if it isn't valid UTF-8.
+ let display_name = detached_file
+ .file_stem()
+ .and_then(|os_str| os_str.to_str())
+ .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_string()));
+ let detached_file_crate = crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ display_name,
+ None,
+ cfg_options.clone(),
+ cfg_options.clone(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+
+ public_deps.add(detached_file_crate, &mut crate_graph);
+ }
+ crate_graph
+}
+
+/// Adds the `rustc_private` crates (rustc's own library crates) to an existing
+/// crate graph and links them to any workspace package that opted in via
+/// `metadata.rustc_private`.
+///
+/// Starting from `rustc_driver`, the dependency subtree of the rustc workspace
+/// is walked breadth-first; each lib target becomes a crate, then dep edges are
+/// added within the rustc workspace and from opted-in workspace crates.
+fn handle_rustc_crates(
+ rustc_workspace: &CargoWorkspace,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ crate_graph: &mut CrateGraph,
+ cfg_options: &CfgOptions,
+ override_cfg: &CfgOverrides,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ pkg_to_lib_crate: &mut FxHashMap<la_arena::Idx<crate::PackageData>, CrateId>,
+ public_deps: &SysrootPublicDeps,
+ cargo: &CargoWorkspace,
+ pkg_crates: &FxHashMap<la_arena::Idx<crate::PackageData>, Vec<(CrateId, TargetKind)>>,
+ build_scripts: &WorkspaceBuildScripts,
+) {
+ let mut rustc_pkg_crates = FxHashMap::default();
+ // The root package of the rustc-dev component is rustc_driver, so we match that
+ let root_pkg =
+ rustc_workspace.packages().find(|package| rustc_workspace[*package].name == "rustc_driver");
+ // The rustc workspace might be incomplete (such as if rustc-dev is not
+ // installed for the current toolchain) and `rustc_source` is set to discover.
+ if let Some(root_pkg) = root_pkg {
+ // Iterate through every crate in the dependency subtree of rustc_driver using BFS
+ let mut queue = VecDeque::new();
+ queue.push_back(root_pkg);
+ while let Some(pkg) = queue.pop_front() {
+ // Don't duplicate packages if they are depended on in a diamond pattern
+ // N.B. if this line is omitted, we try to analyze over 4_800_000 crates
+ // which is not ideal
+ if rustc_pkg_crates.contains_key(&pkg) {
+ continue;
+ }
+ for dep in &rustc_workspace[pkg].dependencies {
+ queue.push_back(dep.pkg);
+ }
+
+ // Per-package copy so the selective overrides below don't leak.
+ let mut cfg_options = cfg_options.clone();
+
+ let overrides = match override_cfg {
+ CfgOverrides::Wildcard(cfg_diff) => Some(cfg_diff),
+ CfgOverrides::Selective(cfg_overrides) => {
+ cfg_overrides.get(&rustc_workspace[pkg].name)
+ }
+ };
+
+ if let Some(overrides) = overrides {
+ // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
+ // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
+ // working on rust-lang/rust as that's the only time it appears outside sysroot).
+ //
+ // A more ideal solution might be to reanalyze crates based on where the cursor is and
+ // figure out the set of cfgs that would have to apply to make it active.
+
+ cfg_options.apply_diff(overrides.clone());
+ };
+
+ // Only lib targets of rustc packages are of interest here.
+ for &tgt in rustc_workspace[pkg].targets.iter() {
+ if rustc_workspace[tgt].kind != TargetKind::Lib {
+ continue;
+ }
+ if let Some(file_id) = load(&rustc_workspace[tgt].root) {
+ let crate_id = add_target_crate_root(
+ crate_graph,
+ &rustc_workspace[pkg],
+ build_scripts.get_output(pkg),
+ cfg_options.clone(),
+ &mut |path| load_proc_macro(&rustc_workspace[tgt].name, path),
+ file_id,
+ &rustc_workspace[tgt].name,
+ rustc_workspace[tgt].is_proc_macro,
+ );
+ pkg_to_lib_crate.insert(pkg, crate_id);
+ // Add dependencies on core / std / alloc for this crate
+ public_deps.add(crate_id, crate_graph);
+ rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id);
+ }
+ }
+ }
+ }
+ // Now add a dep edge from all targets of upstream to the lib
+ // target of downstream.
+ for pkg in rustc_pkg_crates.keys().copied() {
+ for dep in rustc_workspace[pkg].dependencies.iter() {
+ let name = CrateName::new(&dep.name).unwrap();
+ if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
+ for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() {
+ add_dep(crate_graph, from, name.clone(), to);
+ }
+ }
+ }
+ }
+ // Add a dependency on the rustc_private crates for all targets of each package
+ // which opts in
+ for dep in rustc_workspace.packages() {
+ let name = CrateName::normalize_dashes(&rustc_workspace[dep].name);
+
+ if let Some(&to) = pkg_to_lib_crate.get(&dep) {
+ for pkg in cargo.packages() {
+ let package = &cargo[pkg];
+ if !package.metadata.rustc_private {
+ continue;
+ }
+ for (from, _) in pkg_crates.get(&pkg).into_iter().flatten() {
+ // Avoid creating duplicate dependencies
+ // This avoids the situation where `from` depends on e.g. `arrayvec`, but
+ // `rust_analyzer` thinks that it should use the one from the `rustc_source`
+ // instead of the one from `crates.io`
+ if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) {
+ add_dep(crate_graph, *from, name.clone(), to);
+ }
+ }
+ }
+ }
+ }
+}
+
+/// Registers one Cargo target as a crate root in the graph and returns its id.
+///
+/// Builds two cfg sets from the base `cfg_options`:
+/// - `potential_cfg_options`: base + a `feature = "…"` flag for *every* feature
+///   the package declares,
+/// - the actual `cfg_options`: base + only the *active* features, plus any cfgs
+///   emitted by the package's build script (when its output is available).
+///
+/// Proc-macro loading is attempted only if the build script produced a dylib
+/// path; otherwise an explanatory `Err` is stored.
+fn add_target_crate_root(
+ crate_graph: &mut CrateGraph,
+ pkg: &PackageData,
+ build_data: Option<&BuildScriptOutput>,
+ cfg_options: CfgOptions,
+ load_proc_macro: &mut dyn FnMut(&AbsPath) -> ProcMacroLoadResult,
+ file_id: FileId,
+ cargo_name: &str,
+ is_proc_macro: bool,
+) -> CrateId {
+ let edition = pkg.edition;
+ let mut potential_cfg_options = cfg_options.clone();
+ potential_cfg_options.extend(
+ pkg.features
+ .iter()
+ .map(|feat| CfgFlag::KeyValue { key: "feature".into(), value: feat.0.into() }),
+ );
+ let cfg_options = {
+ let mut opts = cfg_options;
+ for feature in pkg.active_features.iter() {
+ opts.insert_key_value("feature".into(), feature.into());
+ }
+ if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) {
+ opts.extend(cfgs.iter().cloned());
+ }
+ opts
+ };
+
+ // Start from the Cargo-compatible env vars, then layer build-script env on top.
+ let mut env = Env::default();
+ inject_cargo_env(pkg, &mut env);
+
+ if let Some(envs) = build_data.map(|it| &it.envs) {
+ for (k, v) in envs {
+ env.set(k, v.clone());
+ }
+ }
+
+ let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
+ Some(Some(it)) => load_proc_macro(it),
+ Some(None) => Err("no proc macro dylib present".into()),
+ None => Err("crate has not (yet) been built".into()),
+ };
+
+ let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string());
+ crate_graph.add_crate_root(
+ file_id,
+ edition,
+ Some(display_name),
+ Some(pkg.version.to_string()),
+ cfg_options,
+ potential_cfg_options,
+ env,
+ proc_macro,
+ is_proc_macro,
+ CrateOrigin::CratesIo { repo: pkg.repository.clone() },
+ )
+}
+
+/// The sysroot crates every workspace crate should implicitly depend on
+/// (e.g. core/std), as `(name, crate id, include-in-prelude)` triples.
+#[derive(Default)]
+struct SysrootPublicDeps {
+ deps: Vec<(CrateName, CrateId, bool)>,
+}
+
+impl SysrootPublicDeps {
+ /// Makes `from` depend on the public sysroot crates.
+ fn add(&self, from: CrateId, crate_graph: &mut CrateGraph) {
+ for (name, krate, prelude) in &self.deps {
+ add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude);
+ }
+ }
+}
+
+/// Adds every loadable sysroot crate to the graph and wires their internal
+/// dependency edges.
+///
+/// Returns the public sysroot deps (core/std/… with their prelude flags) and
+/// the `proc_macro` crate's id, if its source was found and loaded.
+fn sysroot_to_crate_graph(
+ crate_graph: &mut CrateGraph,
+ sysroot: &Sysroot,
+ rustc_cfg: Vec<CfgFlag>,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+) -> (SysrootPublicDeps, Option<CrateId>) {
+ let _p = profile::span("sysroot_to_crate_graph");
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+ // Crates whose root module can't be loaded are dropped by filter_map.
+ let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = sysroot
+ .crates()
+ .filter_map(|krate| {
+ let file_id = load(&sysroot[krate].root)?;
+
+ let env = Env::default();
+ let display_name = CrateDisplayName::from_canonical_name(sysroot[krate].name.clone());
+ let crate_id = crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ Some(display_name),
+ None,
+ cfg_options.clone(),
+ cfg_options.clone(),
+ env,
+ Err("no proc macro loaded for sysroot crate".into()),
+ false,
+ CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)),
+ );
+ Some((krate, crate_id))
+ })
+ .collect();
+
+ // Dep edges are only added when both endpoints were actually loaded.
+ for from in sysroot.crates() {
+ for &to in sysroot[from].deps.iter() {
+ let name = CrateName::new(&sysroot[to].name).unwrap();
+ if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) {
+ add_dep(crate_graph, from, name, to);
+ }
+ }
+ }
+
+ // NOTE(review): `sysroot_crates[&idx]` panics if a public dep failed to load
+ // above — presumably public deps are always present; verify against callers.
+ let public_deps = SysrootPublicDeps {
+ deps: sysroot
+ .public_deps()
+ .map(|(name, idx, prelude)| {
+ (CrateName::new(name).unwrap(), sysroot_crates[&idx], prelude)
+ })
+ .collect::<Vec<_>>(),
+ };
+
+ let libproc_macro = sysroot.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
+ (public_deps, libproc_macro)
+}
+
+/// Adds a dependency edge `from -> to` with default prelude behavior.
+fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) {
+ add_dep_inner(graph, from, Dependency::new(name, to))
+}
+
+/// Adds a dependency edge `from -> to`, explicitly controlling whether the dep
+/// is injected into `from`'s extern prelude.
+fn add_dep_with_prelude(
+ graph: &mut CrateGraph,
+ from: CrateId,
+ name: CrateName,
+ to: CrateId,
+ prelude: bool,
+) {
+ add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude))
+}
+
+/// Inserts the edge into the graph; a rejected edge (e.g. one the graph deems
+/// invalid) is logged rather than propagated, so graph construction continues.
+fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) {
+ if let Err(err) = graph.add_dep(from, dep) {
+ tracing::error!("{}", err)
+ }
+}
+
+/// Recreates the compile-time environment variables that Cargo sets.
+///
+/// Should be synced with
+/// <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+///
+/// FIXME: ask Cargo to provide this data instead of re-deriving.
+fn inject_cargo_env(package: &PackageData, env: &mut Env) {
+ // FIXME: Missing variables:
+ // CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
+
+ let manifest_dir = package.manifest.parent();
+ env.set("CARGO_MANIFEST_DIR", manifest_dir.as_os_str().to_string_lossy().into_owned());
+
+ // Not always right, but works for common cases.
+ env.set("CARGO", "cargo".into());
+
+ env.set("CARGO_PKG_VERSION", package.version.to_string());
+ env.set("CARGO_PKG_VERSION_MAJOR", package.version.major.to_string());
+ env.set("CARGO_PKG_VERSION_MINOR", package.version.minor.to_string());
+ env.set("CARGO_PKG_VERSION_PATCH", package.version.patch.to_string());
+ env.set("CARGO_PKG_VERSION_PRE", package.version.pre.to_string());
+
+ // Metadata-only variables below are set to "" rather than left unset, so
+ // code reading them via env! still compiles.
+ env.set("CARGO_PKG_AUTHORS", String::new());
+
+ env.set("CARGO_PKG_NAME", package.name.clone());
+ // FIXME: This isn't really correct (a package can have many crates with different names), but
+ // it's better than leaving the variable unset.
+ env.set("CARGO_CRATE_NAME", CrateName::normalize_dashes(&package.name).to_string());
+ env.set("CARGO_PKG_DESCRIPTION", String::new());
+ env.set("CARGO_PKG_HOMEPAGE", String::new());
+ env.set("CARGO_PKG_REPOSITORY", String::new());
+ env.set("CARGO_PKG_LICENSE", String::new());
+
+ env.set("CARGO_PKG_LICENSE_FILE", String::new());
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-metadata.json b/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-metadata.json
new file mode 100644
index 000000000..b6142eeaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-metadata.json
@@ -0,0 +1,245 @@
+{
+ "packages": [
+ {
+ "name": "hello-world",
+ "version": "0.1.0",
+ "id": "hello-world 0.1.0 (path+file://$ROOT$hello-world)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "libc",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.2",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "hello-world",
+ "src_path": "$ROOT$hello-world/src/lib.rs",
+ "edition": "2018",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ },
+ {
+ "kind": [
+ "bin"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "hello-world",
+ "src_path": "$ROOT$hello-world/src/main.rs",
+ "edition": "2018",
+ "doc": true,
+ "doctest": false,
+ "test": true
+ },
+ {
+ "kind": [
+ "example"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "an-example",
+ "src_path": "$ROOT$hello-world/examples/an-example.rs",
+ "edition": "2018",
+ "doc": false,
+ "doctest": false,
+ "test": false
+ },
+ {
+ "kind": [
+ "test"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "it",
+ "src_path": "$ROOT$hello-world/tests/it.rs",
+ "edition": "2018",
+ "doc": false,
+ "doctest": false,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "$ROOT$hello-world/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2018",
+ "links": null
+ },
+ {
+ "name": "libc",
+ "version": "0.2.98",
+ "id": "libc 0.2.98 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": "MIT OR Apache-2.0",
+ "license_file": null,
+ "description": "Raw FFI bindings to platform libraries like libc.\n",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [
+ {
+ "name": "rustc-std-workspace-core",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^1.0.0",
+ "kind": null,
+ "rename": null,
+ "optional": true,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "libc",
+ "src_path": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/src/lib.rs",
+ "edition": "2015",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ },
+ {
+ "kind": [
+ "test"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "const_fn",
+ "src_path": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/tests/const_fn.rs",
+ "edition": "2015",
+ "doc": false,
+ "doctest": false,
+ "test": true
+ },
+ {
+ "kind": [
+ "custom-build"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "build-script-build",
+ "src_path": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/build.rs",
+ "edition": "2015",
+ "doc": false,
+ "doctest": false,
+ "test": false
+ }
+ ],
+ "features": {
+ "align": [],
+ "const-extern-fn": [],
+ "default": [
+ "std"
+ ],
+ "extra_traits": [],
+ "rustc-dep-of-std": [
+ "align",
+ "rustc-std-workspace-core"
+ ],
+ "std": [],
+ "use_std": [
+ "std"
+ ]
+ },
+ "manifest_path": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [
+ "The Rust Project Developers"
+ ],
+ "categories": [
+ "external-ffi-bindings",
+ "no-std",
+ "os"
+ ],
+ "keywords": [
+ "libc",
+ "ffi",
+ "bindings",
+ "operating",
+ "system"
+ ],
+ "readme": "README.md",
+ "repository": "https://github.com/rust-lang/libc",
+ "homepage": "https://github.com/rust-lang/libc",
+ "documentation": "https://docs.rs/libc/",
+ "edition": "2015",
+ "links": null
+ }
+ ],
+ "workspace_members": [
+ "hello-world 0.1.0 (path+file://$ROOT$hello-world)"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "hello-world 0.1.0 (path+file://$ROOT$hello-world)",
+ "dependencies": [
+ "libc 0.2.98 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "name": "libc",
+ "pkg": "libc 0.2.98 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "libc 0.2.98 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": [
+ "default",
+ "std"
+ ]
+ }
+ ],
+ "root": "hello-world 0.1.0 (path+file://$ROOT$hello-world)"
+ },
+ "target_directory": "$ROOT$hello-world/target",
+ "version": 1,
+ "workspace_root": "$ROOT$hello-world",
+ "metadata": null
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-project.json b/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-project.json
new file mode 100644
index 000000000..b27ab1f42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-project.json
@@ -0,0 +1,12 @@
+{
+ "sysroot_src": null,
+ "crates": [
+ {
+ "display_name": "hello_world",
+ "root_module": "$ROOT$src/lib.rs",
+ "edition": "2018",
+ "deps": [],
+ "is_workspace_member": true
+ }
+ ]
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/is-proc-macro-project.json b/src/tools/rust-analyzer/crates/project-model/test_data/is-proc-macro-project.json
new file mode 100644
index 000000000..5d500a472
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/is-proc-macro-project.json
@@ -0,0 +1,13 @@
+{
+ "sysroot_src": null,
+ "crates": [
+ {
+ "display_name": "is_proc_macro",
+ "root_module": "$ROOT$src/lib.rs",
+ "edition": "2018",
+ "deps": [],
+ "is_workspace_member": true,
+ "is_proc_macro": true
+ }
+ ]
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
new file mode 100644
index 000000000..07771d1b3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
@@ -0,0 +1,92 @@
+[package]
+name = "rust-analyzer"
+version = "0.0.0"
+authors = ["rust-analyzer Team"]
+homepage = "https://github.com/rust-analyzer/rust-analyzer"
+description = "A language server for the Rust programming language"
+documentation = "https://rust-analyzer.github.io/manual.html"
+license = "MIT OR Apache-2.0"
+autobins = false
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[[bin]]
+name = "rust-analyzer"
+path = "src/bin/main.rs"
+
+[dependencies]
+anyhow = "1.0.57"
+crossbeam-channel = "0.5.5"
+dissimilar = "1.0.4"
+itertools = "0.10.3"
+lsp-types = { version = "0.93.0", features = ["proposed"] }
+parking_lot = "0.12.1"
+xflags = "0.2.4"
+oorandom = "11.1.3"
+rustc-hash = "1.1.0"
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = { version = "1.0.81", features = ["preserve_order"] }
+threadpool = "1.8.1"
+rayon = "1.5.3"
+num_cpus = "1.13.1"
+mimalloc = { version = "0.1.29", default-features = false, optional = true }
+lsp-server = { version = "0.6.0", path = "../../lib/lsp-server" }
+tracing = "0.1.35"
+tracing-subscriber = { version = "0.3.14", default-features = false, features = [
+ "env-filter",
+ "registry",
+ "fmt",
+ "tracing-log",
+] }
+tracing-log = "0.1.3"
+tracing-tree = "0.2.1"
+always-assert = "0.1.2"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+flycheck = { path = "../flycheck", version = "0.0.0" }
+ide = { path = "../ide", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+project-model = { path = "../project-model", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+vfs = { path = "../vfs", version = "0.0.0" }
+vfs-notify = { path = "../vfs-notify", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+
+# This should only be used in CLI
+ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-ty = { path = "../hir-ty", version = "0.0.0" }
+proc-macro-srv = { path = "../proc-macro-srv", version = "0.0.0" }
+
+[target.'cfg(windows)'.dependencies]
+winapi = "0.3.9"
+
+[target.'cfg(not(target_env = "msvc"))'.dependencies]
+jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = true }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+jod-thread = "0.1.2"
+xshell = "0.2.2"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+mbe = { path = "../mbe" }
+
+[features]
+jemalloc = ["jemallocator", "profile/jemalloc"]
+force-always-assert = ["always-assert/force"]
+in-rust-tree = [
+ "proc-macro-srv/sysroot-abi",
+ "sourcegen/in-rust-tree",
+ "ide/in-rust-tree",
+ "syntax/in-rust-tree"
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/build.rs b/src/tools/rust-analyzer/crates/rust-analyzer/build.rs
new file mode 100644
index 000000000..15935e2da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/build.rs
@@ -0,0 +1,50 @@
+//! Construct version in the `commit-hash date channel` format
+
+use std::{env, path::PathBuf, process::Command};
+
+/// Build-script entry point: registers rerun triggers, embeds git commit info,
+/// and marks non-release builds via the POKE_RA_DEVS env var.
+fn main() {
+ set_rerun();
+ set_commit_info();
+ // CFG_RELEASE unset means this is a dev build.
+ if option_env!("CFG_RELEASE").is_none() {
+ println!("cargo:rustc-env=POKE_RA_DEVS=1");
+ }
+}
+
+/// Tells cargo to rerun this build script when CFG_RELEASE changes or when the
+/// repository HEAD moves (so the embedded commit info stays fresh).
+fn set_rerun() {
+ println!("cargo:rerun-if-env-changed=CFG_RELEASE");
+
+ let mut manifest_dir = PathBuf::from(
+ env::var("CARGO_MANIFEST_DIR").expect("`CARGO_MANIFEST_DIR` is always set by cargo."),
+ );
+
+ // Walk up from the crate dir until a `.git/HEAD` is found; the crate may be
+ // nested several levels below the repository root.
+ while manifest_dir.parent().is_some() {
+ let head_ref = manifest_dir.join(".git/HEAD");
+ if head_ref.exists() {
+ println!("cargo:rerun-if-changed={}", head_ref.display());
+ return;
+ }
+
+ manifest_dir.pop();
+ }
+
+ // Not fatal: the build proceeds, just without a HEAD-based rerun trigger.
+ println!("cargo:warning=Could not find `.git/HEAD` from manifest dir!");
+}
+
+/// Embeds the current commit's full hash, short hash, and date as rustc env
+/// vars. Silently does nothing if `git` is unavailable or the command fails
+/// (e.g. building from a source tarball).
+fn set_commit_info() {
+ let output = match Command::new("git")
+ .arg("log")
+ .arg("-1")
+ .arg("--date=short")
+ .arg("--format=%H %h %cd")
+ .output()
+ {
+ Ok(output) if output.status.success() => output,
+ _ => return,
+ };
+ // Format string above guarantees three whitespace-separated fields.
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ let mut parts = stdout.split_whitespace();
+ let mut next = || parts.next().unwrap();
+ println!("cargo:rustc-env=RA_COMMIT_HASH={}", next());
+ println!("cargo:rustc-env=RA_COMMIT_SHORT_HASH={}", next());
+ println!("cargo:rustc-env=RA_COMMIT_DATE={}", next())
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
new file mode 100644
index 000000000..0b69f75bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
@@ -0,0 +1,155 @@
+//! Simple logger that logs either to stderr or to a file, using `tracing_subscriber`
+//! filter syntax and `tracing_appender` for non-blocking output.
+
+use std::{
+ fmt,
+ fs::File,
+ io::{self, Stderr},
+ sync::Arc,
+};
+
+use rust_analyzer::Result;
+use tracing::{level_filters::LevelFilter, Event, Subscriber};
+use tracing_log::NormalizeEvent;
+use tracing_subscriber::{
+ fmt::{
+ format::Writer, writer::BoxMakeWriter, FmtContext, FormatEvent, FormatFields,
+ FormattedFields, MakeWriter,
+ },
+ layer::SubscriberExt,
+ registry::LookupSpan,
+ util::SubscriberInitExt,
+ EnvFilter, Registry,
+};
+use tracing_tree::HierarchicalLayer;
+
+pub(crate) struct Logger {
+ filter: EnvFilter,
+ file: Option<File>,
+}
+
+struct MakeWriterStderr;
+
+impl<'a> MakeWriter<'a> for MakeWriterStderr {
+ type Writer = Stderr;
+
+ fn make_writer(&'a self) -> Self::Writer {
+ io::stderr()
+ }
+}
+
+impl Logger {
+    pub(crate) fn new(file: Option<File>, filter: Option<&str>) -> Logger {
+        let filter = filter.map_or(EnvFilter::default(), EnvFilter::new);
+
+        Logger { filter, file }
+    }
+
+    pub(crate) fn install(self) -> Result<()> {
+        // CHALK_DEBUG is presumably meant to tell the chalk crates
+        // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing
+        // logs. But we can only have a single filter, which means we have to
+        // merge the chalk filter into our main filter (from the RA_LOG env var).
+        //
+        // The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`.
+        // As the value should only affect the chalk crates, we manually
+        // specify the target. For simplicity, CHALK_DEBUG only accepts a value
+        // that specifies the level.
+        let chalk_level_dir = std::env::var("CHALK_DEBUG")
+            .map(|val| {
+                val.parse::<LevelFilter>().expect(
+                    "invalid CHALK_DEBUG value, expect right log level (like debug or trace)",
+                )
+            })
+            .ok();
+
+        let chalk_layer = HierarchicalLayer::default()
+            .with_indent_lines(true)
+            .with_ansi(false)
+            .with_indent_amount(2)
+            .with_writer(io::stderr);
+
+        let writer = match self.file {
+            Some(file) => BoxMakeWriter::new(Arc::new(file)),
+            None => BoxMakeWriter::new(io::stderr),
+        };
+        let ra_fmt_layer =
+            tracing_subscriber::fmt::layer().event_format(LoggerFormatter).with_writer(writer);
+
+        match chalk_level_dir {
+            Some(val) => {
+                Registry::default()
+                    .with(
+                        self.filter
+                            .add_directive(format!("chalk_solve={}", val).parse()?)
+                            .add_directive(format!("chalk_ir={}", val).parse()?)
+                            .add_directive(format!("chalk_recursive={}", val).parse()?),
+                    )
+                    .with(ra_fmt_layer)
+                    .with(chalk_layer)
+                    .init();
+            }
+            None => {
+                Registry::default().with(self.filter).with(ra_fmt_layer).init();
+            }
+        };
+
+        Ok(())
+    }
+}
+
+#[derive(Debug)]
+struct LoggerFormatter;
+
+impl<S, N> FormatEvent<S, N> for LoggerFormatter
+where
+    S: Subscriber + for<'a> LookupSpan<'a>,
+    N: for<'a> FormatFields<'a> + 'static,
+{
+    fn format_event(
+        &self,
+        ctx: &FmtContext<'_, S, N>,
+        mut writer: Writer<'_>,
+        event: &Event<'_>,
+    ) -> fmt::Result {
+        // Write level and target
+        let level = *event.metadata().level();
+
+        // If this event is issued from `log` crate, then the value of target is
+        // always "log". `tracing-log` has hard coded it for some reason, so we
+        // need to extract it using `normalized_metadata` method which is part of
+        // `tracing_log::NormalizeEvent`.
+        let target = match event.normalized_metadata() {
+            // This event is issued from `log` crate
+            Some(log) => log.target(),
+            None => event.metadata().target(),
+        };
+        write!(writer, "[{} {}] ", level, target)?;
+
+        // Write spans and fields of each span
+        ctx.visit_spans(|span| {
+            write!(writer, "{}", span.name())?;
+
+            let ext = span.extensions();
+
+            // `FormattedFields` is a formatted representation of the span's
+            // fields, which is stored in its extensions by the `fmt` layer's
+            // `new_span` method. The fields will have been formatted
+            // by the same field formatter that's provided to the event
+            // formatter in the `FmtContext`.
+            let fields = &ext.get::<FormattedFields<N>>().expect("will never be `None`");
+
+            if !fields.is_empty() {
+                write!(writer, "{{{}}}", fields)?;
+            }
+            write!(writer, ": ")?;
+
+            Ok(())
+        })?;
+
+        // Write fields on the event
+        ctx.field_format().format_fields(writer.by_ref(), event)?;
+
+        writeln!(writer)
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
new file mode 100644
index 000000000..e9de23cb3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -0,0 +1,239 @@
+//! Driver for rust-analyzer.
+//!
+//! Based on cli flags, either spawns an LSP server, or runs a batch analysis
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod logger;
+mod rustc_wrapper;
+
+use std::{env, fs, path::Path, process};
+
+use lsp_server::Connection;
+use project_model::ProjectManifest;
+use rust_analyzer::{cli::flags, config::Config, from_json, lsp_ext::supports_utf8, Result};
+use vfs::AbsPathBuf;
+
+#[cfg(all(feature = "mimalloc"))]
+#[global_allocator]
+static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
+
+#[cfg(all(feature = "jemalloc", not(target_env = "msvc")))]
+#[global_allocator]
+static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
+
+fn main() {
+ if std::env::var("RA_RUSTC_WRAPPER").is_ok() {
+ let mut args = std::env::args_os();
+ let _me = args.next().unwrap();
+ let rustc = args.next().unwrap();
+ let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) {
+ Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102),
+ Err(err) => {
+ eprintln!("{}", err);
+ 101
+ }
+ };
+ process::exit(code);
+ }
+
+ if let Err(err) = try_main() {
+ tracing::error!("Unexpected error: {}", err);
+ eprintln!("{}", err);
+ process::exit(101);
+ }
+}
+
+fn try_main() -> Result<()> {
+ let flags = flags::RustAnalyzer::from_env()?;
+
+ #[cfg(debug_assertions)]
+ if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
+ #[allow(unused_mut)]
+ let mut d = 4;
+ while d == 4 {
+ d = 4;
+ }
+ }
+
+ let mut log_file = flags.log_file.as_deref();
+
+ let env_log_file = env::var("RA_LOG_FILE").ok();
+ if let Some(env_log_file) = env_log_file.as_deref() {
+ log_file = Some(Path::new(env_log_file));
+ }
+
+ setup_logging(log_file)?;
+ let verbosity = flags.verbosity();
+
+ match flags.subcommand {
+ flags::RustAnalyzerCmd::LspServer(cmd) => {
+ if cmd.print_config_schema {
+ println!("{:#}", Config::json_schema());
+ return Ok(());
+ }
+ if cmd.version {
+ println!("rust-analyzer {}", rust_analyzer::version());
+ return Ok(());
+ }
+ if cmd.help {
+ println!("{}", flags::RustAnalyzer::HELP);
+ return Ok(());
+ }
+ with_extra_thread("LspServer", run_server)?;
+ }
+ flags::RustAnalyzerCmd::ProcMacro(flags::ProcMacro) => {
+ with_extra_thread("MacroExpander", || proc_macro_srv::cli::run().map_err(Into::into))?;
+ }
+ flags::RustAnalyzerCmd::Parse(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Symbols(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Highlight(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::AnalysisStats(cmd) => cmd.run(verbosity)?,
+ flags::RustAnalyzerCmd::Diagnostics(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Ssr(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
+ }
+ Ok(())
+}
+
+fn setup_logging(log_file: Option<&Path>) -> Result<()> {
+ if cfg!(windows) {
+ // This is required so that windows finds our pdb that is placed right beside the exe.
+ // By default it doesn't look at the folder the exe resides in, only in the current working
+ // directory which we set to the project workspace.
+ // https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/general-environment-variables
+ // https://docs.microsoft.com/en-us/windows/win32/api/dbghelp/nf-dbghelp-syminitialize
+ if let Ok(path) = env::current_exe() {
+ if let Some(path) = path.parent() {
+ env::set_var("_NT_SYMBOL_PATH", path);
+ }
+ }
+ }
+ if env::var("RUST_BACKTRACE").is_err() {
+ env::set_var("RUST_BACKTRACE", "short");
+ }
+
+ let log_file = match log_file {
+ Some(path) => {
+ if let Some(parent) = path.parent() {
+ let _ = fs::create_dir_all(parent);
+ }
+ Some(fs::File::create(path)?)
+ }
+ None => None,
+ };
+ let filter = env::var("RA_LOG").ok();
+ // deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually useful
+ // information in there for debugging
+ logger::Logger::new(log_file, filter.as_deref().or(Some("error"))).install()?;
+
+ profile::init();
+
+ Ok(())
+}
+
+const STACK_SIZE: usize = 1024 * 1024 * 8;
+
+/// Parts of rust-analyzer can use a lot of stack space, and some operating systems only give us
+/// 1 MB by default (eg. Windows), so this spawns a new thread with hopefully sufficient stack
+/// space.
+fn with_extra_thread(
+ thread_name: impl Into<String>,
+ f: impl FnOnce() -> Result<()> + Send + 'static,
+) -> Result<()> {
+ let handle =
+ std::thread::Builder::new().name(thread_name.into()).stack_size(STACK_SIZE).spawn(f)?;
+ match handle.join() {
+ Ok(res) => res,
+ Err(panic) => std::panic::resume_unwind(panic),
+ }
+}
+
+fn run_server() -> Result<()> {
+ tracing::info!("server version {} will start", rust_analyzer::version());
+
+ let (connection, io_threads) = Connection::stdio();
+
+ let (initialize_id, initialize_params) = connection.initialize_start()?;
+ tracing::info!("InitializeParams: {}", initialize_params);
+ let initialize_params =
+ from_json::<lsp_types::InitializeParams>("InitializeParams", &initialize_params)?;
+
+ let root_path = match initialize_params
+ .root_uri
+ .and_then(|it| it.to_file_path().ok())
+ .and_then(|it| AbsPathBuf::try_from(it).ok())
+ {
+ Some(it) => it,
+ None => {
+ let cwd = env::current_dir()?;
+ AbsPathBuf::assert(cwd)
+ }
+ };
+
+ let mut config = Config::new(root_path, initialize_params.capabilities);
+ if let Some(json) = initialize_params.initialization_options {
+ if let Err(e) = config.update(json) {
+ use lsp_types::{
+ notification::{Notification, ShowMessage},
+ MessageType, ShowMessageParams,
+ };
+ let not = lsp_server::Notification::new(
+ ShowMessage::METHOD.to_string(),
+ ShowMessageParams { typ: MessageType::WARNING, message: e.to_string() },
+ );
+ connection.sender.send(lsp_server::Message::Notification(not)).unwrap();
+ }
+ }
+
+ let server_capabilities = rust_analyzer::server_capabilities(&config);
+
+ let initialize_result = lsp_types::InitializeResult {
+ capabilities: server_capabilities,
+ server_info: Some(lsp_types::ServerInfo {
+ name: String::from("rust-analyzer"),
+ version: Some(rust_analyzer::version().to_string()),
+ }),
+ offset_encoding: if supports_utf8(config.caps()) {
+ Some("utf-8".to_string())
+ } else {
+ None
+ },
+ };
+
+ let initialize_result = serde_json::to_value(initialize_result).unwrap();
+
+ connection.initialize_finish(initialize_id, initialize_result)?;
+
+ if let Some(client_info) = initialize_params.client_info {
+ tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
+ }
+
+ if config.linked_projects().is_empty() && config.detached_files().is_empty() {
+ let workspace_roots = initialize_params
+ .workspace_folders
+ .map(|workspaces| {
+ workspaces
+ .into_iter()
+ .filter_map(|it| it.uri.to_file_path().ok())
+ .filter_map(|it| AbsPathBuf::try_from(it).ok())
+ .collect::<Vec<_>>()
+ })
+ .filter(|workspaces| !workspaces.is_empty())
+ .unwrap_or_else(|| vec![config.root_path().clone()]);
+
+ let discovered = ProjectManifest::discover_all(&workspace_roots);
+ tracing::info!("discovered projects: {:?}", discovered);
+ if discovered.is_empty() {
+ tracing::error!("failed to find any projects in {:?}", workspace_roots);
+ }
+ config.discovered_projects = Some(discovered);
+ }
+
+ rust_analyzer::main_loop(config, connection)?;
+
+ io_threads.join()?;
+ tracing::info!("server did shut down");
+ Ok(())
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/rustc_wrapper.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/rustc_wrapper.rs
new file mode 100644
index 000000000..2f6d4706d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/rustc_wrapper.rs
@@ -0,0 +1,46 @@
+//! We set up RUSTC_WRAPPER to point to the `rust-analyzer` binary itself during the
+//! initial `cargo check`. That way, we avoid checking the actual project, and
+//! only build proc macros and build.rs.
+//!
+//! Code taken from IntelliJ :0)
+//! https://github.com/intellij-rust/intellij-rust/blob/master/native-helper/src/main.rs
+use std::{
+ ffi::OsString,
+ io,
+ process::{Command, Stdio},
+};
+
+/// ExitCode/ExitStatus are impossible to create :(.
+pub(crate) struct ExitCode(pub(crate) Option<i32>);
+
+pub(crate) fn run_rustc_skipping_cargo_checking(
+ rustc_executable: OsString,
+ args: Vec<OsString>,
+) -> io::Result<ExitCode> {
+ let is_cargo_check = args.iter().any(|arg| {
+ let arg = arg.to_string_lossy();
+ // `cargo check` invokes `rustc` with `--emit=metadata` argument.
+ //
+ // https://doc.rust-lang.org/rustc/command-line-arguments.html#--emit-specifies-the-types-of-output-files-to-generate
+ // link — Generates the crates specified by --crate-type. The default
+ // output filenames depend on the crate type and platform. This
+ // is the default if --emit is not specified.
+ // metadata — Generates a file containing metadata about the crate.
+ // The default output filename is CRATE_NAME.rmeta.
+ arg.starts_with("--emit=") && arg.contains("metadata") && !arg.contains("link")
+ });
+ if is_cargo_check {
+ return Ok(ExitCode(Some(0)));
+ }
+ run_rustc(rustc_executable, args)
+}
+
+fn run_rustc(rustc_executable: OsString, args: Vec<OsString>) -> io::Result<ExitCode> {
+ let mut child = Command::new(rustc_executable)
+ .args(args)
+ .stdin(Stdio::inherit())
+ .stdout(Stdio::inherit())
+ .stderr(Stdio::inherit())
+ .spawn()?;
+ Ok(ExitCode(child.wait()?.code()))
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs
new file mode 100644
index 000000000..cda95cd86
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs
@@ -0,0 +1,210 @@
+//! Advertises the capabilities of the LSP Server.
+use lsp_types::{
+ CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions,
+ CodeActionProviderCapability, CodeLensOptions, CompletionOptions,
+ CompletionOptionsCompletionItem, DeclarationCapability, DocumentOnTypeFormattingOptions,
+ FileOperationFilter, FileOperationPattern, FileOperationPatternKind,
+ FileOperationRegistrationOptions, FoldingRangeProviderCapability, HoverProviderCapability,
+ ImplementationProviderCapability, InlayHintOptions, InlayHintServerCapabilities, OneOf,
+ RenameOptions, SaveOptions, SelectionRangeProviderCapability, SemanticTokensFullOptions,
+ SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions,
+ TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
+ TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+ WorkspaceFileOperationsServerCapabilities, WorkspaceServerCapabilities,
+};
+use serde_json::json;
+
+use crate::config::{Config, RustfmtConfig};
+use crate::semantic_tokens;
+
+pub fn server_capabilities(config: &Config) -> ServerCapabilities {
+ ServerCapabilities {
+ text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
+ open_close: Some(true),
+ change: Some(TextDocumentSyncKind::INCREMENTAL),
+ will_save: None,
+ will_save_wait_until: None,
+ save: Some(SaveOptions::default().into()),
+ })),
+ hover_provider: Some(HoverProviderCapability::Simple(true)),
+ completion_provider: Some(CompletionOptions {
+ resolve_provider: completions_resolve_provider(config.caps()),
+ trigger_characters: Some(vec![
+ ":".to_string(),
+ ".".to_string(),
+ "'".to_string(),
+ "(".to_string(),
+ ]),
+ all_commit_characters: None,
+ completion_item: completion_item(&config),
+ work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
+ }),
+ signature_help_provider: Some(SignatureHelpOptions {
+ trigger_characters: Some(vec!["(".to_string(), ",".to_string(), "<".to_string()]),
+ retrigger_characters: None,
+ work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
+ }),
+ declaration_provider: Some(DeclarationCapability::Simple(true)),
+ definition_provider: Some(OneOf::Left(true)),
+ type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)),
+ implementation_provider: Some(ImplementationProviderCapability::Simple(true)),
+ references_provider: Some(OneOf::Left(true)),
+ document_highlight_provider: Some(OneOf::Left(true)),
+ document_symbol_provider: Some(OneOf::Left(true)),
+ workspace_symbol_provider: Some(OneOf::Left(true)),
+ code_action_provider: Some(code_action_capabilities(config.caps())),
+ code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }),
+ document_formatting_provider: Some(OneOf::Left(true)),
+ document_range_formatting_provider: match config.rustfmt() {
+ RustfmtConfig::Rustfmt { enable_range_formatting: true, .. } => Some(OneOf::Left(true)),
+ _ => Some(OneOf::Left(false)),
+ },
+ document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
+ first_trigger_character: "=".to_string(),
+ more_trigger_character: Some(more_trigger_character(&config)),
+ }),
+ selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
+ folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
+ rename_provider: Some(OneOf::Right(RenameOptions {
+ prepare_provider: Some(true),
+ work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
+ })),
+ linked_editing_range_provider: None,
+ document_link_provider: None,
+ color_provider: None,
+ execute_command_provider: None,
+ workspace: Some(WorkspaceServerCapabilities {
+ workspace_folders: None,
+ file_operations: Some(WorkspaceFileOperationsServerCapabilities {
+ did_create: None,
+ will_create: None,
+ did_rename: None,
+ will_rename: Some(FileOperationRegistrationOptions {
+ filters: vec![
+ FileOperationFilter {
+ scheme: Some(String::from("file")),
+ pattern: FileOperationPattern {
+ glob: String::from("**/*.rs"),
+ matches: Some(FileOperationPatternKind::File),
+ options: None,
+ },
+ },
+ FileOperationFilter {
+ scheme: Some(String::from("file")),
+ pattern: FileOperationPattern {
+ glob: String::from("**"),
+ matches: Some(FileOperationPatternKind::Folder),
+ options: None,
+ },
+ },
+ ],
+ }),
+ did_delete: None,
+ will_delete: None,
+ }),
+ }),
+ call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
+ semantic_tokens_provider: Some(
+ SemanticTokensOptions {
+ legend: SemanticTokensLegend {
+ token_types: semantic_tokens::SUPPORTED_TYPES.to_vec(),
+ token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(),
+ },
+
+ full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
+ range: Some(true),
+ work_done_progress_options: Default::default(),
+ }
+ .into(),
+ ),
+ moniker_provider: None,
+ inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options(
+ InlayHintOptions {
+ work_done_progress_options: Default::default(),
+ resolve_provider: Some(true),
+ },
+ ))),
+ experimental: Some(json!({
+ "externalDocs": true,
+ "hoverRange": true,
+ "joinLines": true,
+ "matchingBrace": true,
+ "moveItem": true,
+ "onEnter": true,
+ "openCargoToml": true,
+ "parentModule": true,
+ "runnables": {
+ "kinds": [ "cargo" ],
+ },
+ "ssr": true,
+ "workspaceSymbolScopeKindFiltering": true,
+ })),
+ }
+}
+
+fn completions_resolve_provider(client_caps: &ClientCapabilities) -> Option<bool> {
+ if completion_item_edit_resolve(client_caps) {
+ Some(true)
+ } else {
+ tracing::info!("No `additionalTextEdits` completion resolve capability was found in the client capabilities, autoimport completion is disabled");
+ None
+ }
+}
+
+/// Parses client capabilities and returns all completion resolve capabilities rust-analyzer supports.
+pub(crate) fn completion_item_edit_resolve(caps: &ClientCapabilities) -> bool {
+ (|| {
+ Some(
+ caps.text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .resolve_support
+ .as_ref()?
+ .properties
+ .iter()
+ .any(|cap_string| cap_string.as_str() == "additionalTextEdits"),
+ )
+ })() == Some(true)
+}
+
+fn completion_item(config: &Config) -> Option<CompletionOptionsCompletionItem> {
+ Some(CompletionOptionsCompletionItem {
+ label_details_support: Some(config.completion_label_details_support()),
+ })
+}
+
+fn code_action_capabilities(client_caps: &ClientCapabilities) -> CodeActionProviderCapability {
+ client_caps
+ .text_document
+ .as_ref()
+ .and_then(|it| it.code_action.as_ref())
+ .and_then(|it| it.code_action_literal_support.as_ref())
+ .map_or(CodeActionProviderCapability::Simple(true), |_| {
+ CodeActionProviderCapability::Options(CodeActionOptions {
+ // Advertise support for all built-in CodeActionKinds.
+ // Ideally we would base this off of the client capabilities
+ // but the client is supposed to fall back gracefully for unknown values.
+ code_action_kinds: Some(vec![
+ CodeActionKind::EMPTY,
+ CodeActionKind::QUICKFIX,
+ CodeActionKind::REFACTOR,
+ CodeActionKind::REFACTOR_EXTRACT,
+ CodeActionKind::REFACTOR_INLINE,
+ CodeActionKind::REFACTOR_REWRITE,
+ ]),
+ resolve_provider: Some(true),
+ work_done_progress_options: Default::default(),
+ })
+ })
+}
+
+fn more_trigger_character(config: &Config) -> Vec<String> {
+ let mut res = vec![".".to_string(), ">".to_string(), "{".to_string()];
+ if config.snippet_cap() {
+ res.push("<".to_string());
+ }
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
new file mode 100644
index 000000000..1c39e9391
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -0,0 +1,228 @@
+//! See `CargoTargetSpec`
+
+use std::mem;
+
+use cfg::{CfgAtom, CfgExpr};
+use ide::{FileId, RunnableKind, TestId};
+use project_model::{self, ManifestPath, TargetKind};
+use vfs::AbsPathBuf;
+
+use crate::{global_state::GlobalStateSnapshot, Result};
+
+/// Abstract representation of Cargo target.
+///
+/// We use it to cook up the set of cli args we need to pass to Cargo to
+/// build/test/run the target.
+#[derive(Clone)]
+pub(crate) struct CargoTargetSpec {
+ pub(crate) workspace_root: AbsPathBuf,
+ pub(crate) cargo_toml: ManifestPath,
+ pub(crate) package: String,
+ pub(crate) target: String,
+ pub(crate) target_kind: TargetKind,
+ pub(crate) required_features: Vec<String>,
+}
+
+impl CargoTargetSpec {
+ pub(crate) fn runnable_args(
+ snap: &GlobalStateSnapshot,
+ spec: Option<CargoTargetSpec>,
+ kind: &RunnableKind,
+ cfg: &Option<CfgExpr>,
+ ) -> Result<(Vec<String>, Vec<String>)> {
+ let mut args = Vec::new();
+ let mut extra_args = Vec::new();
+
+ match kind {
+ RunnableKind::Test { test_id, attr } => {
+ args.push("test".to_string());
+ extra_args.push(test_id.to_string());
+ if let TestId::Path(_) = test_id {
+ extra_args.push("--exact".to_string());
+ }
+ extra_args.push("--nocapture".to_string());
+ if attr.ignore {
+ extra_args.push("--ignored".to_string());
+ }
+ }
+ RunnableKind::TestMod { path } => {
+ args.push("test".to_string());
+ extra_args.push(path.to_string());
+ extra_args.push("--nocapture".to_string());
+ }
+ RunnableKind::Bench { test_id } => {
+ args.push("bench".to_string());
+ extra_args.push(test_id.to_string());
+ if let TestId::Path(_) = test_id {
+ extra_args.push("--exact".to_string());
+ }
+ extra_args.push("--nocapture".to_string());
+ }
+ RunnableKind::DocTest { test_id } => {
+ args.push("test".to_string());
+ args.push("--doc".to_string());
+ extra_args.push(test_id.to_string());
+ extra_args.push("--nocapture".to_string());
+ }
+ RunnableKind::Bin => {
+ let subcommand = match spec {
+ Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => "test",
+ _ => "run",
+ };
+ args.push(subcommand.to_string());
+ }
+ }
+
+ let target_required_features = if let Some(mut spec) = spec {
+ let required_features = mem::take(&mut spec.required_features);
+ spec.push_to(&mut args, kind);
+ required_features
+ } else {
+ Vec::new()
+ };
+
+ let cargo_config = snap.config.cargo();
+ if cargo_config.all_features {
+ args.push("--all-features".to_string());
+
+ for feature in target_required_features {
+ args.push("--features".to_string());
+ args.push(feature);
+ }
+ } else {
+ let mut features = Vec::new();
+ if let Some(cfg) = cfg.as_ref() {
+ required_features(cfg, &mut features);
+ }
+
+ features.extend(cargo_config.features);
+ features.extend(target_required_features);
+
+ features.dedup();
+ for feature in features {
+ args.push("--features".to_string());
+ args.push(feature);
+ }
+ }
+
+ Ok((args, extra_args))
+ }
+
+ pub(crate) fn for_file(
+ global_state_snapshot: &GlobalStateSnapshot,
+ file_id: FileId,
+ ) -> Result<Option<CargoTargetSpec>> {
+ let crate_id = match &*global_state_snapshot.analysis.crate_for(file_id)? {
+ &[crate_id, ..] => crate_id,
+ _ => return Ok(None),
+ };
+ let (cargo_ws, target) = match global_state_snapshot.cargo_target_for_crate_root(crate_id) {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ let target_data = &cargo_ws[target];
+ let package_data = &cargo_ws[target_data.package];
+ let res = CargoTargetSpec {
+ workspace_root: cargo_ws.workspace_root().to_path_buf(),
+ cargo_toml: package_data.manifest.clone(),
+ package: cargo_ws.package_flag(package_data),
+ target: target_data.name.clone(),
+ target_kind: target_data.kind,
+ required_features: target_data.required_features.clone(),
+ };
+
+ Ok(Some(res))
+ }
+
+ pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
+ buf.push("--package".to_string());
+ buf.push(self.package);
+
+ // Can't mix --doc with other target flags
+ if let RunnableKind::DocTest { .. } = kind {
+ return;
+ }
+ match self.target_kind {
+ TargetKind::Bin => {
+ buf.push("--bin".to_string());
+ buf.push(self.target);
+ }
+ TargetKind::Test => {
+ buf.push("--test".to_string());
+ buf.push(self.target);
+ }
+ TargetKind::Bench => {
+ buf.push("--bench".to_string());
+ buf.push(self.target);
+ }
+ TargetKind::Example => {
+ buf.push("--example".to_string());
+ buf.push(self.target);
+ }
+ TargetKind::Lib => {
+ buf.push("--lib".to_string());
+ }
+ TargetKind::Other | TargetKind::BuildScript => (),
+ }
+ }
+}
+
+/// Fill minimal features needed
+fn required_features(cfg_expr: &CfgExpr, features: &mut Vec<String>) {
+ match cfg_expr {
+ CfgExpr::Atom(CfgAtom::KeyValue { key, value }) if key == "feature" => {
+ features.push(value.to_string())
+ }
+ CfgExpr::All(preds) => {
+ preds.iter().for_each(|cfg| required_features(cfg, features));
+ }
+ CfgExpr::Any(preds) => {
+ for cfg in preds {
+ let len_features = features.len();
+ required_features(cfg, features);
+ if len_features != features.len() {
+ break;
+ }
+ }
+ }
+ _ => {}
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use cfg::CfgExpr;
+ use mbe::syntax_node_to_token_tree;
+ use syntax::{
+ ast::{self, AstNode},
+ SmolStr,
+ };
+
+ fn check(cfg: &str, expected_features: &[&str]) {
+ let cfg_expr = {
+ let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let (tt, _) = syntax_node_to_token_tree(tt.syntax());
+ CfgExpr::parse(&tt)
+ };
+
+ let mut features = vec![];
+ required_features(&cfg_expr, &mut features);
+
+ let expected_features =
+ expected_features.iter().map(|&it| SmolStr::new(it)).collect::<Vec<_>>();
+
+ assert_eq!(features, expected_features);
+ }
+
+ #[test]
+ fn test_cfg_expr_minimal_features_needed() {
+ check(r#"#![cfg(feature = "baz")]"#, &["baz"]);
+ check(r#"#![cfg(all(feature = "baz", feature = "foo"))]"#, &["baz", "foo"]);
+ check(r#"#![cfg(any(feature = "baz", feature = "foo", unix))]"#, &["baz"]);
+ check(r#"#![cfg(foo)]"#, &[]);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
new file mode 100644
index 000000000..6ccdaa86d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
@@ -0,0 +1,69 @@
+//! Various batch processing tasks, intended primarily for debugging.
+
+pub mod flags;
+pub mod load_cargo;
+mod parse;
+mod symbols;
+mod highlight;
+mod analysis_stats;
+mod diagnostics;
+mod ssr;
+mod lsif;
+
+mod progress_report;
+
+use std::io::Read;
+
+use anyhow::Result;
+use ide::AnalysisHost;
+use vfs::Vfs;
+
+#[derive(Clone, Copy)]
+pub enum Verbosity {
+ Spammy,
+ Verbose,
+ Normal,
+ Quiet,
+}
+
+impl Verbosity {
+ pub fn is_verbose(self) -> bool {
+ matches!(self, Verbosity::Verbose | Verbosity::Spammy)
+ }
+ pub fn is_spammy(self) -> bool {
+ matches!(self, Verbosity::Spammy)
+ }
+}
+
+fn read_stdin() -> Result<String> {
+ let mut buff = String::new();
+ std::io::stdin().read_to_string(&mut buff)?;
+ Ok(buff)
+}
+
+fn report_metric(metric: &str, value: u64, unit: &str) {
+ if std::env::var("RA_METRICS").is_err() {
+ return;
+ }
+ println!("METRIC:{}:{}:{}", metric, value, unit)
+}
+
+fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
+ let mut mem = host.per_query_memory_usage();
+
+ let before = profile::memory_usage();
+ drop(vfs);
+ let vfs = before.allocated - profile::memory_usage().allocated;
+ mem.push(("VFS".into(), vfs));
+
+ let before = profile::memory_usage();
+ drop(host);
+ mem.push(("Unaccounted".into(), before.allocated - profile::memory_usage().allocated));
+
+ mem.push(("Remaining".into(), profile::memory_usage().allocated));
+
+ for (name, bytes) in mem {
+ // NOTE: Not a debug print, so avoid going through the `eprintln` defined above.
+ eprintln!("{:>8} {}", bytes, name);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
new file mode 100644
index 000000000..f52e1e751
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -0,0 +1,447 @@
+//! Fully type-check project and print various stats, like the number of type
+//! errors.
+
+use std::{
+ env,
+ time::{SystemTime, UNIX_EPOCH},
+};
+
+use hir::{
+ db::{AstDatabase, DefDatabase, HirDatabase},
+ AssocItem, Crate, Function, HasSource, HirDisplay, ModuleDef,
+};
+use hir_def::{
+ body::{BodySourceMap, SyntheticSyntax},
+ expr::ExprId,
+ FunctionId,
+};
+use hir_ty::{TyExt, TypeWalk};
+use ide::{Analysis, AnalysisHost, LineCol, RootDatabase};
+use ide_db::base_db::{
+ salsa::{self, debug::DebugQueryTable, ParallelDatabase},
+ SourceDatabase, SourceDatabaseExt,
+};
+use itertools::Itertools;
+use oorandom::Rand32;
+use profile::{Bytes, StopWatch};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use rayon::prelude::*;
+use rustc_hash::FxHashSet;
+use stdx::format_to;
+use syntax::{AstNode, SyntaxNode};
+use vfs::{AbsPathBuf, Vfs, VfsPath};
+
+use crate::cli::{
+ flags::{self, OutputFormat},
+ load_cargo::{load_workspace, LoadCargoConfig},
+ print_memory_usage,
+ progress_report::ProgressReport,
+ report_metric, Result, Verbosity,
+};
+
+/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
+struct Snap<DB>(DB);
+impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
+ fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
+ Snap(self.0.snapshot())
+ }
+}
+
+impl flags::AnalysisStats {
+ pub fn run(self, verbosity: Verbosity) -> Result<()> {
+ let mut rng = {
+ let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
+ Rand32::new(seed)
+ };
+
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.no_sysroot = self.no_sysroot;
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: !self.disable_build_scripts,
+ with_proc_macro: !self.disable_proc_macros,
+ prefill_caches: false,
+ };
+ let no_progress = &|_| ();
+
+ let mut db_load_sw = self.stop_watch();
+
+ let path = AbsPathBuf::assert(env::current_dir()?.join(&self.path));
+ let manifest = ProjectManifest::discover_single(&path)?;
+
+ let mut workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+ let metadata_time = db_load_sw.elapsed();
+
+ let build_scripts_time = if self.disable_build_scripts {
+ None
+ } else {
+ let mut build_scripts_sw = self.stop_watch();
+ let bs = workspace.run_build_scripts(&cargo_config, no_progress)?;
+ workspace.set_build_scripts(bs);
+ Some(build_scripts_sw.elapsed())
+ };
+
+ let (host, vfs, _proc_macro) = load_workspace(workspace, &load_cargo_config)?;
+ let db = host.raw_database();
+ eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
+ eprint!(" (metadata {}", metadata_time);
+ if let Some(build_scripts_time) = build_scripts_time {
+ eprint!("; build {}", build_scripts_time);
+ }
+ eprintln!(")");
+
+ let mut analysis_sw = self.stop_watch();
+ let mut num_crates = 0;
+ let mut visited_modules = FxHashSet::default();
+ let mut visit_queue = Vec::new();
+
+ let mut krates = Crate::all(db);
+ if self.randomize {
+ shuffle(&mut rng, &mut krates);
+ }
+ for krate in krates {
+ let module = krate.root_module(db);
+ let file_id = module.definition_source(db).file_id;
+ let file_id = file_id.original_file(db);
+ let source_root = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root);
+ if !source_root.is_library || self.with_deps {
+ num_crates += 1;
+ visit_queue.push(module);
+ }
+ }
+
+ if self.randomize {
+ shuffle(&mut rng, &mut visit_queue);
+ }
+
+ eprint!(" crates: {}", num_crates);
+ let mut num_decls = 0;
+ let mut funcs = Vec::new();
+ while let Some(module) = visit_queue.pop() {
+ if visited_modules.insert(module) {
+ visit_queue.extend(module.children(db));
+
+ for decl in module.declarations(db) {
+ num_decls += 1;
+ if let ModuleDef::Function(f) = decl {
+ funcs.push(f);
+ }
+ }
+
+ for impl_def in module.impl_defs(db) {
+ for item in impl_def.items(db) {
+ num_decls += 1;
+ if let AssocItem::Function(f) = item {
+ funcs.push(f);
+ }
+ }
+ }
+ }
+ }
+ eprintln!(", mods: {}, decls: {}, fns: {}", visited_modules.len(), num_decls, funcs.len());
+ eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());
+
+ if self.randomize {
+ shuffle(&mut rng, &mut funcs);
+ }
+
+ if !self.skip_inference {
+ self.run_inference(&host, db, &vfs, &funcs, verbosity);
+ }
+
+ let total_span = analysis_sw.elapsed();
+ eprintln!("{:<20} {}", "Total:", total_span);
+ report_metric("total time", total_span.time.as_millis() as u64, "ms");
+ if let Some(instructions) = total_span.instructions {
+ report_metric("total instructions", instructions, "#instr");
+ }
+ if let Some(memory) = total_span.memory {
+ report_metric("total memory", memory.allocated.megabytes() as u64, "MB");
+ }
+
+ if env::var("RA_COUNT").is_ok() {
+ eprintln!("{}", profile::countme::get_all());
+ }
+
+ if self.source_stats {
+ let mut total_file_size = Bytes::default();
+ for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() {
+ total_file_size += syntax_len(db.parse(e.key).syntax_node())
+ }
+
+ let mut total_macro_file_size = Bytes::default();
+ for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
+ if let Some((val, _)) = db.parse_macro_expansion(e.key).value {
+ total_macro_file_size += syntax_len(val.syntax_node())
+ }
+ }
+ eprintln!("source files: {}, macro files: {}", total_file_size, total_macro_file_size);
+ }
+
+ if self.memory_usage && verbosity.is_verbose() {
+ print_memory_usage(host, vfs);
+ }
+
+ Ok(())
+ }
+
+ fn run_inference(
+ &self,
+ host: &AnalysisHost,
+ db: &RootDatabase,
+ vfs: &Vfs,
+ funcs: &[Function],
+ verbosity: Verbosity,
+ ) {
+ let mut bar = match verbosity {
+ Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
+ _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
+ _ => ProgressReport::new(funcs.len() as u64),
+ };
+
+ if self.parallel {
+ let mut inference_sw = self.stop_watch();
+ let snap = Snap(db.snapshot());
+ funcs
+ .par_iter()
+ .map_with(snap, |snap, &f| {
+ let f_id = FunctionId::from(f);
+ snap.0.body(f_id.into());
+ snap.0.infer(f_id.into());
+ })
+ .count();
+ eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
+ }
+
+ let mut inference_sw = self.stop_watch();
+ bar.tick();
+ let mut num_exprs = 0;
+ let mut num_exprs_unknown = 0;
+ let mut num_exprs_partially_unknown = 0;
+ let mut num_type_mismatches = 0;
+ let analysis = host.analysis();
+ for f in funcs.iter().copied() {
+ let name = f.name(db);
+ let full_name = f
+ .module(db)
+ .path_to_root(db)
+ .into_iter()
+ .rev()
+ .filter_map(|it| it.name(db))
+ .chain(Some(f.name(db)))
+ .join("::");
+ if let Some(only_name) = self.only.as_deref() {
+ if name.to_string() != only_name && full_name != only_name {
+ continue;
+ }
+ }
+ let mut msg = format!("processing: {}", full_name);
+ if verbosity.is_verbose() {
+ if let Some(src) = f.source(db) {
+ let original_file = src.file_id.original_file(db);
+ let path = vfs.file_path(original_file);
+ let syntax_range = src.value.syntax().text_range();
+ format_to!(msg, " ({} {:?})", path, syntax_range);
+ }
+ }
+ if verbosity.is_spammy() {
+ bar.println(msg.to_string());
+ }
+ bar.set_message(&msg);
+ let f_id = FunctionId::from(f);
+ let (body, sm) = db.body_with_source_map(f_id.into());
+ let inference_result = db.infer(f_id.into());
+ let (previous_exprs, previous_unknown, previous_partially_unknown) =
+ (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
+ for (expr_id, _) in body.exprs.iter() {
+ let ty = &inference_result[expr_id];
+ num_exprs += 1;
+ let unknown_or_partial = if ty.is_unknown() {
+ num_exprs_unknown += 1;
+ if verbosity.is_spammy() {
+ if let Some((path, start, end)) =
+ expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
+ {
+ bar.println(format!(
+ "{} {}:{}-{}:{}: Unknown type",
+ path,
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ ));
+ } else {
+ bar.println(format!("{}: Unknown type", name,));
+ }
+ }
+ true
+ } else {
+ let mut is_partially_unknown = false;
+ ty.walk(&mut |ty| {
+ if ty.is_unknown() {
+ is_partially_unknown = true;
+ }
+ });
+ if is_partially_unknown {
+ num_exprs_partially_unknown += 1;
+ }
+ is_partially_unknown
+ };
+ if self.only.is_some() && verbosity.is_spammy() {
+ // in super-verbose mode for just one function, we print every single expression
+ if let Some((_, start, end)) =
+ expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
+ {
+ bar.println(format!(
+ "{}:{}-{}:{}: {}",
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ ty.display(db)
+ ));
+ } else {
+ bar.println(format!("unknown location: {}", ty.display(db)));
+ }
+ }
+ if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
+ println!(
+ r#"{},type,"{}""#,
+ location_csv(db, &analysis, vfs, &sm, expr_id),
+ ty.display(db)
+ );
+ }
+ if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
+ num_type_mismatches += 1;
+ if verbosity.is_verbose() {
+ if let Some((path, start, end)) =
+ expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
+ {
+ bar.println(format!(
+ "{} {}:{}-{}:{}: Expected {}, got {}",
+ path,
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ ));
+ } else {
+ bar.println(format!(
+ "{}: Expected {}, got {}",
+ name,
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ ));
+ }
+ }
+ if self.output == Some(OutputFormat::Csv) {
+ println!(
+ r#"{},mismatch,"{}","{}""#,
+ location_csv(db, &analysis, vfs, &sm, expr_id),
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ );
+ }
+ }
+ }
+ if verbosity.is_spammy() {
+ bar.println(format!(
+ "In {}: {} exprs, {} unknown, {} partial",
+ full_name,
+ num_exprs - previous_exprs,
+ num_exprs_unknown - previous_unknown,
+ num_exprs_partially_unknown - previous_partially_unknown
+ ));
+ }
+ bar.inc(1);
+ }
+
+ bar.finish_and_clear();
+ eprintln!(
+ " exprs: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
+ num_exprs,
+ num_exprs_unknown,
+ percentage(num_exprs_unknown, num_exprs),
+ num_exprs_partially_unknown,
+ percentage(num_exprs_partially_unknown, num_exprs),
+ num_type_mismatches
+ );
+ report_metric("unknown type", num_exprs_unknown, "#");
+ report_metric("type mismatches", num_type_mismatches, "#");
+
+ eprintln!("{:<20} {}", "Inference:", inference_sw.elapsed());
+ }
+
+ fn stop_watch(&self) -> StopWatch {
+ StopWatch::start().memory(self.memory_usage)
+ }
+}
+
+fn location_csv(
+ db: &RootDatabase,
+ analysis: &Analysis,
+ vfs: &Vfs,
+ sm: &BodySourceMap,
+ expr_id: ExprId,
+) -> String {
+ let src = match sm.expr_syntax(expr_id) {
+ Ok(s) => s,
+ Err(SyntheticSyntax) => return "synthetic,,".to_string(),
+ };
+ let root = db.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|e| e.to_node(&root).syntax().clone());
+ let original_range = node.as_ref().original_file_range(db);
+ let path = vfs.file_path(original_range.file_id);
+ let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let text_range = original_range.range;
+ let (start, end) =
+ (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
+ format!("{},{}:{},{}:{}", path, start.line + 1, start.col, end.line + 1, end.col)
+}
+
+fn expr_syntax_range(
+ db: &RootDatabase,
+ analysis: &Analysis,
+ vfs: &Vfs,
+ sm: &BodySourceMap,
+ expr_id: ExprId,
+) -> Option<(VfsPath, LineCol, LineCol)> {
+ let src = sm.expr_syntax(expr_id);
+ if let Ok(src) = src {
+ let root = db.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|e| e.to_node(&root).syntax().clone());
+ let original_range = node.as_ref().original_file_range(db);
+ let path = vfs.file_path(original_range.file_id);
+ let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let text_range = original_range.range;
+ let (start, end) =
+ (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
+ Some((path, start, end))
+ } else {
+ None
+ }
+}
+
+fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
+ for i in 0..slice.len() {
+ randomize_first(rng, &mut slice[i..]);
+ }
+
+ fn randomize_first<T>(rng: &mut Rand32, slice: &mut [T]) {
+ assert!(!slice.is_empty());
+ let idx = rng.rand_range(0..slice.len() as u32) as usize;
+ slice.swap(0, idx);
+ }
+}
+
+fn percentage(n: u64, total: u64) -> u64 {
+ (n * 100).checked_div(total).unwrap_or(100)
+}
+
+fn syntax_len(node: SyntaxNode) -> usize {
+    // Macro-expanded code doesn't contain whitespace, so erase *all* whitespace
+ // to make macro and non-macro code comparable.
+ node.to_string().replace(|it: char| it.is_ascii_whitespace(), "").len()
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
new file mode 100644
index 000000000..52511ceb5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -0,0 +1,86 @@
+//! Analyze all modules in a project for diagnostics. Exits with a non-zero
+//! status code if any errors are found.
+
+use rustc_hash::FxHashSet;
+
+use hir::{db::HirDatabase, Crate, Module};
+use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
+use ide_db::base_db::SourceDatabaseExt;
+
+use crate::cli::{
+ flags,
+ load_cargo::{load_workspace_at, LoadCargoConfig},
+};
+
+impl flags::Diagnostics {
+ pub fn run(self) -> anyhow::Result<()> {
+ let cargo_config = Default::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: !self.disable_build_scripts,
+ with_proc_macro: !self.disable_proc_macros,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
+ let db = host.raw_database();
+ let analysis = host.analysis();
+
+ let mut found_error = false;
+ let mut visited_files = FxHashSet::default();
+
+ let work = all_modules(db).into_iter().filter(|module| {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ let source_root = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root);
+ !source_root.is_library
+ });
+
+ for module in work {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ if !visited_files.contains(&file_id) {
+ let crate_name =
+ module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string();
+ println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id));
+ for diagnostic in analysis
+ .diagnostics(
+ &DiagnosticsConfig::default(),
+ AssistResolveStrategy::None,
+ file_id,
+ )
+ .unwrap()
+ {
+ if matches!(diagnostic.severity, Severity::Error) {
+ found_error = true;
+ }
+
+ println!("{:?}", diagnostic);
+ }
+
+ visited_files.insert(file_id);
+ }
+ }
+
+ println!();
+ println!("diagnostic scan complete");
+
+ if found_error {
+ println!();
+ anyhow::bail!("diagnostic error detected")
+ }
+
+ Ok(())
+ }
+}
+
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
+ let mut worklist: Vec<_> =
+ Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ let mut modules = Vec::new();
+
+ while let Some(module) = worklist.pop() {
+ modules.push(module);
+ worklist.extend(module.children(db));
+ }
+
+ modules
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
new file mode 100644
index 000000000..19907ebdd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
@@ -0,0 +1,248 @@
+//! Grammar for the command-line arguments.
+#![allow(unreachable_pub)]
+use std::{path::PathBuf, str::FromStr};
+
+use ide_ssr::{SsrPattern, SsrRule};
+
+use crate::cli::Verbosity;
+
+xflags::xflags! {
+ src "./src/cli/flags.rs"
+
+ /// LSP server for the Rust programming language.
+ cmd rust-analyzer {
+ /// Verbosity level, can be repeated multiple times.
+ repeated -v, --verbose
+ /// Verbosity level.
+ optional -q, --quiet
+
+ /// Log to the specified file instead of stderr.
+ optional --log-file path: PathBuf
+ /// Flush log records to the file immediately.
+ optional --no-log-buffering
+
+        /// Wait until a debugger is attached (requires debug build).
+ optional --wait-dbg
+
+ default cmd lsp-server {
+ /// Print version.
+ optional --version
+ /// Print help.
+ optional -h, --help
+
+ /// Dump a LSP config JSON schema.
+ optional --print-config-schema
+ }
+
+ /// Parse stdin.
+ cmd parse {
+ /// Suppress printing.
+ optional --no-dump
+ }
+
+ /// Parse stdin and print the list of symbols.
+ cmd symbols {}
+
+ /// Highlight stdin as html.
+ cmd highlight {
+ /// Enable rainbow highlighting of identifiers.
+ optional --rainbow
+ }
+
+        /// Batch typecheck the project and print summary statistics.
+ cmd analysis-stats
+ /// Directory with Cargo.toml.
+ required path: PathBuf
+ {
+ optional --output format: OutputFormat
+
+ /// Randomize order in which crates, modules, and items are processed.
+ optional --randomize
+ /// Run type inference in parallel.
+ optional --parallel
+ /// Collect memory usage statistics.
+ optional --memory-usage
+ /// Print the total length of all source and macro files (whitespace is not counted).
+ optional --source-stats
+
+ /// Only analyze items matching this path.
+ optional -o, --only path: String
+ /// Also analyze all dependencies.
+ optional --with-deps
+ /// Don't load sysroot crates (`std`, `core` & friends).
+ optional --no-sysroot
+
+ /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
+ optional --disable-build-scripts
+            /// Don't expand proc macros.
+ optional --disable-proc-macros
+ /// Only resolve names, don't run type inference.
+ optional --skip-inference
+ }
+
+ cmd diagnostics
+ /// Directory with Cargo.toml.
+ required path: PathBuf
+ {
+ /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
+ optional --disable-build-scripts
+            /// Don't expand proc macros.
+ optional --disable-proc-macros
+ }
+
+ cmd ssr
+ /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
+ repeated rule: SsrRule
+ {}
+
+ cmd search
+ /// A structured search replace pattern (`$a.foo($b)`)
+ repeated pattern: SsrPattern
+ {
+ /// Prints debug information for any nodes with source exactly equal to snippet.
+ optional --debug snippet: String
+ }
+
+ cmd proc-macro {}
+
+ cmd lsif
+ required path: PathBuf
+ {}
+ }
+}
+
+// generated start
+// The following code is generated by `xflags` macro.
+// Run `env UPDATE_XFLAGS=1 cargo build` to regenerate.
+#[derive(Debug)]
+pub struct RustAnalyzer {
+ pub verbose: u32,
+ pub quiet: bool,
+ pub log_file: Option<PathBuf>,
+ pub no_log_buffering: bool,
+ pub wait_dbg: bool,
+ pub subcommand: RustAnalyzerCmd,
+}
+
+#[derive(Debug)]
+pub enum RustAnalyzerCmd {
+ LspServer(LspServer),
+ Parse(Parse),
+ Symbols(Symbols),
+ Highlight(Highlight),
+ AnalysisStats(AnalysisStats),
+ Diagnostics(Diagnostics),
+ Ssr(Ssr),
+ Search(Search),
+ ProcMacro(ProcMacro),
+ Lsif(Lsif),
+}
+
+#[derive(Debug)]
+pub struct LspServer {
+ pub version: bool,
+ pub help: bool,
+ pub print_config_schema: bool,
+}
+
+#[derive(Debug)]
+pub struct Parse {
+ pub no_dump: bool,
+}
+
+#[derive(Debug)]
+pub struct Symbols;
+
+#[derive(Debug)]
+pub struct Highlight {
+ pub rainbow: bool,
+}
+
+#[derive(Debug)]
+pub struct AnalysisStats {
+ pub path: PathBuf,
+
+ pub output: Option<OutputFormat>,
+ pub randomize: bool,
+ pub parallel: bool,
+ pub memory_usage: bool,
+ pub source_stats: bool,
+ pub only: Option<String>,
+ pub with_deps: bool,
+ pub no_sysroot: bool,
+ pub disable_build_scripts: bool,
+ pub disable_proc_macros: bool,
+ pub skip_inference: bool,
+}
+
+#[derive(Debug)]
+pub struct Diagnostics {
+ pub path: PathBuf,
+
+ pub disable_build_scripts: bool,
+ pub disable_proc_macros: bool,
+}
+
+#[derive(Debug)]
+pub struct Ssr {
+ pub rule: Vec<SsrRule>,
+}
+
+#[derive(Debug)]
+pub struct Search {
+ pub pattern: Vec<SsrPattern>,
+
+ pub debug: Option<String>,
+}
+
+#[derive(Debug)]
+pub struct ProcMacro;
+
+#[derive(Debug)]
+pub struct Lsif {
+ pub path: PathBuf,
+}
+
+impl RustAnalyzer {
+ pub const HELP: &'static str = Self::HELP_;
+
+ #[allow(dead_code)]
+ pub fn from_env() -> xflags::Result<Self> {
+ Self::from_env_()
+ }
+
+ #[allow(dead_code)]
+ pub fn from_vec(args: Vec<std::ffi::OsString>) -> xflags::Result<Self> {
+ Self::from_vec_(args)
+ }
+}
+// generated end
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum OutputFormat {
+ Csv,
+}
+
+impl RustAnalyzer {
+ pub fn verbosity(&self) -> Verbosity {
+ if self.quiet {
+ return Verbosity::Quiet;
+ }
+ match self.verbose {
+ 0 => Verbosity::Normal,
+ 1 => Verbosity::Verbose,
+ _ => Verbosity::Spammy,
+ }
+ }
+}
+
+impl FromStr for OutputFormat {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "csv" => Ok(Self::Csv),
+ _ => Err(format!("unknown output format `{}`", s)),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/highlight.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/highlight.rs
new file mode 100644
index 000000000..4f9b362f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/highlight.rs
@@ -0,0 +1,14 @@
+//! Read Rust code on stdin, print HTML highlighted version to stdout.
+
+use ide::Analysis;
+
+use crate::cli::{flags, read_stdin};
+
+impl flags::Highlight {
+ pub fn run(self) -> anyhow::Result<()> {
+ let (analysis, file_id) = Analysis::from_single_file(read_stdin()?);
+ let html = analysis.highlight_as_html(file_id, self.rainbow).unwrap();
+ println!("{}", html);
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
new file mode 100644
index 000000000..5d1c013c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -0,0 +1,164 @@
+//! Loads a Cargo project into a static instance of analysis, without support
+//! for incorporating changes.
+use std::{path::Path, sync::Arc};
+
+use anyhow::Result;
+use crossbeam_channel::{unbounded, Receiver};
+use hir::db::DefDatabase;
+use ide::{AnalysisHost, Change};
+use ide_db::base_db::CrateGraph;
+use proc_macro_api::ProcMacroServer;
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use vfs::{loader::Handle, AbsPath, AbsPathBuf};
+
+use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig};
+
+// Note: Since this type is used by external tools that use rust-analyzer as a library
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+pub struct LoadCargoConfig {
+ pub load_out_dirs_from_check: bool,
+ pub with_proc_macro: bool,
+ pub prefill_caches: bool,
+}
+
+// Note: Since this function is used by external tools that use rust-analyzer as a library
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+pub fn load_workspace_at(
+ root: &Path,
+ cargo_config: &CargoConfig,
+ load_config: &LoadCargoConfig,
+ progress: &dyn Fn(String),
+) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
+ let root = ProjectManifest::discover_single(&root)?;
+ let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
+
+ if load_config.load_out_dirs_from_check {
+ let build_scripts = workspace.run_build_scripts(cargo_config, progress)?;
+ workspace.set_build_scripts(build_scripts)
+ }
+
+ load_workspace(workspace, load_config)
+}
+
+// Note: Since this function is used by external tools that use rust-analyzer as a library
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+//
+// The reason both, `load_workspace_at` and `load_workspace` are `pub` is that some of
+// these tools need access to `ProjectWorkspace`, too, which `load_workspace_at` hides.
+pub fn load_workspace(
+ ws: ProjectWorkspace,
+ load_config: &LoadCargoConfig,
+) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let (sender, receiver) = unbounded();
+ let mut vfs = vfs::Vfs::default();
+ let mut loader = {
+ let loader =
+ vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+ Box::new(loader)
+ };
+
+ let proc_macro_client = if load_config.with_proc_macro {
+ let path = AbsPathBuf::assert(std::env::current_exe()?);
+ Ok(ProcMacroServer::spawn(path, &["proc-macro"]).unwrap())
+ } else {
+ Err("proc macro server not started".to_owned())
+ };
+
+ let crate_graph = ws.to_crate_graph(
+ &mut |_, path: &AbsPath| {
+ load_proc_macro(proc_macro_client.as_ref().map_err(|e| &**e), path, &[])
+ },
+ &mut |path: &AbsPath| {
+ let contents = loader.load_sync(path);
+ let path = vfs::VfsPath::from(path.to_path_buf());
+ vfs.set_file_contents(path.clone(), contents);
+ vfs.file_id(&path)
+ },
+ );
+
+ let project_folders = ProjectFolders::new(&[ws], &[]);
+ loader.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch: vec![],
+ version: 0,
+ });
+
+ tracing::debug!("crate graph: {:?}", crate_graph);
+ let host =
+ load_crate_graph(crate_graph, project_folders.source_root_config, &mut vfs, &receiver);
+
+ if load_config.prefill_caches {
+ host.analysis().parallel_prime_caches(1, |_| {})?;
+ }
+ Ok((host, vfs, proc_macro_client.ok()))
+}
+
+fn load_crate_graph(
+ crate_graph: CrateGraph,
+ source_root_config: SourceRootConfig,
+ vfs: &mut vfs::Vfs,
+ receiver: &Receiver<vfs::loader::Message>,
+) -> AnalysisHost {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+ let mut host = AnalysisHost::new(lru_cap);
+ let mut analysis_change = Change::new();
+
+ host.raw_database_mut().set_enable_proc_attr_macros(true);
+
+ // wait until Vfs has loaded all roots
+ for task in receiver {
+ match task {
+ vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => {
+ if n_done == n_total {
+ break;
+ }
+ }
+ vfs::loader::Message::Loaded { files } => {
+ for (path, contents) in files {
+ vfs.set_file_contents(path.into(), contents);
+ }
+ }
+ }
+ }
+ let changes = vfs.take_changes();
+ for file in changes {
+ if file.exists() {
+ let contents = vfs.file_contents(file.file_id).to_vec();
+ if let Ok(text) = String::from_utf8(contents) {
+ analysis_change.change_file(file.file_id, Some(Arc::new(text)))
+ }
+ }
+ }
+ let source_roots = source_root_config.partition(vfs);
+ analysis_change.set_roots(source_roots);
+
+ analysis_change.set_crate_graph(crate_graph);
+
+ host.apply_change(analysis_change);
+ host
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use hir::Crate;
+
+ #[test]
+ fn test_loading_rust_analyzer() {
+ let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: false,
+ with_proc_macro: false,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
+
+ let n_crates = Crate::all(host.raw_database()).len();
+ // RA has quite a few crates, but the exact count doesn't matter
+ assert!(n_crates > 20);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
new file mode 100644
index 000000000..491c55a04
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -0,0 +1,328 @@
+//! LSIF (language server index format) generator
+
+use std::collections::HashMap;
+use std::env;
+use std::time::Instant;
+
+use ide::{
+ Analysis, FileId, FileRange, MonikerKind, PackageInformation, RootDatabase, StaticIndex,
+ StaticIndexedFile, TokenId, TokenStaticData,
+};
+use ide_db::LineIndexDatabase;
+
+use ide_db::base_db::salsa::{self, ParallelDatabase};
+use lsp_types::{self, lsif};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use vfs::{AbsPathBuf, Vfs};
+
+use crate::cli::{
+ flags,
+ load_cargo::{load_workspace, LoadCargoConfig},
+ Result,
+};
+use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
+use crate::to_proto;
+use crate::version::version;
+
+/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
+struct Snap<DB>(DB);
+impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
+ fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
+ Snap(self.0.snapshot())
+ }
+}
+
+struct LsifManager<'a> {
+ count: i32,
+ token_map: HashMap<TokenId, Id>,
+ range_map: HashMap<FileRange, Id>,
+ file_map: HashMap<FileId, Id>,
+ package_map: HashMap<PackageInformation, Id>,
+ analysis: &'a Analysis,
+ db: &'a RootDatabase,
+ vfs: &'a Vfs,
+}
+
+#[derive(Clone, Copy)]
+struct Id(i32);
+
+impl From<Id> for lsp_types::NumberOrString {
+ fn from(Id(x): Id) -> Self {
+ lsp_types::NumberOrString::Number(x)
+ }
+}
+
+impl LsifManager<'_> {
+ fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> {
+ LsifManager {
+ count: 0,
+ token_map: HashMap::default(),
+ range_map: HashMap::default(),
+ file_map: HashMap::default(),
+ package_map: HashMap::default(),
+ analysis,
+ db,
+ vfs,
+ }
+ }
+
+ fn add(&mut self, data: lsif::Element) -> Id {
+ let id = Id(self.count);
+ self.emit(&serde_json::to_string(&lsif::Entry { id: id.into(), data }).unwrap());
+ self.count += 1;
+ id
+ }
+
+ fn add_vertex(&mut self, vertex: lsif::Vertex) -> Id {
+ self.add(lsif::Element::Vertex(vertex))
+ }
+
+ fn add_edge(&mut self, edge: lsif::Edge) -> Id {
+ self.add(lsif::Element::Edge(edge))
+ }
+
+ // FIXME: support file in addition to stdout here
+ fn emit(&self, data: &str) {
+ println!("{}", data);
+ }
+
+ fn get_token_id(&mut self, id: TokenId) -> Id {
+ if let Some(x) = self.token_map.get(&id) {
+ return *x;
+ }
+ let result_set_id = self.add_vertex(lsif::Vertex::ResultSet(lsif::ResultSet { key: None }));
+ self.token_map.insert(id, result_set_id);
+ result_set_id
+ }
+
+ fn get_package_id(&mut self, package_information: PackageInformation) -> Id {
+ if let Some(x) = self.package_map.get(&package_information) {
+ return *x;
+ }
+ let pi = package_information.clone();
+ let result_set_id =
+ self.add_vertex(lsif::Vertex::PackageInformation(lsif::PackageInformation {
+ name: pi.name,
+ manager: "cargo".to_string(),
+ uri: None,
+ content: None,
+ repository: Some(lsif::Repository {
+ url: pi.repo,
+ r#type: "git".to_string(),
+ commit_id: None,
+ }),
+ version: Some(pi.version),
+ }));
+ self.package_map.insert(package_information, result_set_id);
+ result_set_id
+ }
+
+ fn get_range_id(&mut self, id: FileRange) -> Id {
+ if let Some(x) = self.range_map.get(&id) {
+ return *x;
+ }
+ let file_id = id.file_id;
+ let doc_id = self.get_file_id(file_id);
+ let line_index = self.db.line_index(file_id);
+ let line_index = LineIndex {
+ index: line_index,
+ encoding: OffsetEncoding::Utf16,
+ endings: LineEndings::Unix,
+ };
+ let range_id = self.add_vertex(lsif::Vertex::Range {
+ range: to_proto::range(&line_index, id.range),
+ tag: None,
+ });
+ self.add_edge(lsif::Edge::Contains(lsif::EdgeDataMultiIn {
+ in_vs: vec![range_id.into()],
+ out_v: doc_id.into(),
+ }));
+ range_id
+ }
+
+ fn get_file_id(&mut self, id: FileId) -> Id {
+ if let Some(x) = self.file_map.get(&id) {
+ return *x;
+ }
+ let path = self.vfs.file_path(id);
+ let path = path.as_path().unwrap();
+ let doc_id = self.add_vertex(lsif::Vertex::Document(lsif::Document {
+ language_id: "rust".to_string(),
+ uri: lsp_types::Url::from_file_path(path).unwrap(),
+ }));
+ self.file_map.insert(id, doc_id);
+ doc_id
+ }
+
+    /// Emits everything attached to one token's result set: hover text,
+    /// moniker + package information, its definition, and its references.
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+        let result_set_id = self.get_token_id(id);
+        // Hover: a HoverResult vertex linked to the result set.
+        if let Some(hover) = token.hover {
+            let hover_id = self.add_vertex(lsif::Vertex::HoverResult {
+                result: lsp_types::Hover {
+                    contents: lsp_types::HoverContents::Markup(to_proto::markup_content(
+                        hover.markup,
+                        ide::HoverDocFormat::Markdown,
+                    )),
+                    range: None,
+                },
+            });
+            self.add_edge(lsif::Edge::Hover(lsif::EdgeData {
+                in_v: hover_id.into(),
+                out_v: result_set_id.into(),
+            }));
+        }
+        // Moniker: a cross-project identifier, tied to its cargo package.
+        if let Some(moniker) = token.moniker {
+            let package_id = self.get_package_id(moniker.package_information);
+            let moniker_id = self.add_vertex(lsif::Vertex::Moniker(lsp_types::Moniker {
+                scheme: "rust-analyzer".to_string(),
+                identifier: moniker.identifier.to_string(),
+                unique: lsp_types::UniquenessLevel::Scheme,
+                kind: Some(match moniker.kind {
+                    MonikerKind::Import => lsp_types::MonikerKind::Import,
+                    MonikerKind::Export => lsp_types::MonikerKind::Export,
+                }),
+            }));
+            self.add_edge(lsif::Edge::PackageInformation(lsif::EdgeData {
+                in_v: package_id.into(),
+                out_v: moniker_id.into(),
+            }));
+            self.add_edge(lsif::Edge::Moniker(lsif::EdgeData {
+                in_v: moniker_id.into(),
+                out_v: result_set_id.into(),
+            }));
+        }
+        // Definition: a DefinitionResult whose single item is the def range.
+        if let Some(def) = token.definition {
+            let result_id = self.add_vertex(lsif::Vertex::DefinitionResult);
+            // `get_range_id` creates the document vertex for `def.file_id` if
+            // needed (via `get_file_id`), so the `file_map` unwrap below holds.
+            let def_vertex = self.get_range_id(def);
+            self.add_edge(lsif::Edge::Item(lsif::Item {
+                document: (*self.file_map.get(&def.file_id).unwrap()).into(),
+                property: None,
+                edge_data: lsif::EdgeDataMultiIn {
+                    in_vs: vec![def_vertex.into()],
+                    out_v: result_id.into(),
+                },
+            }));
+            self.add_edge(lsif::Edge::Definition(lsif::EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            }));
+        }
+        if !token.references.is_empty() {
+            let result_id = self.add_vertex(lsif::Vertex::ReferenceResult);
+            self.add_edge(lsif::Edge::References(lsif::EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            }));
+            // Group reference ranges by (file, is_definition) so each group is
+            // carried by a single `item` edge. The `range_map` unwrap relies
+            // on `add_file` having registered every reference range already —
+            // `flags::Lsif::run` loops over files before tokens.
+            let mut edges = token.references.iter().fold(
+                HashMap::<_, Vec<lsp_types::NumberOrString>>::new(),
+                |mut edges, x| {
+                    let entry =
+                        edges.entry((x.range.file_id, x.is_definition)).or_insert_with(Vec::new);
+                    entry.push((*self.range_map.get(&x.range).unwrap()).into());
+                    edges
+                },
+            );
+            for x in token.references {
+                // `remove` yields each group exactly once (on the first
+                // reference from that group); later hits return `None`.
+                if let Some(vertices) = edges.remove(&(x.range.file_id, x.is_definition)) {
+                    self.add_edge(lsif::Edge::Item(lsif::Item {
+                        document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
+                        property: Some(if x.is_definition {
+                            lsif::ItemKind::Definitions
+                        } else {
+                            lsif::ItemKind::References
+                        }),
+                        edge_data: lsif::EdgeDataMultiIn {
+                            in_vs: vertices,
+                            out_v: result_id.into(),
+                        },
+                    }));
+                }
+            }
+        }
+    }
+
+    /// Emits per-file LSIF data: the document's folding ranges plus a `Range`
+    /// vertex for every token occurrence, wired to the token's result set.
+    fn add_file(&mut self, file: StaticIndexedFile) {
+        let StaticIndexedFile { file_id, tokens, folds, .. } = file;
+        let doc_id = self.get_file_id(file_id);
+        let text = self.analysis.file_text(file_id).unwrap();
+        let line_index = self.db.line_index(file_id);
+        let line_index = LineIndex {
+            index: line_index,
+            encoding: OffsetEncoding::Utf16,
+            endings: LineEndings::Unix,
+        };
+        let result = folds
+            .into_iter()
+            .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
+            .collect();
+        let folding_id = self.add_vertex(lsif::Vertex::FoldingRangeResult { result });
+        self.add_edge(lsif::Edge::FoldingRange(lsif::EdgeData {
+            in_v: folding_id.into(),
+            out_v: doc_id.into(),
+        }));
+        let tokens_id = tokens
+            .into_iter()
+            .map(|(range, id)| {
+                let range_id = self.add_vertex(lsif::Vertex::Range {
+                    range: to_proto::range(&line_index, range),
+                    tag: None,
+                });
+                // Register the range so `add_token` can later resolve
+                // references pointing at it.
+                self.range_map.insert(FileRange { file_id, range }, range_id);
+                let result_set_id = self.get_token_id(id);
+                // `next` edge: occurrence range -> the token's result set.
+                self.add_edge(lsif::Edge::Next(lsif::EdgeData {
+                    in_v: result_set_id.into(),
+                    out_v: range_id.into(),
+                }));
+                range_id.into()
+            })
+            .collect();
+        // All token ranges are contained in this document.
+        self.add_edge(lsif::Edge::Contains(lsif::EdgeDataMultiIn {
+            in_vs: tokens_id,
+            out_v: doc_id.into(),
+        }));
+    }
+}
+
+impl flags::Lsif {
+    /// Loads the cargo workspace at `self.path`, computes the static index,
+    /// and emits an LSIF 0.5 dump of it.
+    pub fn run(self) -> Result<()> {
+        eprintln!("Generating LSIF started...");
+        let now = Instant::now();
+        let cargo_config = CargoConfig::default();
+        let no_progress = &|_| ();
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: true,
+            with_proc_macro: true,
+            prefill_caches: false,
+        };
+        let path = AbsPathBuf::assert(env::current_dir()?.join(&self.path));
+        let manifest = ProjectManifest::discover_single(&path)?;
+
+        let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+
+        let (host, vfs, _proc_macro) = load_workspace(workspace, &load_cargo_config)?;
+        let db = host.raw_database();
+        let analysis = host.analysis();
+
+        let si = StaticIndex::compute(&analysis);
+
+        let mut lsif = LsifManager::new(&analysis, db, &vfs);
+        // The metadata vertex comes first; it fixes the position encoding
+        // (UTF-16) used by every later `Range` vertex.
+        lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData {
+            version: String::from("0.5.0"),
+            project_root: lsp_types::Url::from_file_path(path).unwrap(),
+            position_encoding: lsif::Encoding::Utf16,
+            tool_info: Some(lsp_types::lsif::ToolInfo {
+                name: "rust-analyzer".to_string(),
+                args: vec![],
+                version: Some(version().to_string()),
+            }),
+        }));
+        // Order matters: `add_file` populates `range_map`, which `add_token`
+        // relies on when emitting reference items.
+        for file in si.files {
+            lsif.add_file(file);
+        }
+        for (id, token) in si.tokens.iter() {
+            lsif.add_token(id, token);
+        }
+        eprintln!("Generating LSIF finished in {:?}", now.elapsed());
+        Ok(())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs
new file mode 100644
index 000000000..5ef8cdff4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs
@@ -0,0 +1,17 @@
+//! Read Rust code on stdin, print syntax tree on stdout.
+use syntax::{AstNode, SourceFile};
+
+use crate::cli::{flags, read_stdin};
+
+impl flags::Parse {
+    /// Parses stdin as a single Rust file and dumps the syntax tree to
+    /// stdout unless `--no-dump` was passed.
+    pub fn run(self) -> anyhow::Result<()> {
+        let _p = profile::span("parsing");
+        let text = read_stdin()?;
+        let file = SourceFile::parse(&text).tree();
+        if !self.no_dump {
+            println!("{:#?}", file.syntax());
+        }
+        // NOTE(review): the tree is deliberately leaked, presumably to skip
+        // drop/deallocation work before process exit — confirm before "fixing".
+        std::mem::forget(file);
+        Ok(())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs
new file mode 100644
index 000000000..5a2dc39d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs
@@ -0,0 +1,122 @@
+//! A simple progress bar
+//!
+//! A single thread non-optimized progress bar
+use std::io::{self, Write};
+
+/// A Simple ASCII Progress Bar
+pub(crate) struct ProgressReport {
+    // Fraction done, clamped to [0.0, 1.0] by `set_value`.
+    curr: f32,
+    // The text currently drawn on the terminal line; used by `update_text`
+    // and `clear` to compute how much to erase/redraw.
+    text: String,
+    // When true, every drawing method is a no-op.
+    hidden: bool,
+
+    // Total number of steps and how many have completed.
+    len: u64,
+    pos: u64,
+    // Trailing status message appended after the percentage.
+    msg: String,
+}
+
+impl ProgressReport {
+    /// Creates a visible progress bar expecting `len` steps.
+    pub(crate) fn new(len: u64) -> ProgressReport {
+        ProgressReport {
+            curr: 0.0,
+            text: String::new(),
+            hidden: false,
+            len,
+            pos: 0,
+            msg: String::new(),
+        }
+    }
+
+    /// Creates a disabled bar: all drawing methods become no-ops.
+    pub(crate) fn hidden() -> ProgressReport {
+        ProgressReport {
+            curr: 0.0,
+            text: String::new(),
+            hidden: true,
+            len: 0,
+            pos: 0,
+            msg: String::new(),
+        }
+    }
+
+    /// Replaces the trailing status message and redraws the bar.
+    pub(crate) fn set_message(&mut self, msg: &str) {
+        self.msg = msg.to_string();
+        self.tick();
+    }
+
+    /// Prints a line "above" the bar: clears the bar, prints `msg` on its own
+    /// line, then redraws the bar below it.
+    pub(crate) fn println<I: Into<String>>(&mut self, msg: I) {
+        self.clear();
+        println!("{}", msg.into());
+        self.tick();
+    }
+
+    /// Advances the bar by `delta` steps and redraws.
+    pub(crate) fn inc(&mut self, delta: u64) {
+        self.pos += delta;
+        if self.len == 0 {
+            // Guard against division by zero for a zero-length bar.
+            self.set_value(0.0)
+        } else {
+            self.set_value((self.pos as f32) / (self.len as f32))
+        }
+        self.tick();
+    }
+
+    /// Removes the bar from the terminal once work is done.
+    pub(crate) fn finish_and_clear(&mut self) {
+        self.clear();
+    }
+
+    /// Redraws the progress line in the form `pos/len  NN% msg`.
+    pub(crate) fn tick(&mut self) {
+        if self.hidden {
+            return;
+        }
+        let percent = (self.curr * 100.0) as u32;
+        // Fix: `{:>3}` right-aligns the percentage to three columns so the
+        // line width is stable from 0% to 100%. The previous spec `{:3>}`
+        // parsed as fill-char '3' + right-align with *no* width, i.e. a no-op.
+        let text = format!("{}/{} {:>3}% {}", self.pos, self.len, percent, self.msg);
+        self.update_text(&text);
+    }
+
+    /// Repaints the terminal line in place: backspaces over the part of the
+    /// previously drawn text that differs, writes the new suffix, and blanks
+    /// out any leftover characters if the new text is shorter.
+    fn update_text(&mut self, text: &str) {
+        // Byte length of the common prefix, kept on a char boundary.
+        //
+        // Fix: the previous implementation called `chars().nth(i)` in a loop
+        // (O(n^2)) and mixed char indices with byte lengths, which could
+        // panic (`nth(..).unwrap()` / mid-char slicing) and miscount the
+        // backspaces for non-ASCII text. `char_indices` gives both the byte
+        // offset and the char in one linear pass.
+        let mut common_prefix_length = 0;
+        for ((i, a), (_, b)) in self.text.char_indices().zip(text.char_indices()) {
+            if a != b {
+                break;
+            }
+            common_prefix_length = i + a.len_utf8();
+        }
+
+        // Backtrack to the first differing character: one backspace per
+        // displayed character (not per byte).
+        let old_tail = self.text[common_prefix_length..].chars().count();
+        let new_tail_str = &text[common_prefix_length..];
+        let mut output = String::new();
+        output += &"\x08".repeat(old_tail);
+        // Output new suffix
+        output += new_tail_str;
+
+        // If the new text is shorter than the old one: overwrite the leftover
+        // characters with spaces and move the cursor back over them.
+        let new_tail = new_tail_str.chars().count();
+        if old_tail > new_tail {
+            let overlap_count = old_tail - new_tail;
+            output += &" ".repeat(overlap_count);
+            output += &"\x08".repeat(overlap_count);
+        }
+
+        let _ = io::stdout().write(output.as_bytes());
+        let _ = io::stdout().flush();
+        self.text = text.to_string();
+    }
+
+    /// Sets the completion fraction, clamped to `[0.0, 1.0]`.
+    fn set_value(&mut self, value: f32) {
+        self.curr = f32::max(0.0, f32::min(1.0, value));
+    }
+
+    /// Erases the bar from the terminal: backspace to the start of the drawn
+    /// text, overwrite it with spaces, then backspace again to park the
+    /// cursor at the line start.
+    fn clear(&mut self) {
+        if self.hidden {
+            return;
+        }
+
+        // Fill all last text to space and return the cursor
+        let spaces = " ".repeat(self.text.len());
+        let backspaces = "\x08".repeat(self.text.len());
+        print!("{}{}{}", backspaces, spaces, backspaces);
+        let _ = io::stdout().flush();
+
+        self.text = String::new();
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
new file mode 100644
index 000000000..e8291782b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -0,0 +1,86 @@
+//! Applies structured search replace rules from the command line.
+
+use ide_ssr::MatchFinder;
+use project_model::CargoConfig;
+
+use crate::cli::{
+ flags,
+ load_cargo::{load_workspace_at, LoadCargoConfig},
+ Result,
+};
+
+impl flags::Ssr {
+    /// Applies the given structured-search-replace rules to the cargo
+    /// workspace in the current directory and writes edited files to disk.
+    pub fn run(self) -> Result<()> {
+        use ide_db::base_db::SourceDatabaseExt;
+        let cargo_config = CargoConfig::default();
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: true,
+            with_proc_macro: true,
+            prefill_caches: false,
+        };
+        let (host, vfs, _proc_macro) = load_workspace_at(
+            &std::env::current_dir()?,
+            &cargo_config,
+            &load_cargo_config,
+            &|_| {},
+        )?;
+        let db = host.raw_database();
+        let mut match_finder = MatchFinder::at_first_file(db)?;
+        for rule in self.rule {
+            match_finder.add_rule(rule)?;
+        }
+        let edits = match_finder.edits();
+        for (file_id, edit) in edits {
+            // Only files with a real on-disk path can be written back.
+            if let Some(path) = vfs.file_path(file_id).as_path() {
+                let mut contents = db.file_text(file_id).to_string();
+                edit.apply(&mut contents);
+                std::fs::write(path, contents)?;
+            }
+        }
+        Ok(())
+    }
+}
+
+impl flags::Search {
+    /// Searches for `patterns`, printing debug information for any nodes whose text exactly matches
+    /// `debug_snippet`. This is intended for debugging and probably isn't in its current form useful
+    /// for much else.
+    pub fn run(self) -> Result<()> {
+        use ide_db::base_db::SourceDatabaseExt;
+        use ide_db::symbol_index::SymbolsDatabase;
+        let cargo_config = CargoConfig::default();
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: true,
+            with_proc_macro: true,
+            prefill_caches: false,
+        };
+        let (host, _vfs, _proc_macro) = load_workspace_at(
+            &std::env::current_dir()?,
+            &cargo_config,
+            &load_cargo_config,
+            &|_| {},
+        )?;
+        let db = host.raw_database();
+        let mut match_finder = MatchFinder::at_first_file(db)?;
+        for pattern in self.pattern {
+            match_finder.add_search_pattern(pattern)?;
+        }
+        if let Some(debug_snippet) = &self.debug {
+            // Debug mode: dump matcher internals for every node whose text
+            // equals the snippet, across all local source roots.
+            for &root in db.local_roots().iter() {
+                let sr = db.source_root(root);
+                for file_id in sr.iter() {
+                    for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) {
+                        println!("{:#?}", debug_info);
+                    }
+                }
+            }
+        } else {
+            for m in match_finder.matches().flattened().matches {
+                // We could possibly at some point do something more useful than just printing
+                // the matched text. For now though, that's the easiest thing to do.
+                println!("{}", m.matched_text());
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs
new file mode 100644
index 000000000..84659b5ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs
@@ -0,0 +1,16 @@
+//! Read Rust code on stdin, print the file's symbol structure on stdout.
+//! (Doc fixed: previous text was copy-pasted from `parse.rs` and claimed a
+//! syntax-tree dump; this command prints `file_structure` symbols.)
+use ide::Analysis;
+
+use crate::cli::{flags, read_stdin};
+
+impl flags::Symbols {
+    /// Reads a single Rust file from stdin and prints one `Debug` line per
+    /// symbol reported by `Analysis::file_structure`.
+    pub fn run(self) -> anyhow::Result<()> {
+        let text = read_stdin()?;
+        let (analysis, file_id) = Analysis::from_single_file(text);
+        let structure = analysis.file_structure(file_id).unwrap();
+        for s in structure {
+            println!("{:?}", s);
+        }
+        Ok(())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
new file mode 100644
index 000000000..ac0fdf85a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -0,0 +1,1985 @@
+//! Config used by the language server.
+//!
+//! We currently get this config from `initialize` LSP request, which is not the
+//! best way to do it, but was the simplest thing we could implement.
+//!
+//! Of particular interest is the `feature_flags` hash map: while other fields
+//! configure the server itself, feature flags are passed into analysis, and
+//! tweak things like automatic insertion of `()` in completions.
+
+use std::{ffi::OsString, fmt, iter, path::PathBuf};
+
+use flycheck::FlycheckConfig;
+use ide::{
+ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
+ HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig, JoinLinesConfig,
+ Snippet, SnippetScope,
+};
+use ide_db::{
+ imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
+ SnippetCap,
+};
+use itertools::Itertools;
+use lsp_types::{ClientCapabilities, MarkupKind};
+use project_model::{
+ CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource, UnsetTestCrates,
+};
+use rustc_hash::{FxHashMap, FxHashSet};
+use serde::{de::DeserializeOwned, Deserialize};
+use vfs::AbsPathBuf;
+
+use crate::{
+ caps::completion_item_edit_resolve,
+ diagnostics::DiagnosticsMapConfig,
+ line_index::OffsetEncoding,
+ lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
+};
+
+mod patch_old_style;
+
+// Conventions for configuration keys to preserve maximal extendability without breakage:
+// - Toggles (be it binary true/false or with more options in-between) should almost always suffix as `_enable`
+// This has the benefit of namespaces being extensible, and if the suffix doesn't fit later it can be changed without breakage.
+// - In general be wary of using the namespace of something verbatim, it prevents us from adding subkeys in the future
+// - Don't use abbreviations unless really necessary
+// - foo_command = overrides the subcommand, foo_overrideCommand allows full overwriting, extra args only applies for foo_command
+
+// Defines the server-side configuration of the rust-analyzer. We generate
+// *parts* of VS Code's `package.json` config from this.
+//
+// However, editor specific config, which the server doesn't know about, should
+// be specified directly in `package.json`.
+//
+// To deprecate an option by replacing it with another name use `new_name | old_name` so that we keep
+// parsing the old name.
+config_data! {
+ struct ConfigData {
+ /// Placeholder expression to use for missing expressions in assists.
+ assist_expressionFillDefault: ExprFillDefaultDef = "\"todo\"",
+
+ /// Warm up caches on project load.
+ cachePriming_enable: bool = "true",
+ /// How many worker threads to handle priming caches. The default `0` means to pick automatically.
+ cachePriming_numThreads: ParallelCachePrimingNumThreads = "0",
+
+ /// Automatically refresh project info via `cargo metadata` on
+ /// `Cargo.toml` or `.cargo/config.toml` changes.
+ cargo_autoreload: bool = "true",
+ /// Run build scripts (`build.rs`) for more precise code analysis.
+ cargo_buildScripts_enable: bool = "true",
+ /// Override the command rust-analyzer uses to run build scripts and
+ /// build procedural macros. The command is required to output json
+ /// and should therefore include `--message-format=json` or a similar
+ /// option.
+ ///
+ /// By default, a cargo invocation will be constructed for the configured
+ /// targets and features, with the following base command line:
+ ///
+ /// ```bash
+ /// cargo check --quiet --workspace --message-format=json --all-targets
+ /// ```
+ /// .
+ cargo_buildScripts_overrideCommand: Option<Vec<String>> = "null",
+ /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
+ /// avoid checking unnecessary things.
+ cargo_buildScripts_useRustcWrapper: bool = "true",
+ /// List of features to activate.
+ ///
+ /// Set this to `"all"` to pass `--all-features` to cargo.
+ cargo_features: CargoFeatures = "[]",
+ /// Whether to pass `--no-default-features` to cargo.
+ cargo_noDefaultFeatures: bool = "false",
+ /// Internal config for debugging, disables loading of sysroot crates.
+ cargo_noSysroot: bool = "false",
+ /// Compilation target override (target triple).
+ cargo_target: Option<String> = "null",
+ /// Unsets `#[cfg(test)]` for the specified crates.
+ cargo_unsetTest: Vec<String> = "[\"core\"]",
+
+ /// Check all targets and tests (`--all-targets`).
+ checkOnSave_allTargets: bool = "true",
+ /// Cargo command to use for `cargo check`.
+ checkOnSave_command: String = "\"check\"",
+ /// Run specified `cargo check` command for diagnostics on save.
+ checkOnSave_enable: bool = "true",
+ /// Extra arguments for `cargo check`.
+ checkOnSave_extraArgs: Vec<String> = "[]",
+ /// List of features to activate. Defaults to
+ /// `#rust-analyzer.cargo.features#`.
+ ///
+ /// Set to `"all"` to pass `--all-features` to Cargo.
+ checkOnSave_features: Option<CargoFeatures> = "null",
+ /// Whether to pass `--no-default-features` to Cargo. Defaults to
+ /// `#rust-analyzer.cargo.noDefaultFeatures#`.
+ checkOnSave_noDefaultFeatures: Option<bool> = "null",
+ /// Override the command rust-analyzer uses instead of `cargo check` for
+ /// diagnostics on save. The command is required to output json and
+        /// should therefore include `--message-format=json` or a similar option.
+ ///
+ /// If you're changing this because you're using some tool wrapping
+ /// Cargo, you might also want to change
+ /// `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
+ ///
+ /// An example command would be:
+ ///
+ /// ```bash
+ /// cargo check --workspace --message-format=json --all-targets
+ /// ```
+ /// .
+ checkOnSave_overrideCommand: Option<Vec<String>> = "null",
+ /// Check for a specific target. Defaults to
+ /// `#rust-analyzer.cargo.target#`.
+ checkOnSave_target: Option<String> = "null",
+
+ /// Toggles the additional completions that automatically add imports when completed.
+ /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+ completion_autoimport_enable: bool = "true",
+ /// Toggles the additional completions that automatically show method calls and field accesses
+ /// with `self` prefixed to them when inside a method.
+ completion_autoself_enable: bool = "true",
+ /// Whether to add parenthesis and argument snippets when completing function.
+ completion_callable_snippets: CallableCompletionDef = "\"fill_arguments\"",
+ /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+ completion_postfix_enable: bool = "true",
+ /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+ completion_privateEditable_enable: bool = "false",
+ /// Custom completion snippets.
+ // NOTE: Keep this list in sync with the feature docs of user snippets.
+ completion_snippets_custom: FxHashMap<String, SnippetDef> = r#"{
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ }"#,
+
+ /// List of rust-analyzer diagnostics to disable.
+ diagnostics_disabled: FxHashSet<String> = "[]",
+ /// Whether to show native rust-analyzer diagnostics.
+ diagnostics_enable: bool = "true",
+ /// Whether to show experimental rust-analyzer diagnostics that might
+ /// have more false positives than usual.
+ diagnostics_experimental_enable: bool = "false",
+ /// Map of prefixes to be substituted when parsing diagnostic file paths.
+ /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
+ diagnostics_remapPrefix: FxHashMap<String, String> = "{}",
+ /// List of warnings that should be displayed with hint severity.
+ ///
+ /// The warnings will be indicated by faded text or three dots in code
+ /// and will not show up in the `Problems Panel`.
+ diagnostics_warningsAsHint: Vec<String> = "[]",
+ /// List of warnings that should be displayed with info severity.
+ ///
+ /// The warnings will be indicated by a blue squiggly underline in code
+ /// and a blue icon in the `Problems Panel`.
+ diagnostics_warningsAsInfo: Vec<String> = "[]",
+
+ /// These directories will be ignored by rust-analyzer. They are
+ /// relative to the workspace root, and globs are not supported. You may
+ /// also need to add the folders to Code's `files.watcherExclude`.
+ files_excludeDirs: Vec<PathBuf> = "[]",
+ /// Controls file watching implementation.
+ files_watcher: FilesWatcherDef = "\"client\"",
+
+ /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+ highlightRelated_breakPoints_enable: bool = "true",
+ /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
+ highlightRelated_exitPoints_enable: bool = "true",
+ /// Enables highlighting of related references while the cursor is on any identifier.
+ highlightRelated_references_enable: bool = "true",
+ /// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
+ highlightRelated_yieldPoints_enable: bool = "true",
+
+ /// Whether to show `Debug` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_debug_enable: bool = "true",
+ /// Whether to show HoverActions in Rust files.
+ hover_actions_enable: bool = "true",
+ /// Whether to show `Go to Type Definition` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_gotoTypeDef_enable: bool = "true",
+ /// Whether to show `Implementations` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_implementations_enable: bool = "true",
+ /// Whether to show `References` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_references_enable: bool = "false",
+ /// Whether to show `Run` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_run_enable: bool = "true",
+
+ /// Whether to show documentation on hover.
+ hover_documentation_enable: bool = "true",
+ /// Use markdown syntax for links in hover.
+ hover_links_enable: bool = "true",
+
+ /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+ imports_granularity_enforce: bool = "false",
+ /// How imports should be grouped into use statements.
+ imports_granularity_group: ImportGranularityDef = "\"crate\"",
+ /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+ imports_group_enable: bool = "true",
+ /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+ imports_merge_glob: bool = "true",
+ /// The path structure for newly inserted paths to use.
+ imports_prefix: ImportPrefixDef = "\"plain\"",
+
+ /// Whether to show inlay type hints for binding modes.
+ inlayHints_bindingModeHints_enable: bool = "false",
+ /// Whether to show inlay type hints for method chains.
+ inlayHints_chainingHints_enable: bool = "true",
+ /// Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+ inlayHints_closingBraceHints_enable: bool = "true",
+ /// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+ /// to always show them).
+ inlayHints_closingBraceHints_minLines: usize = "25",
+ /// Whether to show inlay type hints for return types of closures.
+ inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"",
+ /// Whether to show inlay type hints for elided lifetimes in function signatures.
+ inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
+ /// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+ inlayHints_lifetimeElisionHints_useParameterNames: bool = "false",
+ /// Maximum length for inlay hints. Set to null to have an unlimited length.
+ inlayHints_maxLength: Option<usize> = "25",
+ /// Whether to show function parameter name inlay hints at the call
+ /// site.
+ inlayHints_parameterHints_enable: bool = "true",
+ /// Whether to show inlay type hints for compiler inserted reborrows.
+ inlayHints_reborrowHints_enable: ReborrowHintsDef = "\"never\"",
+ /// Whether to render leading colons for type hints, and trailing colons for parameter hints.
+ inlayHints_renderColons: bool = "true",
+ /// Whether to show inlay type hints for variables.
+ inlayHints_typeHints_enable: bool = "true",
+ /// Whether to hide inlay type hints for `let` statements that initialize to a closure.
+ /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
+ inlayHints_typeHints_hideClosureInitialization: bool = "false",
+ /// Whether to hide inlay type hints for constructors.
+ inlayHints_typeHints_hideNamedConstructor: bool = "false",
+
+ /// Join lines merges consecutive declaration and initialization of an assignment.
+ joinLines_joinAssignments: bool = "true",
+ /// Join lines inserts else between consecutive ifs.
+ joinLines_joinElseIf: bool = "true",
+ /// Join lines removes trailing commas.
+ joinLines_removeTrailingComma: bool = "true",
+ /// Join lines unwraps trivial blocks.
+ joinLines_unwrapTrivialBlock: bool = "true",
+
+ /// Whether to show `Debug` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_debug_enable: bool = "true",
+ /// Whether to show CodeLens in Rust files.
+ lens_enable: bool = "true",
+ /// Internal config: use custom client-side commands even when the
+ /// client doesn't set the corresponding capability.
+ lens_forceCustomCommands: bool = "true",
+ /// Whether to show `Implementations` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_implementations_enable: bool = "true",
+ /// Whether to show `References` lens for Struct, Enum, and Union.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_adt_enable: bool = "false",
+ /// Whether to show `References` lens for Enum Variants.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_enumVariant_enable: bool = "false",
+ /// Whether to show `Method References` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_references_method_enable: bool = "false",
+ /// Whether to show `References` lens for Trait.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_trait_enable: bool = "false",
+ /// Whether to show `Run` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_run_enable: bool = "true",
+
+ /// Disable project auto-discovery in favor of explicitly specified set
+ /// of projects.
+ ///
+ /// Elements must be paths pointing to `Cargo.toml`,
+ /// `rust-project.json`, or JSON objects in `rust-project.json` format.
+ linkedProjects: Vec<ManifestOrProjectJson> = "[]",
+
+ /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+ lru_capacity: Option<usize> = "null",
+
+ /// Whether to show `can't find Cargo.toml` error message.
+ notifications_cargoTomlNotFound: bool = "true",
+
+ /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+ procMacro_attributes_enable: bool = "true",
+ /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+ procMacro_enable: bool = "true",
+ /// These proc-macros will be ignored when trying to expand them.
+ ///
+ /// This config takes a map of crate names with the exported proc-macro names to ignore as values.
+ procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>> = "{}",
+ /// Internal config, path to proc-macro server executable (typically,
+ /// this is rust-analyzer itself, but we override this in tests).
+ procMacro_server: Option<PathBuf> = "null",
+
+ /// Command to be executed instead of 'cargo' for runnables.
+ runnables_command: Option<String> = "null",
+ /// Additional arguments to be passed to cargo for runnables such as
+ /// tests or binaries. For example, it may be `--release`.
+ runnables_extraArgs: Vec<String> = "[]",
+
+ /// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
+ /// projects, or "discover" to try to automatically find it if the `rustc-dev` component
+ /// is installed.
+ ///
+ /// Any project which uses rust-analyzer with the rustcPrivate
+ /// crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
+ ///
+ /// This option does not take effect until rust-analyzer is restarted.
+ rustc_source: Option<String> = "null",
+
+ /// Additional arguments to `rustfmt`.
+ rustfmt_extraArgs: Vec<String> = "[]",
+ /// Advanced option, fully override the command rust-analyzer uses for
+ /// formatting.
+ rustfmt_overrideCommand: Option<Vec<String>> = "null",
+ /// Enables the use of rustfmt's unstable range formatting command for the
+ /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
+ /// available on a nightly build.
+ rustfmt_rangeFormatting_enable: bool = "false",
+
+ /// Use semantic tokens for strings.
+ ///
+ /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+ /// By disabling semantic tokens for strings, other grammars can be used to highlight
+ /// their contents.
+ semanticHighlighting_strings_enable: bool = "true",
+
+ /// Show full signature of the callable. Only shows parameters if disabled.
+ signatureInfo_detail: SignatureDetail = "\"full\"",
+ /// Show documentation.
+ signatureInfo_documentation_enable: bool = "true",
+
+ /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
+ typing_autoClosingAngleBrackets_enable: bool = "false",
+
+ /// Workspace symbol search kind.
+ workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = "\"only_types\"",
+ /// Limits the number of items returned from a workspace symbol search (Defaults to 128).
+ /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
+ /// Other clients requires all results upfront and might require a higher limit.
+ workspace_symbol_search_limit: usize = "128",
+ /// Workspace symbol search scope.
+ workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = "\"workspace\"",
+ }
+}
+
+impl Default for ConfigData {
+    fn default() -> Self {
+        // NOTE(review): `from_json` is generated by `config_data!`; parsing
+        // `Null` presumably leaves every field at the default listed in the
+        // table above (the error accumulator is discarded) — confirm.
+        ConfigData::from_json(serde_json::Value::Null, &mut Vec::new())
+    }
+}
+
/// The resolved rust-analyzer configuration: the client's capabilities plus
/// the user's settings, interpreted relative to the workspace root.
#[derive(Debug, Clone)]
pub struct Config {
    /// Project manifests discovered in the workspace, if discovery ran.
    pub discovered_projects: Option<Vec<ProjectManifest>>,
    // Capabilities the LSP client announced at initialization.
    caps: lsp_types::ClientCapabilities,
    // Workspace root; relative paths from the settings are joined onto it.
    root_path: AbsPathBuf,
    // Values parsed from the client-supplied JSON settings object.
    data: ConfigData,
    // Files to analyze that are not part of any workspace (`detachedFiles`).
    detached_files: Vec<AbsPathBuf>,
    // Custom completion snippets built in `update` from
    // `completion_snippets_custom`.
    snippets: Vec<Snippet>,
}
+
/// Thread count used for cache priming; `0` is resolved to the number of
/// physical cores in `Config::prime_caches_num_threads`.
type ParallelCachePrimingNumThreads = u8;
+
/// A project the server should load: either a manifest file on disk or an
/// inline `rust-project.json` description supplied via the settings.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum LinkedProject {
    ProjectManifest(ProjectManifest),
    InlineJsonProject(ProjectJson),
}
+
+impl From<ProjectManifest> for LinkedProject {
+ fn from(v: ProjectManifest) -> Self {
+ LinkedProject::ProjectManifest(v)
+ }
+}
+
+impl From<ProjectJson> for LinkedProject {
+ fn from(v: ProjectJson) -> Self {
+ LinkedProject::InlineJsonProject(v)
+ }
+}
+
/// How signature help is rendered (see `Config::call_info`).
pub struct CallInfoConfig {
    /// Show only the parameters instead of the full signature.
    pub params_only: bool,
    /// Include documentation in the signature info.
    pub docs: bool,
}
+
/// Which code lenses are enabled, grouped by the kind of item they annotate.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LensConfig {
    // runnables
    pub run: bool,
    pub debug: bool,

    // implementations
    pub implementations: bool,

    // references
    pub method_refs: bool,
    pub refs_adt: bool,   // for Struct, Enum, Union and Trait
    pub refs_trait: bool, // for Struct, Enum, Union and Trait
    pub enum_variant_refs: bool,
}

impl LensConfig {
    /// True if at least one lens kind is enabled.
    pub fn any(&self) -> bool {
        self.runnable() || self.implementations || self.references()
    }

    /// True if every lens kind is disabled.
    pub fn none(&self) -> bool {
        !self.any()
    }

    /// True if the run/debug lenses should be computed.
    pub fn runnable(&self) -> bool {
        self.run || self.debug
    }

    /// True if any of the reference-counting lenses should be computed.
    pub fn references(&self) -> bool {
        self.method_refs || self.refs_adt || self.refs_trait || self.enum_variant_refs
    }
}
+
/// Which hover actions are offered (see `Config::hover_actions`).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct HoverActionsConfig {
    pub implementations: bool,
    pub references: bool,
    pub run: bool,
    pub debug: bool,
    pub goto_type_def: bool,
}

impl HoverActionsConfig {
    /// The configuration with every hover action switched off.
    pub const NO_ACTIONS: Self = Self {
        implementations: false,
        references: false,
        run: false,
        debug: false,
        goto_type_def: false,
    };

    /// True if at least one hover action is enabled.
    pub fn any(&self) -> bool {
        self.runnable() || self.implementations || self.references || self.goto_type_def
    }

    /// True if no hover action is enabled.
    pub fn none(&self) -> bool {
        !self.any()
    }

    /// True if either of the run/debug actions is enabled.
    pub fn runnable(&self) -> bool {
        self.run || self.debug
    }
}
+
/// File-watching configuration (see `Config::files`).
#[derive(Debug, Clone)]
pub struct FilesConfig {
    /// Who watches the filesystem for changes.
    pub watcher: FilesWatcher,
    /// Absolute directories excluded from project discovery/watching.
    pub exclude: Vec<AbsPathBuf>,
}
+
/// Whether file watching is delegated to the LSP client or done by the server.
#[derive(Debug, Clone)]
pub enum FilesWatcher {
    Client,
    Server,
}
+
/// Which user-facing notifications the server may show.
#[derive(Debug, Clone)]
pub struct NotificationsConfig {
    /// Notify when no `Cargo.toml` could be found.
    pub cargo_toml_not_found: bool,
}
+
/// How to format code: plain `rustfmt` with extra arguments, or a fully
/// custom command (see `Config::rustfmt`).
#[derive(Debug, Clone)]
pub enum RustfmtConfig {
    Rustfmt { extra_args: Vec<String>, enable_range_formatting: bool },
    CustomCommand { command: String, args: Vec<String> },
}
+
/// Configuration for runnable items, such as `main` function or tests.
#[derive(Debug, Clone)]
pub struct RunnablesConfig {
    /// Custom command to be executed instead of `cargo` for runnables.
    pub override_cargo: Option<String>,
    /// Additional arguments for the `cargo`, e.g. `--release`.
    pub cargo_extra_args: Vec<String>,
}
+
/// Configuration for workspace symbol search requests.
#[derive(Debug, Clone)]
pub struct WorkspaceSymbolConfig {
    /// In what scope should the symbol be searched in.
    pub search_scope: WorkspaceSymbolSearchScope,
    /// What kind of symbol is being searched for.
    pub search_kind: WorkspaceSymbolSearchKind,
    /// How many items are returned at most.
    pub search_limit: usize,
}
+
/// Which client-side commands the editor declared support for (see
/// `Config::client_commands`).
pub struct ClientCommandsConfig {
    pub run_single: bool,
    pub debug_single: bool,
    pub show_reference: bool,
    pub goto_location: bool,
    pub trigger_parameter_hints: bool,
}
+
/// All errors collected while applying a settings update; each entry pairs
/// the offending config key with its deserialization error.
#[derive(Debug)]
pub struct ConfigUpdateError {
    errors: Vec<(String, serde_json::Error)>,
}
+
+impl fmt::Display for ConfigUpdateError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let errors = self.errors.iter().format_with("\n", |(key, e), f| {
+ f(key)?;
+ f(&": ")?;
+ f(e)
+ });
+ write!(
+ f,
+ "rust-analyzer found {} invalid config value{}:\n{}",
+ self.errors.len(),
+ if self.errors.len() == 1 { "" } else { "s" },
+ errors
+ )
+ }
+}
+
impl Config {
    /// Creates a configuration with default values for the given workspace
    /// root and client capabilities.
    pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self {
        Config {
            caps,
            data: ConfigData::default(),
            detached_files: Vec::new(),
            discovered_projects: None,
            root_path,
            snippets: Default::default(),
        }
    }

    /// Applies a JSON settings object sent by the client: re-parses all
    /// config data and rebuilds the custom completion snippets.
    ///
    /// On failure, returns every collected error; note that the fields that
    /// did deserialize are still applied before `Err` is returned.
    pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> {
        tracing::info!("updating config from JSON: {:#}", json);
        // Nothing to do for a missing or empty settings object.
        if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
            return Ok(());
        }
        let mut errors = Vec::new();
        // `detachedFiles` lives outside `ConfigData` and is pulled out first.
        self.detached_files =
            get_field::<Vec<PathBuf>>(&mut json, &mut errors, "detachedFiles", None, "[]")
                .into_iter()
                .map(AbsPathBuf::assert)
                .collect();
        // Rewrite settings that use deprecated key names into their current shape.
        patch_old_style::patch_json_for_outdated_configs(&mut json);
        self.data = ConfigData::from_json(json, &mut errors);
        tracing::debug!("deserialized config data: {:#?}", self.data);
        // Rebuild the user-defined completion snippets from scratch.
        self.snippets.clear();
        for (name, def) in self.data.completion_snippets_custom.iter() {
            // A snippet without any trigger can never fire; skip it silently.
            if def.prefix.is_empty() && def.postfix.is_empty() {
                continue;
            }
            let scope = match def.scope {
                SnippetScopeDef::Expr => SnippetScope::Expr,
                SnippetScopeDef::Type => SnippetScope::Type,
                SnippetScopeDef::Item => SnippetScope::Item,
            };
            match Snippet::new(
                &def.prefix,
                &def.postfix,
                &def.body,
                def.description.as_ref().unwrap_or(name),
                &def.requires,
                scope,
            ) {
                Some(snippet) => self.snippets.push(snippet),
                None => errors.push((
                    format!("snippet {name} is invalid"),
                    <serde_json::Error as serde::de::Error>::custom(
                        "snippet path is invalid or triggers are missing",
                    ),
                )),
            }
        }

        self.validate(&mut errors);

        if errors.is_empty() {
            Ok(())
        } else {
            Err(ConfigUpdateError { errors })
        }
    }

    /// Sanity-checks value combinations that deserialize fine but are
    /// nonetheless unusable, pushing any problems into `error_sink`.
    fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) {
        use serde::de::Error;
        if self.data.checkOnSave_command.is_empty() {
            error_sink.push((
                "/checkOnSave/command".to_string(),
                serde_json::Error::custom("expected a non-empty string"),
            ));
        }
    }

    /// JSON schema describing every configuration option.
    pub fn json_schema() -> serde_json::Value {
        ConfigData::json_schema()
    }

    /// The workspace root all relative config paths are resolved against.
    pub fn root_path(&self) -> &AbsPathBuf {
        &self.root_path
    }

    /// The capabilities the client announced at initialization.
    pub fn caps(&self) -> &lsp_types::ClientCapabilities {
        &self.caps
    }

    /// Files to analyze that are not part of any workspace.
    pub fn detached_files(&self) -> &[AbsPathBuf] {
        &self.detached_files
    }
}
+
// Helpers for digging through deeply nested `Option` data (mostly client
// capabilities): `try_!` wraps an expression in a closure returning `Option`
// so `?` can be used inside it; `try_or!` unwraps with an explicit fallback;
// `try_or_def!` falls back to the type's `Default`.
macro_rules! try_ {
    ($expr:expr) => {
        || -> _ { Some($expr) }()
    };
}
macro_rules! try_or {
    ($expr:expr, $or:expr) => {
        try_!($expr).unwrap_or($or)
    };
}

macro_rules! try_or_def {
    ($expr:expr) => {
        try_!($expr).unwrap_or_default()
    };
}
+
impl Config {
    /// Returns the projects to load: the explicitly configured
    /// `linkedProjects` if any, otherwise the discovered manifests minus
    /// those under an excluded directory.
    pub fn linked_projects(&self) -> Vec<LinkedProject> {
        match self.data.linkedProjects.as_slice() {
            [] => match self.discovered_projects.as_ref() {
                Some(discovered_projects) => {
                    let exclude_dirs: Vec<_> = self
                        .data
                        .files_excludeDirs
                        .iter()
                        .map(|p| self.root_path.join(p))
                        .collect();
                    discovered_projects
                        .iter()
                        .filter(|p| {
                            let (ProjectManifest::ProjectJson(path)
                            | ProjectManifest::CargoToml(path)) = p;
                            !exclude_dirs.iter().any(|p| path.starts_with(p))
                        })
                        .cloned()
                        .map(LinkedProject::from)
                        .collect()
                }
                None => Vec::new(),
            },
            linked_projects => linked_projects
                .iter()
                .filter_map(|linked_project| match linked_project {
                    ManifestOrProjectJson::Manifest(it) => {
                        let path = self.root_path.join(it);
                        // Unloadable projects are logged and skipped, not fatal.
                        ProjectManifest::from_manifest_file(path)
                            .map_err(|e| tracing::error!("failed to load linked project: {}", e))
                            .ok()
                            .map(Into::into)
                    }
                    ManifestOrProjectJson::ProjectJson(it) => {
                        Some(ProjectJson::new(&self.root_path, it.clone()).into())
                    }
                })
                .collect(),
        }
    }

    /// Whether the client supports dynamic registration of save notifications.
    pub fn did_save_text_document_dynamic_registration(&self) -> bool {
        let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?);
        caps.did_save == Some(true) && caps.dynamic_registration == Some(true)
    }

    /// Whether the client supports dynamic registration of file watchers.
    pub fn did_change_watched_files_dynamic_registration(&self) -> bool {
        try_or_def!(
            self.caps.workspace.as_ref()?.did_change_watched_files.as_ref()?.dynamic_registration?
        )
    }

    /// Whether caches should be primed eagerly on startup.
    pub fn prefill_caches(&self) -> bool {
        self.data.cachePriming_enable
    }

    /// Whether the client supports `LocationLink` in definition responses.
    pub fn location_link(&self) -> bool {
        try_or_def!(self.caps.text_document.as_ref()?.definition?.link_support?)
    }

    /// Whether the client only supports line-granular folding ranges.
    pub fn line_folding_only(&self) -> bool {
        try_or_def!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?)
    }

    /// Whether the client supports hierarchical document symbols.
    pub fn hierarchical_symbols(&self) -> bool {
        try_or_def!(
            self.caps
                .text_document
                .as_ref()?
                .document_symbol
                .as_ref()?
                .hierarchical_document_symbol_support?
        )
    }

    /// Whether the client supports `CodeAction` literals in responses.
    pub fn code_action_literals(&self) -> bool {
        try_!(self
            .caps
            .text_document
            .as_ref()?
            .code_action
            .as_ref()?
            .code_action_literal_support
            .as_ref()?)
        .is_some()
    }

    /// Whether the client supports work-done progress reporting.
    pub fn work_done_progress(&self) -> bool {
        try_or_def!(self.caps.window.as_ref()?.work_done_progress?)
    }

    /// Whether the client supports `workspace/willRenameFiles`.
    pub fn will_rename(&self) -> bool {
        try_or_def!(self.caps.workspace.as_ref()?.file_operations.as_ref()?.will_rename?)
    }

    /// Whether the client supports change annotations in workspace edits.
    pub fn change_annotation_support(&self) -> bool {
        try_!(self
            .caps
            .workspace
            .as_ref()?
            .workspace_edit
            .as_ref()?
            .change_annotation_support
            .as_ref()?)
        .is_some()
    }

    /// Whether the client can lazily resolve the `edit` of a code action.
    pub fn code_action_resolve(&self) -> bool {
        try_or_def!(self
            .caps
            .text_document
            .as_ref()?
            .code_action
            .as_ref()?
            .resolve_support
            .as_ref()?
            .properties
            .as_slice())
        .iter()
        .any(|it| it == "edit")
    }

    /// Whether the client supports parameter label offsets in signature help.
    pub fn signature_help_label_offsets(&self) -> bool {
        try_or_def!(
            self.caps
                .text_document
                .as_ref()?
                .signature_help
                .as_ref()?
                .signature_information
                .as_ref()?
                .parameter_information
                .as_ref()?
                .label_offset_support?
        )
    }

    /// Whether the client supports completion item label details.
    pub fn completion_label_details_support(&self) -> bool {
        try_!(self
            .caps
            .text_document
            .as_ref()?
            .completion
            .as_ref()?
            .completion_item
            .as_ref()?
            .label_details_support
            .as_ref()?)
        .is_some()
    }

    /// The offset encoding negotiated with the client (UTF-8 if supported,
    /// otherwise the LSP default UTF-16).
    pub fn offset_encoding(&self) -> OffsetEncoding {
        if supports_utf8(&self.caps) {
            OffsetEncoding::Utf8
        } else {
            OffsetEncoding::Utf16
        }
    }

    // Reads a boolean flag from the client's `experimental` capabilities.
    fn experimental(&self, index: &'static str) -> bool {
        try_or_def!(self.caps.experimental.as_ref()?.get(index)?.as_bool()?)
    }

    /// Whether the client supports the experimental code-action-group extension.
    pub fn code_action_group(&self) -> bool {
        self.experimental("codeActionGroup")
    }

    /// Whether the client supports the experimental server-status notification.
    pub fn server_status_notification(&self) -> bool {
        self.experimental("serverStatusNotification")
    }

    /// Whether diagnostics should be published at all.
    pub fn publish_diagnostics(&self) -> bool {
        self.data.diagnostics_enable
    }

    /// Native-diagnostics configuration derived from the settings.
    pub fn diagnostics(&self) -> DiagnosticsConfig {
        DiagnosticsConfig {
            proc_attr_macros_enabled: self.expand_proc_attr_macros(),
            proc_macros_enabled: self.data.procMacro_enable,
            disable_experimental: !self.data.diagnostics_experimental_enable,
            disabled: self.data.diagnostics_disabled.clone(),
            expr_fill_default: match self.data.assist_expressionFillDefault {
                ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo,
                ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
            },
        }
    }

    /// Remapping/severity-override configuration for `cargo check` diagnostics.
    pub fn diagnostics_map(&self) -> DiagnosticsMapConfig {
        DiagnosticsMapConfig {
            remap_prefix: self.data.diagnostics_remapPrefix.clone(),
            warnings_as_info: self.data.diagnostics_warningsAsInfo.clone(),
            warnings_as_hint: self.data.diagnostics_warningsAsHint.clone(),
        }
    }

    /// Configured LRU query-cache capacity, if any.
    pub fn lru_capacity(&self) -> Option<usize> {
        self.data.lru_capacity
    }

    /// Path and arguments of the proc-macro server, or `None` when proc
    /// macros are disabled. Defaults to this very executable in
    /// `proc-macro` mode when no explicit server is configured.
    pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, Vec<OsString>)> {
        if !self.data.procMacro_enable {
            return None;
        }
        let path = match &self.data.procMacro_server {
            Some(it) => self.root_path.join(it),
            None => AbsPathBuf::assert(std::env::current_exe().ok()?),
        };
        Some((path, vec!["proc-macro".into()]))
    }

    /// Proc macros to skip, keyed by crate name (`procMacro.ignored`).
    pub fn dummy_replacements(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> {
        &self.data.procMacro_ignored
    }

    /// Whether attribute proc macros should be expanded.
    pub fn expand_proc_attr_macros(&self) -> bool {
        self.data.procMacro_enable && self.data.procMacro_attributes_enable
    }

    /// File-watching configuration; client-side watching requires dynamic
    /// registration support, otherwise the server watches itself.
    pub fn files(&self) -> FilesConfig {
        FilesConfig {
            watcher: match self.data.files_watcher {
                FilesWatcherDef::Client if self.did_change_watched_files_dynamic_registration() => {
                    FilesWatcher::Client
                }
                _ => FilesWatcher::Server,
            },
            exclude: self.data.files_excludeDirs.iter().map(|it| self.root_path.join(it)).collect(),
        }
    }

    /// Notification-related settings.
    pub fn notifications(&self) -> NotificationsConfig {
        NotificationsConfig { cargo_toml_not_found: self.data.notifications_cargoTomlNotFound }
    }

    /// Whether the workspace should reload automatically on cargo changes.
    pub fn cargo_autoreload(&self) -> bool {
        self.data.cargo_autoreload
    }

    /// Whether build scripts should run; proc-macro support implies it.
    pub fn run_build_scripts(&self) -> bool {
        self.data.cargo_buildScripts_enable || self.data.procMacro_enable
    }

    /// Cargo configuration derived from the settings.
    pub fn cargo(&self) -> CargoConfig {
        // `rustc.source` is either the magic string "discover" or a path
        // relative to the workspace root.
        let rustc_source = self.data.rustc_source.as_ref().map(|rustc_src| {
            if rustc_src == "discover" {
                RustcSource::Discover
            } else {
                RustcSource::Path(self.root_path.join(rustc_src))
            }
        });

        CargoConfig {
            no_default_features: self.data.cargo_noDefaultFeatures,
            all_features: matches!(self.data.cargo_features, CargoFeatures::All),
            features: match &self.data.cargo_features {
                CargoFeatures::All => vec![],
                CargoFeatures::Listed(it) => it.clone(),
            },
            target: self.data.cargo_target.clone(),
            no_sysroot: self.data.cargo_noSysroot,
            rustc_source,
            unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
            wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
            run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
        }
    }

    /// Formatting configuration: a custom override command takes precedence
    /// over plain `rustfmt`.
    pub fn rustfmt(&self) -> RustfmtConfig {
        match &self.data.rustfmt_overrideCommand {
            Some(args) if !args.is_empty() => {
                let mut args = args.clone();
                let command = args.remove(0);
                RustfmtConfig::CustomCommand { command, args }
            }
            Some(_) | None => RustfmtConfig::Rustfmt {
                extra_args: self.data.rustfmt_extraArgs.clone(),
                enable_range_formatting: self.data.rustfmt_rangeFormatting_enable,
            },
        }
    }

    /// Check-on-save configuration, or `None` when disabled. The
    /// `checkOnSave.*` options fall back to the corresponding `cargo.*` ones.
    pub fn flycheck(&self) -> Option<FlycheckConfig> {
        if !self.data.checkOnSave_enable {
            return None;
        }
        let flycheck_config = match &self.data.checkOnSave_overrideCommand {
            Some(args) if !args.is_empty() => {
                let mut args = args.clone();
                let command = args.remove(0);
                FlycheckConfig::CustomCommand { command, args }
            }
            Some(_) | None => FlycheckConfig::CargoCommand {
                command: self.data.checkOnSave_command.clone(),
                target_triple: self
                    .data
                    .checkOnSave_target
                    .clone()
                    .or_else(|| self.data.cargo_target.clone()),
                all_targets: self.data.checkOnSave_allTargets,
                no_default_features: self
                    .data
                    .checkOnSave_noDefaultFeatures
                    .unwrap_or(self.data.cargo_noDefaultFeatures),
                all_features: matches!(
                    self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features),
                    CargoFeatures::All
                ),
                features: match self
                    .data
                    .checkOnSave_features
                    .clone()
                    .unwrap_or_else(|| self.data.cargo_features.clone())
                {
                    CargoFeatures::All => vec![],
                    CargoFeatures::Listed(it) => it,
                },
                extra_args: self.data.checkOnSave_extraArgs.clone(),
            },
        };
        Some(flycheck_config)
    }

    /// Runnables configuration derived from the settings.
    pub fn runnables(&self) -> RunnablesConfig {
        RunnablesConfig {
            override_cargo: self.data.runnables_command.clone(),
            cargo_extra_args: self.data.runnables_extraArgs.clone(),
        }
    }

    /// Inlay-hint configuration derived from the settings.
    pub fn inlay_hints(&self) -> InlayHintsConfig {
        InlayHintsConfig {
            render_colons: self.data.inlayHints_renderColons,
            type_hints: self.data.inlayHints_typeHints_enable,
            parameter_hints: self.data.inlayHints_parameterHints_enable,
            chaining_hints: self.data.inlayHints_chainingHints_enable,
            closure_return_type_hints: match self.data.inlayHints_closureReturnTypeHints_enable {
                ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always,
                ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never,
                ClosureReturnTypeHintsDef::WithBlock => ide::ClosureReturnTypeHints::WithBlock,
            },
            lifetime_elision_hints: match self.data.inlayHints_lifetimeElisionHints_enable {
                LifetimeElisionDef::Always => ide::LifetimeElisionHints::Always,
                LifetimeElisionDef::Never => ide::LifetimeElisionHints::Never,
                LifetimeElisionDef::SkipTrivial => ide::LifetimeElisionHints::SkipTrivial,
            },
            hide_named_constructor_hints: self.data.inlayHints_typeHints_hideNamedConstructor,
            hide_closure_initialization_hints: self
                .data
                .inlayHints_typeHints_hideClosureInitialization,
            reborrow_hints: match self.data.inlayHints_reborrowHints_enable {
                ReborrowHintsDef::Always => ide::ReborrowHints::Always,
                ReborrowHintsDef::Never => ide::ReborrowHints::Never,
                ReborrowHintsDef::Mutable => ide::ReborrowHints::MutableOnly,
            },
            binding_mode_hints: self.data.inlayHints_bindingModeHints_enable,
            param_names_for_lifetime_elision_hints: self
                .data
                .inlayHints_lifetimeElisionHints_useParameterNames,
            max_length: self.data.inlayHints_maxLength,
            closing_brace_hints_min_lines: if self.data.inlayHints_closingBraceHints_enable {
                Some(self.data.inlayHints_closingBraceHints_minLines)
            } else {
                None
            },
        }
    }

    // Import-insertion configuration shared by completion and assists.
    fn insert_use_config(&self) -> InsertUseConfig {
        InsertUseConfig {
            granularity: match self.data.imports_granularity_group {
                ImportGranularityDef::Preserve => ImportGranularity::Preserve,
                ImportGranularityDef::Item => ImportGranularity::Item,
                ImportGranularityDef::Crate => ImportGranularity::Crate,
                ImportGranularityDef::Module => ImportGranularity::Module,
            },
            enforce_granularity: self.data.imports_granularity_enforce,
            prefix_kind: match self.data.imports_prefix {
                ImportPrefixDef::Plain => PrefixKind::Plain,
                ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
                ImportPrefixDef::BySelf => PrefixKind::BySelf,
            },
            group: self.data.imports_group_enable,
            skip_glob_imports: !self.data.imports_merge_glob,
        }
    }

    /// Completion configuration; on-the-fly imports additionally require the
    /// client to support lazy completion-item resolution.
    pub fn completion(&self) -> CompletionConfig {
        CompletionConfig {
            enable_postfix_completions: self.data.completion_postfix_enable,
            enable_imports_on_the_fly: self.data.completion_autoimport_enable
                && completion_item_edit_resolve(&self.caps),
            enable_self_on_the_fly: self.data.completion_autoself_enable,
            enable_private_editable: self.data.completion_privateEditable_enable,
            callable: match self.data.completion_callable_snippets {
                CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
                CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
                CallableCompletionDef::None => None,
            },
            insert_use: self.insert_use_config(),
            snippet_cap: SnippetCap::new(try_or_def!(
                self.caps
                    .text_document
                    .as_ref()?
                    .completion
                    .as_ref()?
                    .completion_item
                    .as_ref()?
                    .snippet_support?
            )),
            snippets: self.snippets.clone(),
        }
    }

    /// Whether the client supports the experimental `snippetTextEdit` extension.
    pub fn snippet_cap(&self) -> bool {
        self.experimental("snippetTextEdit")
    }

    /// Assist configuration derived from the settings and capabilities.
    pub fn assist(&self) -> AssistConfig {
        AssistConfig {
            snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
            allowed: None,
            insert_use: self.insert_use_config(),
        }
    }

    /// Join-lines configuration derived from the settings.
    pub fn join_lines(&self) -> JoinLinesConfig {
        JoinLinesConfig {
            join_else_if: self.data.joinLines_joinElseIf,
            remove_trailing_comma: self.data.joinLines_removeTrailingComma,
            unwrap_trivial_blocks: self.data.joinLines_unwrapTrivialBlock,
            join_assignments: self.data.joinLines_joinAssignments,
        }
    }

    /// Signature-help configuration derived from the settings.
    pub fn call_info(&self) -> CallInfoConfig {
        CallInfoConfig {
            params_only: matches!(self.data.signatureInfo_detail, SignatureDetail::Parameters),
            docs: self.data.signatureInfo_documentation_enable,
        }
    }

    /// Code-lens configuration; `lens.enable` gates every individual lens.
    pub fn lens(&self) -> LensConfig {
        LensConfig {
            run: self.data.lens_enable && self.data.lens_run_enable,
            debug: self.data.lens_enable && self.data.lens_debug_enable,
            implementations: self.data.lens_enable && self.data.lens_implementations_enable,
            method_refs: self.data.lens_enable && self.data.lens_references_method_enable,
            refs_adt: self.data.lens_enable && self.data.lens_references_adt_enable,
            refs_trait: self.data.lens_enable && self.data.lens_references_trait_enable,
            enum_variant_refs: self.data.lens_enable
                && self.data.lens_references_enumVariant_enable,
        }
    }

    /// Hover-action configuration; requires both the experimental client
    /// capability and the `hover.actions.enable` setting.
    pub fn hover_actions(&self) -> HoverActionsConfig {
        let enable = self.experimental("hoverActions") && self.data.hover_actions_enable;
        HoverActionsConfig {
            implementations: enable && self.data.hover_actions_implementations_enable,
            references: enable && self.data.hover_actions_references_enable,
            run: enable && self.data.hover_actions_run_enable,
            debug: enable && self.data.hover_actions_debug_enable,
            goto_type_def: enable && self.data.hover_actions_gotoTypeDef_enable,
        }
    }

    /// Whether strings should get semantic-highlighting tokens.
    pub fn highlighting_strings(&self) -> bool {
        self.data.semanticHighlighting_strings_enable
    }

    /// Hover configuration; documentation format depends on whether the
    /// client accepts Markdown.
    pub fn hover(&self) -> HoverConfig {
        HoverConfig {
            links_in_hover: self.data.hover_links_enable,
            documentation: self.data.hover_documentation_enable.then(|| {
                let is_markdown = try_or_def!(self
                    .caps
                    .text_document
                    .as_ref()?
                    .hover
                    .as_ref()?
                    .content_format
                    .as_ref()?
                    .as_slice())
                .contains(&MarkupKind::Markdown);
                if is_markdown {
                    HoverDocFormat::Markdown
                } else {
                    HoverDocFormat::PlainText
                }
            }),
        }
    }

    /// Workspace symbol-search configuration derived from the settings.
    pub fn workspace_symbol(&self) -> WorkspaceSymbolConfig {
        WorkspaceSymbolConfig {
            search_scope: match self.data.workspace_symbol_search_scope {
                WorkspaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
                WorkspaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
                    WorkspaceSymbolSearchScope::WorkspaceAndDependencies
                }
            },
            search_kind: match self.data.workspace_symbol_search_kind {
                WorkspaceSymbolSearchKindDef::OnlyTypes => WorkspaceSymbolSearchKind::OnlyTypes,
                WorkspaceSymbolSearchKindDef::AllSymbols => WorkspaceSymbolSearchKind::AllSymbols,
            },
            search_limit: self.data.workspace_symbol_search_limit,
        }
    }

    /// Whether the client supports `workspace/semanticTokens/refresh`.
    pub fn semantic_tokens_refresh(&self) -> bool {
        try_or_def!(self.caps.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support?)
    }

    /// Whether the client supports `workspace/codeLens/refresh`.
    pub fn code_lens_refresh(&self) -> bool {
        try_or_def!(self.caps.workspace.as_ref()?.code_lens.as_ref()?.refresh_support?)
    }

    /// Whether the client supports insert/replace completion edits.
    pub fn insert_replace_support(&self) -> bool {
        try_or_def!(
            self.caps
                .text_document
                .as_ref()?
                .completion
                .as_ref()?
                .completion_item
                .as_ref()?
                .insert_replace_support?
        )
    }

    /// Which client-side commands may be used. When the client does not
    /// declare its commands, `lens.forceCustomCommands` can force them all on.
    pub fn client_commands(&self) -> ClientCommandsConfig {
        let commands =
            try_or!(self.caps.experimental.as_ref()?.get("commands")?, &serde_json::Value::Null);
        let commands: Option<lsp_ext::ClientCommandOptions> =
            serde_json::from_value(commands.clone()).ok();
        let force = commands.is_none() && self.data.lens_forceCustomCommands;
        let commands = commands.map(|it| it.commands).unwrap_or_default();

        let get = |name: &str| commands.iter().any(|it| it == name) || force;

        ClientCommandsConfig {
            run_single: get("rust-analyzer.runSingle"),
            debug_single: get("rust-analyzer.debugSingle"),
            show_reference: get("rust-analyzer.showReferences"),
            goto_location: get("rust-analyzer.gotoLocation"),
            trigger_parameter_hints: get("editor.action.triggerParameterHints"),
        }
    }

    /// Highlight-related configuration derived from the settings.
    pub fn highlight_related(&self) -> HighlightRelatedConfig {
        HighlightRelatedConfig {
            references: self.data.highlightRelated_references_enable,
            break_points: self.data.highlightRelated_breakPoints_enable,
            exit_points: self.data.highlightRelated_exitPoints_enable,
            yield_points: self.data.highlightRelated_yieldPoints_enable,
        }
    }

    /// Cache-priming thread count; `0` means "all physical cores" (capped at
    /// `u8::MAX`).
    pub fn prime_caches_num_threads(&self) -> u8 {
        match self.data.cachePriming_numThreads {
            0 => num_cpus::get_physical().try_into().unwrap_or(u8::MAX),
            n => n,
        }
    }

    /// Whether `<` should be auto-closed when typing generic arguments.
    pub fn typing_autoclose_angle(&self) -> bool {
        self.data.typing_autoClosingAngleBrackets_enable
    }
}
+// Deserialization definitions
+
// Generates a `deserialize_with` function that accepts either a specific
// boolean literal or a specific string (e.g. `true` / `"always"`), so config
// enums keep working for clients that send either spelling.
macro_rules! create_bool_or_string_de {
    ($ident:ident<$bool:literal, $string:literal>) => {
        fn $ident<'de, D>(d: D) -> Result<(), D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            struct V;
            impl<'de> serde::de::Visitor<'de> for V {
                type Value = ();

                fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
                    formatter.write_str(concat!(
                        stringify!($bool),
                        " or \"",
                        stringify!($string),
                        "\""
                    ))
                }

                // Accepts exactly the configured boolean literal.
                fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
                where
                    E: serde::de::Error,
                {
                    match v {
                        $bool => Ok(()),
                        _ => Err(serde::de::Error::invalid_value(
                            serde::de::Unexpected::Bool(v),
                            &self,
                        )),
                    }
                }

                // Accepts exactly the configured string literal.
                fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
                where
                    E: serde::de::Error,
                {
                    match v {
                        $string => Ok(()),
                        _ => Err(serde::de::Error::invalid_value(
                            serde::de::Unexpected::Str(v),
                            &self,
                        )),
                    }
                }

                // Some formats hand the value over as a unit enum variant.
                fn visit_enum<A>(self, a: A) -> Result<Self::Value, A::Error>
                where
                    A: serde::de::EnumAccess<'de>,
                {
                    use serde::de::VariantAccess;
                    let (variant, va) = a.variant::<&'de str>()?;
                    va.unit_variant()?;
                    match variant {
                        $string => Ok(()),
                        _ => Err(serde::de::Error::invalid_value(
                            serde::de::Unexpected::Str(variant),
                            &self,
                        )),
                    }
                }
            }
            d.deserialize_any(V)
        }
    };
}
create_bool_or_string_de!(true_or_always<true, "always">);
create_bool_or_string_de!(false_or_never<false, "never">);
+
// Generates a `deserialize_with` function that accepts exactly one string:
// the variant's own name. Used by the untagged config enums below.
macro_rules! named_unit_variant {
    ($variant:ident) => {
        pub(super) fn $variant<'de, D>(deserializer: D) -> Result<(), D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            struct V;
            impl<'de> serde::de::Visitor<'de> for V {
                type Value = ();
                fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    f.write_str(concat!("\"", stringify!($variant), "\""))
                }
                fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
                    if value == stringify!($variant) {
                        Ok(())
                    } else {
                        Err(E::invalid_value(serde::de::Unexpected::Str(value), &self))
                    }
                }
            }
            deserializer.deserialize_str(V)
        }
    };
}
+
// String-keyword deserializers for the untagged config enums below; each
// function accepts exactly its own name as a string.
mod de_unit_v {
    named_unit_variant!(all);
    named_unit_variant!(skip_trivial);
    named_unit_variant!(mutable);
    named_unit_variant!(with_block);
}
+
/// Scope in which a user-defined completion snippet applies.
#[derive(Deserialize, Debug, Clone, Copy)]
#[serde(rename_all = "snake_case")]
enum SnippetScopeDef {
    Expr,
    Item,
    Type,
}

impl Default for SnippetScopeDef {
    fn default() -> Self {
        SnippetScopeDef::Expr
    }
}
+
/// On-the-wire shape of a user-defined completion snippet
/// (`completion.snippets.custom`).
#[derive(Deserialize, Debug, Clone, Default)]
#[serde(default)]
struct SnippetDef {
    // Triggers and bodies may be given as a single string or a list of strings.
    #[serde(deserialize_with = "single_or_array")]
    prefix: Vec<String>,
    #[serde(deserialize_with = "single_or_array")]
    postfix: Vec<String>,
    description: Option<String>,
    #[serde(deserialize_with = "single_or_array")]
    body: Vec<String>,
    #[serde(deserialize_with = "single_or_array")]
    requires: Vec<String>,
    scope: SnippetScopeDef,
}
+
+fn single_or_array<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ struct SingleOrVec;
+
+ impl<'de> serde::de::Visitor<'de> for SingleOrVec {
+ type Value = Vec<String>;
+
+ fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ formatter.write_str("string or array of strings")
+ }
+
+ fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(vec![value.to_owned()])
+ }
+
+ fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
+ where
+ A: serde::de::SeqAccess<'de>,
+ {
+ Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
+ }
+ }
+
+ deserializer.deserialize_any(SingleOrVec)
+}
+
/// A `linkedProjects` entry: a manifest path or an inline project-JSON
/// object. Untagged — variant order determines which is tried first.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ManifestOrProjectJson {
    Manifest(PathBuf),
    ProjectJson(ProjectJsonData),
}
+
/// How assists fill in missing expressions (`assist.expressionFillDefault`).
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ExprFillDefaultDef {
    Todo,
    Default,
}
+
/// How imports are grouped into `use` statements (`imports.granularity.group`).
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ImportGranularityDef {
    Preserve,
    Item,
    Crate,
    Module,
}
+
/// What completion inserts after a callable (`completion.callable.snippets`).
#[derive(Deserialize, Debug, Copy, Clone)]
#[serde(rename_all = "snake_case")]
enum CallableCompletionDef {
    FillArguments,
    AddParentheses,
    None,
}
+
/// Cargo feature selection: the string `"all"` or an explicit feature list.
/// Untagged — `"all"` must be tried before the list variant.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum CargoFeatures {
    #[serde(deserialize_with = "de_unit_v::all")]
    All,
    Listed(Vec<String>),
}
+
/// Lifetime-elision inlay hint mode; accepts booleans or the keywords
/// `"always"` / `"never"` / `"skip_trivial"`.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum LifetimeElisionDef {
    #[serde(deserialize_with = "true_or_always")]
    Always,
    #[serde(deserialize_with = "false_or_never")]
    Never,
    #[serde(deserialize_with = "de_unit_v::skip_trivial")]
    SkipTrivial,
}
+
/// Closure-return-type inlay hint mode; accepts booleans or the keywords
/// `"always"` / `"never"` / `"with_block"`.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ClosureReturnTypeHintsDef {
    #[serde(deserialize_with = "true_or_always")]
    Always,
    #[serde(deserialize_with = "false_or_never")]
    Never,
    #[serde(deserialize_with = "de_unit_v::with_block")]
    WithBlock,
}
+
/// Reborrow inlay hint mode; accepts booleans or the keywords
/// `"always"` / `"never"` / `"mutable"`.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ReborrowHintsDef {
    #[serde(deserialize_with = "true_or_always")]
    Always,
    #[serde(deserialize_with = "false_or_never")]
    Never,
    #[serde(deserialize_with = "de_unit_v::mutable")]
    Mutable,
}
+
/// `files.watcher` setting. NOTE(review): `Notify` looks like a legacy
/// spelling that is treated the same as `Server` in `Config::files` (only
/// `Client` is matched specially there) — confirm before documenting further.
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum FilesWatcherDef {
    Client,
    Notify,
    Server,
}
+
/// Path style for inserted imports (`imports.prefix`); `self`/`crate` are
/// accepted as aliases.
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ImportPrefixDef {
    Plain,
    #[serde(alias = "self")]
    BySelf,
    #[serde(alias = "crate")]
    ByCrate,
}
+
/// Scope of workspace symbol search (`workspace.symbol.search.scope`).
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum WorkspaceSymbolSearchScopeDef {
    Workspace,
    WorkspaceAndDependencies,
}
+
/// Signature-help detail level (`signatureInfo.detail`).
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum SignatureDetail {
    Full,
    Parameters,
}
+
/// Kind filter for workspace symbol search (`workspace.symbol.search.kind`).
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum WorkspaceSymbolSearchKindDef {
    OnlyTypes,
    AllSymbols,
}
+
// Declares the `ConfigData` struct: every entry expands to a field, a
// `from_json` extraction (honoring `| alias` fallbacks), a JSON-schema entry,
// and (in tests) a manual-documentation entry. A generated test also keeps
// the field list sorted.
macro_rules! _config_data {
    (struct $name:ident {
        $(
            $(#[doc=$doc:literal])*
            $field:ident $(| $alias:ident)*: $ty:ty = $default:expr,
        )*
    }) => {
        #[allow(non_snake_case)]
        #[derive(Debug, Clone)]
        struct $name { $($field: $ty,)* }
        impl $name {
            // Reads each field from `json`, collecting failures into `error_sink`.
            fn from_json(mut json: serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> $name {
                $name {$(
                    $field: get_field(
                        &mut json,
                        error_sink,
                        stringify!($field),
                        None$(.or(Some(stringify!($alias))))*,
                        $default,
                    ),
                )*}
            }

            // JSON schema covering every declared option.
            fn json_schema() -> serde_json::Value {
                schema(&[
                    $({
                        let field = stringify!($field);
                        let ty = stringify!($ty);

                        (field, ty, &[$($doc),*], $default)
                    },)*
                ])
            }

            // Generated user manual, compared against the checked-in docs in tests.
            #[cfg(test)]
            fn manual() -> String {
                manual(&[
                    $({
                        let field = stringify!($field);
                        let ty = stringify!($ty);

                        (field, ty, &[$($doc),*], $default)
                    },)*
                ])
            }
        }

        #[test]
        fn fields_are_sorted() {
            [$(stringify!($field)),*].windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1]));
        }
    };
}
use _config_data as config_data;
+
+fn get_field<T: DeserializeOwned>(
+ json: &mut serde_json::Value,
+ error_sink: &mut Vec<(String, serde_json::Error)>,
+ field: &'static str,
+ alias: Option<&'static str>,
+ default: &str,
+) -> T {
+ let default = serde_json::from_str(default).unwrap();
+ // XXX: check alias first, to work-around the VS Code where it pre-fills the
+ // defaults instead of sending an empty object.
+ alias
+ .into_iter()
+ .chain(iter::once(field))
+ .find_map(move |field| {
+ let mut pointer = field.replace('_', "/");
+ pointer.insert(0, '/');
+ json.pointer_mut(&pointer).and_then(|it| match serde_json::from_value(it.take()) {
+ Ok(it) => Some(it),
+ Err(e) => {
+ tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e);
+ error_sink.push((pointer, e));
+ None
+ }
+ })
+ })
+ .unwrap_or(default)
+}
+
+fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
+ for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) {
+ fn key(f: &str) -> &str {
+ f.splitn(2, '_').next().unwrap()
+ }
+ assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2);
+ }
+
+ let map = fields
+ .iter()
+ .map(|(field, ty, doc, default)| {
+ let name = field.replace('_', ".");
+ let name = format!("rust-analyzer.{}", name);
+ let props = field_props(field, ty, doc, default);
+ (name, props)
+ })
+ .collect::<serde_json::Map<_, _>>();
+ map.into()
+}
+
+fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value {
+ let doc = doc_comment_to_string(doc);
+ let doc = doc.trim_end_matches('\n');
+ assert!(
+ doc.ends_with('.') && doc.starts_with(char::is_uppercase),
+ "bad docs for {}: {:?}",
+ field,
+ doc
+ );
+ let default = default.parse::<serde_json::Value>().unwrap();
+
+ let mut map = serde_json::Map::default();
+ macro_rules! set {
+ ($($key:literal: $value:tt),*$(,)?) => {{$(
+ map.insert($key.into(), serde_json::json!($value));
+ )*}};
+ }
+ set!("markdownDescription": doc);
+ set!("default": default);
+
+ match ty {
+ "bool" => set!("type": "boolean"),
+ "usize" => set!("type": "integer", "minimum": 0),
+ "String" => set!("type": "string"),
+ "Vec<String>" => set! {
+ "type": "array",
+ "items": { "type": "string" },
+ },
+ "Vec<PathBuf>" => set! {
+ "type": "array",
+ "items": { "type": "string" },
+ },
+ "FxHashSet<String>" => set! {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ },
+ "FxHashMap<Box<str>, Box<[Box<str>]>>" => set! {
+ "type": "object",
+ },
+ "FxHashMap<String, SnippetDef>" => set! {
+ "type": "object",
+ },
+ "FxHashMap<String, String>" => set! {
+ "type": "object",
+ },
+ "Option<usize>" => set! {
+ "type": ["null", "integer"],
+ "minimum": 0,
+ },
+ "Option<String>" => set! {
+ "type": ["null", "string"],
+ },
+ "Option<PathBuf>" => set! {
+ "type": ["null", "string"],
+ },
+ "Option<bool>" => set! {
+ "type": ["null", "boolean"],
+ },
+ "Option<Vec<String>>" => set! {
+ "type": ["null", "array"],
+ "items": { "type": "string" },
+ },
+ "MergeBehaviorDef" => set! {
+ "type": "string",
+ "enum": ["none", "crate", "module"],
+ "enumDescriptions": [
+ "Do not merge imports at all.",
+ "Merge imports from the same crate into a single `use` statement.",
+ "Merge imports from the same module into a single `use` statement."
+ ],
+ },
+ "ExprFillDefaultDef" => set! {
+ "type": "string",
+ "enum": ["todo", "default"],
+ "enumDescriptions": [
+ "Fill missing expressions with the `todo` macro",
+ "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
+ ],
+ },
+ "ImportGranularityDef" => set! {
+ "type": "string",
+ "enum": ["preserve", "crate", "module", "item"],
+ "enumDescriptions": [
+ "Do not change the granularity of any imports and preserve the original structure written by the developer.",
+ "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
+ "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
+ "Flatten imports so that each has its own use statement."
+ ],
+ },
+ "ImportPrefixDef" => set! {
+ "type": "string",
+ "enum": [
+ "plain",
+ "self",
+ "crate"
+ ],
+ "enumDescriptions": [
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
+ "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
+ ],
+ },
+ "Vec<ManifestOrProjectJson>" => set! {
+ "type": "array",
+ "items": { "type": ["string", "object"] },
+ },
+ "WorkspaceSymbolSearchScopeDef" => set! {
+ "type": "string",
+ "enum": ["workspace", "workspace_and_dependencies"],
+ "enumDescriptions": [
+ "Search in current workspace only.",
+ "Search in current workspace and dependencies."
+ ],
+ },
+ "WorkspaceSymbolSearchKindDef" => set! {
+ "type": "string",
+ "enum": ["only_types", "all_symbols"],
+ "enumDescriptions": [
+ "Search for types only.",
+ "Search for all symbols kinds."
+ ],
+ },
+ "ParallelCachePrimingNumThreads" => set! {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ "LifetimeElisionDef" => set! {
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "skip_trivial"
+ ],
+ "enumDescriptions": [
+ "Always show lifetime elision hints.",
+ "Never show lifetime elision hints.",
+ "Only show lifetime elision hints if a return type is involved."
+ ]
+ },
+ "ClosureReturnTypeHintsDef" => set! {
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "with_block"
+ ],
+ "enumDescriptions": [
+ "Always show type hints for return types of closures.",
+ "Never show type hints for return types of closures.",
+ "Only show type hints for return types of closures with blocks."
+ ]
+ },
+ "ReborrowHintsDef" => set! {
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "mutable"
+ ],
+ "enumDescriptions": [
+ "Always show reborrow hints.",
+ "Never show reborrow hints.",
+ "Only show mutable reborrow hints."
+ ]
+ },
+ "CargoFeatures" => set! {
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo",
+ ]
+ },
+ {
+ "type": "array",
+ "items": { "type": "string" }
+ }
+ ],
+ },
+ "Option<CargoFeatures>" => set! {
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo",
+ ]
+ },
+ {
+ "type": "array",
+ "items": { "type": "string" }
+ },
+ { "type": "null" }
+ ],
+ },
+ "CallableCompletionDef" => set! {
+ "type": "string",
+ "enum": [
+ "fill_arguments",
+ "add_parentheses",
+ "none",
+ ],
+ "enumDescriptions": [
+ "Add call parentheses and pre-fill arguments.",
+ "Add call parentheses.",
+ "Do no snippet completions for callables."
+ ]
+ },
+ "SignatureDetail" => set! {
+ "type": "string",
+ "enum": ["full", "parameters"],
+ "enumDescriptions": [
+ "Show the entire signature.",
+ "Show only the parameters."
+ ],
+ },
+ "FilesWatcherDef" => set! {
+ "type": "string",
+ "enum": ["client", "server"],
+ "enumDescriptions": [
+ "Use the client (editor) to watch files for changes",
+ "Use server-side file watching",
+ ],
+ },
+ _ => panic!("missing entry for {}: {}", ty, default),
+ }
+
+ map.into()
+}
+
+#[cfg(test)]
+fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
+ fields
+ .iter()
+ .map(|(field, _ty, doc, default)| {
+ let name = format!("rust-analyzer.{}", field.replace('_', "."));
+ let doc = doc_comment_to_string(*doc);
+ if default.contains('\n') {
+ format!(
+ r#"[[{}]]{}::
++
+--
+Default:
+----
+{}
+----
+{}
+--
+"#,
+ name, name, default, doc
+ )
+ } else {
+ format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc)
+ }
+ })
+ .collect::<String>()
+}
+
+fn doc_comment_to_string(doc: &[&str]) -> String {
+ doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{}\n", it)).collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use std::fs;
+
+ use test_utils::{ensure_file_contents, project_root};
+
+ use super::*;
+
+ #[test]
+ fn generate_package_json_config() {
+ let s = Config::json_schema();
+ let schema = format!("{:#}", s);
+ let mut schema = schema
+ .trim_start_matches('{')
+ .trim_end_matches('}')
+ .replace(" ", " ")
+ .replace('\n', "\n ")
+ .trim_start_matches('\n')
+ .trim_end()
+ .to_string();
+ schema.push_str(",\n");
+
+ // Transform the asciidoc form link to markdown style.
+ //
+ // https://link[text] => [text](https://link)
+ let url_matches = schema.match_indices("https://");
+ let mut url_offsets = url_matches.map(|(idx, _)| idx).collect::<Vec<usize>>();
+ url_offsets.reverse();
+ for idx in url_offsets {
+ let link = &schema[idx..];
+ // matching on whitespace to ignore normal links
+ if let Some(link_end) = link.find(|c| c == ' ' || c == '[') {
+ if link.chars().nth(link_end) == Some('[') {
+ if let Some(link_text_end) = link.find(']') {
+ let link_text = link[link_end..(link_text_end + 1)].to_string();
+
+ schema.replace_range((idx + link_end)..(idx + link_text_end + 1), "");
+ schema.insert(idx, '(');
+ schema.insert(idx + link_end + 1, ')');
+ schema.insert_str(idx, &link_text);
+ }
+ }
+ }
+ }
+
+ let package_json_path = project_root().join("editors/code/package.json");
+ let mut package_json = fs::read_to_string(&package_json_path).unwrap();
+
+ let start_marker = " \"$generated-start\": {},\n";
+ let end_marker = " \"$generated-end\": {}\n";
+
+ let start = package_json.find(start_marker).unwrap() + start_marker.len();
+ let end = package_json.find(end_marker).unwrap();
+
+ let p = remove_ws(&package_json[start..end]);
+ let s = remove_ws(&schema);
+ if !p.contains(&s) {
+ package_json.replace_range(start..end, &schema);
+ ensure_file_contents(&package_json_path, &package_json)
+ }
+ }
+
+ #[test]
+ fn generate_config_documentation() {
+ let docs_path = project_root().join("docs/user/generated_config.adoc");
+ let expected = ConfigData::manual();
+ ensure_file_contents(&docs_path, &expected);
+ }
+
+ fn remove_ws(text: &str) -> String {
+ text.replace(char::is_whitespace, "")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
new file mode 100644
index 000000000..472e2e0ee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
@@ -0,0 +1,135 @@
+//! See [`patch_json_for_outdated_configs`]
+use serde_json::{json, Value};
+
+/// This function patches the json config to the new expected keys.
+/// That is we try to load old known config keys here and convert them to the new ones.
+/// See https://github.com/rust-lang/rust-analyzer/pull/12010
+pub(super) fn patch_json_for_outdated_configs(json: &mut Value) {
+ let copy = json.clone();
+
+ macro_rules! patch {
+ ($(
+ $($src:ident).+ -> $($dst:ident).+ ;
+ )+) => { $(
+ match copy.pointer(concat!($("/", stringify!($src)),+)).cloned() {
+ Some(Value::Object(_)) | None => (),
+ Some(it) => {
+ let mut last = it;
+ for segment in [$(stringify!($dst)),+].into_iter().rev() {
+ last = Value::Object(serde_json::Map::from_iter(std::iter::once((segment.to_string(), last))));
+ }
+
+ merge(json, last);
+ },
+ }
+ )+ };
+ }
+
+ patch! {
+ assist.allowMergingIntoGlobImports -> imports.merge.glob;
+ assist.exprFillDefault -> assist.expressionFillDefault;
+ assist.importEnforceGranularity -> imports.granularity.enforce;
+ assist.importGranularity -> imports.granularity.group;
+ assist.importMergeBehavior -> imports.granularity.group;
+ assist.importMergeBehaviour -> imports.granularity.group;
+ assist.importGroup -> imports.group.enable;
+ assist.importPrefix -> imports.prefix;
+ primeCaches.enable -> cachePriming.enable;
+ cache.warmup -> cachePriming.enable;
+ cargo.loadOutDirsFromCheck -> cargo.buildScripts.enable;
+ cargo.runBuildScripts -> cargo.buildScripts.enable;
+ cargo.runBuildScriptsCommand -> cargo.buildScripts.overrideCommand;
+ cargo.useRustcWrapperForBuildScripts -> cargo.buildScripts.useRustcWrapper;
+ diagnostics.enableExperimental -> diagnostics.experimental.enable;
+ experimental.procAttrMacros -> procMacro.attributes.enable;
+ highlighting.strings -> semanticHighlighting.strings.enable;
+ highlightRelated.breakPoints -> semanticHighlighting.breakPoints.enable;
+ highlightRelated.exitPoints -> semanticHighlighting.exitPoints.enable;
+ highlightRelated.yieldPoints -> semanticHighlighting.yieldPoints.enable;
+ highlightRelated.references -> semanticHighlighting.references.enable;
+ hover.documentation -> hover.documentation.enable;
+ hover.linksInHover -> hover.links.enable;
+ hoverActions.linksInHover -> hover.links.enable;
+ hoverActions.debug -> hover.actions.debug.enable;
+ hoverActions.enable -> hover.actions.enable;
+ hoverActions.gotoTypeDef -> hover.actions.gotoTypeDef.enable;
+ hoverActions.implementations -> hover.actions.implementations.enable;
+ hoverActions.references -> hover.actions.references.enable;
+ hoverActions.run -> hover.actions.run.enable;
+ inlayHints.chainingHints -> inlayHints.chainingHints.enable;
+ inlayHints.closureReturnTypeHints -> inlayHints.closureReturnTypeHints.enable;
+ inlayHints.hideNamedConstructorHints -> inlayHints.typeHints.hideNamedConstructorHints;
+ inlayHints.parameterHints -> inlayHints.parameterHints.enable;
+ inlayHints.reborrowHints -> inlayHints.reborrowHints.enable;
+ inlayHints.typeHints -> inlayHints.typeHints.enable;
+ lruCapacity -> lru.capacity;
+ runnables.cargoExtraArgs -> runnables.extraArgs ;
+ runnables.overrideCargo -> runnables.command ;
+ rustcSource -> rustc.source;
+ rustfmt.enableRangeFormatting -> rustfmt.rangeFormatting.enable;
+ }
+
+ // completion.snippets -> completion.snippets.custom;
+ if let Some(Value::Object(obj)) = copy.pointer("/completion/snippets").cloned() {
+ if obj.len() != 1 || obj.get("custom").is_none() {
+ merge(
+ json,
+ json! {{
+ "completion": {
+ "snippets": {
+ "custom": obj
+ },
+ },
+ }},
+ );
+ }
+ }
+
+ // callInfo_full -> signatureInfo_detail, signatureInfo_documentation_enable
+ if let Some(Value::Bool(b)) = copy.pointer("/callInfo/full") {
+ let sig_info = match b {
+ true => json!({ "signatureInfo": {
+ "documentation": {"enable": true}},
+ "detail": "full"
+ }),
+ false => json!({ "signatureInfo": {
+ "documentation": {"enable": false}},
+ "detail": "parameters"
+ }),
+ };
+ merge(json, sig_info);
+ }
+
+ // cargo_allFeatures, cargo_features -> cargo_features
+ if let Some(Value::Bool(true)) = copy.pointer("/cargo/allFeatures") {
+ merge(json, json!({ "cargo": { "features": "all" } }));
+ }
+
+ // checkOnSave_allFeatures, checkOnSave_features -> checkOnSave_features
+ if let Some(Value::Bool(true)) = copy.pointer("/checkOnSave/allFeatures") {
+ merge(json, json!({ "checkOnSave": { "features": "all" } }));
+ }
+
+ // completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets
+ let res = match (
+ copy.pointer("/completion/addCallArgumentSnippets"),
+ copy.pointer("/completion/addCallParenthesis"),
+ ) {
+ (Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"),
+ (Some(Value::Bool(true)), _) => json!("add_parentheses"),
+ (Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"),
+ (_, _) => return,
+ };
+ merge(json, json!({ "completion": { "callable": {"snippets": res }} }));
+}
+
+fn merge(dst: &mut Value, src: Value) {
+ match (dst, src) {
+ (Value::Object(dst), Value::Object(src)) => {
+ for (k, v) in src {
+ merge(dst.entry(k).or_insert(v.clone()), v)
+ }
+ }
+ (dst, src) => *dst = src,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
new file mode 100644
index 000000000..202a01adf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
@@ -0,0 +1,109 @@
+//! Book keeping for keeping diagnostics easily in sync with the client.
+pub(crate) mod to_proto;
+
+use std::{mem, sync::Arc};
+
+use ide::FileId;
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use crate::lsp_ext;
+
+pub(crate) type CheckFixes = Arc<FxHashMap<FileId, Vec<Fix>>>;
+
+#[derive(Debug, Default, Clone)]
+pub struct DiagnosticsMapConfig {
+ pub remap_prefix: FxHashMap<String, String>,
+ pub warnings_as_info: Vec<String>,
+ pub warnings_as_hint: Vec<String>,
+}
+
+#[derive(Debug, Default, Clone)]
+pub(crate) struct DiagnosticCollection {
+ // FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
+ pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
+ // FIXME: should be Vec<flycheck::Diagnostic>
+ pub(crate) check: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
+ pub(crate) check_fixes: CheckFixes,
+ changes: FxHashSet<FileId>,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Fix {
+ // Fixes may be triggerable from multiple ranges.
+ pub(crate) ranges: Vec<lsp_types::Range>,
+ pub(crate) action: lsp_ext::CodeAction,
+}
+
+impl DiagnosticCollection {
+ pub(crate) fn clear_check(&mut self) {
+ Arc::make_mut(&mut self.check_fixes).clear();
+ self.changes.extend(self.check.drain().map(|(key, _value)| key))
+ }
+
+ pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
+ self.native.remove(&file_id);
+ self.changes.insert(file_id);
+ }
+
+ pub(crate) fn add_check_diagnostic(
+ &mut self,
+ file_id: FileId,
+ diagnostic: lsp_types::Diagnostic,
+ fix: Option<Fix>,
+ ) {
+ let diagnostics = self.check.entry(file_id).or_default();
+ for existing_diagnostic in diagnostics.iter() {
+ if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
+ return;
+ }
+ }
+
+ let check_fixes = Arc::make_mut(&mut self.check_fixes);
+ check_fixes.entry(file_id).or_default().extend(fix);
+ diagnostics.push(diagnostic);
+ self.changes.insert(file_id);
+ }
+
+ pub(crate) fn set_native_diagnostics(
+ &mut self,
+ file_id: FileId,
+ diagnostics: Vec<lsp_types::Diagnostic>,
+ ) {
+ if let Some(existing_diagnostics) = self.native.get(&file_id) {
+ if existing_diagnostics.len() == diagnostics.len()
+ && diagnostics
+ .iter()
+ .zip(existing_diagnostics)
+ .all(|(new, existing)| are_diagnostics_equal(new, existing))
+ {
+ return;
+ }
+ }
+
+ self.native.insert(file_id, diagnostics);
+ self.changes.insert(file_id);
+ }
+
+ pub(crate) fn diagnostics_for(
+ &self,
+ file_id: FileId,
+ ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
+ let native = self.native.get(&file_id).into_iter().flatten();
+ let check = self.check.get(&file_id).into_iter().flatten();
+ native.chain(check)
+ }
+
+ pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> {
+ if self.changes.is_empty() {
+ return None;
+ }
+ Some(mem::take(&mut self.changes))
+ }
+}
+
+fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagnostic) -> bool {
+ left.source == right.source
+ && left.severity == right.severity
+ && left.range == right.range
+ && left.message == right.message
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/clippy_pass_by_ref.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/clippy_pass_by_ref.txt
new file mode 100644
index 000000000..c3b540e31
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/clippy_pass_by_ref.txt
@@ -0,0 +1,301 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ severity: Some(
+ Warning,
+ ),
+ code: Some(
+ String(
+ "trivially_copy_pass_by_ref",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "trivially_copy_pass_by_ref",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "this argument is passed by reference, but would be more efficient if passed by value\n#[warn(clippy::trivially_copy_pass_by_ref)] implied by #[warn(clippy::all)]\nfor further information visit https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/lib.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 8,
+ },
+ end: Position {
+ line: 0,
+ character: 19,
+ },
+ },
+ },
+ message: "lint level defined here",
+ },
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ },
+ message: "consider passing by value instead: `self`",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/lib.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 8,
+ },
+ end: Position {
+ line: 0,
+ character: 19,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "trivially_copy_pass_by_ref",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "trivially_copy_pass_by_ref",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "lint level defined here",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "trivially_copy_pass_by_ref",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "trivially_copy_pass_by_ref",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "consider passing by value instead: `self`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/handles_macro_location.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/handles_macro_location.txt
new file mode 100644
index 000000000..989e5cf66
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/handles_macro_location.txt
@@ -0,0 +1,64 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 1,
+ character: 4,
+ },
+ end: Position {
+ line: 1,
+ character: 26,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0277",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0277",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "can't compare `{integer}` with `&str`\nthe trait `std::cmp::PartialEq<&str>` is not implemented for `{integer}`",
+ related_information: None,
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/macro_compiler_error.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/macro_compiler_error.txt
new file mode 100644
index 000000000..fe5cf9b3b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/macro_compiler_error.txt
@@ -0,0 +1,229 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 271,
+ character: 8,
+ },
+ end: Position {
+ line: 271,
+ character: 50,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: None,
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "Please register your known path in the path module",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 264,
+ character: 8,
+ },
+ end: Position {
+ line: 264,
+ character: 76,
+ },
+ },
+ },
+ message: "Exact error occurred here",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/data.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 79,
+ character: 15,
+ },
+ end: Position {
+ line: 79,
+ character: 41,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: None,
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "Please register your known path in the path module",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 264,
+ character: 8,
+ },
+ end: Position {
+ line: 264,
+ character: 76,
+ },
+ },
+ },
+ message: "Exact error occurred here",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 264,
+ character: 8,
+ },
+ end: Position {
+ line: 264,
+ character: 76,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: None,
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "Please register your known path in the path module",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 271,
+ character: 8,
+ },
+ end: Position {
+ line: 271,
+ character: 50,
+ },
+ },
+ },
+ message: "Error originated from macro call here",
+ },
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/data.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 79,
+ character: 15,
+ },
+ end: Position {
+ line: 79,
+ character: 41,
+ },
+ },
+ },
+ message: "Error originated from macro call here",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/reasonable_line_numbers_from_empty_file.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/reasonable_line_numbers_from_empty_file.txt
new file mode 100644
index 000000000..df00b330b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/reasonable_line_numbers_from_empty_file.txt
@@ -0,0 +1,64 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/bin/current.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0601",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0601",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "`main` function not found in crate `current`\nconsider adding a `main` function to `src/bin/current.rs`",
+ related_information: None,
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_incompatible_type_for_trait.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_incompatible_type_for_trait.txt
new file mode 100644
index 000000000..dc36aa761
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_incompatible_type_for_trait.txt
@@ -0,0 +1,64 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/list_iter.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 51,
+ character: 4,
+ },
+ end: Position {
+ line: 51,
+ character: 47,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0053",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0053",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "method `next` has an incompatible type for trait\nexpected type `fn(&mut ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&ty::Ref<M>>`\n found type `fn(&ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&'list ty::Ref<M>>`",
+ related_information: None,
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_mismatched_type.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_mismatched_type.txt
new file mode 100644
index 000000000..d557196c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_mismatched_type.txt
@@ -0,0 +1,64 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/runtime/compiler_support.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 47,
+ character: 64,
+ },
+ end: Position {
+ line: 47,
+ character: 69,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0308",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0308",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "mismatched types\nexpected usize, found u32",
+ related_information: None,
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_range_map_lsp_position.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_range_map_lsp_position.txt
new file mode 100644
index 000000000..a100fa07f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_range_map_lsp_position.txt
@@ -0,0 +1,184 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/test_diagnostics/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 17,
+ },
+ end: Position {
+ line: 3,
+ character: 27,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0308",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0308",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "mismatched types\nexpected `u32`, found `&str`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/test_diagnostics/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 11,
+ },
+ end: Position {
+ line: 3,
+ character: 14,
+ },
+ },
+ },
+ message: "expected due to this",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/test_diagnostics/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 11,
+ },
+ end: Position {
+ line: 3,
+ character: 14,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "E0308",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0308",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "expected due to this",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/test_diagnostics/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 17,
+ },
+ end: Position {
+ line: 3,
+ character: 27,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable.txt
new file mode 100644
index 000000000..1c5c33622
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable.txt
@@ -0,0 +1,212 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Warning,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "unused variable: `foo`\n#[warn(unused_variables)] on by default",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "consider prefixing with an underscore: `_foo`",
+ },
+ ],
+ ),
+ tags: Some(
+ [
+ Unnecessary,
+ ],
+ ),
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "consider prefixing with an underscore: `_foo`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: Some(
+ Fix {
+ ranges: [
+ Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ ],
+ action: CodeAction {
+ title: "consider prefixing with an underscore: `_foo`",
+ group: None,
+ kind: Some(
+ CodeActionKind(
+ "quickfix",
+ ),
+ ),
+ command: None,
+ edit: Some(
+ SnippetWorkspaceEdit {
+ changes: Some(
+ {
+ Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ }: [
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ new_text: "_foo",
+ },
+ ],
+ },
+ ),
+ document_changes: None,
+ change_annotations: None,
+ },
+ ),
+ is_preferred: Some(
+ true,
+ ),
+ data: None,
+ },
+ },
+ ),
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_hint.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_hint.txt
new file mode 100644
index 000000000..3ab3412d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_hint.txt
@@ -0,0 +1,212 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "unused variable: `foo`\n#[warn(unused_variables)] on by default",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "consider prefixing with an underscore: `_foo`",
+ },
+ ],
+ ),
+ tags: Some(
+ [
+ Unnecessary,
+ ],
+ ),
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "consider prefixing with an underscore: `_foo`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: Some(
+ Fix {
+ ranges: [
+ Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ ],
+ action: CodeAction {
+ title: "consider prefixing with an underscore: `_foo`",
+ group: None,
+ kind: Some(
+ CodeActionKind(
+ "quickfix",
+ ),
+ ),
+ command: None,
+ edit: Some(
+ SnippetWorkspaceEdit {
+ changes: Some(
+ {
+ Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ }: [
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ new_text: "_foo",
+ },
+ ],
+ },
+ ),
+ document_changes: None,
+ change_annotations: None,
+ },
+ ),
+ is_preferred: Some(
+ true,
+ ),
+ data: None,
+ },
+ },
+ ),
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_info.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_info.txt
new file mode 100644
index 000000000..0702420aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_info.txt
@@ -0,0 +1,212 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Information,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "unused variable: `foo`\n#[warn(unused_variables)] on by default",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "consider prefixing with an underscore: `_foo`",
+ },
+ ],
+ ),
+ tags: Some(
+ [
+ Unnecessary,
+ ],
+ ),
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "consider prefixing with an underscore: `_foo`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: Some(
+ Fix {
+ ranges: [
+ Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ ],
+ action: CodeAction {
+ title: "consider prefixing with an underscore: `_foo`",
+ group: None,
+ kind: Some(
+ CodeActionKind(
+ "quickfix",
+ ),
+ ),
+ command: None,
+ edit: Some(
+ SnippetWorkspaceEdit {
+ changes: Some(
+ {
+ Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ }: [
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ new_text: "_foo",
+ },
+ ],
+ },
+ ),
+ document_changes: None,
+ change_annotations: None,
+ },
+ ),
+ is_preferred: Some(
+ true,
+ ),
+ data: None,
+ },
+ },
+ ),
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_wrong_number_of_parameters.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_wrong_number_of_parameters.txt
new file mode 100644
index 000000000..8ec92888c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_wrong_number_of_parameters.txt
@@ -0,0 +1,184 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/select.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 103,
+ character: 17,
+ },
+ end: Position {
+ line: 103,
+ character: 29,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0061",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0061",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "this function takes 2 parameters but 3 parameters were supplied\nexpected 2 parameters",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/select.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 218,
+ character: 4,
+ },
+ end: Position {
+ line: 230,
+ character: 5,
+ },
+ },
+ },
+ message: "defined here",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/select.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 218,
+ character: 4,
+ },
+ end: Position {
+ line: 230,
+ character: 5,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "E0061",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0061",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "defined here",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/select.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 103,
+ character: 17,
+ },
+ end: Position {
+ line: 103,
+ character: 29,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/snap_multi_line_fix.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/snap_multi_line_fix.txt
new file mode 100644
index 000000000..4365e450d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/snap_multi_line_fix.txt
@@ -0,0 +1,388 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ severity: Some(
+ Warning,
+ ),
+ code: Some(
+ String(
+ "let_and_return",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "let_and_return",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "returning the result of a let binding from a block\n`#[warn(clippy::let_and_return)]` on by default\nfor further information visit https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ },
+ message: "unnecessary let binding",
+ },
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ },
+ message: "return the expression directly: `(0..10).collect()`",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "let_and_return",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "let_and_return",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "unnecessary let binding",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "let_and_return",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "let_and_return",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "return the expression directly: `(0..10).collect()`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: Some(
+ Fix {
+ ranges: [
+ Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ ],
+ action: CodeAction {
+ title: "return the expression directly: `(0..10).collect()`",
+ group: None,
+ kind: Some(
+ CodeActionKind(
+ "quickfix",
+ ),
+ ),
+ command: None,
+ edit: Some(
+ SnippetWorkspaceEdit {
+ changes: Some(
+ {
+ Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ }: [
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ new_text: "",
+ },
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ new_text: "(0..10).collect()",
+ },
+ ],
+ },
+ ),
+ document_changes: None,
+ change_annotations: None,
+ },
+ ),
+ is_preferred: Some(
+ true,
+ ),
+ data: None,
+ },
+ },
+ ),
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
new file mode 100644
index 000000000..cff4bd7f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -0,0 +1,1843 @@
+//! This module provides the functionality needed to convert diagnostics from
+//! `cargo check` json format to the LSP diagnostic format.
+use std::collections::HashMap;
+
+use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
+use itertools::Itertools;
+use stdx::format_to;
+use vfs::{AbsPath, AbsPathBuf};
+
+use crate::{
+ global_state::GlobalStateSnapshot, line_index::OffsetEncoding, lsp_ext,
+ to_proto::url_from_abs_path,
+};
+
+use super::{DiagnosticsMapConfig, Fix};
+
+/// Determines the LSP severity from a diagnostic
+fn diagnostic_severity(
+ config: &DiagnosticsMapConfig,
+ level: flycheck::DiagnosticLevel,
+ code: Option<flycheck::DiagnosticCode>,
+) -> Option<lsp_types::DiagnosticSeverity> {
+ let res = match level {
+ DiagnosticLevel::Ice => lsp_types::DiagnosticSeverity::ERROR,
+ DiagnosticLevel::Error => lsp_types::DiagnosticSeverity::ERROR,
+ DiagnosticLevel::Warning => match &code {
+ // HACK: special case for `warnings` rustc lint.
+ Some(code)
+ if config.warnings_as_hint.iter().any(|lint| {
+ lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, lint)
+ }) =>
+ {
+ lsp_types::DiagnosticSeverity::HINT
+ }
+ // HACK: special case for `warnings` rustc lint.
+ Some(code)
+ if config.warnings_as_info.iter().any(|lint| {
+ lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, lint)
+ }) =>
+ {
+ lsp_types::DiagnosticSeverity::INFORMATION
+ }
+ _ => lsp_types::DiagnosticSeverity::WARNING,
+ },
+ DiagnosticLevel::Note => lsp_types::DiagnosticSeverity::INFORMATION,
+ DiagnosticLevel::Help => lsp_types::DiagnosticSeverity::HINT,
+ _ => return None,
+ };
+ Some(res)
+}
+
+/// Checks whether a file name is from macro invocation and does not refer to an actual file.
+fn is_dummy_macro_file(file_name: &str) -> bool {
+ // FIXME: current rustc does not seem to emit `<macro file>` files anymore?
+ file_name.starts_with('<') && file_name.ends_with('>')
+}
+
+/// Converts a Rust span to a LSP location
+fn location(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> lsp_types::Location {
+ let file_name = resolve_path(config, workspace_root, &span.file_name);
+ let uri = url_from_abs_path(&file_name);
+
+ let range = {
+ let offset_encoding = snap.config.offset_encoding();
+ lsp_types::Range::new(
+ position(&offset_encoding, span, span.line_start, span.column_start),
+ position(&offset_encoding, span, span.line_end, span.column_end),
+ )
+ };
+ lsp_types::Location::new(uri, range)
+}
+
/// Converts a rustc 1-based line/column pair into a 0-based LSP `Position`.
///
/// rustc reports columns as 1-based *character* offsets, while LSP positions
/// are counted in UTF-8 bytes or UTF-16 code units depending on the
/// negotiated `offset_encoding`. When the span carries the source line text,
/// the column is widened by the extra encoded length of every multi-byte
/// character preceding it; without the line text the character offset is
/// used unchanged.
fn position(
    offset_encoding: &OffsetEncoding,
    span: &DiagnosticSpan,
    line_offset: usize,
    column_offset: usize,
) -> lsp_types::Position {
    // `span.text` holds the span's source lines; index relative to the span's first line.
    let line_index = line_offset - span.line_start;

    let mut true_column_offset = column_offset;
    if let Some(line) = span.text.get(line_index) {
        if line.text.chars().count() == line.text.len() {
            // all one byte utf-8 char
            return lsp_types::Position {
                line: (line_offset as u32).saturating_sub(1),
                character: (column_offset as u32).saturating_sub(1),
            };
        }
        let mut char_offset = 0;
        let len_func = match offset_encoding {
            OffsetEncoding::Utf8 => char::len_utf8,
            OffsetEncoding::Utf16 => char::len_utf16,
        };
        // Widen the column by (encoded length - 1) for each character up to the
        // target column, turning a character count into a code-unit count.
        for c in line.text.chars() {
            char_offset += 1;
            if char_offset > column_offset {
                break;
            }
            true_column_offset += len_func(c) - 1;
        }
    }

    // Translate 1-based rustc coordinates to 0-based LSP coordinates;
    // saturating_sub guards against a (malformed) zero input.
    lsp_types::Position {
        line: (line_offset as u32).saturating_sub(1),
        character: (true_column_offset as u32).saturating_sub(1),
    }
}
+
+/// Extracts a suitable "primary" location from a rustc diagnostic.
+///
+/// This takes locations pointing into the standard library, or generally outside the current
+/// workspace into account and tries to avoid those, in case macros are involved.
+fn primary_location(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> lsp_types::Location {
+ let span_stack = std::iter::successors(Some(span), |span| Some(&span.expansion.as_ref()?.span));
+ for span in span_stack.clone() {
+ let abs_path = resolve_path(config, workspace_root, &span.file_name);
+ if !is_dummy_macro_file(&span.file_name) && abs_path.starts_with(workspace_root) {
+ return location(config, workspace_root, span, snap);
+ }
+ }
+
+ // Fall back to the outermost macro invocation if no suitable span comes up.
+ let last_span = span_stack.last().unwrap();
+ location(config, workspace_root, last_span, snap)
+}
+
+/// Converts a secondary Rust span to a LSP related information
+///
+/// If the span is unlabelled this will return `None`.
+fn diagnostic_related_information(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> Option<lsp_types::DiagnosticRelatedInformation> {
+ let message = span.label.clone()?;
+ let location = location(config, workspace_root, span, snap);
+ Some(lsp_types::DiagnosticRelatedInformation { location, message })
+}
+
+/// Resolves paths applying any matching path prefix remappings, and then
+/// joining the path to the workspace root.
+fn resolve_path(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ file_name: &str,
+) -> AbsPathBuf {
+ match config
+ .remap_prefix
+ .iter()
+ .find_map(|(from, to)| file_name.strip_prefix(from).map(|file_name| (to, file_name)))
+ {
+ Some((to, file_name)) => workspace_root.join(format!("{}{}", to, file_name)),
+ None => workspace_root.join(file_name),
+ }
+}
+
/// A secondary part of a rustc diagnostic: a labelled location, plus an
/// optional quickfix derived from the child's suggested replacement.
struct SubDiagnostic {
    /// Labelled location, surfaced as LSP `relatedInformation`.
    related: lsp_types::DiagnosticRelatedInformation,
    /// Quickfix built from usable `suggested_replacement` spans, if any.
    suggested_fix: Option<Fix>,
}

/// Outcome of mapping one rustc child diagnostic.
enum MappedRustChildDiagnostic {
    /// The child had a primary span: it becomes related info (and maybe a fix).
    SubDiagnostic(SubDiagnostic),
    /// Spanless child: an extra line to append to the root diagnostic message.
    MessageLine(String),
}
+
/// Maps a child diagnostic (a "note"/"help" attached to a root diagnostic).
///
/// Spanless children become extra message lines for the root diagnostic.
/// Children with primary spans become a `SubDiagnostic`: related information
/// pointing at the first primary span, plus — when at least one span carries
/// an applicable suggested replacement — a quickfix assembled from those
/// replacements.
fn map_rust_child_diagnostic(
    config: &DiagnosticsMapConfig,
    workspace_root: &AbsPath,
    rd: &flycheck::Diagnostic,
    snap: &GlobalStateSnapshot,
) -> MappedRustChildDiagnostic {
    let spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect();
    if spans.is_empty() {
        // `rustc` uses these spanless children as a way to print multi-line
        // messages
        return MappedRustChildDiagnostic::MessageLine(rd.message.clone());
    }

    // One list of text edits per target file, keyed by the file's URL.
    let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new();
    let mut suggested_replacements = Vec::new();
    for &span in &spans {
        if let Some(suggested_replacement) = &span.suggested_replacement {
            // Non-empty replacements are also echoed into the message below.
            if !suggested_replacement.is_empty() {
                suggested_replacements.push(suggested_replacement);
            }
            let location = location(config, workspace_root, span, snap);
            let edit = lsp_types::TextEdit::new(location.range, suggested_replacement.clone());

            // Only actually emit a quickfix if the suggestion is "valid enough".
            // We accept both "MaybeIncorrect" and "MachineApplicable". "MaybeIncorrect" means that
            // the suggestion is *complete* (contains no placeholders where code needs to be
            // inserted), but might not be what the user wants, or might need minor adjustments.
            if matches!(
                span.suggestion_applicability,
                None | Some(Applicability::MaybeIncorrect | Applicability::MachineApplicable)
            ) {
                edit_map.entry(location.uri).or_default().push(edit);
            }
        }
    }

    // rustc renders suggestion diagnostics by appending the suggested replacement, so do the same
    // here, otherwise the diagnostic text is missing useful information.
    let mut message = rd.message.clone();
    if !suggested_replacements.is_empty() {
        message.push_str(": ");
        let suggestions =
            suggested_replacements.iter().map(|suggestion| format!("`{}`", suggestion)).join(", ");
        message.push_str(&suggestions);
    }

    if edit_map.is_empty() {
        // No applicable replacements: related information only, no quickfix.
        MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic {
            related: lsp_types::DiagnosticRelatedInformation {
                location: location(config, workspace_root, spans[0], snap),
                message,
            },
            suggested_fix: None,
        })
    } else {
        MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic {
            related: lsp_types::DiagnosticRelatedInformation {
                location: location(config, workspace_root, spans[0], snap),
                message: message.clone(),
            },
            suggested_fix: Some(Fix {
                ranges: spans
                    .iter()
                    .map(|&span| location(config, workspace_root, span, snap).range)
                    .collect(),
                action: lsp_ext::CodeAction {
                    title: message,
                    group: None,
                    kind: Some(lsp_types::CodeActionKind::QUICKFIX),
                    edit: Some(lsp_ext::SnippetWorkspaceEdit {
                        // FIXME: there's no good reason to use edit_map here....
                        changes: Some(edit_map),
                        document_changes: None,
                        change_annotations: None,
                    }),
                    is_preferred: Some(true),
                    data: None,
                    command: None,
                },
            }),
        })
    }
}
+
/// One LSP diagnostic produced from a rustc diagnostic, together with the
/// file it belongs to and an optional quickfix.
#[derive(Debug)]
pub(crate) struct MappedRustDiagnostic {
    /// URI of the file the diagnostic is reported in.
    pub(crate) url: lsp_types::Url,
    /// The converted LSP diagnostic itself.
    pub(crate) diagnostic: lsp_types::Diagnostic,
    /// Quickfix derived from the diagnostic's suggested replacements, if any.
    pub(crate) fix: Option<Fix>,
}
+
/// Converts a Rust root diagnostic to LSP form
///
/// This flattens the Rust diagnostic by:
///
/// 1. Creating a LSP diagnostic with the root message and primary span.
/// 2. Adding any labelled secondary spans to `relatedInformation`
/// 3. Categorising child diagnostics as either `SuggestedFix`es,
///    `relatedInformation` or additional message lines.
///
/// If the diagnostic has no primary span this will return `None`
pub(crate) fn map_rust_diagnostic_to_lsp(
    config: &DiagnosticsMapConfig,
    rd: &flycheck::Diagnostic,
    workspace_root: &AbsPath,
    snap: &GlobalStateSnapshot,
) -> Vec<MappedRustDiagnostic> {
    let primary_spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect();
    if primary_spans.is_empty() {
        return Vec::new();
    }

    let severity = diagnostic_severity(config, rd.level, rd.code.clone());

    let mut source = String::from("rustc");
    let mut code = rd.code.as_ref().map(|c| c.code.clone());
    if let Some(code_val) = &code {
        // See if this is an RFC #2103 scoped lint (e.g. from Clippy)
        let scoped_code: Vec<&str> = code_val.split("::").collect();
        if scoped_code.len() == 2 {
            // e.g. "clippy::some_lint" -> source "clippy", code "some_lint".
            source = String::from(scoped_code[0]);
            code = Some(String::from(scoped_code[1]));
        }
    }

    let mut needs_primary_span_label = true;
    let mut subdiagnostics = Vec::new();
    let mut tags = Vec::new();

    // Labelled secondary spans become related information; unlabelled ones
    // are dropped (`diagnostic_related_information` returns `None` for them).
    for secondary_span in rd.spans.iter().filter(|s| !s.is_primary) {
        let related = diagnostic_related_information(config, workspace_root, secondary_span, snap);
        if let Some(related) = related {
            subdiagnostics.push(SubDiagnostic { related, suggested_fix: None });
        }
    }

    let mut message = rd.message.clone();
    for child in &rd.children {
        let child = map_rust_child_diagnostic(config, workspace_root, child, snap);
        match child {
            MappedRustChildDiagnostic::SubDiagnostic(sub) => {
                subdiagnostics.push(sub);
            }
            MappedRustChildDiagnostic::MessageLine(message_line) => {
                format_to!(message, "\n{}", message_line);

                // These secondary messages usually duplicate the content of the
                // primary span label.
                needs_primary_span_label = false;
            }
        }
    }

    // Map well-known lints to LSP diagnostic tags so editors can render
    // unused code as faded and deprecated items as struck through.
    if let Some(code) = &rd.code {
        let code = code.code.as_str();
        if matches!(
            code,
            "dead_code"
                | "unknown_lints"
                | "unreachable_code"
                | "unused_attributes"
                | "unused_imports"
                | "unused_macros"
                | "unused_variables"
        ) {
            tags.push(lsp_types::DiagnosticTag::UNNECESSARY);
        }

        if matches!(code, "deprecated") {
            tags.push(lsp_types::DiagnosticTag::DEPRECATED);
        }
    }

    // Link the code to the rustc error index or the clippy lint list.
    let code_description = match source.as_str() {
        "rustc" => rustc_code_description(code.as_deref()),
        "clippy" => clippy_code_description(code.as_deref()),
        _ => None,
    };

    primary_spans
        .iter()
        .flat_map(|primary_span| {
            let primary_location = primary_location(config, workspace_root, primary_span, snap);

            let mut message = message.clone();
            if needs_primary_span_label {
                if let Some(primary_span_label) = &primary_span.label {
                    format_to!(message, "\n{}", primary_span_label);
                }
            }

            // Each primary diagnostic span may result in multiple LSP diagnostics.
            let mut diagnostics = Vec::new();

            let mut related_info_macro_calls = vec![];

            // If error occurs from macro expansion, add related info pointing to
            // where the error originated
            // Also, we would generate an additional diagnostic, so that exact place of macro
            // will be highlighted in the error origin place.
            let span_stack = std::iter::successors(Some(*primary_span), |span| {
                Some(&span.expansion.as_ref()?.span)
            });
            for (i, span) in span_stack.enumerate() {
                if is_dummy_macro_file(&span.file_name) {
                    continue;
                }

                // First span is the original diagnostic, others are macro call locations that
                // generated that code.
                let is_in_macro_call = i != 0;

                let secondary_location = location(config, workspace_root, span, snap);
                if secondary_location == primary_location {
                    continue;
                }
                related_info_macro_calls.push(lsp_types::DiagnosticRelatedInformation {
                    location: secondary_location.clone(),
                    message: if is_in_macro_call {
                        "Error originated from macro call here".to_string()
                    } else {
                        "Actual error occurred here".to_string()
                    },
                });
                // For the additional in-macro diagnostic we add the inverse message pointing to the error location in code.
                let information_for_additional_diagnostic =
                    vec![lsp_types::DiagnosticRelatedInformation {
                        location: primary_location.clone(),
                        message: "Exact error occurred here".to_string(),
                    }];

                let diagnostic = lsp_types::Diagnostic {
                    range: secondary_location.range,
                    // downgrade to hint if we're pointing at the macro
                    severity: Some(lsp_types::DiagnosticSeverity::HINT),
                    code: code.clone().map(lsp_types::NumberOrString::String),
                    code_description: code_description.clone(),
                    source: Some(source.clone()),
                    message: message.clone(),
                    related_information: Some(information_for_additional_diagnostic),
                    tags: if tags.is_empty() { None } else { Some(tags.clone()) },
                    data: None,
                };
                diagnostics.push(MappedRustDiagnostic {
                    url: secondary_location.uri,
                    diagnostic,
                    fix: None,
                });
            }

            // Emit the primary diagnostic.
            diagnostics.push(MappedRustDiagnostic {
                url: primary_location.uri.clone(),
                diagnostic: lsp_types::Diagnostic {
                    range: primary_location.range,
                    severity,
                    code: code.clone().map(lsp_types::NumberOrString::String),
                    code_description: code_description.clone(),
                    source: Some(source.clone()),
                    message,
                    related_information: {
                        let info = related_info_macro_calls
                            .iter()
                            .cloned()
                            .chain(subdiagnostics.iter().map(|sub| sub.related.clone()))
                            .collect::<Vec<_>>();
                        if info.is_empty() {
                            None
                        } else {
                            Some(info)
                        }
                    },
                    tags: if tags.is_empty() { None } else { Some(tags.clone()) },
                    data: None,
                },
                fix: None,
            });

            // Emit hint-level diagnostics for all `related_information` entries such as "help"s.
            // This is useful because they will show up in the user's editor, unlike
            // `related_information`, which just produces hard-to-read links, at least in VS Code.
            let back_ref = lsp_types::DiagnosticRelatedInformation {
                location: primary_location,
                message: "original diagnostic".to_string(),
            };
            for sub in &subdiagnostics {
                diagnostics.push(MappedRustDiagnostic {
                    url: sub.related.location.uri.clone(),
                    fix: sub.suggested_fix.clone(),
                    diagnostic: lsp_types::Diagnostic {
                        range: sub.related.location.range,
                        severity: Some(lsp_types::DiagnosticSeverity::HINT),
                        code: code.clone().map(lsp_types::NumberOrString::String),
                        code_description: code_description.clone(),
                        source: Some(source.clone()),
                        message: sub.related.message.clone(),
                        related_information: Some(vec![back_ref.clone()]),
                        tags: None, // don't apply modifiers again
                        data: None,
                    },
                });
            }

            diagnostics
        })
        .collect()
}
+
+fn rustc_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
+ code.filter(|code| {
+ let mut chars = code.chars();
+ chars.next().map_or(false, |c| c == 'E')
+ && chars.by_ref().take(4).all(|c| c.is_ascii_digit())
+ && chars.next().is_none()
+ })
+ .and_then(|code| {
+ lsp_types::Url::parse(&format!("https://doc.rust-lang.org/error-index.html#{}", code))
+ .ok()
+ .map(|href| lsp_types::CodeDescription { href })
+ })
+}
+
+fn clippy_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
+ code.and_then(|code| {
+ lsp_types::Url::parse(&format!(
+ "https://rust-lang.github.io/rust-clippy/master/index.html#{}",
+ code
+ ))
+ .ok()
+ .map(|href| lsp_types::CodeDescription { href })
+ })
+}
+
+#[cfg(test)]
+#[cfg(not(windows))]
+mod tests {
+ use std::{convert::TryInto, path::Path};
+
+ use crate::{config::Config, global_state::GlobalState};
+
+ use super::*;
+
+ use expect_test::{expect_file, ExpectFile};
+ use lsp_types::ClientCapabilities;
+
+ fn check(diagnostics_json: &str, expect: ExpectFile) {
+ check_with_config(DiagnosticsMapConfig::default(), diagnostics_json, expect)
+ }
+
+ fn check_with_config(config: DiagnosticsMapConfig, diagnostics_json: &str, expect: ExpectFile) {
+ let diagnostic: flycheck::Diagnostic = serde_json::from_str(diagnostics_json).unwrap();
+ let workspace_root: &AbsPath = Path::new("/test/").try_into().unwrap();
+ let (sender, _) = crossbeam_channel::unbounded();
+ let state = GlobalState::new(
+ sender,
+ Config::new(workspace_root.to_path_buf(), ClientCapabilities::default()),
+ );
+ let snap = state.snapshot();
+ let actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap);
+ expect.assert_debug_eq(&actual)
+ }
+
+ #[test]
+ fn rustc_incompatible_type_for_trait() {
+ check(
+ r##"{
+ "message": "method `next` has an incompatible type for trait",
+ "code": {
+ "code": "E0053",
+ "explanation": "\nThe parameters of any trait method must match between a trait implementation\nand the trait definition.\n\nHere are a couple examples of this error:\n\n```compile_fail,E0053\ntrait Foo {\n fn foo(x: u16);\n fn bar(&self);\n}\n\nstruct Bar;\n\nimpl Foo for Bar {\n // error, expected u16, found i16\n fn foo(x: i16) { }\n\n // error, types differ in mutability\n fn bar(&mut self) { }\n}\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "compiler/ty/list_iter.rs",
+ "byte_start": 1307,
+ "byte_end": 1350,
+ "line_start": 52,
+ "line_end": 52,
+ "column_start": 5,
+ "column_end": 48,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " fn next(&self) -> Option<&'list ty::Ref<M>> {",
+ "highlight_start": 5,
+ "highlight_end": 48
+ }
+ ],
+ "label": "types differ in mutability",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "expected type `fn(&mut ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&ty::Ref<M>>`\n found type `fn(&ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&'list ty::Ref<M>>`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0053]: method `next` has an incompatible type for trait\n --> compiler/ty/list_iter.rs:52:5\n |\n52 | fn next(&self) -> Option<&'list ty::Ref<M>> {\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ types differ in mutability\n |\n = note: expected type `fn(&mut ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&ty::Ref<M>>`\n found type `fn(&ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&'list ty::Ref<M>>`\n\n"
+ }
+ "##,
+ expect_file!["./test_data/rustc_incompatible_type_for_trait.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_unused_variable() {
+ check(
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable.txt"],
+ );
+ }
+
+ #[test]
+ #[cfg(not(windows))]
+ fn rustc_unused_variable_as_info() {
+ check_with_config(
+ DiagnosticsMapConfig {
+ warnings_as_info: vec!["unused_variables".to_string()],
+ ..DiagnosticsMapConfig::default()
+ },
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable_as_info.txt"],
+ );
+ }
+
+ #[test]
+ #[cfg(not(windows))]
+ fn rustc_unused_variable_as_hint() {
+ check_with_config(
+ DiagnosticsMapConfig {
+ warnings_as_hint: vec!["unused_variables".to_string()],
+ ..DiagnosticsMapConfig::default()
+ },
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable_as_hint.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_wrong_number_of_parameters() {
+ check(
+ r##"{
+ "message": "this function takes 2 parameters but 3 parameters were supplied",
+ "code": {
+ "code": "E0061",
+ "explanation": "\nThe number of arguments passed to a function must match the number of arguments\nspecified in the function signature.\n\nFor example, a function like:\n\n```\nfn f(a: u16, b: &str) {}\n```\n\nMust always be called with exactly two arguments, e.g., `f(2, \"test\")`.\n\nNote that Rust does not have a notion of optional function arguments or\nvariadic functions (except for its C-FFI).\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "compiler/ty/select.rs",
+ "byte_start": 8787,
+ "byte_end": 9241,
+ "line_start": 219,
+ "line_end": 231,
+ "column_start": 5,
+ "column_end": 6,
+ "is_primary": false,
+ "text": [
+ {
+ "text": " pub fn add_evidence(",
+ "highlight_start": 5,
+ "highlight_end": 25
+ },
+ {
+ "text": " &mut self,",
+ "highlight_start": 1,
+ "highlight_end": 19
+ },
+ {
+ "text": " target_poly: &ty::Ref<ty::Poly>,",
+ "highlight_start": 1,
+ "highlight_end": 41
+ },
+ {
+ "text": " evidence_poly: &ty::Ref<ty::Poly>,",
+ "highlight_start": 1,
+ "highlight_end": 43
+ },
+ {
+ "text": " ) {",
+ "highlight_start": 1,
+ "highlight_end": 8
+ },
+ {
+ "text": " match target_poly {",
+ "highlight_start": 1,
+ "highlight_end": 28
+ },
+ {
+ "text": " ty::Ref::Var(tvar, _) => self.add_var_evidence(tvar, evidence_poly),",
+ "highlight_start": 1,
+ "highlight_end": 81
+ },
+ {
+ "text": " ty::Ref::Fixed(target_ty) => {",
+ "highlight_start": 1,
+ "highlight_end": 43
+ },
+ {
+ "text": " let evidence_ty = evidence_poly.resolve_to_ty();",
+ "highlight_start": 1,
+ "highlight_end": 65
+ },
+ {
+ "text": " self.add_evidence_ty(target_ty, evidence_poly, evidence_ty)",
+ "highlight_start": 1,
+ "highlight_end": 76
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 14
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 10
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 6
+ }
+ ],
+ "label": "defined here",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "compiler/ty/select.rs",
+ "byte_start": 4045,
+ "byte_end": 4057,
+ "line_start": 104,
+ "line_end": 104,
+ "column_start": 18,
+ "column_end": 30,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " self.add_evidence(target_fixed, evidence_fixed, false);",
+ "highlight_start": 18,
+ "highlight_end": 30
+ }
+ ],
+ "label": "expected 2 parameters",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0061]: this function takes 2 parameters but 3 parameters were supplied\n --> compiler/ty/select.rs:104:18\n |\n104 | self.add_evidence(target_fixed, evidence_fixed, false);\n | ^^^^^^^^^^^^ expected 2 parameters\n...\n219 | / pub fn add_evidence(\n220 | | &mut self,\n221 | | target_poly: &ty::Ref<ty::Poly>,\n222 | | evidence_poly: &ty::Ref<ty::Poly>,\n... |\n230 | | }\n231 | | }\n | |_____- defined here\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_wrong_number_of_parameters.txt"],
+ );
+ }
+
+ #[test]
+ fn clippy_pass_by_ref() {
+ check(
+ r##"{
+ "message": "this argument is passed by reference, but would be more efficient if passed by value",
+ "code": {
+ "code": "clippy::trivially_copy_pass_by_ref",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "compiler/mir/tagset.rs",
+ "byte_start": 941,
+ "byte_end": 946,
+ "line_start": 42,
+ "line_end": 42,
+ "column_start": 24,
+ "column_end": 29,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " pub fn is_disjoint(&self, other: Self) -> bool {",
+ "highlight_start": 24,
+ "highlight_end": 29
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "lint level defined here",
+ "code": null,
+ "level": "note",
+ "spans": [
+ {
+ "file_name": "compiler/lib.rs",
+ "byte_start": 8,
+ "byte_end": 19,
+ "line_start": 1,
+ "line_end": 1,
+ "column_start": 9,
+ "column_end": 20,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "#![warn(clippy::all)]",
+ "highlight_start": 9,
+ "highlight_end": 20
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "#[warn(clippy::trivially_copy_pass_by_ref)] implied by #[warn(clippy::all)]",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref",
+ "code": null,
+ "level": "help",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider passing by value instead",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "compiler/mir/tagset.rs",
+ "byte_start": 941,
+ "byte_end": 946,
+ "line_start": 42,
+ "line_end": 42,
+ "column_start": 24,
+ "column_end": 29,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " pub fn is_disjoint(&self, other: Self) -> bool {",
+ "highlight_start": 24,
+ "highlight_end": 29
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "self",
+ "suggestion_applicability": "Unspecified",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: this argument is passed by reference, but would be more efficient if passed by value\n --> compiler/mir/tagset.rs:42:24\n |\n42 | pub fn is_disjoint(&self, other: Self) -> bool {\n | ^^^^^ help: consider passing by value instead: `self`\n |\nnote: lint level defined here\n --> compiler/lib.rs:1:9\n |\n1 | #![warn(clippy::all)]\n | ^^^^^^^^^^^\n = note: #[warn(clippy::trivially_copy_pass_by_ref)] implied by #[warn(clippy::all)]\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref\n\n"
+ }"##,
+ expect_file!["./test_data/clippy_pass_by_ref.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_range_map_lsp_position() {
+ check(
+ r##"{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "Expected type did not match the received type.\n\nErroneous code examples:\n\n```compile_fail,E0308\nfn plus_one(x: i32) -> i32 {\n x + 1\n}\n\nplus_one(\"Not a number\");\n// ^^^^^^^^^^^^^^ expected `i32`, found `&str`\n\nif \"Not a bool\" {\n// ^^^^^^^^^^^^ expected `bool`, found `&str`\n}\n\nlet x: f32 = \"Not a float\";\n// --- ^^^^^^^^^^^^^ expected `f32`, found `&str`\n// |\n// expected due to this\n```\n\nThis error occurs when an expression was used in a place where the compiler\nexpected an expression of a different type. It can occur in several cases, the\nmost common being when calling a function and passing an argument which has a\ndifferent type than the matching type in the function declaration.\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "crates/test_diagnostics/src/main.rs",
+ "byte_start": 87,
+ "byte_end": 105,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 18,
+ "column_end": 24,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23",
+ "highlight_start": 18,
+ "highlight_end": 24
+ }
+ ],
+ "label": "expected `u32`, found `&str`",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "crates/test_diagnostics/src/main.rs",
+ "byte_start": 81,
+ "byte_end": 84,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 12,
+ "column_end": 15,
+ "is_primary": false,
+ "text": [
+ {
+ "text": " let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23",
+ "highlight_start": 12,
+ "highlight_end": 15
+ }
+ ],
+ "label": "expected due to this",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0308]: mismatched types\n --> crates/test_diagnostics/src/main.rs:4:18\n |\n4 | let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23\n | --- ^^^^^^ expected `u32`, found `&str`\n | |\n | expected due to this\n\n"
+ }"##,
+ expect_file!("./test_data/rustc_range_map_lsp_position.txt"),
+ )
+ }
+
+ #[test]
+ fn rustc_mismatched_type() {
+ check(
+ r##"{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "\nThis error occurs when the compiler was unable to infer the concrete type of a\nvariable. It can occur for several cases, the most common of which is a\nmismatch in the expected type that the compiler inferred for a variable's\ninitializing expression, and the actual type explicitly assigned to the\nvariable.\n\nFor example:\n\n```compile_fail,E0308\nlet x: i32 = \"I am not a number!\";\n// ~~~ ~~~~~~~~~~~~~~~~~~~~\n// | |\n// | initializing expression;\n// | compiler infers type `&str`\n// |\n// type `i32` assigned to variable `x`\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "runtime/compiler_support.rs",
+ "byte_start": 1589,
+ "byte_end": 1594,
+ "line_start": 48,
+ "line_end": 48,
+ "column_start": 65,
+ "column_end": 70,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let layout = alloc::Layout::from_size_align_unchecked(size, align);",
+ "highlight_start": 65,
+ "highlight_end": 70
+ }
+ ],
+ "label": "expected usize, found u32",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0308]: mismatched types\n --> runtime/compiler_support.rs:48:65\n |\n48 | let layout = alloc::Layout::from_size_align_unchecked(size, align);\n | ^^^^^ expected usize, found u32\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_mismatched_type.txt"],
+ );
+ }
+
+ #[test]
+ fn handles_macro_location() {
+ check(
+ r##"{
+ "rendered": "error[E0277]: can't compare `{integer}` with `&str`\n --> src/main.rs:2:5\n |\n2 | assert_eq!(1, \"love\");\n | ^^^^^^^^^^^^^^^^^^^^^^ no implementation for `{integer} == &str`\n |\n = help: the trait `std::cmp::PartialEq<&str>` is not implemented for `{integer}`\n = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)\n\n",
+ "children": [
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "the trait `std::cmp::PartialEq<&str>` is not implemented for `{integer}`",
+ "rendered": null,
+ "spans": []
+ }
+ ],
+ "code": {
+ "code": "E0277",
+ "explanation": "\nYou tried to use a type which doesn't implement some trait in a place which\nexpected that trait. Erroneous code example:\n\n```compile_fail,E0277\n// here we declare the Foo trait with a bar method\ntrait Foo {\n fn bar(&self);\n}\n\n// we now declare a function which takes an object implementing the Foo trait\nfn some_func<T: Foo>(foo: T) {\n foo.bar();\n}\n\nfn main() {\n // we now call the method with the i32 type, which doesn't implement\n // the Foo trait\n some_func(5i32); // error: the trait bound `i32 : Foo` is not satisfied\n}\n```\n\nIn order to fix this error, verify that the type you're using does implement\nthe trait. Example:\n\n```\ntrait Foo {\n fn bar(&self);\n}\n\nfn some_func<T: Foo>(foo: T) {\n foo.bar(); // we can now use this method since i32 implements the\n // Foo trait\n}\n\n// we implement the trait on the i32 type\nimpl Foo for i32 {\n fn bar(&self) {}\n}\n\nfn main() {\n some_func(5i32); // ok!\n}\n```\n\nOr in a generic context, an erroneous code example would look like:\n\n```compile_fail,E0277\nfn some_func<T>(foo: T) {\n println!(\"{:?}\", foo); // error: the trait `core::fmt::Debug` is not\n // implemented for the type `T`\n}\n\nfn main() {\n // We now call the method with the i32 type,\n // which *does* implement the Debug trait.\n some_func(5i32);\n}\n```\n\nNote that the error here is in the definition of the generic function: Although\nwe only call it with a parameter that does implement `Debug`, the compiler\nstill rejects the function: It must work with all possible input types. In\norder to make this example compile, we need to restrict the generic type we're\naccepting:\n\n```\nuse std::fmt;\n\n// Restrict the input type to types that implement Debug.\nfn some_func<T: fmt::Debug>(foo: T) {\n println!(\"{:?}\", foo);\n}\n\nfn main() {\n // Calling the method is still fine, as i32 implements Debug.\n some_func(5i32);\n\n // This would fail to compile now:\n // struct WithoutDebug;\n // some_func(WithoutDebug);\n}\n```\n\nRust only looks at the signature of the called function, as such it must\nalready specify all requirements that will be used for every type parameter.\n"
+ },
+ "level": "error",
+ "message": "can't compare `{integer}` with `&str`",
+ "spans": [
+ {
+ "byte_end": 155,
+ "byte_start": 153,
+ "column_end": 33,
+ "column_start": 31,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 940,
+ "byte_start": 0,
+ "column_end": 6,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "<::core::macros::assert_eq macros>",
+ "is_primary": false,
+ "label": null,
+ "line_end": 36,
+ "line_start": 1,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 35,
+ "highlight_start": 1,
+ "text": "($ left : expr, $ right : expr) =>"
+ },
+ {
+ "highlight_end": 3,
+ "highlight_start": 1,
+ "text": "({"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " match (& $ left, & $ right)"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 34,
+ "highlight_start": 1,
+ "text": " (left_val, right_val) =>"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 46,
+ "highlight_start": 1,
+ "text": " if ! (* left_val == * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 25,
+ "highlight_start": 1,
+ "text": " panic !"
+ },
+ {
+ "highlight_end": 57,
+ "highlight_start": 1,
+ "text": " (r#\"assertion failed: `(left == right)`"
+ },
+ {
+ "highlight_end": 16,
+ "highlight_start": 1,
+ "text": " left: `{:?}`,"
+ },
+ {
+ "highlight_end": 18,
+ "highlight_start": 1,
+ "text": " right: `{:?}`\"#,"
+ },
+ {
+ "highlight_end": 47,
+ "highlight_start": 1,
+ "text": " & * left_val, & * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 42,
+ "highlight_start": 1,
+ "text": " }) ; ($ left : expr, $ right : expr,) =>"
+ },
+ {
+ "highlight_end": 49,
+ "highlight_start": 1,
+ "text": "({ $ crate :: assert_eq ! ($ left, $ right) }) ;"
+ },
+ {
+ "highlight_end": 53,
+ "highlight_start": 1,
+ "text": "($ left : expr, $ right : expr, $ ($ arg : tt) +) =>"
+ },
+ {
+ "highlight_end": 3,
+ "highlight_start": 1,
+ "text": "({"
+ },
+ {
+ "highlight_end": 37,
+ "highlight_start": 1,
+ "text": " match (& ($ left), & ($ right))"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 34,
+ "highlight_start": 1,
+ "text": " (left_val, right_val) =>"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 46,
+ "highlight_start": 1,
+ "text": " if ! (* left_val == * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 25,
+ "highlight_start": 1,
+ "text": " panic !"
+ },
+ {
+ "highlight_end": 57,
+ "highlight_start": 1,
+ "text": " (r#\"assertion failed: `(left == right)`"
+ },
+ {
+ "highlight_end": 16,
+ "highlight_start": 1,
+ "text": " left: `{:?}`,"
+ },
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": " right: `{:?}`: {}\"#,"
+ },
+ {
+ "highlight_end": 72,
+ "highlight_start": 1,
+ "text": " & * left_val, & * right_val, $ crate :: format_args !"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " ($ ($ arg) +))"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 6,
+ "highlight_start": 1,
+ "text": " }) ;"
+ }
+ ]
+ },
+ "macro_decl_name": "assert_eq!",
+ "span": {
+ "byte_end": 38,
+ "byte_start": 16,
+ "column_end": 27,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 2,
+ "line_start": 2,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 27,
+ "highlight_start": 5,
+ "text": " assert_eq!(1, \"love\");"
+ }
+ ]
+ }
+ },
+ "file_name": "<::core::macros::assert_eq macros>",
+ "is_primary": true,
+ "label": "no implementation for `{integer} == &str`",
+ "line_end": 7,
+ "line_start": 7,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 33,
+ "highlight_start": 31,
+ "text": " if ! (* left_val == * right_val)"
+ }
+ ]
+ }
+ ]
+ }"##,
+ expect_file!["./test_data/handles_macro_location.txt"],
+ );
+ }
+
+ #[test]
+ fn macro_compiler_error() {
+ check(
+ r##"{
+ "rendered": "error: Please register your known path in the path module\n --> crates/hir_def/src/path.rs:265:9\n |\n265 | compile_error!(\"Please register your known path in the path module\")\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n | \n ::: crates/hir_def/src/data.rs:80:16\n |\n80 | let path = path![std::future::Future];\n | -------------------------- in this macro invocation\n\n",
+ "children": [],
+ "code": null,
+ "level": "error",
+ "message": "Please register your known path in the path module",
+ "spans": [
+ {
+ "byte_end": 8285,
+ "byte_start": 8217,
+ "column_end": 77,
+ "column_start": 9,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 8294,
+ "byte_start": 7858,
+ "column_end": 2,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 267,
+ "line_start": 254,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 28,
+ "highlight_start": 1,
+ "text": "macro_rules! __known_path {"
+ },
+ {
+ "highlight_end": 37,
+ "highlight_start": 1,
+ "text": " (std::iter::IntoIterator) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::result::Result) => {};"
+ },
+ {
+ "highlight_end": 29,
+ "highlight_start": 1,
+ "text": " (std::ops::Range) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeFrom) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeFull) => {};"
+ },
+ {
+ "highlight_end": 31,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeTo) => {};"
+ },
+ {
+ "highlight_end": 40,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeToInclusive) => {};"
+ },
+ {
+ "highlight_end": 38,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeInclusive) => {};"
+ },
+ {
+ "highlight_end": 27,
+ "highlight_start": 1,
+ "text": " (std::ops::Try) => {};"
+ },
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": " ($path:path) => {"
+ },
+ {
+ "highlight_end": 77,
+ "highlight_start": 1,
+ "text": " compile_error!(\"Please register your known path in the path module\")"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " };"
+ },
+ {
+ "highlight_end": 2,
+ "highlight_start": 1,
+ "text": "}"
+ }
+ ]
+ },
+ "macro_decl_name": "$crate::__known_path!",
+ "span": {
+ "byte_end": 8427,
+ "byte_start": 8385,
+ "column_end": 51,
+ "column_start": 9,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 8611,
+ "byte_start": 8312,
+ "column_end": 2,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 277,
+ "line_start": 270,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": "macro_rules! __path {"
+ },
+ {
+ "highlight_end": 43,
+ "highlight_start": 1,
+ "text": " ($start:ident $(:: $seg:ident)*) => ({"
+ },
+ {
+ "highlight_end": 51,
+ "highlight_start": 1,
+ "text": " $crate::__known_path!($start $(:: $seg)*);"
+ },
+ {
+ "highlight_end": 87,
+ "highlight_start": 1,
+ "text": " $crate::path::ModPath::from_simple_segments($crate::path::PathKind::Abs, vec!["
+ },
+ {
+ "highlight_end": 76,
+ "highlight_start": 1,
+ "text": " $crate::path::__name![$start], $($crate::path::__name![$seg],)*"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " ])"
+ },
+ {
+ "highlight_end": 8,
+ "highlight_start": 1,
+ "text": " });"
+ },
+ {
+ "highlight_end": 2,
+ "highlight_start": 1,
+ "text": "}"
+ }
+ ]
+ },
+ "macro_decl_name": "path!",
+ "span": {
+ "byte_end": 2966,
+ "byte_start": 2940,
+ "column_end": 42,
+ "column_start": 16,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/data.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 80,
+ "line_start": 80,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 42,
+ "highlight_start": 16,
+ "text": " let path = path![std::future::Future];"
+ }
+ ]
+ }
+ },
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 272,
+ "line_start": 272,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 51,
+ "highlight_start": 9,
+ "text": " $crate::__known_path!($start $(:: $seg)*);"
+ }
+ ]
+ }
+ },
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 265,
+ "line_start": 265,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 77,
+ "highlight_start": 9,
+ "text": " compile_error!(\"Please register your known path in the path module\")"
+ }
+ ]
+ }
+ ]
+ }
+ "##,
+ expect_file!["./test_data/macro_compiler_error.txt"],
+ );
+ }
+
+ #[test]
+ fn snap_multi_line_fix() {
+ check(
+ r##"{
+ "rendered": "warning: returning the result of a let binding from a block\n --> src/main.rs:4:5\n |\n3 | let a = (0..10).collect();\n | -------------------------- unnecessary let binding\n4 | a\n | ^\n |\n = note: `#[warn(clippy::let_and_return)]` on by default\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return\nhelp: return the expression directly\n |\n3 | \n4 | (0..10).collect()\n |\n\n",
+ "children": [
+ {
+ "children": [],
+ "code": null,
+ "level": "note",
+ "message": "`#[warn(clippy::let_and_return)]` on by default",
+ "rendered": null,
+ "spans": []
+ },
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return",
+ "rendered": null,
+ "spans": []
+ },
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "return the expression directly",
+ "rendered": null,
+ "spans": [
+ {
+ "byte_end": 55,
+ "byte_start": 29,
+ "column_end": 31,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 3,
+ "line_start": 3,
+ "suggested_replacement": "",
+ "suggestion_applicability": "MachineApplicable",
+ "text": [
+ {
+ "highlight_end": 31,
+ "highlight_start": 5,
+ "text": " let a = (0..10).collect();"
+ }
+ ]
+ },
+ {
+ "byte_end": 61,
+ "byte_start": 60,
+ "column_end": 6,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 4,
+ "line_start": 4,
+ "suggested_replacement": "(0..10).collect()",
+ "suggestion_applicability": "MachineApplicable",
+ "text": [
+ {
+ "highlight_end": 6,
+ "highlight_start": 5,
+ "text": " a"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "code": {
+ "code": "clippy::let_and_return",
+ "explanation": null
+ },
+ "level": "warning",
+ "message": "returning the result of a let binding from a block",
+ "spans": [
+ {
+ "byte_end": 55,
+ "byte_start": 29,
+ "column_end": 31,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": false,
+ "label": "unnecessary let binding",
+ "line_end": 3,
+ "line_start": 3,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 31,
+ "highlight_start": 5,
+ "text": " let a = (0..10).collect();"
+ }
+ ]
+ },
+ {
+ "byte_end": 61,
+ "byte_start": 60,
+ "column_end": 6,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 4,
+ "line_start": 4,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 6,
+ "highlight_start": 5,
+ "text": " a"
+ }
+ ]
+ }
+ ]
+ }
+ "##,
+ expect_file!["./test_data/snap_multi_line_fix.txt"],
+ );
+ }
+
+ #[test]
+ fn reasonable_line_numbers_from_empty_file() {
+ check(
+ r##"{
+ "message": "`main` function not found in crate `current`",
+ "code": {
+ "code": "E0601",
+ "explanation": "No `main` function was found in a binary crate.\n\nTo fix this error, add a `main` function:\n\n```\nfn main() {\n // Your program will start here.\n println!(\"Hello world!\");\n}\n```\n\nIf you don't know the basics of Rust, you can look at the\n[Rust Book][rust-book] to get started.\n\n[rust-book]: https://doc.rust-lang.org/book/\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "src/bin/current.rs",
+ "byte_start": 0,
+ "byte_end": 0,
+ "line_start": 0,
+ "line_end": 0,
+ "column_start": 1,
+ "column_end": 1,
+ "is_primary": true,
+ "text": [],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "consider adding a `main` function to `src/bin/current.rs`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0601]: `main` function not found in crate `current`\n |\n = note: consider adding a `main` function to `src/bin/current.rs`\n\n"
+ }"##,
+ expect_file!["./test_data/reasonable_line_numbers_from_empty_file.txt"],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs
new file mode 100644
index 000000000..3fcfb4a1b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs
@@ -0,0 +1,53 @@
+//! Generate minimal `TextEdit`s from different text versions
+use dissimilar::Chunk;
+use ide::{TextEdit, TextRange, TextSize};
+
+pub(crate) fn diff(left: &str, right: &str) -> TextEdit {
+ let chunks = dissimilar::diff(left, right);
+ textedit_from_chunks(chunks)
+}
+
+fn textedit_from_chunks(chunks: Vec<dissimilar::Chunk<'_>>) -> TextEdit {
+ let mut builder = TextEdit::builder();
+ let mut pos = TextSize::default();
+
+ let mut chunks = chunks.into_iter().peekable();
+ while let Some(chunk) = chunks.next() {
+ if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) {
+ chunks.next().unwrap();
+ let deleted_len = TextSize::of(deleted);
+ builder.replace(TextRange::at(pos, deleted_len), inserted.into());
+ pos += deleted_len;
+ continue;
+ }
+
+ match chunk {
+ Chunk::Equal(text) => {
+ pos += TextSize::of(text);
+ }
+ Chunk::Delete(deleted) => {
+ let deleted_len = TextSize::of(deleted);
+ builder.delete(TextRange::at(pos, deleted_len));
+ pos += deleted_len;
+ }
+ Chunk::Insert(inserted) => {
+ builder.insert(pos, inserted.into());
+ }
+ }
+ }
+ builder.finish()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn diff_applies() {
+ let mut original = String::from("fn foo(a:u32){\n}");
+ let result = "fn foo(a: u32) {}";
+ let edit = diff(&original, result);
+ edit.apply(&mut original);
+ assert_eq!(original, result);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
new file mode 100644
index 000000000..f16559148
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
@@ -0,0 +1,266 @@
+//! See [RequestDispatcher].
+use std::{fmt, panic, thread};
+
+use ide::Cancelled;
+use lsp_server::ExtractError;
+use serde::{de::DeserializeOwned, Serialize};
+
+use crate::{
+ global_state::{GlobalState, GlobalStateSnapshot},
+ main_loop::Task,
+ version::version,
+ LspError, Result,
+};
+
+/// A visitor for routing a raw JSON request to an appropriate handler function.
+///
+/// Most requests are read-only and async and are handled on the threadpool
+/// (`on` method).
+///
+/// Some read-only requests are latency sensitive, and are immediately handled
+/// on the main loop thread (`on_sync`). These are typically typing-related
+/// requests.
+///
+/// Some requests modify the state, and are run on the main thread to get
+/// `&mut` (`on_sync_mut`).
+///
+/// Read-only requests are wrapped into `catch_unwind` -- they don't modify the
+/// state, so it's OK to recover from their failures.
+pub(crate) struct RequestDispatcher<'a> {
+ pub(crate) req: Option<lsp_server::Request>,
+ pub(crate) global_state: &'a mut GlobalState,
+}
+
+impl<'a> RequestDispatcher<'a> {
+ /// Dispatches the request onto the current thread, given full access to
+ /// mutable global state. Unlike all other methods here, this one isn't
+ /// guarded by `catch_unwind`, so, please, don't make bugs :-)
+ pub(crate) fn on_sync_mut<R>(
+ &mut self,
+ f: fn(&mut GlobalState, R::Params) -> Result<R::Result>,
+ ) -> &mut Self
+ where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned + panic::UnwindSafe + fmt::Debug,
+ R::Result: Serialize,
+ {
+ let (req, params, panic_context) = match self.parse::<R>() {
+ Some(it) => it,
+ None => return self,
+ };
+ let result = {
+ let _pctx = stdx::panic_context::enter(panic_context);
+ f(self.global_state, params)
+ };
+ if let Ok(response) = result_to_response::<R>(req.id.clone(), result) {
+ self.global_state.respond(response);
+ }
+
+ self
+ }
+
+ /// Dispatches the request onto the current thread.
+ pub(crate) fn on_sync<R>(
+ &mut self,
+ f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ ) -> &mut Self
+ where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned + panic::UnwindSafe + fmt::Debug,
+ R::Result: Serialize,
+ {
+ let (req, params, panic_context) = match self.parse::<R>() {
+ Some(it) => it,
+ None => return self,
+ };
+ let global_state_snapshot = self.global_state.snapshot();
+
+ let result = panic::catch_unwind(move || {
+ let _pctx = stdx::panic_context::enter(panic_context);
+ f(global_state_snapshot, params)
+ });
+
+ if let Ok(response) = thread_result_to_response::<R>(req.id.clone(), result) {
+ self.global_state.respond(response);
+ }
+
+ self
+ }
+
+ /// Dispatches the request onto thread pool
+ pub(crate) fn on<R>(
+ &mut self,
+ f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ ) -> &mut Self
+ where
+ R: lsp_types::request::Request + 'static,
+ R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
+ R::Result: Serialize,
+ {
+ let (req, params, panic_context) = match self.parse::<R>() {
+ Some(it) => it,
+ None => return self,
+ };
+
+ self.global_state.task_pool.handle.spawn({
+ let world = self.global_state.snapshot();
+ move || {
+ let result = panic::catch_unwind(move || {
+ let _pctx = stdx::panic_context::enter(panic_context);
+ f(world, params)
+ });
+ match thread_result_to_response::<R>(req.id.clone(), result) {
+ Ok(response) => Task::Response(response),
+ Err(_) => Task::Retry(req),
+ }
+ }
+ });
+
+ self
+ }
+
+ pub(crate) fn finish(&mut self) {
+ if let Some(req) = self.req.take() {
+ tracing::error!("unknown request: {:?}", req);
+ let response = lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::MethodNotFound as i32,
+ "unknown request".to_string(),
+ );
+ self.global_state.respond(response);
+ }
+ }
+
+ fn parse<R>(&mut self) -> Option<(lsp_server::Request, R::Params, String)>
+ where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned + fmt::Debug,
+ {
+ let req = match &self.req {
+ Some(req) if req.method == R::METHOD => self.req.take()?,
+ _ => return None,
+ };
+
+ let res = crate::from_json(R::METHOD, &req.params);
+ match res {
+ Ok(params) => {
+ let panic_context =
+ format!("\nversion: {}\nrequest: {} {:#?}", version(), R::METHOD, params);
+ Some((req, params, panic_context))
+ }
+ Err(err) => {
+ let response = lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::InvalidParams as i32,
+ err.to_string(),
+ );
+ self.global_state.respond(response);
+ None
+ }
+ }
+ }
+}
+
+fn thread_result_to_response<R>(
+ id: lsp_server::RequestId,
+ result: thread::Result<Result<R::Result>>,
+) -> Result<lsp_server::Response, Cancelled>
+where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned,
+ R::Result: Serialize,
+{
+ match result {
+ Ok(result) => result_to_response::<R>(id, result),
+ Err(panic) => {
+ let panic_message = panic
+ .downcast_ref::<String>()
+ .map(String::as_str)
+ .or_else(|| panic.downcast_ref::<&str>().copied());
+
+ let mut message = "request handler panicked".to_string();
+ if let Some(panic_message) = panic_message {
+ message.push_str(": ");
+ message.push_str(panic_message)
+ };
+
+ Ok(lsp_server::Response::new_err(
+ id,
+ lsp_server::ErrorCode::InternalError as i32,
+ message,
+ ))
+ }
+ }
+}
+
+fn result_to_response<R>(
+ id: lsp_server::RequestId,
+ result: Result<R::Result>,
+) -> Result<lsp_server::Response, Cancelled>
+where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned,
+ R::Result: Serialize,
+{
+ let res = match result {
+ Ok(resp) => lsp_server::Response::new_ok(id, &resp),
+ Err(e) => match e.downcast::<LspError>() {
+ Ok(lsp_error) => lsp_server::Response::new_err(id, lsp_error.code, lsp_error.message),
+ Err(e) => match e.downcast::<Cancelled>() {
+ Ok(cancelled) => return Err(*cancelled),
+ Err(e) => lsp_server::Response::new_err(
+ id,
+ lsp_server::ErrorCode::InternalError as i32,
+ e.to_string(),
+ ),
+ },
+ },
+ };
+ Ok(res)
+}
+
+pub(crate) struct NotificationDispatcher<'a> {
+ pub(crate) not: Option<lsp_server::Notification>,
+ pub(crate) global_state: &'a mut GlobalState,
+}
+
+impl<'a> NotificationDispatcher<'a> {
+ pub(crate) fn on<N>(
+ &mut self,
+ f: fn(&mut GlobalState, N::Params) -> Result<()>,
+ ) -> Result<&mut Self>
+ where
+ N: lsp_types::notification::Notification,
+ N::Params: DeserializeOwned + Send,
+ {
+ let not = match self.not.take() {
+ Some(it) => it,
+ None => return Ok(self),
+ };
+ let params = match not.extract::<N::Params>(N::METHOD) {
+ Ok(it) => it,
+ Err(ExtractError::JsonError { method, error }) => {
+ panic!("Invalid request\nMethod: {method}\n error: {error}",)
+ }
+ Err(ExtractError::MethodMismatch(not)) => {
+ self.not = Some(not);
+ return Ok(self);
+ }
+ };
+ let _pctx = stdx::panic_context::enter(format!(
+ "\nversion: {}\nnotification: {}",
+ version(),
+ N::METHOD
+ ));
+ f(self.global_state, params)?;
+ Ok(self)
+ }
+
+ pub(crate) fn finish(&mut self) {
+ if let Some(not) = &self.not {
+ if !not.method.starts_with("$/") {
+ tracing::error!("unhandled notification: {:?}", not);
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
new file mode 100644
index 000000000..7bdd34d1f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
@@ -0,0 +1,117 @@
+//! Conversion of lsp_types types to rust-analyzer specific ones.
+use anyhow::format_err;
+use ide::{Annotation, AnnotationKind, AssistKind, LineCol, LineColUtf16};
+use ide_db::base_db::{FileId, FilePosition, FileRange};
+use syntax::{TextRange, TextSize};
+use vfs::AbsPathBuf;
+
+use crate::{
+ from_json,
+ global_state::GlobalStateSnapshot,
+ line_index::{LineIndex, OffsetEncoding},
+ lsp_ext,
+ lsp_utils::invalid_params_error,
+ Result,
+};
+
+pub(crate) fn abs_path(url: &lsp_types::Url) -> Result<AbsPathBuf> {
+ let path = url.to_file_path().map_err(|()| "url is not a file")?;
+ Ok(AbsPathBuf::try_from(path).unwrap())
+}
+
+pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
+ abs_path(url).map(vfs::VfsPath::from)
+}
+
+pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
+ let line_col = match line_index.encoding {
+ OffsetEncoding::Utf8 => {
+ LineCol { line: position.line as u32, col: position.character as u32 }
+ }
+ OffsetEncoding::Utf16 => {
+ let line_col =
+ LineColUtf16 { line: position.line as u32, col: position.character as u32 };
+ line_index.index.to_utf8(line_col)
+ }
+ };
+ let text_size =
+ line_index.index.offset(line_col).ok_or_else(|| format_err!("Invalid offset"))?;
+ Ok(text_size)
+}
+
+pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Result<TextRange> {
+ let start = offset(line_index, range.start)?;
+ let end = offset(line_index, range.end)?;
+ let text_range = TextRange::new(start, end);
+ Ok(text_range)
+}
+
+pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> {
+ snap.url_to_file_id(url)
+}
+
+pub(crate) fn file_position(
+ snap: &GlobalStateSnapshot,
+ tdpp: lsp_types::TextDocumentPositionParams,
+) -> Result<FilePosition> {
+ let file_id = file_id(snap, &tdpp.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = offset(&line_index, tdpp.position)?;
+ Ok(FilePosition { file_id, offset })
+}
+
+pub(crate) fn file_range(
+ snap: &GlobalStateSnapshot,
+ text_document_identifier: lsp_types::TextDocumentIdentifier,
+ range: lsp_types::Range,
+) -> Result<FileRange> {
+ let file_id = file_id(snap, &text_document_identifier.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let range = text_range(&line_index, range)?;
+ Ok(FileRange { file_id, range })
+}
+
+pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {
+ let assist_kind = match &kind {
+ k if k == &lsp_types::CodeActionKind::EMPTY => AssistKind::None,
+ k if k == &lsp_types::CodeActionKind::QUICKFIX => AssistKind::QuickFix,
+ k if k == &lsp_types::CodeActionKind::REFACTOR => AssistKind::Refactor,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_EXTRACT => AssistKind::RefactorExtract,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_INLINE => AssistKind::RefactorInline,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_REWRITE => AssistKind::RefactorRewrite,
+ _ => return None,
+ };
+
+ Some(assist_kind)
+}
+
+pub(crate) fn annotation(
+ snap: &GlobalStateSnapshot,
+ code_lens: lsp_types::CodeLens,
+) -> Result<Annotation> {
+ let data =
+ code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_string()))?;
+ let resolve = from_json::<lsp_ext::CodeLensResolveData>("CodeLensResolveData", &data)?;
+
+ match resolve {
+ lsp_ext::CodeLensResolveData::Impls(params) => {
+ let file_id =
+ snap.url_to_file_id(&params.text_document_position_params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ Ok(Annotation {
+ range: text_range(&line_index, code_lens.range)?,
+ kind: AnnotationKind::HasImpls { file_id, data: None },
+ })
+ }
+ lsp_ext::CodeLensResolveData::References(params) => {
+ let file_id = snap.url_to_file_id(&params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ Ok(Annotation {
+ range: text_range(&line_index, code_lens.range)?,
+ kind: AnnotationKind::HasReferences { file_id, data: None },
+ })
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
new file mode 100644
index 000000000..8f881cba4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -0,0 +1,375 @@
+//! The context or environment in which the language server functions. In our
+//! server implementation this is known as the `WorldState`.
+//!
+//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
+
+use std::{sync::Arc, time::Instant};
+
+use crossbeam_channel::{unbounded, Receiver, Sender};
+use flycheck::FlycheckHandle;
+use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
+use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
+use lsp_types::{SemanticTokens, Url};
+use parking_lot::{Mutex, RwLock};
+use proc_macro_api::ProcMacroServer;
+use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
+use rustc_hash::FxHashMap;
+use vfs::AnchoredPathBuf;
+
+use crate::{
+ config::Config,
+ diagnostics::{CheckFixes, DiagnosticCollection},
+ from_proto,
+ line_index::{LineEndings, LineIndex},
+ lsp_ext,
+ main_loop::Task,
+ mem_docs::MemDocs,
+ op_queue::OpQueue,
+ reload::{self, SourceRootConfig},
+ task_pool::TaskPool,
+ to_proto::url_from_abs_path,
+ Result,
+};
+
+// Enforces drop order
+/// Pairs a worker handle with the receiving end of its message channel so
+/// both are owned (and dropped) together.
+pub(crate) struct Handle<H, C> {
+    pub(crate) handle: H,
+    pub(crate) receiver: C,
+}
+
+/// Callback run when the client answers one of the server's outgoing requests.
+pub(crate) type ReqHandler = fn(&mut GlobalState, lsp_server::Response);
+/// Request bookkeeping: incoming requests store `(method, received-at)` for
+/// latency logging; outgoing requests store their completion `ReqHandler`.
+pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;
+
+/// `GlobalState` is the primary mutable state of the language server
+///
+/// The most interesting components are `vfs`, which stores a consistent
+/// snapshot of the file systems, and `analysis_host`, which stores our
+/// incremental salsa database.
+///
+/// Note that this struct has more than one impl in various modules!
+pub(crate) struct GlobalState {
+    // Channel back to the LSP client; all responses/notifications go
+    // through `GlobalState::send`.
+    sender: Sender<lsp_server::Message>,
+    req_queue: ReqQueue,
+    pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
+    pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
+    pub(crate) config: Arc<Config>,
+    pub(crate) analysis_host: AnalysisHost,
+    pub(crate) diagnostics: DiagnosticCollection,
+    pub(crate) mem_docs: MemDocs,
+    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
+    pub(crate) shutdown_requested: bool,
+    pub(crate) proc_macro_changed: bool,
+    pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
+    pub(crate) source_root_config: SourceRootConfig,
+    pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,
+
+    pub(crate) flycheck: Vec<FlycheckHandle>,
+    pub(crate) flycheck_sender: Sender<flycheck::Message>,
+    pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
+
+    // Shared with `GlobalStateSnapshot`; the map carries per-file line
+    // endings so edits can be round-tripped with the client's encoding.
+    pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
+    pub(crate) vfs_config_version: u32,
+    pub(crate) vfs_progress_config_version: u32,
+    pub(crate) vfs_progress_n_total: usize,
+    pub(crate) vfs_progress_n_done: usize,
+
+    /// `workspaces` field stores the data we actually use, while the `OpQueue`
+    /// stores the result of the last fetch.
+    ///
+    /// If the fetch (partially) fails, we do not update the current value.
+    ///
+    /// The handling of build data is subtle. We fetch workspace in two phases:
+    ///
+    /// *First*, we run `cargo metadata`, which gives us fast results for
+    /// initial analysis.
+    ///
+    /// *Second*, we run `cargo check` which runs build scripts and compiles
+    /// proc macros.
+    ///
+    /// We need both for the precise analysis, but we want rust-analyzer to be
+    /// at least partially available just after the first phase. That's because
+    /// first phase is much faster, and is much less likely to fail.
+    ///
+    /// This creates a complication -- by the time the second phase completes,
+    /// the results of the first phase could be invalid. That is, while we run
+    /// `cargo check`, the user edits `Cargo.toml`, we notice this, and the new
+    /// `cargo metadata` completes before `cargo check`.
+    ///
+    /// An additional complication is that we want to avoid needless work. When
+    /// the user just adds comments or whitespace to Cargo.toml, we do not want
+    /// to invalidate any salsa caches.
+    pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+    pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
+    pub(crate) fetch_build_data_queue:
+        OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
+
+    pub(crate) prime_caches_queue: OpQueue<()>,
+}
+
+/// An immutable snapshot of the world's state at a point in time.
+pub(crate) struct GlobalStateSnapshot {
+    pub(crate) config: Arc<Config>,
+    pub(crate) analysis: Analysis,
+    pub(crate) check_fixes: CheckFixes,
+    mem_docs: MemDocs,
+    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
+    // Shared with `GlobalState`; read-only access through the lock here.
+    vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
+    pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+}
+
+// NOTE(review): manual assertion that the snapshot is unwind-safe,
+// presumably so request handlers can run under `catch_unwind` — confirm
+// every field is genuinely safe to observe after a panic.
+impl std::panic::UnwindSafe for GlobalStateSnapshot {}
+
+impl GlobalState {
+    /// Creates the server state: spawns the VFS notify loader and the task
+    /// pool, builds the `AnalysisHost`, and applies the initial `config`.
+    pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> GlobalState {
+        let loader = {
+            let (sender, receiver) = unbounded::<vfs::loader::Message>();
+            let handle: vfs_notify::NotifyHandle =
+                vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+            let handle = Box::new(handle) as Box<dyn vfs::loader::Handle>;
+            Handle { handle, receiver }
+        };
+
+        let task_pool = {
+            let (sender, receiver) = unbounded();
+            let handle = TaskPool::new(sender);
+            Handle { handle, receiver }
+        };
+
+        let analysis_host = AnalysisHost::new(config.lru_capacity());
+        let (flycheck_sender, flycheck_receiver) = unbounded();
+        let mut this = GlobalState {
+            sender,
+            req_queue: ReqQueue::default(),
+            task_pool,
+            loader,
+            config: Arc::new(config.clone()),
+            analysis_host,
+            diagnostics: Default::default(),
+            mem_docs: MemDocs::default(),
+            semantic_tokens_cache: Arc::new(Default::default()),
+            shutdown_requested: false,
+            proc_macro_changed: false,
+            last_reported_status: None,
+            source_root_config: SourceRootConfig::default(),
+            proc_macro_clients: vec![],
+
+            flycheck: Vec::new(),
+            flycheck_sender,
+            flycheck_receiver,
+
+            vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
+            vfs_config_version: 0,
+            vfs_progress_config_version: 0,
+            vfs_progress_n_total: 0,
+            vfs_progress_n_done: 0,
+
+            workspaces: Arc::new(Vec::new()),
+            fetch_workspaces_queue: OpQueue::default(),
+            prime_caches_queue: OpQueue::default(),
+
+            fetch_build_data_queue: OpQueue::default(),
+        };
+        // Apply any required database inputs from the config.
+        this.update_configuration(config);
+        this
+    }
+
+    /// Drains pending VFS changes into the salsa database.
+    ///
+    /// Returns `false` when there was nothing to apply. Also requests a
+    /// workspace re-fetch when a changed path warrants it, and records
+    /// whether any proc-macro crate's file changed.
+    pub(crate) fn process_changes(&mut self) -> bool {
+        let _p = profile::span("GlobalState::process_changes");
+        let mut fs_changes = Vec::new();
+        // A file was added or deleted
+        let mut has_structure_changes = false;
+
+        let (change, changed_files) = {
+            let mut change = Change::new();
+            // Hold the vfs write lock only while building the `Change`.
+            let (vfs, line_endings_map) = &mut *self.vfs.write();
+            let changed_files = vfs.take_changes();
+            if changed_files.is_empty() {
+                return false;
+            }
+
+            for file in &changed_files {
+                if let Some(path) = vfs.file_path(file.file_id).as_path() {
+                    let path = path.to_path_buf();
+                    if reload::should_refresh_for_change(&path, file.change_kind) {
+                        self.fetch_workspaces_queue
+                            .request_op(format!("vfs file change: {}", path.display()));
+                    }
+                    fs_changes.push((path, file.change_kind));
+                    if file.is_created_or_deleted() {
+                        has_structure_changes = true;
+                    }
+                }
+
+                if !file.exists() {
+                    self.diagnostics.clear_native_for(file.file_id);
+                }
+
+                // Non-UTF-8 contents are treated like a missing file (`None`).
+                let text = if file.exists() {
+                    let bytes = vfs.file_contents(file.file_id).to_vec();
+                    String::from_utf8(bytes).ok().and_then(|text| {
+                        // Normalize to `\n`; remember the original endings so
+                        // edits can be converted back for the client.
+                        let (text, line_endings) = LineEndings::normalize(text);
+                        line_endings_map.insert(file.file_id, line_endings);
+                        Some(Arc::new(text))
+                    })
+                } else {
+                    None
+                };
+                change.change_file(file.file_id, text);
+            }
+            if has_structure_changes {
+                let roots = self.source_root_config.partition(vfs);
+                change.set_roots(roots);
+            }
+            (change, changed_files)
+        };
+
+        self.analysis_host.apply_change(change);
+
+        // Flag content edits to files belonging to proc-macro crates, so the
+        // caller knows expansions may be stale.
+        let raw_database = &self.analysis_host.raw_database();
+        self.proc_macro_changed =
+            changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
+                let crates = raw_database.relevant_crates(file.file_id);
+                let crate_graph = raw_database.crate_graph();
+
+                crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
+            });
+        true
+    }
+
+    /// Produces the immutable view handed to request handlers.
+    pub(crate) fn snapshot(&self) -> GlobalStateSnapshot {
+        GlobalStateSnapshot {
+            config: Arc::clone(&self.config),
+            workspaces: Arc::clone(&self.workspaces),
+            analysis: self.analysis_host.analysis(),
+            vfs: Arc::clone(&self.vfs),
+            check_fixes: Arc::clone(&self.diagnostics.check_fixes),
+            mem_docs: self.mem_docs.clone(),
+            semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
+        }
+    }
+
+    /// Sends a server->client request; `handler` runs when the response
+    /// arrives (see `complete_request`).
+    pub(crate) fn send_request<R: lsp_types::request::Request>(
+        &mut self,
+        params: R::Params,
+        handler: ReqHandler,
+    ) {
+        let request = self.req_queue.outgoing.register(R::METHOD.to_string(), params, handler);
+        self.send(request.into());
+    }
+
+    /// Dispatches a client response to the handler registered in
+    /// `send_request`. Panics if the response matches no pending request.
+    pub(crate) fn complete_request(&mut self, response: lsp_server::Response) {
+        let handler = self
+            .req_queue
+            .outgoing
+            .complete(response.id.clone())
+            .expect("received response for unknown request");
+        handler(self, response)
+    }
+
+    /// Sends a server->client notification (fire and forget).
+    pub(crate) fn send_notification<N: lsp_types::notification::Notification>(
+        &mut self,
+        params: N::Params,
+    ) {
+        let not = lsp_server::Notification::new(N::METHOD.to_string(), params);
+        self.send(not.into());
+    }
+
+    /// Records an incoming client request (method + arrival time) so
+    /// `respond`/`cancel` can later complete it and log its latency.
+    pub(crate) fn register_request(
+        &mut self,
+        request: &lsp_server::Request,
+        request_received: Instant,
+    ) {
+        self.req_queue
+            .incoming
+            .register(request.id.clone(), (request.method.clone(), request_received));
+    }
+
+    /// Completes a previously registered incoming request. Responses for
+    /// unknown/cancelled ids are silently dropped.
+    pub(crate) fn respond(&mut self, response: lsp_server::Response) {
+        if let Some((method, start)) = self.req_queue.incoming.complete(response.id.clone()) {
+            if let Some(err) = &response.error {
+                // Surface handler panics to the user so they get reported.
+                if err.message.starts_with("server panicked") {
+                    self.poke_rust_analyzer_developer(format!("{}, check the log", err.message))
+                }
+            }
+
+            let duration = start.elapsed();
+            tracing::debug!("handled {} - ({}) in {:0.2?}", method, response.id, duration);
+            self.send(response.into());
+        }
+    }
+
+    /// Cancels a pending incoming request, replying with the cancellation
+    /// response produced by the request queue (if it was still pending).
+    pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) {
+        if let Some(response) = self.req_queue.incoming.cancel(request_id) {
+            self.send(response.into());
+        }
+    }
+
+    // Single funnel for all outgoing LSP traffic.
+    fn send(&mut self, message: lsp_server::Message) {
+        self.sender.send(message).unwrap()
+    }
+}
+
+impl Drop for GlobalState {
+    fn drop(&mut self) {
+        // Ask salsa to cancel in-flight queries so shutdown isn't blocked
+        // behind long-running analysis.
+        self.analysis_host.request_cancellation();
+    }
+}
+
+impl GlobalStateSnapshot {
+    /// Resolves a client URL to its VFS file id; errors if not in the VFS.
+    pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
+        url_to_file_id(&self.vfs.read().0, url)
+    }
+
+    /// Inverse of `url_to_file_id`; panics if the id has no absolute path.
+    pub(crate) fn file_id_to_url(&self, id: FileId) -> Url {
+        file_id_to_url(&self.vfs.read().0, id)
+    }
+
+    /// Builds the line index for `file_id`, bundling the file's recorded
+    /// line endings and the negotiated offset encoding.
+    pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
+        // Indexing panics if the file was never loaded into the endings map.
+        let endings = self.vfs.read().1[&file_id];
+        let index = self.analysis.file_line_index(file_id)?;
+        let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };
+        Ok(res)
+    }
+
+    /// Version of the in-memory (client-owned) document, if the URL maps to
+    /// one; `None` for files only known from disk.
+    pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> {
+        let path = from_proto::vfs_path(url).ok()?;
+        Some(self.mem_docs.get(&path)?.version)
+    }
+
+    /// Resolves a path anchored to another file (e.g. a module candidate)
+    /// into a URL relative to the anchor's parent directory.
+    pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
+        let mut base = self.vfs.read().0.file_path(path.anchor);
+        base.pop();
+        let path = base.join(&path.path).unwrap();
+        let path = path.as_path().unwrap();
+        url_from_abs_path(path)
+    }
+
+    /// Finds the cargo workspace/target whose crate root is `crate_id`'s
+    /// root file. Returns `None` for non-cargo workspaces or unknown roots.
+    pub(crate) fn cargo_target_for_crate_root(
+        &self,
+        crate_id: CrateId,
+    ) -> Option<(&CargoWorkspace, Target)> {
+        let file_id = self.analysis.crate_root(crate_id).ok()?;
+        let path = self.vfs.read().0.file_path(file_id);
+        let path = path.as_path()?;
+        self.workspaces.iter().find_map(|ws| match ws {
+            ProjectWorkspace::Cargo { cargo, .. } => {
+                cargo.target_by_root(path).map(|it| (cargo, it))
+            }
+            ProjectWorkspace::Json { .. } => None,
+            ProjectWorkspace::DetachedFiles { .. } => None,
+        })
+    }
+}
+
+/// Converts a VFS file id to a `file://` URL.
+///
+/// Panics (`unwrap`) if the file has no absolute path — callers only pass
+/// ids of on-disk files.
+pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
+    let path = vfs.file_path(id);
+    let path = path.as_path().unwrap();
+    url_from_abs_path(path)
+}
+
+/// Converts a client URL to the corresponding VFS file id, failing with a
+/// "file not found" error if the VFS does not know the path.
+pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
+    let path = from_proto::vfs_path(url)?;
+    let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?;
+    Ok(res)
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
new file mode 100644
index 000000000..deb777c95
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
@@ -0,0 +1,1892 @@
+//! This module is responsible for implementing handlers for Language Server
+//! Protocol. The majority of requests are fulfilled by calling into the
+//! `ide` crate.
+
+use std::{
+ io::Write as _,
+ process::{self, Stdio},
+};
+
+use anyhow::Context;
+use ide::{
+ AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange,
+ HoverAction, HoverGotoTypeData, Query, RangeInfo, Runnable, RunnableKind, SingleResolve,
+ SourceChange, TextEdit,
+};
+use ide_db::SymbolKind;
+use lsp_server::ErrorCode;
+use lsp_types::{
+ CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
+ CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
+ CodeLens, CompletionItem, Diagnostic, DiagnosticTag, DocumentFormattingParams, FoldingRange,
+ FoldingRangeParams, HoverContents, InlayHint, InlayHintParams, Location, LocationLink,
+ NumberOrString, Position, PrepareRenameResponse, Range, RenameParams,
+ SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
+ SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
+ SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+};
+use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
+use serde_json::json;
+use stdx::{format_to, never};
+use syntax::{algo, ast, AstNode, TextRange, TextSize, T};
+use vfs::AbsPathBuf;
+
+use crate::{
+ cargo_target_spec::CargoTargetSpec,
+ config::{RustfmtConfig, WorkspaceSymbolConfig},
+ diff::diff,
+ from_proto,
+ global_state::{GlobalState, GlobalStateSnapshot},
+ line_index::LineEndings,
+ lsp_ext::{self, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams},
+ lsp_utils::{all_edits_are_disjoint, invalid_params_error},
+ to_proto, LspError, Result,
+};
+
+/// `rust-analyzer/reloadWorkspace`: drops proc-macro servers and queues a
+/// full workspace + build-data re-fetch.
+pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
+    state.proc_macro_clients.clear();
+    state.proc_macro_changed = false;
+    state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
+    state.fetch_build_data_queue.request_op("reload workspace request".to_string());
+    Ok(())
+}
+
+/// `rust-analyzer/analyzerStatus`: renders a human-readable status report
+/// (loaded workspaces plus per-file analysis status when a document is given).
+pub(crate) fn handle_analyzer_status(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::AnalyzerStatusParams,
+) -> Result<String> {
+    let _p = profile::span("handle_analyzer_status");
+
+    let mut buf = String::new();
+
+    let mut file_id = None;
+    if let Some(tdi) = params.text_document {
+        match from_proto::file_id(&snap, &tdi.uri) {
+            Ok(it) => file_id = Some(it),
+            // Note the miss in the report but keep going with global status.
+            Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri),
+        }
+    }
+
+    if snap.workspaces.is_empty() {
+        buf.push_str("No workspaces\n")
+    } else {
+        buf.push_str("Workspaces:\n");
+        format_to!(
+            buf,
+            "Loaded {:?} packages across {} workspace{}.\n",
+            snap.workspaces.iter().map(|w| w.n_packages()).sum::<usize>(),
+            snap.workspaces.len(),
+            if snap.workspaces.len() == 1 { "" } else { "s" }
+        );
+    }
+    buf.push_str("\nAnalysis:\n");
+    buf.push_str(
+        &snap
+            .analysis
+            .status(file_id)
+            .unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()),
+    );
+    Ok(buf)
+}
+
+/// `rust-analyzer/memoryUsage`: reports per-query salsa memory usage plus a
+/// "Remaining" line for all other allocations.
+pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
+    let _p = profile::span("handle_memory_usage");
+    let mut mem = state.analysis_host.per_query_memory_usage();
+    mem.push(("Remaining".into(), profile::memory_usage().allocated));
+
+    let mut out = String::new();
+    for (name, bytes) in mem {
+        format_to!(out, "{:>8} {}\n", bytes, name);
+    }
+    Ok(out)
+}
+
+/// `rust-analyzer/shuffleCrateGraph`: debugging aid that reorders the crate
+/// graph to shake out order-dependent bugs.
+pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Result<()> {
+    state.analysis_host.shuffle_crate_graph();
+    Ok(())
+}
+
+/// `rust-analyzer/syntaxTree`: dumps the syntax tree of a file, optionally
+/// restricted to a range (an unconvertible range is silently ignored).
+pub(crate) fn handle_syntax_tree(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::SyntaxTreeParams,
+) -> Result<String> {
+    let _p = profile::span("handle_syntax_tree");
+    let id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let line_index = snap.file_line_index(id)?;
+    let text_range = params.range.and_then(|r| from_proto::text_range(&line_index, r).ok());
+    let res = snap.analysis.syntax_tree(id, text_range)?;
+    Ok(res)
+}
+
+/// `rust-analyzer/viewHir`: renders the HIR of the item at the cursor.
+pub(crate) fn handle_view_hir(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> Result<String> {
+    let _p = profile::span("handle_view_hir");
+    let position = from_proto::file_position(&snap, params)?;
+    let res = snap.analysis.view_hir(position)?;
+    Ok(res)
+}
+
+/// `rust-analyzer/viewFileText`: returns the file text as the server sees it
+/// (useful for debugging VFS/sync issues).
+pub(crate) fn handle_view_file_text(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentIdentifier,
+) -> Result<String> {
+    let file_id = from_proto::file_id(&snap, &params.uri)?;
+    Ok(snap.analysis.file_text(file_id)?.to_string())
+}
+
+/// `rust-analyzer/viewItemTree`: dumps the item tree of a file.
+pub(crate) fn handle_view_item_tree(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::ViewItemTreeParams,
+) -> Result<String> {
+    let _p = profile::span("handle_view_item_tree");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let res = snap.analysis.view_item_tree(file_id)?;
+    Ok(res)
+}
+
+/// `rust-analyzer/viewCrateGraph`: renders the crate graph as graphviz dot.
+/// The double `??` unwraps both cancellation and the rendering `Result`.
+pub(crate) fn handle_view_crate_graph(
+    snap: GlobalStateSnapshot,
+    params: ViewCrateGraphParams,
+) -> Result<String> {
+    let _p = profile::span("handle_view_crate_graph");
+    let dot = snap.analysis.view_crate_graph(params.full)??;
+    Ok(dot)
+}
+
+/// `rust-analyzer/expandMacro`: expands the macro call at the cursor and
+/// returns its name plus the expansion text, or `None` if there is no macro.
+pub(crate) fn handle_expand_macro(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::ExpandMacroParams,
+) -> Result<Option<lsp_ext::ExpandedMacro>> {
+    let _p = profile::span("handle_expand_macro");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+    let offset = from_proto::offset(&line_index, params.position)?;
+
+    let res = snap.analysis.expand_macro(FilePosition { file_id, offset })?;
+    Ok(res.map(|it| lsp_ext::ExpandedMacro { name: it.name, expansion: it.expansion }))
+}
+
+/// `textDocument/selectionRange`: for each position, grows the selection
+/// via `extend_selection` until it stops changing, then converts the chain
+/// into LSP's linked-list of nested `SelectionRange`s (outermost first).
+pub(crate) fn handle_selection_range(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::SelectionRangeParams,
+) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
+    let _p = profile::span("handle_selection_range");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+    let res: Result<Vec<lsp_types::SelectionRange>> = params
+        .positions
+        .into_iter()
+        .map(|position| {
+            let offset = from_proto::offset(&line_index, position)?;
+            let mut ranges = Vec::new();
+            {
+                // Start with the empty range at the cursor and widen until
+                // extend_selection reaches a fixpoint.
+                let mut range = TextRange::new(offset, offset);
+                loop {
+                    ranges.push(range);
+                    let frange = FileRange { file_id, range };
+                    let next = snap.analysis.extend_selection(frange)?;
+                    if next == range {
+                        break;
+                    } else {
+                        range = next
+                    }
+                }
+            }
+            // Link ranges inner->outer: the widest range is the root parent.
+            let mut range = lsp_types::SelectionRange {
+                range: to_proto::range(&line_index, *ranges.last().unwrap()),
+                parent: None,
+            };
+            for &r in ranges.iter().rev().skip(1) {
+                range = lsp_types::SelectionRange {
+                    range: to_proto::range(&line_index, r),
+                    parent: Some(Box::new(range)),
+                }
+            }
+            Ok(range)
+        })
+        .collect();
+
+    Ok(Some(res?))
+}
+
+/// `experimental/matchingBrace`: for each position, returns the position of
+/// the matching brace, or echoes the input position back when there is no
+/// match (or the query is cancelled).
+pub(crate) fn handle_matching_brace(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::MatchingBraceParams,
+) -> Result<Vec<Position>> {
+    let _p = profile::span("handle_matching_brace");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+    params
+        .positions
+        .into_iter()
+        .map(|position| {
+            let offset = from_proto::offset(&line_index, position);
+            offset.map(|offset| {
+                let offset = match snap.analysis.matching_brace(FilePosition { file_id, offset }) {
+                    Ok(Some(matching_brace_offset)) => matching_brace_offset,
+                    // Fall back to the original offset on miss or error.
+                    Err(_) | Ok(None) => offset,
+                };
+                to_proto::position(&line_index, offset)
+            })
+        })
+        .collect()
+}
+
+/// `experimental/joinLines`: computes the edit that joins the given line
+/// ranges, merging per-range edits and dropping any that overlap.
+pub(crate) fn handle_join_lines(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::JoinLinesParams,
+) -> Result<Vec<lsp_types::TextEdit>> {
+    let _p = profile::span("handle_join_lines");
+
+    let config = snap.config.join_lines();
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+
+    let mut res = TextEdit::default();
+    for range in params.ranges {
+        let range = from_proto::text_range(&line_index, range)?;
+        let edit = snap.analysis.join_lines(&config, FileRange { file_id, range })?;
+        match res.union(edit) {
+            Ok(()) => (),
+            Err(_edit) => {
+                // just ignore overlapping edits
+            }
+        }
+    }
+
+    Ok(to_proto::text_edit_vec(&line_index, res))
+}
+
+/// `experimental/onEnter`: computes the smart-newline edit (e.g. continuing
+/// a doc comment) at the cursor; `None` means use the editor's default.
+pub(crate) fn handle_on_enter(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+    let _p = profile::span("handle_on_enter");
+    let position = from_proto::file_position(&snap, params)?;
+    let edit = match snap.analysis.on_enter(position)? {
+        None => return Ok(None),
+        Some(it) => it,
+    };
+    let line_index = snap.file_line_index(position.file_id)?;
+    // `true`: on-enter edits are always snippet edits (cursor placement).
+    let edit = to_proto::snippet_text_edit_vec(&line_index, true, edit);
+    Ok(Some(edit))
+}
+
+/// `textDocument/onTypeFormatting`: reacts to a just-typed character (`=`,
+/// `.`, etc.) with an automatic edit, or `None` to do nothing.
+pub(crate) fn handle_on_type_formatting(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::DocumentOnTypeFormattingParams,
+) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+    let _p = profile::span("handle_on_type_formatting");
+    let mut position = from_proto::file_position(&snap, params.text_document_position)?;
+    let line_index = snap.file_line_index(position.file_id)?;
+
+    // in `ide`, the `on_type` invariant is that
+    // `text.char_at(position) == typed_char`.
+    // Step back by one byte (the UTF-8 length of '.') so the offset points
+    // at the character that was just typed.
+    position.offset -= TextSize::of('.');
+    let char_typed = params.ch.chars().next().unwrap_or('\0');
+
+    let text = snap.analysis.file_text(position.file_id)?;
+    if stdx::never!(!text[usize::from(position.offset)..].starts_with(char_typed)) {
+        return Ok(None);
+    }
+
+    // We have an assist that inserts ` ` after typing `->` in `fn foo() ->{`,
+    // but it requires precise cursor positioning to work, and one can't
+    // position the cursor with on_type formatting. So, let's just toggle this
+    // feature off here, hoping that we'll enable it one day, 😿.
+    if char_typed == '>' {
+        return Ok(None);
+    }
+
+    let edit =
+        snap.analysis.on_char_typed(position, char_typed, snap.config.typing_autoclose_angle())?;
+    let edit = match edit {
+        Some(it) => it,
+        None => return Ok(None),
+    };
+
+    // This should be a single-file edit
+    let (_, text_edit) = edit.source_file_edits.into_iter().next().unwrap();
+
+    let change = to_proto::snippet_text_edit_vec(&line_index, edit.is_snippet, text_edit);
+    Ok(Some(change))
+}
+
+/// `textDocument/documentSymbol`: returns the file's structure either as a
+/// hierarchical `DocumentSymbol` tree or, for clients that don't support
+/// hierarchy, as a flat `SymbolInformation` list.
+pub(crate) fn handle_document_symbol(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::DocumentSymbolParams,
+) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
+    let _p = profile::span("handle_document_symbol");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+
+    // Flat list of (symbol, index-of-parent) in file order.
+    let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new();
+
+    for symbol in snap.analysis.file_structure(file_id)? {
+        let mut tags = Vec::new();
+        if symbol.deprecated {
+            tags.push(SymbolTag::DEPRECATED)
+        };
+
+        #[allow(deprecated)]
+        let doc_symbol = lsp_types::DocumentSymbol {
+            name: symbol.label,
+            detail: symbol.detail,
+            kind: to_proto::structure_node_kind(symbol.kind),
+            tags: Some(tags),
+            deprecated: Some(symbol.deprecated),
+            range: to_proto::range(&line_index, symbol.node_range),
+            selection_range: to_proto::range(&line_index, symbol.navigation_range),
+            children: None,
+        };
+        parents.push((doc_symbol, symbol.parent));
+    }
+
+    // Builds hierarchy from a flat list, in reverse order (so that indices
+    // makes sense)
+    let document_symbols = {
+        let mut acc = Vec::new();
+        while let Some((mut node, parent_idx)) = parents.pop() {
+            // Children were appended in reverse; restore file order.
+            if let Some(children) = &mut node.children {
+                children.reverse();
+            }
+            let parent = match parent_idx {
+                None => &mut acc,
+                Some(i) => parents[i].0.children.get_or_insert_with(Vec::new),
+            };
+            parent.push(node);
+        }
+        acc.reverse();
+        acc
+    };
+
+    let res = if snap.config.hierarchical_symbols() {
+        document_symbols.into()
+    } else {
+        // Client can't render nesting: flatten the tree depth-first.
+        let url = to_proto::url(&snap, file_id);
+        let mut symbol_information = Vec::<SymbolInformation>::new();
+        for symbol in document_symbols {
+            flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
+        }
+        symbol_information.into()
+    };
+    return Ok(Some(res));
+
+    // Recursively appends `symbol` and its children as flat entries,
+    // threading the parent's name through as `container_name`.
+    fn flatten_document_symbol(
+        symbol: &lsp_types::DocumentSymbol,
+        container_name: Option<String>,
+        url: &Url,
+        res: &mut Vec<SymbolInformation>,
+    ) {
+        let mut tags = Vec::new();
+
+        #[allow(deprecated)]
+        if let Some(true) = symbol.deprecated {
+            tags.push(SymbolTag::DEPRECATED)
+        }
+
+        #[allow(deprecated)]
+        res.push(SymbolInformation {
+            name: symbol.name.clone(),
+            kind: symbol.kind,
+            tags: Some(tags),
+            deprecated: symbol.deprecated,
+            location: Location::new(url.clone(), symbol.range),
+            container_name,
+        });
+
+        for child in symbol.children.iter().flatten() {
+            flatten_document_symbol(child, Some(symbol.name.clone()), url, res);
+        }
+    }
+}
+
+/// `workspace/symbol`: fuzzy symbol search across the workspace.
+///
+/// Scope (workspace vs. deps) and kind (types vs. all) come from legacy
+/// in-query markers (`*`, `#`), then request params, then global config.
+/// A types-only search that finds nothing is retried over all symbols.
+pub(crate) fn handle_workspace_symbol(
+    snap: GlobalStateSnapshot,
+    params: WorkspaceSymbolParams,
+) -> Result<Option<Vec<SymbolInformation>>> {
+    let _p = profile::span("handle_workspace_symbol");
+
+    let config = snap.config.workspace_symbol();
+    let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config);
+    let limit = config.search_limit;
+
+    let query = {
+        // Strip the legacy marker characters from the text actually searched.
+        let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect();
+        let mut q = Query::new(query);
+        if !all_symbols {
+            q.only_types();
+        }
+        if libs {
+            q.libs();
+        }
+        q.limit(limit);
+        q
+    };
+    let mut res = exec_query(&snap, query)?;
+    if res.is_empty() && !all_symbols {
+        // Types-only search found nothing: retry without the restriction.
+        let mut query = Query::new(params.query);
+        query.limit(limit);
+        res = exec_query(&snap, query)?;
+    }
+
+    return Ok(Some(res));
+
+    // Returns `(all_symbols, libs)` resolved from markers/params/config.
+    fn decide_search_scope_and_kind(
+        params: &WorkspaceSymbolParams,
+        config: &WorkspaceSymbolConfig,
+    ) -> (bool, bool) {
+        // Support old-style parsing of markers in the query.
+        let mut all_symbols = params.query.contains('#');
+        let mut libs = params.query.contains('*');
+
+        // If no explicit marker was set, check request params. If that's also empty
+        // use global config.
+        if !all_symbols {
+            let search_kind = match params.search_kind {
+                Some(ref search_kind) => search_kind,
+                None => &config.search_kind,
+            };
+            all_symbols = match search_kind {
+                lsp_ext::WorkspaceSymbolSearchKind::OnlyTypes => false,
+                lsp_ext::WorkspaceSymbolSearchKind::AllSymbols => true,
+            }
+        }
+
+        if !libs {
+            let search_scope = match params.search_scope {
+                Some(ref search_scope) => search_scope,
+                None => &config.search_scope,
+            };
+            libs = match search_scope {
+                lsp_ext::WorkspaceSymbolSearchScope::Workspace => false,
+                lsp_ext::WorkspaceSymbolSearchScope::WorkspaceAndDependencies => true,
+            }
+        }
+
+        (all_symbols, libs)
+    }
+
+    // Runs the query and converts each navigation target to LSP form.
+    fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
+        let mut res = Vec::new();
+        for nav in snap.analysis.symbol_search(query)? {
+            let container_name = nav.container_name.as_ref().map(|v| v.to_string());
+
+            #[allow(deprecated)]
+            let info = SymbolInformation {
+                name: nav.name.to_string(),
+                kind: nav
+                    .kind
+                    .map(to_proto::symbol_kind)
+                    .unwrap_or(lsp_types::SymbolKind::VARIABLE),
+                tags: None,
+                location: to_proto::location_from_nav(snap, nav)?,
+                container_name,
+                deprecated: None,
+            };
+            res.push(info);
+        }
+        Ok(res)
+    }
+}
+
+/// `workspace/willRenameFiles`: when files/folders are renamed in the
+/// editor, computes the text edits that rename the corresponding Rust
+/// modules. Only same-directory (single-level) renames are handled, and
+/// renames to/from `mod.rs` are skipped.
+pub(crate) fn handle_will_rename_files(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::RenameFilesParams,
+) -> Result<Option<lsp_types::WorkspaceEdit>> {
+    let _p = profile::span("handle_will_rename_files");
+
+    let source_changes: Vec<SourceChange> = params
+        .files
+        .into_iter()
+        .filter_map(|file_rename| {
+            let from = Url::parse(&file_rename.old_uri).ok()?;
+            let to = Url::parse(&file_rename.new_uri).ok()?;
+
+            let from_path = from.to_file_path().ok()?;
+            let to_path = to.to_file_path().ok()?;
+
+            // Limit to single-level moves for now.
+            match (from_path.parent(), to_path.parent()) {
+                (Some(p1), Some(p2)) if p1 == p2 => {
+                    if from_path.is_dir() {
+                        // A directory rename is treated as renaming the
+                        // module defined by its `mod.rs`.
+                        // add '/' to end of url -- from `file://path/to/folder` to `file://path/to/folder/`
+                        let mut old_folder_name = from_path.file_stem()?.to_str()?.to_string();
+                        old_folder_name.push('/');
+                        let from_with_trailing_slash = from.join(&old_folder_name).ok()?;
+
+                        let imitate_from_url = from_with_trailing_slash.join("mod.rs").ok()?;
+                        let new_file_name = to_path.file_name()?.to_str()?;
+                        Some((
+                            snap.url_to_file_id(&imitate_from_url).ok()?,
+                            new_file_name.to_string(),
+                        ))
+                    } else {
+                        let old_name = from_path.file_stem()?.to_str()?;
+                        let new_name = to_path.file_stem()?.to_str()?;
+                        match (old_name, new_name) {
+                            ("mod", _) => None,
+                            (_, "mod") => None,
+                            _ => Some((snap.url_to_file_id(&from).ok()?, new_name.to_string())),
+                        }
+                    }
+                }
+                _ => None,
+            }
+        })
+        .filter_map(|(file_id, new_name)| {
+            snap.analysis.will_rename_file(file_id, &new_name).ok()?
+        })
+        .collect();
+
+    // Drop file system edits since we're just renaming things on the same level
+    let mut source_changes = source_changes.into_iter();
+    let mut source_change = source_changes.next().unwrap_or_default();
+    source_change.file_system_edits.clear();
+    // no collect here because we want to merge text edits on same file ids
+    source_change.extend(source_changes.flat_map(|it| it.source_file_edits));
+    if source_change.source_file_edits.is_empty() {
+        Ok(None)
+    } else {
+        to_proto::workspace_edit(&snap, source_change).map(Some)
+    }
+}
+
+/// `textDocument/definition`: navigates to the definition of the symbol at
+/// the cursor; `None` when nothing resolves.
+pub(crate) fn handle_goto_definition(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::GotoDefinitionParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+    let _p = profile::span("handle_goto_definition");
+    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+    let nav_info = match snap.analysis.goto_definition(position)? {
+        None => return Ok(None),
+        Some(it) => it,
+    };
+    // `src` is the origin range, used for the client's link underline.
+    let src = FileRange { file_id: position.file_id, range: nav_info.range };
+    let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+    Ok(Some(res))
+}
+
+/// `textDocument/declaration`: like goto-definition but targets the
+/// declaration; falls back to `handle_goto_definition` when there is none.
+pub(crate) fn handle_goto_declaration(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::request::GotoDeclarationParams,
+) -> Result<Option<lsp_types::request::GotoDeclarationResponse>> {
+    let _p = profile::span("handle_goto_declaration");
+    // Params are cloned because the fallback path consumes them again.
+    let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
+    let nav_info = match snap.analysis.goto_declaration(position)? {
+        None => return handle_goto_definition(snap, params),
+        Some(it) => it,
+    };
+    let src = FileRange { file_id: position.file_id, range: nav_info.range };
+    let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+    Ok(Some(res))
+}
+
+/// `textDocument/implementation`: navigates to the implementations of the
+/// trait/type at the cursor; `None` when nothing resolves.
+pub(crate) fn handle_goto_implementation(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::request::GotoImplementationParams,
+) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
+    let _p = profile::span("handle_goto_implementation");
+    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+    let nav_info = match snap.analysis.goto_implementation(position)? {
+        None => return Ok(None),
+        Some(it) => it,
+    };
+    let src = FileRange { file_id: position.file_id, range: nav_info.range };
+    let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+    Ok(Some(res))
+}
+
+/// `textDocument/typeDefinition`: navigates to the definition of the type of
+/// the expression at the cursor; `None` when nothing resolves.
+pub(crate) fn handle_goto_type_definition(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::request::GotoTypeDefinitionParams,
+) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
+    let _p = profile::span("handle_goto_type_definition");
+    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+    let nav_info = match snap.analysis.goto_type_definition(position)? {
+        None => return Ok(None),
+        Some(it) => it,
+    };
+    let src = FileRange { file_id: position.file_id, range: nav_info.range };
+    let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+    Ok(Some(res))
+}
+
+/// Handles the `experimental/parentModule` request.
+///
+/// Three cases, checked in order:
+/// 1. Invoked inside a `Cargo.toml`: return the parent package manifests
+///    found in the loaded cargo workspaces.
+/// 2. Invoked at a crate root file: return that crate's `Cargo.toml`.
+/// 3. Otherwise: return the parent module(s) found by semantic analysis.
+pub(crate) fn handle_parent_module(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+    let _p = profile::span("handle_parent_module");
+    if let Ok(file_path) = &params.text_document.uri.to_file_path() {
+        if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
+            // search workspaces for parent packages or fallback to workspace root
+            let abs_path_buf = match AbsPathBuf::try_from(file_path.to_path_buf()).ok() {
+                Some(abs_path_buf) => abs_path_buf,
+                None => return Ok(None),
+            };
+
+            let manifest_path = match ManifestPath::try_from(abs_path_buf).ok() {
+                Some(manifest_path) => manifest_path,
+                None => return Ok(None),
+            };
+
+            // Only cargo workspaces can have parent manifests; other
+            // workspace kinds (json projects, etc.) are skipped.
+            let links: Vec<LocationLink> = snap
+                .workspaces
+                .iter()
+                .filter_map(|ws| match ws {
+                    ProjectWorkspace::Cargo { cargo, .. } => cargo.parent_manifests(&manifest_path),
+                    _ => None,
+                })
+                .flatten()
+                .map(|parent_manifest_path| LocationLink {
+                    origin_selection_range: None,
+                    target_uri: to_proto::url_from_abs_path(&parent_manifest_path),
+                    // Point at the start of the manifest file.
+                    target_range: Range::default(),
+                    target_selection_range: Range::default(),
+                })
+                .collect::<_>();
+            return Ok(Some(links.into()));
+        }
+
+        // check if invoked at the crate root
+        let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+        let crate_id = match snap.analysis.crate_for(file_id)?.first() {
+            Some(&crate_id) => crate_id,
+            None => return Ok(None),
+        };
+        let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
+            Some(it) => it,
+            None => return Ok(None),
+        };
+
+        // At the crate root (`lib.rs`/`main.rs`) the "parent module" is the
+        // package manifest itself.
+        if snap.analysis.crate_root(crate_id)? == file_id {
+            let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
+            let res = vec![LocationLink {
+                origin_selection_range: None,
+                target_uri: cargo_toml_url,
+                target_range: Range::default(),
+                target_selection_range: Range::default(),
+            }]
+            .into();
+            return Ok(Some(res));
+        }
+    }
+
+    // locate parent module by semantics
+    let position = from_proto::file_position(&snap, params)?;
+    let navs = snap.analysis.parent_module(position)?;
+    let res = to_proto::goto_definition_response(&snap, None, navs)?;
+    Ok(Some(res))
+}
+
+/// Handles the `experimental/runnables` request: collect runnables (tests,
+/// binaries, doctests, ...) for a file, optionally filtered to the ones
+/// containing `params.position`, plus package/workspace-level `cargo check`
+/// and `cargo test` entries.
+pub(crate) fn handle_runnables(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::RunnablesParams,
+) -> Result<Vec<lsp_ext::Runnable>> {
+    let _p = profile::span("handle_runnables");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+    // Position is optional; when absent, all runnables in the file are returned.
+    let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
+    let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+
+    // Detect whether the cursor sits inside an `expect!`/`expect_file!` macro
+    // call; if so, the test runnables get an extra "update expect" variant.
+    let expect_test = match offset {
+        Some(offset) => {
+            let source_file = snap.analysis.parse(file_id)?;
+            algo::find_node_at_offset::<ast::MacroCall>(source_file.syntax(), offset)
+                .and_then(|it| it.path()?.segment()?.name_ref())
+                .map_or(false, |it| it.text() == "expect" || it.text() == "expect_file")
+        }
+        None => false,
+    };
+
+    let mut res = Vec::new();
+    for runnable in snap.analysis.runnables(file_id)? {
+        // When a position was given, keep only runnables containing it.
+        if let Some(offset) = offset {
+            if !runnable.nav.full_range.contains_inclusive(offset) {
+                continue;
+            }
+        }
+        if should_skip_target(&runnable, cargo_spec.as_ref()) {
+            continue;
+        }
+        let mut runnable = to_proto::runnable(&snap, runnable)?;
+        if expect_test {
+            runnable.label = format!("{} + expect", runnable.label);
+            runnable.args.expect_test = Some(true);
+        }
+        res.push(runnable);
+    }
+
+    // Add `cargo check` and `cargo test` for all targets of the whole package
+    let config = snap.config.runnables();
+    match cargo_spec {
+        Some(spec) => {
+            for cmd in ["check", "test"] {
+                res.push(lsp_ext::Runnable {
+                    label: format!("cargo {} -p {} --all-targets", cmd, spec.package),
+                    location: None,
+                    kind: lsp_ext::RunnableKind::Cargo,
+                    args: lsp_ext::CargoRunnable {
+                        workspace_root: Some(spec.workspace_root.clone().into()),
+                        override_cargo: config.override_cargo.clone(),
+                        cargo_args: vec![
+                            cmd.to_string(),
+                            "--package".to_string(),
+                            spec.package.clone(),
+                            "--all-targets".to_string(),
+                        ],
+                        cargo_extra_args: config.cargo_extra_args.clone(),
+                        executable_args: Vec::new(),
+                        expect_test: None,
+                    },
+                })
+            }
+        }
+        None => {
+            // No cargo target for this file: offer a workspace-wide check,
+            // but only if some project (linked or discovered) is loaded.
+            if !snap.config.linked_projects().is_empty()
+                || !snap
+                    .config
+                    .discovered_projects
+                    .as_ref()
+                    .map(|projects| projects.is_empty())
+                    .unwrap_or(true)
+            {
+                res.push(lsp_ext::Runnable {
+                    label: "cargo check --workspace".to_string(),
+                    location: None,
+                    kind: lsp_ext::RunnableKind::Cargo,
+                    args: lsp_ext::CargoRunnable {
+                        workspace_root: None,
+                        override_cargo: config.override_cargo,
+                        cargo_args: vec!["check".to_string(), "--workspace".to_string()],
+                        cargo_extra_args: config.cargo_extra_args,
+                        executable_args: Vec::new(),
+                        expect_test: None,
+                    },
+                });
+            }
+        }
+    }
+    Ok(res)
+}
+
+/// Handles the `rust-analyzer/relatedTests` request: find tests related to
+/// the item at the given position. Tests that fail to convert to the LSP
+/// runnable representation are silently dropped (best effort).
+pub(crate) fn handle_related_tests(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> Result<Vec<lsp_ext::TestInfo>> {
+    let _p = profile::span("handle_related_tests");
+    let position = from_proto::file_position(&snap, params)?;
+
+    let tests = snap.analysis.related_tests(position, None)?;
+    let mut res = Vec::new();
+    for it in tests {
+        if let Ok(runnable) = to_proto::runnable(&snap, it) {
+            res.push(lsp_ext::TestInfo { runnable })
+        }
+    }
+
+    Ok(res)
+}
+
+/// Handles `textDocument/completion`.
+///
+/// Completions triggered by a single `:` (as opposed to a full `::`) are
+/// suppressed, since a lone colon is almost never a path position.
+pub(crate) fn handle_completion(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::CompletionParams,
+) -> Result<Option<lsp_types::CompletionResponse>> {
+    let _p = profile::span("handle_completion");
+    let text_document_position = params.text_document_position.clone();
+    let position = from_proto::file_position(&snap, params.text_document_position)?;
+    let completion_trigger_character =
+        params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
+
+    if Some(':') == completion_trigger_character {
+        let source_file = snap.analysis.parse(position.file_id)?;
+        let left_token = source_file.syntax().token_at_offset(position.offset).left_biased();
+        // A preceding `:` token means the user typed `::`; no token at all
+        // (start of file) is treated as a single colon and suppressed too.
+        let completion_triggered_after_single_colon = match left_token {
+            Some(left_token) => left_token.kind() == T![:],
+            None => true,
+        };
+        if completion_triggered_after_single_colon {
+            return Ok(None);
+        }
+    }
+
+    let completion_config = &snap.config.completion();
+    let items = match snap.analysis.completions(
+        completion_config,
+        position,
+        completion_trigger_character,
+    )? {
+        None => return Ok(None),
+        Some(items) => items,
+    };
+    let line_index = snap.file_line_index(position.file_id)?;
+
+    let items =
+        to_proto::completion_items(&snap.config, &line_index, text_document_position, items);
+
+    // `is_incomplete: true` makes the client re-query on further typing.
+    let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
+    Ok(Some(completion_list.into()))
+}
+
+/// Handles `completionItem/resolve`: lazily computes the auto-import text
+/// edits for a previously returned completion item, using the resolve data
+/// stashed in the item's `data` field.
+///
+/// Returns the item unchanged when it carries no resolve data; errors when
+/// the item's edits (original or added) are not disjoint, since overlapping
+/// edits are not LSP-compliant.
+pub(crate) fn handle_completion_resolve(
+    snap: GlobalStateSnapshot,
+    mut original_completion: CompletionItem,
+) -> Result<CompletionItem> {
+    let _p = profile::span("handle_completion_resolve");
+
+    // Sanity-check the item the client sent back before doing any work.
+    if !all_edits_are_disjoint(&original_completion, &[]) {
+        return Err(invalid_params_error(
+            "Received a completion with overlapping edits, this is not LSP-compliant".to_string(),
+        )
+        .into());
+    }
+
+    let data = match original_completion.data.take() {
+        Some(it) => it,
+        None => return Ok(original_completion),
+    };
+
+    let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;
+
+    let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+    let offset = from_proto::offset(&line_index, resolve_data.position.position)?;
+
+    let additional_edits = snap
+        .analysis
+        .resolve_completion_edits(
+            &snap.config.completion(),
+            FilePosition { file_id, offset },
+            resolve_data
+                .imports
+                .into_iter()
+                .map(|import| (import.full_import_path, import.imported_name)),
+        )?
+        .into_iter()
+        .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
+        .collect::<Vec<_>>();
+
+    if !all_edits_are_disjoint(&original_completion, &additional_edits) {
+        return Err(LspError::new(
+            ErrorCode::InternalError as i32,
+            "Import edit overlaps with the original completion edits, this is not LSP-compliant"
+                .into(),
+        )
+        .into());
+    }
+
+    // Append the import edits to any the item already carried.
+    if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() {
+        original_additional_edits.extend(additional_edits.into_iter())
+    } else {
+        original_completion.additional_text_edits = Some(additional_edits);
+    }
+
+    Ok(original_completion)
+}
+
+/// Handles `textDocument/foldingRange`: compute folding ranges for a file,
+/// honoring the client's line-folding-only capability.
+pub(crate) fn handle_folding_range(
+    snap: GlobalStateSnapshot,
+    params: FoldingRangeParams,
+) -> Result<Option<Vec<FoldingRange>>> {
+    let _p = profile::span("handle_folding_range");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let folds = snap.analysis.folding_ranges(file_id)?;
+    let text = snap.analysis.file_text(file_id)?;
+    let line_index = snap.file_line_index(file_id)?;
+    let line_folding_only = snap.config.line_folding_only();
+    let res = folds
+        .into_iter()
+        .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
+        .collect();
+    Ok(Some(res))
+}
+
+/// Handles `textDocument/signatureHelp`: show the signature of the call
+/// enclosing the cursor. Returns `Ok(None)` when not inside a call.
+pub(crate) fn handle_signature_help(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::SignatureHelpParams,
+) -> Result<Option<lsp_types::SignatureHelp>> {
+    let _p = profile::span("handle_signature_help");
+    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+    let help = match snap.analysis.signature_help(position)? {
+        Some(it) => it,
+        None => return Ok(None),
+    };
+    let config = snap.config.call_info();
+    let res = to_proto::signature_help(help, config, snap.config.signature_help_label_offsets());
+    Ok(Some(res))
+}
+
+/// Handles the (extended) hover request. Unlike plain LSP hover, the
+/// position may be either a single position or a range, and the response
+/// can carry hover actions (go-to-impl, references, runnables, ...).
+pub(crate) fn handle_hover(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::HoverParams,
+) -> Result<Option<lsp_ext::Hover>> {
+    let _p = profile::span("handle_hover");
+    // Normalize: a bare position becomes an empty range at that position.
+    let range = match params.position {
+        PositionOrRange::Position(position) => Range::new(position, position),
+        PositionOrRange::Range(range) => range,
+    };
+
+    let file_range = from_proto::file_range(&snap, params.text_document, range)?;
+    let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+        None => return Ok(None),
+        Some(info) => info,
+    };
+
+    let line_index = snap.file_line_index(file_range.file_id)?;
+    let range = to_proto::range(&line_index, info.range);
+    // Default to Markdown when the config does not pin a documentation format.
+    let markup_kind =
+        snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
+    let hover = lsp_ext::Hover {
+        hover: lsp_types::Hover {
+            contents: HoverContents::Markup(to_proto::markup_content(
+                info.info.markup,
+                markup_kind,
+            )),
+            range: Some(range),
+        },
+        // Skip the (potentially expensive) action computation when all hover
+        // actions are disabled.
+        actions: if snap.config.hover_actions().none() {
+            Vec::new()
+        } else {
+            prepare_hover_actions(&snap, &info.info.actions)
+        },
+    };
+
+    Ok(Some(hover))
+}
+
+/// Handles `textDocument/prepareRename`: check that the symbol at the cursor
+/// is renamable and return the range the client should highlight for editing.
+pub(crate) fn handle_prepare_rename(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<PrepareRenameResponse>> {
+    let _p = profile::span("handle_prepare_rename");
+    let position = from_proto::file_position(&snap, params)?;
+
+    // A non-renamable position surfaces as an LSP error, not as `None`.
+    let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
+
+    let line_index = snap.file_line_index(position.file_id)?;
+    let range = to_proto::range(&line_index, change.range);
+    Ok(Some(PrepareRenameResponse::Range(range)))
+}
+
+/// Handles `textDocument/rename`: compute the workspace edit that renames the
+/// symbol at the cursor to `params.new_name`.
+pub(crate) fn handle_rename(
+    snap: GlobalStateSnapshot,
+    params: RenameParams,
+) -> Result<Option<WorkspaceEdit>> {
+    let _p = profile::span("handle_rename");
+    let position = from_proto::file_position(&snap, params.text_document_position)?;
+
+    let mut change =
+        snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?;
+
+    // this is kind of a hack to prevent double edits from happening when moving files
+    // When a module gets renamed by renaming the mod declaration this causes the file to move
+    // which in turn will trigger a WillRenameFiles request to the server for which we reply with a
+    // a second identical set of renames, the client will then apply both edits causing incorrect edits
+    // with this we only emit source_file_edits in the WillRenameFiles response which will do the rename instead
+    // See https://github.com/microsoft/vscode-languageserver-node/issues/752 for more info
+    if !change.file_system_edits.is_empty() && snap.config.will_rename() {
+        change.source_file_edits.clear();
+    }
+    let workspace_edit = to_proto::workspace_edit(&snap, change)?;
+    Ok(Some(workspace_edit))
+}
+
+/// Handles `textDocument/references`: list all references to the symbol at
+/// the cursor, optionally including its declaration
+/// (`params.context.include_declaration`).
+pub(crate) fn handle_references(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::ReferenceParams,
+) -> Result<Option<Vec<Location>>> {
+    let _p = profile::span("handle_references");
+    let position = from_proto::file_position(&snap, params.text_document_position)?;
+
+    let refs = match snap.analysis.find_all_refs(position, None)? {
+        None => return Ok(None),
+        Some(refs) => refs,
+    };
+
+    let include_declaration = params.context.include_declaration;
+    let locations = refs
+        .into_iter()
+        .flat_map(|refs| {
+            // The declaration is appended to each search result only when the
+            // client asked for it.
+            let decl = if include_declaration {
+                refs.declaration.map(|decl| FileRange {
+                    file_id: decl.nav.file_id,
+                    range: decl.nav.focus_or_full_range(),
+                })
+            } else {
+                None
+            };
+            refs.references
+                .into_iter()
+                .flat_map(|(file_id, refs)| {
+                    refs.into_iter().map(move |(range, _)| FileRange { file_id, range })
+                })
+                .chain(decl)
+        })
+        // Ranges that fail to convert to LSP locations are dropped (best effort).
+        .filter_map(|frange| to_proto::location(&snap, frange).ok())
+        .collect();
+
+    Ok(Some(locations))
+}
+
+/// Handles `textDocument/formatting` by delegating to rustfmt over the whole
+/// document (`None` range).
+pub(crate) fn handle_formatting(
+    snap: GlobalStateSnapshot,
+    params: DocumentFormattingParams,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+    let _p = profile::span("handle_formatting");
+
+    run_rustfmt(&snap, params.text_document, None)
+}
+
+/// Handles `textDocument/rangeFormatting` by delegating to rustfmt restricted
+/// to the requested range.
+pub(crate) fn handle_range_formatting(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::DocumentRangeFormattingParams,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+    let _p = profile::span("handle_range_formatting");
+
+    run_rustfmt(&snap, params.text_document, Some(params.range))
+}
+
+/// Handles `textDocument/codeAction`: collect assists and quick fixes for the
+/// requested range, plus any cached fixes produced by `cargo check`.
+///
+/// When the client supports `codeAction/resolve`, edits are computed lazily:
+/// each action only carries resolve data here and the edit is filled in by
+/// `handle_code_action_resolve`.
+pub(crate) fn handle_code_action(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::CodeActionParams,
+) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
+    let _p = profile::span("handle_code_action");
+
+    if !snap.config.code_action_literals() {
+        // We intentionally don't support command-based actions, as those either
+        // require either custom client-code or server-initiated edits. Server
+        // initiated edits break causality, so we avoid those.
+        return Ok(None);
+    }
+
+    let line_index =
+        snap.file_line_index(from_proto::file_id(&snap, &params.text_document.uri)?)?;
+    let frange = from_proto::file_range(&snap, params.text_document.clone(), params.range)?;
+
+    // Restrict assists to the kinds the client listed in `context.only`, if any.
+    let mut assists_config = snap.config.assist();
+    assists_config.allowed = params
+        .context
+        .only
+        .clone()
+        .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+
+    let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
+
+    let code_action_resolve_cap = snap.config.code_action_resolve();
+    // With resolve support we skip computing edits now; without it, every
+    // assist must be fully resolved up front.
+    let resolve = if code_action_resolve_cap {
+        AssistResolveStrategy::None
+    } else {
+        AssistResolveStrategy::All
+    };
+    let assists = snap.analysis.assists_with_fixes(
+        &assists_config,
+        &snap.config.diagnostics(),
+        resolve,
+        frange,
+    )?;
+    for (index, assist) in assists.into_iter().enumerate() {
+        // The index + original params let the resolve handler re-find this assist.
+        let resolve_data =
+            if code_action_resolve_cap { Some((index, params.clone())) } else { None };
+        let code_action = to_proto::code_action(&snap, assist, resolve_data)?;
+        res.push(code_action)
+    }
+
+    // Fixes from `cargo check`.
+    for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() {
+        // FIXME: this mapping is awkward and shouldn't exist. Refactor
+        // `snap.check_fixes` to not convert to LSP prematurely.
+        let intersect_fix_range = fix
+            .ranges
+            .iter()
+            .copied()
+            .filter_map(|range| from_proto::text_range(&line_index, range).ok())
+            .any(|fix_range| fix_range.intersect(frange.range).is_some());
+        if intersect_fix_range {
+            res.push(fix.action.clone());
+        }
+    }
+
+    Ok(Some(res))
+}
+
+/// Handles `codeAction/resolve`: compute the edit for a code action that was
+/// returned without one by `handle_code_action`.
+///
+/// Re-runs the assist computation with the parameters stashed in the action's
+/// `data` field and picks out the single assist identified by the encoded
+/// `(id, kind, index)`; errors if the action no longer matches (e.g. the file
+/// changed in between).
+pub(crate) fn handle_code_action_resolve(
+    snap: GlobalStateSnapshot,
+    mut code_action: lsp_ext::CodeAction,
+) -> Result<lsp_ext::CodeAction> {
+    let _p = profile::span("handle_code_action_resolve");
+    let params = match code_action.data.take() {
+        Some(it) => it,
+        None => return Err(invalid_params_error("code action without data".to_string()).into()),
+    };
+
+    let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+    let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
+    let frange = FileRange { file_id, range };
+
+    // Reconstruct the same assist filter the original request used.
+    let mut assists_config = snap.config.assist();
+    assists_config.allowed = params
+        .code_action_params
+        .context
+        .only
+        .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+
+    let (assist_index, assist_resolve) = match parse_action_id(&params.id) {
+        Ok(parsed_data) => parsed_data,
+        Err(e) => {
+            return Err(invalid_params_error(format!(
+                "Failed to parse action id string '{}': {}",
+                params.id, e
+            ))
+            .into())
+        }
+    };
+
+    let expected_assist_id = assist_resolve.assist_id.clone();
+    let expected_kind = assist_resolve.assist_kind;
+
+    // Only the single requested assist is resolved (edits computed) this time.
+    let assists = snap.analysis.assists_with_fixes(
+        &assists_config,
+        &snap.config.diagnostics(),
+        AssistResolveStrategy::Single(assist_resolve),
+        frange,
+    )?;
+
+    let assist = match assists.get(assist_index) {
+        Some(assist) => assist,
+        None => return Err(invalid_params_error(format!(
+            "Failed to find the assist for index {} provided by the resolve request. Resolve request assist id: {}",
+            assist_index, params.id,
+        ))
+        .into())
+    };
+    // Guard against the assist list having changed since the original request.
+    if assist.id.0 != expected_assist_id || assist.id.1 != expected_kind {
+        return Err(invalid_params_error(format!(
+            "Mismatching assist at index {} for the resolve parameters given. Resolve request assist id: {}, actual id: {:?}.",
+            assist_index, params.id, assist.id
+        ))
+        .into());
+    }
+    let ca = to_proto::code_action(&snap, assist.clone(), None)?;
+    code_action.edit = ca.edit;
+    code_action.command = ca.command;
+    Ok(code_action)
+}
+
+/// Parses a code-action id of the form `"<assist_id>:<assist_kind>:<index>"`
+/// (as produced when serializing resolve data) into the assist's index and a
+/// `SingleResolve` describing which assist to recompute.
+fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
+    let id_parts = action_id.split(':').collect::<Vec<_>>();
+    match id_parts.as_slice() {
+        [assist_id_string, assist_kind_string, index_string] => {
+            let assist_kind: AssistKind = assist_kind_string.parse()?;
+            let index: usize = match index_string.parse() {
+                Ok(index) => index,
+                Err(e) => return Err(format!("Incorrect index string: {}", e)),
+            };
+            Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
+        }
+        _ => Err("Action id contains incorrect number of segments".to_string()),
+    }
+}
+
+/// Handles `textDocument/codeLens`: produce lenses (run/debug, impl counts,
+/// reference counts, ...) from the annotation engine, driven by the lens
+/// configuration.
+pub(crate) fn handle_code_lens(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::CodeLensParams,
+) -> Result<Option<Vec<CodeLens>>> {
+    let _p = profile::span("handle_code_lens");
+
+    let lens_config = snap.config.lens();
+    if lens_config.none() {
+        // early return before any db query!
+        return Ok(Some(Vec::default()));
+    }
+
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let cargo_target_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+
+    let annotations = snap.analysis.annotations(
+        &AnnotationConfig {
+            // Only runnable cargo target kinds get a binary-target annotation.
+            binary_target: cargo_target_spec
+                .map(|spec| {
+                    matches!(
+                        spec.target_kind,
+                        TargetKind::Bin | TargetKind::Example | TargetKind::Test
+                    )
+                })
+                .unwrap_or(false),
+            annotate_runnables: lens_config.runnable(),
+            annotate_impls: lens_config.implementations,
+            annotate_references: lens_config.refs_adt,
+            annotate_method_references: lens_config.method_refs,
+            annotate_enum_variant_references: lens_config.enum_variant_refs,
+        },
+        file_id,
+    )?;
+
+    let mut res = Vec::new();
+    for a in annotations {
+        // One annotation may expand into multiple lenses.
+        to_proto::code_lens(&mut res, &snap, a)?;
+    }
+
+    Ok(Some(res))
+}
+
+/// Handles `codeLens/resolve`: fill in the command of a lazily-returned lens
+/// by re-resolving its annotation.
+///
+/// A resolved annotation is expected to map back to exactly one lens; if it
+/// does not, the original (unresolved) lens is returned and the invariant
+/// violation is reported via `never!`.
+pub(crate) fn handle_code_lens_resolve(
+    snap: GlobalStateSnapshot,
+    code_lens: CodeLens,
+) -> Result<CodeLens> {
+    let annotation = from_proto::annotation(&snap, code_lens.clone())?;
+    let annotation = snap.analysis.resolve_annotation(annotation)?;
+
+    let mut acc = Vec::new();
+    to_proto::code_lens(&mut acc, &snap, annotation)?;
+
+    let res = match acc.pop() {
+        Some(it) if acc.is_empty() => it,
+        _ => {
+            never!();
+            code_lens
+        }
+    };
+
+    Ok(res)
+}
+
+/// Handles `textDocument/documentHighlight`: highlight ranges related to the
+/// symbol at the cursor (reads/writes/other occurrences) within the file.
+pub(crate) fn handle_document_highlight(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::DocumentHighlightParams,
+) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> {
+    let _p = profile::span("handle_document_highlight");
+    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+    let line_index = snap.file_line_index(position.file_id)?;
+
+    let refs = match snap.analysis.highlight_related(snap.config.highlight_related(), position)? {
+        None => return Ok(None),
+        Some(refs) => refs,
+    };
+    let res = refs
+        .into_iter()
+        .map(|ide::HighlightedRange { range, category }| lsp_types::DocumentHighlight {
+            range: to_proto::range(&line_index, range),
+            // The category (read/write), when present, maps to the LSP highlight kind.
+            kind: category.map(to_proto::document_highlight_kind),
+        })
+        .collect();
+    Ok(Some(res))
+}
+
+/// Handles the `experimental/ssr` (structural search replace) request.
+///
+/// Note the double `?`: the outer one propagates the analysis-cancellation
+/// error, the inner one the SSR parse/apply error.
+pub(crate) fn handle_ssr(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::SsrParams,
+) -> Result<lsp_types::WorkspaceEdit> {
+    let _p = profile::span("handle_ssr");
+    // Current editor selections restrict where replacements may apply.
+    let selections = params
+        .selections
+        .iter()
+        .map(|range| from_proto::file_range(&snap, params.position.text_document.clone(), *range))
+        .collect::<Result<Vec<_>, _>>()?;
+    let position = from_proto::file_position(&snap, params.position)?;
+    let source_change = snap.analysis.structural_search_replace(
+        &params.query,
+        params.parse_only,
+        position,
+        selections,
+    )??;
+    to_proto::workspace_edit(&snap, source_change)
+}
+
+/// Computes the native (non-cargo-check) diagnostics for `file_id`, converted
+/// to LSP `Diagnostic`s, for publishing via `textDocument/publishDiagnostics`.
+pub(crate) fn publish_diagnostics(
+    snap: &GlobalStateSnapshot,
+    file_id: FileId,
+) -> Result<Vec<Diagnostic>> {
+    let _p = profile::span("publish_diagnostics");
+    let line_index = snap.file_line_index(file_id)?;
+
+    let diagnostics: Vec<Diagnostic> = snap
+        .analysis
+        .diagnostics(&snap.config.diagnostics(), AssistResolveStrategy::None, file_id)?
+        .into_iter()
+        .map(|d| Diagnostic {
+            range: to_proto::range(&line_index, d.range),
+            severity: Some(to_proto::diagnostic_severity(d.severity)),
+            code: Some(NumberOrString::String(d.code.as_str().to_string())),
+            // Link each diagnostic code to its section in the online manual.
+            code_description: Some(lsp_types::CodeDescription {
+                href: lsp_types::Url::parse(&format!(
+                    "https://rust-analyzer.github.io/manual.html#{}",
+                    d.code.as_str()
+                ))
+                .unwrap(),
+            }),
+            source: Some("rust-analyzer".to_string()),
+            // https://github.com/rust-lang/rust-analyzer/issues/11404
+            message: if !d.message.is_empty() { d.message } else { " ".to_string() },
+            related_information: None,
+            tags: if d.unused { Some(vec![DiagnosticTag::UNNECESSARY]) } else { None },
+            data: None,
+        })
+        .collect();
+    Ok(diagnostics)
+}
+
+/// Handles `textDocument/inlayHint`: compute inlay hints (types, parameter
+/// names, ...) for the requested range of a file.
+pub(crate) fn handle_inlay_hints(
+    snap: GlobalStateSnapshot,
+    params: InlayHintParams,
+) -> Result<Option<Vec<InlayHint>>> {
+    let _p = profile::span("handle_inlay_hints");
+    let document_uri = &params.text_document.uri;
+    let file_id = from_proto::file_id(&snap, document_uri)?;
+    let line_index = snap.file_line_index(file_id)?;
+    let range = from_proto::file_range(
+        &snap,
+        TextDocumentIdentifier::new(document_uri.to_owned()),
+        params.range,
+    )?;
+    let inlay_hints_config = snap.config.inlay_hints();
+    Ok(Some(
+        snap.analysis
+            .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+            .into_iter()
+            .map(|it| {
+                to_proto::inlay_hint(&snap, &line_index, inlay_hints_config.render_colons, it)
+            })
+            .collect(),
+    ))
+}
+
+/// Handles `inlayHint/resolve`: lazily attach a tooltip to an inlay hint by
+/// running hover at the position stored in the hint's `data` field.
+///
+/// Hints without resolve data, or whose position yields no hover info, are
+/// returned unchanged.
+pub(crate) fn handle_inlay_hints_resolve(
+    snap: GlobalStateSnapshot,
+    mut hint: InlayHint,
+) -> Result<InlayHint> {
+    let _p = profile::span("handle_inlay_hints_resolve");
+    let data = match hint.data.take() {
+        Some(it) => it,
+        None => return Ok(hint),
+    };
+
+    let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
+
+    let file_range = from_proto::file_range(
+        &snap,
+        resolve_data.text_document,
+        // Normalize a bare position into an empty range.
+        match resolve_data.position {
+            PositionOrRange::Position(pos) => Range::new(pos, pos),
+            PositionOrRange::Range(range) => range,
+        },
+    )?;
+    let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+        None => return Ok(hint),
+        Some(info) => info,
+    };
+
+    let markup_kind =
+        snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
+
+    // FIXME: hover actions?
+    hint.tooltip = Some(lsp_types::InlayHintTooltip::MarkupContent(to_proto::markup_content(
+        info.info.markup,
+        markup_kind,
+    )));
+    Ok(hint)
+}
+
+/// Handles `textDocument/prepareCallHierarchy`: resolve the item at the
+/// cursor into call-hierarchy items, keeping only functions.
+pub(crate) fn handle_call_hierarchy_prepare(
+    snap: GlobalStateSnapshot,
+    params: CallHierarchyPrepareParams,
+) -> Result<Option<Vec<CallHierarchyItem>>> {
+    let _p = profile::span("handle_call_hierarchy_prepare");
+    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+
+    let nav_info = match snap.analysis.call_hierarchy(position)? {
+        None => return Ok(None),
+        Some(it) => it,
+    };
+
+    let RangeInfo { range: _, info: navs } = nav_info;
+    // Only function-like targets make sense as call-hierarchy roots.
+    let res = navs
+        .into_iter()
+        .filter(|it| it.kind == Some(SymbolKind::Function))
+        .map(|it| to_proto::call_hierarchy_item(&snap, it))
+        .collect::<Result<Vec<_>>>()?;
+
+    Ok(Some(res))
+}
+
+/// Handles `callHierarchy/incomingCalls`: list the callers of the item in
+/// `params.item`, with the call-site ranges inside each caller.
+pub(crate) fn handle_call_hierarchy_incoming(
+    snap: GlobalStateSnapshot,
+    params: CallHierarchyIncomingCallsParams,
+) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
+    let _p = profile::span("handle_call_hierarchy_incoming");
+    let item = params.item;
+
+    // The item's selection range pins down the function being queried.
+    let doc = TextDocumentIdentifier::new(item.uri);
+    let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
+    let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+    let call_items = match snap.analysis.incoming_calls(fpos)? {
+        None => return Ok(None),
+        Some(it) => it,
+    };
+
+    let mut res = vec![];
+
+    for call_item in call_items.into_iter() {
+        let file_id = call_item.target.file_id;
+        // Line index of the caller's file, needed to convert call-site ranges.
+        let line_index = snap.file_line_index(file_id)?;
+        let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
+        res.push(CallHierarchyIncomingCall {
+            from: item,
+            from_ranges: call_item
+                .ranges
+                .into_iter()
+                .map(|it| to_proto::range(&line_index, it))
+                .collect(),
+        });
+    }
+
+    Ok(Some(res))
+}
+
+/// Handles `callHierarchy/outgoingCalls`: list the callees of the item in
+/// `params.item`, with the call-site ranges. Mirrors
+/// `handle_call_hierarchy_incoming` with the direction reversed.
+pub(crate) fn handle_call_hierarchy_outgoing(
+    snap: GlobalStateSnapshot,
+    params: CallHierarchyOutgoingCallsParams,
+) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
+    let _p = profile::span("handle_call_hierarchy_outgoing");
+    let item = params.item;
+
+    let doc = TextDocumentIdentifier::new(item.uri);
+    let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
+    let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+    let call_items = match snap.analysis.outgoing_calls(fpos)? {
+        None => return Ok(None),
+        Some(it) => it,
+    };
+
+    let mut res = vec![];
+
+    for call_item in call_items.into_iter() {
+        let file_id = call_item.target.file_id;
+        let line_index = snap.file_line_index(file_id)?;
+        let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
+        res.push(CallHierarchyOutgoingCall {
+            to: item,
+            from_ranges: call_item
+                .ranges
+                .into_iter()
+                .map(|it| to_proto::range(&line_index, it))
+                .collect(),
+        });
+    }
+
+    Ok(Some(res))
+}
+
+/// Handles `textDocument/semanticTokens/full`: compute semantic highlighting
+/// for the whole file and cache the result so a later delta request can diff
+/// against it.
+pub(crate) fn handle_semantic_tokens_full(
+    snap: GlobalStateSnapshot,
+    params: SemanticTokensParams,
+) -> Result<Option<SemanticTokensResult>> {
+    let _p = profile::span("handle_semantic_tokens_full");
+
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let text = snap.analysis.file_text(file_id)?;
+    let line_index = snap.file_line_index(file_id)?;
+
+    let highlights = snap.analysis.highlight(file_id)?;
+    let highlight_strings = snap.config.highlighting_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+
+    // Unconditionally cache the tokens
+    snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
+
+    Ok(Some(semantic_tokens.into()))
+}
+
+/// Handles `textDocument/semanticTokens/full/delta`: recompute the file's
+/// semantic tokens and, when the client's `previous_result_id` matches the
+/// cached tokens, return only the delta; otherwise return the full set.
+/// Either way the cache is updated to the freshly computed tokens.
+pub(crate) fn handle_semantic_tokens_full_delta(
+    snap: GlobalStateSnapshot,
+    params: SemanticTokensDeltaParams,
+) -> Result<Option<SemanticTokensFullDeltaResult>> {
+    let _p = profile::span("handle_semantic_tokens_full_delta");
+
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let text = snap.analysis.file_text(file_id)?;
+    let line_index = snap.file_line_index(file_id)?;
+
+    let highlights = snap.analysis.highlight(file_id)?;
+    let highlight_strings = snap.config.highlighting_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+
+    let mut cache = snap.semantic_tokens_cache.lock();
+    let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
+    // A matching result id means the client's view is what we cached, so a
+    // delta can be computed against it.
+    if let Some(prev_id) = &cached_tokens.result_id {
+        if *prev_id == params.previous_result_id {
+            let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens);
+            *cached_tokens = semantic_tokens;
+            return Ok(Some(delta.into()));
+        }
+    }
+
+    // Cache miss or stale id: store the new tokens and return them in full.
+    *cached_tokens = semantic_tokens.clone();
+
+    Ok(Some(semantic_tokens.into()))
+}
+
+/// Handles `textDocument/semanticTokens/range`: compute semantic highlighting
+/// for just the requested range. Results are not cached (deltas only apply to
+/// full-document requests).
+pub(crate) fn handle_semantic_tokens_range(
+    snap: GlobalStateSnapshot,
+    params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+    let _p = profile::span("handle_semantic_tokens_range");
+
+    let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
+    let text = snap.analysis.file_text(frange.file_id)?;
+    let line_index = snap.file_line_index(frange.file_id)?;
+
+    let highlights = snap.analysis.highlight_range(frange)?;
+    let highlight_strings = snap.config.highlighting_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+    Ok(Some(semantic_tokens.into()))
+}
+
+/// Handles the `experimental/externalDocs` request: return the docs.rs-style
+/// URL for the symbol at the cursor, or `None` when unavailable or the URL
+/// fails to parse.
+pub(crate) fn handle_open_docs(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<lsp_types::Url>> {
+    let _p = profile::span("handle_open_docs");
+    let position = from_proto::file_position(&snap, params)?;
+
+    let remote = snap.analysis.external_docs(position)?;
+
+    Ok(remote.and_then(|remote| Url::parse(&remote).ok()))
+}
+
+/// Handles the `experimental/openCargoToml` request: locate the `Cargo.toml`
+/// of the cargo package containing the given file. Returns `None` when the
+/// file belongs to no cargo target.
+pub(crate) fn handle_open_cargo_toml(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::OpenCargoTomlParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+    let _p = profile::span("handle_open_cargo_toml");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+
+    let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
+        Some(it) => it,
+        None => return Ok(None),
+    };
+
+    // Point at the top of the manifest (default/empty range).
+    let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
+    let res: lsp_types::GotoDefinitionResponse =
+        Location::new(cargo_toml_url, Range::default()).into();
+    Ok(Some(res))
+}
+
+/// Handles the `experimental/moveItem` request: move the item at the given
+/// range up or down and return the resulting snippet edits (empty when the
+/// item cannot be moved).
+pub(crate) fn handle_move_item(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::MoveItemParams,
+) -> Result<Vec<lsp_ext::SnippetTextEdit>> {
+    let _p = profile::span("handle_move_item");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let range = from_proto::file_range(&snap, params.text_document, params.range)?;
+
+    let direction = match params.direction {
+        lsp_ext::MoveItemDirection::Up => ide::Direction::Up,
+        lsp_ext::MoveItemDirection::Down => ide::Direction::Down,
+    };
+
+    match snap.analysis.move_item(range, direction)? {
+        Some(text_edit) => {
+            let line_index = snap.file_line_index(file_id)?;
+            // `true` enables snippet rendering (cursor placement) in the edits.
+            Ok(to_proto::snippet_text_edit_vec(&line_index, true, text_edit))
+        }
+        None => Ok(vec![]),
+    }
+}
+
+fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::CommandLink {
+ lsp_ext::CommandLink { tooltip: Some(tooltip), command }
+}
+
+fn show_impl_command_link(
+ snap: &GlobalStateSnapshot,
+ position: &FilePosition,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if snap.config.hover_actions().implementations && snap.config.client_commands().show_reference {
+ if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) {
+ let uri = to_proto::url(snap, position.file_id);
+ let line_index = snap.file_line_index(position.file_id).ok()?;
+ let position = to_proto::position(&line_index, position.offset);
+ let locations: Vec<_> = nav_data
+ .info
+ .into_iter()
+ .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok())
+ .collect();
+ let title = to_proto::implementation_title(locations.len());
+ let command = to_proto::command::show_references(title, &uri, position, locations);
+
+ return Some(lsp_ext::CommandLinkGroup {
+ commands: vec![to_command_link(command, "Go to implementations".into())],
+ ..Default::default()
+ });
+ }
+ }
+ None
+}
+
+fn show_ref_command_link(
+ snap: &GlobalStateSnapshot,
+ position: &FilePosition,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if snap.config.hover_actions().references && snap.config.client_commands().show_reference {
+ if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) {
+ let uri = to_proto::url(snap, position.file_id);
+ let line_index = snap.file_line_index(position.file_id).ok()?;
+ let position = to_proto::position(&line_index, position.offset);
+ let locations: Vec<_> = ref_search_res
+ .into_iter()
+ .flat_map(|res| res.references)
+ .flat_map(|(file_id, ranges)| {
+ ranges.into_iter().filter_map(move |(range, _)| {
+ to_proto::location(snap, FileRange { file_id, range }).ok()
+ })
+ })
+ .collect();
+ let title = to_proto::reference_title(locations.len());
+ let command = to_proto::command::show_references(title, &uri, position, locations);
+
+ return Some(lsp_ext::CommandLinkGroup {
+ commands: vec![to_command_link(command, "Go to references".into())],
+ ..Default::default()
+ });
+ }
+ }
+ None
+}
+
+fn runnable_action_links(
+ snap: &GlobalStateSnapshot,
+ runnable: Runnable,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ let hover_actions_config = snap.config.hover_actions();
+ if !hover_actions_config.runnable() {
+ return None;
+ }
+
+ let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
+ if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ return None;
+ }
+
+ let client_commands_config = snap.config.client_commands();
+ if !(client_commands_config.run_single || client_commands_config.debug_single) {
+ return None;
+ }
+
+ let title = runnable.title();
+ let r = to_proto::runnable(snap, runnable).ok()?;
+
+ let mut group = lsp_ext::CommandLinkGroup::default();
+
+ if hover_actions_config.run && client_commands_config.run_single {
+ let run_command = to_proto::command::run_single(&r, &title);
+ group.commands.push(to_command_link(run_command, r.label.clone()));
+ }
+
+ if hover_actions_config.debug && client_commands_config.debug_single {
+ let dbg_command = to_proto::command::debug_single(&r);
+ group.commands.push(to_command_link(dbg_command, r.label));
+ }
+
+ Some(group)
+}
+
+fn goto_type_action_links(
+ snap: &GlobalStateSnapshot,
+ nav_targets: &[HoverGotoTypeData],
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if !snap.config.hover_actions().goto_type_def
+ || nav_targets.is_empty()
+ || !snap.config.client_commands().goto_location
+ {
+ return None;
+ }
+
+ Some(lsp_ext::CommandLinkGroup {
+ title: Some("Go to ".into()),
+ commands: nav_targets
+ .iter()
+ .filter_map(|it| {
+ to_proto::command::goto_location(snap, &it.nav)
+ .map(|cmd| to_command_link(cmd, it.mod_path.clone()))
+ })
+ .collect(),
+ })
+}
+
+fn prepare_hover_actions(
+ snap: &GlobalStateSnapshot,
+ actions: &[HoverAction],
+) -> Vec<lsp_ext::CommandLinkGroup> {
+ actions
+ .iter()
+ .filter_map(|it| match it {
+ HoverAction::Implementation(position) => show_impl_command_link(snap, position),
+ HoverAction::Reference(position) => show_ref_command_link(snap, position),
+ HoverAction::Runnable(r) => runnable_action_links(snap, r.clone()),
+ HoverAction::GoToType(targets) => goto_type_action_links(snap, targets),
+ })
+ .collect()
+}
+
+fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>) -> bool {
+ match runnable.kind {
+ RunnableKind::Bin => {
+ // Do not suggest binary run on other target than binary
+ match &cargo_spec {
+ Some(spec) => !matches!(
+ spec.target_kind,
+ TargetKind::Bin | TargetKind::Example | TargetKind::Test
+ ),
+ None => true,
+ }
+ }
+ _ => false,
+ }
+}
+
+fn run_rustfmt(
+ snap: &GlobalStateSnapshot,
+ text_document: TextDocumentIdentifier,
+ range: Option<lsp_types::Range>,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+ let file_id = from_proto::file_id(snap, &text_document.uri)?;
+ let file = snap.analysis.file_text(file_id)?;
+ let crate_ids = snap.analysis.crate_for(file_id)?;
+
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut rustfmt = match snap.config.rustfmt() {
+ RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
+ let mut cmd = process::Command::new(toolchain::rustfmt());
+ cmd.args(extra_args);
+ // try to chdir to the file so we can respect `rustfmt.toml`
+ // FIXME: use `rustfmt --config-path` once
+ // https://github.com/rust-lang/rustfmt/issues/4660 gets fixed
+ match text_document.uri.to_file_path() {
+ Ok(mut path) => {
+ // pop off file name
+ if path.pop() && path.is_dir() {
+ cmd.current_dir(path);
+ }
+ }
+ Err(_) => {
+ tracing::error!(
+ "Unable to get file path for {}, rustfmt.toml might be ignored",
+ text_document.uri
+ );
+ }
+ }
+ if let Some(&crate_id) = crate_ids.first() {
+ // Assume all crates are in the same edition
+ let edition = snap.analysis.crate_edition(crate_id)?;
+ cmd.arg("--edition");
+ cmd.arg(edition.to_string());
+ }
+
+ if let Some(range) = range {
+ if !enable_range_formatting {
+ return Err(LspError::new(
+ ErrorCode::InvalidRequest as i32,
+ String::from(
+ "rustfmt range formatting is unstable. \
+ Opt-in by using a nightly build of rustfmt and setting \
+ `rustfmt.rangeFormatting.enable` to true in your LSP configuration",
+ ),
+ )
+ .into());
+ }
+
+ let frange = from_proto::file_range(snap, text_document, range)?;
+ let start_line = line_index.index.line_col(frange.range.start()).line;
+ let end_line = line_index.index.line_col(frange.range.end()).line;
+
+ cmd.arg("--unstable-features");
+ cmd.arg("--file-lines");
+ cmd.arg(
+ json!([{
+ "file": "stdin",
+ "range": [start_line, end_line]
+ }])
+ .to_string(),
+ );
+ }
+
+ cmd
+ }
+ RustfmtConfig::CustomCommand { command, args } => {
+ let mut cmd = process::Command::new(command);
+ cmd.args(args);
+ cmd
+ }
+ };
+
+ let mut rustfmt = rustfmt
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .context(format!("Failed to spawn {:?}", rustfmt))?;
+
+ rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
+
+ let output = rustfmt.wait_with_output()?;
+ let captured_stdout = String::from_utf8(output.stdout)?;
+ let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default();
+
+ if !output.status.success() {
+ let rustfmt_not_installed =
+ captured_stderr.contains("not installed") || captured_stderr.contains("not available");
+
+ return match output.status.code() {
+ Some(1) if !rustfmt_not_installed => {
+ // While `rustfmt` doesn't have a specific exit code for parse errors this is the
+ // likely cause exiting with 1. Most Language Servers swallow parse errors on
+ // formatting because otherwise an error is surfaced to the user on top of the
+ // syntax error diagnostics they're already receiving. This is especially jarring
+ // if they have format on save enabled.
+ tracing::info!("rustfmt exited with status 1, assuming parse error and ignoring");
+ Ok(None)
+ }
+ _ => {
+ // Something else happened - e.g. `rustfmt` is missing or caught a signal
+ Err(LspError::new(
+ -32900,
+ format!(
+ r#"rustfmt exited with:
+ Status: {}
+ stdout: {}
+ stderr: {}"#,
+ output.status, captured_stdout, captured_stderr,
+ ),
+ )
+ .into())
+ }
+ };
+ }
+
+ let (new_text, new_line_endings) = LineEndings::normalize(captured_stdout);
+
+ if line_index.endings != new_line_endings {
+ // If line endings are different, send the entire file.
+ // Diffing would not work here, as the line endings might be the only
+ // difference.
+ Ok(Some(to_proto::text_edit_vec(
+ &line_index,
+ TextEdit::replace(TextRange::up_to(TextSize::of(&*file)), new_text),
+ )))
+ } else if *file == new_text {
+ // The document is already formatted correctly -- no edits needed.
+ Ok(None)
+ } else {
+ Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text))))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
new file mode 100644
index 000000000..47cdd8dfc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -0,0 +1,196 @@
+//! Fully integrated benchmarks for rust-analyzer, which load real cargo
+//! projects.
+//!
+//! The benchmark here is used to debug specific performance regressions. If you
+//! notice that, eg, completion is slow in some specific case, you can modify
+//! code here to exercise this specific completion, and thus have a fast
+//! edit/compile/test cycle.
+//!
+//! Note that "Rust Analyzer: Run" action does not allow running a single test
+//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
+//! which you can use to paste the command in terminal and add `--release` manually.
+
+use std::sync::Arc;
+
+use ide::{CallableSnippets, Change, CompletionConfig, FilePosition, TextSize};
+use ide_db::{
+ imports::insert_use::{ImportGranularity, InsertUseConfig},
+ SnippetCap,
+};
+use project_model::CargoConfig;
+use test_utils::project_root;
+use vfs::{AbsPathBuf, VfsPath};
+
+use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
+
+#[test]
+fn integrated_highlighting_benchmark() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ // Load rust-analyzer itself.
+ let workspace_to_load = project_root();
+ let file = "./crates/ide-db/src/apply_change.rs";
+
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: false,
+ prefill_caches: false,
+ };
+
+ let (mut host, vfs, _proc_macro) = {
+ let _it = stdx::timeit("workspace loading");
+ load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+ };
+
+ let file_id = {
+ let file = workspace_to_load.join(file);
+ let path = VfsPath::from(AbsPathBuf::assert(file));
+ vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+ };
+
+ {
+ let _it = stdx::timeit("initial");
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+
+ profile::init_from("*>100");
+ // let _s = profile::heartbeat_span();
+
+ {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ text.push_str("\npub fn _dummy() {}\n");
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ }
+
+ {
+ let _it = stdx::timeit("after change");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+}
+
+#[test]
+fn integrated_completion_benchmark() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ // Load rust-analyzer itself.
+ let workspace_to_load = project_root();
+ let file = "./crates/hir/src/lib.rs";
+
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: false,
+ prefill_caches: true,
+ };
+
+ let (mut host, vfs, _proc_macro) = {
+ let _it = stdx::timeit("workspace loading");
+ load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+ };
+
+ let file_id = {
+ let file = workspace_to_load.join(file);
+ let path = VfsPath::from(AbsPathBuf::assert(file));
+ vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+ };
+
+ {
+ let _it = stdx::timeit("initial");
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+
+ profile::init_from("*>5");
+ // let _s = profile::heartbeat_span();
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ + "sel".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
+ {
+ let _p = profile::span("unqualified path completion");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
+ }
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
+ + "self.".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
+ {
+ let _p = profile::span("dot completion");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
+ }
+}
+
+fn patch(what: &mut String, from: &str, to: &str) -> usize {
+ let idx = what.find(from).unwrap();
+ *what = what.replacen(from, to, 1);
+ idx
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
new file mode 100644
index 000000000..552379752
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
@@ -0,0 +1,80 @@
+//! Implementation of the LSP for rust-analyzer.
+//!
+//! This crate takes Rust-specific analysis results from ide and translates
+//! them into LSP types.
+//!
+//! It also is the root of all state. `world` module defines the bulk of the
+//! state, and `main_loop` module defines the rules for modifying it.
+//!
+//! The `cli` submodule implements some batch-processing analysis, primarily as
+//! a debugging aid.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub mod cli;
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod caps;
+mod cargo_target_spec;
+mod diagnostics;
+mod diff;
+mod dispatch;
+mod from_proto;
+mod global_state;
+mod handlers;
+mod line_index;
+mod lsp_utils;
+mod main_loop;
+mod markdown;
+mod mem_docs;
+mod op_queue;
+mod reload;
+mod semantic_tokens;
+mod task_pool;
+mod to_proto;
+mod version;
+
+pub mod config;
+pub mod lsp_ext;
+
+#[cfg(test)]
+mod integrated_benchmarks;
+
+use std::fmt;
+
+use serde::de::DeserializeOwned;
+
+pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
+
+pub type Error = Box<dyn std::error::Error + Send + Sync>;
+pub type Result<T, E = Error> = std::result::Result<T, E>;
+
+pub fn from_json<T: DeserializeOwned>(what: &'static str, json: &serde_json::Value) -> Result<T> {
+ let res = serde_json::from_value(json.clone())
+ .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?;
+ Ok(res)
+}
+
+#[derive(Debug)]
+struct LspError {
+ code: i32,
+ message: String,
+}
+
+impl LspError {
+ fn new(code: i32, message: String) -> LspError {
+ LspError { code, message }
+ }
+}
+
+impl fmt::Display for LspError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Language Server request failed with {}. ({})", self.code, self.message)
+ }
+}
+
+impl std::error::Error for LspError {}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/line_index.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/line_index.rs
new file mode 100644
index 000000000..c116414da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/line_index.rs
@@ -0,0 +1,68 @@
+//! Enhances `ide::LineIndex` with additional info required to convert offsets
+//! into lsp positions.
+//!
+//! We maintain invariant that all internal strings use `\n` as line separator.
+//! This module does line ending conversion and detection (so that we can
+//! convert back to `\r\n` on the way out).
+
+use std::sync::Arc;
+
+pub enum OffsetEncoding {
+ Utf8,
+ Utf16,
+}
+
+pub(crate) struct LineIndex {
+ pub(crate) index: Arc<ide::LineIndex>,
+ pub(crate) endings: LineEndings,
+ pub(crate) encoding: OffsetEncoding,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub(crate) enum LineEndings {
+ Unix,
+ Dos,
+}
+
+impl LineEndings {
+ /// Replaces `\r\n` with `\n` in-place in `src`.
+ pub(crate) fn normalize(src: String) -> (String, LineEndings) {
+ if !src.as_bytes().contains(&b'\r') {
+ return (src, LineEndings::Unix);
+ }
+
+ // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding.
+ // While we *can* call `as_mut_vec` and do surgery on the live string
+ // directly, let's rather steal the contents of `src`. This makes the code
+ // safe even if a panic occurs.
+
+ let mut buf = src.into_bytes();
+ let mut gap_len = 0;
+ let mut tail = buf.as_mut_slice();
+ loop {
+ let idx = match find_crlf(&tail[gap_len..]) {
+ None => tail.len(),
+ Some(idx) => idx + gap_len,
+ };
+ tail.copy_within(gap_len..idx, 0);
+ tail = &mut tail[idx - gap_len..];
+ if tail.len() == gap_len {
+ break;
+ }
+ gap_len += 1;
+ }
+
+ // Account for removed `\r`.
+ // After `set_len`, `buf` is guaranteed to contain utf-8 again.
+ let new_len = buf.len() - gap_len;
+ let src = unsafe {
+ buf.set_len(new_len);
+ String::from_utf8_unchecked(buf)
+ };
+ return (src, LineEndings::Dos);
+
+ fn find_crlf(src: &[u8]) -> Option<usize> {
+ src.windows(2).position(|it| it == b"\r\n")
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
new file mode 100644
index 000000000..5f0e10862
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
@@ -0,0 +1,549 @@
+//! rust-analyzer extensions to the LSP.
+
+use std::{collections::HashMap, path::PathBuf};
+
+use lsp_types::request::Request;
+use lsp_types::{
+ notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
+ PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
+};
+use serde::{Deserialize, Serialize};
+
+pub enum AnalyzerStatus {}
+
+impl Request for AnalyzerStatus {
+ type Params = AnalyzerStatusParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/analyzerStatus";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct AnalyzerStatusParams {
+ pub text_document: Option<TextDocumentIdentifier>,
+}
+
+pub enum MemoryUsage {}
+
+impl Request for MemoryUsage {
+ type Params = ();
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/memoryUsage";
+}
+
+pub enum ShuffleCrateGraph {}
+
+impl Request for ShuffleCrateGraph {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "rust-analyzer/shuffleCrateGraph";
+}
+
+pub enum ReloadWorkspace {}
+
+impl Request for ReloadWorkspace {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "rust-analyzer/reloadWorkspace";
+}
+
+pub enum SyntaxTree {}
+
+impl Request for SyntaxTree {
+ type Params = SyntaxTreeParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/syntaxTree";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct SyntaxTreeParams {
+ pub text_document: TextDocumentIdentifier,
+ pub range: Option<Range>,
+}
+
+pub enum ViewHir {}
+
+impl Request for ViewHir {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewHir";
+}
+
+pub enum ViewFileText {}
+
+impl Request for ViewFileText {
+ type Params = lsp_types::TextDocumentIdentifier;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewFileText";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ViewCrateGraphParams {
+ /// Include *all* crates, not just crates in the workspace.
+ pub full: bool,
+}
+
+pub enum ViewCrateGraph {}
+
+impl Request for ViewCrateGraph {
+ type Params = ViewCrateGraphParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewCrateGraph";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ViewItemTreeParams {
+ pub text_document: TextDocumentIdentifier,
+}
+
+pub enum ViewItemTree {}
+
+impl Request for ViewItemTree {
+ type Params = ViewItemTreeParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewItemTree";
+}
+
+pub enum ExpandMacro {}
+
+impl Request for ExpandMacro {
+ type Params = ExpandMacroParams;
+ type Result = Option<ExpandedMacro>;
+ const METHOD: &'static str = "rust-analyzer/expandMacro";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ExpandMacroParams {
+ pub text_document: TextDocumentIdentifier,
+ pub position: Position,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ExpandedMacro {
+ pub name: String,
+ pub expansion: String,
+}
+
+pub enum MatchingBrace {}
+
+impl Request for MatchingBrace {
+ type Params = MatchingBraceParams;
+ type Result = Vec<Position>;
+ const METHOD: &'static str = "experimental/matchingBrace";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct MatchingBraceParams {
+ pub text_document: TextDocumentIdentifier,
+ pub positions: Vec<Position>,
+}
+
+pub enum ParentModule {}
+
+impl Request for ParentModule {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Option<lsp_types::GotoDefinitionResponse>;
+ const METHOD: &'static str = "experimental/parentModule";
+}
+
+pub enum JoinLines {}
+
+impl Request for JoinLines {
+ type Params = JoinLinesParams;
+ type Result = Vec<lsp_types::TextEdit>;
+ const METHOD: &'static str = "experimental/joinLines";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct JoinLinesParams {
+ pub text_document: TextDocumentIdentifier,
+ pub ranges: Vec<Range>,
+}
+
+pub enum OnEnter {}
+
+impl Request for OnEnter {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Option<Vec<SnippetTextEdit>>;
+ const METHOD: &'static str = "experimental/onEnter";
+}
+
+pub enum Runnables {}
+
+impl Request for Runnables {
+ type Params = RunnablesParams;
+ type Result = Vec<Runnable>;
+ const METHOD: &'static str = "experimental/runnables";
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct RunnablesParams {
+ pub text_document: TextDocumentIdentifier,
+ pub position: Option<Position>,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct Runnable {
+ pub label: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub location: Option<lsp_types::LocationLink>,
+ pub kind: RunnableKind,
+ pub args: CargoRunnable,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "lowercase")]
+pub enum RunnableKind {
+ Cargo,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct CargoRunnable {
+ // command to be executed instead of cargo
+ pub override_cargo: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace_root: Option<PathBuf>,
+ // command, --package and --lib stuff
+ pub cargo_args: Vec<String>,
+ // user-specified additional cargo args, like `--release`.
+ pub cargo_extra_args: Vec<String>,
+ // stuff after --
+ pub executable_args: Vec<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub expect_test: Option<bool>,
+}
+
+pub enum RelatedTests {}
+
+impl Request for RelatedTests {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Vec<TestInfo>;
+ const METHOD: &'static str = "rust-analyzer/relatedTests";
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TestInfo {
+ pub runnable: Runnable,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintsParams {
+ pub text_document: TextDocumentIdentifier,
+ pub range: Option<lsp_types::Range>,
+}
+
+pub enum Ssr {}
+
+impl Request for Ssr {
+ type Params = SsrParams;
+ type Result = lsp_types::WorkspaceEdit;
+ const METHOD: &'static str = "experimental/ssr";
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SsrParams {
+ pub query: String,
+ pub parse_only: bool,
+
+ /// File position where SSR was invoked. Paths in `query` will be resolved relative to this
+ /// position.
+ #[serde(flatten)]
+ pub position: lsp_types::TextDocumentPositionParams,
+
+ /// Current selections. Search/replace will be restricted to these if non-empty.
+ pub selections: Vec<lsp_types::Range>,
+}
+
+pub enum ServerStatusNotification {}
+
+impl Notification for ServerStatusNotification {
+ type Params = ServerStatusParams;
+ const METHOD: &'static str = "experimental/serverStatus";
+}
+
+#[derive(Deserialize, Serialize, PartialEq, Eq, Clone)]
+pub struct ServerStatusParams {
+ pub health: Health,
+ pub quiescent: bool,
+ pub message: Option<String>,
+}
+
+#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+pub enum Health {
+ Ok,
+ Warning,
+ Error,
+}
+
+pub enum CodeActionRequest {}
+
+impl Request for CodeActionRequest {
+ type Params = lsp_types::CodeActionParams;
+ type Result = Option<Vec<CodeAction>>;
+ const METHOD: &'static str = "textDocument/codeAction";
+}
+
+pub enum CodeActionResolveRequest {}
+impl Request for CodeActionResolveRequest {
+ type Params = CodeAction;
+ type Result = CodeAction;
+ const METHOD: &'static str = "codeAction/resolve";
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeAction {
+ pub title: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub group: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<CodeActionKind>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub command: Option<lsp_types::Command>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub edit: Option<SnippetWorkspaceEdit>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub is_preferred: Option<bool>,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<CodeActionData>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionData {
+ pub code_action_params: lsp_types::CodeActionParams,
+ pub id: String,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SnippetWorkspaceEdit {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub changes: Option<HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_changes: Option<Vec<SnippetDocumentChangeOperation>>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub change_annotations:
+ Option<HashMap<lsp_types::ChangeAnnotationIdentifier, lsp_types::ChangeAnnotation>>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged, rename_all = "lowercase")]
+pub enum SnippetDocumentChangeOperation {
+ Op(lsp_types::ResourceOp),
+ Edit(SnippetTextDocumentEdit),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SnippetTextDocumentEdit {
+ pub text_document: lsp_types::OptionalVersionedTextDocumentIdentifier,
+ pub edits: Vec<SnippetTextEdit>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SnippetTextEdit {
+ pub range: Range,
+ pub new_text: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_text_format: Option<lsp_types::InsertTextFormat>,
+    /// The annotation id if this is an annotated edit.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub annotation_id: Option<lsp_types::ChangeAnnotationIdentifier>,
+}
+
+pub enum HoverRequest {}
+
+impl Request for HoverRequest {
+ type Params = HoverParams;
+ type Result = Option<Hover>;
+ const METHOD: &'static str = "textDocument/hover";
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct HoverParams {
+ pub text_document: TextDocumentIdentifier,
+ pub position: PositionOrRange,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum PositionOrRange {
+ Position(lsp_types::Position),
+ Range(lsp_types::Range),
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+pub struct Hover {
+ #[serde(flatten)]
+ pub hover: lsp_types::Hover,
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ pub actions: Vec<CommandLinkGroup>,
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct CommandLinkGroup {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub title: Option<String>,
+ pub commands: Vec<CommandLink>,
+}
+
+// The LSP v3.15 Command does not have a `tooltip` field, but vscode supports one.
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct CommandLink {
+ #[serde(flatten)]
+ pub command: lsp_types::Command,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tooltip: Option<String>,
+}
+
+pub enum ExternalDocs {}
+
+impl Request for ExternalDocs {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Option<lsp_types::Url>;
+ const METHOD: &'static str = "experimental/externalDocs";
+}
+
+pub enum OpenCargoToml {}
+
+impl Request for OpenCargoToml {
+ type Params = OpenCargoTomlParams;
+ type Result = Option<lsp_types::GotoDefinitionResponse>;
+ const METHOD: &'static str = "experimental/openCargoToml";
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct OpenCargoTomlParams {
+ pub text_document: TextDocumentIdentifier,
+}
+
+/// Information about a CodeLens that is to be resolved.
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) enum CodeLensResolveData {
+ Impls(lsp_types::request::GotoImplementationParams),
+ References(lsp_types::TextDocumentPositionParams),
+}
+
+pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool {
+ caps.offset_encoding.as_deref().unwrap_or_default().iter().any(|it| it == "utf-8")
+}
+
+pub enum MoveItem {}
+
+impl Request for MoveItem {
+ type Params = MoveItemParams;
+ type Result = Vec<SnippetTextEdit>;
+ const METHOD: &'static str = "experimental/moveItem";
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct MoveItemParams {
+ pub direction: MoveItemDirection,
+ pub text_document: TextDocumentIdentifier,
+ pub range: Range,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub enum MoveItemDirection {
+ Up,
+ Down,
+}
+
+#[derive(Debug)]
+pub enum WorkspaceSymbol {}
+
+impl Request for WorkspaceSymbol {
+ type Params = WorkspaceSymbolParams;
+ type Result = Option<Vec<lsp_types::SymbolInformation>>;
+ const METHOD: &'static str = "workspace/symbol";
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceSymbolParams {
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ /// A non-empty query string
+ pub query: String,
+
+ pub search_scope: Option<WorkspaceSymbolSearchScope>,
+
+ pub search_kind: Option<WorkspaceSymbolSearchKind>,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub enum WorkspaceSymbolSearchScope {
+ Workspace,
+ WorkspaceAndDependencies,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub enum WorkspaceSymbolSearchKind {
+ OnlyTypes,
+ AllSymbols,
+}
+
+/// The document on type formatting request is sent from the client to
+/// the server to format parts of the document during typing. This is
+/// almost the same as lsp_types::request::OnTypeFormatting, but the
+/// result has SnippetTextEdit in it instead of TextEdit.
+#[derive(Debug)]
+pub enum OnTypeFormatting {}
+
+impl Request for OnTypeFormatting {
+ type Params = DocumentOnTypeFormattingParams;
+ type Result = Option<Vec<SnippetTextEdit>>;
+ const METHOD: &'static str = "textDocument/onTypeFormatting";
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CompletionResolveData {
+ pub position: lsp_types::TextDocumentPositionParams,
+ pub imports: Vec<CompletionImport>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct InlayHintResolveData {
+ pub text_document: TextDocumentIdentifier,
+ pub position: PositionOrRange,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CompletionImport {
+ pub full_import_path: String,
+ pub imported_name: String,
+}
+
+#[derive(Debug, Deserialize, Default)]
+pub struct ClientCommandOptions {
+ pub commands: Vec<String>,
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
new file mode 100644
index 000000000..5a37cbe2e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
@@ -0,0 +1,407 @@
+//! Utilities for LSP-related boilerplate code.
+use std::{ops::Range, sync::Arc};
+
+use lsp_server::Notification;
+
+use crate::{
+ from_proto,
+ global_state::GlobalState,
+ line_index::{LineEndings, LineIndex, OffsetEncoding},
+ LspError,
+};
+
+pub(crate) fn invalid_params_error(message: String) -> LspError {
+ LspError { code: lsp_server::ErrorCode::InvalidParams as i32, message }
+}
+
+pub(crate) fn notification_is<N: lsp_types::notification::Notification>(
+ notification: &Notification,
+) -> bool {
+ notification.method == N::METHOD
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub(crate) enum Progress {
+ Begin,
+ Report,
+ End,
+}
+
+impl Progress {
+ pub(crate) fn fraction(done: usize, total: usize) -> f64 {
+ assert!(done <= total);
+ done as f64 / total.max(1) as f64
+ }
+}
+
+impl GlobalState {
+ pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) {
+ let message = message;
+ self.send_notification::<lsp_types::notification::ShowMessage>(
+ lsp_types::ShowMessageParams { typ, message },
+ )
+ }
+
+ /// Sends a notification to the client containing the error `message`.
+ /// If `additional_info` is [`Some`], appends a note to the notification telling to check the logs.
+ /// This will always log `message` + `additional_info` to the server's error log.
+ pub(crate) fn show_and_log_error(&mut self, message: String, additional_info: Option<String>) {
+ let mut message = message;
+ match additional_info {
+ Some(additional_info) => {
+ tracing::error!("{}\n\n{}", &message, &additional_info);
+ if tracing::enabled!(tracing::Level::ERROR) {
+ message.push_str("\n\nCheck the server logs for additional info.");
+ }
+ }
+ None => tracing::error!("{}", &message),
+ }
+
+ self.send_notification::<lsp_types::notification::ShowMessage>(
+ lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message },
+ )
+ }
+
+ /// rust-analyzer is resilient -- if it fails, this doesn't usually affect
+ /// the user experience. Part of that is that we deliberately hide panics
+ /// from the user.
+ ///
+ /// We do however want to pester rust-analyzer developers with panics and
+ /// other "you really gotta fix that" messages. The current strategy is to
+ /// be noisy for "from source" builds or when profiling is enabled.
+ ///
+ /// It's unclear if making from source `cargo xtask install` builds more
+ /// panicky is a good idea, let's see if we can keep our awesome bleeding
+ /// edge users from being upset!
+ pub(crate) fn poke_rust_analyzer_developer(&mut self, message: String) {
+ let from_source_build = option_env!("POKE_RA_DEVS").is_some();
+ let profiling_enabled = std::env::var("RA_PROFILE").is_ok();
+ if from_source_build || profiling_enabled {
+ self.show_message(lsp_types::MessageType::ERROR, message)
+ }
+ }
+
+ pub(crate) fn report_progress(
+ &mut self,
+ title: &str,
+ state: Progress,
+ message: Option<String>,
+ fraction: Option<f64>,
+ ) {
+ if !self.config.work_done_progress() {
+ return;
+ }
+ let percentage = fraction.map(|f| {
+ assert!((0.0..=1.0).contains(&f));
+ (f * 100.0) as u32
+ });
+ let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", title));
+ let work_done_progress = match state {
+ Progress::Begin => {
+ self.send_request::<lsp_types::request::WorkDoneProgressCreate>(
+ lsp_types::WorkDoneProgressCreateParams { token: token.clone() },
+ |_, _| (),
+ );
+
+ lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin {
+ title: title.into(),
+ cancellable: None,
+ message,
+ percentage,
+ })
+ }
+ Progress::Report => {
+ lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport {
+ cancellable: None,
+ message,
+ percentage,
+ })
+ }
+ Progress::End => {
+ lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message })
+ }
+ };
+ self.send_notification::<lsp_types::notification::Progress>(lsp_types::ProgressParams {
+ token,
+ value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress),
+ });
+ }
+}
+
+pub(crate) fn apply_document_changes(
+ old_text: &mut String,
+ content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
+) {
+ let mut line_index = LineIndex {
+ index: Arc::new(ide::LineIndex::new(old_text)),
+ // We don't care about line endings or offset encoding here.
+ endings: LineEndings::Unix,
+ encoding: OffsetEncoding::Utf16,
+ };
+
+ // The changes we got must be applied sequentially, but can cross lines so we
+ // have to keep our line index updated.
+ // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we
+ // remember the last valid line in the index and only rebuild it if needed.
+ // The VFS will normalize the end of lines to `\n`.
+ enum IndexValid {
+ All,
+ UpToLineExclusive(u32),
+ }
+
+ impl IndexValid {
+ fn covers(&self, line: u32) -> bool {
+ match *self {
+ IndexValid::UpToLineExclusive(to) => to > line,
+ _ => true,
+ }
+ }
+ }
+
+ let mut index_valid = IndexValid::All;
+ for change in content_changes {
+ match change.range {
+ Some(range) => {
+ if !index_valid.covers(range.end.line) {
+ line_index.index = Arc::new(ide::LineIndex::new(old_text));
+ }
+ index_valid = IndexValid::UpToLineExclusive(range.start.line);
+ if let Ok(range) = from_proto::text_range(&line_index, range) {
+ old_text.replace_range(Range::<usize>::from(range), &change.text);
+ }
+ }
+ None => {
+ *old_text = change.text;
+ index_valid = IndexValid::UpToLineExclusive(0);
+ }
+ }
+ }
+}
+
+/// Checks that the edits inside the completion and the additional edits do not overlap.
+/// LSP explicitly forbids the additional edits to overlap both with the main edit and themselves.
+pub(crate) fn all_edits_are_disjoint(
+ completion: &lsp_types::CompletionItem,
+ additional_edits: &[lsp_types::TextEdit],
+) -> bool {
+ let mut edit_ranges = Vec::new();
+ match completion.text_edit.as_ref() {
+ Some(lsp_types::CompletionTextEdit::Edit(edit)) => {
+ edit_ranges.push(edit.range);
+ }
+ Some(lsp_types::CompletionTextEdit::InsertAndReplace(edit)) => {
+ let replace = edit.replace;
+ let insert = edit.insert;
+ if replace.start != insert.start
+ || insert.start > insert.end
+ || insert.end > replace.end
+ {
+ // insert has to be a prefix of replace but it is not
+ return false;
+ }
+ edit_ranges.push(replace);
+ }
+ None => {}
+ }
+ if let Some(additional_changes) = completion.additional_text_edits.as_ref() {
+ edit_ranges.extend(additional_changes.iter().map(|edit| edit.range));
+ };
+ edit_ranges.extend(additional_edits.iter().map(|edit| edit.range));
+ edit_ranges.sort_by_key(|range| (range.start, range.end));
+ edit_ranges
+ .iter()
+ .zip(edit_ranges.iter().skip(1))
+ .all(|(previous, next)| previous.end <= next.start)
+}
+
+#[cfg(test)]
+mod tests {
+ use lsp_types::{
+ CompletionItem, CompletionTextEdit, InsertReplaceEdit, Position, Range,
+ TextDocumentContentChangeEvent,
+ };
+
+ use super::*;
+
+ #[test]
+ fn test_apply_document_changes() {
+ macro_rules! c {
+ [$($sl:expr, $sc:expr; $el:expr, $ec:expr => $text:expr),+] => {
+ vec![$(TextDocumentContentChangeEvent {
+ range: Some(Range {
+ start: Position { line: $sl, character: $sc },
+ end: Position { line: $el, character: $ec },
+ }),
+ range_length: None,
+ text: String::from($text),
+ }),+]
+ };
+ }
+
+ let mut text = String::new();
+ apply_document_changes(&mut text, vec![]);
+ assert_eq!(text, "");
+ apply_document_changes(
+ &mut text,
+ vec![TextDocumentContentChangeEvent {
+ range: None,
+ range_length: None,
+ text: String::from("the"),
+ }],
+ );
+ assert_eq!(text, "the");
+ apply_document_changes(&mut text, c![0, 3; 0, 3 => " quick"]);
+ assert_eq!(text, "the quick");
+ apply_document_changes(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
+ assert_eq!(text, "quick foxes");
+ apply_document_changes(&mut text, c![0, 11; 0, 11 => "\ndream"]);
+ assert_eq!(text, "quick foxes\ndream");
+ apply_document_changes(&mut text, c![1, 0; 1, 0 => "have "]);
+ assert_eq!(text, "quick foxes\nhave dream");
+ apply_document_changes(
+ &mut text,
+ c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"],
+ );
+ assert_eq!(text, "the quick foxes\nhave quiet dreams\n");
+ apply_document_changes(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
+ assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n");
+ apply_document_changes(
+ &mut text,
+ c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"],
+ );
+ assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n");
+ apply_document_changes(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
+ assert_eq!(text, "the quick \nthey have quiet dreams\n");
+
+ text = String::from("❤️");
+ apply_document_changes(&mut text, c![0, 0; 0, 0 => "a"]);
+ assert_eq!(text, "a❤️");
+
+ text = String::from("a\nb");
+ apply_document_changes(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
+ assert_eq!(text, "adcb");
+
+ text = String::from("a\nb");
+ apply_document_changes(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
+ assert_eq!(text, "ațc\ncb");
+ }
+
+ #[test]
+ fn empty_completion_disjoint_tests() {
+ let empty_completion =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+
+ let disjoint_edit_1 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(3, 3), Position::new(4, 4)),
+ "new_text".to_string(),
+ );
+
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ assert!(
+ all_edits_are_disjoint(&empty_completion, &[]),
+ "Empty completion has all its edits disjoint"
+ );
+ assert!(
+ all_edits_are_disjoint(
+ &empty_completion,
+ &[disjoint_edit_1.clone(), disjoint_edit_2.clone()]
+ ),
+ "Empty completion is disjoint to whatever disjoint extra edits added"
+ );
+
+ assert!(
+ !all_edits_are_disjoint(
+ &empty_completion,
+ &[disjoint_edit_1, disjoint_edit_2, joint_edit]
+ ),
+ "Empty completion does not prevent joint extra edits from failing the validation"
+ );
+ }
+
+ #[test]
+ fn completion_with_joint_edits_disjoint_tests() {
+ let disjoint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(2, 2)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ let mut completion_with_joint_edits =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+ completion_with_joint_edits.additional_text_edits =
+ Some(vec![disjoint_edit.clone(), joint_edit.clone()]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+
+ completion_with_joint_edits.text_edit =
+ Some(CompletionTextEdit::Edit(disjoint_edit.clone()));
+ completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit.clone()]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+
+ completion_with_joint_edits.text_edit =
+ Some(CompletionTextEdit::InsertAndReplace(InsertReplaceEdit {
+ new_text: "new_text".to_string(),
+ insert: disjoint_edit.range,
+ replace: disjoint_edit_2.range,
+ }));
+ completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+ }
+
+ #[test]
+ fn completion_with_disjoint_edits_disjoint_tests() {
+ let disjoint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(2, 2)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ let mut completion_with_disjoint_edits =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+ completion_with_disjoint_edits.text_edit = Some(CompletionTextEdit::Edit(disjoint_edit));
+ let completion_with_disjoint_edits = completion_with_disjoint_edits;
+
+ assert!(
+ all_edits_are_disjoint(&completion_with_disjoint_edits, &[]),
+ "Completion with disjoint edits is valid"
+ );
+ assert!(
+ !all_edits_are_disjoint(&completion_with_disjoint_edits, &[joint_edit]),
+ "Completion with disjoint edits and joint extra edit is invalid"
+ );
+ assert!(
+ all_edits_are_disjoint(&completion_with_disjoint_edits, &[disjoint_edit_2]),
+ "Completion with disjoint edits and joint extra edit is valid"
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
new file mode 100644
index 000000000..5845cf712
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -0,0 +1,823 @@
+//! The main loop of `rust-analyzer` responsible for dispatching LSP
+//! requests/replies and notifications back to the client.
+use std::{
+ fmt,
+ sync::Arc,
+ time::{Duration, Instant},
+};
+
+use always_assert::always;
+use crossbeam_channel::{select, Receiver};
+use ide_db::base_db::{SourceDatabaseExt, VfsPath};
+use lsp_server::{Connection, Notification, Request};
+use lsp_types::notification::Notification as _;
+use vfs::{ChangeKind, FileId};
+
+use crate::{
+ config::Config,
+ dispatch::{NotificationDispatcher, RequestDispatcher},
+ from_proto,
+ global_state::{file_id_to_url, url_to_file_id, GlobalState},
+ handlers, lsp_ext,
+ lsp_utils::{apply_document_changes, notification_is, Progress},
+ mem_docs::DocumentData,
+ reload::{self, BuildDataProgress, ProjectWorkspaceProgress},
+ Result,
+};
+
+pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
+ tracing::info!("initial config: {:#?}", config);
+
+ // Windows scheduler implements priority boosts: if thread waits for an
+ // event (like a condvar), and event fires, priority of the thread is
+ // temporary bumped. This optimization backfires in our case: each time the
+ // `main_loop` schedules a task to run on a threadpool, the worker threads
+ // gets a higher priority, and (on a machine with fewer cores) displaces the
+ // main loop! We work-around this by marking the main loop as a
+ // higher-priority thread.
+ //
+ // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities
+ // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts
+ // https://github.com/rust-lang/rust-analyzer/issues/2835
+ #[cfg(windows)]
+ unsafe {
+ use winapi::um::processthreadsapi::*;
+ let thread = GetCurrentThread();
+ let thread_priority_above_normal = 1;
+ SetThreadPriority(thread, thread_priority_above_normal);
+ }
+
+ GlobalState::new(connection.sender, config).run(connection.receiver)
+}
+
+enum Event {
+ Lsp(lsp_server::Message),
+ Task(Task),
+ Vfs(vfs::loader::Message),
+ Flycheck(flycheck::Message),
+}
+
+#[derive(Debug)]
+pub(crate) enum Task {
+ Response(lsp_server::Response),
+ Retry(lsp_server::Request),
+ Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
+ PrimeCaches(PrimeCachesProgress),
+ FetchWorkspace(ProjectWorkspaceProgress),
+ FetchBuildData(BuildDataProgress),
+}
+
+#[derive(Debug)]
+pub(crate) enum PrimeCachesProgress {
+ Begin,
+ Report(ide::ParallelPrimeCachesProgress),
+ End { cancelled: bool },
+}
+
+impl fmt::Debug for Event {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter<'_>| {
+ f.debug_struct("Notification").field("method", &not.method).finish()
+ };
+
+ match self {
+ Event::Lsp(lsp_server::Message::Notification(not)) => {
+ if notification_is::<lsp_types::notification::DidOpenTextDocument>(not)
+ || notification_is::<lsp_types::notification::DidChangeTextDocument>(not)
+ {
+ return debug_verbose_not(not, f);
+ }
+ }
+ Event::Task(Task::Response(resp)) => {
+ return f
+ .debug_struct("Response")
+ .field("id", &resp.id)
+ .field("error", &resp.error)
+ .finish();
+ }
+ _ => (),
+ }
+ match self {
+ Event::Lsp(it) => fmt::Debug::fmt(it, f),
+ Event::Task(it) => fmt::Debug::fmt(it, f),
+ Event::Vfs(it) => fmt::Debug::fmt(it, f),
+ Event::Flycheck(it) => fmt::Debug::fmt(it, f),
+ }
+ }
+}
+
+impl GlobalState {
+ fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
+ if self.config.linked_projects().is_empty()
+ && self.config.detached_files().is_empty()
+ && self.config.notifications().cargo_toml_not_found
+ {
+ self.show_and_log_error("rust-analyzer failed to discover workspace".to_string(), None);
+ };
+
+ if self.config.did_save_text_document_dynamic_registration() {
+ let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
+ include_text: Some(false),
+ text_document_registration_options: lsp_types::TextDocumentRegistrationOptions {
+ document_selector: Some(vec![
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/*.rs".into()),
+ },
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/Cargo.toml".into()),
+ },
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/Cargo.lock".into()),
+ },
+ ]),
+ },
+ };
+
+ let registration = lsp_types::Registration {
+ id: "textDocument/didSave".to_string(),
+ method: "textDocument/didSave".to_string(),
+ register_options: Some(serde_json::to_value(save_registration_options).unwrap()),
+ };
+ self.send_request::<lsp_types::request::RegisterCapability>(
+ lsp_types::RegistrationParams { registrations: vec![registration] },
+ |_, _| (),
+ );
+ }
+
+ self.fetch_workspaces_queue.request_op("startup".to_string());
+ if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+ self.fetch_workspaces(cause);
+ }
+
+ while let Some(event) = self.next_event(&inbox) {
+ if let Event::Lsp(lsp_server::Message::Notification(not)) = &event {
+ if not.method == lsp_types::notification::Exit::METHOD {
+ return Ok(());
+ }
+ }
+ self.handle_event(event)?
+ }
+
+ Err("client exited without proper shutdown sequence".into())
+ }
+
+ fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
+ select! {
+ recv(inbox) -> msg =>
+ msg.ok().map(Event::Lsp),
+
+ recv(self.task_pool.receiver) -> task =>
+ Some(Event::Task(task.unwrap())),
+
+ recv(self.loader.receiver) -> task =>
+ Some(Event::Vfs(task.unwrap())),
+
+ recv(self.flycheck_receiver) -> task =>
+ Some(Event::Flycheck(task.unwrap())),
+ }
+ }
+
+ fn handle_event(&mut self, event: Event) -> Result<()> {
+ let loop_start = Instant::now();
+ // NOTE: don't count blocking select! call as a loop-turn time
+ let _p = profile::span("GlobalState::handle_event");
+
+ tracing::debug!("handle_event({:?})", event);
+ let task_queue_len = self.task_pool.handle.len();
+ if task_queue_len > 0 {
+ tracing::info!("task queue len: {}", task_queue_len);
+ }
+
+ let was_quiescent = self.is_quiescent();
+ match event {
+ Event::Lsp(msg) => match msg {
+ lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
+ lsp_server::Message::Notification(not) => {
+ self.on_notification(not)?;
+ }
+ lsp_server::Message::Response(resp) => self.complete_request(resp),
+ },
+ Event::Task(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/task");
+ let mut prime_caches_progress = Vec::new();
+ loop {
+ match task {
+ Task::Response(response) => self.respond(response),
+ Task::Retry(req) => self.on_request(req),
+ Task::Diagnostics(diagnostics_per_file) => {
+ for (file_id, diagnostics) in diagnostics_per_file {
+ self.diagnostics.set_native_diagnostics(file_id, diagnostics)
+ }
+ }
+ Task::PrimeCaches(progress) => match progress {
+ PrimeCachesProgress::Begin => prime_caches_progress.push(progress),
+ PrimeCachesProgress::Report(_) => {
+ match prime_caches_progress.last_mut() {
+ Some(last @ PrimeCachesProgress::Report(_)) => {
+ // Coalesce subsequent update events.
+ *last = progress;
+ }
+ _ => prime_caches_progress.push(progress),
+ }
+ }
+ PrimeCachesProgress::End { .. } => prime_caches_progress.push(progress),
+ },
+ Task::FetchWorkspace(progress) => {
+ let (state, msg) = match progress {
+ ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
+ ProjectWorkspaceProgress::Report(msg) => {
+ (Progress::Report, Some(msg))
+ }
+ ProjectWorkspaceProgress::End(workspaces) => {
+ self.fetch_workspaces_queue.op_completed(workspaces);
+
+ let old = Arc::clone(&self.workspaces);
+ self.switch_workspaces("fetched workspace".to_string());
+ let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
+
+ if self.config.run_build_scripts() && workspaces_updated {
+ self.fetch_build_data_queue
+ .request_op(format!("workspace updated"));
+ }
+
+ (Progress::End, None)
+ }
+ };
+
+ self.report_progress("Fetching", state, msg, None);
+ }
+ Task::FetchBuildData(progress) => {
+ let (state, msg) = match progress {
+ BuildDataProgress::Begin => (Some(Progress::Begin), None),
+ BuildDataProgress::Report(msg) => {
+ (Some(Progress::Report), Some(msg))
+ }
+ BuildDataProgress::End(build_data_result) => {
+ self.fetch_build_data_queue.op_completed(build_data_result);
+
+ self.switch_workspaces("fetched build data".to_string());
+
+ (Some(Progress::End), None)
+ }
+ };
+
+ if let Some(state) = state {
+ self.report_progress("Loading", state, msg, None);
+ }
+ }
+ }
+
+ // Coalesce multiple task events into one loop turn
+ task = match self.task_pool.receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ };
+ }
+
+ for progress in prime_caches_progress {
+ let (state, message, fraction);
+ match progress {
+ PrimeCachesProgress::Begin => {
+ state = Progress::Begin;
+ message = None;
+ fraction = 0.0;
+ }
+ PrimeCachesProgress::Report(report) => {
+ state = Progress::Report;
+
+ message = match &report.crates_currently_indexing[..] {
+ [crate_name] => Some(format!(
+ "{}/{} ({})",
+ report.crates_done, report.crates_total, crate_name
+ )),
+ [crate_name, rest @ ..] => Some(format!(
+ "{}/{} ({} + {} more)",
+ report.crates_done,
+ report.crates_total,
+ crate_name,
+ rest.len()
+ )),
+ _ => None,
+ };
+
+ fraction = Progress::fraction(report.crates_done, report.crates_total);
+ }
+ PrimeCachesProgress::End { cancelled } => {
+ state = Progress::End;
+ message = None;
+ fraction = 1.0;
+
+ self.prime_caches_queue.op_completed(());
+ if cancelled {
+ self.prime_caches_queue
+ .request_op("restart after cancellation".to_string());
+ }
+ }
+ };
+
+ self.report_progress("Indexing", state, message, Some(fraction));
+ }
+ }
+ Event::Vfs(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/vfs");
+ loop {
+ match task {
+ vfs::loader::Message::Loaded { files } => {
+ let vfs = &mut self.vfs.write().0;
+ for (path, contents) in files {
+ let path = VfsPath::from(path);
+ if !self.mem_docs.contains(&path) {
+ vfs.set_file_contents(path, contents);
+ }
+ }
+ }
+ vfs::loader::Message::Progress { n_total, n_done, config_version } => {
+ always!(config_version <= self.vfs_config_version);
+
+ self.vfs_progress_config_version = config_version;
+ self.vfs_progress_n_total = n_total;
+ self.vfs_progress_n_done = n_done;
+
+ let state = if n_done == 0 {
+ Progress::Begin
+ } else if n_done < n_total {
+ Progress::Report
+ } else {
+ assert_eq!(n_done, n_total);
+ Progress::End
+ };
+ self.report_progress(
+ "Roots Scanned",
+ state,
+ Some(format!("{}/{}", n_done, n_total)),
+ Some(Progress::fraction(n_done, n_total)),
+ )
+ }
+ }
+ // Coalesce many VFS event into a single loop turn
+ task = match self.loader.receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ }
+ }
+ }
+ Event::Flycheck(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/flycheck");
+ loop {
+ match task {
+ flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => {
+ let snap = self.snapshot();
+ let diagnostics =
+ crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
+ &self.config.diagnostics_map(),
+ &diagnostic,
+ &workspace_root,
+ &snap,
+ );
+ for diag in diagnostics {
+ match url_to_file_id(&self.vfs.read().0, &diag.url) {
+ Ok(file_id) => self.diagnostics.add_check_diagnostic(
+ file_id,
+ diag.diagnostic,
+ diag.fix,
+ ),
+ Err(err) => {
+ tracing::error!(
+ "File with cargo diagnostic not found in VFS: {}",
+ err
+ );
+ }
+ };
+ }
+ }
+
+ flycheck::Message::Progress { id, progress } => {
+ let (state, message) = match progress {
+ flycheck::Progress::DidStart => {
+ self.diagnostics.clear_check();
+ (Progress::Begin, None)
+ }
+ flycheck::Progress::DidCheckCrate(target) => {
+ (Progress::Report, Some(target))
+ }
+ flycheck::Progress::DidCancel => (Progress::End, None),
+ flycheck::Progress::DidFinish(result) => {
+ if let Err(err) = result {
+ self.show_and_log_error(
+ "cargo check failed".to_string(),
+ Some(err.to_string()),
+ );
+ }
+ (Progress::End, None)
+ }
+ };
+
+ // When we're running multiple flychecks, we have to include a disambiguator in
+ // the title, or the editor complains. Note that this is a user-facing string.
+ let title = if self.flycheck.len() == 1 {
+ match self.config.flycheck() {
+ Some(config) => format!("{}", config),
+ None => "cargo check".to_string(),
+ }
+ } else {
+ format!("cargo check (#{})", id + 1)
+ };
+ self.report_progress(&title, state, message, None);
+ }
+ }
+ // Coalesce many flycheck updates into a single loop turn
+ task = match self.flycheck_receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ }
+ }
+ }
+ }
+
+ let state_changed = self.process_changes();
+ let memdocs_added_or_removed = self.mem_docs.take_changes();
+
+ if self.is_quiescent() {
+ if !was_quiescent {
+ for flycheck in &self.flycheck {
+ flycheck.update();
+ }
+ if self.config.prefill_caches() {
+ self.prime_caches_queue.request_op("became quiescent".to_string());
+ }
+ }
+
+ if !was_quiescent || state_changed {
+ // Refresh semantic tokens if the client supports it.
+ if self.config.semantic_tokens_refresh() {
+ self.semantic_tokens_cache.lock().clear();
+ self.send_request::<lsp_types::request::SemanticTokensRefresh>((), |_, _| ());
+ }
+
+ // Refresh code lens if the client supports it.
+ if self.config.code_lens_refresh() {
+ self.send_request::<lsp_types::request::CodeLensRefresh>((), |_, _| ());
+ }
+ }
+
+ if !was_quiescent || state_changed || memdocs_added_or_removed {
+ if self.config.publish_diagnostics() {
+ self.update_diagnostics()
+ }
+ }
+ }
+
+ if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
+ for file_id in diagnostic_changes {
+ let db = self.analysis_host.raw_database();
+ let source_root = db.file_source_root(file_id);
+ if db.source_root(source_root).is_library {
+ // Only publish diagnostics for files in the workspace, not from crates.io deps
+ // or the sysroot.
+ // While theoretically these should never have errors, we have quite a few false
+ // positives particularly in the stdlib, and those diagnostics would stay around
+ // forever if we emitted them here.
+ continue;
+ }
+
+ let url = file_id_to_url(&self.vfs.read().0, file_id);
+ let mut diagnostics =
+ self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
+ // https://github.com/rust-lang/rust-analyzer/issues/11404
+ for d in &mut diagnostics {
+ if d.message.is_empty() {
+ d.message = " ".to_string();
+ }
+ if let Some(rds) = d.related_information.as_mut() {
+ for rd in rds {
+ if rd.message.is_empty() {
+ rd.message = " ".to_string();
+ }
+ }
+ }
+ }
+ let version = from_proto::vfs_path(&url)
+ .map(|path| self.mem_docs.get(&path).map(|it| it.version))
+ .unwrap_or_default();
+
+ self.send_notification::<lsp_types::notification::PublishDiagnostics>(
+ lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version },
+ );
+ }
+ }
+
+ if self.config.cargo_autoreload() {
+ if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+ self.fetch_workspaces(cause);
+ }
+ }
+
+ if !self.fetch_workspaces_queue.op_in_progress() {
+ if let Some(cause) = self.fetch_build_data_queue.should_start_op() {
+ self.fetch_build_data(cause);
+ }
+ }
+
+ if let Some(cause) = self.prime_caches_queue.should_start_op() {
+ tracing::debug!(%cause, "will prime caches");
+ let num_worker_threads = self.config.prime_caches_num_threads();
+
+ self.task_pool.handle.spawn_with_sender({
+ let analysis = self.snapshot().analysis;
+ move |sender| {
+ sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
+ let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
+ let report = PrimeCachesProgress::Report(progress);
+ sender.send(Task::PrimeCaches(report)).unwrap();
+ });
+ sender
+ .send(Task::PrimeCaches(PrimeCachesProgress::End {
+ cancelled: res.is_err(),
+ }))
+ .unwrap();
+ }
+ });
+ }
+
+ let status = self.current_status();
+ if self.last_reported_status.as_ref() != Some(&status) {
+ self.last_reported_status = Some(status.clone());
+
+ if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
+ self.show_message(lsp_types::MessageType::ERROR, message.clone());
+ }
+
+ if self.config.server_status_notification() {
+ self.send_notification::<lsp_ext::ServerStatusNotification>(status);
+ }
+ }
+
+ let loop_duration = loop_start.elapsed();
+ if loop_duration > Duration::from_millis(100) && was_quiescent {
+ tracing::warn!("overly long loop turn: {:?}", loop_duration);
+ self.poke_rust_analyzer_developer(format!(
+ "overly long loop turn: {:?}",
+ loop_duration
+ ));
+ }
+ Ok(())
+ }
+
+ fn on_new_request(&mut self, request_received: Instant, req: Request) {
+ self.register_request(&req, request_received);
+ self.on_request(req);
+ }
+
+ /// Routes an incoming LSP request to its handler via `RequestDispatcher`.
+ ///
+ /// Requests received after a `shutdown` request are rejected with
+ /// `InvalidRequest` (per the LSP spec only `exit` should follow shutdown).
+ /// Requests received before any workspace has finished loading are rejected
+ /// with `ContentModified` so the client retries once the server is ready.
+ fn on_request(&mut self, req: Request) {
+ if self.shutdown_requested {
+ self.respond(lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::InvalidRequest as i32,
+ "Shutdown already requested.".to_owned(),
+ ));
+ return;
+ }
+
+ // Avoid flashing a bunch of unresolved references during initial load.
+ if self.workspaces.is_empty() && !self.is_quiescent() {
+ self.respond(lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::ContentModified as i32,
+ "waiting for cargo metadata or cargo check".to_owned(),
+ ));
+ return;
+ }
+
+ // Handlers registered via `on_sync_mut` get `&mut` access to the global
+ // state; the scheduling difference between `on_sync` and `on` is defined
+ // by `RequestDispatcher` (presumably sync-on-main-thread vs. thread pool
+ // — confirm in the dispatcher module).
+ RequestDispatcher { req: Some(req), global_state: self }
+ .on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
+ s.shutdown_requested = true;
+ Ok(())
+ })
+ .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
+ .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
+ .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
+ .on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
+ .on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
+ .on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
+ .on_sync::<lsp_ext::MatchingBrace>(handlers::handle_matching_brace)
+ .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
+ .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
+ .on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
+ .on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
+ .on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
+ .on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)
+ .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
+ .on::<lsp_ext::ParentModule>(handlers::handle_parent_module)
+ .on::<lsp_ext::Runnables>(handlers::handle_runnables)
+ .on::<lsp_ext::RelatedTests>(handlers::handle_related_tests)
+ .on::<lsp_ext::CodeActionRequest>(handlers::handle_code_action)
+ .on::<lsp_ext::CodeActionResolveRequest>(handlers::handle_code_action_resolve)
+ .on::<lsp_ext::HoverRequest>(handlers::handle_hover)
+ .on::<lsp_ext::ExternalDocs>(handlers::handle_open_docs)
+ .on::<lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
+ .on::<lsp_ext::MoveItem>(handlers::handle_move_item)
+ .on::<lsp_ext::WorkspaceSymbol>(handlers::handle_workspace_symbol)
+ .on::<lsp_ext::OnTypeFormatting>(handlers::handle_on_type_formatting)
+ .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)
+ .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
+ .on::<lsp_types::request::GotoDeclaration>(handlers::handle_goto_declaration)
+ .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
+ .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
+ .on::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
+ .on::<lsp_types::request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
+ .on::<lsp_types::request::Completion>(handlers::handle_completion)
+ .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
+ .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
+ .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
+ .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)
+ .on::<lsp_types::request::SignatureHelpRequest>(handlers::handle_signature_help)
+ .on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)
+ .on::<lsp_types::request::Rename>(handlers::handle_rename)
+ .on::<lsp_types::request::References>(handlers::handle_references)
+ .on::<lsp_types::request::Formatting>(handlers::handle_formatting)
+ .on::<lsp_types::request::RangeFormatting>(handlers::handle_range_formatting)
+ .on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight)
+ .on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)
+ .on::<lsp_types::request::CallHierarchyIncomingCalls>(
+ handlers::handle_call_hierarchy_incoming,
+ )
+ .on::<lsp_types::request::CallHierarchyOutgoingCalls>(
+ handlers::handle_call_hierarchy_outgoing,
+ )
+ .on::<lsp_types::request::SemanticTokensFullRequest>(
+ handlers::handle_semantic_tokens_full,
+ )
+ .on::<lsp_types::request::SemanticTokensFullDeltaRequest>(
+ handlers::handle_semantic_tokens_full_delta,
+ )
+ .on::<lsp_types::request::SemanticTokensRangeRequest>(
+ handlers::handle_semantic_tokens_range,
+ )
+ .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
+ .on::<lsp_ext::Ssr>(handlers::handle_ssr)
+ .finish();
+ }
+
+ /// Routes an incoming LSP notification to its handler via
+ /// `NotificationDispatcher`; each `.on::<..>` arm registers one notification
+ /// type. Text-document notifications keep `mem_docs` and the vfs in sync
+ /// with the client's view of open files.
+ fn on_notification(&mut self, not: Notification) -> Result<()> {
+ NotificationDispatcher { not: Some(not), global_state: self }
+ .on::<lsp_types::notification::Cancel>(|this, params| {
+ // Request ids may be numeric or string per the LSP spec.
+ let id: lsp_server::RequestId = match params.id {
+ lsp_types::NumberOrString::Number(id) => id.into(),
+ lsp_types::NumberOrString::String(id) => id.into(),
+ };
+ this.cancel(id);
+ Ok(())
+ })?
+ .on::<lsp_types::notification::WorkDoneProgressCancel>(|_this, _params| {
+ // Just ignore this. It is OK to continue sending progress
+ // notifications for this token, as the client can't know when
+ // we accepted notification.
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidOpenTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
+ // `MemDocs::insert` returns Err if the path was already tracked.
+ let already_exists = this
+ .mem_docs
+ .insert(path.clone(), DocumentData::new(params.text_document.version))
+ .is_err();
+ if already_exists {
+ tracing::error!("duplicate DidOpenTextDocument: {}", path)
+ }
+ // From now on the client's text, not the disk, is authoritative.
+ this.vfs
+ .write()
+ .0
+ .set_file_contents(path, Some(params.text_document.text.into_bytes()));
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
+ match this.mem_docs.get_mut(&path) {
+ Some(doc) => {
+ // The version passed in DidChangeTextDocument is the version after all edits are applied
+ // so we should apply it before the vfs is notified.
+ doc.version = params.text_document.version;
+ }
+ None => {
+ tracing::error!("unexpected DidChangeTextDocument: {}", path);
+ return Ok(());
+ }
+ };
+
+ // Apply the (possibly incremental) edits to the current vfs text.
+ // The `unwrap`s rely on DidOpen having inserted the file above.
+ let vfs = &mut this.vfs.write().0;
+ let file_id = vfs.file_id(&path).unwrap();
+ let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap();
+ apply_document_changes(&mut text, params.content_changes);
+
+ vfs.set_file_contents(path, Some(text.into_bytes()));
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidCloseTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
+ if this.mem_docs.remove(&path).is_err() {
+ tracing::error!("orphan DidCloseTextDocument: {}", path);
+ }
+
+ // Drop cached semantic tokens for the closed document.
+ this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
+
+ // Re-read the file from disk, since the client no longer owns it.
+ if let Some(path) = path.as_path() {
+ this.loader.handle.invalidate(path.to_path_buf());
+ }
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
+ // A save is the trigger for re-running cargo check.
+ for flycheck in &this.flycheck {
+ flycheck.update();
+ }
+ if let Ok(abs_path) = from_proto::abs_path(&params.text_document.uri) {
+ if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
+ this.fetch_workspaces_queue
+ .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
+ }
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| {
+ // As stated in https://github.com/microsoft/language-server-protocol/issues/676,
+ // this notification's parameters should be ignored and the actual config queried separately.
+ this.send_request::<lsp_types::request::WorkspaceConfiguration>(
+ lsp_types::ConfigurationParams {
+ items: vec![lsp_types::ConfigurationItem {
+ scope_uri: None,
+ section: Some("rust-analyzer".to_string()),
+ }],
+ },
+ |this, resp| {
+ // NOTE(review): the log message below has a stray opening
+ // quote ("'{:?}") — cosmetic only, left as-is here.
+ tracing::debug!("config update response: '{:?}", resp);
+ let lsp_server::Response { error, result, .. } = resp;
+
+ match (error, result) {
+ (Some(err), _) => {
+ tracing::error!("failed to fetch the server settings: {:?}", err)
+ }
+ (None, Some(mut configs)) => {
+ if let Some(json) = configs.get_mut(0) {
+ // Note that json can be null according to the spec if the client can't
+ // provide a configuration. This is handled in Config::update below.
+ let mut config = Config::clone(&*this.config);
+ if let Err(error) = config.update(json.take()) {
+ this.show_message(
+ lsp_types::MessageType::WARNING,
+ error.to_string(),
+ );
+ }
+ this.update_configuration(config);
+ }
+ }
+ (None, None) => tracing::error!(
+ "received empty server settings response from the client"
+ ),
+ }
+ },
+ );
+
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| {
+ // The client watches files for us: reload whatever changed on disk.
+ for change in params.changes {
+ if let Ok(path) = from_proto::abs_path(&change.uri) {
+ this.loader.handle.invalidate(path);
+ }
+ }
+ Ok(())
+ })?
+ .finish();
+ Ok(())
+ }
+
+ /// Recomputes diagnostics for every open (in-memory) document on the task
+ /// pool and reports them back to the main loop as `Task::Diagnostics`.
+ fn update_diagnostics(&mut self) {
+ // Only documents the client has open are subscribed to diagnostics.
+ // The `unwrap` relies on every mem-doc having been inserted into the
+ // vfs when it was opened.
+ let subscriptions = self
+ .mem_docs
+ .iter()
+ .map(|path| self.vfs.read().0.file_id(path).unwrap())
+ .collect::<Vec<_>>();
+
+ tracing::trace!("updating notifications for {:?}", subscriptions);
+
+ // Compute on a snapshot so the main loop is not blocked; files whose
+ // diagnostics fail to compute (e.g. cancelled) are silently skipped.
+ let snapshot = self.snapshot();
+ self.task_pool.handle.spawn(move || {
+ let diagnostics = subscriptions
+ .into_iter()
+ .filter_map(|file_id| {
+ handlers::publish_diagnostics(&snapshot, file_id)
+ .ok()
+ .map(|diags| (file_id, diags))
+ })
+ .collect::<Vec<_>>();
+ Task::Diagnostics(diagnostics)
+ })
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/markdown.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/markdown.rs
new file mode 100644
index 000000000..912ed1e76
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/markdown.rs
@@ -0,0 +1,157 @@
+//! Transforms markdown
+use ide_db::rust_doc::is_rust_fence;
+
+const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
+
+/// Normalizes rustdoc markdown for display in the client:
+///
+/// * code fences recognized as Rust (per `is_rust_fence` applied to the fence
+///   info string) are rewritten to a plain ```` ```rust ```` fence;
+/// * inside Rust blocks, lines that rustdoc hides from rendered output
+///   (`#`-prefixed, see `code_line_ignored_by_rustdoc`) are dropped;
+/// * inside any code block, a leading escaped `##` is unescaped to `#`.
+pub(crate) fn format_docs(src: &str) -> String {
+ let mut processed_lines = Vec::new();
+ let mut in_code_block = false;
+ let mut is_rust = false;
+
+ for mut line in src.lines() {
+ if in_code_block && is_rust && code_line_ignored_by_rustdoc(line) {
+ continue;
+ }
+
+ if let Some(header) = RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
+ {
+ // A fence line both opens and closes a block, so toggle.
+ in_code_block ^= true;
+
+ if in_code_block {
+ is_rust = is_rust_fence(header);
+
+ if is_rust {
+ line = "```rust";
+ }
+ }
+ }
+
+ if in_code_block {
+ // `##` at line start is rustdoc's escape for a literal leading `#`.
+ let trimmed = line.trim_start();
+ if trimmed.starts_with("##") {
+ line = &trimmed[1..];
+ }
+ }
+
+ processed_lines.push(line);
+ }
+ processed_lines.join("\n")
+}
+
+/// True for code lines rustdoc hides from rendered docs: a lone `#`, or `#`
+/// followed by a space or tab. (`#foo` and escaped `##` lines are kept.)
+fn code_line_ignored_by_rustdoc(line: &str) -> bool {
+ let trimmed = line.trim();
+ trimmed == "#" || trimmed.starts_with("# ") || trimmed.starts_with("#\t")
+}
+
+#[cfg(test)]
+mod tests {
+ // Unit tests for `format_docs`: fence normalization to ```rust, `#`-line
+ // stripping inside Rust blocks, and `##` unescaping.
+ use super::*;
+
+ #[test]
+ fn test_format_docs_adds_rust() {
+ let comment = "```\nfn some_rust() {}\n```";
+ assert_eq!(format_docs(comment), "```rust\nfn some_rust() {}\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_plain_text() {
+ let comment = "```text\nthis is plain text\n```";
+ assert_eq!(format_docs(comment), "```text\nthis is plain text\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_non_rust() {
+ let comment = "```sh\nsupposedly shell code\n```";
+ assert_eq!(format_docs(comment), "```sh\nsupposedly shell code\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_rust_alias() {
+ let comment = "```ignore\nlet z = 55;\n```";
+ assert_eq!(format_docs(comment), "```rust\nlet z = 55;\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_complex_code_block_attrs() {
+ let comment = "```rust,no_run\nlet z = 55;\n```";
+ assert_eq!(format_docs(comment), "```rust\nlet z = 55;\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_error_codes() {
+ let comment = "```compile_fail,E0641\nlet b = 0 as *const _;\n```";
+ assert_eq!(format_docs(comment), "```rust\nlet b = 0 as *const _;\n```");
+ }
+
+ #[test]
+ fn test_format_docs_skips_comments_in_rust_block() {
+ let comment =
+ "```rust\n # skip1\n# skip2\n#stay1\nstay2\n#\n #\n # \n #\tskip3\n\t#\t\n```";
+ assert_eq!(format_docs(comment), "```rust\n#stay1\nstay2\n```");
+ }
+
+ #[test]
+ fn test_format_docs_does_not_skip_lines_if_plain_text() {
+ let comment =
+ "```text\n # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t\n```";
+ assert_eq!(
+ format_docs(comment),
+ "```text\n # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t\n```",
+ );
+ }
+
+ #[test]
+ fn test_format_docs_keeps_comments_outside_of_rust_block() {
+ let comment = " # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t";
+ assert_eq!(format_docs(comment), comment);
+ }
+
+ #[test]
+ fn test_format_docs_preserves_newlines() {
+ let comment = "this\nis\nmultiline";
+ assert_eq!(format_docs(comment), comment);
+ }
+
+ #[test]
+ fn test_code_blocks_in_comments_marked_as_rust() {
+ let comment = r#"```rust
+fn main(){}
+```
+Some comment.
+```
+let a = 1;
+```"#;
+
+ assert_eq!(
+ format_docs(comment),
+ "```rust\nfn main(){}\n```\nSome comment.\n```rust\nlet a = 1;\n```"
+ );
+ }
+
+ #[test]
+ fn test_code_blocks_in_comments_marked_as_text() {
+ let comment = r#"```text
+filler
+text
+```
+Some comment.
+```
+let a = 1;
+```"#;
+
+ assert_eq!(
+ format_docs(comment),
+ "```text\nfiller\ntext\n```\nSome comment.\n```rust\nlet a = 1;\n```"
+ );
+ }
+
+ #[test]
+ fn test_format_docs_handles_escape_double_hashes() {
+ let comment = r#"```rust
+let s = "foo
+## bar # baz";
+```"#;
+
+ assert_eq!(format_docs(comment), "```rust\nlet s = \"foo\n# bar # baz\";\n```");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/mem_docs.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/mem_docs.rs
new file mode 100644
index 000000000..f86a0f66a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/mem_docs.rs
@@ -0,0 +1,65 @@
+//! In-memory document information.
+
+use std::mem;
+
+use rustc_hash::FxHashMap;
+use vfs::VfsPath;
+
+/// Holds the set of in-memory documents.
+///
+/// For these documents, the true contents are maintained by the client. They
+/// might be different from what's on disk.
+#[derive(Default, Clone)]
+pub(crate) struct MemDocs {
+ mem_docs: FxHashMap<VfsPath, DocumentData>,
+ // Set whenever a key is added or removed; consumed by `take_changes`.
+ added_or_removed: bool,
+}
+
+impl MemDocs {
+ pub(crate) fn contains(&self, path: &VfsPath) -> bool {
+ self.mem_docs.contains_key(path)
+ }
+ /// Inserts `path`; `Err(())` means the path was already tracked.
+ pub(crate) fn insert(&mut self, path: VfsPath, data: DocumentData) -> Result<(), ()> {
+ self.added_or_removed = true;
+ match self.mem_docs.insert(path, data) {
+ Some(_) => Err(()),
+ None => Ok(()),
+ }
+ }
+ /// Removes `path`; `Err(())` means the path was not tracked.
+ pub(crate) fn remove(&mut self, path: &VfsPath) -> Result<(), ()> {
+ self.added_or_removed = true;
+ match self.mem_docs.remove(path) {
+ Some(_) => Ok(()),
+ None => Err(()),
+ }
+ }
+ pub(crate) fn get(&self, path: &VfsPath) -> Option<&DocumentData> {
+ self.mem_docs.get(path)
+ }
+ pub(crate) fn get_mut(&mut self, path: &VfsPath) -> Option<&mut DocumentData> {
+ // NB: don't set `self.added_or_removed` here, as that purposefully only
+ // tracks changes to the key set.
+ self.mem_docs.get_mut(path)
+ }
+ /// Iterates over the paths of all tracked documents.
+ pub(crate) fn iter(&self) -> impl Iterator<Item = &VfsPath> {
+ self.mem_docs.keys()
+ }
+ /// Returns whether the key set changed since the last call, resetting the flag.
+ pub(crate) fn take_changes(&mut self) -> bool {
+ mem::replace(&mut self.added_or_removed, false)
+ }
+}
+
+/// Information about a document that the Language Client
+/// knows about.
+/// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
+/// client notifications.
+#[derive(Debug, Clone)]
+pub(crate) struct DocumentData {
+ // Latest document version number reported by the client.
+ pub(crate) version: i32,
+}
+
+impl DocumentData {
+ pub(crate) fn new(version: i32) -> Self {
+ DocumentData { version }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs
new file mode 100644
index 000000000..97aca0161
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs
@@ -0,0 +1,44 @@
+//! Bookkeeping to make sure only one long-running operation is being executed
+//! at a time.
+
+/// Human-readable reason why an operation was requested (used for logging).
+pub(crate) type Cause = String;
+
+pub(crate) struct OpQueue<Output> {
+ // Pending request, if any; at most one request is queued at a time
+ // (a newer request overwrites an older one's cause).
+ op_requested: Option<Cause>,
+ op_in_progress: bool,
+ last_op_result: Output,
+}
+
+impl<Output: Default> Default for OpQueue<Output> {
+ fn default() -> Self {
+ Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
+ }
+}
+
+impl<Output> OpQueue<Output> {
+ /// Enqueues (or re-enqueues) the operation with the given cause.
+ pub(crate) fn request_op(&mut self, reason: Cause) {
+ self.op_requested = Some(reason);
+ }
+ /// If an op is requested and none is running, marks it in-progress and
+ /// returns its cause; otherwise returns `None`.
+ pub(crate) fn should_start_op(&mut self) -> Option<Cause> {
+ if self.op_in_progress {
+ return None;
+ }
+ self.op_in_progress = self.op_requested.is_some();
+ self.op_requested.take()
+ }
+ /// Records the result of the op started by `should_start_op`.
+ /// Panics if no op was in progress (protocol violation).
+ pub(crate) fn op_completed(&mut self, result: Output) {
+ assert!(self.op_in_progress);
+ self.op_in_progress = false;
+ self.last_op_result = result;
+ }
+
+ pub(crate) fn last_op_result(&self) -> &Output {
+ &self.last_op_result
+ }
+ pub(crate) fn op_in_progress(&self) -> bool {
+ self.op_in_progress
+ }
+ pub(crate) fn op_requested(&self) -> bool {
+ self.op_requested.is_some()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
new file mode 100644
index 000000000..eaab275bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -0,0 +1,705 @@
+//! Project loading & configuration updates.
+//!
+//! This is quite tricky. The main problem is time and changes -- there's no
+//! fixed "project" rust-analyzer is working with, "current project" is itself
+//! mutable state. For example, when the user edits `Cargo.toml` by adding a new
+//! dependency, project model changes. What's more, switching project model is
+//! not instantaneous -- it takes time to run `cargo metadata` and (for proc
+//! macros) `cargo check`.
+//!
+//! The main guiding principle here is, as elsewhere in rust-analyzer,
+//! robustness. We try not to assume that the project model exists or is
+//! correct. Instead, we try to provide a best-effort service. Even if the
+//! project is currently loading and we don't have a full project model, we
+//! still want to respond to various requests.
+use std::{mem, sync::Arc};
+
+use flycheck::{FlycheckConfig, FlycheckHandle};
+use hir::db::DefDatabase;
+use ide::Change;
+use ide_db::base_db::{
+ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+ ProcMacroLoadResult, SourceRoot, VfsPath,
+};
+use proc_macro_api::{MacroDylib, ProcMacroServer};
+use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
+use syntax::SmolStr;
+use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
+
+use crate::{
+ config::{Config, FilesWatcher, LinkedProject},
+ global_state::GlobalState,
+ lsp_ext,
+ main_loop::Task,
+ op_queue::Cause,
+};
+
+/// Progress of the background `fetch_workspaces` operation; `End` carries the
+/// per-linked-project load results.
+#[derive(Debug)]
+pub(crate) enum ProjectWorkspaceProgress {
+ Begin,
+ Report(String),
+ End(Vec<anyhow::Result<ProjectWorkspace>>),
+}
+
+/// Progress of the background `fetch_build_data` operation; `End` carries the
+/// workspaces the scripts were run for plus the per-workspace results.
+#[derive(Debug)]
+pub(crate) enum BuildDataProgress {
+ Begin,
+ Report(String),
+ End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
+}
+
+impl GlobalState {
+ /// True when no background work is in flight: no workspace fetch, no
+ /// build-script fetch, and the vfs has finished loading the current config.
+ pub(crate) fn is_quiescent(&self) -> bool {
+ !(self.fetch_workspaces_queue.op_in_progress()
+ || self.fetch_build_data_queue.op_in_progress()
+ || self.vfs_progress_config_version < self.vfs_config_version
+ || self.vfs_progress_n_done < self.vfs_progress_n_total)
+ }
+
+ /// Installs a new `Config`, applying only the deltas that changed relative
+ /// to the old one (LRU size, linked projects, flycheck, proc-attr macros).
+ pub(crate) fn update_configuration(&mut self, config: Config) {
+ let _p = profile::span("GlobalState::update_configuration");
+ let old_config = mem::replace(&mut self.config, Arc::new(config));
+ if self.config.lru_capacity() != old_config.lru_capacity() {
+ self.analysis_host.update_lru_capacity(self.config.lru_capacity());
+ }
+ if self.config.linked_projects() != old_config.linked_projects() {
+ self.fetch_workspaces_queue.request_op("linked projects changed".to_string())
+ } else if self.config.flycheck() != old_config.flycheck() {
+ self.reload_flycheck();
+ }
+
+ if self.analysis_host.raw_database().enable_proc_attr_macros()
+ != self.config.expand_proc_attr_macros()
+ {
+ self.analysis_host
+ .raw_database_mut()
+ .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
+ }
+ }
+
+ /// Computes the `experimental/serverStatus` payload. Later checks overwrite
+ /// earlier ones, so workspace-load errors (Error) trump warnings.
+ pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
+ let mut status = lsp_ext::ServerStatusParams {
+ health: lsp_ext::Health::Ok,
+ quiescent: self.is_quiescent(),
+ message: None,
+ };
+
+ if self.proc_macro_changed {
+ status.health = lsp_ext::Health::Warning;
+ status.message =
+ Some("Reload required due to source changes of a procedural macro.".into())
+ }
+ // NOTE(review): `if let Err(_)` could be `.is_err()` for clarity.
+ if let Err(_) = self.fetch_build_data_error() {
+ status.health = lsp_ext::Health::Warning;
+ status.message =
+ Some("Failed to run build scripts of some packages, check the logs.".to_string());
+ }
+ if !self.config.cargo_autoreload()
+ && self.is_quiescent()
+ && self.fetch_workspaces_queue.op_requested()
+ {
+ status.health = lsp_ext::Health::Warning;
+ status.message = Some("Workspace reload required".to_string())
+ }
+
+ if let Err(error) = self.fetch_workspace_error() {
+ status.health = lsp_ext::Health::Error;
+ status.message = Some(error)
+ }
+ status
+ }
+
+ /// Loads all linked projects (and detached files) on the task pool,
+ /// reporting progress and the final results via `Task::FetchWorkspace`.
+ pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
+ tracing::info!(%cause, "will fetch workspaces");
+
+ self.task_pool.handle.spawn_with_sender({
+ // Snapshot the relevant config so the closure is self-contained.
+ let linked_projects = self.config.linked_projects();
+ let detached_files = self.config.detached_files().to_vec();
+ let cargo_config = self.config.cargo();
+
+ move |sender| {
+ let progress = {
+ let sender = sender.clone();
+ move |msg| {
+ sender
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
+ .unwrap()
+ }
+ };
+
+ sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();
+
+ let mut workspaces = linked_projects
+ .iter()
+ .map(|project| match project {
+ LinkedProject::ProjectManifest(manifest) => {
+ project_model::ProjectWorkspace::load(
+ manifest.clone(),
+ &cargo_config,
+ &progress,
+ )
+ }
+ LinkedProject::InlineJsonProject(it) => {
+ project_model::ProjectWorkspace::load_inline(
+ it.clone(),
+ cargo_config.target.as_deref(),
+ )
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if !detached_files.is_empty() {
+ workspaces
+ .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
+ }
+
+ tracing::info!("did fetch workspaces {:?}", workspaces);
+ sender
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
+ .unwrap();
+ }
+ });
+ }
+
+ /// Runs build scripts for all current workspaces on the task pool,
+ /// reporting progress and results via `Task::FetchBuildData`.
+ pub(crate) fn fetch_build_data(&mut self, cause: Cause) {
+ tracing::info!(%cause, "will fetch build data");
+ let workspaces = Arc::clone(&self.workspaces);
+ let config = self.config.cargo();
+ self.task_pool.handle.spawn_with_sender(move |sender| {
+ sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
+
+ let progress = {
+ let sender = sender.clone();
+ move |msg| {
+ sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
+ }
+ };
+ let mut res = Vec::new();
+ for ws in workspaces.iter() {
+ res.push(ws.run_build_scripts(&config, &progress));
+ }
+ sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
+ });
+ }
+
+ /// Applies the results of the last workspace/build-data fetch: updates
+ /// `self.workspaces`, (re)spawns proc-macro servers, reconfigures file
+ /// watching and the vfs loader, rebuilds the crate graph, and restarts
+ /// flycheck.
+ pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
+ let _p = profile::span("GlobalState::switch_workspaces");
+ tracing::info!(%cause, "will switch workspaces");
+
+ if let Err(error_message) = self.fetch_workspace_error() {
+ self.show_and_log_error(error_message, None);
+ if !self.workspaces.is_empty() {
+ // It only makes sense to switch to a partially broken workspace
+ // if we don't have any workspace at all yet.
+ return;
+ }
+ }
+
+ if let Err(error) = self.fetch_build_data_error() {
+ self.show_and_log_error(
+ "rust-analyzer failed to run build scripts".to_string(),
+ Some(error),
+ );
+ }
+
+ // Keep only the workspaces that loaded successfully.
+ let workspaces = self
+ .fetch_workspaces_queue
+ .last_op_result()
+ .iter()
+ .filter_map(|res| res.as_ref().ok().cloned())
+ .collect::<Vec<_>>();
+
+ // Compares workspaces while ignoring build-script results, so that a
+ // build-data update alone does not count as a workspace change.
+ fn eq_ignore_build_data<'a>(
+ left: &'a ProjectWorkspace,
+ right: &'a ProjectWorkspace,
+ ) -> bool {
+ let key = |p: &'a ProjectWorkspace| match p {
+ ProjectWorkspace::Cargo {
+ cargo,
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+
+ build_scripts: _,
+ } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
+ _ => None,
+ };
+ match (key(left), key(right)) {
+ (Some(lk), Some(rk)) => lk == rk,
+ _ => left == right,
+ }
+ }
+
+ let same_workspaces = workspaces.len() == self.workspaces.len()
+ && workspaces
+ .iter()
+ .zip(self.workspaces.iter())
+ .all(|(l, r)| eq_ignore_build_data(l, r));
+
+ if same_workspaces {
+ let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
+ if Arc::ptr_eq(workspaces, &self.workspaces) {
+ tracing::debug!("set build scripts to workspaces");
+
+ let workspaces = workspaces
+ .iter()
+ .cloned()
+ .zip(build_scripts)
+ .map(|(mut ws, bs)| {
+ ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
+ ws
+ })
+ .collect::<Vec<_>>();
+
+ // Workspaces are the same, but we've updated build data.
+ self.workspaces = Arc::new(workspaces);
+ } else {
+ tracing::info!("build scripts do not match the version of the active workspace");
+ // Current build scripts do not match the version of the active
+ // workspace, so there's nothing for us to update.
+ return;
+ }
+ } else {
+ tracing::debug!("abandon build scripts for workspaces");
+
+ // Here, we completely changed the workspace (Cargo.toml edit), so
+ // we don't care about build-script results, they are stale.
+ self.workspaces = Arc::new(workspaces)
+ }
+
+ // If the client does the file watching, (re)register our glob patterns.
+ if let FilesWatcher::Client = self.config.files().watcher {
+ let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
+ watchers: self
+ .workspaces
+ .iter()
+ .flat_map(|ws| ws.to_roots())
+ .filter(|it| it.is_local)
+ .flat_map(|root| {
+ root.include.into_iter().flat_map(|it| {
+ [
+ format!("{}/**/*.rs", it.display()),
+ format!("{}/**/Cargo.toml", it.display()),
+ format!("{}/**/Cargo.lock", it.display()),
+ ]
+ })
+ })
+ .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
+ .collect(),
+ };
+ let registration = lsp_types::Registration {
+ id: "workspace/didChangeWatchedFiles".to_string(),
+ method: "workspace/didChangeWatchedFiles".to_string(),
+ register_options: Some(serde_json::to_value(registration_options).unwrap()),
+ };
+ self.send_request::<lsp_types::request::RegisterCapability>(
+ lsp_types::RegistrationParams { registrations: vec![registration] },
+ |_, _| (),
+ );
+ }
+
+ let mut change = Change::new();
+
+ let files_config = self.config.files();
+ let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
+
+ let standalone_server_name =
+ format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
+
+ // Spawn one proc-macro server per workspace (only on first switch —
+ // existing clients are kept). A sysroot-provided standalone server
+ // under `libexec/` takes precedence over the configured path.
+ if self.proc_macro_clients.is_empty() {
+ if let Some((path, args)) = self.config.proc_macro_srv() {
+ self.proc_macro_clients = self
+ .workspaces
+ .iter()
+ .map(|ws| {
+ let mut args = args.clone();
+ let mut path = path.clone();
+
+ if let ProjectWorkspace::Cargo { sysroot, .. } = ws {
+ tracing::info!("Found a cargo workspace...");
+ if let Some(sysroot) = sysroot.as_ref() {
+ tracing::info!("Found a cargo workspace with a sysroot...");
+ let server_path =
+ sysroot.root().join("libexec").join(&standalone_server_name);
+ if std::fs::metadata(&server_path).is_ok() {
+ tracing::info!(
+ "And the server exists at {}",
+ server_path.display()
+ );
+ path = server_path;
+ args = vec![];
+ } else {
+ tracing::info!(
+ "And the server does not exist at {}",
+ server_path.display()
+ );
+ }
+ }
+ }
+
+ tracing::info!(
+ "Using proc-macro server at {} with args {:?}",
+ path.display(),
+ args
+ );
+ ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| {
+ let error = format!(
+ "Failed to run proc_macro_srv from path {}, error: {:?}",
+ path.display(),
+ err
+ );
+ tracing::error!(error);
+ error
+ })
+ })
+ .collect();
+ }
+ }
+
+ let watch = match files_config.watcher {
+ FilesWatcher::Client => vec![],
+ FilesWatcher::Server => project_folders.watch,
+ };
+ self.vfs_config_version += 1;
+ self.loader.handle.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch,
+ version: self.vfs_config_version,
+ });
+
+ // Create crate graph from all the workspaces
+ let crate_graph = {
+ let dummy_replacements = self.config.dummy_replacements();
+
+ let vfs = &mut self.vfs.write().0;
+ let loader = &mut self.loader;
+ let mem_docs = &self.mem_docs;
+ // Resolves a path to a vfs `FileId`, loading it from disk first
+ // unless the client owns the document (it's in `mem_docs`).
+ let mut load = move |path: &AbsPath| {
+ let _p = profile::span("GlobalState::load");
+ let vfs_path = vfs::VfsPath::from(path.to_path_buf());
+ if !mem_docs.contains(&vfs_path) {
+ let contents = loader.handle.load_sync(path);
+ vfs.set_file_contents(vfs_path.clone(), contents);
+ }
+ let res = vfs.file_id(&vfs_path);
+ if res.is_none() {
+ tracing::warn!("failed to load {}", path.display())
+ }
+ res
+ };
+
+ let mut crate_graph = CrateGraph::default();
+ for (idx, ws) in self.workspaces.iter().enumerate() {
+ let proc_macro_client = match self.proc_macro_clients.get(idx) {
+ Some(res) => res.as_ref().map_err(|e| &**e),
+ None => Err("Proc macros are disabled"),
+ };
+ let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
+ load_proc_macro(
+ proc_macro_client,
+ path,
+ dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
+ )
+ };
+ crate_graph.extend(ws.to_crate_graph(&mut load_proc_macro, &mut load));
+ }
+ crate_graph
+ };
+ change.set_crate_graph(crate_graph);
+
+ self.source_root_config = project_folders.source_root_config;
+
+ self.analysis_host.apply_change(change);
+ self.process_changes();
+ self.reload_flycheck();
+ tracing::info!("did switch workspaces");
+ }
+
+ /// Accumulates the errors from the last workspace fetch into one message;
+ /// `Ok(())` when every linked project loaded successfully.
+ fn fetch_workspace_error(&self) -> Result<(), String> {
+ let mut buf = String::new();
+
+ for ws in self.fetch_workspaces_queue.last_op_result() {
+ if let Err(err) = ws {
+ stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
+ }
+ }
+
+ if buf.is_empty() {
+ return Ok(());
+ }
+
+ Err(buf)
+ }
+
+ /// Accumulates build-script stderr/io errors from the last build-data
+ /// fetch into one message; `Ok(())` when all scripts ran cleanly.
+ fn fetch_build_data_error(&self) -> Result<(), String> {
+ let mut buf = String::new();
+
+ for ws in &self.fetch_build_data_queue.last_op_result().1 {
+ match ws {
+ Ok(data) => match data.error() {
+ Some(stderr) => stdx::format_to!(buf, "{:#}\n", stderr),
+ _ => (),
+ },
+ // io errors
+ Err(err) => stdx::format_to!(buf, "{:#}\n", err),
+ }
+ }
+
+ if buf.is_empty() {
+ Ok(())
+ } else {
+ Err(buf)
+ }
+ }
+
+ /// Respawns one flycheck (cargo check) handle per eligible workspace,
+ /// or clears them all if flycheck is disabled in the config.
+ fn reload_flycheck(&mut self) {
+ let _p = profile::span("GlobalState::reload_flycheck");
+ let config = match self.config.flycheck() {
+ Some(it) => it,
+ None => {
+ self.flycheck = Vec::new();
+ self.diagnostics.clear_check();
+ return;
+ }
+ };
+
+ let sender = self.flycheck_sender.clone();
+ self.flycheck = self
+ .workspaces
+ .iter()
+ .enumerate()
+ .filter_map(|(id, w)| match w {
+ ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
+ ProjectWorkspace::Json { project, .. } => {
+ // Enable flychecks for json projects if a custom flycheck command was supplied
+ // in the workspace configuration.
+ match config {
+ FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
+ _ => None,
+ }
+ }
+ ProjectWorkspace::DetachedFiles { .. } => None,
+ })
+ .map(|(id, root)| {
+ let sender = sender.clone();
+ FlycheckHandle::spawn(
+ id,
+ Box::new(move |msg| sender.send(msg).unwrap()),
+ config.clone(),
+ root.to_path_buf(),
+ )
+ })
+ .collect();
+ }
+}
+
+/// The vfs loader entries and source-root layout derived from the current
+/// workspaces: what to load, which entries to watch, and how files partition
+/// into source roots.
+#[derive(Default)]
+pub(crate) struct ProjectFolders {
+ pub(crate) load: Vec<vfs::loader::Entry>,
+ // Indices into `load` for entries that should be watched (local roots).
+ pub(crate) watch: Vec<usize>,
+ pub(crate) source_root_config: SourceRootConfig,
+}
+
+impl ProjectFolders {
+ /// Builds folder config from workspace roots, applying `global_excludes`
+ /// to any root whose include set overlaps an excluded path.
+ pub(crate) fn new(
+ workspaces: &[ProjectWorkspace],
+ global_excludes: &[AbsPathBuf],
+ ) -> ProjectFolders {
+ let mut res = ProjectFolders::default();
+ let mut fsc = FileSetConfig::builder();
+ let mut local_filesets = vec![];
+
+ for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
+ let file_set_roots: Vec<VfsPath> =
+ root.include.iter().cloned().map(VfsPath::from).collect();
+
+ let entry = {
+ let mut dirs = vfs::loader::Directories::default();
+ dirs.extensions.push("rs".into());
+ dirs.include.extend(root.include);
+ dirs.exclude.extend(root.exclude);
+ // A global exclude applies only if it is related to one of the
+ // include paths (ancestor or descendant).
+ for excl in global_excludes {
+ if dirs
+ .include
+ .iter()
+ .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
+ {
+ dirs.exclude.push(excl.clone());
+ }
+ }
+
+ vfs::loader::Entry::Directories(dirs)
+ };
+
+ // Only local (workspace-member) roots are watched for changes.
+ if root.is_local {
+ res.watch.push(res.load.len());
+ }
+ res.load.push(entry);
+
+ if root.is_local {
+ local_filesets.push(fsc.len());
+ }
+ fsc.add_file_set(file_set_roots)
+ }
+
+ let fsc = fsc.build();
+ res.source_root_config = SourceRootConfig { fsc, local_filesets };
+
+ res
+ }
+}
+
+/// How vfs files partition into source roots, plus which of those filesets
+/// are local (workspace members) rather than library dependencies.
+#[derive(Default, Debug)]
+pub(crate) struct SourceRootConfig {
+ pub(crate) fsc: FileSetConfig,
+ pub(crate) local_filesets: Vec<usize>,
+}
+
+impl SourceRootConfig {
+ /// Partitions the vfs into `SourceRoot`s, marking each one local or
+ /// library according to `local_filesets`.
+ pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
+ let _p = profile::span("SourceRootConfig::partition");
+ self.fsc
+ .partition(vfs)
+ .into_iter()
+ .enumerate()
+ .map(|(idx, file_set)| {
+ let is_local = self.local_filesets.contains(&idx);
+ if is_local {
+ SourceRoot::new_local(file_set)
+ } else {
+ SourceRoot::new_library(file_set)
+ }
+ })
+ .collect()
+ }
+}
+
+/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
+/// with an identity dummy expander.
+///
+/// Returns `Err` (with a human-readable message) if the server is unavailable,
+/// the dylib cannot be opened, or it exposes no proc macros.
+pub(crate) fn load_proc_macro(
+ server: Result<&ProcMacroServer, &str>,
+ path: &AbsPath,
+ dummy_replace: &[Box<str>],
+) -> ProcMacroLoadResult {
+ let res: Result<Vec<_>, String> = (|| {
+ let dylib = MacroDylib::new(path.to_path_buf())
+ .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
+ let server = server.map_err(ToOwned::to_owned)?;
+ let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ if vec.is_empty() {
+ return Err("proc macro library returned no proc macros".to_string());
+ }
+ Ok(vec
+ .into_iter()
+ .map(|expander| expander_to_proc_macro(expander, dummy_replace))
+ .collect())
+ })();
+ return match res {
+ Ok(proc_macros) => {
+ tracing::info!(
+ "Loaded proc-macros for {}: {:?}",
+ path.display(),
+ proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
+ );
+ Ok(proc_macros)
+ }
+ Err(e) => {
+ tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
+ Err(e)
+ }
+ };
+
+ // Converts a server-side expander into a base-db `ProcMacro`, swapping
+ // in `DummyExpander` for names listed in `dummy_replace`.
+ fn expander_to_proc_macro(
+ expander: proc_macro_api::ProcMacro,
+ dummy_replace: &[Box<str>],
+ ) -> ProcMacro {
+ let name = SmolStr::from(expander.name());
+ let kind = match expander.kind() {
+ proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
+ proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
+ proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
+ };
+ let expander: Arc<dyn ProcMacroExpander> =
+ if dummy_replace.iter().any(|replace| &**replace == name) {
+ Arc::new(DummyExpander)
+ } else {
+ Arc::new(Expander(expander))
+ };
+ ProcMacro { name, kind, expander }
+ }
+
+ /// Adapter from the proc-macro-api client to the `ProcMacroExpander` trait.
+ #[derive(Debug)]
+ struct Expander(proc_macro_api::ProcMacro);
+
+ impl ProcMacroExpander for Expander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ attrs: Option<&tt::Subtree>,
+ env: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+ match self.0.expand(subtree, attrs, env) {
+ Ok(Ok(subtree)) => Ok(subtree),
+ // Outer Err: transport/system failure; inner Err: macro panicked.
+ Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+ Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+ }
+ }
+ }
+
+ /// Dummy identity expander, used for proc-macros that are deliberately ignored by the user.
+ #[derive(Debug)]
+ struct DummyExpander;
+
+ impl ProcMacroExpander for DummyExpander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(subtree.clone())
+ }
+ }
+}
+
+/// Decides whether a file-system change warrants refetching the workspace.
+///
+/// True for any change to `Cargo.toml`/`Cargo.lock` or `.cargo/config(.toml)`,
+/// and for creation/deletion (not modification) of `.rs` files that can affect
+/// cargo's implicit target discovery (`src/main.rs`, `src/bin/`, `examples/`,
+/// `tests/`, `benches/`, etc.).
+pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
+ const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
+ const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
+ let file_name = path.file_name().unwrap_or_default();
+
+ if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
+ return true;
+ }
+ // Editing file contents cannot add/remove targets; only create/delete can.
+ if change_kind == ChangeKind::Modify {
+ return false;
+ }
+ if path.extension().unwrap_or_default() != "rs" {
+ // `.cargo/config` and `.cargo/config.toml` affect the build too.
+ if (file_name == "config.toml" || file_name == "config")
+ && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")) == Some(true)
+ {
+ return true;
+ }
+ return false;
+ }
+ if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
+ return true;
+ }
+ let parent = match path.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
+ return true;
+ }
+ // e.g. `src/bin/foo/main.rs` — the target dir is the grandparent.
+ if file_name == "main.rs" {
+ let grand_parent = match parent.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
+ return true;
+ }
+ }
+ false
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
new file mode 100644
index 000000000..6c78b5df1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
@@ -0,0 +1,301 @@
+//! Semantic Tokens helpers
+
+use std::ops;
+
+use lsp_types::{
+ Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
+ SemanticTokensEdit,
+};
+
/// Declares a `SemanticTokenType` constant per `(IDENT, "string")` pair and
/// collects them, after the standard LSP token types, into the
/// `SUPPORTED_TYPES` legend. A token's index into that slice is the
/// `token_type` value sent on the wire.
macro_rules! define_semantic_token_types {
    ($(($ident:ident, $string:literal)),*$(,)?) => {
        $(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)*

        /// All token types this server may emit, standard ones first,
        /// custom rust-analyzer ones after.
        pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
            SemanticTokenType::COMMENT,
            SemanticTokenType::KEYWORD,
            SemanticTokenType::STRING,
            SemanticTokenType::NUMBER,
            SemanticTokenType::REGEXP,
            SemanticTokenType::OPERATOR,
            SemanticTokenType::NAMESPACE,
            SemanticTokenType::TYPE,
            SemanticTokenType::STRUCT,
            SemanticTokenType::CLASS,
            SemanticTokenType::INTERFACE,
            SemanticTokenType::ENUM,
            SemanticTokenType::ENUM_MEMBER,
            SemanticTokenType::TYPE_PARAMETER,
            SemanticTokenType::FUNCTION,
            SemanticTokenType::METHOD,
            SemanticTokenType::PROPERTY,
            SemanticTokenType::MACRO,
            SemanticTokenType::VARIABLE,
            SemanticTokenType::PARAMETER,
            $($ident),*
        ];
    };
}

// Custom rust-analyzer token types, in addition to the standard LSP set.
define_semantic_token_types![
    (ANGLE, "angle"),
    (ARITHMETIC, "arithmetic"),
    (ATTRIBUTE, "attribute"),
    (ATTRIBUTE_BRACKET, "attributeBracket"),
    (BITWISE, "bitwise"),
    (BOOLEAN, "boolean"),
    (BRACE, "brace"),
    (BRACKET, "bracket"),
    (BUILTIN_ATTRIBUTE, "builtinAttribute"),
    (BUILTIN_TYPE, "builtinType"),
    (CHAR, "character"),
    (COLON, "colon"),
    (COMMA, "comma"),
    (COMPARISON, "comparison"),
    (CONST_PARAMETER, "constParameter"),
    (DERIVE, "derive"),
    (DERIVE_HELPER, "deriveHelper"),
    (DOT, "dot"),
    (ESCAPE_SEQUENCE, "escapeSequence"),
    (FORMAT_SPECIFIER, "formatSpecifier"),
    (GENERIC, "generic"),
    (LABEL, "label"),
    (LIFETIME, "lifetime"),
    (LOGICAL, "logical"),
    (MACRO_BANG, "macroBang"),
    (OPERATOR, "operator"),
    (PARENTHESIS, "parenthesis"),
    (PUNCTUATION, "punctuation"),
    (SELF_KEYWORD, "selfKeyword"),
    (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
    (SEMICOLON, "semicolon"),
    (TYPE_ALIAS, "typeAlias"),
    (TOOL_MODULE, "toolModule"),
    (UNION, "union"),
    (UNRESOLVED_REFERENCE, "unresolvedReference"),
];
+
/// Declares a `SemanticTokenModifier` constant per `(IDENT, "string")` pair
/// and collects them, after the standard LSP modifiers, into
/// `SUPPORTED_MODIFIERS`. A modifier's index in that slice is its bit
/// position in the `token_modifiers_bitset` sent on the wire (see
/// `ModifierSet`).
macro_rules! define_semantic_token_modifiers {
    ($(($ident:ident, $string:literal)),*$(,)?) => {
        $(pub(crate) const $ident: SemanticTokenModifier = SemanticTokenModifier::new($string);)*

        /// All modifiers this server may emit, standard ones first,
        /// custom rust-analyzer ones after.
        pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
            SemanticTokenModifier::DOCUMENTATION,
            SemanticTokenModifier::DECLARATION,
            SemanticTokenModifier::DEFINITION,
            SemanticTokenModifier::STATIC,
            SemanticTokenModifier::ABSTRACT,
            SemanticTokenModifier::DEPRECATED,
            SemanticTokenModifier::READONLY,
            SemanticTokenModifier::DEFAULT_LIBRARY,
            $($ident),*
        ];
    };
}

// Custom rust-analyzer modifiers, in addition to the standard LSP set.
define_semantic_token_modifiers![
    (ASYNC, "async"),
    (ATTRIBUTE_MODIFIER, "attribute"),
    (CALLABLE, "callable"),
    (CONSTANT, "constant"),
    (CONSUMING, "consuming"),
    (CONTROL_FLOW, "controlFlow"),
    (CRATE_ROOT, "crateRoot"),
    (INJECTED, "injected"),
    (INTRA_DOC_LINK, "intraDocLink"),
    (LIBRARY, "library"),
    (MUTABLE, "mutable"),
    (PUBLIC, "public"),
    (REFERENCE, "reference"),
    (TRAIT_MODIFIER, "trait"),
    (UNSAFE, "unsafe"),
];
+
+#[derive(Default)]
+pub(crate) struct ModifierSet(pub(crate) u32);
+
+impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
+ fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
+ let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
+ self.0 |= 1 << idx;
+ }
+}
+
/// Tokens are encoded relative to each other.
///
/// This is a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
pub(crate) struct SemanticTokensBuilder {
    // Result id handed back to the client so it can request deltas later.
    id: String,
    // Absolute line/character of the previously pushed token, needed to
    // compute the next token's deltas.
    prev_line: u32,
    prev_char: u32,
    data: Vec<SemanticToken>,
}

impl SemanticTokensBuilder {
    pub(crate) fn new(id: String) -> Self {
        SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
    }

    /// Push a new token onto the builder
    ///
    /// NOTE(review): the delta subtractions assume `range` is single-line
    /// and that tokens arrive in document order; out-of-order pushes would
    /// underflow.
    pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
        let mut push_line = range.start.line as u32;
        let mut push_char = range.start.character as u32;

        // All tokens after the first are encoded relative to the previous
        // one: line delta, and (within the same line) start-character delta.
        if !self.data.is_empty() {
            push_line -= self.prev_line;
            if push_line == 0 {
                push_char -= self.prev_char;
            }
        }

        // A token cannot be multiline
        let token_len = range.end.character - range.start.character;

        let token = SemanticToken {
            delta_line: push_line,
            delta_start: push_char,
            length: token_len as u32,
            token_type: token_index,
            token_modifiers_bitset: modifier_bitset,
        };

        self.data.push(token);

        self.prev_line = range.start.line as u32;
        self.prev_char = range.start.character as u32;
    }

    /// Finishes the builder, producing the full LSP response payload.
    pub(crate) fn build(self) -> SemanticTokens {
        SemanticTokens { result_id: Some(self.id), data: self.data }
    }
}
+
+pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
+ let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
+
+ let (_, old) = old.split_at(offset);
+ let (_, new) = new.split_at(offset);
+
+ let offset_from_end =
+ new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
+
+ let (old, _) = old.split_at(old.len() - offset_from_end);
+ let (new, _) = new.split_at(new.len() - offset_from_end);
+
+ if old.is_empty() && new.is_empty() {
+ vec![]
+ } else {
+ // The lsp data field is actually a byte-diff but we
+ // travel in tokens so `start` and `delete_count` are in multiples of the
+ // serialized size of `SemanticToken`.
+ vec![SemanticTokensEdit {
+ start: 5 * offset as u32,
+ delete_count: 5 * old.len() as u32,
+ data: Some(new.into()),
+ }]
+ }
+}
+
+pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
+ SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
+}
+
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a `SemanticToken` from a `(delta_line, delta_start, length,
    /// token_type, token_modifiers_bitset)` tuple.
    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
        SemanticToken {
            delta_line: t.0,
            delta_start: t.1,
            length: t.2,
            token_type: t.3,
            token_modifiers_bitset: t.4,
        }
    }

    // `start` and `delete_count` in the expected edits are in multiples of
    // 5, the serialized field count of `SemanticToken`.
    #[test]
    fn test_diff_insert_at_end() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 10,
                delete_count: 0,
                data: Some(vec![from((11, 12, 13, 14, 15))])
            }
        );
    }

    #[test]
    fn test_diff_insert_at_beginning() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 0,
                delete_count: 0,
                data: Some(vec![from((11, 12, 13, 14, 15))])
            }
        );
    }

    #[test]
    fn test_diff_insert_in_middle() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [
            from((1, 2, 3, 4, 5)),
            from((10, 20, 30, 40, 50)),
            from((60, 70, 80, 90, 100)),
            from((6, 7, 8, 9, 10)),
        ];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 5,
                delete_count: 0,
                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
            }
        );
    }

    #[test]
    fn test_diff_remove_from_end() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_beginning() {
        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_middle() {
        let before = [
            from((1, 2, 3, 4, 5)),
            from((10, 20, 30, 40, 50)),
            from((60, 70, 80, 90, 100)),
            from((6, 7, 8, 9, 10)),
        ];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
    }
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs
new file mode 100644
index 000000000..aeeb3b7c5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs
@@ -0,0 +1,52 @@
+//! A thin wrapper around `ThreadPool` to make sure that we join all things
+//! properly.
+use crossbeam_channel::Sender;
+
+pub(crate) struct TaskPool<T> {
+ sender: Sender<T>,
+ inner: threadpool::ThreadPool,
+}
+
+impl<T> TaskPool<T> {
+ pub(crate) fn new(sender: Sender<T>) -> TaskPool<T> {
+ const STACK_SIZE: usize = 8 * 1024 * 1024;
+
+ let inner = threadpool::Builder::new()
+ .thread_name("Worker".into())
+ .thread_stack_size(STACK_SIZE)
+ .build();
+ TaskPool { sender, inner }
+ }
+
+ pub(crate) fn spawn<F>(&mut self, task: F)
+ where
+ F: FnOnce() -> T + Send + 'static,
+ T: Send + 'static,
+ {
+ self.inner.execute({
+ let sender = self.sender.clone();
+ move || sender.send(task()).unwrap()
+ })
+ }
+
+ pub(crate) fn spawn_with_sender<F>(&mut self, task: F)
+ where
+ F: FnOnce(Sender<T>) + Send + 'static,
+ T: Send + 'static,
+ {
+ self.inner.execute({
+ let sender = self.sender.clone();
+ move || task(sender)
+ })
+ }
+
+ pub(crate) fn len(&self) -> usize {
+ self.inner.queued_count()
+ }
+}
+
+impl<T> Drop for TaskPool<T> {
+ fn drop(&mut self) {
+ self.inner.join()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
new file mode 100644
index 000000000..7f4fa57fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
@@ -0,0 +1,1397 @@
+//! Conversion of rust-analyzer specific types to lsp_types equivalents.
+use std::{
+ iter::once,
+ path,
+ sync::atomic::{AtomicU32, Ordering},
+};
+
+use ide::{
+ Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem,
+ CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
+ Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
+ InlayKind, Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity,
+ SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize,
+};
+use itertools::Itertools;
+use serde_json::to_value;
+use vfs::AbsPath;
+
+use crate::{
+ cargo_target_spec::CargoTargetSpec,
+ config::{CallInfoConfig, Config},
+ global_state::GlobalStateSnapshot,
+ line_index::{LineEndings, LineIndex, OffsetEncoding},
+ lsp_ext,
+ lsp_utils::invalid_params_error,
+ semantic_tokens, Result,
+};
+
+pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
+ let line_col = line_index.index.line_col(offset);
+ match line_index.encoding {
+ OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
+ OffsetEncoding::Utf16 => {
+ let line_col = line_index.index.to_utf16(line_col);
+ lsp_types::Position::new(line_col.line, line_col.col)
+ }
+ }
+}
+
+pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range {
+ let start = position(line_index, range.start());
+ let end = position(line_index, range.end());
+ lsp_types::Range::new(start, end)
+}
+
/// Maps rust-analyzer's `SymbolKind` onto the closest LSP `SymbolKind`.
///
/// LSP has no dedicated kinds for several Rust concepts, so some mappings
/// are approximations: traits become `INTERFACE`, unions become `STRUCT`,
/// macros/attributes/derives become `FUNCTION`, statics/consts become
/// `CONSTANT`.
pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
    match symbol_kind {
        SymbolKind::Function => lsp_types::SymbolKind::FUNCTION,
        SymbolKind::Struct => lsp_types::SymbolKind::STRUCT,
        SymbolKind::Enum => lsp_types::SymbolKind::ENUM,
        SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER,
        SymbolKind::Trait => lsp_types::SymbolKind::INTERFACE,
        SymbolKind::Macro
        | SymbolKind::BuiltinAttr
        | SymbolKind::Attribute
        | SymbolKind::Derive
        | SymbolKind::DeriveHelper => lsp_types::SymbolKind::FUNCTION,
        SymbolKind::Module | SymbolKind::ToolModule => lsp_types::SymbolKind::MODULE,
        SymbolKind::TypeAlias | SymbolKind::TypeParam | SymbolKind::SelfType => {
            lsp_types::SymbolKind::TYPE_PARAMETER
        }
        SymbolKind::Field => lsp_types::SymbolKind::FIELD,
        SymbolKind::Static => lsp_types::SymbolKind::CONSTANT,
        SymbolKind::Const => lsp_types::SymbolKind::CONSTANT,
        SymbolKind::ConstParam => lsp_types::SymbolKind::CONSTANT,
        SymbolKind::Impl => lsp_types::SymbolKind::OBJECT,
        SymbolKind::Local
        | SymbolKind::SelfParam
        | SymbolKind::LifetimeParam
        | SymbolKind::ValueParam
        | SymbolKind::Label => lsp_types::SymbolKind::VARIABLE,
        SymbolKind::Union => lsp_types::SymbolKind::STRUCT,
    }
}
+
+pub(crate) fn structure_node_kind(kind: StructureNodeKind) -> lsp_types::SymbolKind {
+ match kind {
+ StructureNodeKind::SymbolKind(symbol) => symbol_kind(symbol),
+ StructureNodeKind::Region => lsp_types::SymbolKind::NAMESPACE,
+ }
+}
+
+pub(crate) fn document_highlight_kind(
+ category: ReferenceCategory,
+) -> lsp_types::DocumentHighlightKind {
+ match category {
+ ReferenceCategory::Read => lsp_types::DocumentHighlightKind::READ,
+ ReferenceCategory::Write => lsp_types::DocumentHighlightKind::WRITE,
+ }
+}
+
/// Maps ide diagnostic severity onto LSP severity; `WeakWarning` is
/// reported as `HINT`.
pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
    match severity {
        Severity::Error => lsp_types::DiagnosticSeverity::ERROR,
        Severity::WeakWarning => lsp_types::DiagnosticSeverity::HINT,
    }
}
+
+pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
+ let value = crate::markdown::format_docs(documentation.as_str());
+ let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
+ lsp_types::Documentation::MarkupContent(markup_content)
+}
+
/// Maps an ide completion-item kind (including wrapped symbol kinds) onto
/// the closest LSP `CompletionItemKind`. As with `symbol_kind`, several
/// Rust concepts have no exact LSP counterpart and are approximated.
pub(crate) fn completion_item_kind(
    completion_item_kind: CompletionItemKind,
) -> lsp_types::CompletionItemKind {
    match completion_item_kind {
        CompletionItemKind::Binding => lsp_types::CompletionItemKind::VARIABLE,
        CompletionItemKind::BuiltinType => lsp_types::CompletionItemKind::STRUCT,
        CompletionItemKind::InferredType => lsp_types::CompletionItemKind::SNIPPET,
        CompletionItemKind::Keyword => lsp_types::CompletionItemKind::KEYWORD,
        CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD,
        CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET,
        CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE,
        CompletionItemKind::SymbolKind(symbol) => match symbol {
            SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION,
            SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
            SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
            SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION,
            SymbolKind::DeriveHelper => lsp_types::CompletionItemKind::FUNCTION,
            SymbolKind::Enum => lsp_types::CompletionItemKind::ENUM,
            SymbolKind::Field => lsp_types::CompletionItemKind::FIELD,
            SymbolKind::Function => lsp_types::CompletionItemKind::FUNCTION,
            SymbolKind::Impl => lsp_types::CompletionItemKind::TEXT,
            SymbolKind::Label => lsp_types::CompletionItemKind::VARIABLE,
            SymbolKind::LifetimeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
            SymbolKind::Local => lsp_types::CompletionItemKind::VARIABLE,
            SymbolKind::Macro => lsp_types::CompletionItemKind::FUNCTION,
            SymbolKind::Module => lsp_types::CompletionItemKind::MODULE,
            SymbolKind::SelfParam => lsp_types::CompletionItemKind::VALUE,
            SymbolKind::SelfType => lsp_types::CompletionItemKind::TYPE_PARAMETER,
            SymbolKind::Static => lsp_types::CompletionItemKind::VALUE,
            SymbolKind::Struct => lsp_types::CompletionItemKind::STRUCT,
            SymbolKind::Trait => lsp_types::CompletionItemKind::INTERFACE,
            SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT,
            SymbolKind::TypeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
            SymbolKind::Union => lsp_types::CompletionItemKind::STRUCT,
            SymbolKind::ValueParam => lsp_types::CompletionItemKind::VALUE,
            SymbolKind::Variant => lsp_types::CompletionItemKind::ENUM_MEMBER,
            SymbolKind::BuiltinAttr => lsp_types::CompletionItemKind::FUNCTION,
            SymbolKind::ToolModule => lsp_types::CompletionItemKind::MODULE,
        },
    }
}
+
+pub(crate) fn text_edit(line_index: &LineIndex, indel: Indel) -> lsp_types::TextEdit {
+ let range = range(line_index, indel.delete);
+ let new_text = match line_index.endings {
+ LineEndings::Unix => indel.insert,
+ LineEndings::Dos => indel.insert.replace('\n', "\r\n"),
+ };
+ lsp_types::TextEdit { range, new_text }
+}
+
+pub(crate) fn completion_text_edit(
+ line_index: &LineIndex,
+ insert_replace_support: Option<lsp_types::Position>,
+ indel: Indel,
+) -> lsp_types::CompletionTextEdit {
+ let text_edit = text_edit(line_index, indel);
+ match insert_replace_support {
+ Some(cursor_pos) => lsp_types::InsertReplaceEdit {
+ new_text: text_edit.new_text,
+ insert: lsp_types::Range { start: text_edit.range.start, end: cursor_pos },
+ replace: text_edit.range,
+ }
+ .into(),
+ None => text_edit.into(),
+ }
+}
+
+pub(crate) fn snippet_text_edit(
+ line_index: &LineIndex,
+ is_snippet: bool,
+ indel: Indel,
+) -> lsp_ext::SnippetTextEdit {
+ let text_edit = text_edit(line_index, indel);
+ let insert_text_format =
+ if is_snippet { Some(lsp_types::InsertTextFormat::SNIPPET) } else { None };
+ lsp_ext::SnippetTextEdit {
+ range: text_edit.range,
+ new_text: text_edit.new_text,
+ insert_text_format,
+ annotation_id: None,
+ }
+}
+
+pub(crate) fn text_edit_vec(
+ line_index: &LineIndex,
+ text_edit: TextEdit,
+) -> Vec<lsp_types::TextEdit> {
+ text_edit.into_iter().map(|indel| self::text_edit(line_index, indel)).collect()
+}
+
+pub(crate) fn snippet_text_edit_vec(
+ line_index: &LineIndex,
+ is_snippet: bool,
+ text_edit: TextEdit,
+) -> Vec<lsp_ext::SnippetTextEdit> {
+ text_edit
+ .into_iter()
+ .map(|indel| self::snippet_text_edit(line_index, is_snippet, indel))
+ .collect()
+}
+
+pub(crate) fn completion_items(
+ config: &Config,
+ line_index: &LineIndex,
+ tdpp: lsp_types::TextDocumentPositionParams,
+ items: Vec<CompletionItem>,
+) -> Vec<lsp_types::CompletionItem> {
+ let max_relevance = items.iter().map(|it| it.relevance().score()).max().unwrap_or_default();
+ let mut res = Vec::with_capacity(items.len());
+ for item in items {
+ completion_item(&mut res, config, line_index, &tdpp, max_relevance, item)
+ }
+ res
+}
+
/// Converts one ide `CompletionItem` into one or two LSP completion items
/// pushed onto `acc` (a second item is emitted for `&`/`&mut` ref-match
/// variants).
fn completion_item(
    acc: &mut Vec<lsp_types::CompletionItem>,
    config: &Config,
    line_index: &LineIndex,
    tdpp: &lsp_types::TextDocumentPositionParams,
    max_relevance: u32,
    item: CompletionItem,
) {
    let insert_replace_support = config.insert_replace_support().then(|| tdpp.position);
    let mut additional_text_edits = Vec::new();

    // LSP does not allow arbitrary edits in completion, so we have to do a
    // non-trivial mapping here.
    //
    // The edit covering the completion's source range becomes the primary
    // `text_edit`; all other edits go into `additional_text_edits`.
    let text_edit = {
        let mut text_edit = None;
        let source_range = item.source_range();
        for indel in item.text_edit().iter() {
            if indel.delete.contains_range(source_range) {
                text_edit = Some(if indel.delete == source_range {
                    self::completion_text_edit(line_index, insert_replace_support, indel.clone())
                } else {
                    // The edit extends to the left of the source range:
                    // split it so the primary edit covers exactly the
                    // source range and the prefix is deleted separately.
                    assert!(source_range.end() == indel.delete.end());
                    let range1 = TextRange::new(indel.delete.start(), source_range.start());
                    let range2 = source_range;
                    let indel1 = Indel::replace(range1, String::new());
                    let indel2 = Indel::replace(range2, indel.insert.clone());
                    additional_text_edits.push(self::text_edit(line_index, indel1));
                    self::completion_text_edit(line_index, insert_replace_support, indel2)
                })
            } else {
                // Edits disjoint from the source range become additional
                // edits as-is.
                assert!(source_range.intersect(indel.delete).is_none());
                let text_edit = self::text_edit(line_index, indel.clone());
                additional_text_edits.push(text_edit);
            }
        }
        text_edit.unwrap()
    };

    let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET);
    let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
    let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints {
        Some(command::trigger_parameter_hints())
    } else {
        None
    };

    let mut lsp_item = lsp_types::CompletionItem {
        label: item.label().to_string(),
        detail: item.detail().map(|it| it.to_string()),
        filter_text: Some(item.lookup().to_string()),
        kind: Some(completion_item_kind(item.kind())),
        text_edit: Some(text_edit),
        additional_text_edits: Some(additional_text_edits),
        documentation: item.documentation().map(documentation),
        deprecated: Some(item.deprecated()),
        tags,
        command,
        insert_text_format,
        ..Default::default()
    };

    if config.completion_label_details_support() {
        lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
            detail: None,
            description: lsp_item.detail.clone(),
        });
    }

    set_score(&mut lsp_item, max_relevance, item.relevance());

    // Attach resolve data describing pending auto-imports so the client can
    // resolve them lazily.
    if config.completion().enable_imports_on_the_fly {
        if let imports @ [_, ..] = item.imports_to_add() {
            let imports: Vec<_> = imports
                .iter()
                .filter_map(|import_edit| {
                    let import_path = &import_edit.import_path;
                    let import_name = import_path.segments().last()?;
                    Some(lsp_ext::CompletionImport {
                        full_import_path: import_path.to_string(),
                        imported_name: import_name.to_string(),
                    })
                })
                .collect();
            if !imports.is_empty() {
                let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports };
                lsp_item.data = Some(to_value(data).unwrap());
            }
        }
    }

    // If the item also applies behind a reference, emit a second completion
    // that additionally inserts the `&`/`&mut` operator.
    if let Some((mutability, offset, relevance)) = item.ref_match() {
        let mut lsp_item_with_ref = lsp_item.clone();
        set_score(&mut lsp_item_with_ref, max_relevance, relevance);
        lsp_item_with_ref.label =
            format!("&{}{}", mutability.as_keyword_for_ref(), lsp_item_with_ref.label);
        lsp_item_with_ref.additional_text_edits.get_or_insert_with(Default::default).push(
            self::text_edit(
                line_index,
                Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())),
            ),
        );

        acc.push(lsp_item_with_ref);
    };

    acc.push(lsp_item);

    // Marks the best-scoring relevant item as preselected and derives
    // `sort_text` from the inverted relevance score.
    fn set_score(
        res: &mut lsp_types::CompletionItem,
        max_relevance: u32,
        relevance: CompletionRelevance,
    ) {
        if relevance.is_relevant() && relevance.score() == max_relevance {
            res.preselect = Some(true);
        }
        // The relevance needs to be inverted to come up with a sort score
        // because the client will sort ascending.
        let sort_score = relevance.score() ^ 0xFF_FF_FF_FF;
        // Zero pad the string to ensure values can be properly sorted
        // by the client. Hex format is used because it is easier to
        // visually compare very large values, which the sort text
        // tends to be since it is the opposite of the score.
        res.sort_text = Some(format!("{:08x}", sort_score));
    }
}
+
/// Builds an LSP `SignatureHelp` from ide call info.
///
/// `config.params_only` selects between the full signature and a
/// comma-joined parameter list as the label; `label_offsets` says whether
/// the client supports `ParameterLabel::LabelOffsets`.
pub(crate) fn signature_help(
    call_info: SignatureHelp,
    config: CallInfoConfig,
    label_offsets: bool,
) -> lsp_types::SignatureHelp {
    let (label, parameters) = match (config.params_only, label_offsets) {
        // No offset support: parameter labels are plain strings. Note that
        // `concise` binds `config.params_only` here.
        (concise, false) => {
            let params = call_info
                .parameter_labels()
                .map(|label| lsp_types::ParameterInformation {
                    label: lsp_types::ParameterLabel::Simple(label.to_string()),
                    documentation: None,
                })
                .collect::<Vec<_>>();
            let label =
                if concise { call_info.parameter_labels().join(", ") } else { call_info.signature };
            (label, params)
        }
        // Full signature with offset-based labels; offsets are counted in
        // `char`s of the signature string.
        (false, true) => {
            let params = call_info
                .parameter_ranges()
                .iter()
                .map(|it| {
                    let start = call_info.signature[..it.start().into()].chars().count() as u32;
                    let end = call_info.signature[..it.end().into()].chars().count() as u32;
                    [start, end]
                })
                .map(|label_offsets| lsp_types::ParameterInformation {
                    label: lsp_types::ParameterLabel::LabelOffsets(label_offsets),
                    documentation: None,
                })
                .collect::<Vec<_>>();
            (call_info.signature, params)
        }
        // Params-only label with offsets: build the label and the offsets
        // in one pass.
        (true, true) => {
            let mut params = Vec::new();
            let mut label = String::new();
            let mut first = true;
            for param in call_info.parameter_labels() {
                if !first {
                    label.push_str(", ");
                }
                first = false;
                let start = label.chars().count() as u32;
                label.push_str(param);
                let end = label.chars().count() as u32;
                params.push(lsp_types::ParameterInformation {
                    label: lsp_types::ParameterLabel::LabelOffsets([start, end]),
                    documentation: None,
                });
            }

            (label, params)
        }
    };

    // Docs are only attached when enabled in the config.
    let documentation = call_info.doc.filter(|_| config.docs).map(|doc| {
        lsp_types::Documentation::MarkupContent(lsp_types::MarkupContent {
            kind: lsp_types::MarkupKind::Markdown,
            value: doc,
        })
    });

    let active_parameter = call_info.active_parameter.map(|it| it as u32);

    let signature = lsp_types::SignatureInformation {
        label,
        documentation,
        parameters: Some(parameters),
        active_parameter,
    };
    lsp_types::SignatureHelp {
        signatures: vec![signature],
        active_signature: Some(0),
        active_parameter,
    }
}
+
/// Converts an ide `InlayHint` into the LSP representation, deciding
/// position, padding, label text, kind, tooltip, and lazy-resolve data
/// based on the hint kind.
pub(crate) fn inlay_hint(
    snap: &GlobalStateSnapshot,
    line_index: &LineIndex,
    render_colons: bool,
    inlay_hint: InlayHint,
) -> lsp_types::InlayHint {
    lsp_types::InlayHint {
        position: match inlay_hint.kind {
            // before annotated thing
            InlayKind::ParameterHint
            | InlayKind::ImplicitReborrowHint
            | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()),
            // after annotated thing
            InlayKind::ClosureReturnTypeHint
            | InlayKind::TypeHint
            | InlayKind::ChainingHint
            | InlayKind::GenericParamListHint
            | InlayKind::LifetimeHint
            | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()),
        },
        padding_left: Some(match inlay_hint.kind {
            // Type hints need a leading space only when the editor does not
            // render the `: ` prefix itself.
            InlayKind::TypeHint => !render_colons,
            InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true,
            InlayKind::BindingModeHint
            | InlayKind::ClosureReturnTypeHint
            | InlayKind::GenericParamListHint
            | InlayKind::ImplicitReborrowHint
            | InlayKind::LifetimeHint
            | InlayKind::ParameterHint => false,
        }),
        padding_right: Some(match inlay_hint.kind {
            InlayKind::ChainingHint
            | InlayKind::ClosureReturnTypeHint
            | InlayKind::GenericParamListHint
            | InlayKind::ImplicitReborrowHint
            | InlayKind::TypeHint
            | InlayKind::ClosingBraceHint => false,
            InlayKind::BindingModeHint => inlay_hint.label != "&",
            InlayKind::ParameterHint | InlayKind::LifetimeHint => true,
        }),
        // When the client renders colons, fold them into the label text.
        label: lsp_types::InlayHintLabel::String(match inlay_hint.kind {
            InlayKind::ParameterHint if render_colons => format!("{}:", inlay_hint.label),
            InlayKind::TypeHint if render_colons => format!(": {}", inlay_hint.label),
            InlayKind::ClosureReturnTypeHint => format!(" -> {}", inlay_hint.label),
            _ => inlay_hint.label.clone(),
        }),
        kind: match inlay_hint.kind {
            InlayKind::ParameterHint => Some(lsp_types::InlayHintKind::PARAMETER),
            InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => {
                Some(lsp_types::InlayHintKind::TYPE)
            }
            InlayKind::BindingModeHint
            | InlayKind::GenericParamListHint
            | InlayKind::LifetimeHint
            | InlayKind::ImplicitReborrowHint
            | InlayKind::ClosingBraceHint => None,
        },
        text_edits: None,
        // Hover tooltips are resolved lazily: stash the document position /
        // range needed for the later hover request in `data`.
        data: (|| match inlay_hint.tooltip {
            Some(ide::InlayTooltip::HoverOffset(file_id, offset)) => {
                let uri = url(snap, file_id);
                let line_index = snap.file_line_index(file_id).ok()?;

                let text_document = lsp_types::TextDocumentIdentifier { uri };
                to_value(lsp_ext::InlayHintResolveData {
                    text_document,
                    position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)),
                })
                .ok()
            }
            Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => {
                let uri = url(snap, file_id);
                let text_document = lsp_types::TextDocumentIdentifier { uri };
                let line_index = snap.file_line_index(file_id).ok()?;
                to_value(lsp_ext::InlayHintResolveData {
                    text_document,
                    position: lsp_ext::PositionOrRange::Range(range(&line_index, text_range)),
                })
                .ok()
            }
            _ => None,
        })(),
        // Static string tooltips are sent eagerly; the label is used as a
        // fallback tooltip otherwise.
        tooltip: Some(match inlay_hint.tooltip {
            Some(ide::InlayTooltip::String(s)) => lsp_types::InlayHintTooltip::String(s),
            _ => lsp_types::InlayHintTooltip::String(inlay_hint.label),
        }),
    }
}
+
// Monotonically increasing id used as the `result_id` of semantic token
// responses, so clients can later request deltas against them.
static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
+
/// Converts ide highlight ranges into a full LSP semantic-tokens response.
///
/// When `highlight_strings` is false, string-literal tokens are dropped.
pub(crate) fn semantic_tokens(
    text: &str,
    line_index: &LineIndex,
    highlights: Vec<HlRange>,
    highlight_strings: bool,
) -> lsp_types::SemanticTokens {
    // Fresh result id so the client can request a delta next time.
    let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
    let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);

    for highlight_range in highlights {
        if highlight_range.highlight.is_empty() {
            continue;
        }
        let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
        if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
            continue;
        }
        let token_index = semantic_tokens::type_index(ty);
        let modifier_bitset = mods.0;

        // LSP tokens cannot span lines: split the range per line and strip
        // a trailing newline if one is included.
        for mut text_range in line_index.index.lines(highlight_range.range) {
            if text[text_range].ends_with('\n') {
                text_range =
                    TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
            }
            let range = range(line_index, text_range);
            builder.push(range, token_index, modifier_bitset);
        }
    }

    builder.build()
}
+
+pub(crate) fn semantic_token_delta(
+ previous: &lsp_types::SemanticTokens,
+ current: &lsp_types::SemanticTokens,
+) -> lsp_types::SemanticTokensDelta {
+ let result_id = current.result_id.clone();
+ let edits = semantic_tokens::diff_tokens(&previous.data, &current.data);
+ lsp_types::SemanticTokensDelta { result_id, edits }
+}
+
+fn semantic_token_type_and_modifiers(
+ highlight: Highlight,
+) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
+ let mut mods = semantic_tokens::ModifierSet::default();
+ let type_ = match highlight.tag {
+ HlTag::Symbol(symbol) => match symbol {
+ SymbolKind::Attribute => semantic_tokens::ATTRIBUTE,
+ SymbolKind::Derive => semantic_tokens::DERIVE,
+ SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
+ SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE,
+ SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
+ SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY,
+ SymbolKind::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER,
+ SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER,
+ SymbolKind::LifetimeParam => semantic_tokens::LIFETIME,
+ SymbolKind::Label => semantic_tokens::LABEL,
+ SymbolKind::ValueParam => lsp_types::SemanticTokenType::PARAMETER,
+ SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD,
+ SymbolKind::SelfType => semantic_tokens::SELF_TYPE_KEYWORD,
+ SymbolKind::Local => lsp_types::SemanticTokenType::VARIABLE,
+ SymbolKind::Function => {
+ if highlight.mods.contains(HlMod::Associated) {
+ lsp_types::SemanticTokenType::METHOD
+ } else {
+ lsp_types::SemanticTokenType::FUNCTION
+ }
+ }
+ SymbolKind::Const => {
+ mods |= semantic_tokens::CONSTANT;
+ mods |= lsp_types::SemanticTokenModifier::STATIC;
+ lsp_types::SemanticTokenType::VARIABLE
+ }
+ SymbolKind::Static => {
+ mods |= lsp_types::SemanticTokenModifier::STATIC;
+ lsp_types::SemanticTokenType::VARIABLE
+ }
+ SymbolKind::Struct => lsp_types::SemanticTokenType::STRUCT,
+ SymbolKind::Enum => lsp_types::SemanticTokenType::ENUM,
+ SymbolKind::Variant => lsp_types::SemanticTokenType::ENUM_MEMBER,
+ SymbolKind::Union => semantic_tokens::UNION,
+ SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
+ SymbolKind::Trait => lsp_types::SemanticTokenType::INTERFACE,
+ SymbolKind::Macro => lsp_types::SemanticTokenType::MACRO,
+ SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
+ SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,
+ },
+ HlTag::AttributeBracket => semantic_tokens::ATTRIBUTE_BRACKET,
+ HlTag::BoolLiteral => semantic_tokens::BOOLEAN,
+ HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
+ HlTag::ByteLiteral | HlTag::NumericLiteral => lsp_types::SemanticTokenType::NUMBER,
+ HlTag::CharLiteral => semantic_tokens::CHAR,
+ HlTag::Comment => lsp_types::SemanticTokenType::COMMENT,
+ HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
+ HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
+ HlTag::Keyword => lsp_types::SemanticTokenType::KEYWORD,
+ HlTag::None => semantic_tokens::GENERIC,
+ HlTag::Operator(op) => match op {
+ HlOperator::Bitwise => semantic_tokens::BITWISE,
+ HlOperator::Arithmetic => semantic_tokens::ARITHMETIC,
+ HlOperator::Logical => semantic_tokens::LOGICAL,
+ HlOperator::Comparison => semantic_tokens::COMPARISON,
+ HlOperator::Other => semantic_tokens::OPERATOR,
+ },
+ HlTag::StringLiteral => lsp_types::SemanticTokenType::STRING,
+ HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
+ HlTag::Punctuation(punct) => match punct {
+ HlPunct::Bracket => semantic_tokens::BRACKET,
+ HlPunct::Brace => semantic_tokens::BRACE,
+ HlPunct::Parenthesis => semantic_tokens::PARENTHESIS,
+ HlPunct::Angle => semantic_tokens::ANGLE,
+ HlPunct::Comma => semantic_tokens::COMMA,
+ HlPunct::Dot => semantic_tokens::DOT,
+ HlPunct::Colon => semantic_tokens::COLON,
+ HlPunct::Semi => semantic_tokens::SEMICOLON,
+ HlPunct::Other => semantic_tokens::PUNCTUATION,
+ HlPunct::MacroBang => semantic_tokens::MACRO_BANG,
+ },
+ };
+
+ for modifier in highlight.mods.iter() {
+ let modifier = match modifier {
+ HlMod::Associated => continue,
+ HlMod::Async => semantic_tokens::ASYNC,
+ HlMod::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER,
+ HlMod::Callable => semantic_tokens::CALLABLE,
+ HlMod::Consuming => semantic_tokens::CONSUMING,
+ HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW,
+ HlMod::CrateRoot => semantic_tokens::CRATE_ROOT,
+ HlMod::DefaultLibrary => lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY,
+ HlMod::Definition => lsp_types::SemanticTokenModifier::DECLARATION,
+ HlMod::Documentation => lsp_types::SemanticTokenModifier::DOCUMENTATION,
+ HlMod::Injected => semantic_tokens::INJECTED,
+ HlMod::IntraDocLink => semantic_tokens::INTRA_DOC_LINK,
+ HlMod::Library => semantic_tokens::LIBRARY,
+ HlMod::Mutable => semantic_tokens::MUTABLE,
+ HlMod::Public => semantic_tokens::PUBLIC,
+ HlMod::Reference => semantic_tokens::REFERENCE,
+ HlMod::Static => lsp_types::SemanticTokenModifier::STATIC,
+ HlMod::Trait => semantic_tokens::TRAIT_MODIFIER,
+ HlMod::Unsafe => semantic_tokens::UNSAFE,
+ };
+ mods |= modifier;
+ }
+
+ (type_, mods)
+}
+
+pub(crate) fn folding_range(
+ text: &str,
+ line_index: &LineIndex,
+ line_folding_only: bool,
+ fold: Fold,
+) -> lsp_types::FoldingRange {
+ let kind = match fold.kind {
+ FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
+ FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
+ FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
+ FoldKind::Mods
+ | FoldKind::Block
+ | FoldKind::ArgList
+ | FoldKind::Consts
+ | FoldKind::Statics
+ | FoldKind::WhereClause
+ | FoldKind::ReturnType
+ | FoldKind::Array
+ | FoldKind::MatchArm => None,
+ };
+
+ let range = range(line_index, fold.range);
+
+ if line_folding_only {
+ // Clients with line_folding_only == true (such as VSCode) will fold the whole end line
+ // even if it contains text not in the folding range. To prevent that we exclude
+ // range.end.line from the folding region if there is more text after range.end
+ // on the same line.
+ let has_more_text_on_end_line = text[TextRange::new(fold.range.end(), TextSize::of(text))]
+ .chars()
+ .take_while(|it| *it != '\n')
+ .any(|it| !it.is_whitespace());
+
+ let end_line = if has_more_text_on_end_line {
+ range.end.line.saturating_sub(1)
+ } else {
+ range.end.line
+ };
+
+ lsp_types::FoldingRange {
+ start_line: range.start.line,
+ start_character: None,
+ end_line,
+ end_character: None,
+ kind,
+ }
+ } else {
+ lsp_types::FoldingRange {
+ start_line: range.start.line,
+ start_character: Some(range.start.character),
+ end_line: range.end.line,
+ end_character: Some(range.end.character),
+ kind,
+ }
+ }
+}
+
+pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> lsp_types::Url {
+ snap.file_id_to_url(file_id)
+}
+
+/// Returns a `Url` object from a given path, will lowercase drive letters if present.
+/// This will only happen when processing windows paths.
+///
+/// When processing non-windows path, this is essentially the same as `Url::from_file_path`.
+pub(crate) fn url_from_abs_path(path: &AbsPath) -> lsp_types::Url {
+ let url = lsp_types::Url::from_file_path(path).unwrap();
+ match path.as_ref().components().next() {
+ Some(path::Component::Prefix(prefix))
+ if matches!(prefix.kind(), path::Prefix::Disk(_) | path::Prefix::VerbatimDisk(_)) =>
+ {
+            // Need to lowercase drive letter
+ }
+ _ => return url,
+ }
+
+ let driver_letter_range = {
+ let (scheme, drive_letter, _rest) = match url.as_str().splitn(3, ':').collect_tuple() {
+ Some(it) => it,
+ None => return url,
+ };
+ let start = scheme.len() + ':'.len_utf8();
+ start..(start + drive_letter.len())
+ };
+
+ // Note: lowercasing the `path` itself doesn't help, the `Url::parse`
+ // machinery *also* canonicalizes the drive letter. So, just massage the
+ // string in place.
+ let mut url: String = url.into();
+ url[driver_letter_range].make_ascii_lowercase();
+ lsp_types::Url::parse(&url).unwrap()
+}
+
+pub(crate) fn optional_versioned_text_document_identifier(
+ snap: &GlobalStateSnapshot,
+ file_id: FileId,
+) -> lsp_types::OptionalVersionedTextDocumentIdentifier {
+ let url = url(snap, file_id);
+ let version = snap.url_file_version(&url);
+ lsp_types::OptionalVersionedTextDocumentIdentifier { uri: url, version }
+}
+
+pub(crate) fn location(
+ snap: &GlobalStateSnapshot,
+ frange: FileRange,
+) -> Result<lsp_types::Location> {
+ let url = url(snap, frange.file_id);
+ let line_index = snap.file_line_index(frange.file_id)?;
+ let range = range(&line_index, frange.range);
+ let loc = lsp_types::Location::new(url, range);
+ Ok(loc)
+}
+
+/// Prefer using `location_link`, if the client has the cap.
+pub(crate) fn location_from_nav(
+ snap: &GlobalStateSnapshot,
+ nav: NavigationTarget,
+) -> Result<lsp_types::Location> {
+ let url = url(snap, nav.file_id);
+ let line_index = snap.file_line_index(nav.file_id)?;
+ let range = range(&line_index, nav.full_range);
+ let loc = lsp_types::Location::new(url, range);
+ Ok(loc)
+}
+
+pub(crate) fn location_link(
+ snap: &GlobalStateSnapshot,
+ src: Option<FileRange>,
+ target: NavigationTarget,
+) -> Result<lsp_types::LocationLink> {
+ let origin_selection_range = match src {
+ Some(src) => {
+ let line_index = snap.file_line_index(src.file_id)?;
+ let range = range(&line_index, src.range);
+ Some(range)
+ }
+ None => None,
+ };
+ let (target_uri, target_range, target_selection_range) = location_info(snap, target)?;
+ let res = lsp_types::LocationLink {
+ origin_selection_range,
+ target_uri,
+ target_range,
+ target_selection_range,
+ };
+ Ok(res)
+}
+
+fn location_info(
+ snap: &GlobalStateSnapshot,
+ target: NavigationTarget,
+) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
+ let line_index = snap.file_line_index(target.file_id)?;
+
+ let target_uri = url(snap, target.file_id);
+ let target_range = range(&line_index, target.full_range);
+ let target_selection_range =
+ target.focus_range.map(|it| range(&line_index, it)).unwrap_or(target_range);
+ Ok((target_uri, target_range, target_selection_range))
+}
+
+pub(crate) fn goto_definition_response(
+ snap: &GlobalStateSnapshot,
+ src: Option<FileRange>,
+ targets: Vec<NavigationTarget>,
+) -> Result<lsp_types::GotoDefinitionResponse> {
+ if snap.config.location_link() {
+ let links = targets
+ .into_iter()
+ .map(|nav| location_link(snap, src, nav))
+ .collect::<Result<Vec<_>>>()?;
+ Ok(links.into())
+ } else {
+ let locations = targets
+ .into_iter()
+ .map(|nav| {
+ location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ })
+ .collect::<Result<Vec<_>>>()?;
+ Ok(locations.into())
+ }
+}
+
+fn outside_workspace_annotation_id() -> String {
+ String::from("OutsideWorkspace")
+}
+
+pub(crate) fn snippet_text_document_edit(
+ snap: &GlobalStateSnapshot,
+ is_snippet: bool,
+ file_id: FileId,
+ edit: TextEdit,
+) -> Result<lsp_ext::SnippetTextDocumentEdit> {
+ let text_document = optional_versioned_text_document_identifier(snap, file_id);
+ let line_index = snap.file_line_index(file_id)?;
+ let mut edits: Vec<_> =
+ edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect();
+
+ if snap.analysis.is_library_file(file_id)? && snap.config.change_annotation_support() {
+ for edit in &mut edits {
+ edit.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ }
+ Ok(lsp_ext::SnippetTextDocumentEdit { text_document, edits })
+}
+
+pub(crate) fn snippet_text_document_ops(
+ snap: &GlobalStateSnapshot,
+ file_system_edit: FileSystemEdit,
+) -> Cancellable<Vec<lsp_ext::SnippetDocumentChangeOperation>> {
+ let mut ops = Vec::new();
+ match file_system_edit {
+ FileSystemEdit::CreateFile { dst, initial_contents } => {
+ let uri = snap.anchored_path(&dst);
+ let create_file = lsp_types::ResourceOp::Create(lsp_types::CreateFile {
+ uri: uri.clone(),
+ options: None,
+ annotation_id: None,
+ });
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(create_file));
+ if !initial_contents.is_empty() {
+ let text_document =
+ lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None };
+ let text_edit = lsp_ext::SnippetTextEdit {
+ range: lsp_types::Range::default(),
+ new_text: initial_contents,
+ insert_text_format: Some(lsp_types::InsertTextFormat::PLAIN_TEXT),
+ annotation_id: None,
+ };
+ let edit_file =
+ lsp_ext::SnippetTextDocumentEdit { text_document, edits: vec![text_edit] };
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit_file));
+ }
+ }
+ FileSystemEdit::MoveFile { src, dst } => {
+ let old_uri = snap.file_id_to_url(src);
+ let new_uri = snap.anchored_path(&dst);
+ let mut rename_file =
+ lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
+ if snap.analysis.is_library_file(src).ok() == Some(true)
+ && snap.config.change_annotation_support()
+ {
+ rename_file.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(lsp_types::ResourceOp::Rename(
+ rename_file,
+ )))
+ }
+ FileSystemEdit::MoveDir { src, src_id, dst } => {
+ let old_uri = snap.anchored_path(&src);
+ let new_uri = snap.anchored_path(&dst);
+ let mut rename_file =
+ lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
+ if snap.analysis.is_library_file(src_id).ok() == Some(true)
+ && snap.config.change_annotation_support()
+ {
+ rename_file.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(lsp_types::ResourceOp::Rename(
+ rename_file,
+ )))
+ }
+ }
+ Ok(ops)
+}
+
+pub(crate) fn snippet_workspace_edit(
+ snap: &GlobalStateSnapshot,
+ source_change: SourceChange,
+) -> Result<lsp_ext::SnippetWorkspaceEdit> {
+ let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
+
+ for op in source_change.file_system_edits {
+ let ops = snippet_text_document_ops(snap, op)?;
+ document_changes.extend_from_slice(&ops);
+ }
+ for (file_id, edit) in source_change.source_file_edits {
+ let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
+ document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
+ }
+ let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
+ changes: None,
+ document_changes: Some(document_changes),
+ change_annotations: None,
+ };
+ if snap.config.change_annotation_support() {
+ workspace_edit.change_annotations = Some(
+ once((
+ outside_workspace_annotation_id(),
+ lsp_types::ChangeAnnotation {
+ label: String::from("Edit outside of the workspace"),
+ needs_confirmation: Some(true),
+ description: Some(String::from(
+ "This edit lies outside of the workspace and may affect dependencies",
+ )),
+ },
+ ))
+ .collect(),
+ )
+ }
+ Ok(workspace_edit)
+}
+
+pub(crate) fn workspace_edit(
+ snap: &GlobalStateSnapshot,
+ source_change: SourceChange,
+) -> Result<lsp_types::WorkspaceEdit> {
+ assert!(!source_change.is_snippet);
+ snippet_workspace_edit(snap, source_change).map(|it| it.into())
+}
+
+impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit {
+ fn from(snippet_workspace_edit: lsp_ext::SnippetWorkspaceEdit) -> lsp_types::WorkspaceEdit {
+ lsp_types::WorkspaceEdit {
+ changes: None,
+ document_changes: snippet_workspace_edit.document_changes.map(|changes| {
+ lsp_types::DocumentChanges::Operations(
+ changes
+ .into_iter()
+ .map(|change| match change {
+ lsp_ext::SnippetDocumentChangeOperation::Op(op) => {
+ lsp_types::DocumentChangeOperation::Op(op)
+ }
+ lsp_ext::SnippetDocumentChangeOperation::Edit(edit) => {
+ lsp_types::DocumentChangeOperation::Edit(
+ lsp_types::TextDocumentEdit {
+ text_document: edit.text_document,
+ edits: edit.edits.into_iter().map(From::from).collect(),
+ },
+ )
+ }
+ })
+ .collect(),
+ )
+ }),
+ change_annotations: snippet_workspace_edit.change_annotations,
+ }
+ }
+}
+
+impl From<lsp_ext::SnippetTextEdit>
+ for lsp_types::OneOf<lsp_types::TextEdit, lsp_types::AnnotatedTextEdit>
+{
+ fn from(
+ lsp_ext::SnippetTextEdit { annotation_id, insert_text_format:_, new_text, range }: lsp_ext::SnippetTextEdit,
+ ) -> Self {
+ match annotation_id {
+ Some(annotation_id) => lsp_types::OneOf::Right(lsp_types::AnnotatedTextEdit {
+ text_edit: lsp_types::TextEdit { range, new_text },
+ annotation_id,
+ }),
+ None => lsp_types::OneOf::Left(lsp_types::TextEdit { range, new_text }),
+ }
+ }
+}
+
+pub(crate) fn call_hierarchy_item(
+ snap: &GlobalStateSnapshot,
+ target: NavigationTarget,
+) -> Result<lsp_types::CallHierarchyItem> {
+ let name = target.name.to_string();
+ let detail = target.description.clone();
+ let kind = target.kind.map(symbol_kind).unwrap_or(lsp_types::SymbolKind::FUNCTION);
+ let (uri, range, selection_range) = location_info(snap, target)?;
+ Ok(lsp_types::CallHierarchyItem {
+ name,
+ kind,
+ tags: None,
+ detail,
+ uri,
+ range,
+ selection_range,
+ data: None,
+ })
+}
+
+pub(crate) fn code_action_kind(kind: AssistKind) -> lsp_types::CodeActionKind {
+ match kind {
+ AssistKind::None | AssistKind::Generate => lsp_types::CodeActionKind::EMPTY,
+ AssistKind::QuickFix => lsp_types::CodeActionKind::QUICKFIX,
+ AssistKind::Refactor => lsp_types::CodeActionKind::REFACTOR,
+ AssistKind::RefactorExtract => lsp_types::CodeActionKind::REFACTOR_EXTRACT,
+ AssistKind::RefactorInline => lsp_types::CodeActionKind::REFACTOR_INLINE,
+ AssistKind::RefactorRewrite => lsp_types::CodeActionKind::REFACTOR_REWRITE,
+ }
+}
+
+pub(crate) fn code_action(
+ snap: &GlobalStateSnapshot,
+ assist: Assist,
+ resolve_data: Option<(usize, lsp_types::CodeActionParams)>,
+) -> Result<lsp_ext::CodeAction> {
+ let mut res = lsp_ext::CodeAction {
+ title: assist.label.to_string(),
+ group: assist.group.filter(|_| snap.config.code_action_group()).map(|gr| gr.0),
+ kind: Some(code_action_kind(assist.id.1)),
+ edit: None,
+ is_preferred: None,
+ data: None,
+ command: None,
+ };
+
+ if assist.trigger_signature_help && snap.config.client_commands().trigger_parameter_hints {
+ res.command = Some(command::trigger_parameter_hints());
+ }
+
+ match (assist.source_change, resolve_data) {
+ (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?),
+ (None, Some((index, code_action_params))) => {
+ res.data = Some(lsp_ext::CodeActionData {
+ id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index),
+ code_action_params,
+ });
+ }
+ (None, None) => {
+ stdx::never!("assist should always be resolved if client can't do lazy resolving")
+ }
+ };
+ Ok(res)
+}
+
+pub(crate) fn runnable(
+ snap: &GlobalStateSnapshot,
+ runnable: Runnable,
+) -> Result<lsp_ext::Runnable> {
+ let config = snap.config.runnables();
+ let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?;
+ let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
+ let target = spec.as_ref().map(|s| s.target.clone());
+ let (cargo_args, executable_args) =
+ CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg)?;
+ let label = runnable.label(target);
+ let location = location_link(snap, None, runnable.nav)?;
+
+ Ok(lsp_ext::Runnable {
+ label,
+ location: Some(location),
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: workspace_root.map(|it| it.into()),
+ override_cargo: config.override_cargo,
+ cargo_args,
+ cargo_extra_args: config.cargo_extra_args,
+ executable_args,
+ expect_test: None,
+ },
+ })
+}
+
+pub(crate) fn code_lens(
+ acc: &mut Vec<lsp_types::CodeLens>,
+ snap: &GlobalStateSnapshot,
+ annotation: Annotation,
+) -> Result<()> {
+ let client_commands_config = snap.config.client_commands();
+ match annotation.kind {
+ AnnotationKind::Runnable(run) => {
+ let line_index = snap.file_line_index(run.nav.file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+
+ let title = run.title();
+ let can_debug = match run.kind {
+ ide::RunnableKind::DocTest { .. } => false,
+ ide::RunnableKind::TestMod { .. }
+ | ide::RunnableKind::Test { .. }
+ | ide::RunnableKind::Bench { .. }
+ | ide::RunnableKind::Bin => true,
+ };
+ let r = runnable(snap, run)?;
+
+ let lens_config = snap.config.lens();
+ if lens_config.run && client_commands_config.run_single {
+ let command = command::run_single(&r, &title);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ if lens_config.debug && can_debug && client_commands_config.debug_single {
+ let command = command::debug_single(&r);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ }
+ AnnotationKind::HasImpls { file_id, data } => {
+ if !client_commands_config.show_reference {
+ return Ok(());
+ }
+ let line_index = snap.file_line_index(file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+ let url = url(snap, file_id);
+
+ let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
+
+ let doc_pos = lsp_types::TextDocumentPositionParams::new(id, annotation_range.start);
+
+ let goto_params = lsp_types::request::GotoImplementationParams {
+ text_document_position_params: doc_pos,
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ };
+
+ let command = data.map(|ranges| {
+ let locations: Vec<lsp_types::Location> = ranges
+ .into_iter()
+ .filter_map(|target| {
+ location(
+ snap,
+ FileRange { file_id: target.file_id, range: target.full_range },
+ )
+ .ok()
+ })
+ .collect();
+
+ command::show_references(
+ implementation_title(locations.len()),
+ &url,
+ annotation_range.start,
+ locations,
+ )
+ });
+
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command,
+ data: Some(to_value(lsp_ext::CodeLensResolveData::Impls(goto_params)).unwrap()),
+ })
+ }
+ AnnotationKind::HasReferences { file_id, data } => {
+ if !client_commands_config.show_reference {
+ return Ok(());
+ }
+ let line_index = snap.file_line_index(file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+ let url = url(snap, file_id);
+
+ let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
+
+ let doc_pos = lsp_types::TextDocumentPositionParams::new(id, annotation_range.start);
+
+ let command = data.map(|ranges| {
+ let locations: Vec<lsp_types::Location> =
+ ranges.into_iter().filter_map(|range| location(snap, range).ok()).collect();
+
+ command::show_references(
+ reference_title(locations.len()),
+ &url,
+ annotation_range.start,
+ locations,
+ )
+ });
+
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command,
+ data: Some(to_value(lsp_ext::CodeLensResolveData::References(doc_pos)).unwrap()),
+ })
+ }
+ }
+ Ok(())
+}
+
+pub(crate) mod command {
+ use ide::{FileRange, NavigationTarget};
+ use serde_json::to_value;
+
+ use crate::{
+ global_state::GlobalStateSnapshot,
+ lsp_ext,
+ to_proto::{location, location_link},
+ };
+
+ pub(crate) fn show_references(
+ title: String,
+ uri: &lsp_types::Url,
+ position: lsp_types::Position,
+ locations: Vec<lsp_types::Location>,
+ ) -> lsp_types::Command {
+ // We cannot use the 'editor.action.showReferences' command directly
+ // because that command requires vscode types which we convert in the handler
+ // on the client side.
+
+ lsp_types::Command {
+ title,
+ command: "rust-analyzer.showReferences".into(),
+ arguments: Some(vec![
+ to_value(uri).unwrap(),
+ to_value(position).unwrap(),
+ to_value(locations).unwrap(),
+ ]),
+ }
+ }
+
+ pub(crate) fn run_single(runnable: &lsp_ext::Runnable, title: &str) -> lsp_types::Command {
+ lsp_types::Command {
+ title: title.to_string(),
+ command: "rust-analyzer.runSingle".into(),
+ arguments: Some(vec![to_value(runnable).unwrap()]),
+ }
+ }
+
+ pub(crate) fn debug_single(runnable: &lsp_ext::Runnable) -> lsp_types::Command {
+ lsp_types::Command {
+ title: "Debug".into(),
+ command: "rust-analyzer.debugSingle".into(),
+ arguments: Some(vec![to_value(runnable).unwrap()]),
+ }
+ }
+
+ pub(crate) fn goto_location(
+ snap: &GlobalStateSnapshot,
+ nav: &NavigationTarget,
+ ) -> Option<lsp_types::Command> {
+ let value = if snap.config.location_link() {
+ let link = location_link(snap, None, nav.clone()).ok()?;
+ to_value(link).ok()?
+ } else {
+ let range = FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() };
+ let location = location(snap, range).ok()?;
+ to_value(location).ok()?
+ };
+
+ Some(lsp_types::Command {
+ title: nav.name.to_string(),
+ command: "rust-analyzer.gotoLocation".into(),
+ arguments: Some(vec![value]),
+ })
+ }
+
+ pub(crate) fn trigger_parameter_hints() -> lsp_types::Command {
+ lsp_types::Command {
+ title: "triggerParameterHints".into(),
+ command: "editor.action.triggerParameterHints".into(),
+ arguments: None,
+ }
+ }
+}
+
+pub(crate) fn implementation_title(count: usize) -> String {
+ if count == 1 {
+ "1 implementation".into()
+ } else {
+ format!("{} implementations", count)
+ }
+}
+
+pub(crate) fn reference_title(count: usize) -> String {
+ if count == 1 {
+ "1 reference".into()
+ } else {
+ format!("{} references", count)
+ }
+}
+
+pub(crate) fn markup_content(
+ markup: Markup,
+ kind: ide::HoverDocFormat,
+) -> lsp_types::MarkupContent {
+ let kind = match kind {
+ ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
+ ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
+ };
+ let value = crate::markdown::format_docs(markup.as_str());
+ lsp_types::MarkupContent { kind, value }
+}
+
+pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
+ // This is wrong, but we don't have a better alternative I suppose?
+ // https://github.com/microsoft/language-server-protocol/issues/1341
+ invalid_params_error(err.to_string())
+}
+
+#[cfg(test)]
+mod tests {
+ use std::sync::Arc;
+
+ use ide::Analysis;
+
+ use super::*;
+
+ #[test]
+ fn conv_fold_line_folding_only_fixup() {
+ let text = r#"mod a;
+mod b;
+mod c;
+
+fn main() {
+ if cond {
+ a::do_a();
+ } else {
+ b::do_b();
+ }
+}"#;
+
+ let (analysis, file_id) = Analysis::from_single_file(text.to_string());
+ let folds = analysis.folding_ranges(file_id).unwrap();
+ assert_eq!(folds.len(), 4);
+
+ let line_index = LineIndex {
+ index: Arc::new(ide::LineIndex::new(text)),
+ endings: LineEndings::Unix,
+ encoding: OffsetEncoding::Utf16,
+ };
+ let converted: Vec<lsp_types::FoldingRange> =
+ folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
+
+ let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
+ assert_eq!(converted.len(), expected_lines.len());
+ for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
+ assert_eq!(folding_range.start_line, *start_line);
+ assert_eq!(folding_range.start_character, None);
+ assert_eq!(folding_range.end_line, *end_line);
+ assert_eq!(folding_range.end_character, None);
+ }
+ }
+
+ // `Url` is not able to parse windows paths on unix machines.
+ #[test]
+ #[cfg(target_os = "windows")]
+ fn test_lowercase_drive_letter() {
+ use std::{convert::TryInto, path::Path};
+
+ let url = url_from_abs_path(Path::new("C:\\Test").try_into().unwrap());
+ assert_eq!(url.to_string(), "file:///c:/Test");
+
+ let url = url_from_abs_path(Path::new(r#"\\localhost\C$\my_dir"#).try_into().unwrap());
+ assert_eq!(url.to_string(), "file://localhost/C$/my_dir");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs
new file mode 100644
index 000000000..1e829299e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs
@@ -0,0 +1,57 @@
+//! Code for representing rust-analyzer's release version number.
+
+use std::fmt;
+
+/// Information about the git repository where rust-analyzer was built from.
+pub struct CommitInfo {
+ pub short_commit_hash: &'static str,
+ pub commit_hash: &'static str,
+ pub commit_date: &'static str,
+}
+
+/// rust-analyzer's version.
+pub struct VersionInfo {
+ /// rust-analyzer's version, such as "1.57.0", "1.58.0-beta.1", "1.59.0-nightly", etc.
+ pub version: &'static str,
+ /// The release channel we were built for (stable/beta/nightly/dev).
+ ///
+ /// `None` if not built via rustbuild.
+ pub release_channel: Option<&'static str>,
+ /// Information about the Git repository we may have been built from.
+ ///
+ /// `None` if not built from a git repo.
+ pub commit_info: Option<CommitInfo>,
+}
+
+impl fmt::Display for VersionInfo {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.version)?;
+
+ if let Some(ci) = &self.commit_info {
+ write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
+ };
+ Ok(())
+ }
+}
+
+/// Returns information about rust-analyzer's version.
+pub const fn version() -> VersionInfo {
+ let version = match option_env!("CFG_RELEASE") {
+ Some(x) => x,
+ None => "0.0.0",
+ };
+
+ let release_channel = option_env!("CFG_RELEASE_CHANNEL");
+ let commit_info = match (
+ option_env!("RA_COMMIT_SHORT_HASH"),
+ option_env!("RA_COMMIT_HASH"),
+ option_env!("RA_COMMIT_DATE"),
+ ) {
+ (Some(short_commit_hash), Some(commit_hash), Some(commit_date)) => {
+ Some(CommitInfo { short_commit_hash, commit_hash, commit_date })
+ }
+ _ => None,
+ };
+
+ VersionInfo { version, release_channel, commit_info }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
new file mode 100644
index 000000000..4cc46af1b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -0,0 +1,1099 @@
+//! The most high-level integrated tests for rust-analyzer.
+//!
+//! This tests run a full LSP event loop, spawn cargo and process stdlib from
+//! sysroot. For this reason, the tests here are very slow, and should be
+//! avoided unless absolutely necessary.
+//!
+//! In particular, it's fine *not* to test that client & server agree on
+//! specific JSON shapes here -- there's little value in such tests, as we can't
+//! be sure without a real client anyway.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen;
+mod support;
+mod testdir;
+mod tidy;
+
+use std::{collections::HashMap, path::PathBuf, time::Instant};
+
+use expect_test::expect;
+use lsp_types::{
+ notification::DidOpenTextDocument,
+ request::{
+ CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
+ WillRenameFiles, WorkspaceSymbol,
+ },
+ CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
+ DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
+ PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
+ TextDocumentPositionParams, WorkDoneProgressParams,
+};
+use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
+use serde_json::json;
+use test_utils::skip_slow_tests;
+
+use crate::{
+ support::{project, Project},
+ testdir::TestDir,
+};
+
+const PROFILE: &str = "";
+// const PROFILE: &'static str = "*@3>100";
+
+#[test]
+fn completes_items_from_standard_library() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+use std::collections::Spam;
+"#,
+ )
+ .with_config(serde_json::json!({
+ "cargo": { "noSysroot": false }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ let res = server.send_request::<Completion>(CompletionParams {
+ text_document_position: TextDocumentPositionParams::new(
+ server.doc_id("src/lib.rs"),
+ Position::new(0, 23),
+ ),
+ context: None,
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ });
+ assert!(res.to_string().contains("HashMap"));
+}
+
+#[test]
+fn test_runnables_project() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /foo/Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /foo/src/lib.rs
+pub fn foo() {}
+
+//- /foo/tests/spam.rs
+#[test]
+fn test_eggs() {}
+
+//- /bar/Cargo.toml
+[package]
+name = "bar"
+version = "0.0.0"
+
+//- /bar/src/main.rs
+fn main() {}
+"#,
+ )
+ .root("foo")
+ .root("bar")
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ server.request::<Runnables>(
+ RunnablesParams { text_document: server.doc_id("foo/tests/spam.rs"), position: None },
+ json!([
+ {
+ "args": {
+ "cargoArgs": ["test", "--package", "foo", "--test", "spam"],
+ "executableArgs": ["test_eggs", "--exact", "--nocapture"],
+ "cargoExtraArgs": [],
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join("foo")
+ },
+ "kind": "cargo",
+ "label": "test test_eggs",
+ "location": {
+ "targetRange": {
+ "end": { "character": 17, "line": 1 },
+ "start": { "character": 0, "line": 0 }
+ },
+ "targetSelectionRange": {
+ "end": { "character": 12, "line": 1 },
+ "start": { "character": 3, "line": 1 }
+ },
+ "targetUri": "file:///[..]/tests/spam.rs"
+ }
+ },
+ {
+ "args": {
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join("foo"),
+ "cargoArgs": [
+ "test",
+ "--package",
+ "foo",
+ "--test",
+ "spam"
+ ],
+ "cargoExtraArgs": [],
+ "executableArgs": [
+ "",
+ "--nocapture"
+ ]
+ },
+ "kind": "cargo",
+ "label": "test-mod ",
+ "location": {
+ "targetUri": "file:///[..]/tests/spam.rs",
+ "targetRange": {
+ "start": {
+ "line": 0,
+ "character": 0
+ },
+ "end": {
+ "line": 3,
+ "character": 0
+ }
+ },
+ "targetSelectionRange": {
+ "start": {
+ "line": 0,
+ "character": 0
+ },
+ "end": {
+ "line": 3,
+ "character": 0
+ }
+ }
+ },
+ },
+ {
+ "args": {
+ "cargoArgs": ["check", "--package", "foo", "--all-targets"],
+ "executableArgs": [],
+ "cargoExtraArgs": [],
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join("foo")
+ },
+ "kind": "cargo",
+ "label": "cargo check -p foo --all-targets"
+ },
+ {
+ "args": {
+ "cargoArgs": ["test", "--package", "foo", "--all-targets"],
+ "executableArgs": [],
+ "cargoExtraArgs": [],
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join("foo")
+ },
+ "kind": "cargo",
+ "label": "cargo test -p foo --all-targets"
+ }
+ ]),
+ );
+}
+
+// Each package in these workspaces should be run from its own root
+#[test]
+fn test_path_dependency_runnables() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /consumer/Cargo.toml
+[package]
+name = "consumer"
+version = "0.1.0"
+[dependencies]
+dependency = { path = "../dependency" }
+
+//- /consumer/src/lib.rs
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn consumer() {}
+}
+
+//- /dependency/Cargo.toml
+[package]
+name = "dependency"
+version = "0.1.0"
+[dev-dependencies]
+devdependency = { path = "../devdependency" }
+
+//- /dependency/src/lib.rs
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn dependency() {}
+}
+
+//- /devdependency/Cargo.toml
+[package]
+name = "devdependency"
+version = "0.1.0"
+
+//- /devdependency/src/lib.rs
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn devdependency() {}
+}
+ "#,
+ )
+ .root("consumer")
+ .root("dependency")
+ .root("devdependency")
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ for runnable in ["consumer", "dependency", "devdependency"] {
+ server.request::<Runnables>(
+ RunnablesParams {
+ text_document: server.doc_id(&format!("{}/src/lib.rs", runnable)),
+ position: None,
+ },
+ json!([
+ "{...}",
+ {
+ "label": "cargo test -p [..] --all-targets",
+ "kind": "cargo",
+ "args": {
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join(runnable),
+ "cargoArgs": [
+ "test",
+ "--package",
+ runnable,
+ "--all-targets"
+ ],
+ "cargoExtraArgs": [],
+ "executableArgs": []
+ },
+ },
+ "{...}",
+ "{...}"
+ ]),
+ );
+ }
+}
+
+#[test]
+fn test_format_document() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = project(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+mod bar;
+
+fn main() {
+}
+
+pub use std::collections::HashMap;
+"#,
+ )
+ .wait_until_workspace_is_loaded();
+
+ server.request::<Formatting>(
+ DocumentFormattingParams {
+ text_document: server.doc_id("src/lib.rs"),
+ options: FormattingOptions {
+ tab_size: 4,
+ insert_spaces: false,
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ trim_trailing_whitespace: None,
+ properties: HashMap::new(),
+ },
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([
+ {
+ "newText": "",
+ "range": {
+ "end": { "character": 0, "line": 3 },
+ "start": { "character": 11, "line": 2 }
+ }
+ }
+ ]),
+ );
+}
+
+#[test]
+fn test_format_document_2018() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = project(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+edition = "2018"
+
+//- /src/lib.rs
+mod bar;
+
+async fn test() {
+}
+
+fn main() {
+}
+
+pub use std::collections::HashMap;
+"#,
+ )
+ .wait_until_workspace_is_loaded();
+
+ server.request::<Formatting>(
+ DocumentFormattingParams {
+ text_document: server.doc_id("src/lib.rs"),
+ options: FormattingOptions {
+ tab_size: 4,
+ insert_spaces: false,
+ properties: HashMap::new(),
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ trim_trailing_whitespace: None,
+ },
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([
+ {
+ "newText": "",
+ "range": {
+ "end": { "character": 0, "line": 3 },
+ "start": { "character": 17, "line": 2 }
+ }
+ },
+ {
+ "newText": "",
+ "range": {
+ "end": { "character": 0, "line": 6 },
+ "start": { "character": 11, "line": 5 }
+ }
+ }
+ ]),
+ );
+}
+
+#[test]
+fn test_format_document_unchanged() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = project(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+fn main() {}
+"#,
+ )
+ .wait_until_workspace_is_loaded();
+
+ server.request::<Formatting>(
+ DocumentFormattingParams {
+ text_document: server.doc_id("src/lib.rs"),
+ options: FormattingOptions {
+ tab_size: 4,
+ insert_spaces: false,
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ trim_trailing_whitespace: None,
+ properties: HashMap::new(),
+ },
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!(null),
+ );
+}
+
+#[test]
+fn test_missing_module_code_action() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = project(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+mod bar;
+
+fn main() {}
+"#,
+ )
+ .wait_until_workspace_is_loaded();
+
+ server.request::<CodeActionRequest>(
+ CodeActionParams {
+ text_document: server.doc_id("src/lib.rs"),
+ range: Range::new(Position::new(0, 4), Position::new(0, 7)),
+ context: CodeActionContext::default(),
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([
+ {
+ "title": "Create module at `bar.rs`",
+ "kind": "quickfix",
+ "edit": {
+ "documentChanges": [
+ {
+ "kind": "create",
+ "uri": "file://[..]/src/bar.rs"
+ }
+ ]
+ }
+ },
+ {
+ "title": "Create module at `bar/mod.rs`",
+ "kind": "quickfix",
+ "edit": {
+ "documentChanges": [
+ {
+ "kind": "create",
+ "uri": "file://[..]src/bar/mod.rs"
+ }
+ ]
+ }
+ }
+ ]),
+ );
+
+ server.request::<CodeActionRequest>(
+ CodeActionParams {
+ text_document: server.doc_id("src/lib.rs"),
+ range: Range::new(Position::new(2, 8), Position::new(2, 8)),
+ context: CodeActionContext::default(),
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([]),
+ );
+}
+
+#[test]
+fn test_missing_module_code_action_in_json_project() {
+    if skip_slow_tests() {
+        return; // honor the skip request — this test spawns a full LSP event loop
+    }
+
+    let tmp_dir = TestDir::new();
+
+    let path = tmp_dir.path();
+
+    let project = json!({
+        "roots": [path],
+        "crates": [ {
+            "root_module": path.join("src/lib.rs"),
+            "deps": [],
+            "edition": "2015",
+            "cfg": [ "cfg_atom_1", "feature=\"cfg_1\""],
+        } ]
+    });
+
+    let code = format!(
+        r#"
+//- /rust-project.json
+{PROJECT}
+
+//- /src/lib.rs
+mod bar;
+
+fn main() {{}}
+"#,
+        PROJECT = project,
+    );
+
+    let server =
+        Project::with_fixture(&code).tmp_dir(tmp_dir).server().wait_until_workspace_is_loaded();
+
+    server.request::<CodeActionRequest>(
+        CodeActionParams {
+            text_document: server.doc_id("src/lib.rs"),
+            range: Range::new(Position::new(0, 4), Position::new(0, 7)),
+            context: CodeActionContext::default(),
+            partial_result_params: PartialResultParams::default(),
+            work_done_progress_params: WorkDoneProgressParams::default(),
+        },
+        json!([
+            {
+                "title": "Create module at `bar.rs`",
+                "kind": "quickfix",
+                "edit": {
+                    "documentChanges": [
+                        {
+                            "kind": "create",
+                            "uri": "file://[..]/src/bar.rs"
+                        }
+                    ]
+                }
+            },
+            {
+                "title": "Create module at `bar/mod.rs`",
+                "kind": "quickfix",
+                "edit": {
+                    "documentChanges": [
+                        {
+                            "kind": "create",
+                            "uri": "file://[..]src/bar/mod.rs"
+                        }
+                    ]
+                }
+            }
+        ]),
+    );
+
+    server.request::<CodeActionRequest>(
+        CodeActionParams {
+            text_document: server.doc_id("src/lib.rs"),
+            range: Range::new(Position::new(2, 8), Position::new(2, 8)),
+            context: CodeActionContext::default(),
+            partial_result_params: PartialResultParams::default(),
+            work_done_progress_params: WorkDoneProgressParams::default(),
+        },
+        json!([]),
+    );
+}
+
+#[test]
+fn diagnostics_dont_block_typing() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let librs: String = (0..10).map(|i| format!("mod m{};", i)).collect();
+ let libs: String = (0..10).map(|i| format!("//- /src/m{}.rs\nfn foo() {{}}\n\n", i)).collect();
+ let server = Project::with_fixture(&format!(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+{}
+
+{}
+
+fn main() {{}}
+"#,
+ librs, libs
+ ))
+ .with_config(serde_json::json!({
+ "cargo": { "noSysroot": false }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ for i in 0..10 {
+ server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {
+ text_document: TextDocumentItem {
+ uri: server.doc_id(&format!("src/m{}.rs", i)).uri,
+ language_id: "rust".to_string(),
+ version: 0,
+ text: "/// Docs\nfn foo() {}".to_string(),
+ },
+ });
+ }
+ let start = Instant::now();
+ server.request::<OnEnter>(
+ TextDocumentPositionParams {
+ text_document: server.doc_id("src/m0.rs"),
+ position: Position { line: 0, character: 5 },
+ },
+ json!([{
+ "insertTextFormat": 2,
+ "newText": "\n/// $0",
+ "range": {
+ "end": { "character": 5, "line": 0 },
+ "start": { "character": 5, "line": 0 }
+ }
+ }]),
+ );
+ let elapsed = start.elapsed();
+ assert!(elapsed.as_millis() < 2000, "typing enter took {:?}", elapsed);
+}
+
+#[test]
+fn preserves_dos_line_endings() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ "
+//- /Cargo.toml
+[package]
+name = \"foo\"
+version = \"0.0.0\"
+
+//- /src/main.rs
+/// Some Docs\r\nfn main() {}
+",
+ )
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ server.request::<OnEnter>(
+ TextDocumentPositionParams {
+ text_document: server.doc_id("src/main.rs"),
+ position: Position { line: 0, character: 8 },
+ },
+ json!([{
+ "insertTextFormat": 2,
+ "newText": "\r\n/// $0",
+ "range": {
+ "end": { "line": 0, "character": 8 },
+ "start": { "line": 0, "character": 8 }
+ }
+ }]),
+ );
+}
+
+#[test]
+fn out_dirs_check() {
+    if skip_slow_tests() {
+        return; // honor the skip request — this test runs cargo build scripts
+    }
+
+    let server = Project::with_fixture(
+        r###"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /build.rs
+use std::{env, fs, path::Path};
+
+fn main() {
+    let out_dir = env::var_os("OUT_DIR").unwrap();
+    let dest_path = Path::new(&out_dir).join("hello.rs");
+    fs::write(
+        &dest_path,
+        r#"pub fn message() -> &'static str { "Hello, World!" }"#,
+    )
+    .unwrap();
+    println!("cargo:rustc-cfg=atom_cfg");
+    println!("cargo:rustc-cfg=featlike=\"set\"");
+    println!("cargo:rerun-if-changed=build.rs");
+}
+//- /src/main.rs
+#[rustc_builtin_macro] macro_rules! include {}
+#[rustc_builtin_macro] macro_rules! include_str {}
+#[rustc_builtin_macro] macro_rules! concat {}
+#[rustc_builtin_macro] macro_rules! env {}
+
+include!(concat!(env!("OUT_DIR"), "/hello.rs"));
+
+#[cfg(atom_cfg)]
+struct A;
+#[cfg(bad_atom_cfg)]
+struct A;
+#[cfg(featlike = "set")]
+struct B;
+#[cfg(featlike = "not_set")]
+struct B;
+
+fn main() {
+    let va = A;
+    let vb = B;
+    let should_be_str = message();
+    let another_str = include_str!("main.rs");
+}
+"###,
+    )
+    .with_config(serde_json::json!({
+        "cargo": {
+            "buildScripts": {
+                "enable": true
+            },
+            "noSysroot": true,
+        }
+    }))
+    .server()
+    .wait_until_workspace_is_loaded();
+
+    let res = server.send_request::<HoverRequest>(HoverParams {
+        text_document_position_params: TextDocumentPositionParams::new(
+            server.doc_id("src/main.rs"),
+            Position::new(19, 10),
+        ),
+        work_done_progress_params: Default::default(),
+    });
+    assert!(res.to_string().contains("&str"));
+
+    let res = server.send_request::<HoverRequest>(HoverParams {
+        text_document_position_params: TextDocumentPositionParams::new(
+            server.doc_id("src/main.rs"),
+            Position::new(20, 10),
+        ),
+        work_done_progress_params: Default::default(),
+    });
+    assert!(res.to_string().contains("&str"));
+
+    server.request::<GotoTypeDefinition>(
+        GotoDefinitionParams {
+            text_document_position_params: TextDocumentPositionParams::new(
+                server.doc_id("src/main.rs"),
+                Position::new(17, 9),
+            ),
+            work_done_progress_params: Default::default(),
+            partial_result_params: Default::default(),
+        },
+        json!([{
+            "originSelectionRange": {
+                "end": { "character": 10, "line": 17 },
+                "start": { "character": 8, "line": 17 }
+            },
+            "targetRange": {
+                "end": { "character": 9, "line": 8 },
+                "start": { "character": 0, "line": 7 }
+            },
+            "targetSelectionRange": {
+                "end": { "character": 8, "line": 8 },
+                "start": { "character": 7, "line": 8 }
+            },
+            "targetUri": "file:///[..]src/main.rs"
+        }]),
+    );
+
+    server.request::<GotoTypeDefinition>(
+        GotoDefinitionParams {
+            text_document_position_params: TextDocumentPositionParams::new(
+                server.doc_id("src/main.rs"),
+                Position::new(18, 9),
+            ),
+            work_done_progress_params: Default::default(),
+            partial_result_params: Default::default(),
+        },
+        json!([{
+            "originSelectionRange": {
+                "end": { "character": 10, "line": 18 },
+                "start": { "character": 8, "line": 18 }
+            },
+            "targetRange": {
+                "end": { "character": 9, "line": 12 },
+                "start": { "character": 0, "line": 11 }
+            },
+            "targetSelectionRange": {
+                "end": { "character": 8, "line": 12 },
+                "start": { "character": 7, "line": 12 }
+            },
+            "targetUri": "file:///[..]src/main.rs"
+        }]),
+    );
+}
+
+#[test]
+fn resolve_proc_macro() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r###"
+//- /foo/Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+edition = "2021"
+[dependencies]
+bar = {path = "../bar"}
+
+//- /foo/src/main.rs
+use bar::Bar;
+
+#[rustc_builtin_macro]
+macro derive($item:item) {}
+trait Bar {
+ fn bar();
+}
+#[derive(Bar)]
+struct Foo {}
+fn main() {
+ Foo::bar();
+}
+
+//- /bar/Cargo.toml
+[package]
+name = "bar"
+version = "0.0.0"
+edition = "2021"
+
+[lib]
+proc-macro = true
+
+//- /bar/src/lib.rs
+extern crate proc_macro;
+use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+macro_rules! t {
+ ($n:literal) => {
+ TokenTree::from(Ident::new($n, Span::call_site()))
+ };
+ ({}) => {
+ TokenTree::from(Group::new(Delimiter::Brace, TokenStream::new()))
+ };
+ (()) => {
+ TokenTree::from(Group::new(Delimiter::Parenthesis, TokenStream::new()))
+ };
+}
+#[proc_macro_derive(Bar)]
+pub fn foo(_input: TokenStream) -> TokenStream {
+ // We hard code the output here for preventing to use any deps
+ let mut res = TokenStream::new();
+
+ // ill behaved proc-macro will use the stdout
+ // we should ignore it
+ println!("I am bad guy");
+
+ // impl Bar for Foo { fn bar() {} }
+ let mut tokens = vec![t!("impl"), t!("Bar"), t!("for"), t!("Foo")];
+ let mut fn_stream = TokenStream::new();
+ fn_stream.extend(vec![t!("fn"), t!("bar"), t!(()), t!({})]);
+ tokens.push(Group::new(Delimiter::Brace, fn_stream).into());
+ res.extend(tokens);
+ res
+}
+
+"###,
+ )
+ .with_config(serde_json::json!({
+ "cargo": {
+ "buildScripts": {
+ "enable": true
+ },
+ "noSysroot": true,
+ },
+ "procMacro": {
+ "enable": true,
+ "server": PathBuf::from(env!("CARGO_BIN_EXE_rust-analyzer")),
+ }
+ }))
+ .root("foo")
+ .root("bar")
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ let res = server.send_request::<HoverRequest>(HoverParams {
+ text_document_position_params: TextDocumentPositionParams::new(
+ server.doc_id("foo/src/main.rs"),
+ Position::new(10, 9),
+ ),
+ work_done_progress_params: Default::default(),
+ });
+ let value = res.get("contents").unwrap().get("value").unwrap().as_str().unwrap();
+
+ expect![[r#"
+
+ ```rust
+ foo::Foo
+ ```
+
+ ```rust
+ fn bar()
+ ```"#]]
+ .assert_eq(value);
+}
+
+#[test]
+fn test_will_rename_files_same_level() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let tmp_dir = TestDir::new();
+ let tmp_dir_path = tmp_dir.path().to_owned();
+ let tmp_dir_str = tmp_dir_path.to_str().unwrap();
+ let base_path = PathBuf::from(format!("file://{}", tmp_dir_str));
+
+ let code = r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+mod old_file;
+mod from_mod;
+mod to_mod;
+mod old_folder;
+fn main() {}
+
+//- /src/old_file.rs
+
+//- /src/old_folder/mod.rs
+
+//- /src/from_mod/mod.rs
+
+//- /src/to_mod/foo.rs
+
+"#;
+ let server =
+ Project::with_fixture(code).tmp_dir(tmp_dir).server().wait_until_workspace_is_loaded();
+
+ //rename same level file
+ server.request::<WillRenameFiles>(
+ RenameFilesParams {
+ files: vec![FileRename {
+ old_uri: base_path.join("src/old_file.rs").to_str().unwrap().to_string(),
+ new_uri: base_path.join("src/new_file.rs").to_str().unwrap().to_string(),
+ }],
+ },
+ json!({
+ "documentChanges": [
+ {
+ "textDocument": {
+ "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")),
+ "version": null
+ },
+ "edits": [
+ {
+ "range": {
+ "start": {
+ "line": 0,
+ "character": 4
+ },
+ "end": {
+ "line": 0,
+ "character": 12
+ }
+ },
+ "newText": "new_file"
+ }
+ ]
+ }
+ ]
+ }),
+ );
+
+ //rename file from mod.rs to foo.rs
+ server.request::<WillRenameFiles>(
+ RenameFilesParams {
+ files: vec![FileRename {
+ old_uri: base_path.join("src/from_mod/mod.rs").to_str().unwrap().to_string(),
+ new_uri: base_path.join("src/from_mod/foo.rs").to_str().unwrap().to_string(),
+ }],
+ },
+ json!(null),
+ );
+
+ //rename file from foo.rs to mod.rs
+ server.request::<WillRenameFiles>(
+ RenameFilesParams {
+ files: vec![FileRename {
+ old_uri: base_path.join("src/to_mod/foo.rs").to_str().unwrap().to_string(),
+ new_uri: base_path.join("src/to_mod/mod.rs").to_str().unwrap().to_string(),
+ }],
+ },
+ json!(null),
+ );
+
+ //rename same level file
+ server.request::<WillRenameFiles>(
+ RenameFilesParams {
+ files: vec![FileRename {
+ old_uri: base_path.join("src/old_folder").to_str().unwrap().to_string(),
+ new_uri: base_path.join("src/new_folder").to_str().unwrap().to_string(),
+ }],
+ },
+ json!({
+ "documentChanges": [
+ {
+ "textDocument": {
+ "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")),
+ "version": null
+ },
+ "edits": [
+ {
+ "range": {
+ "start": {
+ "line": 3,
+ "character": 4
+ },
+ "end": {
+ "line": 3,
+ "character": 14
+ }
+ },
+ "newText": "new_folder"
+ }
+ ]
+ }
+ ]
+ }),
+ );
+}
+
+#[test]
+fn test_exclude_config_works() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /foo/Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /foo/src/lib.rs
+pub fn foo() {}
+
+//- /bar/Cargo.toml
+[package]
+name = "bar"
+version = "0.0.0"
+
+//- /bar/src/lib.rs
+pub fn bar() {}
+"#,
+ )
+ .root("foo")
+ .root("bar")
+ .with_config(json!({
+ "files": {
+ "excludeDirs": ["foo", "bar"]
+ }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ server.request::<WorkspaceSymbol>(Default::default(), json!([]));
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs
new file mode 100644
index 000000000..e6ac018a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs
@@ -0,0 +1,80 @@
+//! Generates `assists.md` documentation.
+
+use std::{fmt, fs, io, path::PathBuf};
+
+#[test]
+fn sourcegen_feature_docs() {
+ let features = Feature::collect().unwrap();
+ let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
+ let contents = format!(
+ "
+// Generated file, do not edit by hand, see `sourcegen_feature_docs`.
+{}
+",
+ contents.trim()
+ );
+ let dst = sourcegen::project_root().join("docs/user/generated_features.adoc");
+ fs::write(&dst, &contents).unwrap();
+}
+
+#[derive(Debug)]
+struct Feature {
+ id: String,
+ location: sourcegen::Location,
+ doc: String,
+}
+
+impl Feature {
+ fn collect() -> io::Result<Vec<Feature>> {
+ let crates_dir = sourcegen::project_root().join("crates");
+
+ let mut res = Vec::new();
+ for path in sourcegen::list_rust_files(&crates_dir) {
+ collect_file(&mut res, path)?;
+ }
+ res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
+ return Ok(res);
+
+ fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> io::Result<()> {
+ let text = std::fs::read_to_string(&path)?;
+ let comment_blocks = sourcegen::CommentBlock::extract("Feature", &text);
+
+ for block in comment_blocks {
+ let id = block.id;
+ if let Err(msg) = is_valid_feature_name(&id) {
+ panic!("invalid feature name: {:?}:\n {}", id, msg)
+ }
+ let doc = block.contents.join("\n");
+ let location = sourcegen::Location { file: path.clone(), line: block.line };
+ acc.push(Feature { id, location, doc })
+ }
+
+ Ok(())
+ }
+ }
+}
+
+fn is_valid_feature_name(feature: &str) -> Result<(), String> {
+ 'word: for word in feature.split_whitespace() {
+ for short in ["to", "and"] {
+ if word == short {
+ continue 'word;
+ }
+ }
+ for short in ["To", "And"] {
+ if word == short {
+ return Err(format!("Don't capitalize {:?}", word));
+ }
+ }
+ if !word.starts_with(char::is_uppercase) {
+ return Err(format!("Capitalize {:?}", word));
+ }
+ }
+ Ok(())
+}
+
+impl fmt::Display for Feature {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
new file mode 100644
index 000000000..4fa88c3c6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -0,0 +1,406 @@
+use std::{
+ cell::{Cell, RefCell},
+ fs,
+ path::{Path, PathBuf},
+ sync::Once,
+ time::Duration,
+};
+
+use crossbeam_channel::{after, select, Receiver};
+use lsp_server::{Connection, Message, Notification, Request};
+use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
+use project_model::ProjectManifest;
+use rust_analyzer::{config::Config, lsp_ext, main_loop};
+use serde::Serialize;
+use serde_json::{json, to_string_pretty, Value};
+use test_utils::Fixture;
+use vfs::AbsPathBuf;
+
+use crate::testdir::TestDir;
+
+pub(crate) struct Project<'a> {
+ fixture: &'a str,
+ tmp_dir: Option<TestDir>,
+ roots: Vec<PathBuf>,
+ config: serde_json::Value,
+}
+
+impl<'a> Project<'a> {
+ pub(crate) fn with_fixture(fixture: &str) -> Project<'_> {
+ Project {
+ fixture,
+ tmp_dir: None,
+ roots: vec![],
+ config: serde_json::json!({
+ "cargo": {
+ // Loading standard library is costly, let's ignore it by default
+ "noSysroot": true,
+ // Can't use test binary as rustc wrapper.
+ "buildScripts": {
+ "useRustcWrapper": false
+ },
+ }
+ }),
+ }
+ }
+
+ pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Project<'a> {
+ self.tmp_dir = Some(tmp_dir);
+ self
+ }
+
+ pub(crate) fn root(mut self, path: &str) -> Project<'a> {
+ self.roots.push(path.into());
+ self
+ }
+
+ pub(crate) fn with_config(mut self, config: serde_json::Value) -> Project<'a> {
+ fn merge(dst: &mut serde_json::Value, src: serde_json::Value) {
+ match (dst, src) {
+ (Value::Object(dst), Value::Object(src)) => {
+ for (k, v) in src {
+ merge(dst.entry(k).or_insert(v.clone()), v)
+ }
+ }
+ (dst, src) => *dst = src,
+ }
+ }
+ merge(&mut self.config, config);
+ self
+ }
+
+ pub(crate) fn server(self) -> Server {
+ let tmp_dir = self.tmp_dir.unwrap_or_else(TestDir::new);
+ static INIT: Once = Once::new();
+ INIT.call_once(|| {
+ tracing_subscriber::fmt()
+ .with_test_writer()
+ .with_env_filter(tracing_subscriber::EnvFilter::from_env("RA_LOG"))
+ .init();
+ profile::init_from(crate::PROFILE);
+ });
+
+ let (mini_core, proc_macros, fixtures) = Fixture::parse(self.fixture);
+ assert!(proc_macros.is_empty());
+ assert!(mini_core.is_none());
+ for entry in fixtures {
+ let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]);
+ fs::create_dir_all(path.parent().unwrap()).unwrap();
+ fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
+ }
+
+ let tmp_dir_path = AbsPathBuf::assert(tmp_dir.path().to_path_buf());
+ let mut roots =
+ self.roots.into_iter().map(|root| tmp_dir_path.join(root)).collect::<Vec<_>>();
+ if roots.is_empty() {
+ roots.push(tmp_dir_path.clone());
+ }
+ let discovered_projects = roots
+ .into_iter()
+ .map(|it| ProjectManifest::discover_single(&it).unwrap())
+ .collect::<Vec<_>>();
+
+ let mut config = Config::new(
+ tmp_dir_path,
+ lsp_types::ClientCapabilities {
+ workspace: Some(lsp_types::WorkspaceClientCapabilities {
+ did_change_watched_files: Some(
+ lsp_types::DidChangeWatchedFilesClientCapabilities {
+ dynamic_registration: Some(true),
+ },
+ ),
+ ..Default::default()
+ }),
+ text_document: Some(lsp_types::TextDocumentClientCapabilities {
+ definition: Some(lsp_types::GotoCapability {
+ link_support: Some(true),
+ ..Default::default()
+ }),
+ code_action: Some(lsp_types::CodeActionClientCapabilities {
+ code_action_literal_support: Some(
+ lsp_types::CodeActionLiteralSupport::default(),
+ ),
+ ..Default::default()
+ }),
+ hover: Some(lsp_types::HoverClientCapabilities {
+ content_format: Some(vec![lsp_types::MarkupKind::Markdown]),
+ ..Default::default()
+ }),
+ ..Default::default()
+ }),
+ window: Some(lsp_types::WindowClientCapabilities {
+ work_done_progress: Some(false),
+ ..Default::default()
+ }),
+ experimental: Some(json!({
+ "serverStatusNotification": true,
+ })),
+ ..Default::default()
+ },
+ );
+ config.discovered_projects = Some(discovered_projects);
+ config.update(self.config).expect("invalid config");
+
+ Server::new(tmp_dir, config)
+ }
+}
+
+pub(crate) fn project(fixture: &str) -> Server {
+ Project::with_fixture(fixture).server()
+}
+
+pub(crate) struct Server {
+ req_id: Cell<i32>,
+ messages: RefCell<Vec<Message>>,
+ _thread: jod_thread::JoinHandle<()>,
+ client: Connection,
+ /// XXX: remove the tempdir last
+ dir: TestDir,
+}
+
+impl Server {
+ fn new(dir: TestDir, config: Config) -> Server {
+ let (connection, client) = Connection::memory();
+
+ let _thread = jod_thread::Builder::new()
+ .name("test server".to_string())
+ .spawn(move || main_loop(config, connection).unwrap())
+ .expect("failed to spawn a thread");
+
+ Server { req_id: Cell::new(1), dir, messages: Default::default(), client, _thread }
+ }
+
+ pub(crate) fn doc_id(&self, rel_path: &str) -> TextDocumentIdentifier {
+ let path = self.dir.path().join(rel_path);
+ TextDocumentIdentifier { uri: Url::from_file_path(path).unwrap() }
+ }
+
+ pub(crate) fn notification<N>(&self, params: N::Params)
+ where
+ N: lsp_types::notification::Notification,
+ N::Params: Serialize,
+ {
+ let r = Notification::new(N::METHOD.to_string(), params);
+ self.send_notification(r)
+ }
+
+ #[track_caller]
+ pub(crate) fn request<R>(&self, params: R::Params, expected_resp: Value)
+ where
+ R: lsp_types::request::Request,
+ R::Params: Serialize,
+ {
+ let actual = self.send_request::<R>(params);
+ if let Some((expected_part, actual_part)) = find_mismatch(&expected_resp, &actual) {
+ panic!(
+ "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+ to_string_pretty(&expected_resp).unwrap(),
+ to_string_pretty(&actual).unwrap(),
+ to_string_pretty(expected_part).unwrap(),
+ to_string_pretty(actual_part).unwrap(),
+ );
+ }
+ }
+
+ pub(crate) fn send_request<R>(&self, params: R::Params) -> Value
+ where
+ R: lsp_types::request::Request,
+ R::Params: Serialize,
+ {
+ let id = self.req_id.get();
+ self.req_id.set(id.wrapping_add(1));
+
+ let r = Request::new(id.into(), R::METHOD.to_string(), params);
+ self.send_request_(r)
+ }
+ fn send_request_(&self, r: Request) -> Value {
+ let id = r.id.clone();
+ self.client.sender.send(r.clone().into()).unwrap();
+ while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {:?}", r)) {
+ match msg {
+ Message::Request(req) => {
+ if req.method == "client/registerCapability" {
+ let params = req.params.to_string();
+ if ["workspace/didChangeWatchedFiles", "textDocument/didSave"]
+ .into_iter()
+ .any(|it| params.contains(it))
+ {
+ continue;
+ }
+ }
+ panic!("unexpected request: {:?}", req)
+ }
+ Message::Notification(_) => (),
+ Message::Response(res) => {
+ assert_eq!(res.id, id);
+ if let Some(err) = res.error {
+ panic!("error response: {:#?}", err);
+ }
+ return res.result.unwrap();
+ }
+ }
+ }
+ panic!("no response for {:?}", r);
+ }
+    /// Blocks until the server reports a quiescent `experimental/serverStatus`,
+    /// i.e. the initial workspace load has finished.
+    pub(crate) fn wait_until_workspace_is_loaded(self) -> Server {
+        self.wait_for_message_cond(1, &|msg: &Message| match msg {
+            Message::Notification(n) if n.method == "experimental/serverStatus" => {
+                let status = n
+                    .clone()
+                    .extract::<lsp_ext::ServerStatusParams>("experimental/serverStatus")
+                    .unwrap();
+                status.quiescent
+            }
+            _ => false,
+        })
+        .unwrap_or_else(|Timeout| panic!("timeout while waiting for ws to load"));
+        self
+    }
+    /// Waits until `cond` has matched `n` messages in total, first counting
+    /// messages that were already received and recorded, then blocking for new
+    /// ones until the count is reached.
+    fn wait_for_message_cond(
+        &self,
+        n: usize,
+        cond: &dyn Fn(&Message) -> bool,
+    ) -> Result<(), Timeout> {
+        let mut total = 0;
+        for msg in self.messages.borrow().iter() {
+            if cond(msg) {
+                total += 1
+            }
+        }
+        while total < n {
+            let msg = self.recv()?.expect("no response");
+            if cond(&msg) {
+                total += 1;
+            }
+        }
+        Ok(())
+    }
+    /// Receives one message (with timeout) and records it so that
+    /// `wait_for_message_cond` can inspect past traffic.
+    fn recv(&self) -> Result<Option<Message>, Timeout> {
+        let msg = recv_timeout(&self.client.receiver)?;
+        let msg = msg.map(|msg| {
+            self.messages.borrow_mut().push(msg.clone());
+            msg
+        });
+        Ok(msg)
+    }
+    /// Raw channel send for an already-built notification.
+    fn send_notification(&self, not: Notification) {
+        self.client.sender.send(Message::Notification(not)).unwrap();
+    }
+
+    /// Root of the temporary project directory this server was started in.
+    pub(crate) fn path(&self) -> &Path {
+        self.dir.path()
+    }
+}
+
+impl Drop for Server {
+    fn drop(&mut self) {
+        // Perform the regular LSP shutdown handshake (shutdown request, then
+        // exit notification) so the server terminates cleanly.
+        self.request::<Shutdown>((), Value::Null);
+        self.notification::<Exit>(());
+    }
+}
+
+/// Marker error: no message arrived within `recv_timeout`'s deadline.
+struct Timeout;
+
+/// Receives from `receiver`, giving up after a generous deadline.
+/// The macOS deadline is longer — presumably because those CI builders are
+/// slower (see `TestDir::new` for related macOS-builder workarounds).
+fn recv_timeout(receiver: &Receiver<Message>) -> Result<Option<Message>, Timeout> {
+    let timeout =
+        if cfg!(target_os = "macos") { Duration::from_secs(300) } else { Duration::from_secs(120) };
+    select! {
+        recv(receiver) -> msg => Ok(msg.ok()),
+        recv(after(timeout)) -> _ => Err(Timeout),
+    }
+}
+
+// Comparison functionality borrowed from cargo:
+
+/// Compares JSON object for approximate equality.
+/// You can use `[..]` wildcard in strings (useful for OS dependent things such
+/// as paths). You can use a `"{...}"` string literal as a wildcard for
+/// arbitrary nested JSON. Arrays are sorted before comparison.
+///
+/// Returns the first mismatching `(expected, actual)` pair, or `None` if the
+/// values match.
+fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> {
+    match (expected, actual) {
+        (Value::Number(l), Value::Number(r)) if l == r => None,
+        (Value::Bool(l), Value::Bool(r)) if l == r => None,
+        (Value::String(l), Value::String(r)) if lines_match(l, r) => None,
+        (Value::Array(l), Value::Array(r)) => {
+            if l.len() != r.len() {
+                return Some((expected, actual));
+            }
+
+            let mut l = l.iter().collect::<Vec<_>>();
+            let mut r = r.iter().collect::<Vec<_>>();
+
+            // Greedily pair up matching elements; anything left in `l` has no
+            // counterpart in `r` (comparison is order-insensitive).
+            l.retain(|l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
+                Some(i) => {
+                    r.remove(i);
+                    false
+                }
+                None => true,
+            });
+
+            if !l.is_empty() {
+                assert!(!r.is_empty());
+                Some((l[0], r[0]))
+            } else {
+                assert_eq!(r.len(), 0);
+                None
+            }
+        }
+        (Value::Object(l), Value::Object(r)) => {
+            fn sorted_values(obj: &serde_json::Map<String, Value>) -> Vec<&Value> {
+                let mut entries = obj.iter().collect::<Vec<_>>();
+                entries.sort_by_key(|it| it.0);
+                entries.into_iter().map(|(_k, v)| v).collect::<Vec<_>>()
+            }
+
+            let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
+            if !same_keys {
+                return Some((expected, actual));
+            }
+
+            // Same key sets, so sorting values by key aligns the two maps.
+            let l = sorted_values(l);
+            let r = sorted_values(r);
+
+            l.into_iter().zip(r).find_map(|(l, r)| find_mismatch(l, r))
+        }
+        (Value::Null, Value::Null) => None,
+        // magic string literal "{...}" acts as wildcard for any sub-JSON
+        (Value::String(l), _) if l == "{...}" => None,
+        _ => Some((expected, actual)),
+    }
+}
+
+/// Compare a line with an expected pattern.
+/// - Use `[..]` as a wildcard to match 0 or more characters on the same line
+/// (similar to `.*` in a regex).
+fn lines_match(expected: &str, actual: &str) -> bool {
+    // Let's not deal with / vs \ (windows...)
+    // First replace backslash-escaped backslashes with forward slashes
+    // which can occur in, for example, JSON output
+    let expected = expected.replace(r"\\", "/").replace('\\', "/");
+    let mut actual: &str = &actual.replace(r"\\", "/").replace('\\', "/");
+    // Match each literal segment between `[..]` wildcards in order, consuming
+    // `actual` as we go.
+    for (i, part) in expected.split("[..]").enumerate() {
+        match actual.find(part) {
+            Some(j) => {
+                // The first segment must be anchored at the start of the line.
+                if i == 0 && j != 0 {
+                    return false;
+                }
+                actual = &actual[j + part.len()..];
+            }
+            None => return false,
+        }
+    }
+    // A trailing `[..]` may absorb whatever is left of `actual`.
+    actual.is_empty() || expected.ends_with("[..]")
+}
+
+// Sanity checks for the `[..]` wildcard handling in `lines_match`.
+#[test]
+fn lines_match_works() {
+    assert!(lines_match("a b", "a b"));
+    assert!(lines_match("a[..]b", "a b"));
+    assert!(lines_match("a[..]", "a b"));
+    assert!(lines_match("[..]", "a b"));
+    assert!(lines_match("[..]b", "a b"));
+
+    assert!(!lines_match("[..]b", "c"));
+    assert!(!lines_match("b", "c"));
+    assert!(!lines_match("b", "cb"));
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs
new file mode 100644
index 000000000..3bec23a91
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs
@@ -0,0 +1,75 @@
+use std::{
+ fs, io,
+ path::{Path, PathBuf},
+ sync::atomic::{AtomicUsize, Ordering},
+};
+
+/// A fresh directory under the system temp dir, removed on drop unless
+/// [`TestDir::keep`] was called.
+pub(crate) struct TestDir {
+    path: PathBuf,
+    // When true, the directory is left behind for post-mortem inspection.
+    keep: bool,
+}
+
+impl TestDir {
+    /// Creates a unique `<tmp>/testdir/<pid>_<counter>` directory.
+    pub(crate) fn new() -> TestDir {
+        let temp_dir = std::env::temp_dir();
+        // On MacOS builders on GitHub actions, the temp dir is a symlink, and
+        // that causes problems down the line. Specifically:
+        // * Cargo may emit different PackageId depending on the working directory
+        // * rust-analyzer may fail to map LSP URIs to correct paths.
+        //
+        // Work-around this by canonicalizing. Note that we don't want to do this
+        // on *every* OS, as on windows `canonicalize` itself creates problems.
+        #[cfg(target_os = "macos")]
+        let temp_dir = temp_dir.canonicalize().unwrap();
+
+        let base = temp_dir.join("testdir");
+        let pid = std::process::id();
+
+        // Process-wide counter makes names unique across concurrent tests; the
+        // pid disambiguates between test processes sharing the temp dir.
+        static CNT: AtomicUsize = AtomicUsize::new(0);
+        for _ in 0..100 {
+            let cnt = CNT.fetch_add(1, Ordering::Relaxed);
+            let path = base.join(format!("{}_{}", pid, cnt));
+            if path.is_dir() {
+                continue;
+            }
+            fs::create_dir_all(&path).unwrap();
+            return TestDir { path, keep: false };
+        }
+        panic!("Failed to create a temporary directory")
+    }
+    /// Marks the directory to be kept on drop (debugging aid).
+    #[allow(unused)]
+    pub(crate) fn keep(mut self) -> TestDir {
+        self.keep = true;
+        self
+    }
+    /// The created directory's path.
+    pub(crate) fn path(&self) -> &Path {
+        &self.path
+    }
+}
+
+impl Drop for TestDir {
+    fn drop(&mut self) {
+        if self.keep {
+            return;
+        }
+        remove_dir_all(&self.path).unwrap_or_else(|err| {
+            panic!("failed to remove temporary directory {}: {}", self.path.display(), err)
+        })
+    }
+}
+
+// On non-Windows a plain recursive delete is reliable.
+#[cfg(not(windows))]
+fn remove_dir_all(path: &Path) -> io::Result<()> {
+    fs::remove_dir_all(path)
+}
+
+// On Windows deletion can fail transiently — presumably because another
+// process (antivirus, indexer) still holds a handle into the tree; retry with
+// a short sleep before giving up. TODO(review): confirm the root cause.
+#[cfg(windows)]
+fn remove_dir_all(path: &Path) -> io::Result<()> {
+    for _ in 0..99 {
+        if fs::remove_dir_all(path).is_ok() {
+            return Ok(());
+        }
+        std::thread::sleep(std::time::Duration::from_millis(10))
+    }
+    // Final attempt; its error (if any) is the one reported to the caller.
+    fs::remove_dir_all(path)
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
new file mode 100644
index 000000000..18f95925d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -0,0 +1,473 @@
+use std::{
+ collections::HashSet,
+ path::{Path, PathBuf},
+};
+
+use xshell::Shell;
+
+#[cfg(not(feature = "in-rust-tree"))]
+use xshell::cmd;
+
+// Verifies the tree is formatted with stable rustfmt; on failure it reformats
+// the tree in place so a local `cargo test` leaves things fixed, then still
+// fails to flag the problem.
+#[cfg(not(feature = "in-rust-tree"))]
+#[test]
+fn check_code_formatting() {
+    let sh = &Shell::new().unwrap();
+    sh.change_dir(sourcegen::project_root());
+    sh.set_var("RUSTUP_TOOLCHAIN", "stable");
+
+    let out = cmd!(sh, "rustfmt --version").read().unwrap();
+    if !out.contains("stable") {
+        panic!(
+            "Failed to run rustfmt from toolchain 'stable'. \
+             Please run `rustup component add rustfmt --toolchain stable` to install it.",
+        )
+    }
+
+    let res = cmd!(sh, "cargo fmt -- --check").run();
+    if res.is_err() {
+        let _ = cmd!(sh, "cargo fmt").run();
+    }
+    res.unwrap()
+}
+
+// Ensures `lsp-extensions.md` is kept in sync with `lsp_ext.rs`: the doc
+// embeds a hash of the source file, which must match the current contents.
+#[test]
+fn check_lsp_extensions_docs() {
+    let sh = &Shell::new().unwrap();
+
+    let expected_hash = {
+        let lsp_ext_rs = sh
+            .read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp_ext.rs"))
+            .unwrap();
+        stable_hash(lsp_ext_rs.as_str())
+    };
+
+    let actual_hash = {
+        let lsp_extensions_md =
+            sh.read_file(sourcegen::project_root().join("docs/dev/lsp-extensions.md")).unwrap();
+        // The hash is recorded in the doc as a hex value on a marker line.
+        let text = lsp_extensions_md
+            .lines()
+            .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
+            .unwrap()
+            .trim();
+        u64::from_str_radix(text, 16).unwrap()
+    };
+
+    if actual_hash != expected_hash {
+        panic!(
+            "
+lsp_ext.rs was changed without touching lsp-extensions.md.
+
+Expected hash: {:x}
+Actual hash:   {:x}
+
+Please adjust docs/dev/lsp-extensions.md.
+",
+            expected_hash, actual_hash
+        )
+    }
+}
+
+// Driver test: walks every file under `crates/` and applies the per-file
+// hygiene checks below (`.rs` sources and `.toml` manifests).
+#[test]
+fn files_are_tidy() {
+    let sh = &Shell::new().unwrap();
+
+    let files = sourcegen::list_files(&sourcegen::project_root().join("crates"));
+
+    let mut tidy_docs = TidyDocs::default();
+    let mut tidy_marks = TidyMarks::default();
+    for path in files {
+        let extension = path.extension().unwrap_or_default().to_str().unwrap_or_default();
+        match extension {
+            "rs" => {
+                let text = sh.read_file(&path).unwrap();
+                check_todo(&path, &text);
+                check_dbg(&path, &text);
+                check_test_attrs(&path, &text);
+                check_trailing_ws(&path, &text);
+                deny_clippy(&path, &text);
+                tidy_docs.visit(&path, &text);
+                tidy_marks.visit(&path, &text);
+            }
+            "toml" => {
+                let text = sh.read_file(&path).unwrap();
+                check_cargo_toml(&path, text);
+            }
+            _ => (),
+        }
+    }
+
+    // The accumulating checks report once, over the whole tree.
+    tidy_docs.finish();
+    tidy_marks.finish();
+}
+
+/// Lints a `Cargo.toml`: internal (`path =`) dependencies must carry a
+/// `version` in `[dependencies]` sections but must NOT in
+/// `[dev-dependencies]`, and section header lines must be bare.
+fn check_cargo_toml(path: &Path, text: String) {
+    let mut section = None;
+    for (line_no, text) in text.lines().enumerate() {
+        let text = text.trim();
+        if text.starts_with('[') {
+            if !text.ends_with(']') {
+                panic!(
+                    "\nplease don't add comments or trailing whitespace in section lines.\n\
+                     {}:{}\n",
+                    path.display(),
+                    line_no + 1
+                )
+            }
+            section = Some(text);
+            continue;
+        }
+        // Strip all whitespace so `path = "..."` and `path="..."` compare equal.
+        let text: String = text.split_whitespace().collect();
+        if !text.contains("path=") {
+            continue;
+        }
+        // Note: dev-dependencies is matched first, since the plain
+        // "dependencies" substring test would also match it.
+        match section {
+            Some(s) if s.contains("dev-dependencies") => {
+                if text.contains("version") {
+                    panic!(
+                        "\ncargo internal dev-dependencies should not have a version.\n\
+                         {}:{}\n",
+                        path.display(),
+                        line_no + 1
+                    );
+                }
+            }
+            Some(s) if s.contains("dependencies") => {
+                if !text.contains("version") {
+                    panic!(
+                        "\ncargo internal dependencies should have a version.\n\
+                         {}:{}\n",
+                        path.display(),
+                        line_no + 1
+                    );
+                }
+            }
+            _ => {}
+        }
+    }
+}
+
+/// Forbids `allow(clippy...)` attributes anywhere but an explicit allow-list.
+fn deny_clippy(path: &Path, text: &str) {
+    let ignore = &[
+        // The documentation in string literals may contain anything for its own purposes
+        "ide-db/src/generated/lints.rs",
+        // The tests test clippy lint hovers
+        "ide/src/hover/tests.rs",
+        // The tests test clippy lint completions
+        "ide-completion/src/tests/attribute.rs",
+    ];
+    if ignore.iter().any(|p| path.ends_with(p)) {
+        return;
+    }
+
+    // "\u{61}" is 'a': the needle is spelled with an escape so this source
+    // file doesn't match its own check.
+    if text.contains("\u{61}llow(clippy") {
+        panic!(
+            "\n\nallowing lints is forbidden: {}.
+rust-analyzer intentionally doesn't check clippy on CI.
+You can allow lint globally via `xtask clippy`.
+See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.
+
+",
+            path.display()
+        )
+    }
+}
+
+// Asserts the set of dependency license strings (scraped from
+// `cargo metadata`) exactly matches a vetted allow-list, printing a diff of
+// added/removed licenses on failure.
+#[cfg(not(feature = "in-rust-tree"))]
+#[test]
+fn check_licenses() {
+    let sh = &Shell::new().unwrap();
+
+    let expected = "
+0BSD OR MIT OR Apache-2.0
+Apache-2.0
+Apache-2.0 OR BSL-1.0
+Apache-2.0 OR MIT
+Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
+Apache-2.0/MIT
+BSD-3-Clause
+BlueOak-1.0.0 OR MIT OR Apache-2.0
+CC0-1.0 OR Artistic-2.0
+ISC
+MIT
+MIT / Apache-2.0
+MIT OR Apache-2.0
+MIT OR Apache-2.0 OR Zlib
+MIT OR Zlib OR Apache-2.0
+MIT/Apache-2.0
+Unlicense/MIT
+Zlib OR Apache-2.0 OR MIT
+"
+    .lines()
+    .filter(|it| !it.is_empty())
+    .collect::<Vec<_>>();
+
+    // Cheap textual scrape of the metadata JSON for `"license"` fields —
+    // avoids pulling in a JSON parser just for this test.
+    let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
+    let mut licenses = meta
+        .split(|c| c == ',' || c == '{' || c == '}')
+        .filter(|it| it.contains(r#""license""#))
+        .map(|it| it.trim())
+        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
+        .collect::<Vec<_>>();
+    licenses.sort_unstable();
+    licenses.dedup();
+    if licenses != expected {
+        let mut diff = String::new();
+
+        diff.push_str("New Licenses:\n");
+        for &l in licenses.iter() {
+            if !expected.contains(&l) {
+                diff += &format!("  {}\n", l)
+            }
+        }
+
+        diff.push_str("\nMissing Licenses:\n");
+        for &l in expected.iter() {
+            if !licenses.contains(&l) {
+                diff += &format!("  {}\n", l)
+            }
+        }
+
+        panic!("different set of licenses!\n{}", diff);
+    }
+    assert_eq!(licenses, expected);
+}
+
+/// Forbids `TODO` markers (including the common `TOOD` typo) and `todo!`
+/// macros outside an allow-list; FIXME is the sanctioned marker instead.
+fn check_todo(path: &Path, text: &str) {
+    let need_todo = &[
+        // This file itself obviously needs to use todo (<- like this!).
+        "tests/tidy.rs",
+        // Some of our assists generate `todo!()`.
+        "handlers/add_turbo_fish.rs",
+        "handlers/generate_function.rs",
+        "handlers/add_missing_match_arms.rs",
+        "handlers/replace_derive_with_manual_impl.rs",
+        // To support generating `todo!()` in assists, we have `expr_todo()` in
+        // `ast::make`.
+        "ast/make.rs",
+        // The documentation in string literals may contain anything for its own purposes
+        "ide-db/src/generated/lints.rs",
+        "ide-assists/src/utils/gen_trait_fn_body.rs",
+        "ide-assists/src/tests/generated.rs",
+        // The tests for missing fields
+        "ide-diagnostics/src/handlers/missing_fields.rs",
+    ];
+    if need_todo.iter().any(|p| path.ends_with(p)) {
+        return;
+    }
+    if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
+        // Generated by an assist
+        if text.contains("${0:todo!()}") {
+            return;
+        }
+
+        panic!(
+            "\nTODO markers or todo! macros should not be committed to the master branch,\n\
+             use FIXME instead\n\
+             {}\n",
+            path.display(),
+        )
+    }
+}
+
+/// Forbids committed `dbg!` invocations outside an allow-list of files that
+/// legitimately mention them.
+fn check_dbg(path: &Path, text: &str) {
+    let need_dbg = &[
+        // This file itself obviously needs to use dbg.
+        "slow-tests/tidy.rs",
+        // Assists to remove `dbg!()`
+        "handlers/remove_dbg.rs",
+        // We have .dbg postfix
+        "ide-completion/src/completions/postfix.rs",
+        "ide-completion/src/completions/keyword.rs",
+        "ide-completion/src/tests/proc_macros.rs",
+        // The documentation in string literals may contain anything for its own purposes
+        "ide-completion/src/lib.rs",
+        "ide-db/src/generated/lints.rs",
+        // test for doc test for remove_dbg
+        "src/tests/generated.rs",
+    ];
+    if need_dbg.iter().any(|p| path.ends_with(p)) {
+        return;
+    }
+    if text.contains("dbg!") {
+        panic!(
+            "\ndbg! macros should not be committed to the master branch,\n\
+             {}\n",
+            path.display(),
+        )
+    }
+}
+
+/// Forbids `#[ignore]` and `#[should_panic]` test attributes outside their
+/// respective allow-lists, pointing offenders at the style guide.
+fn check_test_attrs(path: &Path, text: &str) {
+    let ignore_rule =
+        "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";
+    let need_ignore: &[&str] = &[
+        // This file.
+        "slow-tests/tidy.rs",
+        // Special case to run `#[ignore]` tests.
+        "ide/src/runnables.rs",
+        // A legit test which needs to be ignored, as it takes too long to run
+        // :(
+        "hir-def/src/nameres/collector.rs",
+        // Long sourcegen test to generate lint completions.
+        "ide-db/src/tests/sourcegen_lints.rs",
+        // Obviously needs ignore.
+        "ide-assists/src/handlers/toggle_ignore.rs",
+        // See above.
+        "ide-assists/src/tests/generated.rs",
+    ];
+    if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
+        panic!("\ndon't `#[ignore]` tests, see:\n\n    {}\n\n   {}\n", ignore_rule, path.display(),)
+    }
+
+    let panic_rule =
+        "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
+    let need_panic: &[&str] = &[
+        // This file.
+        "slow-tests/tidy.rs",
+        "test-utils/src/fixture.rs",
+    ];
+    if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
+        panic!(
+            "\ndon't add `#[should_panic]` tests, see:\n\n    {}\n\n    {}\n",
+            panic_rule,
+            path.display(),
+        )
+    }
+}
+
+/// Rejects trailing whitespace on any line (test fixtures under `test_data`
+/// are exempt, since their whitespace may be significant).
+fn check_trailing_ws(path: &Path, text: &str) {
+    if is_exclude_dir(path, &["test_data"]) {
+        return;
+    }
+    for (line_number, line) in text.lines().enumerate() {
+        if line.chars().last().map(char::is_whitespace) == Some(true) {
+            panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
+        }
+    }
+}
+
+/// Accumulates, over the whole tree, modules that lack a `//!` doc comment and
+/// doc comments that contain FIXME; reported in one go by `finish`.
+#[derive(Default)]
+struct TidyDocs {
+    missing_docs: Vec<String>,
+    contains_fixme: Vec<PathBuf>,
+}
+
+impl TidyDocs {
+    /// Inspects the first line of a Rust file for a module-level doc comment.
+    fn visit(&mut self, path: &Path, text: &str) {
+        // Tests and diagnostic fixes don't need module level comments.
+        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar"]) {
+            return;
+        }
+
+        // NB: the nested `is_exclude_file` is defined below its use — items
+        // inside a fn body are visible throughout that body.
+        if is_exclude_file(path) {
+            return;
+        }
+
+        let first_line = match text.lines().next() {
+            Some(it) => it,
+            None => return,
+        };
+
+        if first_line.starts_with("//!") {
+            if first_line.contains("FIXME") {
+                self.contains_fixme.push(path.to_path_buf());
+            }
+        } else {
+            // Feature/Assist/Diagnostic blocks are documentation in their own
+            // right, so such files get a pass.
+            if text.contains("// Feature:")
+                || text.contains("// Assist:")
+                || text.contains("// Diagnostic:")
+            {
+                return;
+            }
+            self.missing_docs.push(path.display().to_string());
+        }
+
+        fn is_exclude_file(d: &Path) -> bool {
+            let file_names = ["tests.rs", "famous_defs_fixture.rs"];
+
+            d.file_name()
+                .unwrap_or_default()
+                .to_str()
+                .map(|f_n| file_names.iter().any(|name| *name == f_n))
+                .unwrap_or(false)
+        }
+    }
+
+    /// Panics with the aggregate report if any violations were collected.
+    fn finish(self) {
+        if !self.missing_docs.is_empty() {
+            panic!(
+                "\nMissing docs strings\n\n\
+                 modules:\n{}\n\n",
+                self.missing_docs.join("\n")
+            )
+        }
+
+        for path in self.contains_fixme {
+            panic!("FIXME doc in a fully-documented crate: {}", path.display())
+        }
+    }
+}
+
+/// Returns true if any *directory* component of `p` (relative to the project
+/// root) is in `dirs_to_exclude`; `.rev().skip(1)` drops the file name itself.
+fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
+    p.strip_prefix(sourcegen::project_root())
+        .unwrap()
+        .components()
+        .rev()
+        .skip(1)
+        .filter_map(|it| it.as_os_str().to_str())
+        .any(|it| dirs_to_exclude.contains(&it))
+}
+
+/// Collects `hit!`/`check!` coverage-mark names across the tree and verifies
+/// every mark is paired: each `hit` has a `check` and vice versa.
+#[derive(Default)]
+struct TidyMarks {
+    hits: HashSet<String>,
+    checks: HashSet<String>,
+}
+
+impl TidyMarks {
+    fn visit(&mut self, _path: &Path, text: &str) {
+        find_marks(&mut self.hits, text, "hit");
+        find_marks(&mut self.checks, text, "check");
+        find_marks(&mut self.checks, text, "check_count");
+    }
+
+    fn finish(self) {
+        // If no hits were found at all, the scan itself is broken.
+        assert!(!self.hits.is_empty());
+
+        let diff: Vec<_> =
+            self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
+
+        if !diff.is_empty() {
+            panic!("unpaired marks: {:?}", diff)
+        }
+    }
+}
+
+/// Deterministic, unkeyed 64-bit hash of `text` (line endings normalized),
+/// suitable for embedding in `lsp-extensions.md`. Uses the deprecated
+/// `SipHasher` — presumably because, unlike `DefaultHasher`, its algorithm is
+/// fixed and thus stable across Rust releases; confirm before changing.
+#[allow(deprecated)]
+fn stable_hash(text: &str) -> u64 {
+    use std::hash::{Hash, Hasher, SipHasher};
+
+    let text = text.replace('\r', "");
+    let mut hasher = SipHasher::default();
+    text.hash(&mut hasher);
+    hasher.finish()
+}
+
+/// Scans `text` for `mark!(name)` style invocations and inserts each `name`
+/// into `set`. The name is the run of alphanumeric/underscore characters
+/// following `mark!(`.
+fn find_marks(set: &mut HashSet<String>, text: &str, mark: &str) {
+    let mut text = text;
+    let mut prev_text = "";
+    // The loop terminates when a pass makes no progress (no further `mark`
+    // occurrence advanced the cursor).
+    while text != prev_text {
+        prev_text = text;
+        if let Some(idx) = text.find(mark) {
+            text = &text[idx + mark.len()..];
+            if let Some(stripped_text) = text.strip_prefix("!(") {
+                text = stripped_text.trim_start();
+                if let Some(idx2) = text.find(|c: char| !(c.is_alphanumeric() || c == '_')) {
+                    let mark_text = &text[..idx2];
+                    set.insert(mark_text.to_string());
+                    text = &text[idx2..];
+                }
+            }
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
new file mode 100644
index 000000000..a84110d94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "sourcegen"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+xshell = "0.2.2"
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
new file mode 100644
index 000000000..ce0224ec7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
@@ -0,0 +1,203 @@
+//! rust-analyzer relies heavily on source code generation.
+//!
+//! Things like feature documentation or assist tests are implemented by
+//! processing rust-analyzer's own source code and generating the appropriate
+//! output. See `sourcegen_` tests in various crates.
+//!
+//! This crate contains utilities to make this kind of source-gen easy.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ fmt, fs, mem,
+ path::{Path, PathBuf},
+};
+
+use xshell::{cmd, Shell};
+
+/// Recursively lists all `.rs` files under `dir` (see [`list_files`]).
+pub fn list_rust_files(dir: &Path) -> Vec<PathBuf> {
+    let mut res = list_files(dir);
+    res.retain(|it| {
+        it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs")
+    });
+    res
+}
+
+/// Recursively lists all files under `dir`, skipping dot-prefixed (hidden)
+/// files and directories. Traversal is an explicit work-list (no recursion);
+/// symlinks are neither followed nor listed (they are not `is_file`/`is_dir`).
+pub fn list_files(dir: &Path) -> Vec<PathBuf> {
+    let mut res = Vec::new();
+    let mut work = vec![dir.to_path_buf()];
+    while let Some(dir) = work.pop() {
+        for entry in dir.read_dir().unwrap() {
+            let entry = entry.unwrap();
+            let file_type = entry.file_type().unwrap();
+            let path = entry.path();
+            let is_hidden =
+                path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
+            if !is_hidden {
+                if file_type.is_dir() {
+                    work.push(path);
+                } else if file_type.is_file() {
+                    res.push(path);
+                }
+            }
+        }
+    }
+    res
+}
+
+/// A contiguous run of `//` comment lines extracted from a source file.
+#[derive(Clone)]
+pub struct CommentBlock {
+    /// Tag id (e.g. the `Foo` of a leading `Feature: Foo` line); empty for
+    /// untagged blocks.
+    pub id: String,
+    /// 1-based line number where the block's contents start.
+    pub line: usize,
+    pub contents: Vec<String>,
+    // True if any line used `///` or `//!` doc-comment syntax.
+    is_doc: bool,
+}
+
+impl CommentBlock {
+    /// Extracts comment blocks whose first line starts with `Tag:`; the rest
+    /// of that line becomes the block's `id`. Panics if a tagged block uses
+    /// doc-comment syntax.
+    pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
+        assert!(tag.starts_with(char::is_uppercase));
+
+        let tag = format!("{}:", tag);
+        // Would be nice if we had `.retain_mut` here!
+        CommentBlock::extract_untagged(text)
+            .into_iter()
+            .filter_map(|mut block| {
+                let first = block.contents.remove(0);
+                first.strip_prefix(&tag).map(|id| {
+                    if block.is_doc {
+                        panic!(
+                            "Use plain (non-doc) comments with tags like {}:\n    {}",
+                            tag, first
+                        );
+                    }
+
+                    block.id = id.trim().to_string();
+                    block
+                })
+            })
+            .collect()
+    }
+
+    /// Splits `text` into comment blocks: maximal runs of consecutive lines
+    /// starting with `//` (after leading whitespace).
+    pub fn extract_untagged(text: &str) -> Vec<CommentBlock> {
+        let mut res = Vec::new();
+
+        let lines = text.lines().map(str::trim_start);
+
+        let dummy_block =
+            CommentBlock { id: String::new(), line: 0, contents: Vec::new(), is_doc: false };
+        let mut block = dummy_block.clone();
+        for (line_num, line) in lines.enumerate() {
+            match line.strip_prefix("//") {
+                Some(mut contents) => {
+                    // `///` or `//!` marks the block as doc-style.
+                    if let Some('/' | '!') = contents.chars().next() {
+                        contents = &contents[1..];
+                        block.is_doc = true;
+                    }
+                    // Drop the single conventional space after the marker.
+                    if let Some(' ') = contents.chars().next() {
+                        contents = &contents[1..];
+                    }
+                    block.contents.push(contents.to_string());
+                }
+                None => {
+                    if !block.contents.is_empty() {
+                        let block = mem::replace(&mut block, dummy_block.clone());
+                        res.push(block);
+                    }
+                    // Next block can begin no earlier than the following line
+                    // (+2: 1-based numbering plus skipping the current line).
+                    block.line = line_num + 2;
+                }
+            }
+        }
+        if !block.contents.is_empty() {
+            res.push(block);
+        }
+        res
+    }
+}
+
+/// A file-and-line position inside the repository, displayed as a GitHub
+/// permalink (used in generated documentation).
+#[derive(Debug)]
+pub struct Location {
+    pub file: PathBuf,
+    pub line: usize,
+}
+
+impl fmt::Display for Location {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Render the path relative to the repo root, with forward slashes so
+        // Windows-generated links still work.
+        let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
+        let path = path.replace('\\', "/");
+        let name = self.file.file_name().unwrap();
+        write!(
+            f,
+            "https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}[{}]",
+            path,
+            self.line,
+            name.to_str().unwrap()
+        )
+    }
+}
+
+/// Panics with installation instructions unless a stable-toolchain `rustfmt`
+/// is runnable in `sh`.
+fn ensure_rustfmt(sh: &Shell) {
+    let version = cmd!(sh, "rustfmt --version").read().unwrap_or_default();
+    if !version.contains("stable") {
+        panic!(
+            "Failed to run rustfmt from toolchain 'stable'. \
+             Please run `rustup component add rustfmt --toolchain stable` to install it.",
+        );
+    }
+}
+
+/// Runs generated `text` through stable rustfmt (with the project's
+/// `rustfmt.toml` plus `fn_single_line=true`) and guarantees a trailing
+/// newline.
+pub fn reformat(text: String) -> String {
+    let sh = Shell::new().unwrap();
+    sh.set_var("RUSTUP_TOOLCHAIN", "stable");
+    ensure_rustfmt(&sh);
+    let rustfmt_toml = project_root().join("rustfmt.toml");
+    let mut stdout = cmd!(sh, "rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
+        .stdin(text)
+        .read()
+        .unwrap();
+    if !stdout.ends_with('\n') {
+        stdout.push('\n');
+    }
+    stdout
+}
+
+/// Prepends the standard "Generated by `<generator>`, do not edit" header to
+/// a generated file's contents.
+pub fn add_preamble(generator: &'static str, mut text: String) -> String {
+    let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator);
+    text.insert_str(0, &preamble);
+    text
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and then fails the test.
+///
+/// Comparison is line-ending-insensitive, so checkouts with CRLF don't churn.
+pub fn ensure_file_contents(file: &Path, contents: &str) {
+    if let Ok(old_contents) = fs::read_to_string(file) {
+        if normalize_newlines(&old_contents) == normalize_newlines(contents) {
+            // File is already up to date.
+            return;
+        }
+    }
+
+    let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
+    eprintln!(
+        "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
+        display_path.display()
+    );
+    if std::env::var("CI").is_ok() {
+        eprintln!("    NOTE: run `cargo test` locally and commit the updated files\n");
+    }
+    if let Some(parent) = file.parent() {
+        let _ = fs::create_dir_all(parent);
+    }
+    fs::write(file, contents).unwrap();
+    panic!("some file was not up to date and has been updated, simply re-run the tests");
+}
+
+/// Converts CRLF line endings to LF for comparison purposes.
+fn normalize_newlines(s: &str) -> String {
+    s.replace("\r\n", "\n")
+}
+
+/// Repository root: two levels up from this crate's manifest dir, sanity
+/// checked by the presence of `triagebot.toml`.
+pub fn project_root() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    let res = PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned();
+    assert!(res.join("triagebot.toml").exists());
+    res
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
new file mode 100644
index 000000000..092b99ae5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "stdx"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+libc = "0.2.126"
+backtrace = { version = "0.3.65", optional = true }
+always-assert = { version = "0.1.2", features = ["log"] }
+# Think twice before adding anything here
+
+[target.'cfg(windows)'.dependencies]
+miow = "0.4.0"
+winapi = { version = "0.3.9", features = ["winerror"] }
+
+[features]
+# Uncomment to enable for the whole crate graph
+# default = [ "backtrace" ]
diff --git a/src/tools/rust-analyzer/crates/stdx/src/lib.rs b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
new file mode 100644
index 000000000..b4d45206c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
@@ -0,0 +1,247 @@
+//! Missing batteries for standard libraries.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::process::Command;
+use std::{cmp::Ordering, ops, time::Instant};
+use std::{io as sio, iter};
+
+mod macros;
+pub mod process;
+pub mod panic_context;
+pub mod non_empty_vec;
+
+pub use always_assert::{always, never};
+
+/// True when built with the `CI` environment variable set (checked at compile
+/// time via `option_env!`, not at runtime).
+#[inline(always)]
+pub fn is_ci() -> bool {
+    option_env!("CI").is_some()
+}
+
+/// Returns a guard that prints `label: <elapsed>` to stderr when dropped.
+#[must_use]
+pub fn timeit(label: &'static str) -> impl Drop {
+    let start = Instant::now();
+    defer(move || eprintln!("{}: {:.2?}", label, start.elapsed()))
+}
+
+/// Prints backtrace to stderr, useful for debugging.
+/// Without the (off-by-default) `backtrace` feature this prints instructions
+/// for enabling it instead.
+pub fn print_backtrace() {
+    #[cfg(feature = "backtrace")]
+    eprintln!("{:?}", backtrace::Backtrace::new());
+
+    #[cfg(not(feature = "backtrace"))]
+    eprintln!(
+        r#"Enable the backtrace feature.
+Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`.
+"#
+    );
+}
+
+/// `FooBar` / `FOO_BAR` -> `foo_bar`.
+pub fn to_lower_snake_case(s: &str) -> String {
+    to_snake_case(s, char::to_ascii_lowercase)
+}
+/// `FooBar` / `foo_bar` -> `FOO_BAR`.
+pub fn to_upper_snake_case(s: &str) -> String {
+    to_snake_case(s, char::to_ascii_uppercase)
+}
+
+// Code partially taken from rust/compiler/rustc_lint/src/nonstandard_style.rs
+// commit: 9626f2b
+/// Splits `s` into words at existing underscores and at lower-to-upper case
+/// transitions, applies `change_case` to every character, and re-joins with
+/// underscores. Leading underscores are preserved.
+fn to_snake_case<F: Fn(&char) -> char>(mut s: &str, change_case: F) -> String {
+    let mut words = vec![];
+
+    // Preserve leading underscores
+    s = s.trim_start_matches(|c: char| {
+        if c == '_' {
+            // Each leading underscore contributes an empty word, so `join`
+            // reproduces it.
+            words.push(String::new());
+            true
+        } else {
+            false
+        }
+    });
+
+    for s in s.split('_') {
+        let mut last_upper = false;
+        let mut buf = String::new();
+
+        if s.is_empty() {
+            continue;
+        }
+
+        for ch in s.chars() {
+            // Start a new word at a lower->upper transition (but not inside a
+            // run of uppercase letters, e.g. `HTTPServer` keeps `HTTP` whole).
+            if !buf.is_empty() && buf != "'" && ch.is_uppercase() && !last_upper {
+                words.push(buf);
+                buf = String::new();
+            }
+
+            last_upper = ch.is_uppercase();
+            buf.extend(iter::once(change_case(&ch)));
+        }
+
+        words.push(buf);
+    }
+
+    words.join("_")
+}
+
+/// In-place-ish `str::replace`: skips the allocation entirely when `from`
+/// does not occur in `buf`.
+pub fn replace(buf: &mut String, from: char, to: &str) {
+    if !buf.contains(from) {
+        return;
+    }
+    // FIXME: do this in place.
+    *buf = buf.replace(from, to);
+}
+
+/// Removes the common leading indentation from every line of `text`
+/// (blank lines are ignored when computing the indent). A single leading
+/// newline is stripped first, so raw-string fixtures can start with `"\n`.
+pub fn trim_indent(mut text: &str) -> String {
+    if text.starts_with('\n') {
+        text = &text[1..];
+    }
+    let indent = text
+        .lines()
+        .filter(|it| !it.trim().is_empty())
+        .map(|it| it.len() - it.trim_start().len())
+        .min()
+        .unwrap_or(0);
+    // `split_inclusive` keeps the trailing `\n` on each line, so `collect`
+    // reassembles the text without altering line endings.
+    text.split_inclusive('\n')
+        .map(
+            |line| {
+                if line.len() <= indent {
+                    // Line is shorter than the indent (blank-ish): just strip
+                    // whatever leading spaces it has.
+                    line.trim_start_matches(' ')
+                } else {
+                    &line[indent..]
+                }
+            },
+        )
+        .collect()
+}
+
+/// Binary-searches a slice sorted with respect to `key`, returning the
+/// half-open index range of elements comparing `Equal`.
+pub fn equal_range_by<T, F>(slice: &[T], mut key: F) -> ops::Range<usize>
+where
+    F: FnMut(&T) -> Ordering,
+{
+    let start = slice.partition_point(|it| key(it) == Ordering::Less);
+    let len = slice[start..].partition_point(|it| key(it) == Ordering::Equal);
+    start..start + len
+}
+
+/// Returns a guard that runs `f` when dropped (a scope-exit / `defer` helper).
+#[must_use]
+pub fn defer<F: FnOnce()>(f: F) -> impl Drop {
+    struct D<F: FnOnce()>(Option<F>);
+    impl<F: FnOnce()> Drop for D<F> {
+        fn drop(&mut self) {
+            // `Option::take` lets us move the `FnOnce` out of `&mut self`.
+            if let Some(f) = self.0.take() {
+                f();
+            }
+        }
+    }
+    D(Some(f))
+}
+
+/// A [`std::process::Child`] wrapper that will kill the child on drop.
+// `repr(transparent)` is what makes the `transmute` in `into_inner` sound;
+// it is deliberately absent on wasm32, where `into_inner` panics first.
+#[cfg_attr(not(target_arch = "wasm32"), repr(transparent))]
+#[derive(Debug)]
+pub struct JodChild(pub std::process::Child);
+
+impl ops::Deref for JodChild {
+    type Target = std::process::Child;
+    fn deref(&self) -> &std::process::Child {
+        &self.0
+    }
+}
+
+impl ops::DerefMut for JodChild {
+    fn deref_mut(&mut self) -> &mut std::process::Child {
+        &mut self.0
+    }
+}
+
+impl Drop for JodChild {
+    fn drop(&mut self) {
+        // Best effort: errors (e.g. already-exited child) are ignored; `wait`
+        // reaps the process so it doesn't linger as a zombie.
+        let _ = self.0.kill();
+        let _ = self.0.wait();
+    }
+}
+
+impl JodChild {
+    /// Spawns `command`, wrapping the child so it is killed on drop.
+    pub fn spawn(mut command: Command) -> sio::Result<Self> {
+        command.spawn().map(Self)
+    }
+
+    /// Unwraps the inner child *without* running the kill-on-drop destructor.
+    pub fn into_inner(self) -> std::process::Child {
+        if cfg!(target_arch = "wasm32") {
+            panic!("no processes on wasm");
+        }
+        // SAFETY: repr transparent, except on WASM
+        unsafe { std::mem::transmute::<JodChild, std::process::Child>(self) }
+    }
+}
+
+// feature: iter_order_by
+// Iterator::eq_by
+/// Polyfill for the unstable `Iterator::eq_by`: true iff `this` and `other`
+/// yield equal-length sequences whose corresponding items satisfy `eq`.
+pub fn iter_eq_by<I, I2, F>(this: I2, other: I, mut eq: F) -> bool
+where
+    I: IntoIterator,
+    I2: IntoIterator,
+    F: FnMut(I2::Item, I::Item) -> bool,
+{
+    let mut other = other.into_iter();
+    let mut this = this.into_iter();
+
+    loop {
+        let x = match this.next() {
+            // `this` exhausted: equal only if `other` is exhausted too.
+            None => return other.next().is_none(),
+            Some(val) => val,
+        };
+
+        let y = match other.next() {
+            // `other` is shorter than `this`.
+            None => return false,
+            Some(val) => val,
+        };
+
+        if !eq(x, y) {
+            return false;
+        }
+    }
+}
+
+/// Returns all final segments of the argument, longest first.
+/// E.g. `[1, 2, 3]` yields `[1, 2, 3]`, `[2, 3]`, `[3]` (never the empty tail).
+pub fn slice_tails<T>(this: &[T]) -> impl Iterator<Item = &[T]> {
+    (0..this.len()).map(|i| &this[i..])
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // Exercises `trim_indent` on empty input, trailing-newline and
+    // no-trailing-newline fixtures, and nested indentation.
+    // NOTE(review): the significant whitespace inside these string literals
+    // was mangled by the diff dump; reconstructed to match the asserted
+    // outputs — verify against upstream.
+    #[test]
+    fn test_trim_indent() {
+        assert_eq!(trim_indent(""), "");
+        assert_eq!(
+            trim_indent(
+                "
+            hello
+            world
+"
+            ),
+            "hello\nworld\n"
+        );
+        assert_eq!(
+            trim_indent(
+                "
+            hello
+            world"
+            ),
+            "hello\nworld"
+        );
+        assert_eq!(trim_indent("    hello\n    world\n"), "hello\nworld\n");
+        assert_eq!(
+            trim_indent(
+                "
+            fn main() {
+                return 92;
+            }
+            "
+            ),
+            "fn main() {\n    return 92;\n}\n"
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/macros.rs b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
new file mode 100644
index 000000000..d91fc690c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
@@ -0,0 +1,47 @@
+//! Convenience macros.
+
+/// Shadows `std::eprintln!` within the workspace: identical behavior locally,
+/// but panics on CI so stray debug prints can't be merged.
+#[macro_export]
+macro_rules! eprintln {
+    ($($tt:tt)*) => {{
+        if $crate::is_ci() {
+            panic!("Forgot to remove debug-print?")
+        }
+        std::eprintln!($($tt)*)
+    }}
+}
+
+/// Appends formatted string to a `String`.
+/// Infallible for `String` targets, so the `fmt::Write` result is discarded.
+#[macro_export]
+macro_rules! format_to {
+    ($buf:expr) => ();
+    ($buf:expr, $lit:literal $($arg:tt)*) => {
+        { use ::std::fmt::Write as _; let _ = ::std::write!($buf, $lit $($arg)*); }
+    };
+}
+
+/// Generates `From` impls for `Enum E { Foo(Foo), Bar(Bar) }` enums
+///
+/// The optional parenthesized list generates additional `From` impls for
+/// nested sub-variants, e.g. `impl_from!(Adt(Struct, Enum) for Def)` yields
+/// `From<Struct> for Def` going through `Def::Adt(Adt::Struct(..))`.
+///
+/// # Example
+///
+/// ```rust
+/// impl_from!(Struct, Union, Enum for Adt);
+/// ```
+#[macro_export]
+macro_rules! impl_from {
+    ($($variant:ident $(($($sub_variant:ident),*))?),* for $enum:ident) => {
+        $(
+            impl From<$variant> for $enum {
+                fn from(it: $variant) -> $enum {
+                    $enum::$variant(it)
+                }
+            }
+            $($(
+                impl From<$sub_variant> for $enum {
+                    fn from(it: $sub_variant) -> $enum {
+                        $enum::$variant($variant::$sub_variant(it))
+                    }
+                }
+            )*)?
+        )*
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs b/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs
new file mode 100644
index 000000000..342194c78
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs
@@ -0,0 +1,39 @@
+//! See [`NonEmptyVec`].
+
+/// A [`Vec`] that is guaranteed to at least contain one element.
+pub struct NonEmptyVec<T> {
+ first: T,
+ rest: Vec<T>,
+}
+
+impl<T> NonEmptyVec<T> {
+ #[inline]
+ pub fn new(first: T) -> Self {
+ NonEmptyVec { first, rest: Vec::new() }
+ }
+
+ #[inline]
+ pub fn last_mut(&mut self) -> &mut T {
+ self.rest.last_mut().unwrap_or(&mut self.first)
+ }
+
+ #[inline]
+ pub fn pop(&mut self) -> Option<T> {
+ self.rest.pop()
+ }
+
+ #[inline]
+ pub fn push(&mut self, value: T) {
+ self.rest.push(value)
+ }
+
+ #[inline]
+ pub fn len(&self) -> usize {
+ 1 + self.rest.len()
+ }
+
+ #[inline]
+ pub fn into_last(mut self) -> T {
+ self.rest.pop().unwrap_or(self.first)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs b/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs
new file mode 100644
index 000000000..f8fafc5a6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs
@@ -0,0 +1,49 @@
+//! A micro-crate to enhance panic messages with context info.
+//!
+//! FIXME: upstream to <https://github.com/kriomant/panic-context> ?
+
+use std::{cell::RefCell, panic, sync::Once};
+
+pub fn enter(context: String) -> PanicContext {
+ static ONCE: Once = Once::new();
+ ONCE.call_once(PanicContext::init);
+
+ with_ctx(|ctx| ctx.push(context));
+ PanicContext { _priv: () }
+}
+
+#[must_use]
+pub struct PanicContext {
+ _priv: (),
+}
+
+impl PanicContext {
+ fn init() {
+ let default_hook = panic::take_hook();
+ let hook = move |panic_info: &panic::PanicInfo<'_>| {
+ with_ctx(|ctx| {
+ if !ctx.is_empty() {
+ eprintln!("Panic context:");
+ for frame in ctx.iter() {
+ eprintln!("> {}\n", frame);
+ }
+ }
+ default_hook(panic_info);
+ });
+ };
+ panic::set_hook(Box::new(hook));
+ }
+}
+
+impl Drop for PanicContext {
+ fn drop(&mut self) {
+ with_ctx(|ctx| assert!(ctx.pop().is_some()));
+ }
+}
+
+fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
+ thread_local! {
+ static CTX: RefCell<Vec<String>> = RefCell::new(Vec::new());
+ }
+ CTX.with(|ctx| f(&mut *ctx.borrow_mut()));
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/process.rs b/src/tools/rust-analyzer/crates/stdx/src/process.rs
new file mode 100644
index 000000000..e5aa34365
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/process.rs
@@ -0,0 +1,267 @@
+//! Read both stdout and stderr of child without deadlocks.
+//!
+//! <https://github.com/rust-lang/cargo/blob/905af549966f23a9288e9993a85d1249a5436556/crates/cargo-util/src/read2.rs>
+//! <https://github.com/rust-lang/cargo/blob/58a961314437258065e23cb6316dfc121d96fb71/crates/cargo-util/src/process_builder.rs#L231>
+
+use std::{
+ io,
+ process::{ChildStderr, ChildStdout, Command, Output, Stdio},
+};
+
+use crate::JodChild;
+
+pub fn streaming_output(
+ out: ChildStdout,
+ err: ChildStderr,
+ on_stdout_line: &mut dyn FnMut(&str),
+ on_stderr_line: &mut dyn FnMut(&str),
+) -> io::Result<(Vec<u8>, Vec<u8>)> {
+ let mut stdout = Vec::new();
+ let mut stderr = Vec::new();
+
+ imp::read2(out, err, &mut |is_out, data, eof| {
+ let idx = if eof {
+ data.len()
+ } else {
+ match data.iter().rposition(|b| *b == b'\n') {
+ Some(i) => i + 1,
+ None => return,
+ }
+ };
+ {
+ // scope for new_lines
+ let new_lines = {
+ let dst = if is_out { &mut stdout } else { &mut stderr };
+ let start = dst.len();
+ let data = data.drain(..idx);
+ dst.extend(data);
+ &dst[start..]
+ };
+ for line in String::from_utf8_lossy(new_lines).lines() {
+ if is_out {
+ on_stdout_line(line);
+ } else {
+ on_stderr_line(line);
+ }
+ }
+ }
+ })?;
+
+ Ok((stdout, stderr))
+}
+
+pub fn spawn_with_streaming_output(
+ mut cmd: Command,
+ on_stdout_line: &mut dyn FnMut(&str),
+ on_stderr_line: &mut dyn FnMut(&str),
+) -> io::Result<Output> {
+ let cmd = cmd.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
+
+ let mut child = JodChild(cmd.spawn()?);
+ let (stdout, stderr) = streaming_output(
+ child.stdout.take().unwrap(),
+ child.stderr.take().unwrap(),
+ on_stdout_line,
+ on_stderr_line,
+ )?;
+ let status = child.wait()?;
+ Ok(Output { status, stdout, stderr })
+}
+
+#[cfg(unix)]
+mod imp {
+ use std::{
+ io::{self, prelude::*},
+ mem,
+ os::unix::prelude::*,
+ process::{ChildStderr, ChildStdout},
+ };
+
+ pub(crate) fn read2(
+ mut out_pipe: ChildStdout,
+ mut err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ unsafe {
+ libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+ libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+ }
+
+ let mut out_done = false;
+ let mut err_done = false;
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() };
+ fds[0].fd = out_pipe.as_raw_fd();
+ fds[0].events = libc::POLLIN;
+ fds[1].fd = err_pipe.as_raw_fd();
+ fds[1].events = libc::POLLIN;
+ let mut nfds = 2;
+ let mut errfd = 1;
+
+ while nfds > 0 {
+ // wait for either pipe to become readable using `poll`
+ let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) };
+ if r == -1 {
+ let err = io::Error::last_os_error();
+ if err.kind() == io::ErrorKind::Interrupted {
+ continue;
+ }
+ return Err(err);
+ }
+
+ // Read as much as we can from each pipe, ignoring EWOULDBLOCK or
+ // EAGAIN. If we hit EOF, then this will happen because the underlying
+ // reader will return Ok(0), in which case we'll see `Ok` ourselves. In
+ // this case we flip the other fd back into blocking mode and read
+ // whatever's leftover on that file descriptor.
+ let handle = |res: io::Result<_>| match res {
+ Ok(_) => Ok(true),
+ Err(e) => {
+ if e.kind() == io::ErrorKind::WouldBlock {
+ Ok(false)
+ } else {
+ Err(e)
+ }
+ }
+ };
+ if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? {
+ err_done = true;
+ nfds -= 1;
+ }
+ data(false, &mut err, err_done);
+ if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? {
+ out_done = true;
+ fds[0].fd = err_pipe.as_raw_fd();
+ errfd = 0;
+ nfds -= 1;
+ }
+ data(true, &mut out, out_done);
+ }
+ Ok(())
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use std::{
+ io,
+ os::windows::prelude::*,
+ process::{ChildStderr, ChildStdout},
+ slice,
+ };
+
+ use miow::{
+ iocp::{CompletionPort, CompletionStatus},
+ pipe::NamedPipe,
+ Overlapped,
+ };
+ use winapi::shared::winerror::ERROR_BROKEN_PIPE;
+
+ struct Pipe<'a> {
+ dst: &'a mut Vec<u8>,
+ overlapped: Overlapped,
+ pipe: NamedPipe,
+ done: bool,
+ }
+
+ pub(crate) fn read2(
+ out_pipe: ChildStdout,
+ err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let port = CompletionPort::new(1)?;
+ port.add_handle(0, &out_pipe)?;
+ port.add_handle(1, &err_pipe)?;
+
+ unsafe {
+ let mut out_pipe = Pipe::new(out_pipe, &mut out);
+ let mut err_pipe = Pipe::new(err_pipe, &mut err);
+
+ out_pipe.read()?;
+ err_pipe.read()?;
+
+ let mut status = [CompletionStatus::zero(), CompletionStatus::zero()];
+
+ while !out_pipe.done || !err_pipe.done {
+ for status in port.get_many(&mut status, None)? {
+ if status.token() == 0 {
+ out_pipe.complete(status);
+ data(true, out_pipe.dst, out_pipe.done);
+ out_pipe.read()?;
+ } else {
+ err_pipe.complete(status);
+ data(false, err_pipe.dst, err_pipe.done);
+ err_pipe.read()?;
+ }
+ }
+ }
+
+ Ok(())
+ }
+ }
+
+ impl<'a> Pipe<'a> {
+ unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
+ Pipe {
+ dst,
+ pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
+ overlapped: Overlapped::zero(),
+ done: false,
+ }
+ }
+
+ unsafe fn read(&mut self) -> io::Result<()> {
+ let dst = slice_to_end(self.dst);
+ match self.pipe.read_overlapped(dst, self.overlapped.raw()) {
+ Ok(_) => Ok(()),
+ Err(e) => {
+ if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) {
+ self.done = true;
+ Ok(())
+ } else {
+ Err(e)
+ }
+ }
+ }
+ }
+
+ unsafe fn complete(&mut self, status: &CompletionStatus) {
+ let prev = self.dst.len();
+ self.dst.set_len(prev + status.bytes_transferred() as usize);
+ if status.bytes_transferred() == 0 {
+ self.done = true;
+ }
+ }
+ }
+
+ unsafe fn slice_to_end(v: &mut Vec<u8>) -> &mut [u8] {
+ if v.capacity() == 0 {
+ v.reserve(16);
+ }
+ if v.capacity() == v.len() {
+ v.reserve(1);
+ }
+ slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len())
+ }
+}
+
+#[cfg(target_arch = "wasm32")]
+mod imp {
+ use std::{
+ io,
+ process::{ChildStderr, ChildStdout},
+ };
+
+ pub(crate) fn read2(
+ _out_pipe: ChildStdout,
+ _err_pipe: ChildStderr,
+ _data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ panic!("no processes on wasm")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
new file mode 100644
index 000000000..0e2dec386
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -0,0 +1,39 @@
+[package]
+name = "syntax"
+version = "0.0.0"
+description = "Comment and whitespace preserving parser for the Rust language"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.3"
+rowan = "0.15.8"
+rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
+rustc-hash = "1.1.0"
+once_cell = "1.12.0"
+indexmap = "1.9.1"
+smol_str = "0.1.23"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+
+[dev-dependencies]
+rayon = "1.5.3"
+expect-test = "1.4.0"
+proc-macro2 = "1.0.39"
+quote = "1.0.20"
+ungrammar = "1.16.1"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
new file mode 100644
index 000000000..ba2f515b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
@@ -0,0 +1,27 @@
+
+[package]
+name = "syntax-fuzz"
+version = "0.0.1"
+publish = false
+edition = "2021"
+rust-version = "1.57"
+
+[package.metadata]
+cargo-fuzz = true
+
+[dependencies]
+syntax = { path = "..", version = "0.0.0" }
+text_edit = { path = "../../text_edit", version = "0.0.0" }
+libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" }
+
+# Prevent this from interfering with workspaces
+[workspace]
+members = ["."]
+
+[[bin]]
+name = "parser"
+path = "fuzz_targets/parser.rs"
+
+[[bin]]
+name = "reparse"
+path = "fuzz_targets/reparse.rs"
diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs
new file mode 100644
index 000000000..f80e13002
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs
@@ -0,0 +1,11 @@
+//! Fuzzing for from-scratch parsing.
+
+#![no_main]
+use libfuzzer_sys::fuzz_target;
+use syntax::fuzz::check_parser;
+
+fuzz_target!(|data: &[u8]| {
+ if let Ok(text) = std::str::from_utf8(data) {
+ check_parser(text)
+ }
+});
diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs
new file mode 100644
index 000000000..f865ce8d6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs
@@ -0,0 +1,11 @@
+//! Fuzzing for incremental parsing.
+
+#![no_main]
+use libfuzzer_sys::fuzz_target;
+use syntax::fuzz::CheckReparse;
+
+fuzz_target!(|data: &[u8]| {
+ if let Some(check) = CheckReparse::from_data(data) {
+ check.run();
+ }
+});
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
new file mode 100644
index 000000000..62aa47839
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -0,0 +1,667 @@
+// Rust Un-Grammar.
+//
+// This grammar specifies the structure of Rust's concrete syntax tree.
+// It does not specify parsing rules (ambiguities, precedence, etc are out of scope).
+// Tokens are processed -- contextual keywords are recognised, compound operators glued.
+//
+// Legend:
+//
+// // -- comment
+// Name = -- non-terminal definition
+// 'ident' -- token (terminal)
+// A B -- sequence
+// A | B -- alternation
+// A* -- zero or more repetition
+// A? -- zero or one repetition
+// (A) -- same as A
+// label:A -- suggested name for field of AST node
+
+//*************************//
+// Names, Paths and Macros //
+//*************************//
+
+Name =
+ 'ident' | 'self'
+
+NameRef =
+ 'ident' | 'int_number' | 'self' | 'super' | 'crate' | 'Self'
+
+Lifetime =
+ 'lifetime_ident'
+
+Path =
+ (qualifier:Path '::')? segment:PathSegment
+
+PathSegment =
+ '::'? NameRef
+| NameRef GenericArgList?
+| NameRef ParamList RetType?
+| '<' PathType ('as' PathType)? '>'
+
+GenericArgList =
+ '::'? '<' (GenericArg (',' GenericArg)* ','?)? '>'
+
+GenericArg =
+ TypeArg
+| AssocTypeArg
+| LifetimeArg
+| ConstArg
+
+TypeArg =
+ Type
+
+AssocTypeArg =
+ NameRef GenericParamList? (':' TypeBoundList | ('=' Type | ConstArg))
+
+LifetimeArg =
+ Lifetime
+
+ConstArg =
+ Expr
+
+MacroCall =
+ Attr* Path '!' TokenTree ';'?
+
+TokenTree =
+ '(' ')'
+| '{' '}'
+| '[' ']'
+
+MacroItems =
+ Item*
+
+MacroStmts =
+ statements:Stmt*
+ Expr?
+
+//*************************//
+// Items //
+//*************************//
+
+SourceFile =
+ 'shebang'?
+ Attr*
+ Item*
+
+Item =
+ Const
+| Enum
+| ExternBlock
+| ExternCrate
+| Fn
+| Impl
+| MacroCall
+| MacroRules
+| MacroDef
+| Module
+| Static
+| Struct
+| Trait
+| TypeAlias
+| Union
+| Use
+
+MacroRules =
+ Attr* Visibility?
+ 'macro_rules' '!' Name
+ TokenTree
+
+MacroDef =
+ Attr* Visibility?
+ 'macro' Name args:TokenTree?
+ body:TokenTree
+
+Module =
+ Attr* Visibility?
+ 'mod' Name
+ (ItemList | ';')
+
+ItemList =
+ '{' Attr* Item* '}'
+
+ExternCrate =
+ Attr* Visibility?
+ 'extern' 'crate' NameRef Rename? ';'
+
+Rename =
+ 'as' (Name | '_')
+
+Use =
+ Attr* Visibility?
+ 'use' UseTree ';'
+
+UseTree =
+ (Path? '::')? ('*' | UseTreeList)
+| Path Rename?
+
+UseTreeList =
+ '{' (UseTree (',' UseTree)* ','?)? '}'
+
+Fn =
+ Attr* Visibility?
+ 'default'? 'const'? 'async'? 'unsafe'? Abi?
+ 'fn' Name GenericParamList? ParamList RetType? WhereClause?
+ (body:BlockExpr | ';')
+
+Abi =
+ 'extern' 'string'?
+
+ParamList =
+ '('(
+ SelfParam
+ | (SelfParam ',')? (Param (',' Param)* ','?)?
+ )')'
+| '|' (Param (',' Param)* ','?)? '|'
+
+SelfParam =
+ Attr* (
+ ('&' Lifetime?)? 'mut'? Name
+ | 'mut'? Name ':' Type
+ )
+
+Param =
+ Attr* (
+ Pat (':' Type)?
+ | Type
+ | '...'
+ )
+
+RetType =
+ '->' Type
+
+TypeAlias =
+ Attr* Visibility?
+ 'default'?
+ 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
+ ('=' Type)? ';'
+
+Struct =
+ Attr* Visibility?
+ 'struct' Name GenericParamList? (
+ WhereClause? (RecordFieldList | ';')
+ | TupleFieldList WhereClause? ';'
+ )
+
+RecordFieldList =
+ '{' fields:(RecordField (',' RecordField)* ','?)? '}'
+
+RecordField =
+ Attr* Visibility?
+ Name ':' Type
+
+TupleFieldList =
+ '(' fields:(TupleField (',' TupleField)* ','?)? ')'
+
+TupleField =
+ Attr* Visibility?
+ Type
+
+FieldList =
+ RecordFieldList
+| TupleFieldList
+
+Enum =
+ Attr* Visibility?
+ 'enum' Name GenericParamList? WhereClause?
+ VariantList
+
+VariantList =
+ '{' (Variant (',' Variant)* ','?)? '}'
+
+Variant =
+ Attr* Visibility?
+ Name FieldList? ('=' Expr)?
+
+Union =
+ Attr* Visibility?
+ 'union' Name GenericParamList? WhereClause?
+ RecordFieldList
+
+// A Data Type.
+//
+// Not used directly in the grammar, but handy to have anyway.
+Adt =
+ Enum
+| Struct
+| Union
+
+Const =
+ Attr* Visibility?
+ 'default'?
+ 'const' (Name | '_') ':' Type
+ ('=' body:Expr)? ';'
+
+Static =
+ Attr* Visibility?
+ 'static' 'mut'? Name ':' Type
+ ('=' body:Expr)? ';'
+
+Trait =
+ Attr* Visibility?
+ 'unsafe'? 'auto'?
+ 'trait' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
+ AssocItemList
+
+AssocItemList =
+ '{' Attr* AssocItem* '}'
+
+AssocItem =
+ Const
+| Fn
+| MacroCall
+| TypeAlias
+
+Impl =
+ Attr* Visibility?
+ 'default'? 'unsafe'?
+ 'impl' GenericParamList? ('const'? '!'? trait:Type 'for')? self_ty:Type WhereClause?
+ AssocItemList
+
+ExternBlock =
+ Attr* 'unsafe'? Abi ExternItemList
+
+ExternItemList =
+ '{' Attr* ExternItem* '}'
+
+ExternItem =
+ Fn
+| MacroCall
+| Static
+| TypeAlias
+
+GenericParamList =
+ '<' (GenericParam (',' GenericParam)* ','?)? '>'
+
+GenericParam =
+ ConstParam
+| LifetimeParam
+| TypeParam
+
+TypeParam =
+ Attr* Name (':' TypeBoundList?)?
+ ('=' default_type:Type)?
+
+ConstParam =
+ Attr* 'const' Name ':' Type
+ ('=' default_val:Expr)?
+
+LifetimeParam =
+ Attr* Lifetime (':' TypeBoundList?)?
+
+WhereClause =
+ 'where' predicates:(WherePred (',' WherePred)* ','?)
+
+WherePred =
+ ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
+
+Visibility =
+ 'pub' ('(' 'in'? Path ')')?
+
+Attr =
+ '#' '!'? '[' Meta ']'
+
+Meta =
+ Path ('=' Expr | TokenTree)?
+
+//****************************//
+// Statements and Expressions //
+//****************************//
+
+Stmt =
+ ';'
+| ExprStmt
+| Item
+| LetStmt
+
+LetStmt =
+ Attr* 'let' Pat (':' Type)?
+ '=' initializer:Expr
+ LetElse?
+ ';'
+
+LetElse =
+ 'else' BlockExpr
+
+ExprStmt =
+ Expr ';'?
+
+Expr =
+ ArrayExpr
+| AwaitExpr
+| BinExpr
+| BlockExpr
+| BoxExpr
+| BreakExpr
+| CallExpr
+| CastExpr
+| ClosureExpr
+| ContinueExpr
+| FieldExpr
+| ForExpr
+| IfExpr
+| IndexExpr
+| Literal
+| LoopExpr
+| MacroExpr
+| MacroStmts
+| MatchExpr
+| MethodCallExpr
+| ParenExpr
+| PathExpr
+| PrefixExpr
+| RangeExpr
+| RecordExpr
+| RefExpr
+| ReturnExpr
+| TryExpr
+| TupleExpr
+| WhileExpr
+| YieldExpr
+| LetExpr
+| UnderscoreExpr
+
+MacroExpr =
+ MacroCall
+
+Literal =
+ Attr* value:(
+ 'int_number' | 'float_number'
+ | 'string' | 'raw_string'
+ | 'byte_string' | 'raw_byte_string'
+ | 'true' | 'false'
+ | 'char' | 'byte'
+ )
+
+PathExpr =
+ Attr* Path
+
+StmtList =
+ '{'
+ Attr*
+ statements:Stmt*
+ tail_expr:Expr?
+ '}'
+
+RefExpr =
+ Attr* '&' ('raw' | 'mut' | 'const') Expr
+
+TryExpr =
+ Attr* Expr '?'
+
+BlockExpr =
+ Attr* Label? ('try' | 'unsafe' | 'async' | 'const') StmtList
+
+PrefixExpr =
+ Attr* op:('-' | '!' | '*') Expr
+
+BinExpr =
+ Attr*
+ lhs:Expr
+ op:(
+ '||' | '&&'
+ | '==' | '!=' | '<=' | '>=' | '<' | '>'
+ | '+' | '*' | '-' | '/' | '%' | '<<' | '>>' | '^' | '|' | '&'
+ | '=' | '+=' | '/=' | '*=' | '%=' | '>>=' | '<<=' | '-=' | '|=' | '&=' | '^='
+ )
+ rhs:Expr
+
+CastExpr =
+ Attr* Expr 'as' Type
+
+ParenExpr =
+ Attr* '(' Attr* Expr ')'
+
+ArrayExpr =
+ Attr* '[' Attr* (
+ (Expr (',' Expr)* ','?)?
+ | Expr ';' Expr
+ ) ']'
+
+IndexExpr =
+ Attr* base:Expr '[' index:Expr ']'
+
+TupleExpr =
+ Attr* '(' Attr* fields:(Expr (',' Expr)* ','?)? ')'
+
+RecordExpr =
+ Path RecordExprFieldList
+
+RecordExprFieldList =
+ '{'
+ Attr*
+ fields:(RecordExprField (',' RecordExprField)* ','?)?
+ ('..' spread:Expr?)?
+ '}'
+
+RecordExprField =
+ Attr* (NameRef ':')? Expr
+
+CallExpr =
+ Attr* Expr ArgList
+
+ArgList =
+ '(' args:(Expr (',' Expr)* ','?)? ')'
+
+MethodCallExpr =
+ Attr* receiver:Expr '.' NameRef GenericArgList? ArgList
+
+FieldExpr =
+ Attr* Expr '.' NameRef
+
+ClosureExpr =
+ Attr* ('for' GenericParamList)? 'static'? 'async'? 'move'? ParamList RetType?
+ body:Expr
+
+IfExpr =
+ Attr* 'if' condition:Expr then_branch:BlockExpr
+ ('else' else_branch:(IfExpr | BlockExpr))?
+
+LoopExpr =
+ Attr* Label? 'loop'
+ loop_body:BlockExpr
+
+ForExpr =
+ Attr* Label? 'for' Pat 'in' iterable:Expr
+ loop_body:BlockExpr
+
+WhileExpr =
+ Attr* Label? 'while' condition:Expr
+ loop_body:BlockExpr
+
+Label =
+ Lifetime ':'
+
+BreakExpr =
+ Attr* 'break' Lifetime? Expr?
+
+ContinueExpr =
+ Attr* 'continue' Lifetime?
+
+RangeExpr =
+ Attr* start:Expr? op:('..' | '..=') end:Expr?
+
+MatchExpr =
+ Attr* 'match' Expr MatchArmList
+
+MatchArmList =
+ '{'
+ Attr*
+ arms:MatchArm*
+ '}'
+
+MatchArm =
+ Attr* Pat guard:MatchGuard? '=>' Expr ','?
+
+MatchGuard =
+ 'if' condition:Expr
+
+ReturnExpr =
+ Attr* 'return' Expr?
+
+YieldExpr =
+ Attr* 'yield' Expr?
+
+LetExpr =
+ Attr* 'let' Pat '=' Expr
+
+UnderscoreExpr =
+ Attr* '_'
+
+AwaitExpr =
+ Attr* Expr '.' 'await'
+
+BoxExpr =
+ Attr* 'box' Expr
+
+//*************************//
+// Types //
+//*************************//
+
+Type =
+ ArrayType
+| DynTraitType
+| FnPtrType
+| ForType
+| ImplTraitType
+| InferType
+| MacroType
+| NeverType
+| ParenType
+| PathType
+| PtrType
+| RefType
+| SliceType
+| TupleType
+
+ParenType =
+ '(' Type ')'
+
+NeverType =
+ '!'
+
+MacroType =
+ MacroCall
+
+PathType =
+ Path
+
+TupleType =
+ '(' fields:(Type (',' Type)* ','?)? ')'
+
+PtrType =
+ '*' ('const' | 'mut') Type
+
+RefType =
+ '&' Lifetime? 'mut'? Type
+
+ArrayType =
+ '[' Type ';' Expr ']'
+
+SliceType =
+ '[' Type ']'
+
+InferType =
+ '_'
+
+FnPtrType =
+ 'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType?
+
+ForType =
+ 'for' GenericParamList Type
+
+ImplTraitType =
+ 'impl' TypeBoundList
+
+DynTraitType =
+ 'dyn' TypeBoundList
+
+TypeBoundList =
+ bounds:(TypeBound ('+' TypeBound)* '+'?)
+
+TypeBound =
+ Lifetime
+| ('?' | '~' 'const')? Type
+
+//************************//
+// Patterns //
+//************************//
+
+Pat =
+ IdentPat
+| BoxPat
+| RestPat
+| LiteralPat
+| MacroPat
+| OrPat
+| ParenPat
+| PathPat
+| WildcardPat
+| RangePat
+| RecordPat
+| RefPat
+| SlicePat
+| TuplePat
+| TupleStructPat
+| ConstBlockPat
+
+LiteralPat =
+ Literal
+
+IdentPat =
+ Attr* 'ref'? 'mut'? Name ('@' Pat)?
+
+WildcardPat =
+ '_'
+
+RangePat =
+ // 1..
+ start:Pat op:('..' | '..=')
+ // 1..2
+ | start:Pat op:('..' | '..=') end:Pat
+ // ..2
+ | op:('..' | '..=') end:Pat
+
+RefPat =
+ '&' 'mut'? Pat
+
+RecordPat =
+ Path RecordPatFieldList
+
+RecordPatFieldList =
+ '{'
+ fields:(RecordPatField (',' RecordPatField)* ','?)?
+ RestPat?
+ '}'
+
+RecordPatField =
+ Attr* (NameRef ':')? Pat
+
+TupleStructPat =
+ Path '(' fields:(Pat (',' Pat)* ','?)? ')'
+
+TuplePat =
+ '(' fields:(Pat (',' Pat)* ','?)? ')'
+
+ParenPat =
+ '(' Pat ')'
+
+SlicePat =
+ '[' (Pat (',' Pat)* ','?)? ']'
+
+PathPat =
+ Path
+
+OrPat =
+ (Pat ('|' Pat)* '|'?)
+
+BoxPat =
+ 'box' Pat
+
+RestPat =
+ Attr* '..'
+
+MacroPat =
+ MacroCall
+
+ConstBlockPat =
+ 'const' BlockExpr
diff --git a/src/tools/rust-analyzer/crates/syntax/src/algo.rs b/src/tools/rust-analyzer/crates/syntax/src/algo.rs
new file mode 100644
index 000000000..8b14789dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/algo.rs
@@ -0,0 +1,660 @@
+//! Collection of assorted algorithms for syntax trees.
+
+use std::hash::BuildHasherDefault;
+
+use indexmap::IndexMap;
+use itertools::Itertools;
+use rustc_hash::FxHashMap;
+use text_edit::TextEditBuilder;
+
+use crate::{
+ AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+ TextSize,
+};
+
+/// Returns ancestors of the node at the offset, sorted by length. This should
+/// do the right thing at an edge, e.g. when searching for expressions at `{
+/// $0foo }` we will get the name reference instead of the whole block, which
+/// we would get if we just did `find_token_at_offset(...).flat_map(|t|
+/// t.parent().ancestors())`.
+pub fn ancestors_at_offset(
+ node: &SyntaxNode,
+ offset: TextSize,
+) -> impl Iterator<Item = SyntaxNode> {
+ node.token_at_offset(offset)
+ .map(|token| token.parent_ancestors())
+ .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+}
+
+/// Finds a node of specific Ast type at offset. Note that this is slightly
+/// imprecise: if the cursor is strictly between two nodes of the desired type,
+/// as in
+///
+/// ```no_run
+/// struct Foo {}|struct Bar;
+/// ```
+///
+/// then the shorter node will be silently preferred.
+pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextSize) -> Option<N> {
+ ancestors_at_offset(syntax, offset).find_map(N::cast)
+}
+
+pub fn find_node_at_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
+ syntax.covering_element(range).ancestors().find_map(N::cast)
+}
+
+/// Skip to next non `trivia` token
+pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+ while token.kind().is_trivia() {
+ token = match direction {
+ Direction::Next => token.next_token()?,
+ Direction::Prev => token.prev_token()?,
+ }
+ }
+ Some(token)
+}
+/// Skip to next non `whitespace` token
+pub fn skip_whitespace_token(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+ while token.kind() == SyntaxKind::WHITESPACE {
+ token = match direction {
+ Direction::Next => token.next_token()?,
+ Direction::Prev => token.prev_token()?,
+ }
+ }
+ Some(token)
+}
+
+/// Finds the first sibling in the given direction which is not `trivia`
+pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
+ return match element {
+ NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
+ NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia),
+ };
+
+ fn not_trivia(element: &SyntaxElement) -> bool {
+ match element {
+ NodeOrToken::Node(_) => true,
+ NodeOrToken::Token(token) => !token.kind().is_trivia(),
+ }
+ }
+}
+
+pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNode> {
+ if u == v {
+ return Some(u.clone());
+ }
+
+ let u_depth = u.ancestors().count();
+ let v_depth = v.ancestors().count();
+ let keep = u_depth.min(v_depth);
+
+ let u_candidates = u.ancestors().skip(u_depth - keep);
+ let v_candidates = v.ancestors().skip(v_depth - keep);
+ let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?;
+ Some(res)
+}
+
+pub fn neighbor<T: AstNode>(me: &T, direction: Direction) -> Option<T> {
+ me.syntax().siblings(direction).skip(1).find_map(T::cast)
+}
+
+pub fn has_errors(node: &SyntaxNode) -> bool {
+ node.children().any(|it| it.kind() == SyntaxKind::ERROR)
+}
+
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
+
+#[derive(Debug, Hash, PartialEq, Eq)]
+enum TreeDiffInsertPos {
+ After(SyntaxElement),
+ AsFirstChild(SyntaxElement),
+}
+
+#[derive(Debug)]
+pub struct TreeDiff {
+ replacements: FxHashMap<SyntaxElement, SyntaxElement>,
+ deletions: Vec<SyntaxElement>,
+ // the vec as well as the indexmap are both here to preserve order
+ insertions: FxIndexMap<TreeDiffInsertPos, Vec<SyntaxElement>>,
+}
+
+impl TreeDiff {
+ pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
+ let _p = profile::span("into_text_edit");
+
+ for (anchor, to) in &self.insertions {
+ let offset = match anchor {
+ TreeDiffInsertPos::After(it) => it.text_range().end(),
+ TreeDiffInsertPos::AsFirstChild(it) => it.text_range().start(),
+ };
+ to.iter().for_each(|to| builder.insert(offset, to.to_string()));
+ }
+ for (from, to) in &self.replacements {
+ builder.replace(from.text_range(), to.to_string());
+ }
+ for text_range in self.deletions.iter().map(SyntaxElement::text_range) {
+ builder.delete(text_range);
+ }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.replacements.is_empty() && self.deletions.is_empty() && self.insertions.is_empty()
+ }
+}
+
+/// Finds a (potentially minimal) diff, which, applied to `from`, will result in `to`.
+///
+/// Specifically, returns a structure that consists of a replacements, insertions and deletions
+/// such that applying this map on `from` will result in `to`.
+///
+/// This function tries to find a fine-grained diff.
+pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
+ let _p = profile::span("diff");
+
+ let mut diff = TreeDiff {
+ replacements: FxHashMap::default(),
+ insertions: FxIndexMap::default(),
+ deletions: Vec::new(),
+ };
+ let (from, to) = (from.clone().into(), to.clone().into());
+
+ if !syntax_element_eq(&from, &to) {
+ go(&mut diff, from, to);
+ }
+ return diff;
+
+ fn syntax_element_eq(lhs: &SyntaxElement, rhs: &SyntaxElement) -> bool {
+ lhs.kind() == rhs.kind()
+ && lhs.text_range().len() == rhs.text_range().len()
+ && match (&lhs, &rhs) {
+ (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
+ lhs == rhs || lhs.text() == rhs.text()
+ }
+ (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
+ _ => false,
+ }
+ }
+
+ // FIXME: this is horribly inefficient. I bet there's a cool algorithm to diff trees properly.
+ fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) {
+ let (lhs, rhs) = match lhs.as_node().zip(rhs.as_node()) {
+ Some((lhs, rhs)) => (lhs, rhs),
+ _ => {
+ cov_mark::hit!(diff_node_token_replace);
+ diff.replacements.insert(lhs, rhs);
+ return;
+ }
+ };
+
+ let mut look_ahead_scratch = Vec::default();
+
+ let mut rhs_children = rhs.children_with_tokens();
+ let mut lhs_children = lhs.children_with_tokens();
+ let mut last_lhs = None;
+ loop {
+ let lhs_child = lhs_children.next();
+ match (lhs_child.clone(), rhs_children.next()) {
+ (None, None) => break,
+ (None, Some(element)) => {
+ let insert_pos = match last_lhs.clone() {
+ Some(prev) => {
+ cov_mark::hit!(diff_insert);
+ TreeDiffInsertPos::After(prev)
+ }
+ // first iteration, insert into our parent as the first child
+ None => {
+ cov_mark::hit!(diff_insert_as_first_child);
+ TreeDiffInsertPos::AsFirstChild(lhs.clone().into())
+ }
+ };
+ diff.insertions.entry(insert_pos).or_insert_with(Vec::new).push(element);
+ }
+ (Some(element), None) => {
+ cov_mark::hit!(diff_delete);
+ diff.deletions.push(element);
+ }
+ (Some(ref lhs_ele), Some(ref rhs_ele)) if syntax_element_eq(lhs_ele, rhs_ele) => {}
+ (Some(lhs_ele), Some(rhs_ele)) => {
+ // The nodes differ; look ahead for lhs_ele in rhs. If it's found, everything up
+ // to that element can be marked as an insertion. This keeps the diff minimal
+ // with regard to insertions that were actually made, which matters for `use`
+ // insertions, as we do not want to replace the entire module node.
+ look_ahead_scratch.push(rhs_ele.clone());
+ let mut rhs_children_clone = rhs_children.clone();
+ let mut insert = false;
+ for rhs_child in &mut rhs_children_clone {
+ if syntax_element_eq(&lhs_ele, &rhs_child) {
+ cov_mark::hit!(diff_insertions);
+ insert = true;
+ break;
+ }
+ look_ahead_scratch.push(rhs_child);
+ }
+ let drain = look_ahead_scratch.drain(..);
+ if insert {
+ let insert_pos = if let Some(prev) = last_lhs.clone().filter(|_| insert) {
+ TreeDiffInsertPos::After(prev)
+ } else {
+ cov_mark::hit!(insert_first_child);
+ TreeDiffInsertPos::AsFirstChild(lhs.clone().into())
+ };
+
+ diff.insertions.entry(insert_pos).or_insert_with(Vec::new).extend(drain);
+ rhs_children = rhs_children_clone;
+ } else {
+ go(diff, lhs_ele, rhs_ele);
+ }
+ }
+ }
+ last_lhs = lhs_child.or(last_lhs);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use itertools::Itertools;
+ use parser::SyntaxKind;
+ use text_edit::TextEdit;
+
+ use crate::{AstNode, SyntaxElement};
+
+ #[test]
+ fn replace_node_token() {
+ cov_mark::check!(diff_node_token_replace);
+ check_diff(
+ r#"use node;"#,
+ r#"ident"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 0: Token(USE_KW@0..3 "use") -> ident
+
+ deletions:
+
+ Line 1: " "
+ Line 1: node
+ Line 1: ;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn replace_parent() {
+ cov_mark::check!(diff_insert_as_first_child);
+ check_diff(
+ r#""#,
+ r#"use foo::bar;"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: AsFirstChild(Node(SOURCE_FILE@0..0))
+ -> use foo::bar;
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn insert_last() {
+ cov_mark::check!(diff_insert);
+ check_diff(
+ r#"
+use foo;
+use bar;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Node(USE@10..18))
+ -> "\n"
+ -> use baz;
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn insert_middle() {
+ check_diff(
+ r#"
+use foo;
+use baz;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Token(WHITESPACE@9..10 "\n"))
+ -> use bar;
+ -> "\n"
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ )
+ }
+
+ #[test]
+ fn insert_first() {
+ check_diff(
+ r#"
+use bar;
+use baz;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: After(Token(WHITESPACE@0..1 "\n"))
+ -> use foo;
+ -> "\n"
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ )
+ }
+
+ #[test]
+ fn first_child_insertion() {
+ cov_mark::check!(insert_first_child);
+ check_diff(
+ r#"fn main() {
+ stdi
+ }"#,
+ r#"use foo::bar;
+
+ fn main() {
+ stdi
+ }"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: AsFirstChild(Node(SOURCE_FILE@0..30))
+ -> use foo::bar;
+ -> "\n\n "
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn delete_last() {
+ cov_mark::check!(diff_delete);
+ check_diff(
+ r#"use foo;
+ use bar;"#,
+ r#"use foo;"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+
+
+ deletions:
+
+ Line 1: "\n "
+ Line 2: use bar;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn delete_middle() {
+ cov_mark::check!(diff_insertions);
+ check_diff(
+ r#"
+use expect_test::{expect, Expect};
+use text_edit::TextEdit;
+
+use crate::AstNode;
+"#,
+ r#"
+use expect_test::{expect, Expect};
+
+use crate::AstNode;
+"#,
+ expect![[r#"
+ insertions:
+
+ Line 1: After(Node(USE@1..35))
+ -> "\n\n"
+ -> use crate::AstNode;
+
+ replacements:
+
+
+
+ deletions:
+
+ Line 2: use text_edit::TextEdit;
+ Line 3: "\n\n"
+ Line 4: use crate::AstNode;
+ Line 5: "\n"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn delete_first() {
+ check_diff(
+ r#"
+use text_edit::TextEdit;
+
+use crate::AstNode;
+"#,
+ r#"
+use crate::AstNode;
+"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 2: Token(IDENT@5..14 "text_edit") -> crate
+ Line 2: Token(IDENT@16..24 "TextEdit") -> AstNode
+ Line 2: Token(WHITESPACE@25..27 "\n\n") -> "\n"
+
+ deletions:
+
+ Line 3: use crate::AstNode;
+ Line 4: "\n"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn merge_use() {
+ check_diff(
+ r#"
+use std::{
+ fmt,
+ hash::BuildHasherDefault,
+ ops::{self, RangeInclusive},
+};
+"#,
+ r#"
+use std::fmt;
+use std::hash::BuildHasherDefault;
+use std::ops::{self, RangeInclusive};
+"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Node(PATH_SEGMENT@5..8))
+ -> ::
+ -> fmt
+ Line 6: After(Token(WHITESPACE@86..87 "\n"))
+ -> use std::hash::BuildHasherDefault;
+ -> "\n"
+ -> use std::ops::{self, RangeInclusive};
+ -> "\n"
+
+ replacements:
+
+ Line 2: Token(IDENT@5..8 "std") -> std
+
+ deletions:
+
+ Line 2: ::
+ Line 2: {
+ fmt,
+ hash::BuildHasherDefault,
+ ops::{self, RangeInclusive},
+ }
+ "#]],
+ )
+ }
+
+ #[test]
+ fn early_return_assist() {
+ check_diff(
+ r#"
+fn main() {
+ if let Ok(x) = Err(92) {
+ foo(x);
+ }
+}
+ "#,
+ r#"
+fn main() {
+ let x = match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ };
+ foo(x);
+}
+ "#,
+ expect![[r#"
+ insertions:
+
+ Line 3: After(Node(BLOCK_EXPR@40..63))
+ -> " "
+ -> match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ }
+ -> ;
+ Line 3: After(Node(IF_EXPR@17..63))
+ -> "\n "
+ -> foo(x);
+
+ replacements:
+
+ Line 3: Token(IF_KW@17..19 "if") -> let
+ Line 3: Token(LET_KW@20..23 "let") -> x
+ Line 3: Node(BLOCK_EXPR@40..63) -> =
+
+ deletions:
+
+ Line 3: " "
+ Line 3: Ok(x)
+ Line 3: " "
+ Line 3: =
+ Line 3: " "
+ Line 3: Err(92)
+ "#]],
+ )
+ }
+
+ fn check_diff(from: &str, to: &str, expected_diff: Expect) {
+ let from_node = crate::SourceFile::parse(from).tree().syntax().clone();
+ let to_node = crate::SourceFile::parse(to).tree().syntax().clone();
+ let diff = super::diff(&from_node, &to_node);
+
+ let line_number =
+ |syn: &SyntaxElement| from[..syn.text_range().start().into()].lines().count();
+
+ let fmt_syntax = |syn: &SyntaxElement| match syn.kind() {
+ SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()),
+ _ => format!("{}", syn),
+ };
+
+ let insertions =
+ diff.insertions.iter().format_with("\n", |(k, v), f| -> Result<(), std::fmt::Error> {
+ f(&format!(
+ "Line {}: {:?}\n-> {}",
+ line_number(match k {
+ super::TreeDiffInsertPos::After(syn) => syn,
+ super::TreeDiffInsertPos::AsFirstChild(syn) => syn,
+ }),
+ k,
+ v.iter().format_with("\n-> ", |v, f| f(&fmt_syntax(v)))
+ ))
+ });
+
+ let replacements = diff
+ .replacements
+ .iter()
+ .sorted_by_key(|(syntax, _)| syntax.text_range().start())
+ .format_with("\n", |(k, v), f| {
+ f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v)))
+ });
+
+ let deletions = diff
+ .deletions
+ .iter()
+ .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), &fmt_syntax(v))));
+
+ let actual = format!(
+ "insertions:\n\n{}\n\nreplacements:\n\n{}\n\ndeletions:\n\n{}\n",
+ insertions, replacements, deletions
+ );
+ expected_diff.assert_eq(&actual);
+
+ let mut from = from.to_owned();
+ let mut text_edit = TextEdit::builder();
+ diff.into_text_edit(&mut text_edit);
+ text_edit.finish().apply(&mut from);
+ assert_eq!(&*from, to, "diff did not turn `from` to `to`");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
new file mode 100644
index 000000000..4aa64d0d6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -0,0 +1,367 @@
+//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s
+
+mod generated;
+mod traits;
+mod token_ext;
+mod node_ext;
+mod expr_ext;
+mod operators;
+pub mod edit;
+pub mod edit_in_place;
+pub mod make;
+
+use std::marker::PhantomData;
+
+use crate::{
+ syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
+ SyntaxKind,
+};
+
+pub use self::{
+ expr_ext::{ArrayExprKind, BlockModifier, CallableExpr, ElseBranch, LiteralKind},
+ generated::{nodes::*, tokens::*},
+ node_ext::{
+ AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
+ SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
+ },
+ operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
+ token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix},
+ traits::{
+ AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams,
+ HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
+ },
+};
+
+/// The main trait to go from untyped `SyntaxNode` to a typed ast. The
+/// conversion itself has zero runtime cost: ast and syntax nodes have exactly
+/// the same representation: a pointer to the tree root and a pointer to the
+/// node itself.
+pub trait AstNode {
+ fn can_cast(kind: SyntaxKind) -> bool
+ where
+ Self: Sized;
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxNode;
+ fn clone_for_update(&self) -> Self
+ where
+ Self: Sized,
+ {
+ Self::cast(self.syntax().clone_for_update()).unwrap()
+ }
+ fn clone_subtree(&self) -> Self
+ where
+ Self: Sized,
+ {
+ Self::cast(self.syntax().clone_subtree()).unwrap()
+ }
+}
+
+/// Like `AstNode`, but wraps tokens rather than interior nodes.
+pub trait AstToken {
+ fn can_cast(token: SyntaxKind) -> bool
+ where
+ Self: Sized;
+
+ fn cast(syntax: SyntaxToken) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxToken;
+
+ fn text(&self) -> &str {
+ self.syntax().text()
+ }
+}
+
+/// An iterator over `SyntaxNode` children of a particular AST type.
+#[derive(Debug, Clone)]
+pub struct AstChildren<N> {
+ inner: SyntaxNodeChildren,
+ ph: PhantomData<N>,
+}
+
+impl<N> AstChildren<N> {
+ fn new(parent: &SyntaxNode) -> Self {
+ AstChildren { inner: parent.children(), ph: PhantomData }
+ }
+}
+
+impl<N: AstNode> Iterator for AstChildren<N> {
+ type Item = N;
+ fn next(&mut self) -> Option<N> {
+ self.inner.find_map(N::cast)
+ }
+}
+
+mod support {
+ use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};
+
+ pub(super) fn child<N: AstNode>(parent: &SyntaxNode) -> Option<N> {
+ parent.children().find_map(N::cast)
+ }
+
+ pub(super) fn children<N: AstNode>(parent: &SyntaxNode) -> AstChildren<N> {
+ AstChildren::new(parent)
+ }
+
+ pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxToken> {
+ parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind)
+ }
+}
+
+#[test]
+fn assert_ast_is_object_safe() {
+ fn _f(_: &dyn AstNode, _: &dyn HasName) {}
+}
+
+#[test]
+fn test_doc_comment_none() {
+ let file = SourceFile::parse(
+ r#"
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert!(module.doc_comments().doc_comment_text().is_none());
+}
+
+#[test]
+fn test_outer_doc_comment_of_items() {
+ let file = SourceFile::parse(
+ r#"
+ /// doc
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" doc", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_inner_doc_comment_of_items() {
+ let file = SourceFile::parse(
+ r#"
+ //! doc
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert!(module.doc_comments().doc_comment_text().is_none());
+}
+
+#[test]
+fn test_doc_comment_of_statics() {
+ let file = SourceFile::parse(
+ r#"
+ /// Number of levels
+ static LEVELS: i32 = 0;
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let st = file.syntax().descendants().find_map(Static::cast).unwrap();
+ assert_eq!(" Number of levels", st.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_preserves_indents() {
+ let file = SourceFile::parse(
+ r#"
+ /// doc1
+ /// ```
+ /// fn foo() {
+ /// // ...
+ /// }
+ /// ```
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(
+ " doc1\n ```\n fn foo() {\n // ...\n }\n ```",
+ module.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_doc_comment_preserves_newlines() {
+ let file = SourceFile::parse(
+ r#"
+ /// this
+ /// is
+ /// mod
+ /// foo
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this\n is\n mod\n foo", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_single_line_block_strips_suffix() {
+ let file = SourceFile::parse(
+ r#"
+ /** this is mod foo*/
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this is mod foo", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_single_line_block_strips_suffix_whitespace() {
+ let file = SourceFile::parse(
+ r#"
+ /** this is mod foo */
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this is mod foo ", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_multi_line_block_strips_suffix() {
+ let file = SourceFile::parse(
+ r#"
+ /**
+ this
+ is
+ mod foo
+ */
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(
+ "\n this\n is\n mod foo\n ",
+ module.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_comments_preserve_trailing_whitespace() {
+ let file = SourceFile::parse(
+ "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}",
+ )
+ .ok()
+ .unwrap();
+ let def = file.syntax().descendants().find_map(Struct::cast).unwrap();
+ assert_eq!(
+ " Representation of a Realm. \n In the specification these are called Realm Records.",
+ def.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_four_slash_line_comment() {
+ let file = SourceFile::parse(
+ r#"
+ //// too many slashes to be a doc comment
+ /// doc comment
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" doc comment", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_where_predicates() {
+ fn assert_bound(text: &str, bound: Option<TypeBound>) {
+ assert_eq!(text, bound.unwrap().syntax().text().to_string());
+ }
+
+ let file = SourceFile::parse(
+ r#"
+fn foo()
+where
+ T: Clone + Copy + Debug + 'static,
+ 'a: 'b + 'c,
+ Iterator::Item: 'a + Debug,
+ Iterator::Item: Debug + 'a,
+ <T as Iterator>::Item: Debug + 'a,
+ for<'a> F: Fn(&'a str)
+{}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let where_clause = file.syntax().descendants().find_map(WhereClause::cast).unwrap();
+
+ let mut predicates = where_clause.predicates();
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert!(pred.for_token().is_none());
+ assert!(pred.generic_param_list().is_none());
+ assert_eq!("T", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Clone", bounds.next());
+ assert_bound("Copy", bounds.next());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'static", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("'a", pred.lifetime().unwrap().lifetime_ident_token().unwrap().text());
+
+ assert_bound("'b", bounds.next());
+ assert_bound("'c", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("<T as Iterator>::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert!(pred.for_token().is_some());
+ assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string());
+ assert_eq!("F", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Fn(&'a str)", bounds.next());
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
new file mode 100644
index 000000000..15805dfc8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
@@ -0,0 +1,174 @@
+//! This module contains functions for editing syntax trees. As the trees are
+//! immutable, all function here return a fresh copy of the tree, instead of
+//! doing an in-place modification.
+use std::{fmt, iter, ops};
+
+use crate::{
+ ast::{self, make, AstNode},
+ ted, AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
+};
+
+#[derive(Debug, Clone, Copy)]
+pub struct IndentLevel(pub u8);
+
+impl From<u8> for IndentLevel {
+ fn from(level: u8) -> IndentLevel {
+ IndentLevel(level)
+ }
+}
+
+impl fmt::Display for IndentLevel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let spaces = " ";
+ let buf;
+ let len = self.0 as usize * 4;
+ let indent = if len <= spaces.len() {
+ &spaces[..len]
+ } else {
+ buf = " ".repeat(len);
+ &buf
+ };
+ fmt::Display::fmt(indent, f)
+ }
+}
+
+impl ops::Add<u8> for IndentLevel {
+ type Output = IndentLevel;
+ fn add(self, rhs: u8) -> IndentLevel {
+ IndentLevel(self.0 + rhs)
+ }
+}
+
+impl IndentLevel {
+ pub fn single() -> IndentLevel {
+ IndentLevel(0)
+ }
+ pub fn is_zero(&self) -> bool {
+ self.0 == 0
+ }
+ pub fn from_element(element: &SyntaxElement) -> IndentLevel {
+ match element {
+ rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it),
+ rowan::NodeOrToken::Token(it) => IndentLevel::from_token(it),
+ }
+ }
+
+ pub fn from_node(node: &SyntaxNode) -> IndentLevel {
+ match node.first_token() {
+ Some(it) => Self::from_token(&it),
+ None => IndentLevel(0),
+ }
+ }
+
+ pub fn from_token(token: &SyntaxToken) -> IndentLevel {
+ for ws in prev_tokens(token.clone()).filter_map(ast::Whitespace::cast) {
+ let text = ws.syntax().text();
+ if let Some(pos) = text.rfind('\n') {
+ let level = text[pos + 1..].chars().count() / 4;
+ return IndentLevel(level as u8);
+ }
+ }
+ IndentLevel(0)
+ }
+
+ /// XXX: this intentionally doesn't change the indent of the very first token.
+ /// Ie, in something like
+ /// ```
+ /// fn foo() {
+ /// 92
+ /// }
+ /// ```
+ /// if you indent the block, the `{` token would stay put.
+ pub(super) fn increase_indent(self, node: &SyntaxNode) {
+ let tokens = node.preorder_with_tokens().filter_map(|event| match event {
+ rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
+ _ => None,
+ });
+ for token in tokens {
+ if let Some(ws) = ast::Whitespace::cast(token) {
+ if ws.text().contains('\n') {
+ let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self));
+ ted::replace(ws.syntax(), &new_ws);
+ }
+ }
+ }
+ }
+
+ pub(super) fn decrease_indent(self, node: &SyntaxNode) {
+ let tokens = node.preorder_with_tokens().filter_map(|event| match event {
+ rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
+ _ => None,
+ });
+ for token in tokens {
+ if let Some(ws) = ast::Whitespace::cast(token) {
+ if ws.text().contains('\n') {
+ let new_ws = make::tokens::whitespace(
+ &ws.syntax().text().replace(&format!("\n{}", self), "\n"),
+ );
+ ted::replace(ws.syntax(), &new_ws);
+ }
+ }
+ }
+ }
+}
+
+fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
+ iter::successors(Some(token), |token| token.prev_token())
+}
+
+/// Soft-deprecated in favor of mutable tree editing API `edit_in_place::Ident`.
+pub trait AstNodeEdit: AstNode + Clone + Sized {
+ fn indent_level(&self) -> IndentLevel {
+ IndentLevel::from_node(self.syntax())
+ }
+ #[must_use]
+ fn indent(&self, level: IndentLevel) -> Self {
+ fn indent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
+ let res = node.clone_subtree().clone_for_update();
+ level.increase_indent(&res);
+ res.clone_subtree()
+ }
+
+ Self::cast(indent_inner(self.syntax(), level)).unwrap()
+ }
+ #[must_use]
+ fn dedent(&self, level: IndentLevel) -> Self {
+ fn dedent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
+ let res = node.clone_subtree().clone_for_update();
+ level.decrease_indent(&res);
+ res.clone_subtree()
+ }
+
+ Self::cast(dedent_inner(self.syntax(), level)).unwrap()
+ }
+ #[must_use]
+ fn reset_indent(&self) -> Self {
+ let level = IndentLevel::from_node(self.syntax());
+ self.dedent(level)
+ }
+}
+
+impl<N: AstNode + Clone> AstNodeEdit for N {}
+
+#[test]
+fn test_increase_indent() {
+ let arm_list = {
+ let arm = make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit());
+ make::match_arm_list(vec![arm.clone(), arm])
+ };
+ assert_eq!(
+ arm_list.syntax().to_string(),
+ "{
+ _ => (),
+ _ => (),
+}"
+ );
+ let indented = arm_list.indent(IndentLevel(2));
+ assert_eq!(
+ indented.syntax().to_string(),
+ "{
+ _ => (),
+ _ => (),
+ }"
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
new file mode 100644
index 000000000..e3e928aec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -0,0 +1,717 @@
+//! Structural editing for ast.
+
+use std::iter::{empty, successors};
+
+use parser::{SyntaxKind, T};
+use rowan::SyntaxElement;
+
+use crate::{
+ algo::{self, neighbor},
+ ast::{self, edit::IndentLevel, make, HasGenericParams},
+ ted::{self, Position},
+ AstNode, AstToken, Direction,
+ SyntaxKind::{ATTR, COMMENT, WHITESPACE},
+ SyntaxNode,
+};
+
+use super::HasName;
+
+pub trait GenericParamsOwnerEdit: ast::HasGenericParams {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList;
+ fn get_or_create_where_clause(&self) -> ast::WhereClause;
+}
+
+impl GenericParamsOwnerEdit for ast::Fn {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(fn_token) = self.fn_token() {
+ Position::after(fn_token)
+ } else if let Some(param_list) = self.param_list() {
+ Position::before(param_list.syntax)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = if let Some(ty) = self.ret_type() {
+ Position::after(ty.syntax())
+ } else if let Some(param_list) = self.param_list() {
+ Position::after(param_list.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Impl {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = match self.impl_token() {
+ Some(imp_token) => Position::after(imp_token),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = match self.assoc_item_list() {
+ Some(items) => Position::before(items.syntax()),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Trait {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(trait_token) = self.trait_token() {
+ Position::after(trait_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = match self.assoc_item_list() {
+ Some(items) => Position::before(items.syntax()),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Struct {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(struct_token) = self.struct_token() {
+ Position::after(struct_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let tfl = self.field_list().and_then(|fl| match fl {
+ ast::FieldList::RecordFieldList(_) => None,
+ ast::FieldList::TupleFieldList(it) => Some(it),
+ });
+ let position = if let Some(tfl) = tfl {
+ Position::after(tfl.syntax())
+ } else if let Some(gpl) = self.generic_param_list() {
+ Position::after(gpl.syntax())
+ } else if let Some(name) = self.name() {
+ Position::after(name.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Enum {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(enum_token) = self.enum_token() {
+ Position::after(enum_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = if let Some(gpl) = self.generic_param_list() {
+ Position::after(gpl.syntax())
+ } else if let Some(name) = self.name() {
+ Position::after(name.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+fn create_where_clause(position: Position) {
+ let where_clause = make::where_clause(empty()).clone_for_update();
+ ted::insert(position, where_clause.syntax());
+}
+
+fn create_generic_param_list(position: Position) -> ast::GenericParamList {
+ let gpl = make::generic_param_list(empty()).clone_for_update();
+ ted::insert_raw(position, gpl.syntax());
+ gpl
+}
+
+pub trait AttrsOwnerEdit: ast::HasAttrs {
+ fn remove_attrs_and_docs(&self) {
+ remove_attrs_and_docs(self.syntax());
+
+ fn remove_attrs_and_docs(node: &SyntaxNode) {
+ let mut remove_next_ws = false;
+ for child in node.children_with_tokens() {
+ match child.kind() {
+ ATTR | COMMENT => {
+ remove_next_ws = true;
+ child.detach();
+ continue;
+ }
+ WHITESPACE if remove_next_ws => {
+ child.detach();
+ }
+ _ => (),
+ }
+ remove_next_ws = false;
+ }
+ }
+ }
+}
+
+impl<T: ast::HasAttrs> AttrsOwnerEdit for T {}
+
+impl ast::GenericParamList {
+ pub fn add_generic_param(&self, generic_param: ast::GenericParam) {
+ match self.generic_params().last() {
+ Some(last_param) => {
+ let position = Position::after(last_param.syntax());
+ let elements = vec![
+ make::token(T![,]).into(),
+ make::tokens::single_space().into(),
+ generic_param.syntax().clone().into(),
+ ];
+ ted::insert_all(position, elements);
+ }
+ None => {
+ let after_l_angle = Position::after(self.l_angle_token().unwrap());
+ ted::insert(after_l_angle, generic_param.syntax());
+ }
+ }
+ }
+}
+
+impl ast::WhereClause {
+ pub fn add_predicate(&self, predicate: ast::WherePred) {
+ if let Some(pred) = self.predicates().last() {
+ if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) {
+ ted::append_child_raw(self.syntax(), make::token(T![,]));
+ }
+ }
+ ted::append_child(self.syntax(), predicate.syntax());
+ }
+}
+
+impl ast::TypeBoundList {
+ pub fn remove(&self) {
+ match self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:]) {
+ Some(colon) => ted::remove_all(colon..=self.syntax().clone().into()),
+ None => ted::remove(self.syntax()),
+ }
+ }
+}
+
+impl ast::PathSegment {
+ pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
+ if self.generic_arg_list().is_none() {
+ let arg_list = make::generic_arg_list().clone_for_update();
+ ted::append_child(self.syntax(), arg_list.syntax());
+ }
+ self.generic_arg_list().unwrap()
+ }
+}
+
+impl ast::UseTree {
+ pub fn remove(&self) {
+ for dir in [Direction::Next, Direction::Prev] {
+ if let Some(next_use_tree) = neighbor(self, dir) {
+ let separators = self
+ .syntax()
+ .siblings_with_tokens(dir)
+ .skip(1)
+ .take_while(|it| it.as_node() != Some(next_use_tree.syntax()));
+ ted::remove_all_iter(separators);
+ break;
+ }
+ }
+ ted::remove(self.syntax());
+ }
+
+ pub fn get_or_create_use_tree_list(&self) -> ast::UseTreeList {
+ match self.use_tree_list() {
+ Some(it) => it,
+ None => {
+ let position = Position::last_child_of(self.syntax());
+ let use_tree_list = make::use_tree_list(empty()).clone_for_update();
+ let mut elements = Vec::with_capacity(2);
+ if self.coloncolon_token().is_none() {
+ elements.push(make::token(T![::]).into());
+ }
+ elements.push(use_tree_list.syntax().clone().into());
+ ted::insert_all_raw(position, elements);
+ use_tree_list
+ }
+ }
+ }
+
+ /// Splits off the given prefix, making it the path component of the use tree,
+ /// appending the rest of the path to all UseTreeList items.
+ ///
+ /// # Examples
+ ///
+ /// `prefix$0::suffix` -> `prefix::{suffix}`
+ ///
+ /// `prefix$0` -> `prefix::{self}`
+ ///
+ /// `prefix$0::*` -> `prefix::{*}`
+ pub fn split_prefix(&self, prefix: &ast::Path) {
+ debug_assert_eq!(self.path(), Some(prefix.top_path()));
+ let path = self.path().unwrap();
+ if &path == prefix && self.use_tree_list().is_none() {
+ if self.star_token().is_some() {
+ // path$0::* -> *
+ self.coloncolon_token().map(ted::remove);
+ ted::remove(prefix.syntax());
+ } else {
+ // path$0 -> self
+ let self_suffix =
+ make::path_unqualified(make::path_segment_self()).clone_for_update();
+ ted::replace(path.syntax(), self_suffix.syntax());
+ }
+ } else if split_path_prefix(prefix).is_none() {
+ return;
+ }
+ // At this point, prefix path is detached; _self_ use tree has suffix path.
+ // Next, transform 'suffix' use tree into 'prefix::{suffix}'
+ let subtree = self.clone_subtree().clone_for_update();
+ ted::remove_all_iter(self.syntax().children_with_tokens());
+ ted::insert(Position::first_child_of(self.syntax()), prefix.syntax());
+ self.get_or_create_use_tree_list().add_use_tree(subtree);
+
+ fn split_path_prefix(prefix: &ast::Path) -> Option<()> {
+ let parent = prefix.parent_path()?;
+ let segment = parent.segment()?;
+ if algo::has_errors(segment.syntax()) {
+ return None;
+ }
+ for p in successors(parent.parent_path(), |it| it.parent_path()) {
+ p.segment()?;
+ }
+ prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove);
+ ted::remove(prefix.syntax());
+ Some(())
+ }
+ }
+}
+
+impl ast::UseTreeList {
+ pub fn add_use_tree(&self, use_tree: ast::UseTree) {
+ let (position, elements) = match self.use_trees().last() {
+ Some(last_tree) => (
+ Position::after(last_tree.syntax()),
+ vec![
+ make::token(T![,]).into(),
+ make::tokens::single_space().into(),
+ use_tree.syntax.into(),
+ ],
+ ),
+ None => {
+ let position = match self.l_curly_token() {
+ Some(l_curly) => Position::after(l_curly),
+ None => Position::last_child_of(self.syntax()),
+ };
+ (position, vec![use_tree.syntax.into()])
+ }
+ };
+ ted::insert_all_raw(position, elements);
+ }
+}
+
+impl ast::Use {
+ pub fn remove(&self) {
+ let next_ws = self
+ .syntax()
+ .next_sibling_or_token()
+ .and_then(|it| it.into_token())
+ .and_then(ast::Whitespace::cast);
+ if let Some(next_ws) = next_ws {
+ let ws_text = next_ws.syntax().text();
+ if let Some(rest) = ws_text.strip_prefix('\n') {
+ if rest.is_empty() {
+ ted::remove(next_ws.syntax());
+ } else {
+ ted::replace(next_ws.syntax(), make::tokens::whitespace(rest));
+ }
+ }
+ }
+ ted::remove(self.syntax());
+ }
+}
+
+impl ast::Impl {
+ pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList {
+ if self.assoc_item_list().is_none() {
+ let assoc_item_list = make::assoc_item_list().clone_for_update();
+ ted::append_child(self.syntax(), assoc_item_list.syntax());
+ }
+ self.assoc_item_list().unwrap()
+ }
+}
+
+impl ast::AssocItemList {
+ pub fn add_item(&self, item: ast::AssocItem) {
+ let (indent, position, whitespace) = match self.assoc_items().last() {
+ Some(last_item) => (
+ IndentLevel::from_node(last_item.syntax()),
+ Position::after(last_item.syntax()),
+ "\n\n",
+ ),
+ None => match self.l_curly_token() {
+ Some(l_curly) => {
+ normalize_ws_between_braces(self.syntax());
+ (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
+ }
+ None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
+ },
+ };
+ let elements: Vec<SyntaxElement<_>> = vec![
+ make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(),
+ item.syntax().clone().into(),
+ ];
+ ted::insert_all(position, elements);
+ }
+}
+
+impl ast::Fn {
+ pub fn get_or_create_body(&self) -> ast::BlockExpr {
+ if self.body().is_none() {
+ let body = make::ext::empty_block_expr().clone_for_update();
+ match self.semicolon_token() {
+ Some(semi) => {
+ ted::replace(semi, body.syntax());
+ ted::insert(Position::before(body.syntax), make::tokens::single_space());
+ }
+ None => ted::append_child(self.syntax(), body.syntax()),
+ }
+ }
+ self.body().unwrap()
+ }
+}
+
+impl ast::MatchArm {
+ pub fn remove(&self) {
+ if let Some(sibling) = self.syntax().prev_sibling_or_token() {
+ if sibling.kind() == SyntaxKind::WHITESPACE {
+ ted::remove(sibling);
+ }
+ }
+ if let Some(sibling) = self.syntax().next_sibling_or_token() {
+ if sibling.kind() == T![,] {
+ ted::remove(sibling);
+ }
+ }
+ ted::remove(self.syntax());
+ }
+}
+
+impl ast::MatchArmList {
+ pub fn add_arm(&self, arm: ast::MatchArm) {
+ normalize_ws_between_braces(self.syntax());
+ let mut elements = Vec::new();
+ let position = match self.arms().last() {
+ Some(last_arm) => {
+ if needs_comma(&last_arm) {
+ ted::append_child(last_arm.syntax(), make::token(SyntaxKind::COMMA));
+ }
+ Position::after(last_arm.syntax().clone())
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into());
+ elements.push(arm.syntax().clone().into());
+ if needs_comma(&arm) {
+ ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA));
+ }
+ ted::insert_all(position, elements);
+
+ fn needs_comma(arm: &ast::MatchArm) -> bool {
+ arm.expr().map_or(false, |e| !e.is_block_like()) && arm.comma_token().is_none()
+ }
+ }
+}
+
+impl ast::RecordExprFieldList {
+ pub fn add_field(&self, field: ast::RecordExprField) {
+ let is_multiline = self.syntax().text().contains_char('\n');
+ let whitespace = if is_multiline {
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ make::tokens::whitespace(&format!("\n{}", indent))
+ } else {
+ make::tokens::single_space()
+ };
+
+ if is_multiline {
+ normalize_ws_between_braces(self.syntax());
+ }
+
+ let position = match self.fields().last() {
+ Some(last_field) => {
+ let comma = match last_field
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T![,])
+ {
+ Some(it) => it,
+ None => {
+ let comma = ast::make::token(T![,]);
+ ted::insert(Position::after(last_field.syntax()), &comma);
+ comma
+ }
+ };
+ Position::after(comma)
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+
+ ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
+ if is_multiline {
+ ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ }
+ }
+}
+
+impl ast::RecordExprField {
+ /// This will either replace the initializer, or in the case that this is a shorthand convert
+ /// the initializer into the name ref and insert the expr as the new initializer.
+ pub fn replace_expr(&self, expr: ast::Expr) {
+ if self.name_ref().is_some() {
+ match self.expr() {
+ Some(prev) => ted::replace(prev.syntax(), expr.syntax()),
+ None => ted::append_child(self.syntax(), expr.syntax()),
+ }
+ return;
+ }
+ // this is a shorthand
+ if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() {
+ if let Some(path) = path_expr.path() {
+ if let Some(name_ref) = path.as_single_name_ref() {
+ path_expr.syntax().detach();
+ let children = vec![
+ name_ref.syntax().clone().into(),
+ ast::make::token(T![:]).into(),
+ ast::make::tokens::single_space().into(),
+ expr.syntax().clone().into(),
+ ];
+ ted::insert_all_raw(Position::last_child_of(self.syntax()), children);
+ }
+ }
+ }
+ }
+}
+
+impl ast::RecordPatFieldList {
+ pub fn add_field(&self, field: ast::RecordPatField) {
+ let is_multiline = self.syntax().text().contains_char('\n');
+ let whitespace = if is_multiline {
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ make::tokens::whitespace(&format!("\n{}", indent))
+ } else {
+ make::tokens::single_space()
+ };
+
+ if is_multiline {
+ normalize_ws_between_braces(self.syntax());
+ }
+
+ let position = match self.fields().last() {
+ Some(last_field) => {
+ let comma = match last_field
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T![,])
+ {
+ Some(it) => it,
+ None => {
+ let comma = ast::make::token(T![,]);
+ ted::insert(Position::after(last_field.syntax()), &comma);
+ comma
+ }
+ };
+ Position::after(comma)
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+
+ ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
+ if is_multiline {
+ ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ }
+ }
+}
+impl ast::StmtList {
+ pub fn push_front(&self, statement: ast::Stmt) {
+ ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax());
+ }
+}
+
+fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
+ let l = node
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T!['{'])?;
+ let r = node
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T!['}'])?;
+
+ let indent = IndentLevel::from_node(node);
+
+ match l.next_sibling_or_token() {
+ Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => {
+ if ws.next_sibling_or_token()?.into_token()? == r {
+ ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+ Some(ws) if ws.kind() == T!['}'] => {
+ ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ _ => (),
+ }
+ Some(())
+}
+
+pub trait Indent: AstNode + Clone + Sized {
+ fn indent_level(&self) -> IndentLevel {
+ IndentLevel::from_node(self.syntax())
+ }
+ fn indent(&self, by: IndentLevel) {
+ by.increase_indent(self.syntax());
+ }
+ fn dedent(&self, by: IndentLevel) {
+ by.decrease_indent(self.syntax());
+ }
+ fn reindent_to(&self, target_level: IndentLevel) {
+ let current_level = IndentLevel::from_node(self.syntax());
+ self.dedent(current_level);
+ self.indent(target_level);
+ }
+}
+
+impl<N: AstNode + Clone> Indent for N {}
+
+#[cfg(test)]
+mod tests {
+ use std::fmt;
+
+ use crate::SourceFile;
+
+ use super::*;
+
+ fn ast_mut_from_text<N: AstNode>(text: &str) -> N {
+ let parse = SourceFile::parse(text);
+ parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update()
+ }
+
+ #[test]
+ fn test_create_generic_param_list() {
+ fn check_create_gpl<N: GenericParamsOwnerEdit + fmt::Display>(before: &str, after: &str) {
+ let gpl_owner = ast_mut_from_text::<N>(before);
+ gpl_owner.get_or_create_generic_param_list();
+ assert_eq!(gpl_owner.to_string(), after);
+ }
+
+ check_create_gpl::<ast::Fn>("fn foo", "fn foo<>");
+ check_create_gpl::<ast::Fn>("fn foo() {}", "fn foo<>() {}");
+
+ check_create_gpl::<ast::Impl>("impl", "impl<>");
+ check_create_gpl::<ast::Impl>("impl Struct {}", "impl<> Struct {}");
+ check_create_gpl::<ast::Impl>("impl Trait for Struct {}", "impl<> Trait for Struct {}");
+
+ check_create_gpl::<ast::Trait>("trait Trait<>", "trait Trait<>");
+ check_create_gpl::<ast::Trait>("trait Trait<> {}", "trait Trait<> {}");
+
+ check_create_gpl::<ast::Struct>("struct A", "struct A<>");
+ check_create_gpl::<ast::Struct>("struct A;", "struct A<>;");
+ check_create_gpl::<ast::Struct>("struct A();", "struct A<>();");
+ check_create_gpl::<ast::Struct>("struct A {}", "struct A<> {}");
+
+ check_create_gpl::<ast::Enum>("enum E", "enum E<>");
+ check_create_gpl::<ast::Enum>("enum E {", "enum E<> {");
+ }
+
+ #[test]
+ fn test_increase_indent() {
+ let arm_list = ast_mut_from_text::<ast::Fn>(
+ "fn foo() {
+ ;
+ ;
+}",
+ );
+ arm_list.indent(IndentLevel(2));
+ assert_eq!(
+ arm_list.to_string(),
+ "fn foo() {
+ ;
+ ;
+ }",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
new file mode 100644
index 000000000..db66d08a7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
@@ -0,0 +1,410 @@
+//! Various extension methods to ast Expr Nodes, which are hard to code-generate.
+//!
+//! These methods should only do simple, shallow tasks related to the syntax of the node itself.
+
+use crate::{
+ ast::{
+ self,
+ operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
+ support, AstChildren, AstNode,
+ },
+ AstToken,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, T,
+};
+
+impl ast::HasAttrs for ast::Expr {}
+
+impl ast::Expr {
+ pub fn is_block_like(&self) -> bool {
+ matches!(
+ self,
+ ast::Expr::IfExpr(_)
+ | ast::Expr::LoopExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::WhileExpr(_)
+ | ast::Expr::BlockExpr(_)
+ | ast::Expr::MatchExpr(_)
+ )
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ElseBranch {
+ Block(ast::BlockExpr),
+ IfExpr(ast::IfExpr),
+}
+
+impl From<ast::BlockExpr> for ElseBranch {
+ fn from(block_expr: ast::BlockExpr) -> Self {
+ Self::Block(block_expr)
+ }
+}
+
+impl From<ast::IfExpr> for ElseBranch {
+ fn from(if_expr: ast::IfExpr) -> Self {
+ Self::IfExpr(if_expr)
+ }
+}
+
+impl ast::IfExpr {
+ pub fn then_branch(&self) -> Option<ast::BlockExpr> {
+ self.children_after_condition().next()
+ }
+
+ pub fn else_branch(&self) -> Option<ElseBranch> {
+ let res = match self.children_after_condition().nth(1) {
+ Some(block) => ElseBranch::Block(block),
+ None => {
+ let elif = self.children_after_condition().next()?;
+ ElseBranch::IfExpr(elif)
+ }
+ };
+ Some(res)
+ }
+
+ fn children_after_condition<N: AstNode>(&self) -> impl Iterator<Item = N> {
+ self.syntax().children().skip(1).filter_map(N::cast)
+ }
+}
+
+#[test]
+fn if_block_condition() {
+ let parse = ast::SourceFile::parse(
+ r#"
+ fn test() {
+ if { true } { "if" }
+ else if { false } { "first elif" }
+ else if true { "second elif" }
+ else if (true) { "third elif" }
+ else { "else" }
+ }
+ "#,
+ );
+ let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
+ assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
+ let elif = match if_.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "first elif" }"#);
+ let elif = match elif.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "second elif" }"#);
+ let elif = match elif.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "third elif" }"#);
+ let else_ = match elif.else_branch().unwrap() {
+ ElseBranch::Block(else_) => else_,
+ ElseBranch::IfExpr(_) => panic!("should be `else`"),
+ };
+ assert_eq!(else_.syntax().text(), r#"{ "else" }"#);
+}
+
+#[test]
+fn if_condition_with_if_inside() {
+ let parse = ast::SourceFile::parse(
+ r#"
+ fn test() {
+ if if true { true } else { false } { "if" }
+ else { "else" }
+ }
+ "#,
+ );
+ let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
+ assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
+ let else_ = match if_.else_branch().unwrap() {
+ ElseBranch::Block(else_) => else_,
+ ElseBranch::IfExpr(_) => panic!("should be `else`"),
+ };
+ assert_eq!(else_.syntax().text(), r#"{ "else" }"#);
+}
+
+impl ast::PrefixExpr {
+ pub fn op_kind(&self) -> Option<UnaryOp> {
+ let res = match self.op_token()?.kind() {
+ T![*] => UnaryOp::Deref,
+ T![!] => UnaryOp::Not,
+ T![-] => UnaryOp::Neg,
+ _ => return None,
+ };
+ Some(res)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.syntax().first_child_or_token()?.into_token()
+ }
+}
+
+impl ast::BinExpr {
+ pub fn op_details(&self) -> Option<(SyntaxToken, BinaryOp)> {
+ self.syntax().children_with_tokens().filter_map(|it| it.into_token()).find_map(|c| {
+ #[rustfmt::skip]
+ let bin_op = match c.kind() {
+ T![||] => BinaryOp::LogicOp(LogicOp::Or),
+ T![&&] => BinaryOp::LogicOp(LogicOp::And),
+
+ T![==] => BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
+ T![!=] => BinaryOp::CmpOp(CmpOp::Eq { negated: true }),
+ T![<=] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: false }),
+ T![>=] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: false }),
+ T![<] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: true }),
+ T![>] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: true }),
+
+ T![+] => BinaryOp::ArithOp(ArithOp::Add),
+ T![*] => BinaryOp::ArithOp(ArithOp::Mul),
+ T![-] => BinaryOp::ArithOp(ArithOp::Sub),
+ T![/] => BinaryOp::ArithOp(ArithOp::Div),
+ T![%] => BinaryOp::ArithOp(ArithOp::Rem),
+ T![<<] => BinaryOp::ArithOp(ArithOp::Shl),
+ T![>>] => BinaryOp::ArithOp(ArithOp::Shr),
+ T![^] => BinaryOp::ArithOp(ArithOp::BitXor),
+ T![|] => BinaryOp::ArithOp(ArithOp::BitOr),
+ T![&] => BinaryOp::ArithOp(ArithOp::BitAnd),
+
+ T![=] => BinaryOp::Assignment { op: None },
+ T![+=] => BinaryOp::Assignment { op: Some(ArithOp::Add) },
+ T![*=] => BinaryOp::Assignment { op: Some(ArithOp::Mul) },
+ T![-=] => BinaryOp::Assignment { op: Some(ArithOp::Sub) },
+ T![/=] => BinaryOp::Assignment { op: Some(ArithOp::Div) },
+ T![%=] => BinaryOp::Assignment { op: Some(ArithOp::Rem) },
+ T![<<=] => BinaryOp::Assignment { op: Some(ArithOp::Shl) },
+ T![>>=] => BinaryOp::Assignment { op: Some(ArithOp::Shr) },
+ T![^=] => BinaryOp::Assignment { op: Some(ArithOp::BitXor) },
+ T![|=] => BinaryOp::Assignment { op: Some(ArithOp::BitOr) },
+ T![&=] => BinaryOp::Assignment { op: Some(ArithOp::BitAnd) },
+
+ _ => return None,
+ };
+ Some((c, bin_op))
+ })
+ }
+
+ pub fn op_kind(&self) -> Option<BinaryOp> {
+ self.op_details().map(|t| t.1)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.op_details().map(|t| t.0)
+ }
+
+ pub fn lhs(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).next()
+ }
+
+ pub fn rhs(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).nth(1)
+ }
+
+ pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) {
+ let mut children = support::children(self.syntax());
+ let first = children.next();
+ let second = children.next();
+ (first, second)
+ }
+}
+
+impl ast::RangeExpr {
+ fn op_details(&self) -> Option<(usize, SyntaxToken, RangeOp)> {
+ self.syntax().children_with_tokens().enumerate().find_map(|(ix, child)| {
+ let token = child.into_token()?;
+ let bin_op = match token.kind() {
+ T![..] => RangeOp::Exclusive,
+ T![..=] => RangeOp::Inclusive,
+ _ => return None,
+ };
+ Some((ix, token, bin_op))
+ })
+ }
+
+ pub fn op_kind(&self) -> Option<RangeOp> {
+ self.op_details().map(|t| t.2)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.op_details().map(|t| t.1)
+ }
+
+ pub fn start(&self) -> Option<ast::Expr> {
+ let op_ix = self.op_details()?.0;
+ self.syntax()
+ .children_with_tokens()
+ .take(op_ix)
+ .find_map(|it| ast::Expr::cast(it.into_node()?))
+ }
+
+ pub fn end(&self) -> Option<ast::Expr> {
+ let op_ix = self.op_details()?.0;
+ self.syntax()
+ .children_with_tokens()
+ .skip(op_ix + 1)
+ .find_map(|it| ast::Expr::cast(it.into_node()?))
+ }
+}
+
+impl ast::IndexExpr {
+ pub fn base(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).next()
+ }
+ pub fn index(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).nth(1)
+ }
+}
+
+pub enum ArrayExprKind {
+ Repeat { initializer: Option<ast::Expr>, repeat: Option<ast::Expr> },
+ ElementList(AstChildren<ast::Expr>),
+}
+
+impl ast::ArrayExpr {
+ pub fn kind(&self) -> ArrayExprKind {
+ if self.is_repeat() {
+ ArrayExprKind::Repeat {
+ initializer: support::children(self.syntax()).next(),
+ repeat: support::children(self.syntax()).nth(1),
+ }
+ } else {
+ ArrayExprKind::ElementList(support::children(self.syntax()))
+ }
+ }
+
+ fn is_repeat(&self) -> bool {
+ self.semicolon_token().is_some()
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum LiteralKind {
+ String(ast::String),
+ ByteString(ast::ByteString),
+ IntNumber(ast::IntNumber),
+ FloatNumber(ast::FloatNumber),
+ Char(ast::Char),
+ Byte(ast::Byte),
+ Bool(bool),
+}
+
+impl ast::Literal {
+ pub fn token(&self) -> SyntaxToken {
+ self.syntax()
+ .children_with_tokens()
+ .find(|e| e.kind() != ATTR && !e.kind().is_trivia())
+ .and_then(|e| e.into_token())
+ .unwrap()
+ }
+
+ pub fn kind(&self) -> LiteralKind {
+ let token = self.token();
+
+ if let Some(t) = ast::IntNumber::cast(token.clone()) {
+ return LiteralKind::IntNumber(t);
+ }
+ if let Some(t) = ast::FloatNumber::cast(token.clone()) {
+ return LiteralKind::FloatNumber(t);
+ }
+ if let Some(t) = ast::String::cast(token.clone()) {
+ return LiteralKind::String(t);
+ }
+ if let Some(t) = ast::ByteString::cast(token.clone()) {
+ return LiteralKind::ByteString(t);
+ }
+ if let Some(t) = ast::Char::cast(token.clone()) {
+ return LiteralKind::Char(t);
+ }
+ if let Some(t) = ast::Byte::cast(token.clone()) {
+ return LiteralKind::Byte(t);
+ }
+
+ match token.kind() {
+ T![true] => LiteralKind::Bool(true),
+ T![false] => LiteralKind::Bool(false),
+ _ => unreachable!(),
+ }
+ }
+}
+
+pub enum BlockModifier {
+ Async(SyntaxToken),
+ Unsafe(SyntaxToken),
+ Try(SyntaxToken),
+ Const(SyntaxToken),
+ Label(ast::Label),
+}
+
+impl ast::BlockExpr {
+ pub fn modifier(&self) -> Option<BlockModifier> {
+ self.async_token()
+ .map(BlockModifier::Async)
+ .or_else(|| self.unsafe_token().map(BlockModifier::Unsafe))
+ .or_else(|| self.try_token().map(BlockModifier::Try))
+ .or_else(|| self.const_token().map(BlockModifier::Const))
+ .or_else(|| self.label().map(BlockModifier::Label))
+ }
+ /// false if the block is an intrinsic part of the syntax and can't be
+ /// replaced with arbitrary expression.
+ ///
+ /// ```not_rust
+ /// fn foo() { not_stand_alone }
+ /// const FOO: () = { stand_alone };
+ /// ```
+ pub fn is_standalone(&self) -> bool {
+ let parent = match self.syntax().parent() {
+ Some(it) => it,
+ None => return true,
+ };
+ !matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR)
+ }
+}
+
+#[test]
+fn test_literal_with_attr() {
+ let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
+ let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
+ assert_eq!(lit.token().text(), r#""Hello""#);
+}
+
+impl ast::RecordExprField {
+ pub fn parent_record_lit(&self) -> ast::RecordExpr {
+ self.syntax().ancestors().find_map(ast::RecordExpr::cast).unwrap()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum CallableExpr {
+ Call(ast::CallExpr),
+ MethodCall(ast::MethodCallExpr),
+}
+
+impl ast::HasAttrs for CallableExpr {}
+impl ast::HasArgList for CallableExpr {}
+
+impl AstNode for CallableExpr {
+ fn can_cast(kind: parser::SyntaxKind) -> bool
+ where
+ Self: Sized,
+ {
+ ast::CallExpr::can_cast(kind) || ast::MethodCallExpr::can_cast(kind)
+ }
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized,
+ {
+ if let Some(it) = ast::CallExpr::cast(syntax.clone()) {
+ Some(Self::Call(it))
+ } else {
+ ast::MethodCallExpr::cast(syntax).map(Self::MethodCall)
+ }
+ }
+
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Self::Call(it) => it.syntax(),
+ Self::MethodCall(it) => it.syntax(),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs
new file mode 100644
index 000000000..843b43cf0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs
@@ -0,0 +1,41 @@
+//! This file is actually hand-written, but the submodules are indeed generated.
+#[rustfmt::skip]
+pub(crate) mod nodes;
+#[rustfmt::skip]
+pub(crate) mod tokens;
+
+use crate::{
+ AstNode,
+ SyntaxKind::{self, *},
+ SyntaxNode,
+};
+
+pub(crate) use nodes::*;
+
+// Stmt is the only nested enum, so it's easier to just hand-write it
+impl AstNode for Stmt {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ LET_STMT | EXPR_STMT => true,
+ _ => Item::can_cast(kind),
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ LET_STMT => Stmt::LetStmt(LetStmt { syntax }),
+ EXPR_STMT => Stmt::ExprStmt(ExprStmt { syntax }),
+ _ => {
+ let item = Item::cast(syntax)?;
+ Stmt::Item(item)
+ }
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Stmt::LetStmt(it) => &it.syntax,
+ Stmt::ExprStmt(it) => &it.syntax,
+ Stmt::Item(it) => it.syntax(),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
new file mode 100644
index 000000000..63309a155
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -0,0 +1,4806 @@
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+#![allow(non_snake_case)]
+use crate::{
+ ast::{self, support, AstChildren, AstNode},
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, T,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Name {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Name {
+ pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+ pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NameRef {
+ pub(crate) syntax: SyntaxNode,
+}
+impl NameRef {
+ pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+ pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+ pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
+ pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+ pub fn Self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![Self]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Lifetime {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Lifetime {
+ pub fn lifetime_ident_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![lifetime_ident])
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Path {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Path {
+ pub fn qualifier(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn segment(&self) -> Option<PathSegment> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathSegment {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathSegment {
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl GenericArgList {
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn generic_args(&self) -> AstChildren<GenericArg> { support::children(&self.syntax) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParamList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParamList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn self_param(&self) -> Option<SelfParam> { support::child(&self.syntax) }
+ pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+ pub fn params(&self) -> AstChildren<Param> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RetType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RetType {
+ pub fn thin_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![->]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathType {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeArg {
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocTypeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for AssocTypeArg {}
+impl AssocTypeArg {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LifetimeArg {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ConstArg {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericParamList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl GenericParamList {
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBoundList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeBoundList {
+ pub fn bounds(&self) -> AstChildren<TypeBound> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCall {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroCall {}
+impl ast::HasDocComments for MacroCall {}
+impl MacroCall {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Attr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Attr {
+ pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn meta(&self) -> Option<Meta> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TokenTree {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TokenTree {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroItems {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasModuleItem for MacroItems {}
+impl MacroItems {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroStmts {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroStmts {
+ pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SourceFile {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for SourceFile {}
+impl ast::HasModuleItem for SourceFile {}
+impl ast::HasDocComments for SourceFile {}
+impl SourceFile {
+ pub fn shebang_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![shebang]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Const {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Const {}
+impl ast::HasName for Const {}
+impl ast::HasVisibility for Const {}
+impl ast::HasDocComments for Const {}
+impl Const {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Enum {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Enum {}
+impl ast::HasName for Enum {}
+impl ast::HasVisibility for Enum {}
+impl ast::HasGenericParams for Enum {}
+impl ast::HasDocComments for Enum {}
+impl Enum {
+ pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![enum]) }
+ pub fn variant_list(&self) -> Option<VariantList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternBlock {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternBlock {}
+impl ast::HasDocComments for ExternBlock {}
+impl ExternBlock {
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn extern_item_list(&self) -> Option<ExternItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternCrate {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternCrate {}
+impl ast::HasVisibility for ExternCrate {}
+impl ast::HasDocComments for ExternCrate {}
+impl ExternCrate {
+ pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+ pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Fn {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Fn {}
+impl ast::HasName for Fn {}
+impl ast::HasVisibility for Fn {}
+impl ast::HasGenericParams for Fn {}
+impl ast::HasDocComments for Fn {}
+impl Fn {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Impl {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Impl {}
+impl ast::HasVisibility for Impl {}
+impl ast::HasGenericParams for Impl {}
+impl ast::HasDocComments for Impl {}
+impl Impl {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroRules {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroRules {}
+impl ast::HasName for MacroRules {}
+impl ast::HasVisibility for MacroRules {}
+impl ast::HasDocComments for MacroRules {}
+impl MacroRules {
+ pub fn macro_rules_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![macro_rules])
+ }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroDef {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroDef {}
+impl ast::HasName for MacroDef {}
+impl ast::HasVisibility for MacroDef {}
+impl ast::HasDocComments for MacroDef {}
+impl MacroDef {
+ pub fn macro_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![macro]) }
+ pub fn args(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Module {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Module {}
+impl ast::HasName for Module {}
+impl ast::HasVisibility for Module {}
+impl ast::HasDocComments for Module {}
+impl Module {
+ pub fn mod_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mod]) }
+ pub fn item_list(&self) -> Option<ItemList> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Static {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Static {}
+impl ast::HasName for Static {}
+impl ast::HasVisibility for Static {}
+impl ast::HasDocComments for Static {}
+impl Static {
+ pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Struct {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Struct {}
+impl ast::HasName for Struct {}
+impl ast::HasVisibility for Struct {}
+impl ast::HasGenericParams for Struct {}
+impl ast::HasDocComments for Struct {}
+impl Struct {
+ pub fn struct_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![struct]) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Trait {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Trait {}
+impl ast::HasName for Trait {}
+impl ast::HasVisibility for Trait {}
+impl ast::HasGenericParams for Trait {}
+impl ast::HasTypeBounds for Trait {}
+impl ast::HasDocComments for Trait {}
+impl Trait {
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn auto_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![auto]) }
+ pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
+ pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TypeAlias {}
+impl ast::HasName for TypeAlias {}
+impl ast::HasVisibility for TypeAlias {}
+impl ast::HasGenericParams for TypeAlias {}
+impl ast::HasTypeBounds for TypeAlias {}
+impl ast::HasDocComments for TypeAlias {}
+impl TypeAlias {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Union {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Union {}
+impl ast::HasName for Union {}
+impl ast::HasVisibility for Union {}
+impl ast::HasGenericParams for Union {}
+impl ast::HasDocComments for Union {}
+impl Union {
+ pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) }
+ pub fn record_field_list(&self) -> Option<RecordFieldList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Use {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Use {}
+impl ast::HasVisibility for Use {}
+impl ast::HasDocComments for Use {}
+impl Use {
+ pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
+ pub fn use_tree(&self) -> Option<UseTree> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Visibility {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Visibility {
+ pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) }
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ItemList {}
+impl ast::HasModuleItem for ItemList {}
+impl ItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Rename {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasName for Rename {}
+impl Rename {
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTree {
+ pub(crate) syntax: SyntaxNode,
+}
+impl UseTree {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+ pub fn use_tree_list(&self) -> Option<UseTreeList> { support::child(&self.syntax) }
+ pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTreeList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl UseTreeList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn use_trees(&self) -> AstChildren<UseTree> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Abi {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Abi {
+ pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhereClause {
+ pub(crate) syntax: SyntaxNode,
+}
+impl WhereClause {
+ pub fn where_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![where]) }
+ pub fn predicates(&self) -> AstChildren<WherePred> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BlockExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BlockExpr {}
+impl BlockExpr {
+ pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
+ pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn stmt_list(&self) -> Option<StmtList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for SelfParam {}
+impl ast::HasName for SelfParam {}
+impl SelfParam {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Param {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Param {}
+impl Param {
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn dotdotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![...]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordField> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleFieldList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<TupleField> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordField {}
+impl ast::HasName for RecordField {}
+impl ast::HasVisibility for RecordField {}
+impl ast::HasDocComments for RecordField {}
+impl RecordField {
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TupleField {}
+impl ast::HasVisibility for TupleField {}
+impl ast::HasDocComments for TupleField {}
+impl TupleField {
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct VariantList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl VariantList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn variants(&self) -> AstChildren<Variant> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Variant {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Variant {}
+impl ast::HasName for Variant {}
+impl ast::HasVisibility for Variant {}
+impl ast::HasDocComments for Variant {}
+impl Variant {
+ pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AssocItemList {}
+impl AssocItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn assoc_items(&self) -> AstChildren<AssocItem> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternItemList {}
+impl ExternItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn extern_items(&self) -> AstChildren<ExternItem> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ConstParam {}
+impl ast::HasName for ConstParam {}
+impl ConstParam {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn default_val(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LifetimeParam {}
+impl ast::HasTypeBounds for LifetimeParam {}
+impl LifetimeParam {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TypeParam {}
+impl ast::HasName for TypeParam {}
+impl ast::HasTypeBounds for TypeParam {}
+impl TypeParam {
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn default_type(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WherePred {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for WherePred {}
+impl WherePred {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Meta {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Meta {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExprStmt {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ExprStmt {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetStmt {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetStmt {}
+impl LetStmt {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn initializer(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn let_else(&self) -> Option<LetElse> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetElse {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LetElse {
+ pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+ pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ArrayExpr {}
+impl ArrayExpr {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AwaitExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AwaitExpr {}
+impl AwaitExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn await_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![await]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BinExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BinExpr {}
+impl BinExpr {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BoxExpr {}
+impl BoxExpr {
+ pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BreakExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BreakExpr {}
+impl BreakExpr {
+ pub fn break_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![break]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CallExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for CallExpr {}
+impl ast::HasArgList for CallExpr {}
+impl CallExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CastExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for CastExpr {}
+impl CastExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ClosureExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ClosureExpr {}
+impl ClosureExpr {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ContinueExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ContinueExpr {}
+impl ContinueExpr {
+ pub fn continue_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![continue])
+ }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FieldExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for FieldExpr {}
+impl FieldExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ForExpr {}
+impl ForExpr {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IfExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IfExpr {}
+impl IfExpr {
+ pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+ pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IndexExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IndexExpr {}
+impl IndexExpr {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Literal {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Literal {}
+impl Literal {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LoopExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LoopExpr {}
+impl ast::HasLoopBody for LoopExpr {}
+impl LoopExpr {
+ pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroExpr {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchExpr {}
+impl MatchExpr {
+ pub fn match_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![match]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn match_arm_list(&self) -> Option<MatchArmList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MethodCallExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MethodCallExpr {}
+impl ast::HasArgList for MethodCallExpr {}
+impl MethodCallExpr {
+ pub fn receiver(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ParenExpr {}
+impl ParenExpr {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for PathExpr {}
+impl PathExpr {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PrefixExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for PrefixExpr {}
+impl PrefixExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangeExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RangeExpr {}
+impl RangeExpr {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordExpr {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn record_expr_field_list(&self) -> Option<RecordExprFieldList> {
+ support::child(&self.syntax)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RefExpr {}
+impl RefExpr {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn raw_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![raw]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ReturnExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ReturnExpr {}
+impl ReturnExpr {
+ pub fn return_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![return]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TryExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TryExpr {}
+impl TryExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TupleExpr {}
+impl TupleExpr {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhileExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for WhileExpr {}
+impl WhileExpr {
+ pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct YieldExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for YieldExpr {}
+impl YieldExpr {
+ pub fn yield_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![yield]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+// --- Expression-family nodes --------------------------------------------------
+// Uniform boilerplate: each struct is a thin typed wrapper around a `SyntaxNode`,
+// and every accessor returns the FIRST child/token of the requested kind via
+// `support::{child, token, children}` — hence the `Option`/iterator returns.
+// NOTE(review): this section appears machine-generated; fixes belong in the
+// generator, not in hand edits here.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetExpr {}
+impl LetExpr {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UnderscoreExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for UnderscoreExpr {}
+impl UnderscoreExpr {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct StmtList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for StmtList {}
+impl StmtList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+ // Trailing expression of the block, if any (the value the block evaluates to).
+ pub fn tail_expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Label {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Label {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordExprFieldList {}
+impl RecordExprFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordExprField> { support::children(&self.syntax) }
+ // `..` plus `spread` model functional-update syntax: `S { a, ..base }`.
+ pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+ pub fn spread(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordExprField {}
+impl RecordExprField {
+ // `name_ref`/`colon`/`expr` are all optional: shorthand fields (`S { a }`)
+ // have no colon/expr pair in the tree.
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ArgList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn args(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArmList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchArmList {}
+impl MatchArmList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn arms(&self) -> AstChildren<MatchArm> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArm {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchArm {}
+impl MatchArm {
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn guard(&self) -> Option<MatchGuard> { support::child(&self.syntax) }
+ pub fn fat_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=>]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchGuard {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MatchGuard {
+ // NOTE(review): only the `if` token is exposed here; the guard's condition
+ // expression has no typed accessor in this block — presumably handled
+ // elsewhere (hand-written ext or grammar change). Confirm against the
+ // grammar definition before relying on it.
+ pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+}
+
+// --- Type-family nodes --------------------------------------------------------
+// Same generated pattern as above: typed wrappers over `SyntaxNode`, accessors
+// return the first matching child/token and are therefore all optional.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ArrayType {
+ // `[T; expr]` — `expr` is the array length.
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DynTraitType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl DynTraitType {
+ pub fn dyn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![dyn]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FnPtrType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl FnPtrType {
+ // Qualifiers (`const`/`async`/`unsafe`/`extern "abi"`) are each optional.
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ForType {
+ // Higher-ranked type: `for<'a> fn(&'a T)`.
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ImplTraitType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ImplTraitType {
+ pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct InferType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl InferType {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroType {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NeverType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl NeverType {
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParenType {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PtrType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PtrType {
+ // `*const T` / `*mut T` — exactly one of the two mutability tokens appears
+ // in well-formed code, but the tree does not enforce that here.
+ pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RefType {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SliceType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl SliceType {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleType {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Type> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBound {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeBound {
+ // Covers lifetime bounds, `?Sized`-style relaxed bounds, and `~const`
+ // bounds — hence the mix of optional tokens.
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+ pub fn tilde_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![~]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+// --- Pattern-family nodes -----------------------------------------------------
+// Same generated accessor pattern as the expression/type sections.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IdentPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IdentPat {}
+impl ast::HasName for IdentPat {}
+impl IdentPat {
+ // `ref mut name @ subpat` — every piece optional except the name (via HasName).
+ pub fn ref_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ref]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn at_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![@]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl BoxPat {
+ pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RestPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RestPat {}
+impl RestPat {
+ pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LiteralPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LiteralPat {
+ pub fn literal(&self) -> Option<Literal> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroPat {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct OrPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl OrPat {
+ pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParenPat {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WildcardPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl WildcardPat {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangePat {
+ pub(crate) syntax: SyntaxNode,
+}
+// NOTE(review): deliberately empty — no typed accessors are generated for
+// range patterns here; presumably callers inspect start/end via raw children
+// or a hand-written extension. Confirm against the grammar before adding any.
+impl RangePat {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn record_pat_field_list(&self) -> Option<RecordPatFieldList> {
+ support::child(&self.syntax)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RefPat {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SlicePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl SlicePat {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TuplePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TuplePat {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleStructPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleStructPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstBlockPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ConstBlockPat {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordPatFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordPatField> { support::children(&self.syntax) }
+ // The trailing `..` in `S { a, .. }` is modelled as a nested RestPat node.
+ pub fn rest_pat(&self) -> Option<RestPat> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordPatField {}
+impl RecordPatField {
+ // Shorthand fields (`S { a }`) have no `name_ref`/`colon`; only the pat.
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+// --- Grouping enums -----------------------------------------------------------
+// Each enum unions the concrete node structs that can appear in one grammar
+// position; trait impls on an enum mean every variant supports that trait.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+ TypeArg(TypeArg),
+ AssocTypeArg(AssocTypeArg),
+ LifetimeArg(LifetimeArg),
+ ConstArg(ConstArg),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Type {
+ ArrayType(ArrayType),
+ DynTraitType(DynTraitType),
+ FnPtrType(FnPtrType),
+ ForType(ForType),
+ ImplTraitType(ImplTraitType),
+ InferType(InferType),
+ MacroType(MacroType),
+ NeverType(NeverType),
+ ParenType(ParenType),
+ PathType(PathType),
+ PtrType(PtrType),
+ RefType(RefType),
+ SliceType(SliceType),
+ TupleType(TupleType),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Expr {
+ ArrayExpr(ArrayExpr),
+ AwaitExpr(AwaitExpr),
+ BinExpr(BinExpr),
+ BlockExpr(BlockExpr),
+ BoxExpr(BoxExpr),
+ BreakExpr(BreakExpr),
+ CallExpr(CallExpr),
+ CastExpr(CastExpr),
+ ClosureExpr(ClosureExpr),
+ ContinueExpr(ContinueExpr),
+ FieldExpr(FieldExpr),
+ ForExpr(ForExpr),
+ IfExpr(IfExpr),
+ IndexExpr(IndexExpr),
+ Literal(Literal),
+ LoopExpr(LoopExpr),
+ MacroExpr(MacroExpr),
+ // NOTE(review): MacroStmts as an Expr variant is unusual — presumably an
+ // artifact of macro-expansion trees; confirm against the grammar source.
+ MacroStmts(MacroStmts),
+ MatchExpr(MatchExpr),
+ MethodCallExpr(MethodCallExpr),
+ ParenExpr(ParenExpr),
+ PathExpr(PathExpr),
+ PrefixExpr(PrefixExpr),
+ RangeExpr(RangeExpr),
+ RecordExpr(RecordExpr),
+ RefExpr(RefExpr),
+ ReturnExpr(ReturnExpr),
+ TryExpr(TryExpr),
+ TupleExpr(TupleExpr),
+ WhileExpr(WhileExpr),
+ YieldExpr(YieldExpr),
+ LetExpr(LetExpr),
+ UnderscoreExpr(UnderscoreExpr),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Item {
+ Const(Const),
+ Enum(Enum),
+ ExternBlock(ExternBlock),
+ ExternCrate(ExternCrate),
+ Fn(Fn),
+ Impl(Impl),
+ MacroCall(MacroCall),
+ MacroRules(MacroRules),
+ MacroDef(MacroDef),
+ Module(Module),
+ Static(Static),
+ Struct(Struct),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ Union(Union),
+ Use(Use),
+}
+impl ast::HasAttrs for Item {}
+impl ast::HasDocComments for Item {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Stmt {
+ ExprStmt(ExprStmt),
+ Item(Item),
+ LetStmt(LetStmt),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Pat {
+ IdentPat(IdentPat),
+ BoxPat(BoxPat),
+ RestPat(RestPat),
+ LiteralPat(LiteralPat),
+ MacroPat(MacroPat),
+ OrPat(OrPat),
+ ParenPat(ParenPat),
+ PathPat(PathPat),
+ WildcardPat(WildcardPat),
+ RangePat(RangePat),
+ RecordPat(RecordPat),
+ RefPat(RefPat),
+ SlicePat(SlicePat),
+ TuplePat(TuplePat),
+ TupleStructPat(TupleStructPat),
+ ConstBlockPat(ConstBlockPat),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum FieldList {
+ RecordFieldList(RecordFieldList),
+ TupleFieldList(TupleFieldList),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Adt {
+ Enum(Enum),
+ Struct(Struct),
+ Union(Union),
+}
+impl ast::HasAttrs for Adt {}
+impl ast::HasDocComments for Adt {}
+impl ast::HasGenericParams for Adt {}
+impl ast::HasName for Adt {}
+impl ast::HasVisibility for Adt {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+ Const(Const),
+ Fn(Fn),
+ MacroCall(MacroCall),
+ TypeAlias(TypeAlias),
+}
+impl ast::HasAttrs for AssocItem {}
+impl ast::HasDocComments for AssocItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ExternItem {
+ Fn(Fn),
+ MacroCall(MacroCall),
+ Static(Static),
+ TypeAlias(TypeAlias),
+}
+impl ast::HasAttrs for ExternItem {}
+impl ast::HasDocComments for ExternItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+ ConstParam(ConstParam),
+ LifetimeParam(LifetimeParam),
+ TypeParam(TypeParam),
+}
+impl ast::HasAttrs for GenericParam {}
+
+// --- Trait-erased node wrappers -----------------------------------------------
+// Each `AnyHasX` wraps any node known (by construction elsewhere) to support
+// trait `X`, so callers can use the trait without naming the concrete node type.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasArgList for AnyHasArgList {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasAttrs {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AnyHasAttrs {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasDocComments {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasDocComments for AnyHasDocComments {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasGenericParams {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasGenericParams for AnyHasGenericParams {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasLoopBody {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasLoopBody for AnyHasLoopBody {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasModuleItem {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasModuleItem for AnyHasModuleItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasName {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasName for AnyHasName {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasTypeBounds {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for AnyHasTypeBounds {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasVisibility {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasVisibility for AnyHasVisibility {}
+// --- AstNode impls ------------------------------------------------------------
+// Generated, structurally identical impls: `can_cast` checks the node's
+// `SyntaxKind`, `cast` wraps the untyped node if the kind matches (cheap, no
+// allocation), `syntax` unwraps back to the raw node.
+impl AstNode for Name {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NAME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NameRef {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NAME_REF }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Lifetime {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Path {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathSegment {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_SEGMENT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_ARG_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParamList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RetType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RET_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocTypeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_TYPE_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericParamList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBoundList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroCall {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_CALL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Attr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ATTR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TokenTree {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TOKEN_TREE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroItems {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_ITEMS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroStmts {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SourceFile {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SOURCE_FILE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// AstNode impls for item nodes — same generated can_cast/cast/syntax pattern
+// as the preceding section; only the node type and SyntaxKind constant vary.
+impl AstNode for Const {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Enum {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ENUM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternBlock {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_BLOCK }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternCrate {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_CRATE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Fn {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FN }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Impl {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroRules {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_RULES }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroDef {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_DEF }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Module {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MODULE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Static {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STATIC }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Struct {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STRUCT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Trait {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeAlias {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ALIAS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Union {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == UNION }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Use {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// AstNode impls for the remaining list/field/variant nodes — identical
+// generated pattern; only the node type and SyntaxKind constant vary.
+impl AstNode for Visibility {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VISIBILITY }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Rename {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RENAME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTree {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTreeList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Abi {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ABI }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhereClause {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BlockExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SelfParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SELF_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Param {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for VariantList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Variant {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WherePred {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_PRED }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Meta {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == META }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExprStmt {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXPR_STMT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetStmt {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_STMT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetElse {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_ELSE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AwaitExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == AWAIT_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BinExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BIN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BreakExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BREAK_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CallExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CALL_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CastExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CAST_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ClosureExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ContinueExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONTINUE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FieldExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FIELD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IfExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IF_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IndexExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INDEX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Literal {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LoopExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LOOP_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MethodCallExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == METHOD_CALL_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PrefixExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PREFIX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangeExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ReturnExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TryExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhileExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHILE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for YieldExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == YIELD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UnderscoreExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == UNDERSCORE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for StmtList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STMT_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Label {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LABEL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARG_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArmList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArm {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchGuard {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_GUARD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// NOTE(review): generated `AstNode` impls for the type-syntax struct nodes.
+// Same one-kind-per-struct pattern as above; keep in sync with the generator.
+impl AstNode for ArrayType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for DynTraitType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == DYN_TRAIT_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FnPtrType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == FN_PTR_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ImplTraitType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL_TRAIT_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for InferType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == INFER_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NeverType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == NEVER_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PtrType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PTR_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == REF_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SliceType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBound {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// NOTE(review): generated `AstNode` impls for the pattern struct nodes
+// (plus the record-pattern field helpers at the end). Same boilerplate shape.
+impl AstNode for IdentPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RestPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == REST_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LiteralPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for OrPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == OR_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WildcardPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == WILDCARD_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangePat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == REF_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SlicePat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TuplePat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleStructPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_STRUCT_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstBlockPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_BLOCK_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatFieldList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatField {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// NOTE(review): generated variant-wrapping conversions for the `GenericArg`
+// enum node — one `From` impl per variant struct.
+impl From<TypeArg> for GenericArg {
+    fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
+}
+impl From<AssocTypeArg> for GenericArg {
+    fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
+}
+impl From<LifetimeArg> for GenericArg {
+    fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
+}
+impl From<ConstArg> for GenericArg {
+    fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
+}
+// NOTE(review): generated "enum node" impl: `GenericArg` accepts any of its
+// variants' kinds, and `cast`/`syntax` dispatch per variant. The kind list in
+// `can_cast` must stay in sync with the match arms in `cast` — the generator
+// guarantees this; do not edit by hand.
+impl AstNode for GenericArg {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
+            ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
+            LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
+            CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            GenericArg::TypeArg(it) => &it.syntax,
+            GenericArg::AssocTypeArg(it) => &it.syntax,
+            GenericArg::LifetimeArg(it) => &it.syntax,
+            GenericArg::ConstArg(it) => &it.syntax,
+        }
+    }
+}
+// NOTE(review): generated variant-wrapping conversions for the `Type` enum node.
+impl From<ArrayType> for Type {
+    fn from(node: ArrayType) -> Type { Type::ArrayType(node) }
+}
+impl From<DynTraitType> for Type {
+    fn from(node: DynTraitType) -> Type { Type::DynTraitType(node) }
+}
+impl From<FnPtrType> for Type {
+    fn from(node: FnPtrType) -> Type { Type::FnPtrType(node) }
+}
+impl From<ForType> for Type {
+    fn from(node: ForType) -> Type { Type::ForType(node) }
+}
+impl From<ImplTraitType> for Type {
+    fn from(node: ImplTraitType) -> Type { Type::ImplTraitType(node) }
+}
+impl From<InferType> for Type {
+    fn from(node: InferType) -> Type { Type::InferType(node) }
+}
+impl From<MacroType> for Type {
+    fn from(node: MacroType) -> Type { Type::MacroType(node) }
+}
+impl From<NeverType> for Type {
+    fn from(node: NeverType) -> Type { Type::NeverType(node) }
+}
+impl From<ParenType> for Type {
+    fn from(node: ParenType) -> Type { Type::ParenType(node) }
+}
+impl From<PathType> for Type {
+    fn from(node: PathType) -> Type { Type::PathType(node) }
+}
+impl From<PtrType> for Type {
+    fn from(node: PtrType) -> Type { Type::PtrType(node) }
+}
+impl From<RefType> for Type {
+    fn from(node: RefType) -> Type { Type::RefType(node) }
+}
+impl From<SliceType> for Type {
+    fn from(node: SliceType) -> Type { Type::SliceType(node) }
+}
+impl From<TupleType> for Type {
+    fn from(node: TupleType) -> Type { Type::TupleType(node) }
+}
+// NOTE(review): generated enum-node impl for `Type`: the `can_cast` kind set,
+// the `cast` match, and the `syntax` match all enumerate the same 14 variants
+// and are kept in lockstep by the generator.
+impl AstNode for Type {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            ARRAY_TYPE | DYN_TRAIT_TYPE | FN_PTR_TYPE | FOR_TYPE | IMPL_TRAIT_TYPE | INFER_TYPE
+            | MACRO_TYPE | NEVER_TYPE | PAREN_TYPE | PATH_TYPE | PTR_TYPE | REF_TYPE
+            | SLICE_TYPE | TUPLE_TYPE => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            ARRAY_TYPE => Type::ArrayType(ArrayType { syntax }),
+            DYN_TRAIT_TYPE => Type::DynTraitType(DynTraitType { syntax }),
+            FN_PTR_TYPE => Type::FnPtrType(FnPtrType { syntax }),
+            FOR_TYPE => Type::ForType(ForType { syntax }),
+            IMPL_TRAIT_TYPE => Type::ImplTraitType(ImplTraitType { syntax }),
+            INFER_TYPE => Type::InferType(InferType { syntax }),
+            MACRO_TYPE => Type::MacroType(MacroType { syntax }),
+            NEVER_TYPE => Type::NeverType(NeverType { syntax }),
+            PAREN_TYPE => Type::ParenType(ParenType { syntax }),
+            PATH_TYPE => Type::PathType(PathType { syntax }),
+            PTR_TYPE => Type::PtrType(PtrType { syntax }),
+            REF_TYPE => Type::RefType(RefType { syntax }),
+            SLICE_TYPE => Type::SliceType(SliceType { syntax }),
+            TUPLE_TYPE => Type::TupleType(TupleType { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            Type::ArrayType(it) => &it.syntax,
+            Type::DynTraitType(it) => &it.syntax,
+            Type::FnPtrType(it) => &it.syntax,
+            Type::ForType(it) => &it.syntax,
+            Type::ImplTraitType(it) => &it.syntax,
+            Type::InferType(it) => &it.syntax,
+            Type::MacroType(it) => &it.syntax,
+            Type::NeverType(it) => &it.syntax,
+            Type::ParenType(it) => &it.syntax,
+            Type::PathType(it) => &it.syntax,
+            Type::PtrType(it) => &it.syntax,
+            Type::RefType(it) => &it.syntax,
+            Type::SliceType(it) => &it.syntax,
+            Type::TupleType(it) => &it.syntax,
+        }
+    }
+}
+// NOTE(review): generated variant-wrapping conversions for the `Expr` enum
+// node. `MacroStmts` appearing here shows `Expr` also covers macro statement
+// blobs, not only surface expressions (the matching `AstNode for Expr` impl
+// follows; it is truncated in this view).
+impl From<ArrayExpr> for Expr {
+    fn from(node: ArrayExpr) -> Expr { Expr::ArrayExpr(node) }
+}
+impl From<AwaitExpr> for Expr {
+    fn from(node: AwaitExpr) -> Expr { Expr::AwaitExpr(node) }
+}
+impl From<BinExpr> for Expr {
+    fn from(node: BinExpr) -> Expr { Expr::BinExpr(node) }
+}
+impl From<BlockExpr> for Expr {
+    fn from(node: BlockExpr) -> Expr { Expr::BlockExpr(node) }
+}
+impl From<BoxExpr> for Expr {
+    fn from(node: BoxExpr) -> Expr { Expr::BoxExpr(node) }
+}
+impl From<BreakExpr> for Expr {
+    fn from(node: BreakExpr) -> Expr { Expr::BreakExpr(node) }
+}
+impl From<CallExpr> for Expr {
+    fn from(node: CallExpr) -> Expr { Expr::CallExpr(node) }
+}
+impl From<CastExpr> for Expr {
+    fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) }
+}
+impl From<ClosureExpr> for Expr {
+    fn from(node: ClosureExpr) -> Expr { Expr::ClosureExpr(node) }
+}
+impl From<ContinueExpr> for Expr {
+    fn from(node: ContinueExpr) -> Expr { Expr::ContinueExpr(node) }
+}
+impl From<FieldExpr> for Expr {
+    fn from(node: FieldExpr) -> Expr { Expr::FieldExpr(node) }
+}
+impl From<ForExpr> for Expr {
+    fn from(node: ForExpr) -> Expr { Expr::ForExpr(node) }
+}
+impl From<IfExpr> for Expr {
+    fn from(node: IfExpr) -> Expr { Expr::IfExpr(node) }
+}
+impl From<IndexExpr> for Expr {
+    fn from(node: IndexExpr) -> Expr { Expr::IndexExpr(node) }
+}
+impl From<Literal> for Expr {
+    fn from(node: Literal) -> Expr { Expr::Literal(node) }
+}
+impl From<LoopExpr> for Expr {
+    fn from(node: LoopExpr) -> Expr { Expr::LoopExpr(node) }
+}
+impl From<MacroExpr> for Expr {
+    fn from(node: MacroExpr) -> Expr { Expr::MacroExpr(node) }
+}
+impl From<MacroStmts> for Expr {
+    fn from(node: MacroStmts) -> Expr { Expr::MacroStmts(node) }
+}
+impl From<MatchExpr> for Expr {
+    fn from(node: MatchExpr) -> Expr { Expr::MatchExpr(node) }
+}
+impl From<MethodCallExpr> for Expr {
+    fn from(node: MethodCallExpr) -> Expr { Expr::MethodCallExpr(node) }
+}
+impl From<ParenExpr> for Expr {
+    fn from(node: ParenExpr) -> Expr { Expr::ParenExpr(node) }
+}
+impl From<PathExpr> for Expr {
+    fn from(node: PathExpr) -> Expr { Expr::PathExpr(node) }
+}
+impl From<PrefixExpr> for Expr {
+    fn from(node: PrefixExpr) -> Expr { Expr::PrefixExpr(node) }
+}
+impl From<RangeExpr> for Expr {
+    fn from(node: RangeExpr) -> Expr { Expr::RangeExpr(node) }
+}
+impl From<RecordExpr> for Expr {
+    fn from(node: RecordExpr) -> Expr { Expr::RecordExpr(node) }
+}
+impl From<RefExpr> for Expr {
+    fn from(node: RefExpr) -> Expr { Expr::RefExpr(node) }
+}
+impl From<ReturnExpr> for Expr {
+    fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) }
+}
+impl From<TryExpr> for Expr {
+    fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) }
+}
+impl From<TupleExpr> for Expr {
+    fn from(node: TupleExpr) -> Expr { Expr::TupleExpr(node) }
+}
+impl From<WhileExpr> for Expr {
+    fn from(node: WhileExpr) -> Expr { Expr::WhileExpr(node) }
+}
+impl From<YieldExpr> for Expr {
+    fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) }
+}
+impl From<LetExpr> for Expr {
+    fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) }
+}
+impl From<UnderscoreExpr> for Expr {
+    fn from(node: UnderscoreExpr) -> Expr { Expr::UnderscoreExpr(node) }
+}
+impl AstNode for Expr {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BLOCK_EXPR | BOX_EXPR | BREAK_EXPR | CALL_EXPR
+ | CAST_EXPR | CLOSURE_EXPR | CONTINUE_EXPR | FIELD_EXPR | FOR_EXPR | IF_EXPR
+ | INDEX_EXPR | LITERAL | LOOP_EXPR | MACRO_EXPR | MACRO_STMTS | MATCH_EXPR
+ | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR | PREFIX_EXPR | RANGE_EXPR
+ | RECORD_EXPR | REF_EXPR | RETURN_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR
+ | YIELD_EXPR | LET_EXPR | UNDERSCORE_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ARRAY_EXPR => Expr::ArrayExpr(ArrayExpr { syntax }),
+ AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }),
+ BIN_EXPR => Expr::BinExpr(BinExpr { syntax }),
+ BLOCK_EXPR => Expr::BlockExpr(BlockExpr { syntax }),
+ BOX_EXPR => Expr::BoxExpr(BoxExpr { syntax }),
+ BREAK_EXPR => Expr::BreakExpr(BreakExpr { syntax }),
+ CALL_EXPR => Expr::CallExpr(CallExpr { syntax }),
+ CAST_EXPR => Expr::CastExpr(CastExpr { syntax }),
+ CLOSURE_EXPR => Expr::ClosureExpr(ClosureExpr { syntax }),
+ CONTINUE_EXPR => Expr::ContinueExpr(ContinueExpr { syntax }),
+ FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }),
+ FOR_EXPR => Expr::ForExpr(ForExpr { syntax }),
+ IF_EXPR => Expr::IfExpr(IfExpr { syntax }),
+ INDEX_EXPR => Expr::IndexExpr(IndexExpr { syntax }),
+ LITERAL => Expr::Literal(Literal { syntax }),
+ LOOP_EXPR => Expr::LoopExpr(LoopExpr { syntax }),
+ MACRO_EXPR => Expr::MacroExpr(MacroExpr { syntax }),
+ MACRO_STMTS => Expr::MacroStmts(MacroStmts { syntax }),
+ MATCH_EXPR => Expr::MatchExpr(MatchExpr { syntax }),
+ METHOD_CALL_EXPR => Expr::MethodCallExpr(MethodCallExpr { syntax }),
+ PAREN_EXPR => Expr::ParenExpr(ParenExpr { syntax }),
+ PATH_EXPR => Expr::PathExpr(PathExpr { syntax }),
+ PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }),
+ RANGE_EXPR => Expr::RangeExpr(RangeExpr { syntax }),
+ RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }),
+ REF_EXPR => Expr::RefExpr(RefExpr { syntax }),
+ RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }),
+ TRY_EXPR => Expr::TryExpr(TryExpr { syntax }),
+ TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }),
+ WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }),
+ YIELD_EXPR => Expr::YieldExpr(YieldExpr { syntax }),
+ LET_EXPR => Expr::LetExpr(LetExpr { syntax }),
+ UNDERSCORE_EXPR => Expr::UnderscoreExpr(UnderscoreExpr { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Expr::ArrayExpr(it) => &it.syntax,
+ Expr::AwaitExpr(it) => &it.syntax,
+ Expr::BinExpr(it) => &it.syntax,
+ Expr::BlockExpr(it) => &it.syntax,
+ Expr::BoxExpr(it) => &it.syntax,
+ Expr::BreakExpr(it) => &it.syntax,
+ Expr::CallExpr(it) => &it.syntax,
+ Expr::CastExpr(it) => &it.syntax,
+ Expr::ClosureExpr(it) => &it.syntax,
+ Expr::ContinueExpr(it) => &it.syntax,
+ Expr::FieldExpr(it) => &it.syntax,
+ Expr::ForExpr(it) => &it.syntax,
+ Expr::IfExpr(it) => &it.syntax,
+ Expr::IndexExpr(it) => &it.syntax,
+ Expr::Literal(it) => &it.syntax,
+ Expr::LoopExpr(it) => &it.syntax,
+ Expr::MacroExpr(it) => &it.syntax,
+ Expr::MacroStmts(it) => &it.syntax,
+ Expr::MatchExpr(it) => &it.syntax,
+ Expr::MethodCallExpr(it) => &it.syntax,
+ Expr::ParenExpr(it) => &it.syntax,
+ Expr::PathExpr(it) => &it.syntax,
+ Expr::PrefixExpr(it) => &it.syntax,
+ Expr::RangeExpr(it) => &it.syntax,
+ Expr::RecordExpr(it) => &it.syntax,
+ Expr::RefExpr(it) => &it.syntax,
+ Expr::ReturnExpr(it) => &it.syntax,
+ Expr::TryExpr(it) => &it.syntax,
+ Expr::TupleExpr(it) => &it.syntax,
+ Expr::WhileExpr(it) => &it.syntax,
+ Expr::YieldExpr(it) => &it.syntax,
+ Expr::LetExpr(it) => &it.syntax,
+ Expr::UnderscoreExpr(it) => &it.syntax,
+ }
+ }
+}
+// `From` impls: wrap each concrete item node into the `Item` enum variant of
+// the same name.
+impl From<Const> for Item {
+ fn from(node: Const) -> Item { Item::Const(node) }
+}
+impl From<Enum> for Item {
+ fn from(node: Enum) -> Item { Item::Enum(node) }
+}
+impl From<ExternBlock> for Item {
+ fn from(node: ExternBlock) -> Item { Item::ExternBlock(node) }
+}
+impl From<ExternCrate> for Item {
+ fn from(node: ExternCrate) -> Item { Item::ExternCrate(node) }
+}
+impl From<Fn> for Item {
+ fn from(node: Fn) -> Item { Item::Fn(node) }
+}
+impl From<Impl> for Item {
+ fn from(node: Impl) -> Item { Item::Impl(node) }
+}
+impl From<MacroCall> for Item {
+ fn from(node: MacroCall) -> Item { Item::MacroCall(node) }
+}
+impl From<MacroRules> for Item {
+ fn from(node: MacroRules) -> Item { Item::MacroRules(node) }
+}
+impl From<MacroDef> for Item {
+ fn from(node: MacroDef) -> Item { Item::MacroDef(node) }
+}
+impl From<Module> for Item {
+ fn from(node: Module) -> Item { Item::Module(node) }
+}
+impl From<Static> for Item {
+ fn from(node: Static) -> Item { Item::Static(node) }
+}
+impl From<Struct> for Item {
+ fn from(node: Struct) -> Item { Item::Struct(node) }
+}
+impl From<Trait> for Item {
+ fn from(node: Trait) -> Item { Item::Trait(node) }
+}
+impl From<TypeAlias> for Item {
+ fn from(node: TypeAlias) -> Item { Item::TypeAlias(node) }
+}
+impl From<Union> for Item {
+ fn from(node: Union) -> Item { Item::Union(node) }
+}
+impl From<Use> for Item {
+ fn from(node: Use) -> Item { Item::Use(node) }
+}
+// `AstNode for Item`: kind test, checked downcast, and syntax-node access,
+// dispatching on the item `SyntaxKind`s.
+impl AstNode for Item {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL | MACRO_CALL | MACRO_RULES
+ | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST => Item::Const(Const { syntax }),
+ ENUM => Item::Enum(Enum { syntax }),
+ EXTERN_BLOCK => Item::ExternBlock(ExternBlock { syntax }),
+ EXTERN_CRATE => Item::ExternCrate(ExternCrate { syntax }),
+ FN => Item::Fn(Fn { syntax }),
+ IMPL => Item::Impl(Impl { syntax }),
+ MACRO_CALL => Item::MacroCall(MacroCall { syntax }),
+ MACRO_RULES => Item::MacroRules(MacroRules { syntax }),
+ MACRO_DEF => Item::MacroDef(MacroDef { syntax }),
+ MODULE => Item::Module(Module { syntax }),
+ STATIC => Item::Static(Static { syntax }),
+ STRUCT => Item::Struct(Struct { syntax }),
+ TRAIT => Item::Trait(Trait { syntax }),
+ TYPE_ALIAS => Item::TypeAlias(TypeAlias { syntax }),
+ UNION => Item::Union(Union { syntax }),
+ USE => Item::Use(Use { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Item::Const(it) => &it.syntax,
+ Item::Enum(it) => &it.syntax,
+ Item::ExternBlock(it) => &it.syntax,
+ Item::ExternCrate(it) => &it.syntax,
+ Item::Fn(it) => &it.syntax,
+ Item::Impl(it) => &it.syntax,
+ Item::MacroCall(it) => &it.syntax,
+ Item::MacroRules(it) => &it.syntax,
+ Item::MacroDef(it) => &it.syntax,
+ Item::Module(it) => &it.syntax,
+ Item::Static(it) => &it.syntax,
+ Item::Struct(it) => &it.syntax,
+ Item::Trait(it) => &it.syntax,
+ Item::TypeAlias(it) => &it.syntax,
+ Item::Union(it) => &it.syntax,
+ Item::Use(it) => &it.syntax,
+ }
+ }
+}
+// `From` impls wrapping each statement node into the `Stmt` enum. Note `Item`
+// (itself an enum) is also a `Stmt` variant here.
+impl From<ExprStmt> for Stmt {
+ fn from(node: ExprStmt) -> Stmt { Stmt::ExprStmt(node) }
+}
+impl From<Item> for Stmt {
+ fn from(node: Item) -> Stmt { Stmt::Item(node) }
+}
+impl From<LetStmt> for Stmt {
+ fn from(node: LetStmt) -> Stmt { Stmt::LetStmt(node) }
+}
+// `From` impls: wrap each concrete pattern node into the `Pat` enum variant of
+// the same name.
+impl From<IdentPat> for Pat {
+ fn from(node: IdentPat) -> Pat { Pat::IdentPat(node) }
+}
+impl From<BoxPat> for Pat {
+ fn from(node: BoxPat) -> Pat { Pat::BoxPat(node) }
+}
+impl From<RestPat> for Pat {
+ fn from(node: RestPat) -> Pat { Pat::RestPat(node) }
+}
+impl From<LiteralPat> for Pat {
+ fn from(node: LiteralPat) -> Pat { Pat::LiteralPat(node) }
+}
+impl From<MacroPat> for Pat {
+ fn from(node: MacroPat) -> Pat { Pat::MacroPat(node) }
+}
+impl From<OrPat> for Pat {
+ fn from(node: OrPat) -> Pat { Pat::OrPat(node) }
+}
+impl From<ParenPat> for Pat {
+ fn from(node: ParenPat) -> Pat { Pat::ParenPat(node) }
+}
+impl From<PathPat> for Pat {
+ fn from(node: PathPat) -> Pat { Pat::PathPat(node) }
+}
+impl From<WildcardPat> for Pat {
+ fn from(node: WildcardPat) -> Pat { Pat::WildcardPat(node) }
+}
+impl From<RangePat> for Pat {
+ fn from(node: RangePat) -> Pat { Pat::RangePat(node) }
+}
+impl From<RecordPat> for Pat {
+ fn from(node: RecordPat) -> Pat { Pat::RecordPat(node) }
+}
+impl From<RefPat> for Pat {
+ fn from(node: RefPat) -> Pat { Pat::RefPat(node) }
+}
+impl From<SlicePat> for Pat {
+ fn from(node: SlicePat) -> Pat { Pat::SlicePat(node) }
+}
+impl From<TuplePat> for Pat {
+ fn from(node: TuplePat) -> Pat { Pat::TuplePat(node) }
+}
+impl From<TupleStructPat> for Pat {
+ fn from(node: TupleStructPat) -> Pat { Pat::TupleStructPat(node) }
+}
+impl From<ConstBlockPat> for Pat {
+ fn from(node: ConstBlockPat) -> Pat { Pat::ConstBlockPat(node) }
+}
+// `AstNode for Pat`: kind test, checked downcast, and syntax-node access for
+// pattern `SyntaxKind`s.
+impl AstNode for Pat {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ IDENT_PAT | BOX_PAT | REST_PAT | LITERAL_PAT | MACRO_PAT | OR_PAT | PAREN_PAT
+ | PATH_PAT | WILDCARD_PAT | RANGE_PAT | RECORD_PAT | REF_PAT | SLICE_PAT
+ | TUPLE_PAT | TUPLE_STRUCT_PAT | CONST_BLOCK_PAT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ IDENT_PAT => Pat::IdentPat(IdentPat { syntax }),
+ BOX_PAT => Pat::BoxPat(BoxPat { syntax }),
+ REST_PAT => Pat::RestPat(RestPat { syntax }),
+ LITERAL_PAT => Pat::LiteralPat(LiteralPat { syntax }),
+ MACRO_PAT => Pat::MacroPat(MacroPat { syntax }),
+ OR_PAT => Pat::OrPat(OrPat { syntax }),
+ PAREN_PAT => Pat::ParenPat(ParenPat { syntax }),
+ PATH_PAT => Pat::PathPat(PathPat { syntax }),
+ WILDCARD_PAT => Pat::WildcardPat(WildcardPat { syntax }),
+ RANGE_PAT => Pat::RangePat(RangePat { syntax }),
+ RECORD_PAT => Pat::RecordPat(RecordPat { syntax }),
+ REF_PAT => Pat::RefPat(RefPat { syntax }),
+ SLICE_PAT => Pat::SlicePat(SlicePat { syntax }),
+ TUPLE_PAT => Pat::TuplePat(TuplePat { syntax }),
+ TUPLE_STRUCT_PAT => Pat::TupleStructPat(TupleStructPat { syntax }),
+ CONST_BLOCK_PAT => Pat::ConstBlockPat(ConstBlockPat { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Pat::IdentPat(it) => &it.syntax,
+ Pat::BoxPat(it) => &it.syntax,
+ Pat::RestPat(it) => &it.syntax,
+ Pat::LiteralPat(it) => &it.syntax,
+ Pat::MacroPat(it) => &it.syntax,
+ Pat::OrPat(it) => &it.syntax,
+ Pat::ParenPat(it) => &it.syntax,
+ Pat::PathPat(it) => &it.syntax,
+ Pat::WildcardPat(it) => &it.syntax,
+ Pat::RangePat(it) => &it.syntax,
+ Pat::RecordPat(it) => &it.syntax,
+ Pat::RefPat(it) => &it.syntax,
+ Pat::SlicePat(it) => &it.syntax,
+ Pat::TuplePat(it) => &it.syntax,
+ Pat::TupleStructPat(it) => &it.syntax,
+ Pat::ConstBlockPat(it) => &it.syntax,
+ }
+ }
+}
+// `FieldList` groups the two struct-field list shapes: named (record) fields
+// and positional (tuple) fields.
+impl From<RecordFieldList> for FieldList {
+ fn from(node: RecordFieldList) -> FieldList { FieldList::RecordFieldList(node) }
+}
+impl From<TupleFieldList> for FieldList {
+ fn from(node: TupleFieldList) -> FieldList { FieldList::TupleFieldList(node) }
+}
+impl AstNode for FieldList {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ RECORD_FIELD_LIST | TUPLE_FIELD_LIST => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ RECORD_FIELD_LIST => FieldList::RecordFieldList(RecordFieldList { syntax }),
+ TUPLE_FIELD_LIST => FieldList::TupleFieldList(TupleFieldList { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ FieldList::RecordFieldList(it) => &it.syntax,
+ FieldList::TupleFieldList(it) => &it.syntax,
+ }
+ }
+}
+// `Adt` groups the three algebraic data type declarations: enum, struct, union.
+impl From<Enum> for Adt {
+ fn from(node: Enum) -> Adt { Adt::Enum(node) }
+}
+impl From<Struct> for Adt {
+ fn from(node: Struct) -> Adt { Adt::Struct(node) }
+}
+impl From<Union> for Adt {
+ fn from(node: Union) -> Adt { Adt::Union(node) }
+}
+impl AstNode for Adt {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ENUM | STRUCT | UNION => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ENUM => Adt::Enum(Enum { syntax }),
+ STRUCT => Adt::Struct(Struct { syntax }),
+ UNION => Adt::Union(Union { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Adt::Enum(it) => &it.syntax,
+ Adt::Struct(it) => &it.syntax,
+ Adt::Union(it) => &it.syntax,
+ }
+ }
+}
+// `AssocItem` groups the items allowed inside `impl`/`trait` bodies:
+// consts, fns, macro calls, and type aliases.
+impl From<Const> for AssocItem {
+ fn from(node: Const) -> AssocItem { AssocItem::Const(node) }
+}
+impl From<Fn> for AssocItem {
+ fn from(node: Fn) -> AssocItem { AssocItem::Fn(node) }
+}
+impl From<MacroCall> for AssocItem {
+ fn from(node: MacroCall) -> AssocItem { AssocItem::MacroCall(node) }
+}
+impl From<TypeAlias> for AssocItem {
+ fn from(node: TypeAlias) -> AssocItem { AssocItem::TypeAlias(node) }
+}
+impl AstNode for AssocItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | FN | MACRO_CALL | TYPE_ALIAS => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST => AssocItem::Const(Const { syntax }),
+ FN => AssocItem::Fn(Fn { syntax }),
+ MACRO_CALL => AssocItem::MacroCall(MacroCall { syntax }),
+ TYPE_ALIAS => AssocItem::TypeAlias(TypeAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ AssocItem::Const(it) => &it.syntax,
+ AssocItem::Fn(it) => &it.syntax,
+ AssocItem::MacroCall(it) => &it.syntax,
+ AssocItem::TypeAlias(it) => &it.syntax,
+ }
+ }
+}
+// `ExternItem` groups the items allowed inside an `extern { … }` block:
+// fns, macro calls, statics, and type aliases.
+impl From<Fn> for ExternItem {
+ fn from(node: Fn) -> ExternItem { ExternItem::Fn(node) }
+}
+impl From<MacroCall> for ExternItem {
+ fn from(node: MacroCall) -> ExternItem { ExternItem::MacroCall(node) }
+}
+impl From<Static> for ExternItem {
+ fn from(node: Static) -> ExternItem { ExternItem::Static(node) }
+}
+impl From<TypeAlias> for ExternItem {
+ fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) }
+}
+impl AstNode for ExternItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ FN | MACRO_CALL | STATIC | TYPE_ALIAS => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ FN => ExternItem::Fn(Fn { syntax }),
+ MACRO_CALL => ExternItem::MacroCall(MacroCall { syntax }),
+ STATIC => ExternItem::Static(Static { syntax }),
+ TYPE_ALIAS => ExternItem::TypeAlias(TypeAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ ExternItem::Fn(it) => &it.syntax,
+ ExternItem::MacroCall(it) => &it.syntax,
+ ExternItem::Static(it) => &it.syntax,
+ ExternItem::TypeAlias(it) => &it.syntax,
+ }
+ }
+}
+// `GenericParam` groups the three generic-parameter kinds: const, lifetime,
+// and type parameters.
+impl From<ConstParam> for GenericParam {
+ fn from(node: ConstParam) -> GenericParam { GenericParam::ConstParam(node) }
+}
+impl From<LifetimeParam> for GenericParam {
+ fn from(node: LifetimeParam) -> GenericParam { GenericParam::LifetimeParam(node) }
+}
+impl From<TypeParam> for GenericParam {
+ fn from(node: TypeParam) -> GenericParam { GenericParam::TypeParam(node) }
+}
+impl AstNode for GenericParam {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST_PARAM => GenericParam::ConstParam(ConstParam { syntax }),
+ LIFETIME_PARAM => GenericParam::LifetimeParam(LifetimeParam { syntax }),
+ TYPE_PARAM => GenericParam::TypeParam(TypeParam { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericParam::ConstParam(it) => &it.syntax,
+ GenericParam::LifetimeParam(it) => &it.syntax,
+ GenericParam::TypeParam(it) => &it.syntax,
+ }
+ }
+}
+// Type-erased wrapper: holds the syntax node of any node implementing
+// `ast::HasArgList` (call and method-call expressions).
+impl AnyHasArgList {
+ #[inline]
+ pub fn new<T: ast::HasArgList>(node: T) -> AnyHasArgList {
+ AnyHasArgList { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasArgList {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CALL_EXPR | METHOD_CALL_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// Type-erased wrapper over any node implementing `ast::HasAttrs`; the kind
+// list below enumerates every syntax kind that can carry attributes.
+impl AnyHasAttrs {
+ #[inline]
+ pub fn new<T: ast::HasAttrs>(node: T) -> AnyHasAttrs {
+ AnyHasAttrs { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasAttrs {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_CALL
+ | SOURCE_FILE
+ | CONST
+ | ENUM
+ | EXTERN_BLOCK
+ | EXTERN_CRATE
+ | FN
+ | IMPL
+ | MACRO_RULES
+ | MACRO_DEF
+ | MODULE
+ | STATIC
+ | STRUCT
+ | TRAIT
+ | TYPE_ALIAS
+ | UNION
+ | USE
+ | ITEM_LIST
+ | BLOCK_EXPR
+ | SELF_PARAM
+ | PARAM
+ | RECORD_FIELD
+ | TUPLE_FIELD
+ | VARIANT
+ | ASSOC_ITEM_LIST
+ | EXTERN_ITEM_LIST
+ | CONST_PARAM
+ | LIFETIME_PARAM
+ | TYPE_PARAM
+ | LET_STMT
+ | ARRAY_EXPR
+ | AWAIT_EXPR
+ | BIN_EXPR
+ | BOX_EXPR
+ | BREAK_EXPR
+ | CALL_EXPR
+ | CAST_EXPR
+ | CLOSURE_EXPR
+ | CONTINUE_EXPR
+ | FIELD_EXPR
+ | FOR_EXPR
+ | IF_EXPR
+ | INDEX_EXPR
+ | LITERAL
+ | LOOP_EXPR
+ | MATCH_EXPR
+ | METHOD_CALL_EXPR
+ | PAREN_EXPR
+ | PATH_EXPR
+ | PREFIX_EXPR
+ | RANGE_EXPR
+ | REF_EXPR
+ | RETURN_EXPR
+ | TRY_EXPR
+ | TUPLE_EXPR
+ | WHILE_EXPR
+ | YIELD_EXPR
+ | LET_EXPR
+ | UNDERSCORE_EXPR
+ | STMT_LIST
+ | RECORD_EXPR_FIELD_LIST
+ | RECORD_EXPR_FIELD
+ | MATCH_ARM_LIST
+ | MATCH_ARM
+ | IDENT_PAT
+ | REST_PAT
+ | RECORD_PAT_FIELD => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// Type-erased wrapper over any node implementing `ast::HasDocComments`
+// (items, fields, and variants).
+impl AnyHasDocComments {
+ #[inline]
+ pub fn new<T: ast::HasDocComments>(node: T) -> AnyHasDocComments {
+ AnyHasDocComments { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasDocComments {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_CALL | SOURCE_FILE | CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL
+ | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION
+ | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// Type-erased wrapper over any node implementing `ast::HasGenericParams`.
+impl AnyHasGenericParams {
+ #[inline]
+ pub fn new<T: ast::HasGenericParams>(node: T) -> AnyHasGenericParams {
+ AnyHasGenericParams { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasGenericParams {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// Type-erased wrapper over any node implementing `ast::HasLoopBody`
+// (`for`, `loop`, and `while` expressions).
+impl AnyHasLoopBody {
+ #[inline]
+ pub fn new<T: ast::HasLoopBody>(node: T) -> AnyHasLoopBody {
+ AnyHasLoopBody { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasLoopBody {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ FOR_EXPR | LOOP_EXPR | WHILE_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// Type-erased wrapper over any node implementing `ast::HasModuleItem`
+// (macro item lists, source files, and item lists).
+impl AnyHasModuleItem {
+ #[inline]
+ pub fn new<T: ast::HasModuleItem>(node: T) -> AnyHasModuleItem {
+ AnyHasModuleItem { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasModuleItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_ITEMS | SOURCE_FILE | ITEM_LIST => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// Type-erased wrapper over any node implementing `ast::HasName`.
+impl AnyHasName {
+ #[inline]
+ pub fn new<T: ast::HasName>(node: T) -> AnyHasName {
+ AnyHasName { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasName {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | FN | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT
+ | TYPE_ALIAS | UNION | RENAME | SELF_PARAM | RECORD_FIELD | VARIANT | CONST_PARAM
+ | TYPE_PARAM | IDENT_PAT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// Type-erased wrapper over any node implementing `ast::HasTypeBounds`.
+impl AnyHasTypeBounds {
+ #[inline]
+ pub fn new<T: ast::HasTypeBounds>(node: T) -> AnyHasTypeBounds {
+ AnyHasTypeBounds { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasTypeBounds {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ASSOC_TYPE_ARG | TRAIT | TYPE_ALIAS | LIFETIME_PARAM | TYPE_PARAM | WHERE_PRED => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// Type-erased wrapper over any node implementing `ast::HasVisibility`
+// (items, fields, and variants that can carry a `pub` modifier).
+impl AnyHasVisibility {
+ #[inline]
+ pub fn new<T: ast::HasVisibility>(node: T) -> AnyHasVisibility {
+ AnyHasVisibility { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasVisibility {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | EXTERN_CRATE | FN | IMPL | MACRO_RULES | MACRO_DEF | MODULE | STATIC
+ | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => {
+ true
+ }
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+// `Display` impls: every typed AST node renders as the source text of its
+// underlying syntax node, by delegating to the `SyntaxNode`'s own `Display`.
+impl std::fmt::Display for GenericArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Type {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Expr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Item {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Stmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Pat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Adt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocItem {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternItem {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for NameRef {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Lifetime {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Path {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathSegment {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericArgList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParamList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RetType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocTypeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LifetimeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericParamList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeBoundList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroCall {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Attr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroItems {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroStmts {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SourceFile {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Const {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Enum {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternBlock {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternCrate {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Fn {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Impl {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroRules {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroDef {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Module {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Static {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Struct {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Trait {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeAlias {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Union {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Use {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Visibility {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Rename {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UseTree {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UseTreeList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Abi {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WhereClause {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BlockExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SelfParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Param {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for VariantList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Variant {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LifetimeParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WherePred {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Meta {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExprStmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetStmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetElse {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArrayExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AwaitExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BinExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BoxExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BreakExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for CallExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for CastExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ClosureExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ContinueExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FieldExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ForExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IfExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IndexExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Literal {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LoopExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MethodCallExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PrefixExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RangeExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ReturnExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TryExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WhileExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for YieldExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UnderscoreExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for StmtList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Label {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExprFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExprField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArgList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchArmList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchArm {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchGuard {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArrayType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for DynTraitType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FnPtrType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ForType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ImplTraitType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for InferType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for NeverType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PtrType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SliceType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeBound {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IdentPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BoxPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RestPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LiteralPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for OrPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WildcardPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RangePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SlicePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TuplePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleStructPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstBlockPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPatFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPatField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
new file mode 100644
index 000000000..a3209c5ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
@@ -0,0 +1,196 @@
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+use crate::{
+ ast::AstToken,
+ SyntaxKind::{self, *},
+ SyntaxToken,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Whitespace {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Whitespace {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Whitespace {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHITESPACE }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Comment {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Comment {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Comment {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == COMMENT }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct String {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for String {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for String {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STRING }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ByteString {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for ByteString {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for ByteString {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE_STRING }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IntNumber {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for IntNumber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for IntNumber {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INT_NUMBER }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FloatNumber {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for FloatNumber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for FloatNumber {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FLOAT_NUMBER }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Char {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Char {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Char {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CHAR }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Byte {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Byte {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Byte {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Ident {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Ident {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Ident {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
new file mode 100644
index 000000000..5908dda8e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -0,0 +1,901 @@
+//! This module contains free-standing functions for creating AST fragments out
+//! of smaller pieces.
+//!
+//! Note that all functions here intended to be stupid constructors, which just
+//! assemble a finish node from immediate children. If you want to do something
+//! smarter than that, it belongs to the `ext` submodule.
+//!
+//! Keep in mind that `from_text` functions should be kept private. The public
+//! API should require to assemble every node piecewise. The trick of
+//! `parse(format!())` we use internally is an implementation detail -- long
+//! term, it will be replaced with direct tree manipulation.
+use itertools::Itertools;
+use stdx::{format_to, never};
+
+use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken};
+
+/// While the parent module defines basic atomic "constructors", the `ext`
+/// module defines shortcuts for common things.
+///
+/// It's named `ext` rather than `shortcuts` just to keep it short.
+pub mod ext {
+ use super::*;
+
+ pub fn simple_ident_pat(name: ast::Name) -> ast::IdentPat {
+ return from_text(&name.text());
+
+ fn from_text(text: &str) -> ast::IdentPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+ }
+ pub fn ident_path(ident: &str) -> ast::Path {
+ path_unqualified(path_segment(name_ref(ident)))
+ }
+
+ pub fn path_from_idents<'a>(
+ parts: impl std::iter::IntoIterator<Item = &'a str>,
+ ) -> Option<ast::Path> {
+ let mut iter = parts.into_iter();
+ let base = ext::ident_path(iter.next()?);
+ let path = iter.fold(base, |base, s| {
+ let path = ext::ident_path(s);
+ path_concat(base, path)
+ });
+ Some(path)
+ }
+
+ pub fn field_from_idents<'a>(
+ parts: impl std::iter::IntoIterator<Item = &'a str>,
+ ) -> Option<ast::Expr> {
+ let mut iter = parts.into_iter();
+ let base = expr_path(ext::ident_path(iter.next()?));
+ let expr = iter.fold(base, expr_field);
+ Some(expr)
+ }
+
+ pub fn expr_unreachable() -> ast::Expr {
+ expr_from_text("unreachable!()")
+ }
+ pub fn expr_todo() -> ast::Expr {
+ expr_from_text("todo!()")
+ }
+ pub fn expr_ty_default(ty: &ast::Type) -> ast::Expr {
+ expr_from_text(&format!("{}::default()", ty))
+ }
+ pub fn expr_ty_new(ty: &ast::Type) -> ast::Expr {
+ expr_from_text(&format!("{}::new()", ty))
+ }
+
+ pub fn zero_number() -> ast::Expr {
+ expr_from_text("0")
+ }
+ pub fn zero_float() -> ast::Expr {
+ expr_from_text("0.0")
+ }
+ pub fn empty_str() -> ast::Expr {
+ expr_from_text(r#""""#)
+ }
+ pub fn empty_char() -> ast::Expr {
+ expr_from_text("'\x00'")
+ }
+ pub fn default_bool() -> ast::Expr {
+ expr_from_text("false")
+ }
+ pub fn option_none() -> ast::Expr {
+ expr_from_text("None")
+ }
+ pub fn empty_block_expr() -> ast::BlockExpr {
+ block_expr(None, None)
+ }
+
+ pub fn ty_bool() -> ast::Type {
+ ty_path(ident_path("bool"))
+ }
+ pub fn ty_option(t: ast::Type) -> ast::Type {
+ ty_from_text(&format!("Option<{}>", t))
+ }
+ pub fn ty_result(t: ast::Type, e: ast::Type) -> ast::Type {
+ ty_from_text(&format!("Result<{}, {}>", t, e))
+ }
+}
+
+pub fn name(text: &str) -> ast::Name {
+ ast_from_text(&format!("mod {}{};", raw_ident_esc(text), text))
+}
+pub fn name_ref(text: &str) -> ast::NameRef {
+ ast_from_text(&format!("fn f() {{ {}{}; }}", raw_ident_esc(text), text))
+}
+fn raw_ident_esc(ident: &str) -> &'static str {
+ let is_keyword = parser::SyntaxKind::from_keyword(ident).is_some();
+ if is_keyword && !matches!(ident, "self" | "crate" | "super" | "Self") {
+ "r#"
+ } else {
+ ""
+ }
+}
+
+pub fn lifetime(text: &str) -> ast::Lifetime {
+ let mut text = text;
+ let tmp;
+ if never!(!text.starts_with('\'')) {
+ tmp = format!("'{}", text);
+ text = &tmp;
+ }
+ ast_from_text(&format!("fn f<{}>() {{ }}", text))
+}
+
+// FIXME: replace stringly-typed constructor with a family of typed ctors, a-la
+// `expr_xxx`.
+pub fn ty(text: &str) -> ast::Type {
+ ty_from_text(text)
+}
+pub fn ty_placeholder() -> ast::Type {
+ ty_from_text("_")
+}
+pub fn ty_unit() -> ast::Type {
+ ty_from_text("()")
+}
+pub fn ty_tuple(types: impl IntoIterator<Item = ast::Type>) -> ast::Type {
+ let mut count: usize = 0;
+ let mut contents = types.into_iter().inspect(|_| count += 1).join(", ");
+ if count == 1 {
+ contents.push(',');
+ }
+
+ ty_from_text(&format!("({})", contents))
+}
+pub fn ty_ref(target: ast::Type, exclusive: bool) -> ast::Type {
+ ty_from_text(&if exclusive { format!("&mut {}", target) } else { format!("&{}", target) })
+}
+pub fn ty_path(path: ast::Path) -> ast::Type {
+ ty_from_text(&path.to_string())
+}
+fn ty_from_text(text: &str) -> ast::Type {
+ ast_from_text(&format!("type _T = {};", text))
+}
+
+pub fn assoc_item_list() -> ast::AssocItemList {
+ ast_from_text("impl C for D {}")
+}
+
+pub fn impl_(
+ ty: ast::Path,
+ params: Option<ast::GenericParamList>,
+ ty_params: Option<ast::GenericParamList>,
+) -> ast::Impl {
+ let params = match params {
+ Some(params) => params.to_string(),
+ None => String::new(),
+ };
+ let ty_params = match ty_params {
+ Some(params) => params.to_string(),
+ None => String::new(),
+ };
+ ast_from_text(&format!("impl{} {}{} {{}}", params, ty, ty_params))
+}
+
+pub fn impl_trait(
+ trait_: ast::Path,
+ ty: ast::Path,
+ ty_params: Option<ast::GenericParamList>,
+) -> ast::Impl {
+ let ty_params = ty_params.map_or_else(String::new, |params| params.to_string());
+ ast_from_text(&format!("impl{2} {} for {}{2} {{}}", trait_, ty, ty_params))
+}
+
+pub(crate) fn generic_arg_list() -> ast::GenericArgList {
+ ast_from_text("const S: T<> = ();")
+}
+
+pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment {
+ ast_from_text(&format!("type __ = {};", name_ref))
+}
+
+pub fn path_segment_ty(type_ref: ast::Type, trait_ref: Option<ast::PathType>) -> ast::PathSegment {
+ let text = match trait_ref {
+ Some(trait_ref) => format!("fn f(x: <{} as {}>) {{}}", type_ref, trait_ref),
+ None => format!("fn f(x: <{}>) {{}}", type_ref),
+ };
+ ast_from_text(&text)
+}
+
+pub fn path_segment_self() -> ast::PathSegment {
+ ast_from_text("use self;")
+}
+
+pub fn path_segment_super() -> ast::PathSegment {
+ ast_from_text("use super;")
+}
+
+pub fn path_segment_crate() -> ast::PathSegment {
+ ast_from_text("use crate;")
+}
+
+pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path {
+ ast_from_text(&format!("type __ = {};", segment))
+}
+
+pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path {
+ ast_from_text(&format!("{}::{}", qual, segment))
+}
+// FIXME: path concatenation operation doesn't make sense as AST op.
+pub fn path_concat(first: ast::Path, second: ast::Path) -> ast::Path {
+ ast_from_text(&format!("type __ = {}::{};", first, second))
+}
+
+pub fn path_from_segments(
+ segments: impl IntoIterator<Item = ast::PathSegment>,
+ is_abs: bool,
+) -> ast::Path {
+ let segments = segments.into_iter().map(|it| it.syntax().clone()).join("::");
+ ast_from_text(&if is_abs {
+ format!("fn f(x: ::{}) {{}}", segments)
+ } else {
+ format!("fn f(x: {}) {{}}", segments)
+ })
+}
+
+pub fn join_paths(paths: impl IntoIterator<Item = ast::Path>) -> ast::Path {
+ let paths = paths.into_iter().map(|it| it.syntax().clone()).join("::");
+ ast_from_text(&format!("type __ = {};", paths))
+}
+
+// FIXME: should not be pub
+pub fn path_from_text(text: &str) -> ast::Path {
+ ast_from_text(&format!("fn main() {{ let test = {}; }}", text))
+}
+
+pub fn use_tree_glob() -> ast::UseTree {
+ ast_from_text("use *;")
+}
+pub fn use_tree(
+ path: ast::Path,
+ use_tree_list: Option<ast::UseTreeList>,
+ alias: Option<ast::Rename>,
+ add_star: bool,
+) -> ast::UseTree {
+ let mut buf = "use ".to_string();
+ buf += &path.syntax().to_string();
+ if let Some(use_tree_list) = use_tree_list {
+ format_to!(buf, "::{}", use_tree_list);
+ }
+ if add_star {
+ buf += "::*";
+ }
+
+ if let Some(alias) = alias {
+ format_to!(buf, " {}", alias);
+ }
+ ast_from_text(&buf)
+}
+
+pub fn use_tree_list(use_trees: impl IntoIterator<Item = ast::UseTree>) -> ast::UseTreeList {
+ let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", ");
+ ast_from_text(&format!("use {{{}}};", use_trees))
+}
+
+pub fn use_(visibility: Option<ast::Visibility>, use_tree: ast::UseTree) -> ast::Use {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("{}use {};", visibility, use_tree))
+}
+
+pub fn record_expr(path: ast::Path, fields: ast::RecordExprFieldList) -> ast::RecordExpr {
+ ast_from_text(&format!("fn f() {{ {} {} }}", path, fields))
+}
+
+pub fn record_expr_field_list(
+ fields: impl IntoIterator<Item = ast::RecordExprField>,
+) -> ast::RecordExprFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("fn f() {{ S {{ {} }} }}", fields))
+}
+
+pub fn record_expr_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordExprField {
+ return match expr {
+ Some(expr) => from_text(&format!("{}: {}", name, expr)),
+ None => from_text(&name.to_string()),
+ };
+
+ fn from_text(text: &str) -> ast::RecordExprField {
+ ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text))
+ }
+}
+
+pub fn record_field(
+ visibility: Option<ast::Visibility>,
+ name: ast::Name,
+ ty: ast::Type,
+) -> ast::RecordField {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("struct S {{ {}{}: {}, }}", visibility, name, ty))
+}
+
+// TODO
+pub fn block_expr(
+ stmts: impl IntoIterator<Item = ast::Stmt>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "{\n".to_string();
+ for stmt in stmts.into_iter() {
+ format_to!(buf, " {}\n", stmt);
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {}\n", tail_expr);
+ }
+ buf += "}";
+ ast_from_text(&format!("fn f() {}", buf))
+}
+
+/// Ideally this function wouldn't exist since it involves manual indenting.
+/// It differs from `make::block_expr` by also supporting comments.
+///
+/// FIXME: replace usages of this with the mutable syntax tree API
+pub fn hacky_block_expr_with_comments(
+ elements: impl IntoIterator<Item = crate::SyntaxElement>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "{\n".to_string();
+ for node_or_token in elements.into_iter() {
+ match node_or_token {
+ rowan::NodeOrToken::Node(n) => format_to!(buf, " {}\n", n),
+ rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => {
+ format_to!(buf, " {}\n", t)
+ }
+ _ => (),
+ }
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {}\n", tail_expr);
+ }
+ buf += "}";
+ ast_from_text(&format!("fn f() {}", buf))
+}
+
+pub fn expr_unit() -> ast::Expr {
+ expr_from_text("()")
+}
+pub fn expr_literal(text: &str) -> ast::Literal {
+ assert_eq!(text.trim(), text);
+ ast_from_text(&format!("fn f() {{ let _ = {}; }}", text))
+}
+
+pub fn expr_empty_block() -> ast::Expr {
+ expr_from_text("{}")
+}
+pub fn expr_path(path: ast::Path) -> ast::Expr {
+ expr_from_text(&path.to_string())
+}
+pub fn expr_continue(label: Option<ast::Lifetime>) -> ast::Expr {
+ match label {
+ Some(label) => expr_from_text(&format!("continue {}", label)),
+ None => expr_from_text("continue"),
+ }
+}
+// Consider `op: SyntaxKind` instead for nicer syntax at the call-site?
+pub fn expr_bin_op(lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{} {} {}", lhs, op, rhs))
+}
+pub fn expr_break(label: Option<ast::Lifetime>, expr: Option<ast::Expr>) -> ast::Expr {
+ let mut s = String::from("break");
+
+ if let Some(label) = label {
+ format_to!(s, " {}", label);
+ }
+
+ if let Some(expr) = expr {
+ format_to!(s, " {}", expr);
+ }
+
+ expr_from_text(&s)
+}
+pub fn expr_return(expr: Option<ast::Expr>) -> ast::Expr {
+ match expr {
+ Some(expr) => expr_from_text(&format!("return {}", expr)),
+ None => expr_from_text("return"),
+ }
+}
+pub fn expr_try(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{}?", expr))
+}
+pub fn expr_await(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{}.await", expr))
+}
+pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr {
+ expr_from_text(&format!("match {} {}", expr, match_arm_list))
+}
+pub fn expr_if(
+ condition: ast::Expr,
+ then_branch: ast::BlockExpr,
+ else_branch: Option<ast::ElseBranch>,
+) -> ast::Expr {
+ let else_branch = match else_branch {
+ Some(ast::ElseBranch::Block(block)) => format!("else {}", block),
+ Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {}", if_expr),
+ None => String::new(),
+ };
+ expr_from_text(&format!("if {} {} {}", condition, then_branch, else_branch))
+}
+pub fn expr_for_loop(pat: ast::Pat, expr: ast::Expr, block: ast::BlockExpr) -> ast::Expr {
+ expr_from_text(&format!("for {} in {} {}", pat, expr, block))
+}
+
+pub fn expr_loop(block: ast::BlockExpr) -> ast::Expr {
+ expr_from_text(&format!("loop {}", block))
+}
+
+pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
+ let token = token(op);
+ expr_from_text(&format!("{}{}", token, expr))
+}
+pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+ expr_from_text(&format!("{}{}", f, arg_list))
+}
+pub fn expr_method_call(
+ receiver: ast::Expr,
+ method: ast::NameRef,
+ arg_list: ast::ArgList,
+) -> ast::Expr {
+ expr_from_text(&format!("{}.{}{}", receiver, method, arg_list))
+}
+pub fn expr_macro_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+ expr_from_text(&format!("{}!{}", f, arg_list))
+}
+pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr {
+ expr_from_text(&if exclusive { format!("&mut {}", expr) } else { format!("&{}", expr) })
+}
+pub fn expr_closure(pats: impl IntoIterator<Item = ast::Param>, expr: ast::Expr) -> ast::Expr {
+ let params = pats.into_iter().join(", ");
+ expr_from_text(&format!("|{}| {}", params, expr))
+}
+pub fn expr_field(receiver: ast::Expr, field: &str) -> ast::Expr {
+ expr_from_text(&format!("{}.{}", receiver, field))
+}
+pub fn expr_paren(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("({})", expr))
+}
+/// Builds a tuple expression `(a, b, …)` from the given elements.
+pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::Expr {
+ let contents = elements.into_iter().join(", ");
+ expr_from_text(&format!("({})", contents))
+}
+pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{} = {}", lhs, rhs))
+}
+fn expr_from_text(text: &str) -> ast::Expr {
+ ast_from_text(&format!("const C: () = {};", text))
+}
+pub fn expr_let(pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr {
+ ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr))
+}
+
+pub fn arg_list(args: impl IntoIterator<Item = ast::Expr>) -> ast::ArgList {
+ ast_from_text(&format!("fn main() {{ ()({}) }}", args.into_iter().format(", ")))
+}
+
+/// Builds an identifier pattern such as `ref mut name`, with optional `ref`/`mut` modifiers.
+///
+/// The pattern is parsed by embedding it as a function parameter: `fn f(<pat>: ())`.
+pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat {
+ let mut s = String::from("fn f(");
+ if ref_ {
+ s.push_str("ref ");
+ }
+ if mut_ {
+ s.push_str("mut ");
+ }
+ format_to!(s, "{}", name);
+ s.push_str(": ())");
+ ast_from_text(&s)
+}
+
+pub fn wildcard_pat() -> ast::WildcardPat {
+ return from_text("_");
+
+ fn from_text(text: &str) -> ast::WildcardPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn literal_pat(lit: &str) -> ast::LiteralPat {
+ return from_text(lit);
+
+ fn from_text(text: &str) -> ast::LiteralPat {
+ ast_from_text(&format!("fn f() {{ match x {{ {} => {{}} }} }}", text))
+ }
+}
+
+/// Creates a tuple of patterns from an iterator of patterns.
+///
+/// Invariant: `pats` must be length > 0
+pub fn tuple_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::TuplePat {
+ // Count elements while joining so the iterator is traversed only once.
+ let mut count: usize = 0;
+ let mut pats_str = pats.into_iter().inspect(|_| count += 1).join(", ");
+ if count == 1 {
+ // A one-element tuple needs a trailing comma — `(p,)` — since `(p)`
+ // would parse as a parenthesized pattern instead.
+ pats_str.push(',');
+ }
+ return from_text(&format!("({})", pats_str));
+
+ // Patterns are parsed by embedding them as a function parameter.
+ fn from_text(text: &str) -> ast::TuplePat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn tuple_struct_pat(
+ path: ast::Path,
+ pats: impl IntoIterator<Item = ast::Pat>,
+) -> ast::TupleStructPat {
+ let pats_str = pats.into_iter().join(", ");
+ return from_text(&format!("{}({})", path, pats_str));
+
+ fn from_text(text: &str) -> ast::TupleStructPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn record_pat(path: ast::Path, pats: impl IntoIterator<Item = ast::Pat>) -> ast::RecordPat {
+ let pats_str = pats.into_iter().join(", ");
+ return from_text(&format!("{} {{ {} }}", path, pats_str));
+
+ fn from_text(text: &str) -> ast::RecordPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+/// Builds a record pattern `Path { fields }` from a path and an explicit field list.
+// NOTE(review): the template ends in an unbalanced `)` ("…: ()))"); the parser is
+// error-tolerant so the RecordPat node is still produced — confirm this is intentional.
+pub fn record_pat_with_fields(path: ast::Path, fields: ast::RecordPatFieldList) -> ast::RecordPat {
+ ast_from_text(&format!("fn f({} {}: ()))", path, fields))
+}
+
+pub fn record_pat_field_list(
+ fields: impl IntoIterator<Item = ast::RecordPatField>,
+) -> ast::RecordPatFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("fn f(S {{ {} }}: ()))", fields))
+}
+
+pub fn record_pat_field(name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField {
+ ast_from_text(&format!("fn f(S {{ {}: {} }}: ()))", name_ref, pat))
+}
+
+pub fn record_pat_field_shorthand(name_ref: ast::NameRef) -> ast::RecordPatField {
+ ast_from_text(&format!("fn f(S {{ {} }}: ()))", name_ref))
+}
+
+/// Returns a `BindPat` if the path has just one segment, a `PathPat` otherwise.
+pub fn path_pat(path: ast::Path) -> ast::Pat {
+ return from_text(&path.to_string());
+ fn from_text(text: &str) -> ast::Pat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+/// Builds a match arm `p1 | p2 if guard => expr`; the guard is optional.
+pub fn match_arm(
+ pats: impl IntoIterator<Item = ast::Pat>,
+ guard: Option<ast::Expr>,
+ expr: ast::Expr,
+) -> ast::MatchArm {
+ let pats_str = pats.into_iter().join(" | ");
+ let text = match guard {
+ Some(guard) => format!("{} if {} => {}", pats_str, guard, expr),
+ None => format!("{} => {}", pats_str, expr),
+ };
+ return from_text(&text);
+
+ // Arms are parsed by embedding them in a dummy `match`.
+ fn from_text(text: &str) -> ast::MatchArm {
+ ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+ }
+}
+
+pub fn match_arm_with_guard(
+ pats: impl IntoIterator<Item = ast::Pat>,
+ guard: ast::Expr,
+ expr: ast::Expr,
+) -> ast::MatchArm {
+ let pats_str = pats.into_iter().join(" | ");
+ return from_text(&format!("{} if {} => {}", pats_str, guard, expr));
+
+ fn from_text(text: &str) -> ast::MatchArm {
+ ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+ }
+}
+
+/// Builds a `MatchArmList` from the given arms, one arm per line.
+pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
+ let arms_str = arms
+ .into_iter()
+ .map(|arm| {
+ // Arms whose body is not block-like must be comma-terminated;
+ // block-like bodies (e.g. `{ .. }`) may omit the comma.
+ let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
+ let comma = if needs_comma { "," } else { "" };
+ format!(" {}{}\n", arm.syntax(), comma)
+ })
+ .collect::<String>();
+ return from_text(&arms_str);
+
+ fn from_text(text: &str) -> ast::MatchArmList {
+ ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text))
+ }
+}
+
+pub fn where_pred(
+ path: ast::Path,
+ bounds: impl IntoIterator<Item = ast::TypeBound>,
+) -> ast::WherePred {
+ let bounds = bounds.into_iter().join(" + ");
+ return from_text(&format!("{}: {}", path, bounds));
+
+ fn from_text(text: &str) -> ast::WherePred {
+ ast_from_text(&format!("fn f() where {} {{ }}", text))
+ }
+}
+
+pub fn where_clause(preds: impl IntoIterator<Item = ast::WherePred>) -> ast::WhereClause {
+ let preds = preds.into_iter().join(", ");
+ return from_text(preds.as_str());
+
+ fn from_text(text: &str) -> ast::WhereClause {
+ ast_from_text(&format!("fn f() where {} {{ }}", text))
+ }
+}
+
+/// Builds a `let` statement: `let <pat>[: <ty>][ = <init>];`.
+/// Both the type ascription and the initializer are optional.
+pub fn let_stmt(
+ pattern: ast::Pat,
+ ty: Option<ast::Type>,
+ initializer: Option<ast::Expr>,
+) -> ast::LetStmt {
+ let mut text = String::new();
+ format_to!(text, "let {}", pattern);
+ if let Some(ty) = ty {
+ format_to!(text, ": {}", ty);
+ }
+ match initializer {
+ Some(it) => format_to!(text, " = {};", it),
+ None => format_to!(text, ";"),
+ };
+ ast_from_text(&format!("fn f() {{ {} }}", text))
+}
+pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt {
+ let semi = if expr.is_block_like() { "" } else { ";" };
+ ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi))
+}
+
+pub fn item_const(
+ visibility: Option<ast::Visibility>,
+ name: ast::Name,
+ ty: ast::Type,
+ expr: ast::Expr,
+) -> ast::Const {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("{} const {}: {} = {};", visibility, name, ty, expr))
+}
+
+pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param {
+ ast_from_text(&format!("fn f({}: {}) {{ }}", pat, ty))
+}
+
+pub fn self_param() -> ast::SelfParam {
+ ast_from_text("fn f(&self) { }")
+}
+
+pub fn ret_type(ty: ast::Type) -> ast::RetType {
+ ast_from_text(&format!("fn f() -> {} {{ }}", ty))
+}
+
+/// Builds a parameter list, optionally starting with a `self` parameter.
+pub fn param_list(
+ self_param: Option<ast::SelfParam>,
+ pats: impl IntoIterator<Item = ast::Param>,
+) -> ast::ParamList {
+ let args = pats.into_iter().join(", ");
+ // Three shapes so no stray `,` appears when either part is absent.
+ let list = match self_param {
+ Some(self_param) if args.is_empty() => format!("fn f({}) {{ }}", self_param),
+ Some(self_param) => format!("fn f({}, {}) {{ }}", self_param, args),
+ None => format!("fn f({}) {{ }}", args),
+ };
+ ast_from_text(&list)
+}
+
+pub fn type_param(name: ast::Name, ty: Option<ast::TypeBoundList>) -> ast::TypeParam {
+ let bound = match ty {
+ Some(it) => format!(": {}", it),
+ None => String::new(),
+ };
+ ast_from_text(&format!("fn f<{}{}>() {{ }}", name, bound))
+}
+
+pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam {
+ ast_from_text(&format!("fn f<{}>() {{ }}", lifetime))
+}
+
+pub fn generic_param_list(
+ pats: impl IntoIterator<Item = ast::GenericParam>,
+) -> ast::GenericParamList {
+ let args = pats.into_iter().join(", ");
+ ast_from_text(&format!("fn f<{}>() {{ }}", args))
+}
+
+pub fn visibility_pub_crate() -> ast::Visibility {
+ ast_from_text("pub(crate) struct S")
+}
+
+pub fn visibility_pub() -> ast::Visibility {
+ ast_from_text("pub struct S")
+}
+
+pub fn tuple_field_list(fields: impl IntoIterator<Item = ast::TupleField>) -> ast::TupleFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("struct f({});", fields))
+}
+
+pub fn record_field_list(
+ fields: impl IntoIterator<Item = ast::RecordField>,
+) -> ast::RecordFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("struct f {{ {} }}", fields))
+}
+
+pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::TupleField {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("struct f({}{});", visibility, ty))
+}
+
+pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
+ let field_list = match field_list {
+ None => String::new(),
+ Some(it) => format!("{}", it),
+ };
+ ast_from_text(&format!("enum f {{ {}{} }}", name, field_list))
+}
+
+/// Builds a free function item. `visibility`, `type_params` and `ret_type` are
+/// optional; `is_async` prepends the `async` keyword.
+pub fn fn_(
+ visibility: Option<ast::Visibility>,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ params: ast::ParamList,
+ body: ast::BlockExpr,
+ ret_type: Option<ast::RetType>,
+ is_async: bool,
+) -> ast::Fn {
+ let type_params = match type_params {
+ Some(type_params) => format!("{}", type_params),
+ None => "".into(),
+ };
+ // Present ret_type/visibility carry their own trailing space so absent ones
+ // leave no double spaces in the final text.
+ let ret_type = match ret_type {
+ Some(ret_type) => format!("{} ", ret_type),
+ None => "".into(),
+ };
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+
+ let async_literal = if is_async { "async " } else { "" };
+
+ ast_from_text(&format!(
+ "{}{}fn {}{}{} {}{}",
+ visibility, async_literal, fn_name, type_params, params, ret_type, body
+ ))
+}
+
+/// Builds a `struct` item. Tuple-field structs get a terminating `;`,
+/// record-field structs do not.
+pub fn struct_(
+ visibility: Option<ast::Visibility>,
+ strukt_name: ast::Name,
+ generic_param_list: Option<ast::GenericParamList>,
+ field_list: ast::FieldList,
+) -> ast::Struct {
+ let semicolon = if matches!(field_list, ast::FieldList::TupleFieldList(_)) { ";" } else { "" };
+ let type_params = generic_param_list.map_or_else(String::new, |it| it.to_string());
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+
+ ast_from_text(&format!(
+ "{}struct {}{}{}{}",
+ visibility, strukt_name, type_params, field_list, semicolon
+ ))
+}
+
+/// Parses `text` as a whole source file and returns the first descendant node
+/// that casts to `N`.
+///
+/// Panics if no such node is found; `#[track_caller]` makes the panic point at
+/// the calling `make::…` constructor rather than at this helper.
+#[track_caller]
+fn ast_from_text<N: AstNode>(text: &str) -> N {
+ let parse = SourceFile::parse(text);
+ let node = match parse.tree().syntax().descendants().find_map(N::cast) {
+ Some(it) => it,
+ None => {
+ panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text)
+ }
+ };
+ // Detach the node from the rest of the parsed file; the assert checks the
+ // detached subtree now starts at offset 0.
+ let node = node.clone_subtree();
+ assert_eq!(node.syntax().text_range().start(), 0.into());
+ node
+}
+
+/// Returns a mutable token of the given kind, looked up in the pre-parsed
+/// `tokens::SOURCE_FILE`.
+///
+/// Panics if `SOURCE_FILE` does not contain a token of that kind.
+pub fn token(kind: SyntaxKind) -> SyntaxToken {
+ tokens::SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == kind)
+ .unwrap_or_else(|| panic!("unhandled token: {:?}", kind))
+}
+
+/// Factories for individual [`SyntaxToken`]s (whitespace, comments, literals, punctuation).
+pub mod tokens {
+ use once_cell::sync::Lazy;
+
+ use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
+
+ // A single lazily-parsed file that contains the comparison/prefix operator
+ // tokens plus the whitespace shapes (" ", "\n", "\n\n") that the token
+ // factories below search for.
+ pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
+ SourceFile::parse(
+ "const C: <()>::Item = (1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p)\n;\n\n",
+ )
+ });
+
+ /// A single-space whitespace token.
+ pub fn single_space() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == " ")
+ .unwrap()
+ }
+
+ /// A whitespace token with exactly the given text (must be all-whitespace).
+ pub fn whitespace(text: &str) -> SyntaxToken {
+ assert!(text.trim().is_empty());
+ let sf = SourceFile::parse(text).ok().unwrap();
+ sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ /// A doc-comment token parsed from `text` (must be non-blank).
+ pub fn doc_comment(text: &str) -> SyntaxToken {
+ assert!(!text.trim().is_empty());
+ let sf = SourceFile::parse(text).ok().unwrap();
+ sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ /// A literal token parsed from `text` (must carry no surrounding whitespace).
+ pub fn literal(text: &str) -> SyntaxToken {
+ assert_eq!(text.trim(), text);
+ let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
+ lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ /// A detached single-`\n` whitespace token.
+ pub fn single_newline() -> SyntaxToken {
+ let res = SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
+ .unwrap();
+ // Detach so the token can be inserted into another tree.
+ res.detach();
+ res
+ }
+
+ /// A `\n\n` whitespace token (an empty line between items).
+ pub fn blank_line() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
+ .unwrap()
+ }
+
+ // Helper that owns a parsed file so its first token can be handed out.
+ pub struct WsBuilder(SourceFile);
+
+ impl WsBuilder {
+ pub fn new(text: &str) -> WsBuilder {
+ WsBuilder(SourceFile::parse(text).ok().unwrap())
+ }
+ /// The first token of the wrapped file.
+ pub fn ws(&self) -> SyntaxToken {
+ self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
new file mode 100644
index 000000000..bb92c51e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -0,0 +1,875 @@
+//! Various extension methods to ast Nodes, which are hard to code-generate.
+//! Extensions for various expressions live in a sibling `expr_extensions` module.
+//!
+//! These methods should only do simple, shallow tasks related to the syntax of the node itself.
+
+use std::{borrow::Cow, fmt, iter::successors};
+
+use itertools::Itertools;
+use parser::SyntaxKind;
+use rowan::{GreenNodeData, GreenTokenData};
+
+use crate::{
+ ast::{self, support, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, SyntaxNode},
+ NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T,
+};
+
+impl ast::Lifetime {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+}
+
+impl ast::Name {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+}
+
+impl ast::NameRef {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+
+ /// Interprets the name as a tuple-field index (the `0` in `t.0`), if it parses as `usize`.
+ pub fn as_tuple_field(&self) -> Option<usize> {
+ self.text().parse().ok()
+ }
+
+ /// Kind of the first token, or `ERROR` if the node has no tokens.
+ pub fn token_kind(&self) -> SyntaxKind {
+ self.syntax().first_token().map_or(SyntaxKind::ERROR, |it| it.kind())
+ }
+}
+
+/// Returns the text of `node`'s first token, borrowing from the green tree when
+/// possible and only allocating when the green node is owned.
+fn text_of_first_token(node: &SyntaxNode) -> TokenText<'_> {
+ fn first_token(green_ref: &GreenNodeData) -> &GreenTokenData {
+ // Callers only use this on nodes whose first child is a token.
+ green_ref.children().next().and_then(NodeOrToken::into_token).unwrap()
+ }
+
+ match node.green() {
+ Cow::Borrowed(green_ref) => TokenText::borrowed(first_token(green_ref).text()),
+ Cow::Owned(green) => TokenText::owned(first_token(&green).to_owned()),
+ }
+}
+
+impl ast::HasModuleItem for ast::StmtList {}
+
+impl ast::BlockExpr {
+ // FIXME: remove all these methods, they belong to ast::StmtList
+ /// Statements of the block's statement list (empty if there is none).
+ pub fn statements(&self) -> impl Iterator<Item = ast::Stmt> {
+ self.stmt_list().into_iter().flat_map(|it| it.statements())
+ }
+ /// Trailing expression of the block's statement list, if any.
+ pub fn tail_expr(&self) -> Option<ast::Expr> {
+ self.stmt_list()?.tail_expr()
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Macro {
+ MacroRules(ast::MacroRules),
+ MacroDef(ast::MacroDef),
+}
+
+impl From<ast::MacroRules> for Macro {
+ fn from(it: ast::MacroRules) -> Self {
+ Macro::MacroRules(it)
+ }
+}
+
+impl From<ast::MacroDef> for Macro {
+ fn from(it: ast::MacroDef) -> Self {
+ Macro::MacroDef(it)
+ }
+}
+
+impl AstNode for Macro {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, SyntaxKind::MACRO_RULES | SyntaxKind::MACRO_DEF)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ SyntaxKind::MACRO_RULES => Macro::MacroRules(ast::MacroRules { syntax }),
+ SyntaxKind::MACRO_DEF => Macro::MacroDef(ast::MacroDef { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Macro::MacroRules(it) => it.syntax(),
+ Macro::MacroDef(it) => it.syntax(),
+ }
+ }
+}
+
+impl HasName for Macro {
+ fn name(&self) -> Option<ast::Name> {
+ match self {
+ Macro::MacroRules(mac) => mac.name(),
+ Macro::MacroDef(mac) => mac.name(),
+ }
+ }
+}
+
+impl HasAttrs for Macro {}
+
+impl From<ast::AssocItem> for ast::Item {
+ fn from(assoc: ast::AssocItem) -> Self {
+ match assoc {
+ ast::AssocItem::Const(it) => ast::Item::Const(it),
+ ast::AssocItem::Fn(it) => ast::Item::Fn(it),
+ ast::AssocItem::MacroCall(it) => ast::Item::MacroCall(it),
+ ast::AssocItem::TypeAlias(it) => ast::Item::TypeAlias(it),
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum AttrKind {
+ Inner,
+ Outer,
+}
+
+impl AttrKind {
+ /// Returns `true` if the attr_kind is [`Inner`](Self::Inner).
+ pub fn is_inner(&self) -> bool {
+ matches!(self, Self::Inner)
+ }
+
+ /// Returns `true` if the attr_kind is [`Outer`](Self::Outer).
+ pub fn is_outer(&self) -> bool {
+ matches!(self, Self::Outer)
+ }
+}
+
+impl ast::Attr {
+ pub fn as_simple_atom(&self) -> Option<SmolStr> {
+ let meta = self.meta()?;
+ if meta.eq_token().is_some() || meta.token_tree().is_some() {
+ return None;
+ }
+ self.simple_name()
+ }
+
+ pub fn as_simple_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
+ let tt = self.meta()?.token_tree()?;
+ Some((self.simple_name()?, tt))
+ }
+
+ pub fn simple_name(&self) -> Option<SmolStr> {
+ let path = self.meta()?.path()?;
+ match (path.segment(), path.qualifier()) {
+ (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
+ _ => None,
+ }
+ }
+
+ pub fn kind(&self) -> AttrKind {
+ match self.excl_token() {
+ Some(_) => AttrKind::Inner,
+ None => AttrKind::Outer,
+ }
+ }
+
+ pub fn path(&self) -> Option<ast::Path> {
+ self.meta()?.path()
+ }
+
+ pub fn expr(&self) -> Option<ast::Expr> {
+ self.meta()?.expr()
+ }
+
+ pub fn token_tree(&self) -> Option<ast::TokenTree> {
+ self.meta()?.token_tree()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathSegmentKind {
+ Name(ast::NameRef),
+ Type { type_ref: Option<ast::Type>, trait_ref: Option<ast::PathType> },
+ SelfTypeKw,
+ SelfKw,
+ SuperKw,
+ CrateKw,
+}
+
+impl ast::PathSegment {
+ pub fn parent_path(&self) -> ast::Path {
+ self.syntax()
+ .parent()
+ .and_then(ast::Path::cast)
+ .expect("segments are always nested in paths")
+ }
+
+ pub fn crate_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.crate_token())
+ }
+
+ pub fn self_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.self_token())
+ }
+
+ pub fn self_type_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.Self_token())
+ }
+
+ pub fn super_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.super_token())
+ }
+
+ pub fn kind(&self) -> Option<PathSegmentKind> {
+ let res = if let Some(name_ref) = self.name_ref() {
+ match name_ref.token_kind() {
+ T![Self] => PathSegmentKind::SelfTypeKw,
+ T![self] => PathSegmentKind::SelfKw,
+ T![super] => PathSegmentKind::SuperKw,
+ T![crate] => PathSegmentKind::CrateKw,
+ _ => PathSegmentKind::Name(name_ref),
+ }
+ } else {
+ match self.syntax().first_child_or_token()?.kind() {
+ T![<] => {
+ // <T> or <T as Trait>
+ // T is any TypeRef, Trait has to be a PathType
+ let mut type_refs =
+ self.syntax().children().filter(|node| ast::Type::can_cast(node.kind()));
+ let type_ref = type_refs.next().and_then(ast::Type::cast);
+ let trait_ref = type_refs.next().and_then(ast::PathType::cast);
+ PathSegmentKind::Type { type_ref, trait_ref }
+ }
+ _ => return None,
+ }
+ };
+ Some(res)
+ }
+}
+
+impl ast::Path {
+ pub fn parent_path(&self) -> Option<ast::Path> {
+ self.syntax().parent().and_then(ast::Path::cast)
+ }
+
+ pub fn as_single_segment(&self) -> Option<ast::PathSegment> {
+ match self.qualifier() {
+ Some(_) => None,
+ None => self.segment(),
+ }
+ }
+
+ pub fn as_single_name_ref(&self) -> Option<ast::NameRef> {
+ match self.qualifier() {
+ Some(_) => None,
+ None => self.segment()?.name_ref(),
+ }
+ }
+
+ pub fn first_qualifier_or_self(&self) -> ast::Path {
+ successors(Some(self.clone()), ast::Path::qualifier).last().unwrap()
+ }
+
+ pub fn first_segment(&self) -> Option<ast::PathSegment> {
+ self.first_qualifier_or_self().segment()
+ }
+
+ pub fn segments(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
+ successors(self.first_segment(), |p| {
+ p.parent_path().parent_path().and_then(|p| p.segment())
+ })
+ }
+
+ pub fn qualifiers(&self) -> impl Iterator<Item = ast::Path> + Clone {
+ successors(self.qualifier(), |p| p.qualifier())
+ }
+
+ pub fn top_path(&self) -> ast::Path {
+ let mut this = self.clone();
+ while let Some(path) = this.parent_path() {
+ this = path;
+ }
+ this
+ }
+}
+
+impl ast::Use {
+ pub fn is_simple_glob(&self) -> bool {
+ self.use_tree().map_or(false, |use_tree| {
+ use_tree.use_tree_list().is_none() && use_tree.star_token().is_some()
+ })
+ }
+}
+
+impl ast::UseTree {
+ pub fn is_simple_path(&self) -> bool {
+ self.use_tree_list().is_none() && self.star_token().is_none()
+ }
+}
+
+impl ast::UseTreeList {
+ pub fn parent_use_tree(&self) -> ast::UseTree {
+ self.syntax()
+ .parent()
+ .and_then(ast::UseTree::cast)
+ .expect("UseTreeLists are always nested in UseTrees")
+ }
+
+ pub fn has_inner_comment(&self) -> bool {
+ self.syntax()
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find_map(ast::Comment::cast)
+ .is_some()
+ }
+}
+
+impl ast::Impl {
+ /// The type being implemented: the only `Type` child of an inherent impl,
+ /// or the second one of a trait impl (`impl Trait for SelfTy`).
+ pub fn self_ty(&self) -> Option<ast::Type> {
+ match self.target() {
+ (Some(t), None) | (_, Some(t)) => Some(t),
+ _ => None,
+ }
+ }
+
+ /// The implemented trait's type — present only when there are two `Type` children.
+ pub fn trait_(&self) -> Option<ast::Type> {
+ match self.target() {
+ (Some(t), Some(_)) => Some(t),
+ _ => None,
+ }
+ }
+
+ /// First two `Type` children of the impl node, in source order.
+ fn target(&self) -> (Option<ast::Type>, Option<ast::Type>) {
+ let mut types = support::children(self.syntax());
+ let first = types.next();
+ let second = types.next();
+ (first, second)
+ }
+
+ /// Finds the enclosing impl whose trait path starts exactly at `name_ref`
+ /// (i.e. `name_ref` is the first segment of the implemented trait).
+ pub fn for_trait_name_ref(name_ref: &ast::NameRef) -> Option<ast::Impl> {
+ let this = name_ref.syntax().ancestors().find_map(ast::Impl::cast)?;
+ if this.trait_()?.syntax().text_range().start() == name_ref.syntax().text_range().start() {
+ Some(this)
+ } else {
+ None
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum StructKind {
+ Record(ast::RecordFieldList),
+ Tuple(ast::TupleFieldList),
+ Unit,
+}
+
+impl StructKind {
+ fn from_node<N: AstNode>(node: &N) -> StructKind {
+ if let Some(nfdl) = support::child::<ast::RecordFieldList>(node.syntax()) {
+ StructKind::Record(nfdl)
+ } else if let Some(pfl) = support::child::<ast::TupleFieldList>(node.syntax()) {
+ StructKind::Tuple(pfl)
+ } else {
+ StructKind::Unit
+ }
+ }
+}
+
+impl ast::Struct {
+ pub fn kind(&self) -> StructKind {
+ StructKind::from_node(self)
+ }
+}
+
+impl ast::RecordExprField {
+ /// Like `for_name_ref`, but only when `name_ref` is the field's *name*
+ /// (not e.g. part of its value expression).
+ pub fn for_field_name(field_name: &ast::NameRef) -> Option<ast::RecordExprField> {
+ let candidate = Self::for_name_ref(field_name)?;
+ if candidate.field_name().as_ref() == Some(field_name) {
+ Some(candidate)
+ } else {
+ None
+ }
+ }
+
+ /// Finds the record-expr field containing `name_ref`: either its direct
+ /// parent, or — for shorthand `Foo { bar }` — four ancestors up
+ /// (NameRef → PathSegment → Path → PathExpr → RecordExprField).
+ pub fn for_name_ref(name_ref: &ast::NameRef) -> Option<ast::RecordExprField> {
+ let syn = name_ref.syntax();
+ syn.parent()
+ .and_then(ast::RecordExprField::cast)
+ .or_else(|| syn.ancestors().nth(4).and_then(ast::RecordExprField::cast))
+ }
+
+ /// Deals with field init shorthand
+ pub fn field_name(&self) -> Option<ast::NameRef> {
+ if let Some(name_ref) = self.name_ref() {
+ return Some(name_ref);
+ }
+ // Shorthand `Foo { bar }`: the field name is the single-segment path of
+ // the value expression.
+ if let ast::Expr::PathExpr(expr) = self.expr()? {
+ let path = expr.path()?;
+ let segment = path.segment()?;
+ let name_ref = segment.name_ref()?;
+ if path.qualifier().is_none() {
+ return Some(name_ref);
+ }
+ }
+ None
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum NameLike {
+ NameRef(ast::NameRef),
+ Name(ast::Name),
+ Lifetime(ast::Lifetime),
+}
+
+impl NameLike {
+ pub fn as_name_ref(&self) -> Option<&ast::NameRef> {
+ match self {
+ NameLike::NameRef(name_ref) => Some(name_ref),
+ _ => None,
+ }
+ }
+ pub fn as_lifetime(&self) -> Option<&ast::Lifetime> {
+ match self {
+ NameLike::Lifetime(lifetime) => Some(lifetime),
+ _ => None,
+ }
+ }
+ pub fn text(&self) -> TokenText<'_> {
+ match self {
+ NameLike::NameRef(name_ref) => name_ref.text(),
+ NameLike::Name(name) => name.text(),
+ NameLike::Lifetime(lifetime) => lifetime.text(),
+ }
+ }
+}
+
+impl ast::AstNode for NameLike {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF | SyntaxKind::LIFETIME)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ SyntaxKind::NAME => NameLike::Name(ast::Name { syntax }),
+ SyntaxKind::NAME_REF => NameLike::NameRef(ast::NameRef { syntax }),
+ SyntaxKind::LIFETIME => NameLike::Lifetime(ast::Lifetime { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ NameLike::NameRef(it) => it.syntax(),
+ NameLike::Name(it) => it.syntax(),
+ NameLike::Lifetime(it) => it.syntax(),
+ }
+ }
+}
+
+const _: () = {
+ use ast::{Lifetime, Name, NameRef};
+ stdx::impl_from!(NameRef, Name, Lifetime for NameLike);
+};
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum NameOrNameRef {
+ Name(ast::Name),
+ NameRef(ast::NameRef),
+}
+
+impl fmt::Display for NameOrNameRef {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ NameOrNameRef::Name(it) => fmt::Display::fmt(it, f),
+ NameOrNameRef::NameRef(it) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl NameOrNameRef {
+ pub fn text(&self) -> TokenText<'_> {
+ match self {
+ NameOrNameRef::Name(name) => name.text(),
+ NameOrNameRef::NameRef(name_ref) => name_ref.text(),
+ }
+ }
+}
+
+impl ast::RecordPatField {
+ pub fn for_field_name_ref(field_name: &ast::NameRef) -> Option<ast::RecordPatField> {
+ let candidate = field_name.syntax().parent().and_then(ast::RecordPatField::cast)?;
+ match candidate.field_name()? {
+ NameOrNameRef::NameRef(name_ref) if name_ref == *field_name => Some(candidate),
+ _ => None,
+ }
+ }
+
+ pub fn for_field_name(field_name: &ast::Name) -> Option<ast::RecordPatField> {
+ let candidate =
+ field_name.syntax().ancestors().nth(2).and_then(ast::RecordPatField::cast)?;
+ match candidate.field_name()? {
+ NameOrNameRef::Name(name) if name == *field_name => Some(candidate),
+ _ => None,
+ }
+ }
+
+ pub fn parent_record_pat(&self) -> ast::RecordPat {
+ self.syntax().ancestors().find_map(ast::RecordPat::cast).unwrap()
+ }
+
+ /// Deals with field init shorthand
+ pub fn field_name(&self) -> Option<NameOrNameRef> {
+ if let Some(name_ref) = self.name_ref() {
+ return Some(NameOrNameRef::NameRef(name_ref));
+ }
+ match self.pat() {
+ Some(ast::Pat::IdentPat(pat)) => {
+ let name = pat.name()?;
+ Some(NameOrNameRef::Name(name))
+ }
+ Some(ast::Pat::BoxPat(pat)) => match pat.pat() {
+ Some(ast::Pat::IdentPat(pat)) => {
+ let name = pat.name()?;
+ Some(NameOrNameRef::Name(name))
+ }
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+}
+
+impl ast::Variant {
+ pub fn parent_enum(&self) -> ast::Enum {
+ self.syntax()
+ .parent()
+ .and_then(|it| it.parent())
+ .and_then(ast::Enum::cast)
+ .expect("EnumVariants are always nested in Enums")
+ }
+ pub fn kind(&self) -> StructKind {
+ StructKind::from_node(self)
+ }
+}
+
+impl ast::Item {
+ pub fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+ ast::AnyHasGenericParams::cast(self.syntax().clone())?.generic_param_list()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum FieldKind {
+ Name(ast::NameRef),
+ Index(SyntaxToken),
+}
+
+impl ast::FieldExpr {
+ /// Token of a numeric field access such as `t.0`, if present.
+ pub fn index_token(&self) -> Option<SyntaxToken> {
+ self.syntax
+ .children_with_tokens()
+ // FIXME: Accepting floats here to reject them in validation later
+ .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER)
+ .as_ref()
+ .and_then(SyntaxElement::as_token)
+ .cloned()
+ }
+
+ /// The accessed field: named (`s.field`) or positional/index (`t.0`).
+ pub fn field_access(&self) -> Option<FieldKind> {
+ match self.name_ref() {
+ Some(nr) => Some(FieldKind::Name(nr)),
+ None => self.index_token().map(FieldKind::Index),
+ }
+ }
+}
+
+pub struct SlicePatComponents {
+ pub prefix: Vec<ast::Pat>,
+ pub slice: Option<ast::Pat>,
+ pub suffix: Vec<ast::Pat>,
+}
+
+impl ast::SlicePat {
+ /// Splits the slice pattern around its rest pattern (`..`): the patterns
+ /// before it, the rest-carrying pattern itself (if any), and those after it.
+ pub fn components(&self) -> SlicePatComponents {
+ let mut args = self.pats().peekable();
+ let prefix = args
+ // Take patterns until one carries `..`: plain `..`, `ident @ ..`,
+ // or the same two forms behind a `&` reference.
+ .peeking_take_while(|p| match p {
+ ast::Pat::RestPat(_) => false,
+ ast::Pat::IdentPat(bp) => !matches!(bp.pat(), Some(ast::Pat::RestPat(_))),
+ ast::Pat::RefPat(rp) => match rp.pat() {
+ Some(ast::Pat::RestPat(_)) => false,
+ Some(ast::Pat::IdentPat(bp)) => !matches!(bp.pat(), Some(ast::Pat::RestPat(_))),
+ _ => true,
+ },
+ _ => true,
+ })
+ .collect();
+ // `peeking_take_while` leaves the rest pattern in the iterator.
+ let slice = args.next();
+ let suffix = args.collect();
+
+ SlicePatComponents { prefix, slice, suffix }
+ }
+}
+
+impl ast::IdentPat {
+ pub fn is_simple_ident(&self) -> bool {
+ self.at_token().is_none()
+ && self.mut_token().is_none()
+ && self.ref_token().is_none()
+ && self.pat().is_none()
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum SelfParamKind {
+ /// self
+ Owned,
+ /// &self
+ Ref,
+ /// &mut self
+ MutRef,
+}
+
+impl ast::SelfParam {
+ pub fn kind(&self) -> SelfParamKind {
+ if self.amp_token().is_some() {
+ if self.mut_token().is_some() {
+ SelfParamKind::MutRef
+ } else {
+ SelfParamKind::Ref
+ }
+ } else {
+ SelfParamKind::Owned
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TypeBoundKind {
+ /// Trait
+ PathType(ast::PathType),
+ /// for<'a> ...
+ ForType(ast::ForType),
+ /// 'a
+ Lifetime(ast::Lifetime),
+}
+
+impl ast::TypeBound {
+ /// Classifies the bound as a trait path, a `for<'a> …` type, or a lifetime.
+ pub fn kind(&self) -> TypeBoundKind {
+ // The two `support::children(..).next()` calls look identical but are
+ // typed differently by inference: the first searches for an
+ // `ast::PathType` child, the second for an `ast::ForType` child.
+ if let Some(path_type) = support::children(self.syntax()).next() {
+ TypeBoundKind::PathType(path_type)
+ } else if let Some(for_type) = support::children(self.syntax()).next() {
+ TypeBoundKind::ForType(for_type)
+ } else if let Some(lifetime) = self.lifetime() {
+ TypeBoundKind::Lifetime(lifetime)
+ } else {
+ unreachable!()
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum TypeOrConstParam {
+ Type(ast::TypeParam),
+ Const(ast::ConstParam),
+}
+
+impl TypeOrConstParam {
+ pub fn name(&self) -> Option<ast::Name> {
+ match self {
+ TypeOrConstParam::Type(x) => x.name(),
+ TypeOrConstParam::Const(x) => x.name(),
+ }
+ }
+}
+
+pub enum VisibilityKind {
+ In(ast::Path),
+ PubCrate,
+ PubSuper,
+ PubSelf,
+ Pub,
+}
+
+impl ast::Visibility {
+ /// Classifies the visibility: `pub`, `pub(crate)`, `pub(super)`, `pub(self)`
+ /// or `pub(in some::path)`.
+ pub fn kind(&self) -> VisibilityKind {
+ match self.path() {
+ Some(path) => {
+ // A single, non-`::`-prefixed segment may be one of the keyword forms.
+ if let Some(segment) =
+ path.as_single_segment().filter(|it| it.coloncolon_token().is_none())
+ {
+ if segment.crate_token().is_some() {
+ return VisibilityKind::PubCrate;
+ } else if segment.super_token().is_some() {
+ return VisibilityKind::PubSuper;
+ } else if segment.self_token().is_some() {
+ return VisibilityKind::PubSelf;
+ }
+ }
+ // Any other path is treated as `pub(in path)`.
+ VisibilityKind::In(path)
+ }
+ None => VisibilityKind::Pub,
+ }
+ }
+}
+
+impl ast::LifetimeParam {
+ pub fn lifetime_bounds(&self) -> impl Iterator<Item = SyntaxToken> {
+ self.syntax()
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .skip_while(|x| x.kind() != T![:])
+ .filter(|it| it.kind() == T![lifetime_ident])
+ }
+}
+
+impl ast::Module {
+ /// Returns the parent ast::Module, this is different than the semantic parent in that this only
+ /// considers parent declarations in the AST
+ pub fn parent(&self) -> Option<ast::Module> {
+ // nth(2) skips the module's own node and the enclosing item list to
+ // reach the candidate parent module node — returns None if it isn't one.
+ self.syntax().ancestors().nth(2).and_then(ast::Module::cast)
+ }
+}
+
+impl ast::RangePat {
+ pub fn start(&self) -> Option<ast::Pat> {
+ self.syntax()
+ .children_with_tokens()
+ .take_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
+ .filter_map(|it| it.into_node())
+ .find_map(ast::Pat::cast)
+ }
+
+ pub fn end(&self) -> Option<ast::Pat> {
+ self.syntax()
+ .children_with_tokens()
+ .skip_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
+ .filter_map(|it| it.into_node())
+ .find_map(ast::Pat::cast)
+ }
+}
+
+impl ast::TokenTree {
+ pub fn token_trees_and_tokens(
+ &self,
+ ) -> impl Iterator<Item = NodeOrToken<ast::TokenTree, SyntaxToken>> {
+ self.syntax().children_with_tokens().filter_map(|not| match not {
+ NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node),
+ NodeOrToken::Token(t) => Some(NodeOrToken::Token(t)),
+ })
+ }
+
+ pub fn left_delimiter_token(&self) -> Option<SyntaxToken> {
+ self.syntax()
+ .first_child_or_token()?
+ .into_token()
+ .filter(|it| matches!(it.kind(), T!['{'] | T!['('] | T!['[']))
+ }
+
+ pub fn right_delimiter_token(&self) -> Option<SyntaxToken> {
+ self.syntax()
+ .last_child_or_token()?
+ .into_token()
+ .filter(|it| matches!(it.kind(), T!['}'] | T![')'] | T![']']))
+ }
+
+ pub fn parent_meta(&self) -> Option<ast::Meta> {
+ self.syntax().parent().and_then(ast::Meta::cast)
+ }
+}
+
+impl ast::Meta {
+ pub fn parent_attr(&self) -> Option<ast::Attr> {
+ self.syntax().parent().and_then(ast::Attr::cast)
+ }
+}
+
+impl ast::GenericArgList {
+ pub fn lifetime_args(&self) -> impl Iterator<Item = ast::LifetimeArg> {
+ self.generic_args().filter_map(|arg| match arg {
+ ast::GenericArg::LifetimeArg(it) => Some(it),
+ _ => None,
+ })
+ }
+}
+
+impl ast::GenericParamList {
+ pub fn lifetime_params(&self) -> impl Iterator<Item = ast::LifetimeParam> {
+ self.generic_params().filter_map(|param| match param {
+ ast::GenericParam::LifetimeParam(it) => Some(it),
+ ast::GenericParam::TypeParam(_) | ast::GenericParam::ConstParam(_) => None,
+ })
+ }
+ pub fn type_or_const_params(&self) -> impl Iterator<Item = ast::TypeOrConstParam> {
+ self.generic_params().filter_map(|param| match param {
+ ast::GenericParam::TypeParam(it) => Some(ast::TypeOrConstParam::Type(it)),
+ ast::GenericParam::LifetimeParam(_) => None,
+ ast::GenericParam::ConstParam(it) => Some(ast::TypeOrConstParam::Const(it)),
+ })
+ }
+}
+
+impl ast::ForExpr {
+ /// The iterable of the `for` loop, with error recovery for incomplete code.
+ pub fn iterable(&self) -> Option<ast::Expr> {
+ // If the iterable is a BlockExpr, check if the body is missing.
+ // If it is assume the iterable is the expression that is missing instead.
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ match first {
+ // Only report the block as the iterable when a second child (the
+ // real body) exists after it.
+ Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+ first => first,
+ }
+ }
+}
+
+impl ast::HasLoopBody for ast::ForExpr {
+ fn loop_body(&self) -> Option<ast::BlockExpr> {
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ let second = exprs.next();
+ second.or(first)
+ }
+}
+
+impl ast::WhileExpr {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ // If the condition is a BlockExpr, check if the body is missing.
+ // If it is assume the condition is the expression that is missing instead.
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ match first {
+ Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+ first => first,
+ }
+ }
+}
+
+impl ast::HasLoopBody for ast::WhileExpr {
+ fn loop_body(&self) -> Option<ast::BlockExpr> {
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ let second = exprs.next();
+ second.or(first)
+ }
+}
+
+impl ast::HasAttrs for ast::AnyHasDocComments {}
+
+impl From<ast::Adt> for ast::Item {
+ fn from(it: ast::Adt) -> Self {
+ match it {
+ ast::Adt::Enum(it) => ast::Item::Enum(it),
+ ast::Adt::Struct(it) => ast::Item::Struct(it),
+ ast::Adt::Union(it) => ast::Item::Union(it),
+ }
+ }
+}
+
+impl ast::IfExpr {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ support::child(&self.syntax)
+ }
+}
+
+impl ast::MatchGuard {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ support::child(&self.syntax)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs
new file mode 100644
index 000000000..a687ba0b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs
@@ -0,0 +1,122 @@
+//! Defines a bunch of data-less enums for unary and binary operators.
+//!
+//! Types here don't know about AST, this allows re-using them for both AST and
+//! HIR.
+use std::fmt;
+
/// Range operators used in range expressions and patterns.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum RangeOp {
    /// `..`
    Exclusive,
    /// `..=`
    Inclusive,
}
+
/// Unary (prefix) operators.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum UnaryOp {
    /// `*`
    Deref,
    /// `!`
    Not,
    /// `-`
    Neg,
}
+
/// Binary operators, grouped by category.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum BinaryOp {
    /// `&&` / `||`
    LogicOp(LogicOp),
    /// Arithmetic and bitwise operators (`+`, `<<`, `&`, ...)
    ArithOp(ArithOp),
    /// Comparison operators (`==`, `<`, ...)
    CmpOp(CmpOp),
    /// Plain `=` when `op` is `None`; compound assignment (e.g. `+=`)
    /// when `op` is `Some`.
    Assignment { op: Option<ArithOp> },
}
+
/// Lazy boolean operators.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum LogicOp {
    /// `&&`
    And,
    /// `||`
    Or,
}
+
/// Comparison operators.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum CmpOp {
    /// `==` when `negated` is `false`, `!=` when `true`.
    Eq { negated: bool },
    /// `<`/`>` when `strict` is `true`, `<=`/`>=` when `false`.
    Ord { ordering: Ordering, strict: bool },
}
+
/// Direction of an ordering comparison; see [`CmpOp::Ord`].
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Ordering {
    /// `<` / `<=`
    Less,
    /// `>` / `>=`
    Greater,
}
+
/// Arithmetic and bitwise binary operators.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum ArithOp {
    /// `+`
    Add,
    /// `*`
    Mul,
    /// `-`
    Sub,
    /// `/`
    Div,
    /// `%`
    Rem,
    /// `<<`
    Shl,
    /// `>>`
    Shr,
    /// `^`
    BitXor,
    /// `|`
    BitOr,
    /// `&`
    BitAnd,
}
+
+impl fmt::Display for LogicOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ LogicOp::And => "&&",
+ LogicOp::Or => "||",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for ArithOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ ArithOp::Add => "+",
+ ArithOp::Mul => "*",
+ ArithOp::Sub => "-",
+ ArithOp::Div => "/",
+ ArithOp::Rem => "%",
+ ArithOp::Shl => "<<",
+ ArithOp::Shr => ">>",
+ ArithOp::BitXor => "^",
+ ArithOp::BitOr => "|",
+ ArithOp::BitAnd => "&",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for CmpOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ CmpOp::Eq { negated: false } => "==",
+ CmpOp::Eq { negated: true } => "!=",
+ CmpOp::Ord { ordering: Ordering::Less, strict: false } => "<=",
+ CmpOp::Ord { ordering: Ordering::Less, strict: true } => "<",
+ CmpOp::Ord { ordering: Ordering::Greater, strict: false } => ">=",
+ CmpOp::Ord { ordering: Ordering::Greater, strict: true } => ">",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for BinaryOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ BinaryOp::LogicOp(op) => fmt::Display::fmt(op, f),
+ BinaryOp::ArithOp(op) => fmt::Display::fmt(op, f),
+ BinaryOp::CmpOp(op) => fmt::Display::fmt(op, f),
+ BinaryOp::Assignment { op } => {
+ f.write_str("=")?;
+ if let Some(op) = op {
+ fmt::Display::fmt(op, f)?;
+ }
+ Ok(())
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
new file mode 100644
index 000000000..28976d837
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -0,0 +1,472 @@
+//! There are many AstNodes, but only a few tokens, so we hand-write them here.
+
+use std::borrow::Cow;
+
+use rustc_lexer::unescape::{unescape_byte, unescape_char, unescape_literal, Mode};
+
+use crate::{
+ ast::{self, AstToken},
+ TextRange, TextSize,
+};
+
impl ast::Comment {
    /// Classifies this comment by shape (line/block) and doc placement.
    pub fn kind(&self) -> CommentKind {
        CommentKind::from_text(self.text())
    }

    /// `true` for doc comments of any flavor (`///`, `//!`, `/**`, `/*!`).
    pub fn is_doc(&self) -> bool {
        self.kind().doc.is_some()
    }

    /// `true` for inner doc comments (`//!`, `/*!`).
    pub fn is_inner(&self) -> bool {
        self.kind().doc == Some(CommentPlacement::Inner)
    }

    /// `true` for outer doc comments (`///`, `/**`).
    pub fn is_outer(&self) -> bool {
        self.kind().doc == Some(CommentPlacement::Outer)
    }

    /// The prefix that actually starts this comment's text (e.g. `///`).
    /// Unlike `CommentKind::prefix`, this matches against the real text, so
    /// equivalent prefixes of the same kind are distinguished.
    pub fn prefix(&self) -> &'static str {
        let &(prefix, _kind) = CommentKind::BY_PREFIX
            .iter()
            .find(|&(prefix, kind)| self.kind() == *kind && self.text().starts_with(prefix))
            .unwrap();
        prefix
    }

    /// Returns the textual content of a doc comment node as a single string with prefix and suffix
    /// removed.
    pub fn doc_comment(&self) -> Option<&str> {
        let kind = self.kind();
        match kind {
            CommentKind { shape, doc: Some(_) } => {
                let prefix = kind.prefix();
                let text = &self.text()[prefix.len()..];
                // Block comments keep their terminator in `text`; strip it.
                // It may be absent in incomplete code, hence `unwrap_or`.
                let text = if shape == CommentShape::Block {
                    text.strip_suffix("*/").unwrap_or(text)
                } else {
                    text
                };
                Some(text)
            }
            _ => None,
        }
    }
}
+
/// Classification of a comment: its shape, and — for doc comments — whether
/// it documents the enclosing item (inner) or the following one (outer).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct CommentKind {
    // Line (`//...`) vs block (`/* ... */`).
    pub shape: CommentShape,
    // `None` for plain comments, `Some(placement)` for doc comments.
    pub doc: Option<CommentPlacement>,
}
+
/// Whether a comment is a `//` line comment or a `/* */` block comment.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum CommentShape {
    Line,
    Block,
}
+
+impl CommentShape {
+ pub fn is_line(self) -> bool {
+ self == CommentShape::Line
+ }
+
+ pub fn is_block(self) -> bool {
+ self == CommentShape::Block
+ }
+}
+
/// Placement of a doc comment: `//!`/`/*!` are inner (document the
/// enclosing item), `///`/`/**` are outer (document the following item).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum CommentPlacement {
    Inner,
    Outer,
}
+
impl CommentKind {
    // Prefix classification table. Order matters: `from_text` returns the
    // first match, so longer/more specific prefixes (`/**/`, `////`, `///`)
    // must precede their shorter counterparts (`//`, `/*`).
    const BY_PREFIX: [(&'static str, CommentKind); 9] = [
        ("/**/", CommentKind { shape: CommentShape::Block, doc: None }),
        ("/***", CommentKind { shape: CommentShape::Block, doc: None }),
        ("////", CommentKind { shape: CommentShape::Line, doc: None }),
        ("///", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Outer) }),
        ("//!", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Inner) }),
        ("/**", CommentKind { shape: CommentShape::Block, doc: Some(CommentPlacement::Outer) }),
        ("/*!", CommentKind { shape: CommentShape::Block, doc: Some(CommentPlacement::Inner) }),
        ("//", CommentKind { shape: CommentShape::Line, doc: None }),
        ("/*", CommentKind { shape: CommentShape::Block, doc: None }),
    ];

    /// Classifies comment text by its prefix.
    ///
    /// Panics when `text` starts with no known prefix; callers pass the text
    /// of lexed comment tokens, which always does.
    pub(crate) fn from_text(text: &str) -> CommentKind {
        let &(_prefix, kind) = CommentKind::BY_PREFIX
            .iter()
            .find(|&(prefix, _kind)| text.starts_with(prefix))
            .unwrap();
        kind
    }

    /// The canonical prefix for this kind (e.g. `//` for a plain line
    /// comment). Scans the table in reverse so the shortest matching prefix
    /// wins.
    pub fn prefix(&self) -> &'static str {
        let &(prefix, _) =
            CommentKind::BY_PREFIX.iter().rev().find(|(_, kind)| kind == self).unwrap();
        prefix
    }
}
+
impl ast::Whitespace {
    /// NOTE(review): despite the name, this returns `true` only when the
    /// token contains at least *two* newlines, i.e. when it includes a blank
    /// line — confirm against callers before changing or renaming.
    pub fn spans_multiple_lines(&self) -> bool {
        let text = self.text();
        text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
    }
}
+
/// Ranges of a string-like literal's quote characters and of the text
/// between them.
pub struct QuoteOffsets {
    // (open quote incl. any prefix, close quote incl. any suffix).
    pub quotes: (TextRange, TextRange),
    // Text strictly between the quotes.
    pub contents: TextRange,
}
+
impl QuoteOffsets {
    /// Locates the first and last `"` in `literal` and derives the
    /// quote/contents ranges; `None` unless two distinct quotes exist.
    fn new(literal: &str) -> Option<QuoteOffsets> {
        let left_quote = literal.find('"')?;
        let right_quote = literal.rfind('"')?;
        if left_quote == right_quote {
            // `literal` only contains one quote
            return None;
        }

        let start = TextSize::from(0);
        // The "open quote" range runs from the token start through the first
        // `"`, so raw-string prefixes like `r#` are included in it.
        let left_quote = TextSize::try_from(left_quote).unwrap() + TextSize::of('"');
        let right_quote = TextSize::try_from(right_quote).unwrap();
        let end = TextSize::of(literal);

        let res = QuoteOffsets {
            quotes: (TextRange::new(start, left_quote), TextRange::new(right_quote, end)),
            contents: TextRange::new(left_quote, right_quote),
        };
        Some(res)
    }
}
+
/// Shared helpers for string-like tokens (`ast::String`, `ast::ByteString`).
pub trait IsString: AstToken {
    /// Quote/contents offsets in absolute (file) coordinates.
    fn quote_offsets(&self) -> Option<QuoteOffsets> {
        let text = self.text();
        let offsets = QuoteOffsets::new(text)?;
        // Shift token-relative offsets by the token's position in the file.
        let o = self.syntax().text_range().start();
        let offsets = QuoteOffsets {
            quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
            contents: offsets.contents + o,
        };
        Some(offsets)
    }
    fn text_range_between_quotes(&self) -> Option<TextRange> {
        self.quote_offsets().map(|it| it.contents)
    }
    fn open_quote_text_range(&self) -> Option<TextRange> {
        self.quote_offsets().map(|it| it.quotes.0)
    }
    fn close_quote_text_range(&self) -> Option<TextRange> {
        self.quote_offsets().map(|it| it.quotes.1)
    }
    /// Invokes `cb` for each unescaped char (or escape error) in the
    /// contents; ranges are relative to the token start.
    fn escaped_char_ranges(
        &self,
        cb: &mut dyn FnMut(TextRange, Result<char, rustc_lexer::unescape::EscapeError>),
    ) {
        let text_range_no_quotes = match self.text_range_between_quotes() {
            Some(it) => it,
            None => return,
        };

        let start = self.syntax().text_range().start();
        let text = &self.text()[text_range_no_quotes - start];
        let offset = text_range_no_quotes.start() - start;

        unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
            let text_range =
                TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
            cb(text_range + offset, unescaped_char);
        });
    }
}
+
// Give `ast::String` the generic quoted-string helpers.
impl IsString for ast::String {}
+
impl ast::String {
    /// `true` for raw string literals (`r"..."`, `r#"..."#`).
    pub fn is_raw(&self) -> bool {
        self.text().starts_with('r')
    }

    /// Maps a range relative to the string's contents back to a range
    /// relative to the token. Panics if `range` is out of bounds.
    pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
        let contents_range = self.text_range_between_quotes()?;
        assert!(TextRange::up_to(contents_range.len()).contains_range(range));
        Some(range + contents_range.start())
    }

    /// The unescaped value of the literal; `None` if it contains invalid
    /// escapes. Borrows the token text whenever no unescaping is needed.
    pub fn value(&self) -> Option<Cow<'_, str>> {
        if self.is_raw() {
            // Raw strings contain no escapes: slice out the contents.
            let text = self.text();
            let text =
                &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
            return Some(Cow::Borrowed(text));
        }

        let text = self.text();
        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];

        // `buf.capacity() == 0` doubles as an "everything matched verbatim
        // so far" flag: the buffer stays unallocated until the first escape
        // actually changes a char, at which point the verbatim prefix is
        // copied in and building switches to the owned string.
        let mut buf = String::new();
        let mut text_iter = text.chars();
        let mut has_error = false;
        unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
            unescaped_char,
            buf.capacity() == 0,
        ) {
            (Ok(c), false) => buf.push(c),
            (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
            (Ok(c), true) => {
                buf.reserve_exact(text.len());
                buf.push_str(&text[..char_range.start]);
                buf.push(c);
            }
            (Err(_), _) => has_error = true,
        });

        match (has_error, buf.capacity() == 0) {
            (true, _) => None,
            (false, true) => Some(Cow::Borrowed(text)),
            (false, false) => Some(Cow::Owned(buf)),
        }
    }
}
+
// Give `ast::ByteString` the generic quoted-string helpers.
impl IsString for ast::ByteString {}
+
impl ast::ByteString {
    /// `true` for raw byte strings (`br"..."`).
    pub fn is_raw(&self) -> bool {
        self.text().starts_with("br")
    }

    /// The unescaped bytes of the literal; `None` on invalid escapes.
    /// Borrows the token text whenever no unescaping is needed.
    pub fn value(&self) -> Option<Cow<'_, [u8]>> {
        if self.is_raw() {
            // Raw byte strings contain no escapes: slice out the contents.
            let text = self.text();
            let text =
                &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
            return Some(Cow::Borrowed(text.as_bytes()));
        }

        let text = self.text();
        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];

        // Same lazy-allocation trick as `ast::String::value`: capacity 0
        // means "verbatim so far". NOTE(review): the `c as u8` casts assume
        // `Mode::ByteStr` only yields byte-valued chars — per the
        // `rustc_lexer::unescape` contract.
        let mut buf: Vec<u8> = Vec::new();
        let mut text_iter = text.chars();
        let mut has_error = false;
        unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match (
            unescaped_char,
            buf.capacity() == 0,
        ) {
            (Ok(c), false) => buf.push(c as u8),
            (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
            (Ok(c), true) => {
                buf.reserve_exact(text.len());
                buf.extend_from_slice(text[..char_range.start].as_bytes());
                buf.push(c as u8);
            }
            (Err(_), _) => has_error = true,
        });

        match (has_error, buf.capacity() == 0) {
            (true, _) => None,
            (false, true) => Some(Cow::Borrowed(text.as_bytes())),
            (false, false) => Some(Cow::Owned(buf)),
        }
    }
}
+
impl ast::IntNumber {
    /// Determines the base from the `0b`/`0o`/`0x` literal prefix.
    pub fn radix(&self) -> Radix {
        match self.text().get(..2).unwrap_or_default() {
            "0b" => Radix::Binary,
            "0o" => Radix::Octal,
            "0x" => Radix::Hexadecimal,
            _ => Radix::Decimal,
        }
    }

    /// Splits the literal into `(prefix, digits, suffix)`,
    /// e.g. `0xffu32` -> `("0x", "ff", "u32")`.
    pub fn split_into_parts(&self) -> (&str, &str, &str) {
        let radix = self.radix();
        let (prefix, mut text) = self.text().split_at(radix.prefix_len());

        // In hex literals `a`-`f` are digits, so a suffix can only start at
        // `g`..`z`; in other radices any letter starts the suffix.
        let is_suffix_start: fn(&(usize, char)) -> bool = match radix {
            Radix::Hexadecimal => |(_, c)| matches!(c, 'g'..='z' | 'G'..='Z'),
            _ => |(_, c)| c.is_ascii_alphabetic(),
        };

        let mut suffix = "";
        if let Some((suffix_start, _)) = text.char_indices().find(is_suffix_start) {
            let (text2, suffix2) = text.split_at(suffix_start);
            text = text2;
            suffix = suffix2;
        };

        (prefix, text, suffix)
    }

    /// Parses the numeric value in the literal's radix, ignoring `_`
    /// separators; `None` on overflow or malformed digits.
    pub fn value(&self) -> Option<u128> {
        let (_, text, _) = self.split_into_parts();
        let value = u128::from_str_radix(&text.replace('_', ""), self.radix() as u32).ok()?;
        Some(value)
    }

    /// The type suffix (`u32`, `i64`, ...) if present.
    pub fn suffix(&self) -> Option<&str> {
        let (_, _, suffix) = self.split_into_parts();
        if suffix.is_empty() {
            None
        } else {
            Some(suffix)
        }
    }

    /// Interprets the digits as a float. NOTE(review): meaningful only for
    /// decimal, int-shaped literals in a float position — confirm callers
    /// never pass `0x`/`0o`/`0b` literals here.
    pub fn float_value(&self) -> Option<f64> {
        let (_, text, _) = self.split_into_parts();
        text.parse::<f64>().ok()
    }
}
+
impl ast::FloatNumber {
    /// Splits the literal into `(number, suffix)`,
    /// e.g. `1.0f32` -> `("1.0", "f32")`.
    pub fn split_into_parts(&self) -> (&str, &str) {
        let text = self.text();
        let mut float_text = self.text();
        let mut suffix = "";
        let mut indices = text.char_indices();
        // The first letter may be the exponent marker (`1e10`); then the
        // suffix, if any, starts at the *next* letter (`1e10f64`).
        if let Some((mut suffix_start, c)) = indices.by_ref().find(|(_, c)| c.is_ascii_alphabetic())
        {
            if c == 'e' || c == 'E' {
                if let Some(suffix_start_tuple) = indices.find(|(_, c)| c.is_ascii_alphabetic()) {
                    suffix_start = suffix_start_tuple.0;

                    float_text = &text[..suffix_start];
                    suffix = &text[suffix_start..];
                }
            } else {
                float_text = &text[..suffix_start];
                suffix = &text[suffix_start..];
            }
        }

        (float_text, suffix)
    }

    /// The type suffix (`f32`/`f64`) if present.
    pub fn suffix(&self) -> Option<&str> {
        let (_, suffix) = self.split_into_parts();
        if suffix.is_empty() {
            None
        } else {
            Some(suffix)
        }
    }

    /// Parses the numeric part (suffix excluded); `None` when the number
    /// itself is malformed (e.g. a dangling exponent like `123.0e`).
    pub fn value(&self) -> Option<f64> {
        let (text, _) = self.split_into_parts();
        text.parse::<f64>().ok()
    }
}
+
/// Integer-literal radix; each variant's discriminant is the base itself,
/// so `radix as u32` can feed `u128::from_str_radix` directly.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Radix {
    Binary = 2,
    Octal = 8,
    Decimal = 10,
    Hexadecimal = 16,
}

impl Radix {
    /// Every radix, in ascending base order.
    pub const ALL: &'static [Radix] =
        &[Radix::Binary, Radix::Octal, Radix::Decimal, Radix::Hexadecimal];

    /// Length of the prefix introducing this radix in a literal (`0b`,
    /// `0o`, `0x` are two chars; decimal has no prefix).
    const fn prefix_len(self) -> usize {
        if matches!(self, Self::Decimal) {
            0
        } else {
            2
        }
    }
}
+
#[cfg(test)]
mod tests {
    use crate::ast::{self, make, FloatNumber, IntNumber};

    // Checks `FloatNumber::suffix` for a literal built from text.
    fn check_float_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
        assert_eq!(FloatNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
    }

    // Checks `IntNumber::suffix` for a literal built from text.
    fn check_int_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
        assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
    }

    #[test]
    fn test_float_number_suffix() {
        check_float_suffix("123.0", None);
        check_float_suffix("123f32", "f32");
        check_float_suffix("123.0e", None);
        check_float_suffix("123.0e4", None);
        check_float_suffix("123.0ef32", "f32");
        check_float_suffix("123.0E4f32", "f32");
        check_float_suffix("1_2_3.0_f32", "f32");
    }

    #[test]
    fn test_int_number_suffix() {
        check_int_suffix("123", None);
        check_int_suffix("123i32", "i32");
        check_int_suffix("1_0_1_l_o_l", "l_o_l");
        check_int_suffix("0b11", None);
        check_int_suffix("0o11", None);
        check_int_suffix("0xff", None);
        check_int_suffix("0b11u32", "u32");
        check_int_suffix("0o11u32", "u32");
        check_int_suffix("0xffu32", "u32");
    }

    // Checks `ast::String::value` after wrapping `lit` in quotes.
    fn check_string_value<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
        assert_eq!(
            ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) }
                .value()
                .as_deref(),
            expected.into()
        );
    }

    #[test]
    fn test_string_escape() {
        check_string_value(r"foobar", "foobar");
        check_string_value(r"\foobar", None);
        check_string_value(r"\nfoobar", "\nfoobar");
        check_string_value(r"C:\\Windows\\System32\\", "C:\\Windows\\System32\\");
    }
}
+
impl ast::Char {
    /// The unescaped character value; `None` for an invalid literal.
    pub fn value(&self) -> Option<char> {
        let mut text = self.text();
        // The opening quote is mandatory; a missing closing quote is
        // tolerated so incomplete code still yields a value.
        if text.starts_with('\'') {
            text = &text[1..];
        } else {
            return None;
        }
        if text.ends_with('\'') {
            text = &text[0..text.len() - 1];
        }

        unescape_char(text).ok()
    }
}
+
impl ast::Byte {
    /// The unescaped byte value; `None` for an invalid literal.
    pub fn value(&self) -> Option<u8> {
        let mut text = self.text();
        // The `b'` opener is mandatory; a missing closing quote is
        // tolerated so incomplete code still yields a value.
        if text.starts_with("b\'") {
            text = &text[2..];
        } else {
            return None;
        }
        if text.ends_with('\'') {
            text = &text[0..text.len() - 1];
        }

        unescape_byte(text).ok()
    }
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
new file mode 100644
index 000000000..aa2b7ed5c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -0,0 +1,136 @@
+//! Various traits that are implemented by ast nodes.
+//!
+//! The implementations are usually trivial, and live in generated.rs
+use itertools::Either;
+
+use crate::{
+ ast::{self, support, AstChildren, AstNode, AstToken},
+ syntax_node::SyntaxElementChildren,
+ SyntaxElement, SyntaxToken, T,
+};
+
/// Nodes that introduce a name (functions, structs, modules, ...).
pub trait HasName: AstNode {
    fn name(&self) -> Option<ast::Name> {
        support::child(self.syntax())
    }
}
+
/// Nodes that can carry a visibility modifier (`pub`, `pub(crate)`, ...).
pub trait HasVisibility: AstNode {
    fn visibility(&self) -> Option<ast::Visibility> {
        support::child(self.syntax())
    }
}
+
/// Loop-like expressions (`loop`, `while`, `for`) with a body and an
/// optional label.
pub trait HasLoopBody: AstNode {
    fn loop_body(&self) -> Option<ast::BlockExpr> {
        support::child(self.syntax())
    }

    fn label(&self) -> Option<ast::Label> {
        support::child(self.syntax())
    }
}
+
/// Call-like nodes with a parenthesized argument list.
pub trait HasArgList: AstNode {
    fn arg_list(&self) -> Option<ast::ArgList> {
        support::child(self.syntax())
    }
}
+
/// Nodes that contain a sequence of items (source files, modules, ...).
pub trait HasModuleItem: AstNode {
    fn items(&self) -> AstChildren<ast::Item> {
        support::children(self.syntax())
    }
}
+
/// Nodes that can declare generic parameters and a `where` clause.
pub trait HasGenericParams: AstNode {
    fn generic_param_list(&self) -> Option<ast::GenericParamList> {
        support::child(self.syntax())
    }

    fn where_clause(&self) -> Option<ast::WhereClause> {
        support::child(self.syntax())
    }
}
+
/// Nodes that can carry a `: Bound + ...` list.
pub trait HasTypeBounds: AstNode {
    fn type_bound_list(&self) -> Option<ast::TypeBoundList> {
        support::child(self.syntax())
    }

    fn colon_token(&self) -> Option<SyntaxToken> {
        support::token(self.syntax(), T![:])
    }
}
+
/// Nodes that can carry `#[...]` attributes.
pub trait HasAttrs: AstNode {
    fn attrs(&self) -> AstChildren<ast::Attr> {
        support::children(self.syntax())
    }
    /// `true` if any attribute is a bare word equal to `atom`
    /// (e.g. `#[test]`).
    fn has_atom_attr(&self, atom: &str) -> bool {
        self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
    }
}
+
/// Nodes that may be documented; a refinement of `HasAttrs`, since doc
/// comments and `#[doc]` attributes are interchangeable.
pub trait HasDocComments: HasAttrs {
    fn doc_comments(&self) -> DocCommentIter {
        DocCommentIter { iter: self.syntax().children_with_tokens() }
    }
    /// Attributes and doc comments interleaved in source order.
    fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
        AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
    }
}
+
impl DocCommentIter {
    /// Iterates the doc comments among `syntax_node`'s direct children.
    pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> DocCommentIter {
        DocCommentIter { iter: syntax_node.children_with_tokens() }
    }

    #[cfg(test)]
    /// Joins the text of all doc comments with `\n`; `None` when there are
    /// no doc comments.
    pub fn doc_comment_text(self) -> Option<String> {
        let docs = itertools::Itertools::join(
            &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
            "\n",
        );
        if docs.is_empty() {
            None
        } else {
            Some(docs)
        }
    }
}
+
/// Iterator over the doc-comment tokens among a node's direct children.
pub struct DocCommentIter {
    iter: SyntaxElementChildren,
}
+
+impl Iterator for DocCommentIter {
+ type Item = ast::Comment;
+ fn next(&mut self) -> Option<ast::Comment> {
+ self.iter.by_ref().find_map(|el| {
+ el.into_token().and_then(ast::Comment::cast).filter(ast::Comment::is_doc)
+ })
+ }
+}
+
/// Iterator over a node's direct attributes and doc comments, in source
/// order.
pub struct AttrDocCommentIter {
    iter: SyntaxElementChildren,
}
+
impl AttrDocCommentIter {
    /// Iterates attributes and doc comments among `syntax_node`'s children.
    pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> AttrDocCommentIter {
        AttrDocCommentIter { iter: syntax_node.children_with_tokens() }
    }
}
+
impl Iterator for AttrDocCommentIter {
    type Item = Either<ast::Attr, ast::Comment>;
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.by_ref().find_map(|el| match el {
            // Attributes are child *nodes*, comments are child *tokens*;
            // everything else is skipped.
            SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
            SyntaxElement::Token(tok) => {
                ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
            }
        })
    }
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
new file mode 100644
index 000000000..256999fe0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
@@ -0,0 +1,75 @@
+//! Some infrastructure for fuzzy testing.
+//!
//! We don't normally run fuzzing, so this is hopelessly bitrotten :(
+
+use std::{
+ convert::TryInto,
+ str::{self, FromStr},
+};
+
+use text_edit::Indel;
+
+use crate::{validation, AstNode, SourceFile, TextRange};
+
// Asserts structural invariants that must hold for any parse tree.
fn check_file_invariants(file: &SourceFile) {
    let root = file.syntax();
    validation::validate_block_structure(root);
}
+
/// Fuzz entry point: parsing arbitrary text must never panic or produce a
/// structurally invalid tree.
pub fn check_parser(text: &str) {
    let file = SourceFile::parse(text);
    check_file_invariants(&file.tree());
}
+
/// A reparse check: apply `edit` to `text` and verify that incremental
/// reparsing matches a from-scratch parse of `edited_text`.
#[derive(Debug, Clone)]
pub struct CheckReparse {
    text: String,
    edit: Indel,
    edited_text: String,
}
+
impl CheckReparse {
    /// Decodes fuzz input. Line format: delete offset (line 1), delete
    /// length (line 2), inserted text (line 3); the remaining lines are the
    /// source, which gets wrapped in a `fn main` so it forms a plausible
    /// file.
    pub fn from_data(data: &[u8]) -> Option<Self> {
        const PREFIX: &str = "fn main(){\n\t";
        const SUFFIX: &str = "\n}";

        let data = str::from_utf8(data).ok()?;
        let mut lines = data.lines();
        let delete_start = usize::from_str(lines.next()?).ok()? + PREFIX.len();
        let delete_len = usize::from_str(lines.next()?).ok()?;
        let insert = lines.next()?.to_string();
        let text = lines.collect::<Vec<_>>().join("\n");
        let text = format!("{}{}{}", PREFIX, text, SUFFIX);
        text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
        let delete =
            TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
        let edited_text =
            format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
        let edit = Indel { insert, delete };
        Some(CheckReparse { text, edit, edited_text })
    }

    /// Runs the check; panics on any divergence between the incremental and
    /// the full reparse.
    pub fn run(&self) {
        let parse = SourceFile::parse(&self.text);
        let new_parse = parse.reparse(&self.edit);
        check_file_invariants(&new_parse.tree());
        assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
        let full_reparse = SourceFile::parse(&self.edited_text);
        for (a, b) in
            new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
        {
            // Compare cheap (kind, range) pairs first; only render the full
            // trees when a mismatch is actually found.
            if (a.kind(), a.text_range()) != (b.kind(), b.text_range()) {
                eprint!("original:\n{:#?}", parse.tree().syntax());
                eprint!("reparsed:\n{:#?}", new_parse.tree().syntax());
                eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax());
                assert_eq!(
                    format!("{:?}", a),
                    format!("{:?}", b),
                    "different syntax tree produced by the full reparse"
                );
            }
        }
        // FIXME
        // assert_eq!(new_file.errors(), full_reparse.errors());
    }
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/hacks.rs b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
new file mode 100644
index 000000000..a047f61fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
@@ -0,0 +1,15 @@
//! Things which exist to solve practical issues, but which shouldn't exist.
+//!
+//! Please avoid adding new usages of the functions in this module
+
+use crate::{ast, AstNode};
+
/// Parses `s` as an expression by wrapping it in `const _: () = ...;`.
///
/// Returns `None` when the text does not round-trip exactly, i.e. it is not
/// a complete expression on its own.
pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
    let s = s.trim();
    let file = ast::SourceFile::parse(&format!("const _: () = {};", s));
    let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
    // Reject partial parses: the found expression must cover `s` exactly.
    if expr.syntax().text() != s {
        return None;
    }
    Some(expr)
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
new file mode 100644
index 000000000..7fa354c0c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -0,0 +1,358 @@
+//! Syntax Tree library used throughout the rust analyzer.
+//!
+//! Properties:
+//! - easy and fast incremental re-parsing
+//! - graceful handling of errors
+//! - full-fidelity representation (*any* text can be precisely represented as
+//! a syntax tree)
+//!
+//! For more information, see the [RFC]. Current implementation is inspired by
+//! the [Swift] one.
+//!
+//! The most interesting modules here are `syntax_node` (which defines concrete
+//! syntax tree) and `ast` (which defines abstract syntax tree on top of the
//! CST). The actual parser lives in a separate `parser` crate, though the
+//! lexer lives in this crate.
+//!
+//! See `api_walkthrough` test in this file for a quick API tour!
+//!
+//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
+//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod syntax_node;
+mod syntax_error;
+mod parsing;
+mod validation;
+mod ptr;
+mod token_text;
+#[cfg(test)]
+mod tests;
+
+pub mod algo;
+pub mod ast;
+#[doc(hidden)]
+pub mod fuzz;
+pub mod utils;
+pub mod ted;
+pub mod hacks;
+
+use std::{marker::PhantomData, sync::Arc};
+
+use stdx::format_to;
+use text_edit::Indel;
+
+pub use crate::{
+ ast::{AstNode, AstToken},
+ ptr::{AstPtr, SyntaxNodePtr},
+ syntax_error::SyntaxError,
+ syntax_node::{
+ PreorderWithTokens, RustLanguage, SyntaxElement, SyntaxElementChildren, SyntaxNode,
+ SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder,
+ },
+ token_text::TokenText,
+};
+pub use parser::{SyntaxKind, T};
+pub use rowan::{
+ api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize,
+ TokenAtOffset, WalkEvent,
+};
+pub use smol_str::SmolStr;
+
/// `Parse` is the result of the parsing: a syntax tree and a collection of
/// errors.
///
/// Note that we always produce a syntax tree, even for completely invalid
/// files.
#[derive(Debug, PartialEq, Eq)]
pub struct Parse<T> {
    green: GreenNode,
    errors: Arc<Vec<SyntaxError>>,
    // Ties the `T` parameter to the value without storing a `T`; the
    // `fn() -> T` form keeps `Parse` independent of `T`'s auto traits.
    _ty: PhantomData<fn() -> T>,
}
+
// Manual impl: deriving `Clone` would add an unnecessary `T: Clone` bound,
// even though no `T` is actually stored.
impl<T> Clone for Parse<T> {
    fn clone(&self) -> Parse<T> {
        Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData }
    }
}
+
impl<T> Parse<T> {
    fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
        Parse { green, errors: Arc::new(errors), _ty: PhantomData }
    }

    /// A fresh root node for the tree. Cheap: trees are immutable and all
    /// roots share the same green node.
    pub fn syntax_node(&self) -> SyntaxNode {
        SyntaxNode::new_root(self.green.clone())
    }
    /// All syntax errors collected during parsing (and validation).
    pub fn errors(&self) -> &[SyntaxError] {
        &*self.errors
    }
}
+
impl<T: AstNode> Parse<T> {
    /// Forgets the typed view, keeping only the raw syntax tree.
    pub fn to_syntax(self) -> Parse<SyntaxNode> {
        Parse { green: self.green, errors: self.errors, _ty: PhantomData }
    }

    /// The typed AST root. The `unwrap` can only fail if `T` does not match
    /// the root node, which would be a bug in whoever built this `Parse`.
    pub fn tree(&self) -> T {
        T::cast(self.syntax_node()).unwrap()
    }

    /// The tree if parsing was error-free, otherwise the errors.
    pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
        if self.errors.is_empty() {
            Ok(self.tree())
        } else {
            Err(self.errors)
        }
    }
}
+
impl Parse<SyntaxNode> {
    /// Re-types an untyped parse result; `None` if the root is not an `N`.
    pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
        if N::cast(self.syntax_node()).is_some() {
            Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
        } else {
            None
        }
    }
}
+
impl Parse<SourceFile> {
    /// Renders the tree plus all errors; used for tests and debugging.
    pub fn debug_dump(&self) -> String {
        let mut buf = format!("{:#?}", self.tree().syntax());
        for err in self.errors.iter() {
            format_to!(buf, "error {:?}: {}\n", err.range(), err);
        }
        buf
    }

    /// Applies a single edit, trying the incremental strategy first and
    /// falling back to a full reparse.
    pub fn reparse(&self, indel: &Indel) -> Parse<SourceFile> {
        self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel))
    }

    fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
        // FIXME: validation errors are not handled here
        parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
            |(green_node, errors, _reparsed_range)| Parse {
                green: green_node,
                errors: Arc::new(errors),
                _ty: PhantomData,
            },
        )
    }

    // Re-parse the whole edited text from scratch.
    fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
        let mut text = self.tree().syntax().text().to_string();
        indel.apply(&mut text);
        SourceFile::parse(&text)
    }
}
+
+/// `SourceFile` represents a parse tree for a single Rust file.
+pub use crate::ast::SourceFile;
+
impl SourceFile {
    /// Parses the text of a whole file, layering validation errors on top
    /// of the parser's own errors.
    pub fn parse(text: &str) -> Parse<SourceFile> {
        let (green, mut errors) = parsing::parse_text(text);
        let root = SyntaxNode::new_root(green.clone());

        errors.extend(validation::validate(&root));

        assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
        Parse { green, errors: Arc::new(errors), _ty: PhantomData }
    }
}
+
+/// Matches a `SyntaxNode` against an `ast` type.
+///
+/// # Example:
+///
+/// ```ignore
+/// match_ast! {
+/// match node {
+/// ast::CallExpr(it) => { ... },
+/// ast::MethodCallExpr(it) => { ... },
+/// ast::MacroCall(it) => { ... },
+/// _ => None,
+/// }
+/// }
+/// ```
#[macro_export]
macro_rules! match_ast {
    // Normalize the bare `match node { .. }` form to the parenthesized one.
    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };

    // Try each `ast::Type(binding) => expr` arm in order via `cast`,
    // falling through to the mandatory `_` arm when nothing matches.
    (match ($node:expr) {
        $( $( $path:ident )::+ ($it:pat) => $res:expr, )*
        _ => $catch_all:expr $(,)?
    }) => {{
        $( if let Some($it) = $($path::)+cast($node.clone()) { $res } else )*
        { $catch_all }
    }};
}
+
/// This test does not assert anything and instead just shows off the crate's
/// API.
#[test]
fn api_walkthrough() {
    use ast::{HasModuleItem, HasName};

    let source_code = "
        fn foo() {
            1 + 1
        }
    ";
    // `SourceFile` is the main entry point.
    //
    // The `parse` method returns a `Parse` -- a pair of syntax tree and a list
    // of errors. That is, syntax tree is constructed even in presence of errors.
    let parse = SourceFile::parse(source_code);
    assert!(parse.errors().is_empty());

    // The `tree` method returns an owned syntax node of type `SourceFile`.
    // Owned nodes are cheap: inside, they are `Rc` handles to the underlying data.
    let file: SourceFile = parse.tree();

    // `SourceFile` is the root of the syntax tree. We can iterate file's items.
    // Let's fetch the `foo` function.
    let mut func = None;
    for item in file.items() {
        match item {
            ast::Item::Fn(f) => func = Some(f),
            _ => unreachable!(),
        }
    }
    let func: ast::Fn = func.unwrap();

    // Each AST node has a bunch of getters for children. All getters return
    // `Option`s though, to account for incomplete code. Some getters are common
    // for several kinds of node. In this case, a trait like `ast::HasName`
    // usually exists. By convention, all ast types should be used with `ast::`
    // qualifier.
    let name: Option<ast::Name> = func.name();
    let name = name.unwrap();
    assert_eq!(name.text(), "foo");

    // Let's get the `1 + 1` expression!
    let body: ast::BlockExpr = func.body().unwrap();
    let stmt_list: ast::StmtList = body.stmt_list().unwrap();
    let expr: ast::Expr = stmt_list.tail_expr().unwrap();

    // Enums are used to group related ast nodes together, and can be used for
    // matching. However, because there are no public fields, it's possible to
    // match only the top level enum: that is the price we pay for increased API
    // flexibility
    let bin_expr: &ast::BinExpr = match &expr {
        ast::Expr::BinExpr(e) => e,
        _ => unreachable!(),
    };

    // Besides the "typed" AST API, there's an untyped CST one as well.
    // To switch from AST to CST, call `.syntax()` method:
    let expr_syntax: &SyntaxNode = expr.syntax();

    // Note how `expr` and `bin_expr` are in fact the same node underneath:
    assert!(expr_syntax == bin_expr.syntax());

    // To go from CST to AST, `AstNode::cast` function is used:
    let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) {
        Some(e) => e,
        None => unreachable!(),
    };

    // The two properties each syntax node has is a `SyntaxKind`:
    assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);

    // And text range:
    assert_eq!(expr_syntax.text_range(), TextRange::new(32.into(), 37.into()));

    // You can get node's text as a `SyntaxText` object, which will traverse the
    // tree collecting token's text:
    let text: SyntaxText = expr_syntax.text();
    assert_eq!(text.to_string(), "1 + 1");

    // There's a bunch of traversal methods on `SyntaxNode`:
    assert_eq!(expr_syntax.parent().as_ref(), Some(stmt_list.syntax()));
    assert_eq!(stmt_list.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
    assert_eq!(
        expr_syntax.next_sibling_or_token().map(|it| it.kind()),
        Some(SyntaxKind::WHITESPACE)
    );

    // As well as some iterator helpers:
    let f = expr_syntax.ancestors().find_map(ast::Fn::cast);
    assert_eq!(f, Some(func));
    assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
    assert_eq!(
        expr_syntax.descendants_with_tokens().count(),
        8, // 5 tokens `1`, ` `, `+`, ` `, `1`
           // 2 child literal expressions: `1`, `1`
           // 1 the node itself: `1 + 1`
    );

    // There's also a `preorder` method with a more fine-grained iteration control:
    let mut buf = String::new();
    let mut indent = 0;
    for event in expr_syntax.preorder_with_tokens() {
        match event {
            WalkEvent::Enter(node) => {
                let text = match &node {
                    NodeOrToken::Node(it) => it.text().to_string(),
                    NodeOrToken::Token(it) => it.text().to_string(),
                };
                format_to!(buf, "{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
                indent += 2;
            }
            WalkEvent::Leave(_) => indent -= 2,
        }
    }
    assert_eq!(indent, 0);
    assert_eq!(
        buf.trim(),
        r#"
"1 + 1" BIN_EXPR
  "1" LITERAL
    "1" INT_NUMBER
  " " WHITESPACE
  "+" PLUS
  " " WHITESPACE
  "1" LITERAL
    "1" INT_NUMBER
"#
        .trim()
    );

    // To recursively process the tree, there are three approaches:
    // 1. explicitly call getter methods on AST nodes.
    // 2. use descendants and `AstNode::cast`.
    // 3. use descendants and `match_ast!`.
    //
    // Here's how the first one looks like:
    let exprs_cast: Vec<String> = file
        .syntax()
        .descendants()
        .filter_map(ast::Expr::cast)
        .map(|expr| expr.syntax().text().to_string())
        .collect();

    // An alternative is to use a macro.
    let mut exprs_visit = Vec::new();
    for node in file.syntax().descendants() {
        match_ast! {
            match node {
                ast::Expr(it) => {
                    let res = it.syntax().text().to_string();
                    exprs_visit.push(res);
                },
                _ => (),
            }
        }
    }
    assert_eq!(exprs_cast, exprs_visit);
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
new file mode 100644
index 000000000..047e670c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
@@ -0,0 +1,46 @@
+//! Lexing, bridging to parser (which does the actual parsing) and
+//! incremental reparsing.
+
+mod reparsing;
+
+use rowan::TextRange;
+
+use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
+
+pub(crate) use crate::parsing::reparsing::incremental_reparse;
+
+pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+ let lexed = parser::LexedStr::new(text);
+ let parser_input = lexed.to_input();
+ let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input);
+ let (node, errors, _eof) = build_tree(lexed, parser_output);
+ (node, errors)
+}
+
+pub(crate) fn build_tree(
+ lexed: parser::LexedStr<'_>,
+ parser_output: parser::Output,
+) -> (GreenNode, Vec<SyntaxError>, bool) {
+ let mut builder = SyntaxTreeBuilder::default();
+
+ let is_eof = lexed.intersperse_trivia(&parser_output, &mut |step| match step {
+ parser::StrStep::Token { kind, text } => builder.token(kind, text),
+ parser::StrStep::Enter { kind } => builder.start_node(kind),
+ parser::StrStep::Exit => builder.finish_node(),
+ parser::StrStep::Error { msg, pos } => {
+ builder.error(msg.to_string(), pos.try_into().unwrap())
+ }
+ });
+
+ let (node, mut errors) = builder.finish_raw();
+ for (i, err) in lexed.errors() {
+ let text_range = lexed.text_range(i);
+ let text_range = TextRange::new(
+ text_range.start.try_into().unwrap(),
+ text_range.end.try_into().unwrap(),
+ );
+ errors.push(SyntaxError::new(err, text_range))
+ }
+
+ (node, errors, is_eof)
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
new file mode 100644
index 000000000..701e6232d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
@@ -0,0 +1,441 @@
+//! Implementation of incremental re-parsing.
+//!
+//! We use two simple strategies for this:
+//! - if the edit modifies only a single token (like changing an identifier's
+//! letter), we replace only this token.
+//! - otherwise, we search for the nearest `{}` block which contains the edit
+//! and try to parse only this block.
+
+use parser::Reparser;
+use text_edit::Indel;
+
+use crate::{
+ parsing::build_tree,
+ syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
+ SyntaxError,
+ SyntaxKind::*,
+ TextRange, TextSize, T,
+};
+
+pub(crate) fn incremental_reparse(
+ node: &SyntaxNode,
+ edit: &Indel,
+ errors: Vec<SyntaxError>,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
+ return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+ }
+
+ if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
+ return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+ }
+ None
+}
+
+fn reparse_token(
+ root: &SyntaxNode,
+ edit: &Indel,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ let prev_token = root.covering_element(edit.delete).as_token()?.clone();
+ let prev_token_kind = prev_token.kind();
+ match prev_token_kind {
+ WHITESPACE | COMMENT | IDENT | STRING => {
+ if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT {
+ // removing a newline may extend the previous token
+ let deleted_range = edit.delete - prev_token.text_range().start();
+ if prev_token.text()[deleted_range].contains('\n') {
+ return None;
+ }
+ }
+
+ let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
+ let (new_token_kind, new_err) = parser::LexedStr::single_token(&new_text)?;
+
+ if new_token_kind != prev_token_kind
+ || (new_token_kind == IDENT && is_contextual_kw(&new_text))
+ {
+ return None;
+ }
+
+ // Check that edited token is not a part of the bigger token.
+ // E.g. if the user removed `ruh` from the source code `bruh"str"`, then
+ // `b` no longer remains an identifier, but becomes a part of byte string literal
+ if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) {
+ new_text.push(next_char);
+ let token_with_next_char = parser::LexedStr::single_token(&new_text);
+ if let Some((_kind, _error)) = token_with_next_char {
+ return None;
+ }
+ new_text.pop();
+ }
+
+ let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
+ let range = TextRange::up_to(TextSize::of(&new_text));
+ Some((
+ prev_token.replace_with(new_token),
+ new_err.into_iter().map(|msg| SyntaxError::new(msg, range)).collect(),
+ prev_token.text_range(),
+ ))
+ }
+ _ => None,
+ }
+}
+
+fn reparse_block(
+ root: &SyntaxNode,
+ edit: &Indel,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ let (node, reparser) = find_reparsable_node(root, edit.delete)?;
+ let text = get_text_after_edit(node.clone().into(), edit);
+
+ let lexed = parser::LexedStr::new(text.as_str());
+ let parser_input = lexed.to_input();
+ if !is_balanced(&lexed) {
+ return None;
+ }
+
+ let tree_traversal = reparser.parse(&parser_input);
+
+ let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
+
+ Some((node.replace_with(green), new_parser_errors, node.text_range()))
+}
+
+fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String {
+ let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone());
+
+ let mut text = match element {
+ NodeOrToken::Token(token) => token.text().to_string(),
+ NodeOrToken::Node(node) => node.text().to_string(),
+ };
+ edit.apply(&mut text);
+ text
+}
+
+fn is_contextual_kw(text: &str) -> bool {
+ matches!(text, "auto" | "default" | "union")
+}
+
+fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
+ let node = node.covering_element(range);
+
+ node.ancestors().find_map(|node| {
+ let first_child = node.first_child_or_token().map(|it| it.kind());
+ let parent = node.parent().map(|it| it.kind());
+ Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
+ })
+}
+
+fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool {
+ if lexed.is_empty() || lexed.kind(0) != T!['{'] || lexed.kind(lexed.len() - 1) != T!['}'] {
+ return false;
+ }
+ let mut balance = 0usize;
+ for i in 1..lexed.len() - 1 {
+ match lexed.kind(i) {
+ T!['{'] => balance += 1,
+ T!['}'] => {
+ balance = match balance.checked_sub(1) {
+ Some(b) => b,
+ None => return false,
+ }
+ }
+ _ => (),
+ }
+ }
+ balance == 0
+}
+
+fn merge_errors(
+ old_errors: Vec<SyntaxError>,
+ new_errors: Vec<SyntaxError>,
+ range_before_reparse: TextRange,
+ edit: &Indel,
+) -> Vec<SyntaxError> {
+ let mut res = Vec::new();
+
+ for old_err in old_errors {
+ let old_err_range = old_err.range();
+ if old_err_range.end() <= range_before_reparse.start() {
+ res.push(old_err);
+ } else if old_err_range.start() >= range_before_reparse.end() {
+ let inserted_len = TextSize::of(&edit.insert);
+ res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len()));
+ // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug)
+ }
+ }
+ res.extend(new_errors.into_iter().map(|new_err| {
+ // fighting borrow checker with a variable ;)
+ let offseted_range = new_err.range() + range_before_reparse.start();
+ new_err.with_range(offseted_range)
+ }));
+ res
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::{assert_eq_text, extract_range};
+
+ use super::*;
+ use crate::{AstNode, Parse, SourceFile};
+
+ fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
+ let (range, before) = extract_range(before);
+ let edit = Indel::replace(range, replace_with.to_owned());
+ let after = {
+ let mut after = before.clone();
+ edit.apply(&mut after);
+ after
+ };
+
+ let fully_reparsed = SourceFile::parse(&after);
+ let incrementally_reparsed: Parse<SourceFile> = {
+ let before = SourceFile::parse(&before);
+ let (green, new_errors, range) =
+ incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
+ assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
+ Parse::new(green, new_errors)
+ };
+
+ assert_eq_text!(
+ &format!("{:#?}", fully_reparsed.tree().syntax()),
+ &format!("{:#?}", incrementally_reparsed.tree().syntax()),
+ );
+ assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors());
+ }
+
+ #[test] // FIXME: some test here actually test token reparsing
+ fn reparse_block_tests() {
+ do_check(
+ r"
+fn foo() {
+ let x = foo + $0bar$0
+}
+",
+ "baz",
+ 3,
+ );
+ do_check(
+ r"
+fn foo() {
+ let x = foo$0 + bar$0
+}
+",
+ "baz",
+ 25,
+ );
+ do_check(
+ r"
+struct Foo {
+ f: foo$0$0
+}
+",
+ ",\n g: (),",
+ 14,
+ );
+ do_check(
+ r"
+fn foo {
+ let;
+ 1 + 1;
+ $092$0;
+}
+",
+ "62",
+ 31, // FIXME: reparse only int literal here
+ );
+ do_check(
+ r"
+mod foo {
+ fn $0$0
+}
+",
+ "bar",
+ 11,
+ );
+
+ do_check(
+ r"
+trait Foo {
+ type $0Foo$0;
+}
+",
+ "Output",
+ 3,
+ );
+ do_check(
+ r"
+impl IntoIterator<Item=i32> for Foo {
+ f$0$0
+}
+",
+ "n next(",
+ 9,
+ );
+ do_check(r"use a::b::{foo,$0,bar$0};", "baz", 10);
+ do_check(
+ r"
+pub enum A {
+ Foo$0$0
+}
+",
+ "\nBar;\n",
+ 11,
+ );
+ do_check(
+ r"
+foo!{a, b$0$0 d}
+",
+ ", c[3]",
+ 8,
+ );
+ do_check(
+ r"
+fn foo() {
+ vec![$0$0]
+}
+",
+ "123",
+ 14,
+ );
+ do_check(
+ r"
+extern {
+ fn$0;$0
+}
+",
+ " exit(code: c_int)",
+ 11,
+ );
+ }
+
+ #[test]
+ fn reparse_token_tests() {
+ do_check(
+ r"$0$0
+fn foo() -> i32 { 1 }
+",
+ "\n\n\n \n",
+ 1,
+ );
+ do_check(
+ r"
+fn foo() -> $0$0 {}
+",
+ " \n",
+ 2,
+ );
+ do_check(
+ r"
+fn $0foo$0() -> i32 { 1 }
+",
+ "bar",
+ 3,
+ );
+ do_check(
+ r"
+fn foo$0$0foo() { }
+",
+ "bar",
+ 6,
+ );
+ do_check(
+ r"
+fn foo /* $0$0 */ () {}
+",
+ "some comment",
+ 6,
+ );
+ do_check(
+ r"
+fn baz $0$0 () {}
+",
+ " \t\t\n\n",
+ 2,
+ );
+ do_check(
+ r"
+fn baz $0$0 () {}
+",
+ " \t\t\n\n",
+ 2,
+ );
+ do_check(
+ r"
+/// foo $0$0omment
+mod { }
+",
+ "c",
+ 14,
+ );
+ do_check(
+ r#"
+fn -> &str { "Hello$0$0" }
+"#,
+ ", world",
+ 7,
+ );
+ do_check(
+ r#"
+fn -> &str { // "Hello$0$0"
+"#,
+ ", world",
+ 10,
+ );
+ do_check(
+ r##"
+fn -> &str { r#"Hello$0$0"#
+"##,
+ ", world",
+ 10,
+ );
+ do_check(
+ r"
+#[derive($0Copy$0)]
+enum Foo {
+
+}
+",
+ "Clone",
+ 4,
+ );
+ }
+
+ #[test]
+ fn reparse_str_token_with_error_unchanged() {
+ do_check(r#""$0Unclosed$0 string literal"#, "Still unclosed", 24);
+ }
+
+ #[test]
+ fn reparse_str_token_with_error_fixed() {
+ do_check(r#""unterinated$0$0"#, "\"", 12);
+ }
+
+ #[test]
+ fn reparse_block_with_error_in_middle_unchanged() {
+ do_check(
+ r#"fn main() {
+ if {}
+ 32 + 4$0$0
+ return
+ if {}
+ }"#,
+ "23",
+ 105,
+ )
+ }
+
+ #[test]
+ fn reparse_block_with_error_in_middle_fixed() {
+ do_check(
+ r#"fn main() {
+ if {}
+ 32 + 4$0$0
+ return
+ if {}
+ }"#,
+ ";",
+ 105,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
new file mode 100644
index 000000000..a886972ff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
@@ -0,0 +1,104 @@
+//! In rust-analyzer, syntax trees are transient objects.
+//!
+//! That means that we create trees when we need them, and tear them down to
+//! save memory. In this architecture, hanging on to a particular syntax node
+//! for a long time is inadvisable, as that keeps the whole tree resident.
+//!
+//! Instead, we provide a [`SyntaxNodePtr`] type, which stores information about
+//! *location* of a particular syntax node in a tree. It's a small type which can
+//! be cheaply stored, and which can be resolved to a real [`SyntaxNode`] when
+//! necessary.
+
+use std::{
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+};
+
+use rowan::TextRange;
+
+use crate::{syntax_node::RustLanguage, AstNode, SyntaxNode};
+
+/// A "pointer" to a [`SyntaxNode`], via location in the source code.
+pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr<RustLanguage>;
+
+/// Like `SyntaxNodePtr`, but remembers the type of node.
+#[derive(Debug)]
+pub struct AstPtr<N: AstNode> {
+ raw: SyntaxNodePtr,
+ _ty: PhantomData<fn() -> N>,
+}
+
+impl<N: AstNode> Clone for AstPtr<N> {
+ fn clone(&self) -> AstPtr<N> {
+ AstPtr { raw: self.raw.clone(), _ty: PhantomData }
+ }
+}
+
+impl<N: AstNode> Eq for AstPtr<N> {}
+
+impl<N: AstNode> PartialEq for AstPtr<N> {
+ fn eq(&self, other: &AstPtr<N>) -> bool {
+ self.raw == other.raw
+ }
+}
+
+impl<N: AstNode> Hash for AstPtr<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.raw.hash(state);
+ }
+}
+
+impl<N: AstNode> AstPtr<N> {
+ pub fn new(node: &N) -> AstPtr<N> {
+ AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
+ }
+
+ pub fn to_node(&self, root: &SyntaxNode) -> N {
+ let syntax_node = self.raw.to_node(root);
+ N::cast(syntax_node).unwrap()
+ }
+
+ pub fn syntax_node_ptr(&self) -> SyntaxNodePtr {
+ self.raw.clone()
+ }
+
+ pub fn text_range(&self) -> TextRange {
+ self.raw.text_range()
+ }
+
+ pub fn cast<U: AstNode>(self) -> Option<AstPtr<U>> {
+ if !U::can_cast(self.raw.kind()) {
+ return None;
+ }
+ Some(AstPtr { raw: self.raw, _ty: PhantomData })
+ }
+
+ pub fn upcast<M: AstNode>(self) -> AstPtr<M>
+ where
+ N: Into<M>,
+ {
+ AstPtr { raw: self.raw, _ty: PhantomData }
+ }
+
+ /// Like `SyntaxNodePtr::cast` but the trait bounds work out.
+ pub fn try_from_raw(raw: SyntaxNodePtr) -> Option<AstPtr<N>> {
+ N::can_cast(raw.kind()).then(|| AstPtr { raw, _ty: PhantomData })
+ }
+}
+
+impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
+ fn from(ptr: AstPtr<N>) -> SyntaxNodePtr {
+ ptr.raw
+ }
+}
+
+#[test]
+fn test_local_syntax_ptr() {
+ use crate::{ast, AstNode, SourceFile};
+
+ let file = SourceFile::parse("struct Foo { f: u32, }").ok().unwrap();
+ let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap();
+ let ptr = SyntaxNodePtr::new(field.syntax());
+ let field_syntax = ptr.to_node(file.syntax());
+ assert_eq!(field.syntax(), &field_syntax);
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs
new file mode 100644
index 000000000..dc6130bd6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs
@@ -0,0 +1,44 @@
+//! See docs for `SyntaxError`.
+
+use std::fmt;
+
+use crate::{TextRange, TextSize};
+
+/// Represents the result of unsuccessful tokenization, parsing
+/// or tree validation.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxError(String, TextRange);
+
+// FIXME: there was an unused SyntaxErrorKind previously (before this enum was removed)
+// It was introduced in this PR: https://github.com/rust-lang/rust-analyzer/pull/846/files#diff-827da9b03b8f9faa1bade5cdd44d5dafR95
+// but, by mistake, it was never removed.
+//
+// So, we need to find a place where to stick validation for attributes in match clauses.
+// Code before refactor:
+// InvalidMatchInnerAttr => {
+// write!(f, "Inner attributes are only allowed directly after the opening brace of the match expression")
+// }
+
+impl SyntaxError {
+ pub fn new(message: impl Into<String>, range: TextRange) -> Self {
+ Self(message.into(), range)
+ }
+ pub fn new_at_offset(message: impl Into<String>, offset: TextSize) -> Self {
+ Self(message.into(), TextRange::empty(offset))
+ }
+
+ pub fn range(&self) -> TextRange {
+ self.1
+ }
+
+ pub fn with_range(mut self, range: TextRange) -> Self {
+ self.1 = range;
+ self
+ }
+}
+
+impl fmt::Display for SyntaxError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
new file mode 100644
index 000000000..a08c01597
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
@@ -0,0 +1,75 @@
+//! This module defines Concrete Syntax Tree (CST), used by rust-analyzer.
+//!
+//! The CST includes comments and whitespace, provides a single node type,
+//! `SyntaxNode`, and a basic traversal API (parent, children, siblings).
+//!
+//! The *real* implementation is in the (language-agnostic) `rowan` crate, this
+//! module just wraps its API.
+
+use rowan::{GreenNodeBuilder, Language};
+
+use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
+
+pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+ type Kind = SyntaxKind;
+
+ fn kind_from_raw(raw: rowan::SyntaxKind) -> SyntaxKind {
+ SyntaxKind::from(raw.0)
+ }
+
+ fn kind_to_raw(kind: SyntaxKind) -> rowan::SyntaxKind {
+ rowan::SyntaxKind(kind.into())
+ }
+}
+
+pub type SyntaxNode = rowan::SyntaxNode<RustLanguage>;
+pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
+pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
+pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
+pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
+pub type PreorderWithTokens = rowan::api::PreorderWithTokens<RustLanguage>;
+
+#[derive(Default)]
+pub struct SyntaxTreeBuilder {
+ errors: Vec<SyntaxError>,
+ inner: GreenNodeBuilder<'static>,
+}
+
+impl SyntaxTreeBuilder {
+ pub(crate) fn finish_raw(self) -> (GreenNode, Vec<SyntaxError>) {
+ let green = self.inner.finish();
+ (green, self.errors)
+ }
+
+ pub fn finish(self) -> Parse<SyntaxNode> {
+ let (green, errors) = self.finish_raw();
+ // Disable block validation, see https://github.com/rust-lang/rust-analyzer/pull/10357
+ if cfg!(debug_assertions) && false {
+ let node = SyntaxNode::new_root(green.clone());
+ crate::validation::validate_block_structure(&node);
+ }
+ Parse::new(green, errors)
+ }
+
+ pub fn token(&mut self, kind: SyntaxKind, text: &str) {
+ let kind = RustLanguage::kind_to_raw(kind);
+ self.inner.token(kind, text);
+ }
+
+ pub fn start_node(&mut self, kind: SyntaxKind) {
+ let kind = RustLanguage::kind_to_raw(kind);
+ self.inner.start_node(kind);
+ }
+
+ pub fn finish_node(&mut self) {
+ self.inner.finish_node();
+ }
+
+ pub fn error(&mut self, error: String, text_pos: TextSize) {
+ self.errors.push(SyntaxError::new_at_offset(error, text_pos));
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ted.rs b/src/tools/rust-analyzer/crates/syntax/src/ted.rs
new file mode 100644
index 000000000..a47b4b11c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ted.rs
@@ -0,0 +1,206 @@
+//! Primitive tree editor, ed for trees.
+//!
+//! The `_raw`-suffixed functions insert elements as is, unsuffixed versions fix
+//! up elements around the edges.
+use std::{mem, ops::RangeInclusive};
+
+use parser::T;
+
+use crate::{
+ ast::{self, edit::IndentLevel, make, AstNode},
+ SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
+};
+
+/// Utility trait to allow calling `ted` functions with references or owned
+/// nodes. Do not use outside of this module.
+pub trait Element {
+ fn syntax_element(self) -> SyntaxElement;
+}
+
+impl<E: Element + Clone> Element for &'_ E {
+ fn syntax_element(self) -> SyntaxElement {
+ self.clone().syntax_element()
+ }
+}
+impl Element for SyntaxElement {
+ fn syntax_element(self) -> SyntaxElement {
+ self
+ }
+}
+impl Element for SyntaxNode {
+ fn syntax_element(self) -> SyntaxElement {
+ self.into()
+ }
+}
+impl Element for SyntaxToken {
+ fn syntax_element(self) -> SyntaxElement {
+ self.into()
+ }
+}
+
+#[derive(Debug)]
+pub struct Position {
+ repr: PositionRepr,
+}
+
+#[derive(Debug)]
+enum PositionRepr {
+ FirstChild(SyntaxNode),
+ After(SyntaxElement),
+}
+
+impl Position {
+ pub fn after(elem: impl Element) -> Position {
+ let repr = PositionRepr::After(elem.syntax_element());
+ Position { repr }
+ }
+ pub fn before(elem: impl Element) -> Position {
+ let elem = elem.syntax_element();
+ let repr = match elem.prev_sibling_or_token() {
+ Some(it) => PositionRepr::After(it),
+ None => PositionRepr::FirstChild(elem.parent().unwrap()),
+ };
+ Position { repr }
+ }
+ pub fn first_child_of(node: &(impl Into<SyntaxNode> + Clone)) -> Position {
+ let repr = PositionRepr::FirstChild(node.clone().into());
+ Position { repr }
+ }
+ pub fn last_child_of(node: &(impl Into<SyntaxNode> + Clone)) -> Position {
+ let node = node.clone().into();
+ let repr = match node.last_child_or_token() {
+ Some(it) => PositionRepr::After(it),
+ None => PositionRepr::FirstChild(node),
+ };
+ Position { repr }
+ }
+}
+
+pub fn insert(position: Position, elem: impl Element) {
+ insert_all(position, vec![elem.syntax_element()]);
+}
+pub fn insert_raw(position: Position, elem: impl Element) {
+ insert_all_raw(position, vec![elem.syntax_element()]);
+}
+pub fn insert_all(position: Position, mut elements: Vec<SyntaxElement>) {
+ if let Some(first) = elements.first() {
+ if let Some(ws) = ws_before(&position, first) {
+ elements.insert(0, ws.into());
+ }
+ }
+ if let Some(last) = elements.last() {
+ if let Some(ws) = ws_after(&position, last) {
+ elements.push(ws.into());
+ }
+ }
+ insert_all_raw(position, elements);
+}
+pub fn insert_all_raw(position: Position, elements: Vec<SyntaxElement>) {
+ let (parent, index) = match position.repr {
+ PositionRepr::FirstChild(parent) => (parent, 0),
+ PositionRepr::After(child) => (child.parent().unwrap(), child.index() + 1),
+ };
+ parent.splice_children(index..index, elements);
+}
+
+pub fn remove(elem: impl Element) {
+ elem.syntax_element().detach();
+}
+pub fn remove_all(range: RangeInclusive<SyntaxElement>) {
+ replace_all(range, Vec::new());
+}
+pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
+ let mut it = range.into_iter();
+ if let Some(mut first) = it.next() {
+ match it.last() {
+ Some(mut last) => {
+ if first.index() > last.index() {
+ mem::swap(&mut first, &mut last);
+ }
+ remove_all(first..=last);
+ }
+ None => remove(first),
+ }
+ }
+}
+
+pub fn replace(old: impl Element, new: impl Element) {
+ replace_with_many(old, vec![new.syntax_element()]);
+}
+pub fn replace_with_many(old: impl Element, new: Vec<SyntaxElement>) {
+ let old = old.syntax_element();
+ replace_all(old.clone()..=old, new);
+}
+pub fn replace_all(range: RangeInclusive<SyntaxElement>, new: Vec<SyntaxElement>) {
+ let start = range.start().index();
+ let end = range.end().index();
+ let parent = range.start().parent().unwrap();
+ parent.splice_children(start..end + 1, new);
+}
+
+pub fn append_child(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
+ let position = Position::last_child_of(node);
+ insert(position, child);
+}
+pub fn append_child_raw(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
+ let position = Position::last_child_of(node);
+ insert_raw(position, child);
+}
+
+fn ws_before(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
+ let prev = match &position.repr {
+ PositionRepr::FirstChild(_) => return None,
+ PositionRepr::After(it) => it,
+ };
+
+ if prev.kind() == T!['{'] && new.kind() == SyntaxKind::USE {
+ if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) {
+ let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into());
+ indent.0 += 1;
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+
+ if prev.kind() == T!['{'] && ast::Stmt::can_cast(new.kind()) {
+ if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) {
+ let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into());
+ indent.0 += 1;
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+
+ ws_between(prev, new)
+}
+fn ws_after(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
+ let next = match &position.repr {
+ PositionRepr::FirstChild(parent) => parent.first_child_or_token()?,
+ PositionRepr::After(sibling) => sibling.next_sibling_or_token()?,
+ };
+ ws_between(new, &next)
+}
+fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option<SyntaxToken> {
+ if left.kind() == SyntaxKind::WHITESPACE || right.kind() == SyntaxKind::WHITESPACE {
+ return None;
+ }
+ if right.kind() == T![;] || right.kind() == T![,] {
+ return None;
+ }
+ if left.kind() == T![<] || right.kind() == T![>] {
+ return None;
+ }
+ if left.kind() == T![&] && right.kind() == SyntaxKind::LIFETIME {
+ return None;
+ }
+ if right.kind() == SyntaxKind::GENERIC_ARG_LIST {
+ return None;
+ }
+
+ if right.kind() == SyntaxKind::USE {
+ let mut indent = IndentLevel::from_element(left);
+ if left.kind() == SyntaxKind::USE {
+ indent.0 = IndentLevel::from_element(right).0.max(indent.0);
+ }
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ Some(make::tokens::single_space())
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
new file mode 100644
index 000000000..58fba8cfa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
@@ -0,0 +1,186 @@
+#[cfg(not(feature = "in-rust-tree"))]
+mod ast_src;
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen_ast;
+
+use std::{
+ fs,
+ path::{Path, PathBuf},
+};
+
+use ast::HasName;
+use expect_test::expect_file;
+use rayon::prelude::*;
+use test_utils::{bench, bench_fixture, project_root};
+
+use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
+
+#[test]
+fn parse_smoke_test() {
+ let code = r##"
+fn main() {
+ println!("Hello, world!")
+}
+ "##;
+
+ let parse = SourceFile::parse(code);
+ // eprintln!("{:#?}", parse.syntax_node());
+ assert!(parse.ok().is_ok());
+}
+
+#[test]
+fn benchmark_parser() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ let data = bench_fixture::glorious_old_parser();
+ let tree = {
+ let _b = bench("parsing");
+ let p = SourceFile::parse(&data);
+ assert!(p.errors.is_empty());
+ assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
+ p.tree()
+ };
+
+ {
+ let _b = bench("tree traversal");
+ let fn_names =
+ tree.syntax().descendants().filter_map(ast::Fn::cast).filter_map(|f| f.name()).count();
+ assert_eq!(fn_names, 268);
+ }
+}
+
+#[test]
+fn validation_tests() {
+ dir_tests(&test_data_dir(), &["parser/validation"], "rast", |text, path| {
+ let parse = SourceFile::parse(text);
+ let errors = parse.errors();
+ assert_errors_are_present(errors, path);
+ parse.debug_dump()
+ });
+}
+
+#[test]
+fn parser_fuzz_tests() {
+ for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) {
+ fuzz::check_parser(&text)
+ }
+}
+
+#[test]
+fn reparse_fuzz_tests() {
+ for (_, text) in collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]) {
+ let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap();
+ check.run();
+ }
+}
+
+/// Test that rust-analyzer can parse and validate its own source code
+#[test]
+fn self_hosting_parsing() {
+ let crates_dir = project_root().join("crates");
+
+ let mut files = ::sourcegen::list_rust_files(&crates_dir);
+ files.retain(|path| {
+ // Get all files which are not in the crates/syntax/test_data folder
+ !path.components().any(|component| component.as_os_str() == "test_data")
+ });
+
+ assert!(
+ files.len() > 100,
+ "self_hosting_parsing found too few files - is it running in the right directory?"
+ );
+
+ let errors = files
+ .into_par_iter()
+ .filter_map(|file| {
+ let text = read_text(&file);
+ match SourceFile::parse(&text).ok() {
+ Ok(_) => None,
+ Err(err) => Some((file, err)),
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if !errors.is_empty() {
+ let errors = errors
+ .into_iter()
+ .map(|(path, err)| format!("{}: {:?}\n", path.display(), err[0]))
+ .collect::<String>();
+ panic!("Parsing errors:\n{}\n", errors);
+ }
+}
+
+fn test_data_dir() -> PathBuf {
+ project_root().join("crates/syntax/test_data")
+}
+
+fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) {
+ assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
+}
+
+/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir`
+/// subdirectories defined by `paths`.
+///
+/// If the content of the matching output file differs from the output of `f()`
+/// the test will fail.
+///
+/// If there is no matching output file it will be created and filled with the
+/// output of `f()`, but the test will fail.
+fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F)
+where
+ F: Fn(&str, &Path) -> String,
+{
+ for (path, input_code) in collect_rust_files(test_data_dir, paths) {
+ let actual = f(&input_code, &path);
+ let path = path.with_extension(outfile_extension);
+ expect_file![path].assert_eq(&actual)
+ }
+}
+
+/// Collects all `.rs` files from `dir` subdirectories defined by `paths`.
+fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
+ paths
+ .iter()
+ .flat_map(|path| {
+ let path = root_dir.to_owned().join(path);
+ rust_files_in_dir(&path).into_iter()
+ })
+ .map(|path| {
+ let text = read_text(&path);
+ (path, text)
+ })
+ .collect()
+}
+
+/// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`.
+fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
+ let mut acc = Vec::new();
+ for file in fs::read_dir(&dir).unwrap() {
+ let file = file.unwrap();
+ let path = file.path();
+ if path.extension().unwrap_or_default() == "rs" {
+ acc.push(path);
+ }
+ }
+ acc.sort();
+ acc
+}
+
+/// Read file and normalize newlines.
+///
+/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
+///
+/// ```
+/// let s = "
+/// ";
+/// assert_eq!(s.as_bytes(), &[10]);
+/// ```
+///
+/// so this should always be correct.
+fn read_text(path: &Path) -> String {
+ fs::read_to_string(path)
+ .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
+ .replace("\r\n", "\n")
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
new file mode 100644
index 000000000..cf5be1c30
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
@@ -0,0 +1,252 @@
+//! Defines input for code generation process.
+
/// The raw token and node kind names fed into `SyntaxKind` code generation.
pub(crate) struct KindsSrc<'a> {
    /// Punctuation as `(text, SCREAMING_NAME)` pairs, e.g. `(";", "SEMICOLON")`.
    pub(crate) punct: &'a [(&'a str, &'a str)],
    /// Reserved keywords.
    pub(crate) keywords: &'a [&'a str],
    /// Identifiers treated as keywords only in certain positions.
    pub(crate) contextual_keywords: &'a [&'a str],
    /// Literal token kinds, e.g. `INT_NUMBER`.
    pub(crate) literals: &'a [&'a str],
    /// Remaining terminal kinds (trivia, idents, errors, shebang).
    pub(crate) tokens: &'a [&'a str],
    /// Non-terminal (syntax node) kinds.
    pub(crate) nodes: &'a [&'a str],
}
+
/// The full inventory of token and node kinds for Rust syntax.
///
/// Order matters: `generate_syntax_kinds` emits the `SyntaxKind` variants in
/// exactly this order (punct, keywords, literals, tokens, nodes), so
/// reordering entries changes the numeric values of the `repr(u16)` enum.
pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
    punct: &[
        (";", "SEMICOLON"),
        (",", "COMMA"),
        ("(", "L_PAREN"),
        (")", "R_PAREN"),
        ("{", "L_CURLY"),
        ("}", "R_CURLY"),
        ("[", "L_BRACK"),
        ("]", "R_BRACK"),
        ("<", "L_ANGLE"),
        (">", "R_ANGLE"),
        ("@", "AT"),
        ("#", "POUND"),
        ("~", "TILDE"),
        ("?", "QUESTION"),
        ("$", "DOLLAR"),
        ("&", "AMP"),
        ("|", "PIPE"),
        ("+", "PLUS"),
        ("*", "STAR"),
        ("/", "SLASH"),
        ("^", "CARET"),
        ("%", "PERCENT"),
        ("_", "UNDERSCORE"),
        (".", "DOT"),
        ("..", "DOT2"),
        ("...", "DOT3"),
        ("..=", "DOT2EQ"),
        (":", "COLON"),
        ("::", "COLON2"),
        ("=", "EQ"),
        ("==", "EQ2"),
        ("=>", "FAT_ARROW"),
        ("!", "BANG"),
        ("!=", "NEQ"),
        ("-", "MINUS"),
        ("->", "THIN_ARROW"),
        ("<=", "LTEQ"),
        (">=", "GTEQ"),
        ("+=", "PLUSEQ"),
        ("-=", "MINUSEQ"),
        ("|=", "PIPEEQ"),
        ("&=", "AMPEQ"),
        ("^=", "CARETEQ"),
        ("/=", "SLASHEQ"),
        ("*=", "STAREQ"),
        ("%=", "PERCENTEQ"),
        ("&&", "AMP2"),
        ("||", "PIPE2"),
        ("<<", "SHL"),
        (">>", "SHR"),
        ("<<=", "SHLEQ"),
        (">>=", "SHREQ"),
    ],
    keywords: &[
        "as", "async", "await", "box", "break", "const", "continue", "crate", "dyn", "else",
        "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro",
        "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct",
        "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield",
    ],
    contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules"],
    literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"],
    tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"],
    nodes: &[
        "SOURCE_FILE",
        // items
        "STRUCT",
        "UNION",
        "ENUM",
        "FN",
        "RET_TYPE",
        "EXTERN_CRATE",
        "MODULE",
        "USE",
        "STATIC",
        "CONST",
        "TRAIT",
        "IMPL",
        "TYPE_ALIAS",
        "MACRO_CALL",
        "MACRO_RULES",
        "MACRO_ARM",
        "TOKEN_TREE",
        "MACRO_DEF",
        // types
        "PAREN_TYPE",
        "TUPLE_TYPE",
        "MACRO_TYPE",
        "NEVER_TYPE",
        "PATH_TYPE",
        "PTR_TYPE",
        "ARRAY_TYPE",
        "SLICE_TYPE",
        "REF_TYPE",
        "INFER_TYPE",
        "FN_PTR_TYPE",
        "FOR_TYPE",
        "IMPL_TRAIT_TYPE",
        "DYN_TRAIT_TYPE",
        // patterns
        "OR_PAT",
        "PAREN_PAT",
        "REF_PAT",
        "BOX_PAT",
        "IDENT_PAT",
        "WILDCARD_PAT",
        "REST_PAT",
        "PATH_PAT",
        "RECORD_PAT",
        "RECORD_PAT_FIELD_LIST",
        "RECORD_PAT_FIELD",
        "TUPLE_STRUCT_PAT",
        "TUPLE_PAT",
        "SLICE_PAT",
        "RANGE_PAT",
        "LITERAL_PAT",
        "MACRO_PAT",
        "CONST_BLOCK_PAT",
        // atoms
        "TUPLE_EXPR",
        "ARRAY_EXPR",
        "PAREN_EXPR",
        "PATH_EXPR",
        "CLOSURE_EXPR",
        "IF_EXPR",
        "WHILE_EXPR",
        "LOOP_EXPR",
        "FOR_EXPR",
        "CONTINUE_EXPR",
        "BREAK_EXPR",
        "LABEL",
        "BLOCK_EXPR",
        "STMT_LIST",
        "RETURN_EXPR",
        "YIELD_EXPR",
        "LET_EXPR",
        "UNDERSCORE_EXPR",
        "MACRO_EXPR",
        "MATCH_EXPR",
        "MATCH_ARM_LIST",
        "MATCH_ARM",
        "MATCH_GUARD",
        "RECORD_EXPR",
        "RECORD_EXPR_FIELD_LIST",
        "RECORD_EXPR_FIELD",
        "BOX_EXPR",
        // postfix
        "CALL_EXPR",
        "INDEX_EXPR",
        "METHOD_CALL_EXPR",
        "FIELD_EXPR",
        "AWAIT_EXPR",
        "TRY_EXPR",
        "CAST_EXPR",
        // unary
        "REF_EXPR",
        "PREFIX_EXPR",
        "RANGE_EXPR", // just weird
        "BIN_EXPR",
        "EXTERN_BLOCK",
        "EXTERN_ITEM_LIST",
        "VARIANT",
        "RECORD_FIELD_LIST",
        "RECORD_FIELD",
        "TUPLE_FIELD_LIST",
        "TUPLE_FIELD",
        "VARIANT_LIST",
        "ITEM_LIST",
        "ASSOC_ITEM_LIST",
        "ATTR",
        "META",
        "USE_TREE",
        "USE_TREE_LIST",
        "PATH",
        "PATH_SEGMENT",
        "LITERAL",
        "RENAME",
        "VISIBILITY",
        "WHERE_CLAUSE",
        "WHERE_PRED",
        "ABI",
        "NAME",
        "NAME_REF",
        "LET_STMT",
        "LET_ELSE",
        "EXPR_STMT",
        "GENERIC_PARAM_LIST",
        "GENERIC_PARAM",
        "LIFETIME_PARAM",
        "TYPE_PARAM",
        "CONST_PARAM",
        "GENERIC_ARG_LIST",
        "LIFETIME",
        "LIFETIME_ARG",
        "TYPE_ARG",
        "ASSOC_TYPE_ARG",
        "CONST_ARG",
        "PARAM_LIST",
        "PARAM",
        "SELF_PARAM",
        "ARG_LIST",
        "TYPE_BOUND",
        "TYPE_BOUND_LIST",
        // macro related
        "MACRO_ITEMS",
        "MACRO_STMTS",
    ],
};
+
/// The lowered AST description derived from the ungrammar file.
#[derive(Default, Debug)]
pub(crate) struct AstSrc {
    // Names of generated token wrapper types (e.g. `Whitespace`, `Ident`).
    pub(crate) tokens: Vec<String>,
    // Struct-like nodes, each with typed accessors.
    pub(crate) nodes: Vec<AstNodeSrc>,
    // Enum nodes (alternations of other nodes).
    pub(crate) enums: Vec<AstEnumSrc>,
}
+
/// A struct-like AST node: a named syntax kind plus its accessors.
#[derive(Debug)]
pub(crate) struct AstNodeSrc {
    // Doc-comment lines spliced above the generated struct.
    pub(crate) doc: Vec<String>,
    pub(crate) name: String,
    // `Has*` traits this node implements.
    pub(crate) traits: Vec<String>,
    // Accessor methods generated for this node.
    pub(crate) fields: Vec<Field>,
}
+
/// One accessor on a node: either a token child or a node child.
#[derive(Debug, Eq, PartialEq)]
pub(crate) enum Field {
    // A token child, identified by its text (e.g. `"=>"`, `"'{'"`).
    Token(String),
    // A node child with its accessor name, node type, and multiplicity.
    Node { name: String, ty: String, cardinality: Cardinality },
}
+
/// Whether a node field yields at most one child (`Option<T>` accessor)
/// or arbitrarily many (`AstChildren<T>` accessor).
#[derive(Debug, Eq, PartialEq)]
pub(crate) enum Cardinality {
    Optional,
    Many,
}
+
/// An enum AST node: an alternation of other nodes.
#[derive(Debug)]
pub(crate) struct AstEnumSrc {
    // Doc-comment lines spliced above the generated enum.
    pub(crate) doc: Vec<String>,
    pub(crate) name: String,
    // Traits implemented by every variant (and therefore by the enum).
    pub(crate) traits: Vec<String>,
    // Names of the variant nodes.
    pub(crate) variants: Vec<String>,
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
new file mode 100644
index 000000000..6d2766225
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
@@ -0,0 +1,862 @@
+//! This module generates AST datatype used by rust-analyzer.
+//!
+//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
+//! wrappers around `SyntaxNode` which implement `syntax::AstNode`.
+
+use std::{
+ collections::{BTreeSet, HashSet},
+ fmt::Write,
+};
+
+use itertools::Itertools;
+use proc_macro2::{Punct, Spacing};
+use quote::{format_ident, quote};
+use ungrammar::{Grammar, Rule};
+
+use crate::tests::ast_src::{
+ AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC,
+};
+
/// Test-as-sourcegen: regenerates the `SyntaxKind` enum and the typed AST
/// wrappers from `rust.ungram`, failing if the checked-in files are stale.
#[test]
fn sourcegen_ast() {
    // `SyntaxKind` is generated purely from the hand-maintained KINDS_SRC table.
    let syntax_kinds = generate_syntax_kinds(KINDS_SRC);
    let syntax_kinds_file =
        sourcegen::project_root().join("crates/parser/src/syntax_kind/generated.rs");
    sourcegen::ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds);

    // The AST wrappers are derived from the ungrammar file, lowered to
    // `AstSrc` first.
    let grammar =
        include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/rust.ungram")).parse().unwrap();
    let ast = lower(&grammar);

    let ast_tokens = generate_tokens(&ast);
    let ast_tokens_file =
        sourcegen::project_root().join("crates/syntax/src/ast/generated/tokens.rs");
    sourcegen::ensure_file_contents(ast_tokens_file.as_path(), &ast_tokens);

    let ast_nodes = generate_nodes(KINDS_SRC, &ast);
    let ast_nodes_file = sourcegen::project_root().join("crates/syntax/src/ast/generated/nodes.rs");
    sourcegen::ensure_file_contents(ast_nodes_file.as_path(), &ast_nodes);
}
+
/// Generates `ast/generated/tokens.rs`: one `AstToken` newtype wrapper
/// around `SyntaxToken` per entry in `grammar.tokens`.
fn generate_tokens(grammar: &AstSrc) -> String {
    let tokens = grammar.tokens.iter().map(|token| {
        // Type name is the token name as-is; the kind is its SCREAMING form.
        let name = format_ident!("{}", token);
        let kind = format_ident!("{}", to_upper_snake_case(token));
        quote! {
            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
            pub struct #name {
                pub(crate) syntax: SyntaxToken,
            }
            impl std::fmt::Display for #name {
                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    std::fmt::Display::fmt(&self.syntax, f)
                }
            }
            impl AstToken for #name {
                fn can_cast(kind: SyntaxKind) -> bool { kind == #kind }
                fn cast(syntax: SyntaxToken) -> Option<Self> {
                    if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
                }
                fn syntax(&self) -> &SyntaxToken { &self.syntax }
            }
        }
    });

    sourcegen::add_preamble(
        "sourcegen_ast",
        sourcegen::reformat(
            quote! {
                use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
                #(#tokens)*
            }
            .to_string(),
        ),
    )
    // Insert a blank line before each generated item for readability.
    .replace("#[derive", "\n#[derive")
}
+
/// Generates `ast/generated/nodes.rs`: `AstNode` newtype wrappers for every
/// struct-like node, enums for alternations, `Any*` wrappers for each trait,
/// `Display` impls, and the doc comments spliced in afterwards.
fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
    // Struct-like nodes: a newtype over `SyntaxNode` plus one accessor per field.
    let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
        .nodes
        .iter()
        .map(|node| {
            let name = format_ident!("{}", node.name);
            let kind = format_ident!("{}", to_upper_snake_case(&node.name));
            let traits = node
                .traits
                .iter()
                .filter(|trait_name| {
                    // Loops have two expressions so this might collide,
                    // therefore we implement it manually.
                    node.name != "ForExpr" && node.name != "WhileExpr"
                        || trait_name.as_str() != "HasLoopBody"
                })
                .map(|trait_name| {
                    let trait_name = format_ident!("{}", trait_name);
                    quote!(impl ast::#trait_name for #name {})
                });

            // Accessor shape depends on the field: many children, a specific
            // token, or a single optional child node.
            let methods = node.fields.iter().map(|field| {
                let method_name = field.method_name();
                let ty = field.ty();

                if field.is_many() {
                    quote! {
                        pub fn #method_name(&self) -> AstChildren<#ty> {
                            support::children(&self.syntax)
                        }
                    }
                } else if let Some(token_kind) = field.token_kind() {
                    quote! {
                        pub fn #method_name(&self) -> Option<#ty> {
                            support::token(&self.syntax, #token_kind)
                        }
                    }
                } else {
                    quote! {
                        pub fn #method_name(&self) -> Option<#ty> {
                            support::child(&self.syntax)
                        }
                    }
                }
            });
            (
                quote! {
                    #[pretty_doc_comment_placeholder_workaround]
                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
                    pub struct #name {
                        pub(crate) syntax: SyntaxNode,
                    }

                    #(#traits)*

                    impl #name {
                        #(#methods)*
                    }
                },
                quote! {
                    impl AstNode for #name {
                        fn can_cast(kind: SyntaxKind) -> bool {
                            kind == #kind
                        }
                        fn cast(syntax: SyntaxNode) -> Option<Self> {
                            if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
                        }
                        fn syntax(&self) -> &SyntaxNode { &self.syntax }
                    }
                },
            )
        })
        .unzip();

    // Enum nodes: one variant per alternative, plus `From` impls and (except
    // for `Stmt`, which is hand-written) an `AstNode` impl.
    let (enum_defs, enum_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
        .enums
        .iter()
        .map(|en| {
            let variants: Vec<_> = en.variants.iter().map(|var| format_ident!("{}", var)).collect();
            let name = format_ident!("{}", en.name);
            let kinds: Vec<_> = variants
                .iter()
                .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
                .collect();
            let traits = en.traits.iter().map(|trait_name| {
                let trait_name = format_ident!("{}", trait_name);
                quote!(impl ast::#trait_name for #name {})
            });

            let ast_node = if en.name == "Stmt" {
                quote! {}
            } else {
                quote! {
                    impl AstNode for #name {
                        fn can_cast(kind: SyntaxKind) -> bool {
                            match kind {
                                #(#kinds)|* => true,
                                _ => false,
                            }
                        }
                        fn cast(syntax: SyntaxNode) -> Option<Self> {
                            let res = match syntax.kind() {
                                #(
                                    #kinds => #name::#variants(#variants { syntax }),
                                )*
                                _ => return None,
                            };
                            Some(res)
                        }
                        fn syntax(&self) -> &SyntaxNode {
                            match self {
                                #(
                                    #name::#variants(it) => &it.syntax,
                                )*
                            }
                        }
                    }
                }
            };

            (
                quote! {
                    #[pretty_doc_comment_placeholder_workaround]
                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
                    pub enum #name {
                        #(#variants(#variants),)*
                    }

                    #(#traits)*
                },
                quote! {
                    #(
                        impl From<#variants> for #name {
                            fn from(node: #variants) -> #name {
                                #name::#variants(node)
                            }
                        }
                    )*
                    #ast_node
                },
            )
        })
        .unzip();

    // `AnyHasName`-style wrappers: for every trait, one type that can cast
    // from any node kind implementing that trait.
    let (any_node_defs, any_node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
        .nodes
        .iter()
        .flat_map(|node| node.traits.iter().map(move |t| (t, node)))
        .into_group_map()
        .into_iter()
        // Sort for deterministic output across runs.
        .sorted_by_key(|(k, _)| *k)
        .map(|(trait_name, nodes)| {
            let name = format_ident!("Any{}", trait_name);
            let trait_name = format_ident!("{}", trait_name);
            let kinds: Vec<_> = nodes
                .iter()
                .map(|name| format_ident!("{}", to_upper_snake_case(&name.name.to_string())))
                .collect();

            (
                quote! {
                    #[pretty_doc_comment_placeholder_workaround]
                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
                    pub struct #name {
                        pub(crate) syntax: SyntaxNode,
                    }
                    impl ast::#trait_name for #name {}
                },
                quote! {
                    impl #name {
                        #[inline]
                        pub fn new<T: ast::#trait_name>(node: T) -> #name {
                            #name {
                                syntax: node.syntax().clone()
                            }
                        }
                    }
                    impl AstNode for #name {
                        fn can_cast(kind: SyntaxKind) -> bool {
                            match kind {
                                #(#kinds)|* => true,
                                _ => false,
                            }
                        }
                        fn cast(syntax: SyntaxNode) -> Option<Self> {
                            Self::can_cast(syntax.kind()).then(|| #name { syntax })
                        }
                        fn syntax(&self) -> &SyntaxNode {
                            &self.syntax
                        }
                    }
                },
            )
        })
        .unzip();

    let enum_names = grammar.enums.iter().map(|it| &it.name);
    let node_names = grammar.nodes.iter().map(|it| &it.name);

    // Every node and enum displays as its underlying syntax text.
    let display_impls =
        enum_names.chain(node_names.clone()).map(|it| format_ident!("{}", it)).map(|name| {
            quote! {
                impl std::fmt::Display for #name {
                    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        std::fmt::Display::fmt(self.syntax(), f)
                    }
                }
            }
        });

    // Sanity pass: find node kinds with no AST declaration (currently the
    // warning is disabled; the loop body is a no-op).
    let defined_nodes: HashSet<_> = node_names.collect();

    for node in kinds
        .nodes
        .iter()
        .map(|kind| to_pascal_case(kind))
        .filter(|name| !defined_nodes.iter().any(|&it| it == name))
    {
        drop(node)
        // FIXME: restore this
        // eprintln!("Warning: node {} not defined in ast source", node);
    }

    let ast = quote! {
        #![allow(non_snake_case)]
        use crate::{
            SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
            ast::{self, AstNode, AstChildren, support},
            T,
        };

        #(#node_defs)*
        #(#enum_defs)*
        #(#any_node_defs)*
        #(#node_boilerplate_impls)*
        #(#enum_boilerplate_impls)*
        #(#any_node_boilerplate_impls)*
        #(#display_impls)*
    };

    // `quote!` inserts spaces into `T ! [...]`; restore the macro spelling.
    let ast = ast.to_string().replace("T ! [", "T![");

    let mut res = String::with_capacity(ast.len() * 2);

    // Splice doc comments where the placeholder attributes were emitted.
    // `docs` is ordered to match the placeholders: nodes first, then enums.
    let mut docs =
        grammar.nodes.iter().map(|it| &it.doc).chain(grammar.enums.iter().map(|it| &it.doc));

    for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
        res.push_str(chunk);
        if let Some(doc) = docs.next() {
            write_doc_comment(doc, &mut res);
        }
    }

    let res = sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(res));
    res.replace("#[derive", "\n#[derive")
}
+
/// Appends `contents` to `dest` as `///` doc-comment lines, one per entry.
fn write_doc_comment(contents: &[String], dest: &mut String) {
    contents.iter().for_each(|line| {
        dest.push_str("///");
        dest.push_str(line);
        dest.push('\n');
    });
}
+
/// Generates `parser/src/syntax_kind/generated.rs`: the `SyntaxKind` enum,
/// its classification and lookup helpers, and the `T!` macro.
fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
    // Single-character punctuation additionally gets a `from_char` mapping.
    let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
        .punct
        .iter()
        .filter(|(token, _name)| token.len() == 1)
        .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
        .unzip();

    let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
        // Brackets can't be written as bare punct tokens inside `quote!`,
        // so they are emitted as `char` literals instead.
        if "{}[]()".contains(token) {
            let c = token.chars().next().unwrap();
            quote! { #c }
        } else {
            let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
            quote! { #(#cs)* }
        }
    });
    let punctuation =
        grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();

    // Maps a keyword to its `*_KW` variant; `Self` needs a distinct name
    // because uppercasing would collide with `self`'s `SELF_KW`.
    let x = |&name| match name {
        "Self" => format_ident!("SELF_TYPE_KW"),
        name => format_ident!("{}_KW", to_upper_snake_case(name)),
    };
    let full_keywords_values = grammar.keywords;
    let full_keywords = full_keywords_values.iter().map(x);

    let contextual_keywords_values = &grammar.contextual_keywords;
    let contextual_keywords = contextual_keywords_values.iter().map(x);

    let all_keywords_values = grammar
        .keywords
        .iter()
        .chain(grammar.contextual_keywords.iter())
        .copied()
        .collect::<Vec<_>>();
    let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
    let all_keywords = all_keywords_values.iter().map(x).collect::<Vec<_>>();

    let literals =
        grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let ast = quote! {
        #![allow(bad_style, missing_docs, unreachable_pub)]
        /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`.
        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
        #[repr(u16)]
        pub enum SyntaxKind {
            // Technical SyntaxKinds: they appear temporally during parsing,
            // but never end up in the final tree
            #[doc(hidden)]
            TOMBSTONE,
            #[doc(hidden)]
            EOF,
            #(#punctuation,)*
            #(#all_keywords,)*
            #(#literals,)*
            #(#tokens,)*
            #(#nodes,)*

            // Technical kind so that we can cast from u16 safely
            #[doc(hidden)]
            __LAST,
        }
        use self::SyntaxKind::*;

        impl SyntaxKind {
            pub fn is_keyword(self) -> bool {
                match self {
                    #(#all_keywords)|* => true,
                    _ => false,
                }
            }

            pub fn is_punct(self) -> bool {
                match self {
                    #(#punctuation)|* => true,
                    _ => false,
                }
            }

            pub fn is_literal(self) -> bool {
                match self {
                    #(#literals)|* => true,
                    _ => false,
                }
            }

            pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
                let kw = match ident {
                    #(#full_keywords_values => #full_keywords,)*
                    _ => return None,
                };
                Some(kw)
            }

            pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
                let kw = match ident {
                    #(#contextual_keywords_values => #contextual_keywords,)*
                    _ => return None,
                };
                Some(kw)
            }

            pub fn from_char(c: char) -> Option<SyntaxKind> {
                let tok = match c {
                    #(#single_byte_tokens_values => #single_byte_tokens,)*
                    _ => return None,
                };
                Some(tok)
            }
        }

        #[macro_export]
        macro_rules! T {
            #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
            #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
            [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT };
            [ident] => { $crate::SyntaxKind::IDENT };
            [shebang] => { $crate::SyntaxKind::SHEBANG };
        }
        pub use T;
    };

    sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(ast.to_string()))
}
+
/// Converts `CamelCase` to `SCREAMING_SNAKE_CASE` by inserting `_` before
/// every uppercase letter except the first character.
fn to_upper_snake_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for (idx, ch) in s.chars().enumerate() {
        if idx > 0 && ch.is_ascii_uppercase() {
            out.push('_');
        }
        out.push(ch.to_ascii_uppercase());
    }
    out
}
+
/// Converts `CamelCase` to `snake_case` by inserting `_` before every
/// uppercase letter except the first character.
fn to_lower_snake_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for (idx, ch) in s.chars().enumerate() {
        if idx > 0 && ch.is_ascii_uppercase() {
            out.push('_');
        }
        out.push(ch.to_ascii_lowercase());
    }
    out
}
+
/// Converts `SNAKE_CASE` (or `snake_case`) to `PascalCase`: underscores are
/// dropped and the following character is uppercased.
fn to_pascal_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut at_word_start = true;
    for ch in s.chars() {
        match ch {
            '_' => at_word_start = true,
            c if at_word_start => {
                out.push(c.to_ascii_uppercase());
                at_word_start = false;
            }
            c => out.push(c.to_ascii_lowercase()),
        }
    }
    out
}
+
/// Naive English pluralization: appends an `s`.
fn pluralize(s: &str) -> String {
    let mut plural = String::with_capacity(s.len() + 1);
    plural.push_str(s);
    plural.push('s');
    plural
}
+
impl Field {
    /// Whether the accessor yields multiple children (`AstChildren`) rather
    /// than a single optional one.
    fn is_many(&self) -> bool {
        matches!(self, Field::Node { cardinality: Cardinality::Many, .. })
    }
    /// For token fields, the `T![...]` expression used to look up the token;
    /// `None` for node fields.
    fn token_kind(&self) -> Option<proc_macro2::TokenStream> {
        match self {
            Field::Token(token) => {
                let token: proc_macro2::TokenStream = token.parse().unwrap();
                Some(quote! { T![#token] })
            }
            _ => None,
        }
    }
    /// The name of the generated accessor method.
    fn method_name(&self) -> proc_macro2::Ident {
        match self {
            Field::Token(name) => {
                // Punctuation gets a spelled-out name; the `_token` suffix
                // keeps token accessors distinct from node accessors.
                let name = match name.as_str() {
                    ";" => "semicolon",
                    "->" => "thin_arrow",
                    "'{'" => "l_curly",
                    "'}'" => "r_curly",
                    "'('" => "l_paren",
                    "')'" => "r_paren",
                    "'['" => "l_brack",
                    "']'" => "r_brack",
                    "<" => "l_angle",
                    ">" => "r_angle",
                    "=" => "eq",
                    "!" => "excl",
                    "*" => "star",
                    "&" => "amp",
                    "_" => "underscore",
                    "." => "dot",
                    ".." => "dotdot",
                    "..." => "dotdotdot",
                    "..=" => "dotdoteq",
                    "=>" => "fat_arrow",
                    "@" => "at",
                    ":" => "colon",
                    "::" => "coloncolon",
                    "#" => "pound",
                    "?" => "question_mark",
                    "," => "comma",
                    "|" => "pipe",
                    "~" => "tilde",
                    _ => name,
                };
                format_ident!("{}_token", name)
            }
            Field::Node { name, .. } => {
                // `type` is a Rust keyword, so the conventional `ty` is used.
                if name == "type" {
                    format_ident!("ty")
                } else {
                    format_ident!("{}", name)
                }
            }
        }
    }
    /// The Rust type the accessor returns (wrapped in `Option`/`AstChildren`
    /// by the caller).
    fn ty(&self) -> proc_macro2::Ident {
        match self {
            Field::Token(_) => format_ident!("SyntaxToken"),
            Field::Node { ty, .. } => format_ident!("{}", ty),
        }
    }
}
+
/// Lowers the ungrammar into `AstSrc`: each grammar node becomes either an
/// enum (a pure alternation) or a struct node with fields, followed by
/// cleanup passes that deduplicate fields and factor out enums and traits.
fn lower(grammar: &Grammar) -> AstSrc {
    let mut res = AstSrc {
        // Token wrappers are not derived from the grammar; this is a fixed set.
        tokens: "Whitespace Comment String ByteString IntNumber FloatNumber Char Byte Ident"
            .split_ascii_whitespace()
            .map(|it| it.to_string())
            .collect::<Vec<_>>(),
        ..Default::default()
    };

    let nodes = grammar.iter().collect::<Vec<_>>();

    for &node in &nodes {
        let name = grammar[node].name.clone();
        let rule = &grammar[node].rule;
        // An alternation of plain nodes lowers to an enum; anything else
        // lowers to a struct with accessor fields.
        match lower_enum(grammar, rule) {
            Some(variants) => {
                let enum_src = AstEnumSrc { doc: Vec::new(), name, traits: Vec::new(), variants };
                res.enums.push(enum_src);
            }
            None => {
                let mut fields = Vec::new();
                lower_rule(&mut fields, grammar, None, rule);
                res.nodes.push(AstNodeSrc { doc: Vec::new(), name, traits: Vec::new(), fields });
            }
        }
    }

    deduplicate_fields(&mut res);
    extract_enums(&mut res);
    extract_struct_traits(&mut res);
    extract_enum_traits(&mut res);
    res
}
+
/// If `rule` is an alternation of plain node rules, returns the variant
/// names; otherwise `None` (the node is then lowered as a struct).
fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
    let alternatives = match rule {
        Rule::Alt(it) => it,
        _ => return None,
    };
    let mut variants = Vec::new();
    for alternative in alternatives {
        match alternative {
            Rule::Node(it) => variants.push(grammar[*it].name.clone()),
            // A bare `;` alternative is tolerated and simply skipped.
            Rule::Token(it) if grammar[*it].name == ";" => (),
            _ => return None,
        }
    }
    Some(variants)
}
+
/// Recursively lowers one grammar rule into accessor fields pushed onto
/// `acc`. `label` carries an explicit `label:` from the grammar and, when
/// present, overrides the derived field name.
fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
    // `(T (',' T)* ','?)` is recognized as a whole and becomes one `Many` field.
    if lower_comma_list(acc, grammar, label, rule) {
        return;
    }

    match rule {
        // A single node reference: one optional child field.
        Rule::Node(node) => {
            let ty = grammar[*node].name.clone();
            let name = label.cloned().unwrap_or_else(|| to_lower_snake_case(&ty));
            let field = Field::Node { name, ty, cardinality: Cardinality::Optional };
            acc.push(field);
        }
        Rule::Token(token) => {
            assert!(label.is_none());
            let mut name = grammar[*token].name.clone();
            // NOTE(review): `int_number`/`string` tokens deliberately get no
            // accessor here — presumably exposed via `Literal` instead; confirm.
            if name != "int_number" && name != "string" {
                // Brackets are quoted so `method_name` can map them to
                // spelled-out names like `l_curly`.
                if "[]{}()".contains(&name) {
                    name = format!("'{}'", name);
                }
                let field = Field::Token(name);
                acc.push(field);
            }
        }
        // `T*` of a node becomes a pluralized `Many` field.
        Rule::Rep(inner) => {
            if let Rule::Node(node) = &**inner {
                let ty = grammar[*node].name.clone();
                let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
                let field = Field::Node { name, ty, cardinality: Cardinality::Many };
                acc.push(field);
                return;
            }
            panic!("unhandled rule: {:?}", rule)
        }
        Rule::Labeled { label: l, rule } => {
            assert!(label.is_none());
            // Accessors for these labels are hand-written elsewhere, so no
            // field is generated for them.
            let manually_implemented = matches!(
                l.as_str(),
                "lhs"
                    | "rhs"
                    | "then_branch"
                    | "else_branch"
                    | "start"
                    | "end"
                    | "op"
                    | "index"
                    | "base"
                    | "value"
                    | "trait"
                    | "self_ty"
                    | "iterable"
                    | "condition"
            );
            if manually_implemented {
                return;
            }
            lower_rule(acc, grammar, Some(l), rule);
        }
        // Sequences and alternations contribute the fields of each element.
        Rule::Seq(rules) | Rule::Alt(rules) => {
            for rule in rules {
                lower_rule(acc, grammar, label, rule)
            }
        }
        // Optionality is already the default cardinality; just recurse.
        Rule::Opt(rule) => lower_rule(acc, grammar, label, rule),
    }
}
+
/// Recognizes the comma-separated-list pattern `(T (',' T)* ','?)` and, when
/// it matches, pushes a single `Many` field for `T`. Returns whether the
/// pattern matched.
fn lower_comma_list(
    acc: &mut Vec<Field>,
    grammar: &Grammar,
    label: Option<&String>,
    rule: &Rule,
) -> bool {
    let rule = match rule {
        Rule::Seq(it) => it,
        _ => return false,
    };
    // Expect exactly: leading node, a repetition, and an optional trailer.
    let (node, repeat, trailing_comma) = match rule.as_slice() {
        [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_comma)] => {
            (node, repeat, trailing_comma)
        }
        _ => return false,
    };
    let repeat = match &**repeat {
        Rule::Seq(it) => it,
        _ => return false,
    };
    // The repetition must be `(',' T)` with the same separator as the
    // trailing one and the same node as the head.
    match repeat.as_slice() {
        [comma, Rule::Node(n)] if comma == &**trailing_comma && n == node => (),
        _ => return false,
    }
    let ty = grammar[*node].name.clone();
    let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
    let field = Field::Node { name, ty, cardinality: Cardinality::Many };
    acc.push(field);
    true
}
+
+fn deduplicate_fields(ast: &mut AstSrc) {
+ for node in &mut ast.nodes {
+ let mut i = 0;
+ 'outer: while i < node.fields.len() {
+ for j in 0..i {
+ let f1 = &node.fields[i];
+ let f2 = &node.fields[j];
+ if f1 == f2 {
+ node.fields.remove(i);
+ continue 'outer;
+ }
+ }
+ i += 1;
+ }
+ }
+}
+
/// Collapses groups of fields that exactly cover an enum's variants into a
/// single optional field of the enum type.
fn extract_enums(ast: &mut AstSrc) {
    for node in &mut ast.nodes {
        for enm in &ast.enums {
            let mut to_remove = Vec::new();
            for (i, field) in node.fields.iter().enumerate() {
                let ty = field.ty().to_string();
                if enm.variants.iter().any(|it| it == &ty) {
                    to_remove.push(i);
                }
            }
            // Only collapse when every variant of the enum is present.
            if to_remove.len() == enm.variants.len() {
                node.remove_field(to_remove);
                let ty = enm.name.clone();
                let name = to_lower_snake_case(&ty);
                node.fields.push(Field::Node { name, ty, cardinality: Cardinality::Optional });
            }
        }
    }
}
+
/// Replaces well-known field groups with the corresponding `Has*` trait, and
/// tags item-like nodes with `HasDocComments`.
fn extract_struct_traits(ast: &mut AstSrc) {
    // Trait name -> the accessor methods that trait provides.
    let traits: &[(&str, &[&str])] = &[
        ("HasAttrs", &["attrs"]),
        ("HasName", &["name"]),
        ("HasVisibility", &["visibility"]),
        ("HasGenericParams", &["generic_param_list", "where_clause"]),
        ("HasTypeBounds", &["type_bound_list", "colon_token"]),
        ("HasModuleItem", &["items"]),
        ("HasLoopBody", &["label", "loop_body"]),
        ("HasArgList", &["arg_list"]),
    ];

    for node in &mut ast.nodes {
        for (name, methods) in traits {
            extract_struct_trait(node, name, methods);
        }
    }

    // Nodes whose syntax can carry doc comments.
    let nodes_with_doc_comments = [
        "SourceFile",
        "Fn",
        "Struct",
        "Union",
        "RecordField",
        "TupleField",
        "Enum",
        "Variant",
        "Trait",
        "Module",
        "Static",
        "Const",
        "TypeAlias",
        "Impl",
        "ExternBlock",
        "ExternCrate",
        "MacroCall",
        "MacroRules",
        "MacroDef",
        "Use",
    ];

    for node in &mut ast.nodes {
        if nodes_with_doc_comments.contains(&&*node.name) {
            node.traits.push("HasDocComments".into());
        }
    }
}
+
+fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) {
+ let mut to_remove = Vec::new();
+ for (i, field) in node.fields.iter().enumerate() {
+ let method_name = field.method_name().to_string();
+ if methods.iter().any(|&it| it == method_name) {
+ to_remove.push(i);
+ }
+ }
+ if to_remove.len() == methods.len() {
+ node.traits.push(trait_name.to_string());
+ node.remove_field(to_remove);
+ }
+}
+
/// Gives each enum exactly the traits that *all* of its variant nodes
/// implement (the intersection of their trait sets).
fn extract_enum_traits(ast: &mut AstSrc) {
    for enm in &mut ast.enums {
        // `Stmt` is skipped, matching its special-cased `AstNode` handling
        // in `generate_nodes`.
        if enm.name == "Stmt" {
            continue;
        }
        let nodes = &ast.nodes;
        let mut variant_traits = enm
            .variants
            .iter()
            .map(|var| nodes.iter().find(|it| &it.name == var).unwrap())
            .map(|node| node.traits.iter().cloned().collect::<BTreeSet<_>>());

        // Fold the intersection over all variants, seeded by the first one.
        let mut enum_traits = match variant_traits.next() {
            Some(it) => it,
            None => continue,
        };
        for traits in variant_traits {
            enum_traits = enum_traits.intersection(&traits).cloned().collect();
        }
        enm.traits = enum_traits.into_iter().collect();
    }
}
+
+impl AstNodeSrc {
+ fn remove_field(&mut self, to_remove: Vec<usize>) {
+ to_remove.into_iter().rev().for_each(|idx| {
+ self.fields.remove(idx);
+ });
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/token_text.rs b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
new file mode 100644
index 000000000..913b24d42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
@@ -0,0 +1,95 @@
+//! Yet another version of owned string, backed by a syntax tree token.
+
+use std::{cmp::Ordering, fmt, ops};
+
+use rowan::GreenToken;
+
/// A token's text: either borrowed from elsewhere or owned via a green token.
pub struct TokenText<'a>(pub(crate) Repr<'a>);

/// Internal storage for [`TokenText`].
pub(crate) enum Repr<'a> {
    // Borrows the text directly with lifetime `'a`.
    Borrowed(&'a str),
    // Owns a `GreenToken` and borrows the text from it on demand.
    Owned(GreenToken),
}
+
+impl<'a> TokenText<'a> {
+ pub(crate) fn borrowed(text: &'a str) -> Self {
+ TokenText(Repr::Borrowed(text))
+ }
+
+ pub(crate) fn owned(green: GreenToken) -> Self {
+ TokenText(Repr::Owned(green))
+ }
+
+ pub fn as_str(&self) -> &str {
+ match &self.0 {
+ &Repr::Borrowed(it) => it,
+ Repr::Owned(green) => green.text(),
+ }
+ }
+}
+
// `TokenText` can be used wherever a `&str` is expected.
impl ops::Deref for TokenText<'_> {
    type Target = str;

    fn deref(&self) -> &str {
        self.as_str()
    }
}
impl AsRef<str> for TokenText<'_> {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}

// Converting to an owned `String` copies the text.
impl From<TokenText<'_>> for String {
    fn from(token_text: TokenText<'_>) -> Self {
        token_text.as_str().into()
    }
}
+
// Equality with string types, provided in both argument orders so that
// `text == s` and `s == text` both work.
impl PartialEq<&'_ str> for TokenText<'_> {
    fn eq(&self, other: &&str) -> bool {
        self.as_str() == *other
    }
}
impl PartialEq<TokenText<'_>> for &'_ str {
    fn eq(&self, other: &TokenText<'_>) -> bool {
        other == self
    }
}
impl PartialEq<String> for TokenText<'_> {
    fn eq(&self, other: &String) -> bool {
        self.as_str() == other.as_str()
    }
}
impl PartialEq<TokenText<'_>> for String {
    fn eq(&self, other: &TokenText<'_>) -> bool {
        other == self
    }
}
impl PartialEq for TokenText<'_> {
    fn eq(&self, other: &TokenText<'_>) -> bool {
        self.as_str() == other.as_str()
    }
}
impl Eq for TokenText<'_> {}
// Ordering and formatting delegate to the underlying `str`.
impl Ord for TokenText<'_> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_str().cmp(other.as_str())
    }
}
impl PartialOrd for TokenText<'_> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl fmt::Display for TokenText<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self.as_str(), f)
    }
}
impl fmt::Debug for TokenText<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self.as_str(), f)
    }
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/utils.rs b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
new file mode 100644
index 000000000..f4c02518b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
@@ -0,0 +1,43 @@
+//! A set of utils methods to reuse on other abstraction levels
+
+use itertools::Itertools;
+
+use crate::{ast, match_ast, AstNode};
+
/// Renders `path` as a string while dropping any turbofish type arguments,
/// e.g. `Vec::<i32>::new` becomes `"Vec::new"`.
pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
    path.syntax()
        .children()
        .filter_map(|node| {
            match_ast! {
                match node {
                    // A segment contributes only its name; generic argument
                    // lists are not visited and thus stripped.
                    ast::PathSegment(it) => {
                        Some(it.name_ref()?.to_string())
                    },
                    // Qualifier paths are flattened recursively.
                    ast::Path(it) => {
                        Some(path_to_string_stripping_turbo_fish(&it))
                    },
                    _ => None,
                }
            }
        })
        .join("::")
}
+
#[cfg(test)]
mod tests {
    use super::path_to_string_stripping_turbo_fish;
    use crate::ast::make;

    #[test]
    fn turbofishes_are_stripped() {
        // Turbofish on the final segment.
        assert_eq!("Vec", path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>")),);
        // Turbofish on a qualifier segment.
        assert_eq!(
            "Vec::new",
            path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>::new")),
        );
        // Trailing `()` is not path syntax, so it never reaches the output.
        assert_eq!(
            "Vec::new",
            path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::new()")),
        );
    }
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation.rs b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
new file mode 100644
index 000000000..b9f2b5132
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
@@ -0,0 +1,378 @@
+//! This module implements syntax validation that the parser doesn't handle.
+//!
+//! A failed validation emits a diagnostic.
+
+mod block;
+
+use rowan::Direction;
+use rustc_lexer::unescape::{
+ self, unescape_byte, unescape_byte_literal, unescape_char, unescape_literal, Mode,
+};
+
+use crate::{
+ algo,
+ ast::{self, HasAttrs, HasVisibility},
+ match_ast, AstNode, SyntaxError,
+ SyntaxKind::{CONST, FN, INT_NUMBER, TYPE_ALIAS},
+ SyntaxNode, SyntaxToken, TextSize, T,
+};
+
+pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
+ // FIXME:
+ // * Add unescape validation of raw string literals and raw byte string literals
+ // * Add validation of doc comments are being attached to nodes
+
+ let mut errors = Vec::new();
+ for node in root.descendants() {
+ match_ast! {
+ match node {
+ ast::Literal(it) => validate_literal(it, &mut errors),
+ ast::Const(it) => validate_const(it, &mut errors),
+ ast::BlockExpr(it) => block::validate_block_expr(it, &mut errors),
+ ast::FieldExpr(it) => validate_numeric_name(it.name_ref(), &mut errors),
+ ast::RecordExprField(it) => validate_numeric_name(it.name_ref(), &mut errors),
+ ast::Visibility(it) => validate_visibility(it, &mut errors),
+ ast::RangeExpr(it) => validate_range_expr(it, &mut errors),
+ ast::PathSegment(it) => validate_path_keywords(it, &mut errors),
+ ast::RefType(it) => validate_trait_object_ref_ty(it, &mut errors),
+ ast::PtrType(it) => validate_trait_object_ptr_ty(it, &mut errors),
+ ast::FnPtrType(it) => validate_trait_object_fn_ptr_ret_ty(it, &mut errors),
+ ast::MacroRules(it) => validate_macro_rules(it, &mut errors),
+ ast::LetExpr(it) => validate_let_expr(it, &mut errors),
+ _ => (),
+ }
+ }
+ }
+ errors
+}
+
+fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> &'static str {
+ use unescape::EscapeError as EE;
+
+ #[rustfmt::skip]
+ let err_message = match err {
+ EE::ZeroChars => {
+ "Literal must not be empty"
+ }
+ EE::MoreThanOneChar => {
+ "Literal must be one character long"
+ }
+ EE::LoneSlash => {
+ "Character must be escaped: `\\`"
+ }
+ EE::InvalidEscape => {
+ "Invalid escape"
+ }
+ EE::BareCarriageReturn | EE::BareCarriageReturnInRawString => {
+ "Character must be escaped: `\r`"
+ }
+ EE::EscapeOnlyChar => {
+ "Escape character `\\` must be escaped itself"
+ }
+ EE::TooShortHexEscape => {
+ "ASCII hex escape code must have exactly two digits"
+ }
+ EE::InvalidCharInHexEscape => {
+ "ASCII hex escape code must contain only hex characters"
+ }
+ EE::OutOfRangeHexEscape => {
+ "ASCII hex escape code must be at most 0x7F"
+ }
+ EE::NoBraceInUnicodeEscape => {
+ "Missing `{` to begin the unicode escape"
+ }
+ EE::InvalidCharInUnicodeEscape => {
+ "Unicode escape must contain only hex characters and underscores"
+ }
+ EE::EmptyUnicodeEscape => {
+ "Unicode escape must not be empty"
+ }
+ EE::UnclosedUnicodeEscape => {
+ "Missing `}` to terminate the unicode escape"
+ }
+ EE::LeadingUnderscoreUnicodeEscape => {
+ "Unicode escape code must not begin with an underscore"
+ }
+ EE::OverlongUnicodeEscape => {
+ "Unicode escape code must have at most 6 digits"
+ }
+ EE::LoneSurrogateUnicodeEscape => {
+ "Unicode escape code must not be a surrogate"
+ }
+ EE::OutOfRangeUnicodeEscape => {
+ "Unicode escape code must be at most 0x10FFFF"
+ }
+ EE::UnicodeEscapeInByte => {
+ "Byte literals must not contain unicode escapes"
+ }
+ EE::NonAsciiCharInByte | EE::NonAsciiCharInByteString => {
+ "Byte literals must not contain non-ASCII characters"
+ }
+ };
+
+ err_message
+}
+
+fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
+ // FIXME: move this function to outer scope (https://github.com/rust-lang/rust-analyzer/pull/2834#discussion_r366196658)
+ fn unquote(text: &str, prefix_len: usize, end_delimiter: char) -> Option<&str> {
+ text.rfind(end_delimiter).and_then(|end| text.get(prefix_len..end))
+ }
+
+ let token = literal.token();
+ let text = token.text();
+
+ // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-lang/rust-analyzer/pull/2834#discussion_r366199205)
+ let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
+ let off = token.text_range().start() + TextSize::try_from(off + prefix_len).unwrap();
+ acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
+ };
+
+ match literal.kind() {
+ ast::LiteralKind::String(s) => {
+ if !s.is_raw() {
+ if let Some(without_quotes) = unquote(text, 1, '"') {
+ unescape_literal(without_quotes, Mode::Str, &mut |range, char| {
+ if let Err(err) = char {
+ push_err(1, (range.start, err));
+ }
+ });
+ }
+ }
+ }
+ ast::LiteralKind::ByteString(s) => {
+ if !s.is_raw() {
+ if let Some(without_quotes) = unquote(text, 2, '"') {
+ unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
+ if let Err(err) = char {
+ push_err(2, (range.start, err));
+ }
+ });
+ }
+ }
+ }
+ ast::LiteralKind::Char(_) => {
+ if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) {
+ push_err(1, e);
+ }
+ }
+ ast::LiteralKind::Byte(_) => {
+ if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) {
+ push_err(2, e);
+ }
+ }
+ ast::LiteralKind::IntNumber(_)
+ | ast::LiteralKind::FloatNumber(_)
+ | ast::LiteralKind::Bool(_) => {}
+ }
+}
+
+pub(crate) fn validate_block_structure(root: &SyntaxNode) {
+ let mut stack = Vec::new();
+ for node in root.descendants_with_tokens() {
+ match node.kind() {
+ T!['{'] => stack.push(node),
+ T!['}'] => {
+ if let Some(pair) = stack.pop() {
+ assert_eq!(
+ node.parent(),
+ pair.parent(),
+ "\nunpaired curlys:\n{}\n{:#?}\n",
+ root.text(),
+ root,
+ );
+ assert!(
+ node.next_sibling_or_token().is_none()
+ && pair.prev_sibling_or_token().is_none(),
+ "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
+ node,
+ root.text(),
+ node,
+ );
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+fn validate_numeric_name(name_ref: Option<ast::NameRef>, errors: &mut Vec<SyntaxError>) {
+ if let Some(int_token) = int_token(name_ref) {
+ if int_token.text().chars().any(|c| !c.is_digit(10)) {
+ errors.push(SyntaxError::new(
+ "Tuple (struct) field access is only allowed through \
+ decimal integers with no underscores or suffix",
+ int_token.text_range(),
+ ));
+ }
+ }
+
+ fn int_token(name_ref: Option<ast::NameRef>) -> Option<SyntaxToken> {
+ name_ref?.syntax().first_child_or_token()?.into_token().filter(|it| it.kind() == INT_NUMBER)
+ }
+}
+
+fn validate_visibility(vis: ast::Visibility, errors: &mut Vec<SyntaxError>) {
+ let path_without_in_token = vis.in_token().is_none()
+ && vis.path().and_then(|p| p.as_single_name_ref()).and_then(|n| n.ident_token()).is_some();
+ if path_without_in_token {
+ errors.push(SyntaxError::new("incorrect visibility restriction", vis.syntax.text_range()));
+ }
+ let parent = match vis.syntax().parent() {
+ Some(it) => it,
+ None => return,
+ };
+ match parent.kind() {
+ FN | CONST | TYPE_ALIAS => (),
+ _ => return,
+ }
+
+ let impl_def = match parent.parent().and_then(|it| it.parent()).and_then(ast::Impl::cast) {
+ Some(it) => it,
+ None => return,
+ };
+ // FIXME: disable validation if there's an attribute, since some proc macros use this syntax.
+ // ideally the validation would run only on the fully expanded code, then this wouldn't be necessary.
+ if impl_def.trait_().is_some() && impl_def.attrs().next().is_none() {
+ errors.push(SyntaxError::new("Unnecessary visibility qualifier", vis.syntax.text_range()));
+ }
+}
+
+fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) {
+ if expr.op_kind() == Some(ast::RangeOp::Inclusive) && expr.end().is_none() {
+ errors.push(SyntaxError::new(
+ "An inclusive range must have an end expression",
+ expr.syntax().text_range(),
+ ));
+ }
+}
+
+fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxError>) {
+ let path = segment.parent_path();
+ let is_path_start = segment.coloncolon_token().is_none() && path.qualifier().is_none();
+
+ if let Some(token) = segment.self_token() {
+ if !is_path_start {
+ errors.push(SyntaxError::new(
+ "The `self` keyword is only allowed as the first segment of a path",
+ token.text_range(),
+ ));
+ }
+ } else if let Some(token) = segment.crate_token() {
+ if !is_path_start || use_prefix(path).is_some() {
+ errors.push(SyntaxError::new(
+ "The `crate` keyword is only allowed as the first segment of a path",
+ token.text_range(),
+ ));
+ }
+ }
+
+ fn use_prefix(mut path: ast::Path) -> Option<ast::Path> {
+ for node in path.syntax().ancestors().skip(1) {
+ match_ast! {
+ match node {
+ ast::UseTree(it) => if let Some(tree_path) = it.path() {
+ // Even a top-level path exists within a `UseTree` so we must explicitly
+ // allow our path but disallow anything else
+ if tree_path != path {
+ return Some(tree_path);
+ }
+ },
+ ast::UseTreeList(_) => continue,
+ ast::Path(parent) => path = parent,
+ _ => return None,
+ }
+ };
+ }
+ None
+ }
+}
+
+fn validate_trait_object_ref_ty(ty: ast::RefType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_ptr_ty(ty: ast::PtrType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_fn_ptr_ret_ty(ty: ast::FnPtrType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ret_type().and_then(|ty| ty.ty()) {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_ty(ty: ast::DynTraitType) -> Option<SyntaxError> {
+ let tbl = ty.type_bound_list()?;
+
+ if tbl.bounds().count() > 1 {
+ let dyn_token = ty.dyn_token()?;
+ let potential_parenthesis =
+ algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
+ let kind = potential_parenthesis.kind();
+ if !matches!(kind, T!['('] | T![<] | T![=]) {
+ return Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()));
+ }
+ }
+ None
+}
+
+fn validate_macro_rules(mac: ast::MacroRules, errors: &mut Vec<SyntaxError>) {
+ if let Some(vis) = mac.visibility() {
+ errors.push(SyntaxError::new(
+ "visibilities are not allowed on `macro_rules!` items",
+ vis.syntax().text_range(),
+ ));
+ }
+}
+
+fn validate_const(const_: ast::Const, errors: &mut Vec<SyntaxError>) {
+ if let Some(mut_token) = const_
+ .const_token()
+ .and_then(|t| t.next_token())
+ .and_then(|t| algo::skip_trivia_token(t, Direction::Next))
+ .filter(|t| t.kind() == T![mut])
+ {
+ errors.push(SyntaxError::new("const globals cannot be mutable", mut_token.text_range()));
+ }
+}
+
+fn validate_let_expr(let_: ast::LetExpr, errors: &mut Vec<SyntaxError>) {
+ let mut token = let_.syntax().clone();
+ loop {
+ token = match token.parent() {
+ Some(it) => it,
+ None => break,
+ };
+
+ if ast::ParenExpr::can_cast(token.kind()) {
+ continue;
+ } else if let Some(it) = ast::BinExpr::cast(token.clone()) {
+ if it.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) {
+ continue;
+ }
+ } else if ast::IfExpr::can_cast(token.kind())
+ || ast::WhileExpr::can_cast(token.kind())
+ || ast::MatchGuard::can_cast(token.kind())
+ {
+ // It must be part of the condition since the expressions are inside a block.
+ return;
+ }
+
+ break;
+ }
+ errors.push(SyntaxError::new(
+ "`let` expressions are not supported here",
+ let_.syntax().text_range(),
+ ));
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs b/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
new file mode 100644
index 000000000..8eb4a10a3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
@@ -0,0 +1,24 @@
+//! Logic for validating block expressions i.e. `ast::BlockExpr`.
+
+use crate::{
+ ast::{self, AstNode, HasAttrs},
+ SyntaxError,
+ SyntaxKind::*,
+};
+
+pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
+ if let Some(parent) = block.syntax().parent() {
+ match parent.kind() {
+ FN | EXPR_STMT | STMT_LIST => return,
+ _ => {}
+ }
+ }
+ if let Some(stmt_list) = block.stmt_list() {
+ errors.extend(stmt_list.attrs().filter(|attr| attr.kind().is_inner()).map(|attr| {
+ SyntaxError::new(
+ "A block in this position cannot accept inner attributes",
+ attr.syntax().text_range(),
+ )
+ }));
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs
new file mode 100644
index 000000000..f977d23c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs
@@ -0,0 +1,199 @@
+//! An experimental implementation of [Rust RFC#2256 lrs);
+ let root = SyntaxNode::new_owned(root);
+ validate_block_structure(root.borrowed());
+ File { root }
+ }
+ pub fn parse(text: &str) -> File {
+ let tokens = tokenize(&text);
+ let (green, errors) = parser_impl::parse_with::<syntax_node::GreenBuilder>(
+ text, &tokens, grammar::root,
+ );
+ File::new(green, errors)
+ }
+ pub fn reparse(&self, edit: &AtomTextEdit) -> File {
+ self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
+ }
+ pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<File> {
+ let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
+ let text = replace_range(
+ node.text().to_string(),
+ edit.delete - node.range().start(),
+ &edit.insert,
+ );
+ let tokens = tokenize(&text);
+ if !is_balanced(&tokens) {
+ return None;
+ }
+ let (green, new_errors) = parser_impl::parse_with::<syntax_node::GreenBuilder>(
+ &te2t, &tokens, reparser,
+ );
+ let green_root = node.replace_with(green);
+ let errors = merge_errors(self.errors(), new_errors, node, edit);
+ Some(File::new(green_root, errors))
+ }
+ fn full_reparse(&self, edit: &AtomTextEdit) -> File {
+ let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
+ File::parse(&text)
+ }
+ pub fn ast(&self) -> ast::Root {
+ ast::Root::cast(self.syntax()).unwrap()
+ }
+ pub fn syntax(&self) -> SyntaxNodeRef {
+ self.root.brroowed()
+ }
+ mp_tree(root),
+ );
+ assert!(
+ node.next_sibling().is_none() && pair.prev_sibling().is_none(),
+ "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
+ node,
+ root.text(),
+ node.text(),
+ );
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct AtomTextEdit {
+ pub delete: TextRange,
+ pub insert: String,
+}
+
+impl AtomTextEdit {
+ pub fn replace(range: TextRange, replace_with: String) -> AtomTextEdit {
+ AtomTextEdit { delete: range, insert: replace_with }
+ }
+
+ pub fn delete(range: TextRange) -> AtomTextEdit {
+ AtomTextEdit::replace(range, String::new())
+ }
+
+ pub fn insert(offset: TextUnit, text: String) -> AtomTextEdit {
+ AtomTextEdit::replace(TextRange::offset_len(offset, 0.into()), text)
+ }
+}
+
+fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
+ let node = algo::find_covering_node(node, range);
+ return algo::ancestors(node)
+ .filter_map(|node| reparser(node).map(|r| (node, r)))
+ .next();
+
+ fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
+ let res = match node.kind() {
+ BLOCK => grammar::block,
+ RECORD_FIELD_LIST => grammar::record_field_list,
+ _ => return None,
+ };
+ Some(res)
+ }
+}
+
+pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
+ let start = u32::from(range.start()) as usize;
+ let end = u32::from(range.end()) as usize;
+ text.replace_range(start..end, replace_with);
+ text
+}
+
+fn is_balanced(tokens: &[Token]) -> bool {
+ if tokens.is_empty()
+ || tokens.first().unwrap().kind != L_CURLY
+ || tokens.last().unwrap().kind != R_CURLY {
+ return false
+ }
+ let mut balance = 0usize;
+ for t in tokens.iter() {
+ match t.kind {
+ L_CURLYt {
+ pub delete: TextRange,
+ pub insert: String,
+}
+
+impl AtomTextEdit {
+ pub fn replace(range: TextRange, replace_with: String) -> AtomTextEdit {
+ AtomTextEdit { delete: range, insert: replace_with }
+ }
+
+ pub fn delete(range: TextRange) -> AtomTextEdit {
+ AtomTextEdit::replace(range, String::new())
+ }
+
+ pub fn insert(offset: TextUnit, text: String) -> AtomTextEdit {
+ AtomTextEdit::replace(TextRange::offset_len(offset, 0.into()), text)
+ }
+}
+
+fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
+ let node = algo::find_covering_node(node, range);
+ return algo::ancestors(node)
+ .filter_map(|node| reparser(node).map(|r| (node, r)))
+ .next();
+
+ fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
+ let res = match node.kind() {
+ ;
+ let end = u32::from(range.end()) as usize;
+ text.replaT => grammar::record_field_list,
+ _ => return None,
+ };
+ Some(res)
+ }
+}
+
+pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
+ let start = u32::from(range.start()) as usize;
+ let end = u32::from(range.end()) as usize;
+ text.replace_range(start..end, replace_with);
+ text
+}
+
+fn is_balanced(tokens: &[Token]) -> bool {
+ if tokens.is_empty()
+ || tokens.first().unwrap().kind != L_CURLY
+ || tokens.last().unwrap().kind != R_CURLY {
+ return false
+ }
+ let mut balance = 0usize;
+ for t in tokens.iter() {
+ match t.kind {
+ L_CURLY => balance += 1,
+ R_CURLY => balance = match balance.checked_sub(1) {
+ Some(b) => b,
+ None => return false,
+ },
+ _ => (),
+ }
+ }
+ balance == 0
+}
+
+fn merge_errors(
+ old_errors: Vec<SyntaxError>,
+ new_errors: Vec<SyntaxError>,
+ old_node: SyntaxNodeRef,
+ edit: &AtomTextEdit,
+) -> Vec<SyntaxError> {
+ let mut res = Vec::new();
+ for e in old_errors {
+ if e.offset < old_node.range().start() {
+ res.push(e)
+ } else if e.offset > old_node.range().end() {
+ res.push(SyntaxError {
+ msg: e.msg,
+ offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
+ })
+ }
+ }
+ for e in new_errors {
+ res.push(SyntaxError {
+ msg: e.msg,
+ offset: e.offset + old_node.range().start(),
+ })
+ }
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs
new file mode 100644
index 000000000..f1148058e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs
@@ -0,0 +1,106 @@
+use syntax::{
+ File, TextRange, SyntaxNodeRef, TextUnit,
+ SyntaxKind::*,
+ algo::{find_leaf_at_offset, LeafAtOffset, find_covering_node, ancestors, Direction, siblings},
+};
+
+pub fn extend_selection(file: &File, range: TextRange) -> Option<TextRange> {
+ let syntax = file.syntax();
+ extend(syntax.borrowed(), range)
+}
+
+pub(crate) fn extend(root: SyntaxNodeRef, range: TextRange) -> Option<TextRange> {
+ if range.is_empty() {
+ let offset = range.start();
+ let mut leaves = find_leaf_at_offset(root, offset);
+ if leaves.clone().all(|it| it.kind() == WHITESPACE) {
+ return Some(extend_ws(root, leaves.next()?, offset));
+ }
+ let leaf = match leaves {
+ LeafAtOffset::None => return None,
+ LeafAtOffset::Single(l) => l,
+ LeafAtOffset::Between(l, r) => pick_best(l, r),
+ };
+ return Some(leaf.range());
+ };
+ let node = find_covering_node(root, range);
+ if node.kind() == COMMENT && range == node.range() {
+ if let Some(range) = extend_comments(node) {
+ return Some(range);
+ }
+ }
+
+ match ancestors(node).skip_while(|n| n.range() == range).next() {
+ None => None,
+ Some(parent) => Some(parent.range()),
+ }
+}
+
+fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRange {
+ let ws_text = ws.leaf_text().unwrap();
+ let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start();
+ let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start();
+ let ws_suffix = &ws_text.as_str()[suffix];
+ let ws_prefix = &ws_text.as_str()[prefix];
+ if ws_text.contains("\n") && !ws_suffix.contains("\n") {
+ if let Some(node) = ws.next_sibling() {
+ let start = match ws_prefix.rfind('\n') {
+ Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32),
+ None => node.range().start()
+ };
+ let end = if root.text().char_at(node.range().end()) == Some('\n') {
+ node.range().end() + TextUnit::of_char('\n')
+ } else {
+ node.range().end()
+ };
+ return TextRange::from_to(start, end);
+ }
+ }
+ ws.range()
+}
+
+fn pick_best<'a>(l: SyntaxNodeRef<'a>, r: Syntd[axNodeRef<'a>) -> SyntaxNodeRef<'a> {
+ return if priority(r) > priority(l) { r } else { l };
+ fn priority(n: SyntaxNodeRef) -> usize {
+ match n.kind() {
+ WHITESPACE => 0,
+ IDENT | SELF_KW | SUPER_KW | CRATE_KW => 2,
+ _ => 1,
+ }
+ }
+}
+
+fn extend_comments(node: SyntaxNodeRef) -> Option<TextRange> {
+ let left = adj_com[ments(node, Direction::Backward);
+ let right = adj_comments(node, Direction::Forward);
+ if left != right {
+ Some(TextRange::from_to(
+ left.range().start(),
+ right.range().end(),
+ ))
+ } else {
+ None
+ }
+}
+
+fn adj_comments(node: SyntaxNodeRef, dir: Direction) -> SyntaxNodeRef {
+ let mut res = node;
+ for node in siblings(node, dir) {
+ match node.kind() {
+ COMMENT => res = node,
+ WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (),
+ _ => break
+ }
+ }
+ res
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use test_utils::extract_offset;
+
+ fn do_check(before: &str, afters: &[&str]) {
+ let (cursor, before) = extract_offset(before);
+ let file = File::parse(&before);
+ let mut range = TextRange::of
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs
new file mode 100644
index 000000000..f35dc7289
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs
@@ -0,0 +1 @@
+!('\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs
new file mode 100644
index 000000000..0f59c4722
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs
@@ -0,0 +1 @@
+if'\xɿ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs
new file mode 100644
index 000000000..003290f52
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs
@@ -0,0 +1 @@
+b"\xʿ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast
new file mode 100644
index 000000000..50057a02d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast
@@ -0,0 +1,127 @@
+SOURCE_FILE@0..350
+ FN@0..349
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..8
+ IDENT@3..8 "block"
+ PARAM_LIST@8..10
+ L_PAREN@8..9 "("
+ R_PAREN@9..10 ")"
+ WHITESPACE@10..11 " "
+ BLOCK_EXPR@11..349
+ STMT_LIST@11..349
+ L_CURLY@11..12 "{"
+ WHITESPACE@12..17 "\n "
+ LET_STMT@17..129
+ LET_KW@17..20 "let"
+ WHITESPACE@20..21 " "
+ IDENT_PAT@21..26
+ NAME@21..26
+ IDENT@21..26 "inner"
+ WHITESPACE@26..27 " "
+ EQ@27..28 "="
+ WHITESPACE@28..29 " "
+ BLOCK_EXPR@29..128
+ STMT_LIST@29..128
+ L_CURLY@29..30 "{"
+ WHITESPACE@30..39 "\n "
+ ATTR@39..83
+ POUND@39..40 "#"
+ BANG@40..41 "!"
+ L_BRACK@41..42 "["
+ META@42..82
+ PATH@42..45
+ PATH_SEGMENT@42..45
+ NAME_REF@42..45
+ IDENT@42..45 "doc"
+ TOKEN_TREE@45..82
+ L_PAREN@45..46 "("
+ STRING@46..81 "\"Inner attributes not ..."
+ R_PAREN@81..82 ")"
+ R_BRACK@82..83 "]"
+ WHITESPACE@83..92 "\n "
+ COMMENT@92..122 "//! Nor are ModuleDoc ..."
+ WHITESPACE@122..127 "\n "
+ R_CURLY@127..128 "}"
+ SEMICOLON@128..129 ";"
+ WHITESPACE@129..134 "\n "
+ EXPR_STMT@134..257
+ IF_EXPR@134..257
+ IF_KW@134..136 "if"
+ WHITESPACE@136..137 " "
+ LITERAL@137..141
+ TRUE_KW@137..141 "true"
+ WHITESPACE@141..142 " "
+ BLOCK_EXPR@142..257
+ STMT_LIST@142..257
+ L_CURLY@142..143 "{"
+ WHITESPACE@143..152 "\n "
+ ATTR@152..171
+ POUND@152..153 "#"
+ BANG@153..154 "!"
+ L_BRACK@154..155 "["
+ META@155..170
+ PATH@155..158
+ PATH_SEGMENT@155..158
+ NAME_REF@155..158
+ IDENT@155..158 "doc"
+ TOKEN_TREE@158..170
+ L_PAREN@158..159 "("
+ STRING@159..169 "\"Nor here\""
+ R_PAREN@169..170 ")"
+ R_BRACK@170..171 "]"
+ WHITESPACE@171..180 "\n "
+ ATTR@180..212
+ POUND@180..181 "#"
+ BANG@181..182 "!"
+ L_BRACK@182..183 "["
+ META@183..211
+ PATH@183..186
+ PATH_SEGMENT@183..186
+ NAME_REF@183..186
+ IDENT@183..186 "doc"
+ TOKEN_TREE@186..211
+ L_PAREN@186..187 "("
+ STRING@187..210 "\"We error on each attr\""
+ R_PAREN@210..211 ")"
+ R_BRACK@211..212 "]"
+ WHITESPACE@212..221 "\n "
+ COMMENT@221..251 "//! Nor are ModuleDoc ..."
+ WHITESPACE@251..256 "\n "
+ R_CURLY@256..257 "}"
+ WHITESPACE@257..262 "\n "
+ WHILE_EXPR@262..347
+ WHILE_KW@262..267 "while"
+ WHITESPACE@267..268 " "
+ LITERAL@268..272
+ TRUE_KW@268..272 "true"
+ WHITESPACE@272..273 " "
+ BLOCK_EXPR@273..347
+ STMT_LIST@273..347
+ L_CURLY@273..274 "{"
+ WHITESPACE@274..283 "\n "
+ ATTR@283..302
+ POUND@283..284 "#"
+ BANG@284..285 "!"
+ L_BRACK@285..286 "["
+ META@286..301
+ PATH@286..289
+ PATH_SEGMENT@286..289
+ NAME_REF@286..289
+ IDENT@286..289 "doc"
+ TOKEN_TREE@289..301
+ L_PAREN@289..290 "("
+ STRING@290..300 "\"Nor here\""
+ R_PAREN@300..301 ")"
+ R_BRACK@301..302 "]"
+ WHITESPACE@302..311 "\n "
+ COMMENT@311..341 "//! Nor are ModuleDoc ..."
+ WHITESPACE@341..346 "\n "
+ R_CURLY@346..347 "}"
+ WHITESPACE@347..348 "\n"
+ R_CURLY@348..349 "}"
+ WHITESPACE@349..350 "\n"
+error 39..83: A block in this position cannot accept inner attributes
+error 152..171: A block in this position cannot accept inner attributes
+error 180..212: A block in this position cannot accept inner attributes
+error 283..302: A block in this position cannot accept inner attributes
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs
new file mode 100644
index 000000000..6a04f2d0a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs
@@ -0,0 +1,15 @@
+fn block() {
+ let inner = {
+ #![doc("Inner attributes not allowed here")]
+ //! Nor are ModuleDoc comments
+ };
+ if true {
+ #![doc("Nor here")]
+ #![doc("We error on each attr")]
+ //! Nor are ModuleDoc comments
+ }
+ while true {
+ #![doc("Nor here")]
+ //! Nor are ModuleDoc comments
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast
new file mode 100644
index 000000000..90c258cd1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast
@@ -0,0 +1,105 @@
+SOURCE_FILE@0..118
+ IMPL@0..117
+ IMPL_KW@0..4 "impl"
+ WHITESPACE@4..5 " "
+ PATH_TYPE@5..6
+ PATH@5..6
+ PATH_SEGMENT@5..6
+ NAME_REF@5..6
+ IDENT@5..6 "T"
+ WHITESPACE@6..7 " "
+ FOR_KW@7..10 "for"
+ WHITESPACE@10..11 " "
+ TUPLE_TYPE@11..13
+ L_PAREN@11..12 "("
+ R_PAREN@12..13 ")"
+ WHITESPACE@13..14 " "
+ ASSOC_ITEM_LIST@14..117
+ L_CURLY@14..15 "{"
+ WHITESPACE@15..20 "\n "
+ FN@20..31
+ FN_KW@20..22 "fn"
+ WHITESPACE@22..23 " "
+ NAME@23..26
+ IDENT@23..26 "foo"
+ PARAM_LIST@26..28
+ L_PAREN@26..27 "("
+ R_PAREN@27..28 ")"
+ WHITESPACE@28..29 " "
+ BLOCK_EXPR@29..31
+ STMT_LIST@29..31
+ L_CURLY@29..30 "{"
+ R_CURLY@30..31 "}"
+ WHITESPACE@31..36 "\n "
+ FN@36..51
+ VISIBILITY@36..39
+ PUB_KW@36..39 "pub"
+ WHITESPACE@39..40 " "
+ FN_KW@40..42 "fn"
+ WHITESPACE@42..43 " "
+ NAME@43..46
+ IDENT@43..46 "bar"
+ PARAM_LIST@46..48
+ L_PAREN@46..47 "("
+ R_PAREN@47..48 ")"
+ WHITESPACE@48..49 " "
+ BLOCK_EXPR@49..51
+ STMT_LIST@49..51
+ L_CURLY@49..50 "{"
+ R_CURLY@50..51 "}"
+ WHITESPACE@51..56 "\n "
+ TYPE_ALIAS@56..81
+ VISIBILITY@56..66
+ PUB_KW@56..59 "pub"
+ L_PAREN@59..60 "("
+ PATH@60..65
+ PATH_SEGMENT@60..65
+ NAME_REF@60..65
+ CRATE_KW@60..65 "crate"
+ R_PAREN@65..66 ")"
+ WHITESPACE@66..67 " "
+ TYPE_KW@67..71 "type"
+ WHITESPACE@71..72 " "
+ NAME@72..75
+ IDENT@72..75 "Baz"
+ WHITESPACE@75..76 " "
+ EQ@76..77 "="
+ WHITESPACE@77..78 " "
+ TUPLE_TYPE@78..80
+ L_PAREN@78..79 "("
+ R_PAREN@79..80 ")"
+ SEMICOLON@80..81 ";"
+ WHITESPACE@81..86 "\n "
+ CONST@86..115
+ VISIBILITY@86..96
+ PUB_KW@86..89 "pub"
+ L_PAREN@89..90 "("
+ PATH@90..95
+ PATH_SEGMENT@90..95
+ NAME_REF@90..95
+ CRATE_KW@90..95 "crate"
+ R_PAREN@95..96 ")"
+ WHITESPACE@96..97 " "
+ CONST_KW@97..102 "const"
+ WHITESPACE@102..103 " "
+ NAME@103..104
+ IDENT@103..104 "C"
+ COLON@104..105 ":"
+ WHITESPACE@105..106 " "
+ PATH_TYPE@106..109
+ PATH@106..109
+ PATH_SEGMENT@106..109
+ NAME_REF@106..109
+ IDENT@106..109 "i32"
+ WHITESPACE@109..110 " "
+ EQ@110..111 "="
+ WHITESPACE@111..112 " "
+ LITERAL@112..114
+ INT_NUMBER@112..114 "92"
+ SEMICOLON@114..115 ";"
+ WHITESPACE@115..116 "\n"
+ R_CURLY@116..117 "}"
+ WHITESPACE@117..118 "\n"
+error 36..39: Unnecessary visibility qualifier
+error 56..66: Unnecessary visibility qualifier
+error 86..96: Unnecessary visibility qualifier
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs
new file mode 100644
index 000000000..a43e7ef10
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs
@@ -0,0 +1,6 @@
+impl T for () {
+ fn foo() {}
+ pub fn bar() {}
+ pub(crate) type Baz = ();
+ pub(crate) const C: i32 = 92;
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast
new file mode 100644
index 000000000..fd302fb4d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE@0..33
+ FN@0..32
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..7
+ IDENT@3..7 "main"
+ PARAM_LIST@7..9
+ L_PAREN@7..8 "("
+ R_PAREN@8..9 ")"
+ WHITESPACE@9..10 " "
+ BLOCK_EXPR@10..32
+ STMT_LIST@10..32
+ L_CURLY@10..11 "{"
+ WHITESPACE@11..16 "\n "
+ EXPR_STMT@16..21
+ RANGE_EXPR@16..20
+ LITERAL@16..17
+ INT_NUMBER@16..17 "0"
+ DOT2EQ@17..20 "..="
+ SEMICOLON@20..21 ";"
+ WHITESPACE@21..26 "\n "
+ EXPR_STMT@26..30
+ RANGE_EXPR@26..29
+ DOT2EQ@26..29 "..="
+ SEMICOLON@29..30 ";"
+ WHITESPACE@30..31 "\n"
+ R_CURLY@31..32 "}"
+ WHITESPACE@32..33 "\n"
+error 16..20: An inclusive range must have an end expression
+error 26..29: An inclusive range must have an end expression
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs
new file mode 100644
index 000000000..0b4ed7a2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs
@@ -0,0 +1,4 @@
+fn main() {
+ 0..=;
+ ..=;
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast
new file mode 100644
index 000000000..7449b5ddf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE@0..98
+ USE@0..12
+ USE_KW@0..3 "use"
+ WHITESPACE@3..4 " "
+ USE_TREE@4..11
+ PATH@4..11
+ PATH_SEGMENT@4..11
+ COLON2@4..6 "::"
+ NAME_REF@6..11
+ CRATE_KW@6..11 "crate"
+ SEMICOLON@11..12 ";"
+ WHITESPACE@12..13 "\n"
+ USE@13..54
+ USE_KW@13..16 "use"
+ WHITESPACE@16..17 " "
+ USE_TREE@17..53
+ USE_TREE_LIST@17..53
+ L_CURLY@17..18 "{"
+ USE_TREE@18..23
+ PATH@18..23
+ PATH_SEGMENT@18..23
+ NAME_REF@18..23
+ CRATE_KW@18..23 "crate"
+ COMMA@23..24 ","
+ WHITESPACE@24..25 " "
+ USE_TREE@25..52
+ PATH@25..28
+ PATH_SEGMENT@25..28
+ NAME_REF@25..28
+ IDENT@25..28 "foo"
+ COLON2@28..30 "::"
+ USE_TREE_LIST@30..52
+ L_CURLY@30..31 "{"
+ USE_TREE@31..51
+ PATH@31..51
+ PATH@31..46
+ PATH@31..41
+ PATH@31..36
+ PATH_SEGMENT@31..36
+ NAME_REF@31..36
+ CRATE_KW@31..36 "crate"
+ COLON2@36..38 "::"
+ PATH_SEGMENT@38..41
+ NAME_REF@38..41
+ IDENT@38..41 "foo"
+ COLON2@41..43 "::"
+ PATH_SEGMENT@43..46
+ NAME_REF@43..46
+ IDENT@43..46 "bar"
+ COLON2@46..48 "::"
+ PATH_SEGMENT@48..51
+ NAME_REF@48..51
+ IDENT@48..51 "baz"
+ R_CURLY@51..52 "}"
+ R_CURLY@52..53 "}"
+ SEMICOLON@53..54 ";"
+ WHITESPACE@54..55 "\n"
+ USE@55..72
+ USE_KW@55..58 "use"
+ WHITESPACE@58..59 " "
+ USE_TREE@59..71
+ PATH@59..71
+ PATH@59..64
+ PATH_SEGMENT@59..64
+ NAME_REF@59..64
+ IDENT@59..64 "hello"
+ COLON2@64..66 "::"
+ PATH_SEGMENT@66..71
+ NAME_REF@66..71
+ CRATE_KW@66..71 "crate"
+ SEMICOLON@71..72 ";"
+ WHITESPACE@72..73 "\n"
+ USE@73..97
+ USE_KW@73..76 "use"
+ WHITESPACE@76..77 " "
+ USE_TREE@77..96
+ PATH@77..96
+ PATH@77..89
+ PATH@77..82
+ PATH_SEGMENT@77..82
+ NAME_REF@77..82
+ IDENT@77..82 "hello"
+ COLON2@82..84 "::"
+ PATH_SEGMENT@84..89
+ NAME_REF@84..89
+ CRATE_KW@84..89 "crate"
+ COLON2@89..91 "::"
+ PATH_SEGMENT@91..96
+ NAME_REF@91..96
+ IDENT@91..96 "there"
+ SEMICOLON@96..97 ";"
+ WHITESPACE@97..98 "\n"
+error 6..11: The `crate` keyword is only allowed as the first segment of a path
+error 31..36: The `crate` keyword is only allowed as the first segment of a path
+error 66..71: The `crate` keyword is only allowed as the first segment of a path
+error 84..89: The `crate` keyword is only allowed as the first segment of a path
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs
new file mode 100644
index 000000000..508def2c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs
@@ -0,0 +1,4 @@
+use ::crate;
+use {crate, foo::{crate::foo::bar::baz}};
+use hello::crate;
+use hello::crate::there;
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast
new file mode 100644
index 000000000..01f601091
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE@0..25
+ USE@0..11
+ USE_KW@0..3 "use"
+ WHITESPACE@3..4 " "
+ USE_TREE@4..10
+ PATH@4..10
+ PATH_SEGMENT@4..10
+ COLON2@4..6 "::"
+ NAME_REF@6..10
+ SELF_KW@6..10 "self"
+ SEMICOLON@10..11 ";"
+ WHITESPACE@11..12 "\n"
+ USE@12..24
+ USE_KW@12..15 "use"
+ WHITESPACE@15..16 " "
+ USE_TREE@16..23
+ PATH@16..23
+ PATH@16..17
+ PATH_SEGMENT@16..17
+ NAME_REF@16..17
+ IDENT@16..17 "a"
+ COLON2@17..19 "::"
+ PATH_SEGMENT@19..23
+ NAME_REF@19..23
+ SELF_KW@19..23 "self"
+ SEMICOLON@23..24 ";"
+ WHITESPACE@24..25 "\n"
+error 6..10: The `self` keyword is only allowed as the first segment of a path
+error 19..23: The `self` keyword is only allowed as the first segment of a path
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs
new file mode 100644
index 000000000..b9e1d7d8b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs
@@ -0,0 +1,2 @@
+use ::self;
+use a::self;
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast
new file mode 100644
index 000000000..d94daacdc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast
@@ -0,0 +1,196 @@
+SOURCE_FILE@0..187
+ TYPE_ALIAS@0..35
+ TYPE_KW@0..4 "type"
+ WHITESPACE@4..5 " "
+ NAME@5..8
+ IDENT@5..8 "Foo"
+ GENERIC_PARAM_LIST@8..12
+ L_ANGLE@8..9 "<"
+ LIFETIME_PARAM@9..11
+ LIFETIME@9..11
+ LIFETIME_IDENT@9..11 "'a"
+ R_ANGLE@11..12 ">"
+ WHITESPACE@12..13 " "
+ EQ@13..14 "="
+ WHITESPACE@14..15 " "
+ REF_TYPE@15..34
+ AMP@15..16 "&"
+ LIFETIME@16..18
+ LIFETIME_IDENT@16..18 "'a"
+ WHITESPACE@18..19 " "
+ DYN_TRAIT_TYPE@19..34
+ DYN_KW@19..22 "dyn"
+ WHITESPACE@22..23 " "
+ TYPE_BOUND_LIST@23..34
+ TYPE_BOUND@23..27
+ PATH_TYPE@23..27
+ PATH@23..27
+ PATH_SEGMENT@23..27
+ NAME_REF@23..27
+ IDENT@23..27 "Send"
+ WHITESPACE@27..28 " "
+ PLUS@28..29 "+"
+ WHITESPACE@29..30 " "
+ TYPE_BOUND@30..34
+ PATH_TYPE@30..34
+ PATH@30..34
+ PATH_SEGMENT@30..34
+ NAME_REF@30..34
+ IDENT@30..34 "Sync"
+ SEMICOLON@34..35 ";"
+ WHITESPACE@35..36 "\n"
+ TYPE_ALIAS@36..70
+ TYPE_KW@36..40 "type"
+ WHITESPACE@40..41 " "
+ NAME@41..44
+ IDENT@41..44 "Foo"
+ WHITESPACE@44..45 " "
+ EQ@45..46 "="
+ WHITESPACE@46..47 " "
+ PTR_TYPE@47..69
+ STAR@47..48 "*"
+ CONST_KW@48..53 "const"
+ WHITESPACE@53..54 " "
+ DYN_TRAIT_TYPE@54..69
+ DYN_KW@54..57 "dyn"
+ WHITESPACE@57..58 " "
+ TYPE_BOUND_LIST@58..69
+ TYPE_BOUND@58..62
+ PATH_TYPE@58..62
+ PATH@58..62
+ PATH_SEGMENT@58..62
+ NAME_REF@58..62
+ IDENT@58..62 "Send"
+ WHITESPACE@62..63 " "
+ PLUS@63..64 "+"
+ WHITESPACE@64..65 " "
+ TYPE_BOUND@65..69
+ PATH_TYPE@65..69
+ PATH@65..69
+ PATH_SEGMENT@65..69
+ NAME_REF@65..69
+ IDENT@65..69 "Sync"
+ SEMICOLON@69..70 ";"
+ WHITESPACE@70..71 "\n"
+ TYPE_ALIAS@71..109
+ TYPE_KW@71..75 "type"
+ WHITESPACE@75..76 " "
+ NAME@76..79
+ IDENT@76..79 "Foo"
+ WHITESPACE@79..80 " "
+ EQ@80..81 "="
+ WHITESPACE@81..82 " "
+ FN_PTR_TYPE@82..108
+ FN_KW@82..84 "fn"
+ PARAM_LIST@84..86
+ L_PAREN@84..85 "("
+ R_PAREN@85..86 ")"
+ WHITESPACE@86..87 " "
+ RET_TYPE@87..108
+ THIN_ARROW@87..89 "->"
+ WHITESPACE@89..90 " "
+ DYN_TRAIT_TYPE@90..108
+ DYN_KW@90..93 "dyn"
+ WHITESPACE@93..94 " "
+ TYPE_BOUND_LIST@94..108
+ TYPE_BOUND@94..98
+ PATH_TYPE@94..98
+ PATH@94..98
+ PATH_SEGMENT@94..98
+ NAME_REF@94..98
+ IDENT@94..98 "Send"
+ WHITESPACE@98..99 " "
+ PLUS@99..100 "+"
+ WHITESPACE@100..101 " "
+ TYPE_BOUND@101..108
+ LIFETIME@101..108
+ LIFETIME_IDENT@101..108 "'static"
+ SEMICOLON@108..109 ";"
+ WHITESPACE@109..110 "\n"
+ FN@110..186
+ FN_KW@110..112 "fn"
+ WHITESPACE@112..113 " "
+ NAME@113..117
+ IDENT@113..117 "main"
+ PARAM_LIST@117..119
+ L_PAREN@117..118 "("
+ R_PAREN@118..119 ")"
+ WHITESPACE@119..120 " "
+ BLOCK_EXPR@120..186
+ STMT_LIST@120..186
+ L_CURLY@120..121 "{"
+ WHITESPACE@121..126 "\n "
+ LET_STMT@126..184
+ LET_KW@126..129 "let"
+ WHITESPACE@129..130 " "
+ IDENT_PAT@130..131
+ NAME@130..131
+ IDENT@130..131 "b"
+ WHITESPACE@131..132 " "
+ EQ@132..133 "="
+ WHITESPACE@133..134 " "
+ CAST_EXPR@134..183
+ PAREN_EXPR@134..138
+ L_PAREN@134..135 "("
+ REF_EXPR@135..137
+ AMP@135..136 "&"
+ PATH_EXPR@136..137
+ PATH@136..137
+ PATH_SEGMENT@136..137
+ NAME_REF@136..137
+ IDENT@136..137 "a"
+ R_PAREN@137..138 ")"
+ WHITESPACE@138..139 " "
+ AS_KW@139..141 "as"
+ WHITESPACE@141..142 " "
+ REF_TYPE@142..183
+ AMP@142..143 "&"
+ DYN_TRAIT_TYPE@143..183
+ DYN_KW@143..146 "dyn"
+ WHITESPACE@146..147 " "
+ TYPE_BOUND_LIST@147..183
+ TYPE_BOUND@147..175
+ PATH_TYPE@147..175
+ PATH@147..175
+ PATH_SEGMENT@147..175
+ NAME_REF@147..150
+ IDENT@147..150 "Add"
+ GENERIC_ARG_LIST@150..175
+ L_ANGLE@150..151 "<"
+ TYPE_ARG@151..156
+ PATH_TYPE@151..156
+ PATH@151..156
+ PATH_SEGMENT@151..156
+ NAME_REF@151..156
+ IDENT@151..156 "Other"
+ COMMA@156..157 ","
+ WHITESPACE@157..158 " "
+ ASSOC_TYPE_ARG@158..174
+ NAME_REF@158..164
+ IDENT@158..164 "Output"
+ WHITESPACE@164..165 " "
+ EQ@165..166 "="
+ WHITESPACE@166..167 " "
+ PATH_TYPE@167..174
+ PATH@167..174
+ PATH_SEGMENT@167..174
+ NAME_REF@167..174
+ IDENT@167..174 "Addable"
+ R_ANGLE@174..175 ">"
+ WHITESPACE@175..176 " "
+ PLUS@176..177 "+"
+ WHITESPACE@177..178 " "
+ TYPE_BOUND@178..183
+ PATH_TYPE@178..183
+ PATH@178..183
+ PATH_SEGMENT@178..183
+ NAME_REF@178..183
+ IDENT@178..183 "Other"
+ SEMICOLON@183..184 ";"
+ WHITESPACE@184..185 "\n"
+ R_CURLY@185..186 "}"
+ WHITESPACE@186..187 "\n"
+error 19..34: ambiguous `+` in a type
+error 54..69: ambiguous `+` in a type
+error 90..108: ambiguous `+` in a type
+error 143..183: ambiguous `+` in a type
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs
new file mode 100644
index 000000000..0a5958f25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs
@@ -0,0 +1,6 @@
+type Foo<'a> = &'a dyn Send + Sync;
+type Foo = *const dyn Send + Sync;
+type Foo = fn() -> dyn Send + 'static;
+fn main() {
+ let b = (&a) as &dyn Add<Other, Output = Addable> + Other;
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast
new file mode 100644
index 000000000..c7eb312c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE@0..24
+ CONST@0..23
+ CONST_KW@0..5 "const"
+ WHITESPACE@5..6 " "
+ MUT_KW@6..9 "mut"
+ WHITESPACE@9..10 " "
+ NAME@10..13
+ IDENT@10..13 "FOO"
+ COLON@13..14 ":"
+ WHITESPACE@14..15 " "
+ TUPLE_TYPE@15..17
+ L_PAREN@15..16 "("
+ R_PAREN@16..17 ")"
+ WHITESPACE@17..18 " "
+ EQ@18..19 "="
+ WHITESPACE@19..20 " "
+ TUPLE_EXPR@20..22
+ L_PAREN@20..21 "("
+ R_PAREN@21..22 ")"
+ SEMICOLON@22..23 ";"
+ WHITESPACE@23..24 "\n"
+error 6..9: const globals cannot be mutable
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs
new file mode 100644
index 000000000..ccab6bccf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs
@@ -0,0 +1 @@
+const mut FOO: () = ();
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast
new file mode 100644
index 000000000..9e1e48864
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast
@@ -0,0 +1,216 @@
+SOURCE_FILE@0..282
+ FN@0..281
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..281
+ STMT_LIST@9..281
+ L_CURLY@9..10 "{"
+ WHITESPACE@10..15 "\n "
+ CONST@15..42
+ CONST_KW@15..20 "const"
+ WHITESPACE@20..21 " "
+ UNDERSCORE@21..22 "_"
+ COLON@22..23 ":"
+ WHITESPACE@23..24 " "
+ TUPLE_TYPE@24..26
+ L_PAREN@24..25 "("
+ R_PAREN@25..26 ")"
+ WHITESPACE@26..27 " "
+ EQ@27..28 "="
+ WHITESPACE@28..29 " "
+ LET_EXPR@29..41
+ LET_KW@29..32 "let"
+ WHITESPACE@32..33 " "
+ WILDCARD_PAT@33..34
+ UNDERSCORE@33..34 "_"
+ WHITESPACE@34..35 " "
+ EQ@35..36 "="
+ WHITESPACE@36..37 " "
+ PATH_EXPR@37..41
+ PATH@37..41
+ PATH_SEGMENT@37..41
+ NAME_REF@37..41
+ IDENT@37..41 "None"
+ SEMICOLON@41..42 ";"
+ WHITESPACE@42..48 "\n\n "
+ LET_STMT@48..83
+ LET_KW@48..51 "let"
+ WHITESPACE@51..52 " "
+ WILDCARD_PAT@52..53
+ UNDERSCORE@52..53 "_"
+ WHITESPACE@53..54 " "
+ EQ@54..55 "="
+ WHITESPACE@55..56 " "
+ IF_EXPR@56..82
+ IF_KW@56..58 "if"
+ WHITESPACE@58..59 " "
+ LITERAL@59..63
+ TRUE_KW@59..63 "true"
+ WHITESPACE@63..64 " "
+ BLOCK_EXPR@64..82
+ STMT_LIST@64..82
+ L_CURLY@64..65 "{"
+ WHITESPACE@65..66 " "
+ PAREN_EXPR@66..80
+ L_PAREN@66..67 "("
+ LET_EXPR@67..79
+ LET_KW@67..70 "let"
+ WHITESPACE@70..71 " "
+ WILDCARD_PAT@71..72
+ UNDERSCORE@71..72 "_"
+ WHITESPACE@72..73 " "
+ EQ@73..74 "="
+ WHITESPACE@74..75 " "
+ PATH_EXPR@75..79
+ PATH@75..79
+ PATH_SEGMENT@75..79
+ NAME_REF@75..79
+ IDENT@75..79 "None"
+ R_PAREN@79..80 ")"
+ WHITESPACE@80..81 " "
+ R_CURLY@81..82 "}"
+ SEMICOLON@82..83 ";"
+ WHITESPACE@83..89 "\n\n "
+ IF_EXPR@89..279
+ IF_KW@89..91 "if"
+ WHITESPACE@91..92 " "
+ BIN_EXPR@92..114
+ LITERAL@92..96
+ TRUE_KW@92..96 "true"
+ WHITESPACE@96..97 " "
+ AMP2@97..99 "&&"
+ WHITESPACE@99..100 " "
+ PAREN_EXPR@100..114
+ L_PAREN@100..101 "("
+ LET_EXPR@101..113
+ LET_KW@101..104 "let"
+ WHITESPACE@104..105 " "
+ WILDCARD_PAT@105..106
+ UNDERSCORE@105..106 "_"
+ WHITESPACE@106..107 " "
+ EQ@107..108 "="
+ WHITESPACE@108..109 " "
+ PATH_EXPR@109..113
+ PATH@109..113
+ PATH_SEGMENT@109..113
+ NAME_REF@109..113
+ IDENT@109..113 "None"
+ R_PAREN@113..114 ")"
+ WHITESPACE@114..115 " "
+ BLOCK_EXPR@115..279
+ STMT_LIST@115..279
+ L_CURLY@115..116 "{"
+ WHITESPACE@116..125 "\n "
+ EXPR_STMT@125..140
+ PAREN_EXPR@125..139
+ L_PAREN@125..126 "("
+ LET_EXPR@126..138
+ LET_KW@126..129 "let"
+ WHITESPACE@129..130 " "
+ WILDCARD_PAT@130..131
+ UNDERSCORE@130..131 "_"
+ WHITESPACE@131..132 " "
+ EQ@132..133 "="
+ WHITESPACE@133..134 " "
+ PATH_EXPR@134..138
+ PATH@134..138
+ PATH_SEGMENT@134..138
+ NAME_REF@134..138
+ IDENT@134..138 "None"
+ R_PAREN@138..139 ")"
+ SEMICOLON@139..140 ";"
+ WHITESPACE@140..149 "\n "
+ WHILE_EXPR@149..273
+ WHILE_KW@149..154 "while"
+ WHITESPACE@154..155 " "
+ LET_EXPR@155..167
+ LET_KW@155..158 "let"
+ WHITESPACE@158..159 " "
+ WILDCARD_PAT@159..160
+ UNDERSCORE@159..160 "_"
+ WHITESPACE@160..161 " "
+ EQ@161..162 "="
+ WHITESPACE@162..163 " "
+ PATH_EXPR@163..167
+ PATH@163..167
+ PATH_SEGMENT@163..167
+ NAME_REF@163..167
+ IDENT@163..167 "None"
+ WHITESPACE@167..168 " "
+ BLOCK_EXPR@168..273
+ STMT_LIST@168..273
+ L_CURLY@168..169 "{"
+ WHITESPACE@169..182 "\n "
+ MATCH_EXPR@182..263
+ MATCH_KW@182..187 "match"
+ WHITESPACE@187..188 " "
+ PATH_EXPR@188..192
+ PATH@188..192
+ PATH_SEGMENT@188..192
+ NAME_REF@188..192
+ IDENT@188..192 "None"
+ WHITESPACE@192..193 " "
+ MATCH_ARM_LIST@193..263
+ L_CURLY@193..194 "{"
+ WHITESPACE@194..211 "\n "
+ MATCH_ARM@211..249
+ WILDCARD_PAT@211..212
+ UNDERSCORE@211..212 "_"
+ WHITESPACE@212..213 " "
+ MATCH_GUARD@213..228
+ IF_KW@213..215 "if"
+ WHITESPACE@215..216 " "
+ LET_EXPR@216..228
+ LET_KW@216..219 "let"
+ WHITESPACE@219..220 " "
+ WILDCARD_PAT@220..221
+ UNDERSCORE@220..221 "_"
+ WHITESPACE@221..222 " "
+ EQ@222..223 "="
+ WHITESPACE@223..224 " "
+ PATH_EXPR@224..228
+ PATH@224..228
+ PATH_SEGMENT@224..228
+ NAME_REF@224..228
+ IDENT@224..228 "None"
+ WHITESPACE@228..229 " "
+ FAT_ARROW@229..231 "=>"
+ WHITESPACE@231..232 " "
+ BLOCK_EXPR@232..249
+ STMT_LIST@232..249
+ L_CURLY@232..233 "{"
+ WHITESPACE@233..234 " "
+ LET_STMT@234..247
+ LET_KW@234..237 "let"
+ WHITESPACE@237..238 " "
+ WILDCARD_PAT@238..239
+ UNDERSCORE@238..239 "_"
+ WHITESPACE@239..240 " "
+ EQ@240..241 "="
+ WHITESPACE@241..242 " "
+ PATH_EXPR@242..246
+ PATH@242..246
+ PATH_SEGMENT@242..246
+ NAME_REF@242..246
+ IDENT@242..246 "None"
+ SEMICOLON@246..247 ";"
+ WHITESPACE@247..248 " "
+ R_CURLY@248..249 "}"
+ WHITESPACE@249..262 "\n "
+ R_CURLY@262..263 "}"
+ WHITESPACE@263..272 "\n "
+ R_CURLY@272..273 "}"
+ WHITESPACE@273..278 "\n "
+ R_CURLY@278..279 "}"
+ WHITESPACE@279..280 "\n"
+ R_CURLY@280..281 "}"
+ WHITESPACE@281..282 "\n"
+error 29..41: `let` expressions are not supported here
+error 67..79: `let` expressions are not supported here
+error 126..138: `let` expressions are not supported here
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs
new file mode 100644
index 000000000..1515ae533
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs
@@ -0,0 +1,14 @@
+fn foo() {
+ const _: () = let _ = None;
+
+ let _ = if true { (let _ = None) };
+
+ if true && (let _ = None) {
+ (let _ = None);
+ while let _ = None {
+ match None {
+ _ if let _ = None => { let _ = None; }
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs
new file mode 100644
index 000000000..388eb74ed
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs
@@ -0,0 +1,6 @@
+0
+1
+
+
+
+0 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs
new file mode 100644
index 000000000..d2d42c6f9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs
@@ -0,0 +1,4 @@
+0
+1
+
+bb" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs
new file mode 100644
index 000000000..3fbee1548
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs
@@ -0,0 +1,4 @@
+1
+1
+
+""! \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rs
new file mode 100644
index 000000000..d2757cd08
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rs
Binary files differ
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs
new file mode 100644
index 000000000..481617a70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs
@@ -0,0 +1,4 @@
+0
+0
+}
+{; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs
new file mode 100644
index 000000000..074d761c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs
@@ -0,0 +1,7 @@
+05
+1
+
+
+
+b'
+ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
new file mode 100644
index 000000000..cceafe04e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "test-utils"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+# Avoid adding deps here, this crate is widely used in tests it should compile fast!
+dissimilar = "1.0.4"
+text-size = "1.1.0"
+rustc-hash = "1.1.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs b/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs
new file mode 100644
index 000000000..24502ddb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs
@@ -0,0 +1,112 @@
+//! Checks that a set of measurements looks like a linear function rather than
+//! like a quadratic function. Algorithm:
+//!
+//! 1. Linearly scale input to be in [0; 1)
+//! 2. Using linear regression, compute the best linear function approximating
+//! the input.
+//! 3. Compute RMSE and maximal absolute error.
+//! 4. Check that errors are within tolerances and that the constant term is not
+//! too negative.
+//!
+//! Ideally, we should use a proper "model selection" to directly compare
+//! quadratic and linear models, but that sounds rather complicated:
+//!
+//! https://stats.stackexchange.com/questions/21844/selecting-best-model-based-on-linear-quadratic-and-cubic-fit-of-data
+//!
+//! We might get false positives on a VM, but never false negatives. So, if the
+//! first round fails, we repeat the ordeal three more times and fail only if
+//! every time there's a fault.
+use stdx::format_to;
+
+#[derive(Default)]
+pub struct AssertLinear {
+ rounds: Vec<Round>,
+}
+
+#[derive(Default)]
+struct Round {
+ samples: Vec<(f64, f64)>,
+ plot: String,
+ linear: bool,
+}
+
+impl AssertLinear {
+ pub fn next_round(&mut self) -> bool {
+ if let Some(round) = self.rounds.last_mut() {
+ round.finish();
+ }
+ if self.rounds.iter().any(|it| it.linear) || self.rounds.len() == 4 {
+ return false;
+ }
+ self.rounds.push(Round::default());
+ true
+ }
+
+ pub fn sample(&mut self, x: f64, y: f64) {
+ self.rounds.last_mut().unwrap().samples.push((x, y));
+ }
+}
+
+impl Drop for AssertLinear {
+ fn drop(&mut self) {
+ assert!(!self.rounds.is_empty());
+ if self.rounds.iter().all(|it| !it.linear) {
+ for round in &self.rounds {
+ eprintln!("\n{}", round.plot);
+ }
+ panic!("Doesn't look linear!");
+ }
+ }
+}
+
+impl Round {
+ fn finish(&mut self) {
+ let (mut xs, mut ys): (Vec<_>, Vec<_>) = self.samples.iter().copied().unzip();
+ normalize(&mut xs);
+ normalize(&mut ys);
+ let xy = xs.iter().copied().zip(ys.iter().copied());
+
+ // Linear regression: finding a and b to fit y = a + b*x.
+
+ let mean_x = mean(&xs);
+ let mean_y = mean(&ys);
+
+ let b = {
+ let mut num = 0.0;
+ let mut denom = 0.0;
+ for (x, y) in xy.clone() {
+ num += (x - mean_x) * (y - mean_y);
+ denom += (x - mean_x).powi(2);
+ }
+ num / denom
+ };
+
+ let a = mean_y - b * mean_x;
+
+ self.plot = format!("y_pred = {:.3} + {:.3} * x\n\nx y y_pred\n", a, b);
+
+ let mut se = 0.0;
+ let mut max_error = 0.0f64;
+ for (x, y) in xy {
+ let y_pred = a + b * x;
+ se += (y - y_pred).powi(2);
+ max_error = max_error.max((y_pred - y).abs());
+
+ format_to!(self.plot, "{:.3} {:.3} {:.3}\n", x, y, y_pred);
+ }
+
+ let rmse = (se / xs.len() as f64).sqrt();
+ format_to!(self.plot, "\nrmse = {:.3} max error = {:.3}", rmse, max_error);
+
+ self.linear = rmse < 0.05 && max_error < 0.1 && a > -0.1;
+
+ fn normalize(xs: &mut Vec<f64>) {
+ let max = xs.iter().copied().max_by(|a, b| a.partial_cmp(b).unwrap()).unwrap();
+ xs.iter_mut().for_each(|it| *it /= max);
+ }
+
+ fn mean(xs: &[f64]) -> f64 {
+ xs.iter().copied().sum::<f64>() / (xs.len() as f64)
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs
new file mode 100644
index 000000000..979156263
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs
@@ -0,0 +1,45 @@
+//! Generates large snippets of Rust code for usage in the benchmarks.
+
+use std::fs;
+
+use stdx::format_to;
+
+use crate::project_root;
+
+pub fn big_struct() -> String {
+ let n = 1_000;
+ big_struct_n(n)
+}
+
+pub fn big_struct_n(n: u32) -> String {
+ let mut buf = "pub struct RegisterBlock {".to_string();
+ for i in 0..n {
+ format_to!(buf, " /// Doc comment for {}.\n", i);
+ format_to!(buf, " pub s{}: S{},\n", i, i);
+ }
+ buf.push_str("}\n\n");
+ for i in 0..n {
+ format_to!(
+ buf,
+ "
+
+#[repr(transparent)]
+struct S{} {{
+ field: u32,
+}}",
+ i
+ );
+ }
+
+ buf
+}
+
+pub fn glorious_old_parser() -> String {
+ let path = project_root().join("bench_data/glorious_old_parser");
+ fs::read_to_string(&path).unwrap()
+}
+
+pub fn numerous_macro_rules() -> String {
+ let path = project_root().join("bench_data/numerous_macro_rules");
+ fs::read_to_string(&path).unwrap()
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
new file mode 100644
index 000000000..8c806e792
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
@@ -0,0 +1,409 @@
+//! Defines `Fixture` -- a convenient way to describe the initial state of
+//! rust-analyzer database from a single string.
+//!
+//! Fixtures are strings containing rust source code with optional metadata.
+//! A fixture without metadata is parsed into a single source file.
+//! Use this to test functionality local to one file.
+//!
+//! Simple Example:
+//! ```
+//! r#"
+//! fn main() {
+//! println!("Hello World")
+//! }
+//! "#
+//! ```
+//!
+//! Metadata can be added to a fixture after a `//-` comment.
+//! The basic form is specifying filenames,
+//! which is also how to define multiple files in a single test fixture
+//!
+//! Example using two files in the same crate:
+//! ```
+//! "
+//! //- /main.rs
+//! mod foo;
+//! fn main() {
+//! foo::bar();
+//! }
+//!
+//! //- /foo.rs
+//! pub fn bar() {}
+//! "
+//! ```
+//!
+//! Example using two crates with one file each, with one crate depending on the other:
+//! ```
+//! r#"
+//! //- /main.rs crate:a deps:b
+//! fn main() {
+//! b::foo();
+//! }
+//! //- /lib.rs crate:b
+//! pub fn b() {
+//! println!("Hello World")
+//! }
+//! "#
+//! ```
+//!
+//! Metadata allows specifying all settings and variables
+//! that are available in a real rust project:
+//! - crate names via `crate:cratename`
+//! - dependencies via `deps:dep1,dep2`
+//! - configuration settings via `cfg:dbg=false,opt_level=2`
+//! - environment variables via `env:PATH=/bin,RUST_LOG=debug`
+//!
+//! Example using all available metadata:
+//! ```
+//! "
+//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
+//! fn insert_source_code_here() {}
+//! "
+//! ```
+
+use rustc_hash::FxHashMap;
+use stdx::trim_indent;
+
+#[derive(Debug, Eq, PartialEq)]
+pub struct Fixture {
+ pub path: String,
+ pub text: String,
+ pub krate: Option<String>,
+ pub deps: Vec<String>,
+ pub extern_prelude: Option<Vec<String>>,
+ pub cfg_atoms: Vec<String>,
+ pub cfg_key_values: Vec<(String, String)>,
+ pub edition: Option<String>,
+ pub env: FxHashMap<String, String>,
+ pub introduce_new_source_root: Option<String>,
+}
+
+pub struct MiniCore {
+ activated_flags: Vec<String>,
+ valid_flags: Vec<String>,
+}
+
+impl Fixture {
+ /// Parses text which looks like this:
+ ///
+ /// ```not_rust
+ /// //- some meta
+ /// line 1
+ /// line 2
+ /// //- other meta
+ /// ```
+ ///
+ /// Fixture can also start with a proc_macros and minicore declaration(in that order):
+ ///
+ /// ```
+ /// //- proc_macros: identity
+ /// //- minicore: sized
+ /// ```
+ ///
+ /// That will include predefined proc macros and a subset of `libcore` into the fixture, see
+ /// `minicore.rs` for what's available.
+ pub fn parse(ra_fixture: &str) -> (Option<MiniCore>, Vec<String>, Vec<Fixture>) {
+ let fixture = trim_indent(ra_fixture);
+ let mut fixture = fixture.as_str();
+ let mut mini_core = None;
+ let mut res: Vec<Fixture> = Vec::new();
+ let mut test_proc_macros = vec![];
+
+ if fixture.starts_with("//- proc_macros:") {
+ let first_line = fixture.split_inclusive('\n').next().unwrap();
+ test_proc_macros = first_line
+ .strip_prefix("//- proc_macros:")
+ .unwrap()
+ .split(',')
+ .map(|it| it.trim().to_string())
+ .collect();
+ fixture = &fixture[first_line.len()..];
+ }
+
+ if fixture.starts_with("//- minicore:") {
+ let first_line = fixture.split_inclusive('\n').next().unwrap();
+ mini_core = Some(MiniCore::parse(first_line));
+ fixture = &fixture[first_line.len()..];
+ }
+
+ let default = if fixture.contains("//-") { None } else { Some("//- /main.rs") };
+
+ for (ix, line) in default.into_iter().chain(fixture.split_inclusive('\n')).enumerate() {
+ if line.contains("//-") {
+ assert!(
+ line.starts_with("//-"),
+ "Metadata line {} has invalid indentation. \
+ All metadata lines need to have the same indentation.\n\
+ The offending line: {:?}",
+ ix,
+ line
+ );
+ }
+
+ if line.starts_with("//-") {
+ let meta = Fixture::parse_meta_line(line);
+ res.push(meta);
+ } else {
+ if line.starts_with("// ")
+ && line.contains(':')
+ && !line.contains("::")
+ && !line.contains('.')
+ && line.chars().all(|it| !it.is_uppercase())
+ {
+ panic!("looks like invalid metadata line: {:?}", line);
+ }
+
+ if let Some(entry) = res.last_mut() {
+ entry.text.push_str(line);
+ }
+ }
+ }
+
+ (mini_core, test_proc_macros, res)
+ }
+
+ //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
+ fn parse_meta_line(meta: &str) -> Fixture {
+ assert!(meta.starts_with("//-"));
+ let meta = meta["//-".len()..].trim();
+ let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
+
+ let path = components[0].to_string();
+ assert!(path.starts_with('/'), "fixture path does not start with `/`: {:?}", path);
+
+ let mut krate = None;
+ let mut deps = Vec::new();
+ let mut extern_prelude = None;
+ let mut edition = None;
+ let mut cfg_atoms = Vec::new();
+ let mut cfg_key_values = Vec::new();
+ let mut env = FxHashMap::default();
+ let mut introduce_new_source_root = None;
+ for component in components[1..].iter() {
+ let (key, value) = component
+ .split_once(':')
+ .unwrap_or_else(|| panic!("invalid meta line: {:?}", meta));
+ match key {
+ "crate" => krate = Some(value.to_string()),
+ "deps" => deps = value.split(',').map(|it| it.to_string()).collect(),
+ "extern-prelude" => {
+ if value.is_empty() {
+ extern_prelude = Some(Vec::new());
+ } else {
+ extern_prelude =
+ Some(value.split(',').map(|it| it.to_string()).collect::<Vec<_>>());
+ }
+ }
+ "edition" => edition = Some(value.to_string()),
+ "cfg" => {
+ for entry in value.split(',') {
+ match entry.split_once('=') {
+ Some((k, v)) => cfg_key_values.push((k.to_string(), v.to_string())),
+ None => cfg_atoms.push(entry.to_string()),
+ }
+ }
+ }
+ "env" => {
+ for key in value.split(',') {
+ if let Some((k, v)) = key.split_once('=') {
+ env.insert(k.into(), v.into());
+ }
+ }
+ }
+ "new_source_root" => introduce_new_source_root = Some(value.to_string()),
+ _ => panic!("bad component: {:?}", component),
+ }
+ }
+
+ for prelude_dep in extern_prelude.iter().flatten() {
+ assert!(
+ deps.contains(prelude_dep),
+ "extern-prelude {:?} must be a subset of deps {:?}",
+ extern_prelude,
+ deps
+ );
+ }
+
+ Fixture {
+ path,
+ text: String::new(),
+ krate,
+ deps,
+ extern_prelude,
+ cfg_atoms,
+ cfg_key_values,
+ edition,
+ env,
+ introduce_new_source_root,
+ }
+ }
+}
+
+impl MiniCore {
+ fn has_flag(&self, flag: &str) -> bool {
+ self.activated_flags.iter().any(|it| it == flag)
+ }
+
+ #[track_caller]
+ fn assert_valid_flag(&self, flag: &str) {
+ if !self.valid_flags.iter().any(|it| it == flag) {
+ panic!("invalid flag: {:?}, valid flags: {:?}", flag, self.valid_flags);
+ }
+ }
+
+ fn parse(line: &str) -> MiniCore {
+ let mut res = MiniCore { activated_flags: Vec::new(), valid_flags: Vec::new() };
+
+ let line = line.strip_prefix("//- minicore:").unwrap().trim();
+ for entry in line.split(", ") {
+ if res.has_flag(entry) {
+ panic!("duplicate minicore flag: {:?}", entry);
+ }
+ res.activated_flags.push(entry.to_string());
+ }
+
+ res
+ }
+
+ /// Strips parts of minicore.rs which are flagged by inactive flags.
+ ///
+ /// This is probably over-engineered to support flags dependencies.
+ pub fn source_code(mut self) -> String {
+ let mut buf = String::new();
+ let raw_mini_core = include_str!("./minicore.rs");
+ let mut lines = raw_mini_core.split_inclusive('\n');
+
+ let mut parsing_flags = false;
+ let mut implications = Vec::new();
+
+ // Parse `//!` preamble and extract flags and dependencies.
+ for line in lines.by_ref() {
+ let line = match line.strip_prefix("//!") {
+ Some(it) => it,
+ None => {
+ assert!(line.trim().is_empty());
+ break;
+ }
+ };
+
+ if parsing_flags {
+ let (flag, deps) = line.split_once(':').unwrap();
+ let flag = flag.trim();
+ self.valid_flags.push(flag.to_string());
+ for dep in deps.split(", ") {
+ let dep = dep.trim();
+ if !dep.is_empty() {
+ self.assert_valid_flag(dep);
+ implications.push((flag, dep));
+ }
+ }
+ }
+
+ if line.contains("Available flags:") {
+ parsing_flags = true;
+ }
+ }
+
+ for flag in &self.activated_flags {
+ self.assert_valid_flag(flag);
+ }
+
+ // Fixed point loop to compute transitive closure of flags.
+ loop {
+ let mut changed = false;
+ for &(u, v) in &implications {
+ if self.has_flag(u) && !self.has_flag(v) {
+ self.activated_flags.push(v.to_string());
+ changed = true;
+ }
+ }
+ if !changed {
+ break;
+ }
+ }
+
+ let mut active_regions = Vec::new();
+ let mut seen_regions = Vec::new();
+ for line in lines {
+ let trimmed = line.trim();
+ if let Some(region) = trimmed.strip_prefix("// region:") {
+ active_regions.push(region);
+ continue;
+ }
+ if let Some(region) = trimmed.strip_prefix("// endregion:") {
+ let prev = active_regions.pop().unwrap();
+ assert_eq!(prev, region);
+ continue;
+ }
+
+ let mut line_region = false;
+ if let Some(idx) = trimmed.find("// :") {
+ line_region = true;
+ active_regions.push(&trimmed[idx + "// :".len()..]);
+ }
+
+ let mut keep = true;
+ for &region in &active_regions {
+ assert!(
+ !region.starts_with(' '),
+ "region marker starts with a space: {:?}",
+ region
+ );
+ self.assert_valid_flag(region);
+ seen_regions.push(region);
+ keep &= self.has_flag(region);
+ }
+
+ if keep {
+ buf.push_str(line);
+ }
+ if line_region {
+ active_regions.pop().unwrap();
+ }
+ }
+
+ for flag in &self.valid_flags {
+ if !seen_regions.iter().any(|it| it == flag) {
+ panic!("unused minicore flag: {:?}", flag);
+ }
+ }
+ buf
+ }
+}
+
+#[test]
+#[should_panic]
+fn parse_fixture_checks_further_indented_metadata() {
+ Fixture::parse(
+ r"
+ //- /lib.rs
+ mod bar;
+
+ fn foo() {}
+ //- /bar.rs
+ pub fn baz() {}
+ ",
+ );
+}
+
+#[test]
+fn parse_fixture_gets_full_meta() {
+ let (mini_core, proc_macros, parsed) = Fixture::parse(
+ r#"
+//- proc_macros: identity
+//- minicore: coerce_unsized
+//- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo
+mod m;
+"#,
+ );
+ assert_eq!(proc_macros, vec!["identity".to_string()]);
+ assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_string()]);
+ assert_eq!(1, parsed.len());
+
+ let meta = &parsed[0];
+ assert_eq!("mod m;\n", meta.text);
+
+ assert_eq!("foo", meta.krate.as_ref().unwrap());
+ assert_eq!("/lib.rs", meta.path);
+ assert_eq!(2, meta.env.len());
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
new file mode 100644
index 000000000..8a9cfb6c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
@@ -0,0 +1,500 @@
+//! Assorted testing utilities.
+//!
+//! Most notable things are:
+//!
+//! * Rich text comparison, which outputs a diff.
+//! * Extracting markup (mainly, `$0` markers) out of fixture strings.
+//! * marks (see the eponymous module).
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod assert_linear;
+pub mod bench_fixture;
+mod fixture;
+
+use std::{
+ collections::BTreeMap,
+ env, fs,
+ path::{Path, PathBuf},
+};
+
+use profile::StopWatch;
+use stdx::is_ci;
+use text_size::{TextRange, TextSize};
+
+pub use dissimilar::diff as __diff;
+pub use rustc_hash::FxHashMap;
+
+pub use crate::{
+ assert_linear::AssertLinear,
+ fixture::{Fixture, MiniCore},
+};
+
+pub const CURSOR_MARKER: &str = "$0";
+pub const ESCAPED_CURSOR_MARKER: &str = "\\$0";
+
+/// Asserts that two strings are equal, otherwise displays a rich diff between them.
+///
+/// The diff shows changes from the "original" left string to the "actual" right string.
+///
+/// All arguments starting from and including the 3rd one are passed to
+/// `eprintln!()` macro in case of text inequality.
+#[macro_export]
+macro_rules! assert_eq_text {
+ ($left:expr, $right:expr) => {
+ assert_eq_text!($left, $right,)
+ };
+ ($left:expr, $right:expr, $($tt:tt)*) => {{
+ let left = $left;
+ let right = $right;
+ if left != right {
+ if left.trim() == right.trim() {
+ std::eprintln!("Left:\n{:?}\n\nRight:\n{:?}\n\nWhitespace difference\n", left, right);
+ } else {
+ let diff = $crate::__diff(left, right);
+ std::eprintln!("Left:\n{}\n\nRight:\n{}\n\nDiff:\n{}\n", left, right, $crate::format_diff(diff));
+ }
+ std::eprintln!($($tt)*);
+ panic!("text differs");
+ }
+ }};
+}
+
+/// Infallible version of `try_extract_offset()`.
+pub fn extract_offset(text: &str) -> (TextSize, String) {
+ match try_extract_offset(text) {
+ None => panic!("text should contain cursor marker"),
+ Some(result) => result,
+ }
+}
+
+/// Returns the offset of the first occurrence of `$0` marker and the copy of `text`
+/// without the marker.
+fn try_extract_offset(text: &str) -> Option<(TextSize, String)> {
+ let cursor_pos = text.find(CURSOR_MARKER)?;
+ let mut new_text = String::with_capacity(text.len() - CURSOR_MARKER.len());
+ new_text.push_str(&text[..cursor_pos]);
+ new_text.push_str(&text[cursor_pos + CURSOR_MARKER.len()..]);
+ let cursor_pos = TextSize::from(cursor_pos as u32);
+ Some((cursor_pos, new_text))
+}
+
+/// Infallible version of `try_extract_range()`.
+pub fn extract_range(text: &str) -> (TextRange, String) {
+ match try_extract_range(text) {
+ None => panic!("text should contain cursor marker"),
+ Some(result) => result,
+ }
+}
+
+/// Returns `TextRange` between the first two markers `$0...$0` and the copy
+/// of `text` without both of these markers.
+fn try_extract_range(text: &str) -> Option<(TextRange, String)> {
+ let (start, text) = try_extract_offset(text)?;
+ let (end, text) = try_extract_offset(&text)?;
+ Some((TextRange::new(start, end), text))
+}
+
+#[derive(Clone, Copy)]
+pub enum RangeOrOffset {
+ Range(TextRange),
+ Offset(TextSize),
+}
+
+impl RangeOrOffset {
+ pub fn expect_offset(self) -> TextSize {
+ match self {
+ RangeOrOffset::Offset(it) => it,
+ RangeOrOffset::Range(_) => panic!("expected an offset but got a range instead"),
+ }
+ }
+ pub fn expect_range(self) -> TextRange {
+ match self {
+ RangeOrOffset::Range(it) => it,
+ RangeOrOffset::Offset(_) => panic!("expected a range but got an offset"),
+ }
+ }
+ pub fn range_or_empty(self) -> TextRange {
+ match self {
+ RangeOrOffset::Range(range) => range,
+ RangeOrOffset::Offset(offset) => TextRange::empty(offset),
+ }
+ }
+}
+
+impl From<RangeOrOffset> for TextRange {
+ fn from(selection: RangeOrOffset) -> Self {
+ match selection {
+ RangeOrOffset::Range(it) => it,
+ RangeOrOffset::Offset(it) => TextRange::empty(it),
+ }
+ }
+}
+
+/// Extracts `TextRange` or `TextSize` depending on the amount of `$0` markers
+/// found in `text`.
+///
+/// # Panics
+/// Panics if no `$0` marker is present in the `text`.
+pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) {
+ if let Some((range, text)) = try_extract_range(text) {
+ return (RangeOrOffset::Range(range), text);
+ }
+ let (offset, text) = extract_offset(text);
+ (RangeOrOffset::Offset(offset), text)
+}
+
+/// Extracts ranges marked with `<tag> </tag>` pairs from the `text`.
+pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option<String>)>, String) {
+ let open = format!("<{}", tag);
+ let close = format!("</{}>", tag);
+ let mut ranges = Vec::new();
+ let mut res = String::new();
+ let mut stack = Vec::new();
+ loop {
+ match text.find('<') {
+ None => {
+ res.push_str(text);
+ break;
+ }
+ Some(i) => {
+ res.push_str(&text[..i]);
+ text = &text[i..];
+ if text.starts_with(&open) {
+ let close_open = text.find('>').unwrap();
+ let attr = text[open.len()..close_open].trim();
+ let attr = if attr.is_empty() { None } else { Some(attr.to_string()) };
+ text = &text[close_open + '>'.len_utf8()..];
+ let from = TextSize::of(&res);
+ stack.push((from, attr));
+ } else if text.starts_with(&close) {
+ text = &text[close.len()..];
+ let (from, attr) =
+ stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
+ let to = TextSize::of(&res);
+ ranges.push((TextRange::new(from, to), attr));
+ } else {
+ res.push('<');
+ text = &text['<'.len_utf8()..];
+ }
+ }
+ }
+ }
+ assert!(stack.is_empty(), "unmatched <{}>", tag);
+ ranges.sort_by_key(|r| (r.0.start(), r.0.end()));
+ (ranges, res)
+}
+#[test]
+fn test_extract_tags() {
+ let (tags, text) = extract_tags(r#"<tag fn>fn <tag>main</tag>() {}</tag>"#, "tag");
+ let actual = tags.into_iter().map(|(range, attr)| (&text[range], attr)).collect::<Vec<_>>();
+ assert_eq!(actual, vec![("fn main() {}", Some("fn".into())), ("main", None),]);
+}
+
+/// Inserts `$0` marker into the `text` at `offset`.
+pub fn add_cursor(text: &str, offset: TextSize) -> String {
+ let offset: usize = offset.into();
+ let mut res = String::new();
+ res.push_str(&text[..offset]);
+ res.push_str("$0");
+ res.push_str(&text[offset..]);
+ res
+}
+
+/// Extracts `//^^^ some text` annotations.
+///
+/// A run of `^^^` can be arbitrary long and points to the corresponding range
+/// in the line above.
+///
+/// The `// ^file text` syntax can be used to attach `text` to the entirety of
+/// the file.
+///
+/// Multiline string values are supported:
+///
+/// // ^^^ first line
+/// // | second line
+///
+/// Trailing whitespace is sometimes desired but usually stripped by the editor
+/// if at the end of a line, or incorrectly sized if followed by another
+/// annotation. In those cases the annotation can be explicitly ended with the
+/// `$` character.
+///
+/// // ^^^ trailing-ws-wanted $
+///
+/// Annotations point to the last line that actually was long enough for the
+/// range, not counting annotations themselves. So overlapping annotations are
+/// possible:
+/// ```no_run
+/// // stuff other stuff
+/// // ^^ 'st'
+/// // ^^^^^ 'stuff'
+/// // ^^^^^^^^^^^ 'other stuff'
+/// ```
+pub fn extract_annotations(text: &str) -> Vec<(TextRange, String)> {
+ let mut res = Vec::new();
+ // map from line length to beginning of last line that had that length
+ let mut line_start_map = BTreeMap::new();
+ let mut line_start: TextSize = 0.into();
+ let mut prev_line_annotations: Vec<(TextSize, usize)> = Vec::new();
+ for line in text.split_inclusive('\n') {
+ let mut this_line_annotations = Vec::new();
+ let line_length = if let Some((prefix, suffix)) = line.split_once("//") {
+ let ss_len = TextSize::of("//");
+ let annotation_offset = TextSize::of(prefix) + ss_len;
+ for annotation in extract_line_annotations(suffix.trim_end_matches('\n')) {
+ match annotation {
+ LineAnnotation::Annotation { mut range, content, file } => {
+ range += annotation_offset;
+ this_line_annotations.push((range.end(), res.len()));
+ let range = if file {
+ TextRange::up_to(TextSize::of(text))
+ } else {
+ let line_start = line_start_map.range(range.end()..).next().unwrap();
+
+ range + line_start.1
+ };
+ res.push((range, content));
+ }
+ LineAnnotation::Continuation { mut offset, content } => {
+ offset += annotation_offset;
+ let &(_, idx) = prev_line_annotations
+ .iter()
+ .find(|&&(off, _idx)| off == offset)
+ .unwrap();
+ res[idx].1.push('\n');
+ res[idx].1.push_str(&content);
+ res[idx].1.push('\n');
+ }
+ }
+ }
+ annotation_offset
+ } else {
+ TextSize::of(line)
+ };
+
+ line_start_map = line_start_map.split_off(&line_length);
+ line_start_map.insert(line_length, line_start);
+
+ line_start += TextSize::of(line);
+
+ prev_line_annotations = this_line_annotations;
+ }
+
+ res
+}
+
+enum LineAnnotation {
+ Annotation { range: TextRange, content: String, file: bool },
+ Continuation { offset: TextSize, content: String },
+}
+
+fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
+ let mut res = Vec::new();
+ let mut offset: TextSize = 0.into();
+ let marker: fn(char) -> bool = if line.contains('^') { |c| c == '^' } else { |c| c == '|' };
+ while let Some(idx) = line.find(marker) {
+ offset += TextSize::try_from(idx).unwrap();
+ line = &line[idx..];
+
+ let mut len = line.chars().take_while(|&it| it == '^').count();
+ let mut continuation = false;
+ if len == 0 {
+ assert!(line.starts_with('|'));
+ continuation = true;
+ len = 1;
+ }
+ let range = TextRange::at(offset, len.try_into().unwrap());
+ let line_no_caret = &line[len..];
+ let end_marker = line_no_caret.find(|c| c == '$');
+ let next = line_no_caret.find(marker).map_or(line.len(), |it| it + len);
+
+ let cond = |end_marker| {
+ end_marker < next
+ && (line_no_caret[end_marker + 1..].is_empty()
+ || line_no_caret[end_marker + 1..]
+ .strip_prefix(|c: char| c.is_whitespace() || c == '^')
+ .is_some())
+ };
+ let mut content = match end_marker {
+ Some(end_marker) if cond(end_marker) => &line_no_caret[..end_marker],
+ _ => line_no_caret[..next - len].trim_end(),
+ };
+
+ let mut file = false;
+ if !continuation && content.starts_with("file") {
+ file = true;
+ content = &content["file".len()..];
+ }
+
+ let content = content.trim_start().to_string();
+
+ let annotation = if continuation {
+ LineAnnotation::Continuation { offset: range.end(), content }
+ } else {
+ LineAnnotation::Annotation { range, content, file }
+ };
+ res.push(annotation);
+
+ line = &line[next..];
+ offset += TextSize::try_from(next).unwrap();
+ }
+
+ res
+}
+
+#[test]
+fn test_extract_annotations_1() {
+ let text = stdx::trim_indent(
+ r#"
+fn main() {
+ let (x, y) = (9, 2);
+ //^ def ^ def
+ zoo + 1
+} //^^^ type:
+ // | i32
+
+// ^file
+ "#,
+ );
+ let res = extract_annotations(&text)
+ .into_iter()
+ .map(|(range, ann)| (&text[range], ann))
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ res[..3],
+ [("x", "def".into()), ("y", "def".into()), ("zoo", "type:\ni32\n".into())]
+ );
+ assert_eq!(res[3].0.len(), 115);
+}
+
+#[test]
+fn test_extract_annotations_2() {
+ let text = stdx::trim_indent(
+ r#"
+fn main() {
+ (x, y);
+ //^ a
+ // ^ b
+ //^^^^^^^^ c
+}"#,
+ );
+ let res = extract_annotations(&text)
+ .into_iter()
+ .map(|(range, ann)| (&text[range], ann))
+ .collect::<Vec<_>>();
+
+ assert_eq!(res, [("x", "a".into()), ("y", "b".into()), ("(x, y)", "c".into())]);
+}
+
+/// Returns `true` if slow tests should be skipped. Otherwise returns `false`
+/// and also creates a file at `./target/.slow_tests_cookie` which serves as a
+/// flag that slow tests did run.
+pub fn skip_slow_tests() -> bool {
+ let should_skip = (std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err())
+ || std::env::var("SKIP_SLOW_TESTS").is_ok();
+ if should_skip {
+ eprintln!("ignoring slow test");
+ } else {
+ let path = project_root().join("./target/.slow_tests_cookie");
+ fs::write(&path, ".").unwrap();
+ }
+ should_skip
+}
+
+/// Returns the path to the root directory of `rust-analyzer` project.
+pub fn project_root() -> PathBuf {
+ let dir = env!("CARGO_MANIFEST_DIR");
+ PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
+}
+
+pub fn format_diff(chunks: Vec<dissimilar::Chunk<'_>>) -> String {
+ let mut buf = String::new();
+ for chunk in chunks {
+ let formatted = match chunk {
+ dissimilar::Chunk::Equal(text) => text.into(),
+ dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text),
+ dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text),
+ };
+ buf.push_str(&formatted);
+ }
+ buf
+}
+
+/// Utility for writing benchmark tests.
+///
+/// A benchmark test looks like this:
+///
+/// ```
+/// #[test]
+/// fn benchmark_foo() {
+/// if skip_slow_tests() { return; }
+///
+/// let data = bench_fixture::some_fixture();
+/// let analysis = some_setup();
+///
+/// let hash = {
+/// let _b = bench("foo");
+/// actual_work(analysis)
+/// };
+/// assert_eq!(hash, 92);
+/// }
+/// ```
+///
+/// * We skip benchmarks by default, to save time.
+/// Ideal benchmark time is 800 -- 1500 ms in debug.
+/// * We don't count preparation as part of the benchmark
+/// * The benchmark itself returns some kind of numeric hash.
+/// The hash is used as a sanity check that some code is actually run.
+/// Otherwise, it's too easy to win the benchmark by just doing nothing.
+pub fn bench(label: &'static str) -> impl Drop {
+ struct Bencher {
+ sw: StopWatch,
+ label: &'static str,
+ }
+
+ impl Drop for Bencher {
+ fn drop(&mut self) {
+ eprintln!("{}: {}", self.label, self.sw.elapsed());
+ }
+ }
+
+ Bencher { sw: StopWatch::start(), label }
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and then fails the test.
+#[track_caller]
+pub fn ensure_file_contents(file: &Path, contents: &str) {
+ if let Err(()) = try_ensure_file_contents(file, contents) {
+ panic!("Some files were not up-to-date");
+ }
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and returns an error.
+pub fn try_ensure_file_contents(file: &Path, contents: &str) -> Result<(), ()> {
+ match std::fs::read_to_string(file) {
+ Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => {
+ return Ok(());
+ }
+ _ => (),
+ }
+ let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
+ eprintln!(
+ "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
+ display_path.display()
+ );
+ if is_ci() {
+ eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n");
+ }
+ if let Some(parent) = file.parent() {
+ let _ = std::fs::create_dir_all(parent);
+ }
+ std::fs::write(file, contents).unwrap();
+ Err(())
+}
+
+fn normalize_newlines(s: &str) -> String {
+ s.replace("\r\n", "\n")
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
new file mode 100644
index 000000000..f48d1ec66
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -0,0 +1,669 @@
+//! This is a fixture we use for tests that need lang items.
+//!
+//! We want to include the minimal subset of core for each test, so this file
+//! supports "conditional compilation". Tests use the following syntax to include minicore:
+//!
+//! //- minicore: flag1, flag2
+//!
+//! We then strip all the code marked with other flags.
+//!
+//! Available flags:
+//! sized:
+//! unsize: sized
+//! coerce_unsized: unsize
+//! slice:
+//! range:
+//! deref: sized
+//! deref_mut: deref
+//! index: sized
+//! fn:
+//! try:
+//! pin:
+//! future: pin
+//! option:
+//! result:
+//! iterator: option
+//! iterators: iterator, fn
+//! default: sized
+//! hash:
+//! clone: sized
+//! copy: clone
+//! from: sized
+//! eq: sized
+//! ord: eq, option
+//! derive:
+//! fmt: result
+//! bool_impl: option, fn
+//! add:
+//! as_ref: sized
+//! drop:
+
+pub mod marker {
+ // region:sized
+ #[lang = "sized"]
+ #[fundamental]
+ #[rustc_specialization_trait]
+ pub trait Sized {}
+ // endregion:sized
+
+ // region:unsize
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ // endregion:unsize
+
+ // region:copy
+ #[lang = "copy"]
+ pub trait Copy: Clone {}
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Copy($item:item) {}
+ // endregion:derive
+
+ mod copy_impls {
+ use super::Copy;
+
+ macro_rules! impl_copy {
+ ($($t:ty)*) => {
+ $(
+ impl Copy for $t {}
+ )*
+ }
+ }
+
+ impl_copy! {
+ usize u8 u16 u32 u64 u128
+ isize i8 i16 i32 i64 i128
+ f32 f64
+ bool char
+ }
+
+ impl<T: ?Sized> Copy for *const T {}
+ impl<T: ?Sized> Copy for *mut T {}
+ impl<T: ?Sized> Copy for &T {}
+ }
+ // endregion:copy
+}
+
+// region:default
+pub mod default {
+ pub trait Default: Sized {
+ fn default() -> Self;
+ }
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Default($item:item) {}
+ // endregion:derive
+}
+// endregion:default
+
+// region:hash
+pub mod hash {
+ pub trait Hasher {}
+
+ pub trait Hash {
+ fn hash<H: Hasher>(&self, state: &mut H);
+ }
+}
+// endregion:hash
+
+// region:clone
+pub mod clone {
+ #[lang = "clone"]
+ pub trait Clone: Sized {
+ fn clone(&self) -> Self;
+ }
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Clone($item:item) {}
+ // endregion:derive
+}
+// endregion:clone
+
+pub mod convert {
+ // region:from
+ pub trait From<T>: Sized {
+ fn from(_: T) -> Self;
+ }
+ pub trait Into<T>: Sized {
+ fn into(self) -> T;
+ }
+
+ impl<T, U> Into<U> for T
+ where
+ U: From<T>,
+ {
+ fn into(self) -> U {
+ U::from(self)
+ }
+ }
+
+ impl<T> From<T> for T {
+ fn from(t: T) -> T {
+ t
+ }
+ }
+ // endregion:from
+
+ // region:as_ref
+ pub trait AsRef<T: ?Sized> {
+ fn as_ref(&self) -> &T;
+ }
+ // endregion:as_ref
+}
+
+pub mod ops {
+ // region:coerce_unsized
+ mod unsize {
+ use crate::marker::Unsize;
+
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T: ?Sized> {}
+
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {}
+
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a T {}
+
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
+ }
+ pub use self::unsize::CoerceUnsized;
+ // endregion:coerce_unsized
+
+ // region:deref
+ mod deref {
+ #[lang = "deref"]
+ pub trait Deref {
+ #[lang = "deref_target"]
+ type Target: ?Sized;
+ fn deref(&self) -> &Self::Target;
+ }
+ // region:deref_mut
+ #[lang = "deref_mut"]
+ pub trait DerefMut: Deref {
+ fn deref_mut(&mut self) -> &mut Self::Target;
+ }
+ // endregion:deref_mut
+ }
+ pub use self::deref::{
+ Deref,
+ DerefMut, // :deref_mut
+ };
+ // endregion:deref
+
+ // region:drop
+ #[lang = "drop"]
+ pub trait Drop {
+ fn drop(&mut self);
+ }
+ // endregion:drop
+
+ // region:index
+ mod index {
+ #[lang = "index"]
+ pub trait Index<Idx: ?Sized> {
+ type Output: ?Sized;
+ fn index(&self, index: Idx) -> &Self::Output;
+ }
+ #[lang = "index_mut"]
+ pub trait IndexMut<Idx: ?Sized>: Index<Idx> {
+ fn index_mut(&mut self, index: Idx) -> &mut Self::Output;
+ }
+
+ // region:slice
+ impl<T, I> Index<I> for [T]
+ where
+ I: SliceIndex<[T]>,
+ {
+ type Output = I::Output;
+ fn index(&self, index: I) -> &I::Output {
+ loop {}
+ }
+ }
+ impl<T, I> IndexMut<I> for [T]
+ where
+ I: SliceIndex<[T]>,
+ {
+ fn index_mut(&mut self, index: I) -> &mut I::Output {
+ loop {}
+ }
+ }
+
+ pub unsafe trait SliceIndex<T: ?Sized> {
+ type Output: ?Sized;
+ }
+ unsafe impl<T> SliceIndex<[T]> for usize {
+ type Output = T;
+ }
+ // endregion:slice
+ }
+ pub use self::index::{Index, IndexMut};
+ // endregion:index
+
+ // region:drop
+ pub mod mem {
+ pub fn drop<T>(_x: T) {}
+ }
+ // endregion:drop
+
+ // region:range
+ mod range {
+ #[lang = "RangeFull"]
+ pub struct RangeFull;
+
+ #[lang = "Range"]
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+
+ #[lang = "RangeFrom"]
+ pub struct RangeFrom<Idx> {
+ pub start: Idx,
+ }
+
+ #[lang = "RangeTo"]
+ pub struct RangeTo<Idx> {
+ pub end: Idx,
+ }
+
+ #[lang = "RangeInclusive"]
+ pub struct RangeInclusive<Idx> {
+ pub(crate) start: Idx,
+ pub(crate) end: Idx,
+ pub(crate) exhausted: bool,
+ }
+
+ #[lang = "RangeToInclusive"]
+ pub struct RangeToInclusive<Idx> {
+ pub end: Idx,
+ }
+ }
+ pub use self::range::{Range, RangeFrom, RangeFull, RangeTo};
+ pub use self::range::{RangeInclusive, RangeToInclusive};
+ // endregion:range
+
+ // region:fn
+ mod function {
+ #[lang = "fn"]
+ #[fundamental]
+ pub trait Fn<Args>: FnMut<Args> {}
+
+ #[lang = "fn_mut"]
+ #[fundamental]
+ pub trait FnMut<Args>: FnOnce<Args> {}
+
+ #[lang = "fn_once"]
+ #[fundamental]
+ pub trait FnOnce<Args> {
+ #[lang = "fn_once_output"]
+ type Output;
+ }
+ }
+ pub use self::function::{Fn, FnMut, FnOnce};
+ // endregion:fn
+ // region:try
+ mod try_ {
+ pub enum ControlFlow<B, C = ()> {
+ Continue(C),
+ Break(B),
+ }
+ pub trait FromResidual<R = Self::Residual> {
+ #[lang = "from_residual"]
+ fn from_residual(residual: R) -> Self;
+ }
+ #[lang = "try"]
+ pub trait Try: FromResidual<Self::Residual> {
+ type Output;
+ type Residual;
+ #[lang = "from_output"]
+ fn from_output(output: Self::Output) -> Self;
+ #[lang = "branch"]
+ fn branch(self) -> ControlFlow<Self::Residual, Self::Output>;
+ }
+
+ impl<B, C> Try for ControlFlow<B, C> {
+ type Output = C;
+ type Residual = ControlFlow<B, convert::Infallible>;
+ fn from_output(output: Self::Output) -> Self {}
+ fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {}
+ }
+
+ impl<B, C> FromResidual for ControlFlow<B, C> {
+ fn from_residual(residual: ControlFlow<B, convert::Infallible>) -> Self {}
+ }
+ }
+ pub use self::try_::{ControlFlow, FromResidual, Try};
+ // endregion:try
+
+ // region:add
+ #[lang = "add"]
+ pub trait Add<Rhs = Self> {
+ type Output;
+ fn add(self, rhs: Rhs) -> Self::Output;
+ }
+ // endregion:add
+}
+
+// region:eq
+pub mod cmp {
+ #[lang = "eq"]
+ pub trait PartialEq<Rhs: ?Sized = Self> {
+ fn eq(&self, other: &Rhs) -> bool;
+ fn ne(&self, other: &Rhs) -> bool {
+ !self.eq(other)
+ }
+ }
+
+ pub trait Eq: PartialEq<Self> {}
+
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro PartialEq($item:item) {}
+ #[rustc_builtin_macro]
+ pub macro Eq($item:item) {}
+ // endregion:derive
+
+ // region:ord
+ #[lang = "partial_ord"]
+ pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
+ fn partial_cmp(&self, other: &Rhs) -> Option<Ordering>;
+ }
+
+ pub trait Ord: Eq + PartialOrd<Self> {
+ fn cmp(&self, other: &Self) -> Ordering;
+ }
+
+ pub enum Ordering {
+ Less = -1,
+ Equal = 0,
+ Greater = 1,
+ }
+
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro PartialOrd($item:item) {}
+ #[rustc_builtin_macro]
+ pub macro Ord($item:item) {}
+ // endregion:derive
+
+ // endregion:ord
+}
+// endregion:eq
+
+// region:fmt
+pub mod fmt {
+ pub struct Error;
+ pub type Result = Result<(), Error>;
+ pub struct Formatter<'a>;
+ pub trait Debug {
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result;
+ }
+}
+// endregion:fmt
+
+// region:slice
+pub mod slice {
+ #[lang = "slice"]
+ impl<T> [T] {
+ pub fn len(&self) -> usize {
+ loop {}
+ }
+ }
+}
+// endregion:slice
+
+// region:option
+pub mod option {
+ pub enum Option<T> {
+ #[lang = "None"]
+ None,
+ #[lang = "Some"]
+ Some(T),
+ }
+
+ impl<T> Option<T> {
+ pub const fn unwrap(self) -> T {
+ match self {
+ Some(val) => val,
+ None => panic!("called `Option::unwrap()` on a `None` value"),
+ }
+ }
+ }
+}
+// endregion:option
+
+// region:result
+pub mod result {
+ pub enum Result<T, E> {
+ #[lang = "Ok"]
+ Ok(T),
+ #[lang = "Err"]
+ Err(E),
+ }
+}
+// endregion:result
+
+// region:pin
+pub mod pin {
+ #[lang = "pin"]
+ #[fundamental]
+ pub struct Pin<P> {
+ pointer: P,
+ }
+}
+// endregion:pin
+
+// region:future
+pub mod future {
+ use crate::{
+ pin::Pin,
+ task::{Context, Poll},
+ };
+
+ #[lang = "future_trait"]
+ pub trait Future {
+ type Output;
+ #[lang = "poll"]
+ fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output>;
+ }
+}
+pub mod task {
+ pub enum Poll<T> {
+ #[lang = "Ready"]
+ Ready(T),
+ #[lang = "Pending"]
+ Pending,
+ }
+
+ pub struct Context<'a> {
+ waker: &'a (),
+ }
+}
+// endregion:future
+
+// region:iterator
+pub mod iter {
+ // region:iterators
+ mod adapters {
+ pub struct Take<I> {
+ iter: I,
+ n: usize,
+ }
+ impl<I> Iterator for Take<I>
+ where
+ I: Iterator,
+ {
+ type Item = <I as Iterator>::Item;
+
+ fn next(&mut self) -> Option<<I as Iterator>::Item> {
+ loop {}
+ }
+ }
+
+ pub struct FilterMap<I, F> {
+ iter: I,
+ f: F,
+ }
+ impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
+ where
+ F: FnMut(I::Item) -> Option<B>,
+ {
+ type Item = B;
+
+ #[inline]
+ fn next(&mut self) -> Option<B> {
+ loop {}
+ }
+ }
+ }
+ pub use self::adapters::{Take, FilterMap};
+
+ mod sources {
+ mod repeat {
+ pub fn repeat<T>(elt: T) -> Repeat<T> {
+ loop {}
+ }
+
+ pub struct Repeat<A> {
+ element: A,
+ }
+
+ impl<A> Iterator for Repeat<A> {
+ type Item = A;
+
+ fn next(&mut self) -> Option<A> {
+ loop {}
+ }
+ }
+ }
+ pub use self::repeat::{repeat, Repeat};
+ }
+ pub use self::sources::{repeat, Repeat};
+ // endregion:iterators
+
+ mod traits {
+ mod iterator {
+ use super::super::Take;
+
+ pub trait Iterator {
+ type Item;
+ #[lang = "next"]
+ fn next(&mut self) -> Option<Self::Item>;
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {
+ loop {}
+ }
+ fn by_ref(&mut self) -> &mut Self
+ where
+ Self: Sized,
+ {
+ self
+ }
+ // region:iterators
+ fn take(self, n: usize) -> crate::iter::Take<Self> {
+ loop {}
+ }
+ fn filter_map<B, F>(self, f: F) -> crate::iter::FilterMap<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(Self::Item) -> Option<B>,
+ {
+ loop {}
+ }
+ // endregion:iterators
+ }
+ impl<I: Iterator + ?Sized> Iterator for &mut I {
+ type Item = I::Item;
+ fn next(&mut self) -> Option<I::Item> {
+ (**self).next()
+ }
+ }
+ }
+ pub use self::iterator::Iterator;
+
+ mod collect {
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ #[lang = "into_iter"]
+ fn into_iter(self) -> Self::IntoIter;
+ }
+ impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+ fn into_iter(self) -> I {
+ self
+ }
+ }
+ }
+ pub use self::collect::IntoIterator;
+ }
+ pub use self::traits::{IntoIterator, Iterator};
+}
+// endregion:iterator
+
+// region:derive
+mod macros {
+ pub(crate) mod builtin {
+ #[rustc_builtin_macro]
+ pub macro derive($item:item) {
+ /* compiler built-in */
+ }
+ }
+}
+// endregion:derive
+
+// region:bool_impl
+#[lang = "bool"]
+impl bool {
+ pub fn then<T, F: FnOnce() -> T>(self, f: F) -> Option<T> {
+ if self {
+ Some(f())
+ } else {
+ None
+ }
+ }
+}
+// endregion:bool_impl
+
+pub mod prelude {
+ pub mod v1 {
+ pub use crate::{
+ clone::Clone, // :clone
+ cmp::{Eq, PartialEq}, // :eq
+ cmp::{Ord, PartialOrd}, // :ord
+ convert::AsRef, // :as_ref
+ convert::{From, Into}, // :from
+ default::Default, // :default
+ iter::{IntoIterator, Iterator}, // :iterator
+ macros::builtin::derive, // :derive
+ marker::Copy, // :copy
+ marker::Sized, // :sized
+ mem::drop, // :drop
+ ops::Drop, // :drop
+ ops::{Fn, FnMut, FnOnce}, // :fn
+ option::Option::{self, None, Some}, // :option
+ result::Result::{self, Err, Ok}, // :result
+ };
+ }
+
+ pub mod rust_2015 {
+ pub use super::v1::*;
+ }
+
+ pub mod rust_2018 {
+ pub use super::v1::*;
+ }
+
+ pub mod rust_2021 {
+ pub use super::v1::*;
+ }
+}
+
+#[prelude_import]
+#[allow(unused)]
+use prelude::v1::*;
diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
new file mode 100644
index 000000000..cf14bbd3c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "text-edit"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+itertools = "0.10.3"
+text-size = "1.1.0"
diff --git a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
new file mode 100644
index 000000000..9bb4271b6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
@@ -0,0 +1,264 @@
+//! Representation of a `TextEdit`.
+//!
+//! `rust-analyzer` never mutates text itself and only sends diffs to clients,
+//! so `TextEdit` is the ultimate representation of the work done by
+//! rust-analyzer.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use itertools::Itertools;
+use std::cmp::max;
+pub use text_size::{TextRange, TextSize};
+
+/// `InsertDelete` -- a single "atomic" change to text
+///
+/// Must not overlap with other `Indel`s.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Indel {
+ pub insert: String,
+ /// Refers to offsets in the original text
+ pub delete: TextRange,
+}
+
+#[derive(Default, Debug, Clone)]
+pub struct TextEdit {
+ /// Invariant: disjoint and sorted by `delete`.
+ indels: Vec<Indel>,
+}
+
+#[derive(Debug, Default, Clone)]
+pub struct TextEditBuilder {
+ indels: Vec<Indel>,
+}
+
+impl Indel {
+ pub fn insert(offset: TextSize, text: String) -> Indel {
+ Indel::replace(TextRange::empty(offset), text)
+ }
+ pub fn delete(range: TextRange) -> Indel {
+ Indel::replace(range, String::new())
+ }
+ pub fn replace(range: TextRange, replace_with: String) -> Indel {
+ Indel { delete: range, insert: replace_with }
+ }
+
+ pub fn apply(&self, text: &mut String) {
+ let start: usize = self.delete.start().into();
+ let end: usize = self.delete.end().into();
+ text.replace_range(start..end, &self.insert);
+ }
+}
+
+impl TextEdit {
+ pub fn builder() -> TextEditBuilder {
+ TextEditBuilder::default()
+ }
+
+ pub fn insert(offset: TextSize, text: String) -> TextEdit {
+ let mut builder = TextEdit::builder();
+ builder.insert(offset, text);
+ builder.finish()
+ }
+
+ pub fn delete(range: TextRange) -> TextEdit {
+ let mut builder = TextEdit::builder();
+ builder.delete(range);
+ builder.finish()
+ }
+
+ pub fn replace(range: TextRange, replace_with: String) -> TextEdit {
+ let mut builder = TextEdit::builder();
+ builder.replace(range, replace_with);
+ builder.finish()
+ }
+
+ pub fn len(&self) -> usize {
+ self.indels.len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.indels.is_empty()
+ }
+
+ pub fn iter(&self) -> std::slice::Iter<'_, Indel> {
+ self.into_iter()
+ }
+
+ pub fn apply(&self, text: &mut String) {
+ match self.len() {
+ 0 => return,
+ 1 => {
+ self.indels[0].apply(text);
+ return;
+ }
+ _ => (),
+ }
+
+ let text_size = TextSize::of(&*text);
+ let mut total_len = text_size;
+ let mut max_total_len = text_size;
+ for indel in &self.indels {
+ total_len += TextSize::of(&indel.insert);
+ total_len -= indel.delete.len();
+ max_total_len = max(max_total_len, total_len);
+ }
+
+ if let Some(additional) = max_total_len.checked_sub(text_size) {
+ text.reserve(additional.into());
+ }
+
+ for indel in self.indels.iter().rev() {
+ indel.apply(text);
+ }
+
+ assert_eq!(TextSize::of(&*text), total_len);
+ }
+
+ pub fn union(&mut self, other: TextEdit) -> Result<(), TextEdit> {
+ let iter_merge =
+ self.iter().merge_by(other.iter(), |l, r| l.delete.start() <= r.delete.start());
+ if !check_disjoint(&mut iter_merge.clone()) {
+ return Err(other);
+ }
+
+ // Only dedup deletions and replacements, keep all insertions
+ self.indels = iter_merge.dedup_by(|a, b| a == b && !a.delete.is_empty()).cloned().collect();
+ Ok(())
+ }
+
+ pub fn apply_to_offset(&self, offset: TextSize) -> Option<TextSize> {
+ let mut res = offset;
+ for indel in &self.indels {
+ if indel.delete.start() >= offset {
+ break;
+ }
+ if offset < indel.delete.end() {
+ return None;
+ }
+ res += TextSize::of(&indel.insert);
+ res -= indel.delete.len();
+ }
+ Some(res)
+ }
+}
+
+impl IntoIterator for TextEdit {
+ type Item = Indel;
+ type IntoIter = std::vec::IntoIter<Indel>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.indels.into_iter()
+ }
+}
+
+impl<'a> IntoIterator for &'a TextEdit {
+ type Item = &'a Indel;
+ type IntoIter = std::slice::Iter<'a, Indel>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.indels.iter()
+ }
+}
+
+impl TextEditBuilder {
+ pub fn is_empty(&self) -> bool {
+ self.indels.is_empty()
+ }
+ pub fn replace(&mut self, range: TextRange, replace_with: String) {
+ self.indel(Indel::replace(range, replace_with));
+ }
+ pub fn delete(&mut self, range: TextRange) {
+ self.indel(Indel::delete(range));
+ }
+ pub fn insert(&mut self, offset: TextSize, text: String) {
+ self.indel(Indel::insert(offset, text));
+ }
+ pub fn finish(self) -> TextEdit {
+ let mut indels = self.indels;
+ assert_disjoint_or_equal(&mut indels);
+ TextEdit { indels }
+ }
+ pub fn invalidates_offset(&self, offset: TextSize) -> bool {
+ self.indels.iter().any(|indel| indel.delete.contains_inclusive(offset))
+ }
+ fn indel(&mut self, indel: Indel) {
+ self.indels.push(indel);
+ if self.indels.len() <= 16 {
+ assert_disjoint_or_equal(&mut self.indels);
+ }
+ }
+}
+
+fn assert_disjoint_or_equal(indels: &mut [Indel]) {
+ assert!(check_disjoint_and_sort(indels));
+}
+
+fn check_disjoint_and_sort(indels: &mut [Indel]) -> bool {
+ indels.sort_by_key(|indel| (indel.delete.start(), indel.delete.end()));
+ check_disjoint(&mut indels.iter())
+}
+
+fn check_disjoint<'a, I>(indels: &mut I) -> bool
+where
+ I: std::iter::Iterator<Item = &'a Indel> + Clone,
+{
+ indels.clone().zip(indels.skip(1)).all(|(l, r)| l.delete.end() <= r.delete.start() || l == r)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::{TextEdit, TextEditBuilder, TextRange};
+
+ fn range(start: u32, end: u32) -> TextRange {
+ TextRange::new(start.into(), end.into())
+ }
+
+ #[test]
+ fn test_apply() {
+ let mut text = "_11h1_2222_xx3333_4444_6666".to_string();
+ let mut builder = TextEditBuilder::default();
+ builder.replace(range(3, 4), "1".to_string());
+ builder.delete(range(11, 13));
+ builder.insert(22.into(), "_5555".to_string());
+
+ let text_edit = builder.finish();
+ text_edit.apply(&mut text);
+
+ assert_eq!(text, "_1111_2222_3333_4444_5555_6666")
+ }
+
+ #[test]
+ fn test_union() {
+ let mut edit1 = TextEdit::delete(range(7, 11));
+ let mut builder = TextEditBuilder::default();
+ builder.delete(range(1, 5));
+ builder.delete(range(13, 17));
+
+ let edit2 = builder.finish();
+ assert!(edit1.union(edit2).is_ok());
+ assert_eq!(edit1.indels.len(), 3);
+ }
+
+ #[test]
+ fn test_union_with_duplicates() {
+ let mut builder1 = TextEditBuilder::default();
+ builder1.delete(range(7, 11));
+ builder1.delete(range(13, 17));
+
+ let mut builder2 = TextEditBuilder::default();
+ builder2.delete(range(1, 5));
+ builder2.delete(range(13, 17));
+
+ let mut edit1 = builder1.finish();
+ let edit2 = builder2.finish();
+ assert!(edit1.union(edit2).is_ok());
+ assert_eq!(edit1.indels.len(), 3);
+ }
+
+ #[test]
+ fn test_union_panics() {
+ let mut edit1 = TextEdit::delete(range(7, 11));
+ let edit2 = TextEdit::delete(range(9, 13));
+ assert!(edit1.union(edit2).is_err());
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
new file mode 100644
index 000000000..7d3b9e09e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "toolchain"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+home = "0.5.3"
diff --git a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
new file mode 100644
index 000000000..b05da7691
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
@@ -0,0 +1,69 @@
+//! Discovery of `cargo` & `rustc` executables.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{env, iter, path::PathBuf};
+
+pub fn cargo() -> PathBuf {
+ get_path_for_executable("cargo")
+}
+
+pub fn rustc() -> PathBuf {
+ get_path_for_executable("rustc")
+}
+
+pub fn rustup() -> PathBuf {
+ get_path_for_executable("rustup")
+}
+
+pub fn rustfmt() -> PathBuf {
+ get_path_for_executable("rustfmt")
+}
+
+/// Return a `PathBuf` to use for the given executable.
+///
+/// E.g., `get_path_for_executable("cargo")` may return just `cargo` if that
+/// gives a valid Cargo executable; or it may return a full path to a valid
+/// Cargo.
+fn get_path_for_executable(executable_name: &'static str) -> PathBuf {
+ // The current implementation checks three places for an executable to use:
+ // 1) Appropriate environment variable (erroring if this is set but not a usable executable)
+ // example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc
+ // 2) `<executable_name>`
+ // example: for cargo, this tries just `cargo`, which will succeed if `cargo` is on the $PATH
+ // 3) `~/.cargo/bin/<executable_name>`
+ // example: for cargo, this tries ~/.cargo/bin/cargo
+ // It seems that this is a reasonable place to try for cargo, rustc, and rustup
+ let env_var = executable_name.to_ascii_uppercase();
+ if let Some(path) = env::var_os(&env_var) {
+ return path.into();
+ }
+
+ if lookup_in_path(executable_name) {
+ return executable_name.into();
+ }
+
+ if let Some(mut path) = home::home_dir() {
+ path.push(".cargo");
+ path.push("bin");
+ path.push(executable_name);
+ if let Some(path) = probe(path) {
+ return path;
+ }
+ }
+
+ executable_name.into()
+}
+
+fn lookup_in_path(exec: &str) -> bool {
+ let paths = env::var_os("PATH").unwrap_or_default();
+ env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some()
+}
+
+fn probe(path: PathBuf) -> Option<PathBuf> {
+ let with_extension = match env::consts::EXE_EXTENSION {
+ "" => None,
+ it => Some(path.with_extension(it)),
+ };
+ iter::once(path).chain(with_extension).find(|it| it.is_file())
+}
diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml
new file mode 100644
index 000000000..52dfb8608
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "tt"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+smol_str = "0.1.23"
+
+stdx = { path = "../stdx", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/tt/src/buffer.rs b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
new file mode 100644
index 000000000..69226bd4c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
@@ -0,0 +1,231 @@
+//! Stateful iteration over token trees.
+//!
+//! We use this as the source of tokens for the parser.
+use crate::{Leaf, Subtree, TokenTree};
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+struct EntryId(usize);
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+struct EntryPtr(EntryId, usize);
+
+/// Internal type which is used instead of `TokenTree` to represent a token tree
+/// within a `TokenBuffer`.
+#[derive(Debug)]
+enum Entry<'t> {
+ // Mimicking types from proc-macro.
+ Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId),
+ Leaf(&'t TokenTree),
+ // End entries contain a pointer to the entry from the containing
+ // token tree, or None if this is the outermost level.
+ End(Option<EntryPtr>),
+}
+
+/// A token tree buffer
+/// The safe version of `syn` [`TokenBuffer`](https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L41)
+#[derive(Debug)]
+pub struct TokenBuffer<'t> {
+ buffers: Vec<Box<[Entry<'t>]>>,
+}
+
+trait TokenList<'a> {
+ fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>);
+}
+
+impl<'a> TokenList<'a> for &'a [TokenTree] {
+ fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
+ // Must contain everything in tokens and then the Entry::End
+ let start_capacity = self.len() + 1;
+ let mut entries = Vec::with_capacity(start_capacity);
+ let mut children = vec![];
+ for (idx, tt) in self.iter().enumerate() {
+ match tt {
+ TokenTree::Leaf(_) => {
+ entries.push(Entry::Leaf(tt));
+ }
+ TokenTree::Subtree(subtree) => {
+ entries.push(Entry::End(None));
+ children.push((idx, (subtree, Some(tt))));
+ }
+ }
+ }
+ (children, entries)
+ }
+}
+
+impl<'a> TokenList<'a> for &'a Subtree {
+ fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
+ // Must contain everything in tokens and then the Entry::End
+ let mut entries = vec![];
+ let mut children = vec![];
+ entries.push(Entry::End(None));
+ children.push((0usize, (*self, None)));
+ (children, entries)
+ }
+}
+
+impl<'t> TokenBuffer<'t> {
+ pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
+ Self::new(tokens)
+ }
+
+ pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> {
+ Self::new(subtree)
+ }
+
+ fn new<T: TokenList<'t>>(tokens: T) -> TokenBuffer<'t> {
+ let mut buffers = vec![];
+ let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
+ assert_eq!(idx, 0);
+ TokenBuffer { buffers }
+ }
+
+ fn new_inner<T: TokenList<'t>>(
+ tokens: T,
+ buffers: &mut Vec<Box<[Entry<'t>]>>,
+ next: Option<EntryPtr>,
+ ) -> usize {
+ let (children, mut entries) = tokens.entries();
+
+ entries.push(Entry::End(next));
+ let res = buffers.len();
+ buffers.push(entries.into_boxed_slice());
+
+ for (child_idx, (subtree, tt)) in children {
+ let idx = TokenBuffer::new_inner(
+ subtree.token_trees.as_slice(),
+ buffers,
+ Some(EntryPtr(EntryId(res), child_idx + 1)),
+ );
+ buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, subtree, EntryId(idx));
+ }
+
+ res
+ }
+
+ /// Creates a cursor referencing the first token in the buffer and able to
+ /// traverse until the end of the buffer.
+ pub fn begin(&self) -> Cursor<'_> {
+ Cursor::create(self, EntryPtr(EntryId(0), 0))
+ }
+
+ fn entry(&self, ptr: &EntryPtr) -> Option<&Entry<'_>> {
+ let id = ptr.0;
+ self.buffers[id.0].get(ptr.1)
+ }
+}
+
+#[derive(Debug)]
+pub enum TokenTreeRef<'a> {
+ Subtree(&'a Subtree, Option<&'a TokenTree>),
+ Leaf(&'a Leaf, &'a TokenTree),
+}
+
+impl<'a> TokenTreeRef<'a> {
+ pub fn cloned(&self) -> TokenTree {
+ match &self {
+ TokenTreeRef::Subtree(subtree, tt) => match tt {
+ Some(it) => (*it).clone(),
+ None => (*subtree).clone().into(),
+ },
+ TokenTreeRef::Leaf(_, tt) => (*tt).clone(),
+ }
+ }
+}
+
+/// A safe version of `Cursor` from `syn` crate <https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125>
+#[derive(Copy, Clone, Debug)]
+pub struct Cursor<'a> {
+ buffer: &'a TokenBuffer<'a>,
+ ptr: EntryPtr,
+}
+
+impl<'a> PartialEq for Cursor<'a> {
+ fn eq(&self, other: &Cursor<'_>) -> bool {
+ self.ptr == other.ptr && std::ptr::eq(self.buffer, other.buffer)
+ }
+}
+
+impl<'a> Eq for Cursor<'a> {}
+
+impl<'a> Cursor<'a> {
+ /// Check whether it is eof
+ pub fn eof(self) -> bool {
+ matches!(self.buffer.entry(&self.ptr), None | Some(Entry::End(None)))
+ }
+
+ /// If the cursor is pointing at the end of a subtree, returns
+ /// the parent subtree
+ pub fn end(self) -> Option<&'a Subtree> {
+ match self.entry() {
+ Some(Entry::End(Some(ptr))) => {
+ let idx = ptr.1;
+ if let Some(Entry::Subtree(_, subtree, _)) =
+ self.buffer.entry(&EntryPtr(ptr.0, idx - 1))
+ {
+ return Some(subtree);
+ }
+ None
+ }
+ _ => None,
+ }
+ }
+
+ fn entry(self) -> Option<&'a Entry<'a>> {
+ self.buffer.entry(&self.ptr)
+ }
+
+ /// If the cursor is pointing at a `Subtree`, returns
+ /// a cursor into that subtree
+ pub fn subtree(self) -> Option<Cursor<'a>> {
+ match self.entry() {
+ Some(Entry::Subtree(_, _, entry_id)) => {
+ Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0)))
+ }
+ _ => None,
+ }
+ }
+
+ /// If the cursor is pointing at a `TokenTree`, returns it
+ pub fn token_tree(self) -> Option<TokenTreeRef<'a>> {
+ match self.entry() {
+ Some(Entry::Leaf(tt)) => match tt {
+ TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)),
+ TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))),
+ },
+ Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)),
+ Some(Entry::End(_)) | None => None,
+ }
+ }
+
+ fn create(buffer: &'a TokenBuffer<'_>, ptr: EntryPtr) -> Cursor<'a> {
+ Cursor { buffer, ptr }
+ }
+
+ /// Bump the cursor
+ pub fn bump(self) -> Cursor<'a> {
+ if let Some(Entry::End(exit)) = self.buffer.entry(&self.ptr) {
+ match exit {
+ Some(exit) => Cursor::create(self.buffer, *exit),
+ None => self,
+ }
+ } else {
+ Cursor::create(self.buffer, EntryPtr(self.ptr.0, self.ptr.1 + 1))
+ }
+ }
+
+ /// Bump the cursor, if it is a subtree, returns
+ /// a cursor into that subtree
+ pub fn bump_subtree(self) -> Cursor<'a> {
+ match self.entry() {
+ Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(),
+ _ => self.bump(),
+ }
+ }
+
+ /// Check whether it is a top level
+ pub fn is_root(&self) -> bool {
+ let entry_id = self.ptr.0;
+ entry_id.0 == 0
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs
new file mode 100644
index 000000000..a54861de9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs
@@ -0,0 +1,322 @@
+//! `tt` crate defines a `TokenTree` data structure: this is the interface (both
+//! input and output) of macros. It closely mirrors `proc_macro` crate's
+//! `TokenTree`.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::fmt;
+
+use stdx::impl_from;
+
+pub use smol_str::SmolStr;
+
+/// Represents identity of the token.
+///
+/// For hygiene purposes, we need to track which expanded tokens originated from
+//! which source tokens. We do it by assigning a distinct identity to each
+/// source token and making sure that identities are preserved during macro
+/// expansion.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(pub u32);
+
+impl TokenId {
+ pub const fn unspecified() -> TokenId {
+ TokenId(!0)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TokenTree {
+ Leaf(Leaf),
+ Subtree(Subtree),
+}
+impl_from!(Leaf, Subtree for TokenTree);
+
+impl TokenTree {
+ pub fn empty() -> Self {
+ TokenTree::Subtree(Subtree::default())
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Leaf {
+ Literal(Literal),
+ Punct(Punct),
+ Ident(Ident),
+}
+impl_from!(Literal, Punct, Ident for Leaf);
+
+#[derive(Clone, PartialEq, Eq, Hash, Default)]
+pub struct Subtree {
+ pub delimiter: Option<Delimiter>,
+ pub token_trees: Vec<TokenTree>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Delimiter {
+ pub id: TokenId,
+ pub kind: DelimiterKind,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum DelimiterKind {
+ Parenthesis,
+ Brace,
+ Bracket,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Literal {
+ pub text: SmolStr,
+ pub id: TokenId,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Punct {
+ pub char: char,
+ pub spacing: Spacing,
+ pub id: TokenId,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum Spacing {
+ Alone,
+ Joint,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Ident {
+ pub text: SmolStr,
+ pub id: TokenId,
+}
+
+impl Leaf {
+ pub fn id(&self) -> TokenId {
+ match self {
+ Leaf::Literal(l) => l.id,
+ Leaf::Punct(p) => p.id,
+ Leaf::Ident(i) => i.id,
+ }
+ }
+}
+
+fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usize) -> fmt::Result {
+ let align = " ".repeat(level);
+
+ let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) {
+ None => "$".to_string(),
+ Some((DelimiterKind::Parenthesis, id)) => format!("() {}", id),
+ Some((DelimiterKind::Brace, id)) => format!("{{}} {}", id),
+ Some((DelimiterKind::Bracket, id)) => format!("[] {}", id),
+ };
+
+ if subtree.token_trees.is_empty() {
+ write!(f, "{}SUBTREE {}", align, aux)?;
+ } else {
+ writeln!(f, "{}SUBTREE {}", align, aux)?;
+ for (idx, child) in subtree.token_trees.iter().enumerate() {
+ print_debug_token(f, child, level + 1)?;
+ if idx != subtree.token_trees.len() - 1 {
+ writeln!(f)?;
+ }
+ }
+ }
+
+ Ok(())
+}
+
+fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) -> fmt::Result {
+ let align = " ".repeat(level);
+
+ match tkn {
+ TokenTree::Leaf(leaf) => match leaf {
+ Leaf::Literal(lit) => write!(f, "{}LITERAL {} {}", align, lit.text, lit.id.0)?,
+ Leaf::Punct(punct) => write!(
+ f,
+ "{}PUNCH {} [{}] {}",
+ align,
+ punct.char,
+ if punct.spacing == Spacing::Alone { "alone" } else { "joint" },
+ punct.id.0
+ )?,
+ Leaf::Ident(ident) => write!(f, "{}IDENT {} {}", align, ident.text, ident.id.0)?,
+ },
+ TokenTree::Subtree(subtree) => {
+ print_debug_subtree(f, subtree, level)?;
+ }
+ }
+
+ Ok(())
+}
+
+impl fmt::Debug for Subtree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ print_debug_subtree(f, self, 0)
+ }
+}
+
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ TokenTree::Leaf(it) => fmt::Display::fmt(it, f),
+ TokenTree::Subtree(it) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl fmt::Display for Subtree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let (l, r) = match self.delimiter_kind() {
+ Some(DelimiterKind::Parenthesis) => ("(", ")"),
+ Some(DelimiterKind::Brace) => ("{", "}"),
+ Some(DelimiterKind::Bracket) => ("[", "]"),
+ None => ("", ""),
+ };
+ f.write_str(l)?;
+ let mut needs_space = false;
+ for tt in &self.token_trees {
+ if needs_space {
+ f.write_str(" ")?;
+ }
+ needs_space = true;
+ match tt {
+ TokenTree::Leaf(Leaf::Punct(p)) => {
+ needs_space = p.spacing == Spacing::Alone;
+ fmt::Display::fmt(p, f)?;
+ }
+ tt => fmt::Display::fmt(tt, f)?,
+ }
+ }
+ f.write_str(r)?;
+ Ok(())
+ }
+}
+
+impl fmt::Display for Leaf {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Leaf::Ident(it) => fmt::Display::fmt(it, f),
+ Leaf::Literal(it) => fmt::Display::fmt(it, f),
+ Leaf::Punct(it) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.text, f)
+ }
+}
+
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.text, f)
+ }
+}
+
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.char, f)
+ }
+}
+
+impl Subtree {
+ /// Count the number of tokens recursively
+ pub fn count(&self) -> usize {
+ let children_count = self
+ .token_trees
+ .iter()
+ .map(|c| match c {
+ TokenTree::Subtree(c) => c.count(),
+ TokenTree::Leaf(_) => 0,
+ })
+ .sum::<usize>();
+
+ self.token_trees.len() + children_count
+ }
+
+ pub fn delimiter_kind(&self) -> Option<DelimiterKind> {
+ self.delimiter.map(|it| it.kind)
+ }
+}
+
+impl Subtree {
+ /// A simple line string used for debugging
+ pub fn as_debug_string(&self) -> String {
+ let delim = match self.delimiter_kind() {
+ Some(DelimiterKind::Brace) => ("{", "}"),
+ Some(DelimiterKind::Bracket) => ("[", "]"),
+ Some(DelimiterKind::Parenthesis) => ("(", ")"),
+ None => (" ", " "),
+ };
+
+ let mut res = String::new();
+ res.push_str(delim.0);
+ let mut last = None;
+ for child in &self.token_trees {
+ let s = match child {
+ TokenTree::Leaf(it) => {
+ let s = match it {
+ Leaf::Literal(it) => it.text.to_string(),
+ Leaf::Punct(it) => it.char.to_string(),
+ Leaf::Ident(it) => it.text.to_string(),
+ };
+ match (it, last) {
+ (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => {
+ " ".to_string() + &s
+ }
+ (Leaf::Punct(_), Some(&TokenTree::Leaf(Leaf::Punct(punct)))) => {
+ if punct.spacing == Spacing::Alone {
+ " ".to_string() + &s
+ } else {
+ s
+ }
+ }
+ _ => s,
+ }
+ }
+ TokenTree::Subtree(it) => it.as_debug_string(),
+ };
+ res.push_str(&s);
+ last = Some(child);
+ }
+
+ res.push_str(delim.1);
+ res
+ }
+}
+
+pub mod buffer;
+
+pub fn pretty(tkns: &[TokenTree]) -> String {
+ fn tokentree_to_text(tkn: &TokenTree) -> String {
+ match tkn {
+ TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(),
+ TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(),
+ TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char),
+ TokenTree::Subtree(subtree) => {
+ let content = pretty(&subtree.token_trees);
+ let (open, close) = match subtree.delimiter.map(|it| it.kind) {
+ None => ("", ""),
+ Some(DelimiterKind::Brace) => ("{", "}"),
+ Some(DelimiterKind::Parenthesis) => ("(", ")"),
+ Some(DelimiterKind::Bracket) => ("[", "]"),
+ };
+ format!("{}{}{}", open, content, close)
+ }
+ }
+ }
+
+ tkns.iter()
+ .fold((String::new(), true), |(last, last_to_joint), tkn| {
+ let s = [last, tokentree_to_text(tkn)].join(if last_to_joint { "" } else { " " });
+ let mut is_joint = false;
+ if let TokenTree::Leaf(Leaf::Punct(punct)) = tkn {
+ if punct.spacing == Spacing::Joint {
+ is_joint = true;
+ }
+ }
+ (s, is_joint)
+ })
+ .0
+}
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
new file mode 100644
index 000000000..9ee4415dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "vfs-notify"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+tracing = "0.1.35"
+jod-thread = "0.1.2"
+walkdir = "2.3.2"
+crossbeam-channel = "0.5.5"
+notify = "=5.0.0-pre.15"
+
+vfs = { path = "../vfs", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
new file mode 100644
index 000000000..4d33a9afb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
@@ -0,0 +1,234 @@
+//! An implementation of `loader::Handle`, based on `walkdir` and `notify`.
+//!
+//! The file watching bits here are untested and quite probably buggy. For this
+//! reason, by default we don't watch files and rely on the editor's file watching
+//! capabilities.
+//!
+//! Hopefully, one day a reliable file watching/walking crate appears on
+//! crates.io, and we can reduce this to trivial glue code.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::fs;
+
+use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use notify::{RecommendedWatcher, RecursiveMode, Watcher};
+use paths::{AbsPath, AbsPathBuf};
+use vfs::loader;
+use walkdir::WalkDir;
+
+#[derive(Debug)]
+pub struct NotifyHandle {
+ // Relative order of fields below is significant.
+ sender: Sender<Message>,
+ _thread: jod_thread::JoinHandle,
+}
+
+#[derive(Debug)]
+enum Message {
+ Config(loader::Config),
+ Invalidate(AbsPathBuf),
+}
+
+impl loader::Handle for NotifyHandle {
+ fn spawn(sender: loader::Sender) -> NotifyHandle {
+ let actor = NotifyActor::new(sender);
+ let (sender, receiver) = unbounded::<Message>();
+ let thread = jod_thread::Builder::new()
+ .name("VfsLoader".to_owned())
+ .spawn(move || actor.run(receiver))
+ .expect("failed to spawn thread");
+ NotifyHandle { sender, _thread: thread }
+ }
+ fn set_config(&mut self, config: loader::Config) {
+ self.sender.send(Message::Config(config)).unwrap();
+ }
+ fn invalidate(&mut self, path: AbsPathBuf) {
+ self.sender.send(Message::Invalidate(path)).unwrap();
+ }
+ fn load_sync(&mut self, path: &AbsPath) -> Option<Vec<u8>> {
+ read(path)
+ }
+}
+
+type NotifyEvent = notify::Result<notify::Event>;
+
+struct NotifyActor {
+ sender: loader::Sender,
+ watched_entries: Vec<loader::Entry>,
+ // Drop order is significant.
+ watcher: Option<(RecommendedWatcher, Receiver<NotifyEvent>)>,
+}
+
+#[derive(Debug)]
+enum Event {
+ Message(Message),
+ NotifyEvent(NotifyEvent),
+}
+
+impl NotifyActor {
+ fn new(sender: loader::Sender) -> NotifyActor {
+ NotifyActor { sender, watched_entries: Vec::new(), watcher: None }
+ }
+ fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> {
+ let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver);
+ select! {
+ recv(receiver) -> it => it.ok().map(Event::Message),
+ recv(watcher_receiver.unwrap_or(&never())) -> it => Some(Event::NotifyEvent(it.unwrap())),
+ }
+ }
+ fn run(mut self, inbox: Receiver<Message>) {
+ while let Some(event) = self.next_event(&inbox) {
+ tracing::debug!("vfs-notify event: {:?}", event);
+ match event {
+ Event::Message(msg) => match msg {
+ Message::Config(config) => {
+ self.watcher = None;
+ if !config.watch.is_empty() {
+ let (watcher_sender, watcher_receiver) = unbounded();
+ let watcher = log_notify_error(RecommendedWatcher::new(move |event| {
+ watcher_sender.send(event).unwrap();
+ }));
+ self.watcher = watcher.map(|it| (it, watcher_receiver));
+ }
+
+ let config_version = config.version;
+
+ let n_total = config.load.len();
+ self.send(loader::Message::Progress { n_total, n_done: 0, config_version });
+
+ self.watched_entries.clear();
+
+ for (i, entry) in config.load.into_iter().enumerate() {
+ let watch = config.watch.contains(&i);
+ if watch {
+ self.watched_entries.push(entry.clone());
+ }
+ let files = self.load_entry(entry, watch);
+ self.send(loader::Message::Loaded { files });
+ self.send(loader::Message::Progress {
+ n_total,
+ n_done: i + 1,
+ config_version,
+ });
+ }
+ }
+ Message::Invalidate(path) => {
+ let contents = read(path.as_path());
+ let files = vec![(path, contents)];
+ self.send(loader::Message::Loaded { files });
+ }
+ },
+ Event::NotifyEvent(event) => {
+ if let Some(event) = log_notify_error(event) {
+ let files = event
+ .paths
+ .into_iter()
+ .map(|path| AbsPathBuf::try_from(path).unwrap())
+ .filter_map(|path| {
+ let meta = fs::metadata(&path).ok()?;
+ if meta.file_type().is_dir()
+ && self
+ .watched_entries
+ .iter()
+ .any(|entry| entry.contains_dir(&path))
+ {
+ self.watch(path);
+ return None;
+ }
+
+ if !meta.file_type().is_file() {
+ return None;
+ }
+ if !self
+ .watched_entries
+ .iter()
+ .any(|entry| entry.contains_file(&path))
+ {
+ return None;
+ }
+
+ let contents = read(&path);
+ Some((path, contents))
+ })
+ .collect();
+ self.send(loader::Message::Loaded { files });
+ }
+ }
+ }
+ }
+ }
+ fn load_entry(
+ &mut self,
+ entry: loader::Entry,
+ watch: bool,
+ ) -> Vec<(AbsPathBuf, Option<Vec<u8>>)> {
+ match entry {
+ loader::Entry::Files(files) => files
+ .into_iter()
+ .map(|file| {
+ if watch {
+ self.watch(file.clone());
+ }
+ let contents = read(file.as_path());
+ (file, contents)
+ })
+ .collect::<Vec<_>>(),
+ loader::Entry::Directories(dirs) => {
+ let mut res = Vec::new();
+
+ for root in &dirs.include {
+ let walkdir =
+ WalkDir::new(root).follow_links(true).into_iter().filter_entry(|entry| {
+ if !entry.file_type().is_dir() {
+ return true;
+ }
+ let path = AbsPath::assert(entry.path());
+ root == path
+ || dirs.exclude.iter().chain(&dirs.include).all(|it| it != path)
+ });
+
+ let files = walkdir.filter_map(|it| it.ok()).filter_map(|entry| {
+ let is_dir = entry.file_type().is_dir();
+ let is_file = entry.file_type().is_file();
+ let abs_path = AbsPathBuf::assert(entry.into_path());
+ if is_dir && watch {
+ self.watch(abs_path.clone());
+ }
+ if !is_file {
+ return None;
+ }
+ let ext = abs_path.extension().unwrap_or_default();
+ if dirs.extensions.iter().all(|it| it.as_str() != ext) {
+ return None;
+ }
+ Some(abs_path)
+ });
+
+ res.extend(files.map(|file| {
+ let contents = read(file.as_path());
+ (file, contents)
+ }));
+ }
+ res
+ }
+ }
+ }
+
+ fn watch(&mut self, path: AbsPathBuf) {
+ if let Some((watcher, _)) = &mut self.watcher {
+ log_notify_error(watcher.watch(path.as_ref(), RecursiveMode::NonRecursive));
+ }
+ }
+ fn send(&mut self, msg: loader::Message) {
+ (self.sender)(msg);
+ }
+}
+
+fn read(path: &AbsPath) -> Option<Vec<u8>> {
+ std::fs::read(path).ok()
+}
+
+fn log_notify_error<T>(res: notify::Result<T>) -> Option<T> {
+ res.map_err(|err| tracing::warn!("notify error: {}", err)).ok()
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
new file mode 100644
index 000000000..c63773487
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "vfs"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+fst = "0.4.7"
+
+paths = { path = "../paths", version = "0.0.0" }
+indexmap = "1.9.1"
diff --git a/src/tools/rust-analyzer/crates/vfs/src/anchored_path.rs b/src/tools/rust-analyzer/crates/vfs/src/anchored_path.rs
new file mode 100644
index 000000000..db15a2a21
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/anchored_path.rs
@@ -0,0 +1,49 @@
+//! Analysis-level representation of file-system paths.
+//!
+//! The primary goal of this is to losslessly represent paths like
+//!
+//! ```
+//! #[path = "./bar.rs"]
+//! mod foo;
+//! ```
+//!
+//! The first approach one might reach for is to use `PathBuf`. The problem here
+//! is that `PathBuf` depends on host target (windows or linux), but
+//! rust-analyzer should be capable of processing `#[path = r"C:\bar.rs"]` on Unix.
+//!
+//! The second try is to use a `String`. This also fails, however. Consider a
+//! hypothetical scenario, where rust-analyzer operates in a
+//! networked/distributed mode. There's one global instance of rust-analyzer,
+//! which processes requests from different machines. Now, the semantics of
+//! `#[path = "/abs/path.rs"]` actually depends on which file-system we are at!
+//! That is, even absolute paths exist relative to a file system!
+//!
+//! A more realistic scenario here is virtual VFS paths we use for testing. More
+//! generally, there can be separate "universes" of VFS paths.
+//!
+//! That's why we use an anchored representation -- each path carries
+//! information about the file it originates from. We can then fetch
+//! fs/"universe" information from the anchor.
+use crate::FileId;
+
+/// Path relative to a file.
+///
+/// Owned version of [`AnchoredPath`].
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct AnchoredPathBuf {
+ /// File that this path is relative to.
+ pub anchor: FileId,
+ /// Path relative to `anchor`'s containing directory.
+ pub path: String,
+}
+
+/// Path relative to a file.
+///
+/// Borrowed version of [`AnchoredPathBuf`].
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub struct AnchoredPath<'a> {
+ /// File that this path is relative to.
+ pub anchor: FileId,
+ /// Path relative to `anchor`'s containing directory.
+ pub path: &'a str,
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/src/file_set.rs b/src/tools/rust-analyzer/crates/vfs/src/file_set.rs
new file mode 100644
index 000000000..6a89263e5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/file_set.rs
@@ -0,0 +1,218 @@
+//! Partitions a list of files into disjoint subsets.
+//!
+//! Files which do not belong to any explicitly configured `FileSet` belong to
+//! the default `FileSet`.
+use std::fmt;
+
+use fst::{IntoStreamer, Streamer};
+use rustc_hash::FxHashMap;
+
+use crate::{AnchoredPath, FileId, Vfs, VfsPath};
+
+/// A set of [`VfsPath`]s identified by [`FileId`]s.
+#[derive(Default, Clone, Eq, PartialEq)]
+pub struct FileSet {
+ files: FxHashMap<VfsPath, FileId>,
+ paths: FxHashMap<FileId, VfsPath>,
+}
+
+impl FileSet {
+ /// Returns the number of stored paths.
+ pub fn len(&self) -> usize {
+ self.files.len()
+ }
+
+ /// Get the id of the file corresponding to `path`.
+ ///
+ /// If either `path`'s [`anchor`](AnchoredPath::anchor) or the resolved path is not in
+ /// the set, returns [`None`].
+ pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+ let mut base = self.paths[&path.anchor].clone();
+ base.pop();
+ let path = base.join(path.path)?;
+ self.files.get(&path).copied()
+ }
+
+ /// Get the id corresponding to `path` if it exists in the set.
+ pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
+ self.files.get(path)
+ }
+
+ /// Get the path corresponding to `file` if it exists in the set.
+ pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
+ self.paths.get(file)
+ }
+
+ /// Insert the `file_id, path` pair into the set.
+ ///
+ /// # Note
+ /// Multiple [`FileId`] can be mapped to the same [`VfsPath`], and vice-versa.
+ pub fn insert(&mut self, file_id: FileId, path: VfsPath) {
+ self.files.insert(path.clone(), file_id);
+ self.paths.insert(file_id, path);
+ }
+
+ /// Iterate over this set's ids.
+ pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
+ self.paths.keys().copied()
+ }
+}
+
+impl fmt::Debug for FileSet {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("FileSet").field("n_files", &self.files.len()).finish()
+ }
+}
+
+/// This contains path prefixes to partition a [`Vfs`] into [`FileSet`]s.
+///
+/// # Example
+/// ```rust
+/// # use vfs::{file_set::FileSetConfigBuilder, VfsPath, Vfs};
+/// let mut builder = FileSetConfigBuilder::default();
+/// builder.add_file_set(vec![VfsPath::new_virtual_path("/src".to_string())]);
+/// let config = builder.build();
+/// let mut file_system = Vfs::default();
+/// file_system.set_file_contents(VfsPath::new_virtual_path("/src/main.rs".to_string()), Some(vec![]));
+/// file_system.set_file_contents(VfsPath::new_virtual_path("/src/lib.rs".to_string()), Some(vec![]));
+/// file_system.set_file_contents(VfsPath::new_virtual_path("/build.rs".to_string()), Some(vec![]));
+/// // contains the sets :
+/// // { "/src/main.rs", "/src/lib.rs" }
+/// // { "/build.rs" }
+/// let sets = config.partition(&file_system);
+/// ```
+#[derive(Debug)]
+pub struct FileSetConfig {
+ /// Number of sets that `self` can partition a [`Vfs`] into.
+ ///
+ /// This should be the number of sets in `self.map` + 1 for files that don't fit in any
+ /// defined set.
+ n_file_sets: usize,
+ /// Map from encoded paths to the set they belong to.
+ map: fst::Map<Vec<u8>>,
+}
+
+impl Default for FileSetConfig {
+ fn default() -> Self {
+ FileSetConfig::builder().build()
+ }
+}
+
+impl FileSetConfig {
+ /// Returns a builder for `FileSetConfig`.
+ pub fn builder() -> FileSetConfigBuilder {
+ FileSetConfigBuilder::default()
+ }
+
+ /// Partition `vfs` into `FileSet`s.
+ ///
+ /// Creates a new [`FileSet`] for every set of prefixes in `self`.
+ pub fn partition(&self, vfs: &Vfs) -> Vec<FileSet> {
+ let mut scratch_space = Vec::new();
+ let mut res = vec![FileSet::default(); self.len()];
+ for (file_id, path) in vfs.iter() {
+ let root = self.classify(path, &mut scratch_space);
+ res[root].insert(file_id, path.clone());
+ }
+ res
+ }
+
+ /// Number of sets that `self` can partition a [`Vfs`] into.
+ fn len(&self) -> usize {
+ self.n_file_sets
+ }
+
+ /// Returns the set index for the given `path`.
+ ///
+ /// `scratch_space` is used as a buffer and will be entirely replaced.
+ fn classify(&self, path: &VfsPath, scratch_space: &mut Vec<u8>) -> usize {
+ scratch_space.clear();
+ path.encode(scratch_space);
+ let automaton = PrefixOf::new(scratch_space.as_slice());
+ let mut longest_prefix = self.len() - 1;
+ let mut stream = self.map.search(automaton).into_stream();
+ while let Some((_, v)) = stream.next() {
+ longest_prefix = v as usize;
+ }
+ longest_prefix
+ }
+}
+
+/// Builder for [`FileSetConfig`].
+pub struct FileSetConfigBuilder {
+ roots: Vec<Vec<VfsPath>>,
+}
+
+impl Default for FileSetConfigBuilder {
+ fn default() -> Self {
+ FileSetConfigBuilder { roots: Vec::new() }
+ }
+}
+
+impl FileSetConfigBuilder {
+ /// Returns the number of sets currently held.
+ pub fn len(&self) -> usize {
+ self.roots.len()
+ }
+
+ /// Add a new set of paths prefixes.
+ pub fn add_file_set(&mut self, roots: Vec<VfsPath>) {
+ self.roots.push(roots);
+ }
+
+ /// Build the `FileSetConfig`.
+ pub fn build(self) -> FileSetConfig {
+ let n_file_sets = self.roots.len() + 1;
+ let map = {
+ let mut entries = Vec::new();
+ for (i, paths) in self.roots.into_iter().enumerate() {
+ for p in paths {
+ let mut buf = Vec::new();
+ p.encode(&mut buf);
+ entries.push((buf, i as u64));
+ }
+ }
+ entries.sort();
+ entries.dedup_by(|(a, _), (b, _)| a == b);
+ fst::Map::from_iter(entries).unwrap()
+ };
+ FileSetConfig { n_file_sets, map }
+ }
+}
+
+/// Implements [`fst::Automaton`]
+///
+/// It will match if `prefix_of` is a prefix of the given data.
+struct PrefixOf<'a> {
+ prefix_of: &'a [u8],
+}
+
+impl<'a> PrefixOf<'a> {
+ /// Creates a new `PrefixOf` from the given slice.
+ fn new(prefix_of: &'a [u8]) -> Self {
+ Self { prefix_of }
+ }
+}
+
+impl fst::Automaton for PrefixOf<'_> {
+ type State = usize;
+ fn start(&self) -> usize {
+ 0
+ }
+ fn is_match(&self, &state: &usize) -> bool {
+ state != !0
+ }
+ fn can_match(&self, &state: &usize) -> bool {
+ state != !0
+ }
+ fn accept(&self, &state: &usize, byte: u8) -> usize {
+ if self.prefix_of.get(state) == Some(&byte) {
+ state + 1
+ } else {
+ !0
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/tools/rust-analyzer/crates/vfs/src/file_set/tests.rs b/src/tools/rust-analyzer/crates/vfs/src/file_set/tests.rs
new file mode 100644
index 000000000..2146df185
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/file_set/tests.rs
@@ -0,0 +1,42 @@
+use super::*;
+
+#[test]
+fn path_prefix() {
+ let mut file_set = FileSetConfig::builder();
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo".into())]);
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo/bar/baz".into())]);
+ let file_set = file_set.build();
+
+ let mut vfs = Vfs::default();
+ vfs.set_file_contents(VfsPath::new_virtual_path("/foo/src/lib.rs".into()), Some(Vec::new()));
+ vfs.set_file_contents(
+ VfsPath::new_virtual_path("/foo/src/bar/baz/lib.rs".into()),
+ Some(Vec::new()),
+ );
+ vfs.set_file_contents(
+ VfsPath::new_virtual_path("/foo/bar/baz/lib.rs".into()),
+ Some(Vec::new()),
+ );
+ vfs.set_file_contents(VfsPath::new_virtual_path("/quux/lib.rs".into()), Some(Vec::new()));
+
+ let partition = file_set.partition(&vfs).into_iter().map(|it| it.len()).collect::<Vec<_>>();
+ assert_eq!(partition, vec![2, 1, 1]);
+}
+
+#[test]
+fn name_prefix() {
+ let mut file_set = FileSetConfig::builder();
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo".into())]);
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo-things".into())]);
+ let file_set = file_set.build();
+
+ let mut vfs = Vfs::default();
+ vfs.set_file_contents(VfsPath::new_virtual_path("/foo/src/lib.rs".into()), Some(Vec::new()));
+ vfs.set_file_contents(
+ VfsPath::new_virtual_path("/foo-things/src/lib.rs".into()),
+ Some(Vec::new()),
+ );
+
+ let partition = file_set.partition(&vfs).into_iter().map(|it| it.len()).collect::<Vec<_>>();
+ assert_eq!(partition, vec![1, 1, 0]);
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
new file mode 100644
index 000000000..10fae41d0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
@@ -0,0 +1,221 @@
+//! # Virtual File System
+//!
+//! VFS stores all files read by rust-analyzer. Reading file contents from VFS
+//! always returns the same contents, unless VFS was explicitly modified with
+//! [`set_file_contents`]. All changes to VFS are logged, and can be retrieved via
+//! [`take_changes`] method. The pack of changes is then pushed to `salsa` and
+//! triggers incremental recomputation.
+//!
+//! Files in VFS are identified with [`FileId`]s -- interned paths. The notion of
+//! the path, [`VfsPath`] is somewhat abstract: at the moment, it is represented
+//! as an [`std::path::PathBuf`] internally, but this is an implementation detail.
+//!
+//! VFS doesn't do IO or file watching itself. For that, see the [`loader`]
+//! module. [`loader::Handle`] is an object-safe trait which abstracts both file
+//! loading and file watching. [`Handle`] is dynamically configured with a set of
+//! directory entries which should be scanned and watched. [`Handle`] then
+//! asynchronously pushes file changes. Directory entries are configured in
+//! free-form via list of globs, it's up to the [`Handle`] to interpret the globs
+//! in any specific way.
+//!
+//! VFS stores a flat list of files. [`file_set::FileSet`] can partition this list
+//! of files into disjoint sets of files. Traversal-like operations (including
+//! getting the neighbor file by the relative path) are handled by the [`FileSet`].
+//! [`FileSet`]s are also pushed to salsa and cause it to re-check `mod foo;`
+//! declarations when files are created or deleted.
+//!
+//! [`FileSet`] and [`loader::Entry`] play similar, but different roles.
+//! Both specify the "set of paths/files", one is geared towards file watching,
+//! the other towards salsa changes. In particular, single [`FileSet`]
+//! may correspond to several [`loader::Entry`]. For example, a crate from
+//! crates.io which uses code generation would have two [`Entries`] -- for sources
+//! in `~/.cargo`, and for generated code in `./target/debug/build`. It will
+//! have a single [`FileSet`] which unions the two sources.
+//!
+//! [`set_file_contents`]: Vfs::set_file_contents
+//! [`take_changes`]: Vfs::take_changes
+//! [`FileSet`]: file_set::FileSet
+//! [`Handle`]: loader::Handle
+//! [`Entries`]: loader::Entry
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod anchored_path;
+pub mod file_set;
+pub mod loader;
+mod path_interner;
+mod vfs_path;
+
+use std::{fmt, mem};
+
+use crate::path_interner::PathInterner;
+
+pub use crate::{
+ anchored_path::{AnchoredPath, AnchoredPathBuf},
+ vfs_path::VfsPath,
+};
+pub use paths::{AbsPath, AbsPathBuf};
+
+/// Handle to a file in [`Vfs`]
+///
+/// Most functions in rust-analyzer use this when they need to refer to a file.
+#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct FileId(pub u32);
+
+/// Storage for all files read by rust-analyzer.
+///
+/// For more information, see the [crate-level](crate) documentation.
+#[derive(Default)]
+pub struct Vfs {
+ interner: PathInterner,
+ data: Vec<Option<Vec<u8>>>,
+ changes: Vec<ChangedFile>,
+}
+
+/// Changed file in the [`Vfs`].
+pub struct ChangedFile {
+ /// Id of the changed file
+ pub file_id: FileId,
+ /// Kind of change
+ pub change_kind: ChangeKind,
+}
+
+impl ChangedFile {
+ /// Returns `true` if the change is not [`Delete`](ChangeKind::Delete).
+ pub fn exists(&self) -> bool {
+ self.change_kind != ChangeKind::Delete
+ }
+
+ /// Returns `true` if the change is [`Create`](ChangeKind::Create) or
+ /// [`Delete`](ChangeKind::Delete).
+ pub fn is_created_or_deleted(&self) -> bool {
+ matches!(self.change_kind, ChangeKind::Create | ChangeKind::Delete)
+ }
+}
+
+/// Kind of [file change](ChangedFile).
+#[derive(Eq, PartialEq, Copy, Clone, Debug)]
+pub enum ChangeKind {
+ /// The file was (re-)created
+ Create,
+ /// The file was modified
+ Modify,
+ /// The file was deleted
+ Delete,
+}
+
+impl Vfs {
+    /// Number of files currently stored.
+ ///
+ /// Note that this includes deleted files.
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ /// Id of the given path if it exists in the `Vfs` and is not deleted.
+ pub fn file_id(&self, path: &VfsPath) -> Option<FileId> {
+ self.interner.get(path).filter(|&it| self.get(it).is_some())
+ }
+
+ /// File path corresponding to the given `file_id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the id is not present in the `Vfs`.
+ pub fn file_path(&self, file_id: FileId) -> VfsPath {
+ self.interner.lookup(file_id).clone()
+ }
+
+ /// File content corresponding to the given `file_id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the id is not present in the `Vfs`, or if the corresponding file is
+ /// deleted.
+ pub fn file_contents(&self, file_id: FileId) -> &[u8] {
+ self.get(file_id).as_deref().unwrap()
+ }
+
+ /// Returns an iterator over the stored ids and their corresponding paths.
+ ///
+ /// This will skip deleted files.
+ pub fn iter(&self) -> impl Iterator<Item = (FileId, &VfsPath)> + '_ {
+ (0..self.data.len())
+ .map(|it| FileId(it as u32))
+ .filter(move |&file_id| self.get(file_id).is_some())
+ .map(move |file_id| {
+ let path = self.interner.lookup(file_id);
+ (file_id, path)
+ })
+ }
+
+ /// Update the `path` with the given `contents`. `None` means the file was deleted.
+ ///
+ /// Returns `true` if the file was modified, and saves the [change](ChangedFile).
+ ///
+    /// If the path does not currently exist in the `Vfs`, allocates a new
+ /// [`FileId`] for it.
+ pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) -> bool {
+ let file_id = self.alloc_file_id(path);
+ let change_kind = match (&self.get(file_id), &contents) {
+ (None, None) => return false,
+ (None, Some(_)) => ChangeKind::Create,
+ (Some(_), None) => ChangeKind::Delete,
+ (Some(old), Some(new)) if old == new => return false,
+ (Some(_), Some(_)) => ChangeKind::Modify,
+ };
+
+ *self.get_mut(file_id) = contents;
+ self.changes.push(ChangedFile { file_id, change_kind });
+ true
+ }
+
+ /// Returns `true` if the `Vfs` contains [changes](ChangedFile).
+ pub fn has_changes(&self) -> bool {
+ !self.changes.is_empty()
+ }
+
+ /// Drain and returns all the changes in the `Vfs`.
+ pub fn take_changes(&mut self) -> Vec<ChangedFile> {
+ mem::take(&mut self.changes)
+ }
+
+ /// Returns the id associated with `path`
+ ///
+    /// - If `path` does not exist in the `Vfs`, allocate a new id for it, associated with a
+ /// deleted file;
+ /// - Else, returns `path`'s id.
+ ///
+ /// Does not record a change.
+ fn alloc_file_id(&mut self, path: VfsPath) -> FileId {
+ let file_id = self.interner.intern(path);
+ let idx = file_id.0 as usize;
+ let len = self.data.len().max(idx + 1);
+ self.data.resize_with(len, || None);
+ file_id
+ }
+
+ /// Returns the content associated with the given `file_id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if no file is associated to that id.
+ fn get(&self, file_id: FileId) -> &Option<Vec<u8>> {
+ &self.data[file_id.0 as usize]
+ }
+
+ /// Mutably returns the content associated with the given `file_id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if no file is associated to that id.
+ fn get_mut(&mut self, file_id: FileId) -> &mut Option<Vec<u8>> {
+ &mut self.data[file_id.0 as usize]
+ }
+}
+
+impl fmt::Debug for Vfs {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Vfs").field("n_files", &self.data.len()).finish()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/src/loader.rs b/src/tools/rust-analyzer/crates/vfs/src/loader.rs
new file mode 100644
index 000000000..e2d74782a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/loader.rs
@@ -0,0 +1,215 @@
+//! Object safe interface for file watching and reading.
+use std::fmt;
+
+use paths::{AbsPath, AbsPathBuf};
+
+/// A set of files on the file system.
+#[derive(Debug, Clone)]
+pub enum Entry {
+ /// The `Entry` is represented by a raw set of files.
+ Files(Vec<AbsPathBuf>),
+ /// The `Entry` is represented by `Directories`.
+ Directories(Directories),
+}
+
+/// Specifies a set of files on the file system.
+///
+/// A file is included if:
+/// * it has included extension
+/// * it is under an `include` path
+/// * it is not under `exclude` path
+///
+/// If many include/exclude paths match, the longest one wins.
+///
+/// If a path is in both `include` and `exclude`, the `exclude` one wins.
+#[derive(Debug, Clone, Default)]
+pub struct Directories {
+ pub extensions: Vec<String>,
+ pub include: Vec<AbsPathBuf>,
+ pub exclude: Vec<AbsPathBuf>,
+}
+
+/// [`Handle`]'s configuration.
+#[derive(Debug)]
+pub struct Config {
+ /// Version number to associate progress updates to the right config
+ /// version.
+ pub version: u32,
+ /// Set of initially loaded files.
+ pub load: Vec<Entry>,
+ /// Index of watched entries in `load`.
+ ///
+    /// If a path in a watched entry is modified, the [`Handle`] should notify it.
+ pub watch: Vec<usize>,
+}
+
+/// Message about an action taken by a [`Handle`].
+pub enum Message {
+ /// Indicate a gradual progress.
+ ///
+ /// This is supposed to be the number of loaded files.
+ Progress { n_total: usize, n_done: usize, config_version: u32 },
+ /// The handle loaded the following files' content.
+ Loaded { files: Vec<(AbsPathBuf, Option<Vec<u8>>)> },
+}
+
+/// Type that will receive [`Messages`](Message) from a [`Handle`].
+pub type Sender = Box<dyn Fn(Message) + Send>;
+
+/// Interface for reading and watching files.
+pub trait Handle: fmt::Debug {
+ /// Spawn a new handle with the given `sender`.
+ fn spawn(sender: Sender) -> Self
+ where
+ Self: Sized;
+
+ /// Set this handle's configuration.
+ fn set_config(&mut self, config: Config);
+
+ /// The file's content at `path` has been modified, and should be reloaded.
+ fn invalidate(&mut self, path: AbsPathBuf);
+
+ /// Load the content of the given file, returning [`None`] if it does not
+    /// exist.
+ fn load_sync(&mut self, path: &AbsPath) -> Option<Vec<u8>>;
+}
+
+impl Entry {
+ /// Returns:
+ /// ```text
+ /// Entry::Directories(Directories {
+ /// extensions: ["rs"],
+ /// include: [base],
+ /// exclude: [base/.git],
+ /// })
+ /// ```
+ pub fn rs_files_recursively(base: AbsPathBuf) -> Entry {
+ Entry::Directories(dirs(base, &[".git"]))
+ }
+
+ /// Returns:
+ /// ```text
+ /// Entry::Directories(Directories {
+ /// extensions: ["rs"],
+ /// include: [base],
+ /// exclude: [base/.git, base/target],
+ /// })
+ /// ```
+ pub fn local_cargo_package(base: AbsPathBuf) -> Entry {
+ Entry::Directories(dirs(base, &[".git", "target"]))
+ }
+
+ /// Returns:
+ /// ```text
+ /// Entry::Directories(Directories {
+ /// extensions: ["rs"],
+ /// include: [base],
+ /// exclude: [base/.git, /tests, /examples, /benches],
+ /// })
+ /// ```
+ pub fn cargo_package_dependency(base: AbsPathBuf) -> Entry {
+ Entry::Directories(dirs(base, &[".git", "/tests", "/examples", "/benches"]))
+ }
+
+ /// Returns `true` if `path` is included in `self`.
+ ///
+ /// See [`Directories::contains_file`].
+ pub fn contains_file(&self, path: &AbsPath) -> bool {
+ match self {
+ Entry::Files(files) => files.iter().any(|it| it == path),
+ Entry::Directories(dirs) => dirs.contains_file(path),
+ }
+ }
+
+ /// Returns `true` if `path` is included in `self`.
+ ///
+ /// - If `self` is `Entry::Files`, returns `false`
+ /// - Else, see [`Directories::contains_dir`].
+ pub fn contains_dir(&self, path: &AbsPath) -> bool {
+ match self {
+ Entry::Files(_) => false,
+ Entry::Directories(dirs) => dirs.contains_dir(path),
+ }
+ }
+}
+
+impl Directories {
+ /// Returns `true` if `path` is included in `self`.
+ pub fn contains_file(&self, path: &AbsPath) -> bool {
+ // First, check the file extension...
+ let ext = path.extension().unwrap_or_default();
+ if self.extensions.iter().all(|it| it.as_str() != ext) {
+ return false;
+ }
+
+ // Then, check for path inclusion...
+ self.includes_path(path)
+ }
+
+ /// Returns `true` if `path` is included in `self`.
+ ///
+ /// Since `path` is supposed to be a directory, this will not take extension
+ /// into account.
+ pub fn contains_dir(&self, path: &AbsPath) -> bool {
+ self.includes_path(path)
+ }
+
+ /// Returns `true` if `path` is included in `self`.
+ ///
+ /// It is included if
+ /// - An element in `self.include` is a prefix of `path`.
+ /// - This path is longer than any element in `self.exclude` that is a prefix
+ /// of `path`. In case of equality, exclusion wins.
+ fn includes_path(&self, path: &AbsPath) -> bool {
+ let mut include: Option<&AbsPathBuf> = None;
+ for incl in &self.include {
+ if path.starts_with(incl) {
+ include = Some(match include {
+ Some(prev) if prev.starts_with(incl) => prev,
+ _ => incl,
+ });
+ }
+ }
+
+ let include = match include {
+ Some(it) => it,
+ None => return false,
+ };
+
+ !self.exclude.iter().any(|excl| path.starts_with(excl) && excl.starts_with(include))
+ }
+}
+
+/// Returns:
+/// ```text
+/// Directories {
+/// extensions: ["rs"],
+/// include: [base],
+/// exclude: [base/<exclude>],
+/// }
+/// ```
+fn dirs(base: AbsPathBuf, exclude: &[&str]) -> Directories {
+ let exclude = exclude.iter().map(|it| base.join(it)).collect::<Vec<_>>();
+ Directories { extensions: vec!["rs".to_string()], include: vec![base], exclude }
+}
+
+impl fmt::Debug for Message {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Message::Loaded { files } => {
+ f.debug_struct("Loaded").field("n_files", &files.len()).finish()
+ }
+ Message::Progress { n_total, n_done, config_version } => f
+ .debug_struct("Progress")
+ .field("n_total", n_total)
+ .field("n_done", n_done)
+ .field("config_version", config_version)
+ .finish(),
+ }
+ }
+}
+
+#[test]
+fn handle_is_object_safe() {
+ fn _assert(_: &dyn Handle) {}
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs b/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs
new file mode 100644
index 000000000..6e049f0d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs
@@ -0,0 +1,48 @@
+//! Maps paths to compact integer ids. We don't care about clearing paths which
+//! no longer exist -- the assumption is total size of paths we ever look at is
+//! not too big.
+use std::hash::BuildHasherDefault;
+
+use indexmap::IndexSet;
+use rustc_hash::FxHasher;
+
+use crate::{FileId, VfsPath};
+
+/// Structure to map between [`VfsPath`] and [`FileId`].
+pub(crate) struct PathInterner {
+ map: IndexSet<VfsPath, BuildHasherDefault<FxHasher>>,
+}
+
+impl Default for PathInterner {
+ fn default() -> Self {
+ Self { map: IndexSet::default() }
+ }
+}
+
+impl PathInterner {
+ /// Get the id corresponding to `path`.
+ ///
+    /// If `path` does not exist in `self`, returns [`None`].
+ pub(crate) fn get(&self, path: &VfsPath) -> Option<FileId> {
+ self.map.get_index_of(path).map(|i| FileId(i as u32))
+ }
+
+ /// Insert `path` in `self`.
+ ///
+ /// - If `path` already exists in `self`, returns its associated id;
+ /// - Else, returns a newly allocated id.
+ pub(crate) fn intern(&mut self, path: VfsPath) -> FileId {
+ let (id, _added) = self.map.insert_full(path);
+ assert!(id < u32::MAX as usize);
+ FileId(id as u32)
+ }
+
+ /// Returns the path corresponding to `id`.
+ ///
+ /// # Panics
+ ///
+    /// Panics if `id` does not exist in `self`.
+ pub(crate) fn lookup(&self, id: FileId) -> &VfsPath {
+ self.map.get_index(id.0 as usize).unwrap()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
new file mode 100644
index 000000000..668c7320d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
@@ -0,0 +1,406 @@
+//! Abstract-ish representation of paths for VFS.
+use std::fmt;
+
+use paths::{AbsPath, AbsPathBuf};
+
+/// Path in [`Vfs`].
+///
+/// Long-term, we want to support files which do not reside in the file-system,
+/// so we treat `VfsPath`s as opaque identifiers.
+///
+/// [`Vfs`]: crate::Vfs
+#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct VfsPath(VfsPathRepr);
+
+impl VfsPath {
+ /// Creates an "in-memory" path from `/`-separated string.
+ ///
+ /// This is most useful for testing, to avoid windows/linux differences
+ ///
+ /// # Panics
+ ///
+ /// Panics if `path` does not start with `'/'`.
+ pub fn new_virtual_path(path: String) -> VfsPath {
+ assert!(path.starts_with('/'));
+ VfsPath(VfsPathRepr::VirtualPath(VirtualPath(path)))
+ }
+
+ /// Create a path from string. Input should be a string representation of
+    /// an absolute path inside the filesystem.
+ pub fn new_real_path(path: String) -> VfsPath {
+ VfsPath::from(AbsPathBuf::assert(path.into()))
+ }
+
+ /// Returns the `AbsPath` representation of `self` if `self` is on the file system.
+ pub fn as_path(&self) -> Option<&AbsPath> {
+ match &self.0 {
+ VfsPathRepr::PathBuf(it) => Some(it.as_path()),
+ VfsPathRepr::VirtualPath(_) => None,
+ }
+ }
+
+ /// Creates a new `VfsPath` with `path` adjoined to `self`.
+ pub fn join(&self, path: &str) -> Option<VfsPath> {
+ match &self.0 {
+ VfsPathRepr::PathBuf(it) => {
+ let res = it.join(path).normalize();
+ Some(VfsPath(VfsPathRepr::PathBuf(res)))
+ }
+ VfsPathRepr::VirtualPath(it) => {
+ let res = it.join(path)?;
+ Some(VfsPath(VfsPathRepr::VirtualPath(res)))
+ }
+ }
+ }
+
+ /// Remove the last component of `self` if there is one.
+ ///
+ /// If `self` has no component, returns `false`; else returns `true`.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// # use vfs::{AbsPathBuf, VfsPath};
+ /// let mut path = VfsPath::from(AbsPathBuf::assert("/foo/bar".into()));
+ /// assert!(path.pop());
+ /// assert_eq!(path, VfsPath::from(AbsPathBuf::assert("/foo".into())));
+ /// assert!(path.pop());
+ /// assert_eq!(path, VfsPath::from(AbsPathBuf::assert("/".into())));
+ /// assert!(!path.pop());
+ /// ```
+ pub fn pop(&mut self) -> bool {
+ match &mut self.0 {
+ VfsPathRepr::PathBuf(it) => it.pop(),
+ VfsPathRepr::VirtualPath(it) => it.pop(),
+ }
+ }
+
+ /// Returns `true` if `other` is a prefix of `self`.
+ pub fn starts_with(&self, other: &VfsPath) -> bool {
+ match (&self.0, &other.0) {
+ (VfsPathRepr::PathBuf(lhs), VfsPathRepr::PathBuf(rhs)) => lhs.starts_with(rhs),
+ (VfsPathRepr::VirtualPath(lhs), VfsPathRepr::VirtualPath(rhs)) => lhs.starts_with(rhs),
+ (VfsPathRepr::PathBuf(_) | VfsPathRepr::VirtualPath(_), _) => false,
+ }
+ }
+
+ /// Returns the `VfsPath` without its final component, if there is one.
+ ///
+ /// Returns [`None`] if the path is a root or prefix.
+ pub fn parent(&self) -> Option<VfsPath> {
+ let mut parent = self.clone();
+ if parent.pop() {
+ Some(parent)
+ } else {
+ None
+ }
+ }
+
+ /// Returns `self`'s base name and file extension.
+ pub fn name_and_extension(&self) -> Option<(&str, Option<&str>)> {
+ match &self.0 {
+ VfsPathRepr::PathBuf(p) => Some((
+ p.file_stem()?.to_str()?,
+ p.extension().and_then(|extension| extension.to_str()),
+ )),
+ VfsPathRepr::VirtualPath(p) => p.name_and_extension(),
+ }
+ }
+
+ /// **Don't make this `pub`**
+ ///
+ /// Encode the path in the given buffer.
+ ///
+ /// The encoding will be `0` if [`AbsPathBuf`], `1` if [`VirtualPath`], followed
+ /// by `self`'s representation.
+ ///
+ /// Note that this encoding is dependent on the operating system.
+ pub(crate) fn encode(&self, buf: &mut Vec<u8>) {
+ let tag = match &self.0 {
+ VfsPathRepr::PathBuf(_) => 0,
+ VfsPathRepr::VirtualPath(_) => 1,
+ };
+ buf.push(tag);
+ match &self.0 {
+ VfsPathRepr::PathBuf(path) => {
+ #[cfg(windows)]
+ {
+ use windows_paths::Encode;
+ let path: &std::path::Path = path.as_ref();
+ let components = path.components();
+ let mut add_sep = false;
+ for component in components {
+ if add_sep {
+ windows_paths::SEP.encode(buf);
+ }
+ let len_before = buf.len();
+ match component {
+ std::path::Component::Prefix(prefix) => {
+ // kind() returns a normalized and comparable path prefix.
+ prefix.kind().encode(buf);
+ }
+ std::path::Component::RootDir => {
+ if !add_sep {
+ component.as_os_str().encode(buf);
+ }
+ }
+ _ => component.as_os_str().encode(buf),
+ }
+
+ // some components may be encoded empty
+ add_sep = len_before != buf.len();
+ }
+ }
+ #[cfg(unix)]
+ {
+ use std::os::unix::ffi::OsStrExt;
+ buf.extend(path.as_os_str().as_bytes());
+ }
+ #[cfg(not(any(windows, unix)))]
+ {
+ buf.extend(path.as_os_str().to_string_lossy().as_bytes());
+ }
+ }
+ VfsPathRepr::VirtualPath(VirtualPath(s)) => buf.extend(s.as_bytes()),
+ }
+ }
+}
+
+#[cfg(windows)]
+mod windows_paths {
+ pub(crate) trait Encode {
+ fn encode(&self, buf: &mut Vec<u8>);
+ }
+
+ impl Encode for std::ffi::OsStr {
+ fn encode(&self, buf: &mut Vec<u8>) {
+ use std::os::windows::ffi::OsStrExt;
+ for wchar in self.encode_wide() {
+ buf.extend(wchar.to_le_bytes().iter().copied());
+ }
+ }
+ }
+
+ impl Encode for u8 {
+ fn encode(&self, buf: &mut Vec<u8>) {
+ let wide = *self as u16;
+ buf.extend(wide.to_le_bytes().iter().copied())
+ }
+ }
+
+ impl Encode for &str {
+ fn encode(&self, buf: &mut Vec<u8>) {
+ debug_assert!(self.is_ascii());
+ for b in self.as_bytes() {
+ b.encode(buf)
+ }
+ }
+ }
+
+ pub(crate) const SEP: &str = "\\";
+ const VERBATIM: &str = "\\\\?\\";
+ const UNC: &str = "UNC";
+ const DEVICE: &str = "\\\\.\\";
+ const COLON: &str = ":";
+
+ impl Encode for std::path::Prefix<'_> {
+ fn encode(&self, buf: &mut Vec<u8>) {
+ match self {
+ std::path::Prefix::Verbatim(c) => {
+ VERBATIM.encode(buf);
+ c.encode(buf);
+ }
+ std::path::Prefix::VerbatimUNC(server, share) => {
+ VERBATIM.encode(buf);
+ UNC.encode(buf);
+ SEP.encode(buf);
+ server.encode(buf);
+ SEP.encode(buf);
+ share.encode(buf);
+ }
+ std::path::Prefix::VerbatimDisk(d) => {
+ VERBATIM.encode(buf);
+ d.encode(buf);
+ COLON.encode(buf);
+ }
+ std::path::Prefix::DeviceNS(device) => {
+ DEVICE.encode(buf);
+ device.encode(buf);
+ }
+ std::path::Prefix::UNC(server, share) => {
+ SEP.encode(buf);
+ SEP.encode(buf);
+ server.encode(buf);
+ SEP.encode(buf);
+ share.encode(buf);
+ }
+ std::path::Prefix::Disk(d) => {
+ d.encode(buf);
+ COLON.encode(buf);
+ }
+ }
+ }
+ }
+ #[test]
+ fn paths_encoding() {
+ // drive letter casing agnostic
+ test_eq("C:/x.rs", "c:/x.rs");
+ // separator agnostic
+ test_eq("C:/x/y.rs", "C:\\x\\y.rs");
+
+ fn test_eq(a: &str, b: &str) {
+ let mut b1 = Vec::new();
+ let mut b2 = Vec::new();
+ vfs(a).encode(&mut b1);
+ vfs(b).encode(&mut b2);
+ assert_eq!(b1, b2);
+ }
+ }
+
+ #[test]
+ fn test_sep_root_dir_encoding() {
+ let mut buf = Vec::new();
+ vfs("C:/x/y").encode(&mut buf);
+ assert_eq!(&buf, &[0, 67, 0, 58, 0, 92, 0, 120, 0, 92, 0, 121, 0])
+ }
+
+ #[cfg(test)]
+ fn vfs(str: &str) -> super::VfsPath {
+ use super::{AbsPathBuf, VfsPath};
+ VfsPath::from(AbsPathBuf::try_from(str).unwrap())
+ }
+}
+
+/// Internal, private representation of [`VfsPath`].
+#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+enum VfsPathRepr {
+ PathBuf(AbsPathBuf),
+ VirtualPath(VirtualPath),
+}
+
+impl From<AbsPathBuf> for VfsPath {
+ fn from(v: AbsPathBuf) -> Self {
+ VfsPath(VfsPathRepr::PathBuf(v.normalize()))
+ }
+}
+
+impl fmt::Display for VfsPath {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match &self.0 {
+ VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f),
+ VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl fmt::Debug for VfsPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.0, f)
+ }
+}
+
+impl fmt::Debug for VfsPathRepr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self {
+ VfsPathRepr::PathBuf(it) => fmt::Debug::fmt(&it.display(), f),
+ VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Debug::fmt(&it, f),
+ }
+ }
+}
+
+/// `/`-separated virtual path.
+///
+/// This is used to describe files that do not reside on the file system.
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+struct VirtualPath(String);
+
+impl VirtualPath {
+ /// Returns `true` if `other` is a prefix of `self` (as strings).
+ fn starts_with(&self, other: &VirtualPath) -> bool {
+ self.0.starts_with(&other.0)
+ }
+
+ /// Remove the last component of `self`.
+ ///
+ /// This will find the last `'/'` in `self`, and remove everything after it,
+ /// including the `'/'`.
+ ///
+ /// If `self` contains no `'/'`, returns `false`; else returns `true`.
+ ///
+ /// # Example
+ ///
+ /// ```rust,ignore
+ /// let mut path = VirtualPath("/foo/bar".to_string());
+ /// path.pop();
+ /// assert_eq!(path.0, "/foo");
+ /// path.pop();
+ /// assert_eq!(path.0, "");
+ /// ```
+ fn pop(&mut self) -> bool {
+ let pos = match self.0.rfind('/') {
+ Some(pos) => pos,
+ None => return false,
+ };
+ self.0 = self.0[..pos].to_string();
+ true
+ }
+
+ /// Append the given *relative* path `path` to `self`.
+ ///
+ /// This will resolve any leading `"../"` in `path` before appending it.
+ ///
+ /// Returns [`None`] if `path` has more leading `"../"` than the number of
+ /// components in `self`.
+ ///
+ /// # Notes
+ ///
+ /// In practice, appending here means `self/path` as strings.
+ fn join(&self, mut path: &str) -> Option<VirtualPath> {
+ let mut res = self.clone();
+ while path.starts_with("../") {
+ if !res.pop() {
+ return None;
+ }
+ path = &path["../".len()..];
+ }
+ path = path.trim_start_matches("./");
+ res.0 = format!("{}/{}", res.0, path);
+ Some(res)
+ }
+
+ /// Returns `self`'s base name and file extension.
+ ///
+ /// # Returns
+ /// - `None` if `self` ends with `"//"`.
+ /// - `Some((name, None))` if `self`'s base contains no `.`, or only one `.` at
+ /// the start.
+    /// - `Some((name, Some(extension)))` otherwise.
+ ///
+ /// # Note
+    /// The extension will not contain `.`. This means `"/foo/bar.baz.rs"` will
+    /// return `Some(("bar.baz", Some("rs")))`.
+ fn name_and_extension(&self) -> Option<(&str, Option<&str>)> {
+ let file_path = if self.0.ends_with('/') { &self.0[..&self.0.len() - 1] } else { &self.0 };
+ let file_name = match file_path.rfind('/') {
+ Some(position) => &file_path[position + 1..],
+ None => file_path,
+ };
+
+ if file_name.is_empty() {
+ None
+ } else {
+ let mut file_stem_and_extension = file_name.rsplitn(2, '.');
+ let extension = file_stem_and_extension.next();
+ let file_stem = file_stem_and_extension.next();
+
+ match (file_stem, extension) {
+ (None, None) => None,
+ (None | Some(""), Some(_)) => Some((file_name, None)),
+ (Some(file_stem), extension) => Some((file_stem, extension)),
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path/tests.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path/tests.rs
new file mode 100644
index 000000000..510e021e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path/tests.rs
@@ -0,0 +1,30 @@
+use super::*;
+
+#[test]
+fn virtual_path_extensions() {
+ assert_eq!(VirtualPath("/".to_string()).name_and_extension(), None);
+ assert_eq!(
+ VirtualPath("/directory".to_string()).name_and_extension(),
+ Some(("directory", None))
+ );
+ assert_eq!(
+ VirtualPath("/directory/".to_string()).name_and_extension(),
+ Some(("directory", None))
+ );
+ assert_eq!(
+ VirtualPath("/directory/file".to_string()).name_and_extension(),
+ Some(("file", None))
+ );
+ assert_eq!(
+ VirtualPath("/directory/.file".to_string()).name_and_extension(),
+ Some((".file", None))
+ );
+ assert_eq!(
+ VirtualPath("/directory/.file.rs".to_string()).name_and_extension(),
+ Some((".file", Some("rs")))
+ );
+ assert_eq!(
+ VirtualPath("/directory/file.rs".to_string()).name_and_extension(),
+ Some(("file", Some("rs")))
+ );
+}
diff --git a/src/tools/rust-analyzer/docs/dev/README.md b/src/tools/rust-analyzer/docs/dev/README.md
new file mode 100644
index 000000000..76bbd1e91
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/README.md
@@ -0,0 +1,266 @@
+# Contributing Quick Start
+
+rust-analyzer is an ordinary Rust project, which is organized as a Cargo workspace, builds on stable and doesn't depend on C libraries.
+So, just
+
+```
+$ cargo test
+```
+
+should be enough to get you started!
+
+To learn more about how rust-analyzer works, see [./architecture.md](./architecture.md).
+It also explains the high-level layout of the source code.
+Do skim through that document.
+
+We also publish rustdoc docs to pages: https://rust-lang.github.io/rust-analyzer/ide/.
+Note though, that the internal documentation is very incomplete.
+
+Various organizational and process issues are discussed in this document.
+
+# Getting in Touch
+
+rust-analyzer is a part of the [RLS-2.0 working
+group](https://github.com/rust-lang/compiler-team/tree/6a769c13656c0a6959ebc09e7b1f7c09b86fb9c0/working-groups/rls-2.0).
+Discussion happens in this Zulip stream:
+
+https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
+
+# Issue Labels
+
+* [good-first-issue](https://github.com/rust-lang/rust-analyzer/labels/good%20first%20issue)
+ are good issues to get into the project.
+* [E-has-instructions](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-has-instructions)
+ issues have links to the code in question and tests.
+* [Broken Window](https://github.com/rust-lang/rust-analyzer/issues?q=is:issue+is:open+label:%22Broken+Window%22)
+ are issues which are not necessarily critical by themselves, but which should be fixed ASAP regardless, to avoid accumulation of technical debt.
+* [E-easy](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-easy),
+ [E-medium](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-medium),
+ [E-hard](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-hard),
+ [E-unknown](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-unknown),
+ labels are *estimates* for how hard would be to write a fix. Each triaged issue should have one of these labels.
+* [S-actionable](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AS-actionable) and
+ [S-unactionable](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AS-unactionable)
+ specify if there are concrete steps to resolve or advance an issue. Roughly, actionable issues need only work to be fixed,
+ while unactionable ones are blocked either on user feedback (providing a reproducible example), or on larger architectural
+ work or decisions. This classification is descriptive, not prescriptive, and might be wrong: Any unactionable issue might have a simple fix that we missed.
+ Each triaged issue should have one of these labels.
+* [fun](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3Afun)
+ is for cool, but probably hard stuff.
+* [Design](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3ADesign)
+ is for moderate/large scale architecture discussion.
+ Also a kind of fun.
+ These issues should generally include a link to a Zulip discussion thread.
+
+# Code Style & Review Process
+
+Do see [./style.md](./style.md).
+
+# Cookbook
+
+## CI
+
+We use GitHub Actions for CI.
+Most of the things, including formatting, are checked by `cargo test`.
+If `cargo test` passes locally, that's a good sign that CI will be green as well.
+The only exception is that some long-running tests are skipped locally by default.
+Use `env RUN_SLOW_TESTS=1 cargo test` to run the full suite.
+
+We use bors to enforce the [not rocket science](https://graydon2.dreamwidth.org/1597.html) rule.
+
+## Launching rust-analyzer
+
+Debugging the language server can be tricky.
+LSP is rather chatty, so driving it from the command line is not really feasible, driving it via VS Code requires interacting with two processes.
+
+For this reason, the best way to see how rust-analyzer works is to **find a relevant test and execute it**.
+VS Code & Emacs include an action for running a single test.
+
+Launching a VS Code instance with a locally built language server is also possible.
+There's **"Run Extension (Debug Build)"** launch configuration for this in VS Code.
+
+In general, I use one of the following workflows for fixing bugs and implementing features:
+
+If the problem concerns only internal parts of rust-analyzer (i.e. I don't need to touch the `rust-analyzer` crate or TypeScript code), there is a unit-test for it.
+So, I use **Rust Analyzer: Run** action in VS Code to run this single test, and then just do printf-driven development/debugging.
+As a sanity check after I'm done, I use `cargo xtask install --server` and **Reload Window** action in VS Code to verify that the thing works as I expect.
+
+If the problem concerns only the VS Code extension, I use **Run Installed Extension** launch configuration from `launch.json`.
+Notably, this uses the usual `rust-analyzer` binary from `PATH`.
+For this, it is important to have the following in your `settings.json` file:
+```json
+{
+ "rust-analyzer.server.path": "rust-analyzer"
+}
+```
+After I am done with the fix, I use `cargo xtask install --client` to try the new extension for real.
+
+If I need to fix something in the `rust-analyzer` crate, I feel sad because it's on the boundary between the two processes, and working there is slow.
+I usually just `cargo xtask install --server` and poke changes from my live environment.
+Note that this uses `--release`, which is usually faster overall, because loading stdlib into debug version of rust-analyzer takes a lot of time.
+To speed things up, sometimes I open a temporary hello-world project which has `"rust-analyzer.cargo.noSysroot": true` in `.vscode/settings.json`.
+This flag causes rust-analyzer to skip loading the sysroot, which greatly reduces the amount of things rust-analyzer needs to do, and makes printf's more useful.
+Note that you should only use the `eprint!` family of macros for debugging: stdout is used for LSP communication, and `print!` would break it.
+
+If I need to fix something simultaneously in the server and in the client, I feel even more sad.
+I don't have a specific workflow for this case.
+
+Additionally, I use `cargo run --release -p rust-analyzer -- analysis-stats path/to/some/rust/crate` to run a batch analysis.
+This is primarily useful for performance optimizations, or for bug minimization.
+
+## TypeScript Tests
+
+If you change files under `editors/code` and would like to run the tests and linter, install npm and run:
+
+```bash
+cd editors/code
+npm ci
+npm run lint
+```
+## How to ...
+
+* ... add an assist? [#7535](https://github.com/rust-lang/rust-analyzer/pull/7535)
+* ... add a new protocol extension? [#4569](https://github.com/rust-lang/rust-analyzer/pull/4569)
+* ... add a new configuration option? [#7451](https://github.com/rust-lang/rust-analyzer/pull/7451)
+* ... add a new completion? [#6964](https://github.com/rust-lang/rust-analyzer/pull/6964)
+* ... allow new syntax in the parser? [#7338](https://github.com/rust-lang/rust-analyzer/pull/7338)
+
+## Logging
+
+Logging is done by both rust-analyzer and VS Code, so it might be tricky to figure out where logs go.
+
+Inside rust-analyzer, we use the [`tracing`](https://docs.rs/tracing/) crate for logging,
+and [`tracing-subscriber`](https://docs.rs/tracing-subscriber) for logging frontend.
+By default, log goes to stderr, but the stderr itself is processed by VS Code.
+`--log-file <PATH>` CLI argument allows logging to file.
+Setting the `RA_LOG_FILE=<PATH>` environment variable will also log to file, it will also override `--log-file`.
+
+To see stderr in the running VS Code instance, go to the "Output" tab of the panel and select `rust-analyzer`.
+This shows `eprintln!` as well.
+Note that `stdout` is used for the actual protocol, so `println!` will break things.
+
+To log all communication between the server and the client, there are two choices:
+
+* You can log on the server side, by running something like
+ ```
+ env RA_LOG=lsp_server=debug code .
+ ```
+* You can log on the client side, by enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
+ These logs are shown in a separate tab in the output and could be used with LSP inspector.
+ Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
+
+
+There are also several VS Code commands which might be of interest:
+
+* `Rust Analyzer: Status` shows some memory-usage statistics.
+
+* `Rust Analyzer: Syntax Tree` shows syntax tree of the current file/selection.
+
+* `Rust Analyzer: View Hir` shows the HIR expressions within the function containing the cursor.
+
+ You can hover over syntax nodes in the opened text file to see the appropriate
+ rust code that it refers to and the rust editor will also highlight the proper
+ text range.
+
+ If you trigger Go to Definition in the inspected Rust source file,
+ the syntax tree read-only editor should scroll to and select the
+ appropriate syntax node token.
+
+ ![demo](https://user-images.githubusercontent.com/36276403/78225773-6636a480-74d3-11ea-9d9f-1c9d42da03b0.png)
+
+## Profiling
+
+We have a built-in hierarchical profiler, you can enable it by using `RA_PROFILE` env-var:
+
+```
+RA_PROFILE=* // dump everything
+RA_PROFILE=foo|bar|baz // enable only selected entries
+RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
+```
+
+In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
+
+We also have a "counting" profiler which counts number of instances of popular structs.
+It is enabled by `RA_COUNT=1`.
+
+To measure time for from-scratch analysis, use something like this:
+
+```
+$ cargo run --release -p rust-analyzer -- analysis-stats ../chalk/
+```
+
+For measuring time of incremental analysis, use either of these:
+
+```
+$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --highlight ../chalk/chalk-engine/src/logic.rs
+$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0
+```
+
+Look for `fn benchmark_xxx` tests for a quick way to reproduce performance problems.
+
+## Release Process
+
+Release process is handled by `release`, `dist` and `promote` xtasks, `release` being the main one.
+
+`release` assumes that you have checkouts of `rust-analyzer`, `rust-analyzer.github.io`, and `rust-lang/rust` in the same directory:
+
+```
+./rust-analyzer
+./rust-analyzer.github.io
+./rust-rust-analyzer # Note the name!
+```
+
+The remote for `rust-analyzer` must be called `upstream` (I use `origin` to point to my fork).
+In addition, for `xtask promote` (see below), `rust-rust-analyzer` must have a `rust-analyzer` remote pointing to this repository on GitHub.
+
+`release` makes GitHub API calls to scrape pull request comments and categorize them in the changelog.
+This step uses the `curl` and `jq` applications, which need to be available in `PATH`.
+Finally, you need to obtain a GitHub personal access token and set the `GITHUB_TOKEN` environment variable.
+
+Release steps:
+
+1. Set the `GITHUB_TOKEN` environment variable.
+2. Inside rust-analyzer, run `cargo xtask release`. This will:
+ * checkout the `release` branch
+ * reset it to `upstream/nightly`
+ * push it to `upstream`. This triggers GitHub Actions which:
+ * runs `cargo xtask dist` to package binaries and VS Code extension
+ * makes a GitHub release
+ * publishes the VS Code extension to the marketplace
+ * call the GitHub API for PR details
+ * create a new changelog in `rust-analyzer.github.io`
+3. While the release is in progress, fill in the changelog
+4. Commit & push the changelog
+5. Tweet
+6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree.
+ Self-approve the PR.
+
+If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console.
+If it fails because of something that needs to be fixed, remove the release tag (if needed), fix the problem, then start over.
+Make sure to remove the new changelog post created when running `cargo xtask release` a second time.
+
+We release "nightly" every night automatically and promote the latest nightly to "stable" manually, every week.
+
+We don't do "patch" releases, unless something truly egregious comes up.
+To do a patch release, cherry-pick the fix on top of the current `release` branch and push the branch.
+There's no need to write a changelog for a patch release, it's OK to include the notes about the fix into the next weekly one.
+Note: we tag releases by dates, releasing a patch release on the same day should work (by overwriting a tag), but I am not 100% sure.
+
+## Permissions
+
+There are three sets of people with extra permissions:
+
+* rust-analyzer GitHub organization [**admins**](https://github.com/orgs/rust-analyzer/people?query=role:owner) (which include current t-compiler leads).
+ Admins have full access to the org.
+* [**review**](https://github.com/orgs/rust-analyzer/teams/review) team in the organization.
+ Reviewers have `r+` access to all of organization's repositories and publish rights on crates.io.
+ They also have direct commit access, but all changes should go via the bors queue.
+ It's ok to self-approve if you think you know what you are doing!
+ bors should automatically sync the permissions.
+ Feel free to request a review or assign any PR to a reviewer with the relevant expertise to bring the work to their attention.
+ Don't feel pressured to review assigned PRs though.
+ If you don't feel like reviewing for whatever reason, someone else will pick the review up!
+* [**triage**](https://github.com/orgs/rust-analyzer/teams/triage) team in the organization.
+ This team can label and close issues.
+
+Note that at the time being you need to be a member of the org yourself to view the links.
diff --git a/src/tools/rust-analyzer/docs/dev/architecture.md b/src/tools/rust-analyzer/docs/dev/architecture.md
new file mode 100644
index 000000000..ea4035baf
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/architecture.md
@@ -0,0 +1,497 @@
+# Architecture
+
+This document describes the high-level architecture of rust-analyzer.
+If you want to familiarize yourself with the code base, you are just in the right place!
+
+You might also enjoy ["Explaining Rust Analyzer"](https://www.youtube.com/playlist?list=PLhb66M_x9UmrqXhQuIpWC5VgTdrGxMx3y) series on YouTube.
+It goes deeper than what is covered in this document, but will take some time to watch.
+
+See also these implementation-related blog posts:
+
+* https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html
+* https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html
+* https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html
+* https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html
+* https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html
+
+For older, by now mostly outdated stuff, see the [guide](./guide.md) and [another playlist](https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE).
+
+
+## Bird's Eye View
+
+![](https://user-images.githubusercontent.com/4789492/107129398-0ab70f00-687a-11eb-9bfc-d4eb023aec06.png)
+
+On the highest level, rust-analyzer is a thing which accepts input source code from the client and produces a structured semantic model of the code.
+
+More specifically, input data consists of a set of text files (`(PathBuf, String)` pairs) and information about project structure, captured in the so called `CrateGraph`.
+The crate graph specifies which files are crate roots, which cfg flags are specified for each crate and what dependencies exist between the crates.
+This is the input (ground) state.
+The analyzer keeps all this input data in memory and never does any IO.
+Because the input data is source code, which typically measures in tens of megabytes at most, keeping everything in memory is OK.
+
+A "structured semantic model" is basically an object-oriented representation of modules, functions and types which appear in the source code.
+This representation is fully "resolved": all expressions have types, all references are bound to declarations, etc.
+This is derived state.
+
+The client can submit a small delta of input data (typically, a change to a single file) and get a fresh code model which accounts for changes.
+
+The underlying engine makes sure that model is computed lazily (on-demand) and can be quickly updated for small modifications.
+
+## Entry Points
+
+`crates/rust-analyzer/src/bin/main.rs` contains the main function which spawns LSP.
+This is *the* entry point, but it front-loads a lot of complexity, so it's fine to just skim through it.
+
+`crates/rust-analyzer/src/handlers.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
+
+`Analysis` and `AnalysisHost` types define the main API for consumers of IDE services.
+
+## Code Map
+
+This section talks briefly about various important directories and data structures.
+Pay attention to the **Architecture Invariant** sections.
+They often talk about things which are deliberately absent in the source code.
+
+Note also which crates are **API Boundaries**.
+Remember, [rules at the boundary are different](https://www.tedinski.com/2018/02/06/system-boundaries.html).
+
+### `xtask`
+
+This is rust-analyzer's "build system".
+We use cargo to compile rust code, but there are also various other tasks, like release management or local installation.
+They are handled by Rust code in the xtask directory.
+
+### `editors/code`
+
+VS Code plugin.
+
+### `lib/`
+
+rust-analyzer independent libraries which we publish to crates.io.
+It's not heavily utilized at the moment.
+
+### `crates/parser`
+
+It is a hand-written recursive descent parser, which produces a sequence of events like "start node X", "finish node Y".
+It works similarly to
+[kotlin's parser](https://github.com/JetBrains/kotlin/blob/4d951de616b20feca92f3e9cc9679b2de9e65195/compiler/frontend/src/org/jetbrains/kotlin/parsing/KotlinParsing.java),
+which is a good source of inspiration for dealing with syntax errors and incomplete input.
+Original [libsyntax parser](https://github.com/rust-lang/rust/blob/6b99adeb11313197f409b4f7c4083c2ceca8a4fe/src/libsyntax/parse/parser.rs) is what we use for the definition of the Rust language.
+`TreeSink` and `TokenSource` traits bridge the tree-agnostic parser from `grammar` with `rowan` trees.
+
+**Architecture Invariant:** the parser is independent of the particular tree structure and particular representation of the tokens.
+It transforms one flat stream of events into another flat stream of events.
+Token independence allows us to parse out both text-based source code and `tt`-based macro input.
+Tree independence allows us to more easily vary the syntax tree implementation.
+It should also unlock efficient light-parsing approaches.
+For example, you can extract the set of names defined in a file (for typo correction) without building a syntax tree.
+
+**Architecture Invariant:** parsing never fails, the parser produces `(T, Vec<Error>)` rather than `Result<T, Error>`.
+
+### `crates/syntax`
+
+Rust syntax tree structure and parser.
+See [RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes.
+
+- [rowan](https://github.com/rust-analyzer/rowan) library is used for constructing syntax trees.
+- `ast` provides a type safe API on top of the raw `rowan` tree.
+- `ungrammar` description of the grammar, which is used to generate `syntax_kinds` and `ast` modules, using `cargo test -p xtask` command.
+
+Tests for ra_syntax are mostly data-driven.
+`test_data/parser` contains subdirectories with a bunch of `.rs` (test vectors) and `.txt` files with corresponding syntax trees.
+During testing, we check `.rs` against `.txt`.
+If the `.txt` file is missing, it is created (this is how you update tests).
+Additionally, running the xtask test suite with `cargo test -p xtask` will walk the grammar module and collect all `// test test_name` comments into files inside `test_data/parser/inline` directory.
+
+To update test data, run with `UPDATE_EXPECT` variable:
+
+```bash
+env UPDATE_EXPECT=1 cargo qt
+```
+
+After adding a new inline test you need to run `cargo test -p xtask` and also update the test data as described above.
+
+Note [`api_walkthrough`](https://github.com/rust-lang/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348)
+in particular: it shows off various methods of working with syntax tree.
+
+See [#93](https://github.com/rust-lang/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar.
+
+**Architecture Invariant:** `syntax` crate is completely independent from the rest of rust-analyzer. It knows nothing about salsa or LSP.
+This is important because it is possible to make useful tooling using only the syntax tree.
+Without semantic information, you don't need to be able to _build_ code, which makes the tooling more robust.
+See also https://web.stanford.edu/~mlfbrown/paper.pdf.
+You can view the `syntax` crate as an entry point to rust-analyzer.
+`syntax` crate is an **API Boundary**.
+
+**Architecture Invariant:** syntax tree is a value type.
+The tree is fully determined by the contents of its syntax nodes, it doesn't need global context (like an interner) and doesn't store semantic info.
+Using the tree as a store for semantic info is convenient in traditional compilers, but doesn't work nicely in the IDE.
+Specifically, assists and refactors require transforming syntax trees, and that becomes awkward if you need to do something with the semantic info.
+
+**Architecture Invariant:** syntax tree is built for a single file.
+This is to enable parallel parsing of all files.
+
+**Architecture Invariant:** Syntax trees are by design incomplete and do not enforce well-formedness.
+If an AST method returns an `Option`, it *can* be `None` at runtime, even if this is forbidden by the grammar.
+
+### `crates/base_db`
+
+We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and on-demand computation.
+Roughly, you can think of salsa as a key-value store, but it can also compute derived values using specified functions.
+The `base_db` crate provides basic infrastructure for interacting with salsa.
+Crucially, it defines most of the "input" queries: facts supplied by the client of the analyzer.
+Reading the docs of the `base_db::input` module should be useful: everything else is strictly derived from those inputs.
+
+**Architecture Invariant:** particularities of the build system are *not* the part of the ground state.
+In particular, `base_db` knows nothing about cargo.
+For example, `cfg` flags are a part of `base_db`, but `feature`s are not.
+A `foo` feature is a Cargo-level concept, which is lowered by Cargo to `--cfg feature=foo` argument on the command line.
+The `CrateGraph` structure is used to represent the dependencies between the crates abstractly.
+
+**Architecture Invariant:** `base_db` doesn't know about file system and file paths.
+Files are represented with opaque `FileId`, there's no operation to get an `std::path::Path` out of the `FileId`.
+
+### `crates/hir_expand`, `crates/hir_def`, `crates/hir_ty`
+
+These crates are the *brain* of rust-analyzer.
+This is the compiler part of the IDE.
+
+`hir_xxx` crates have a strong [ECS](https://en.wikipedia.org/wiki/Entity_component_system) flavor, in that they work with raw ids and directly query the database.
+There's little abstraction here.
+These crates integrate deeply with salsa and chalk.
+
+Name resolution, macro expansion and type inference all happen here.
+These crates also define various intermediate representations of the core.
+
+`ItemTree` condenses a single `SyntaxTree` into a "summary" data structure, which is stable over modifications to function bodies.
+
+`DefMap` contains the module tree of a crate and stores module scopes.
+
+`Body` stores information about expressions.
+
+**Architecture Invariant:** these crates are not, and will never be, an api boundary.
+
+**Architecture Invariant:** these crates explicitly care about being incremental.
+The core invariant we maintain is "typing inside a function's body never invalidates global derived data".
+i.e., if you change the body of `foo`, all facts about `bar` should remain intact.
+
+**Architecture Invariant:** hir exists only in context of particular crate instance with specific CFG flags.
+The same syntax may produce several instances of HIR if the crate participates in the crate graph more than once.
+
+### `crates/hir`
+
+The top-level `hir` crate is an **API Boundary**.
+If you think about "using rust-analyzer as a library", `hir` crate is most likely the façade you'll be talking to.
+
+It wraps ECS-style internal API into a more OO-flavored API (with an extra `db` argument for each call).
+
+**Architecture Invariant:** `hir` provides a static, fully resolved view of the code.
+While internal `hir_*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure.
+
+`hir` also handles the delicate task of going from syntax to the corresponding `hir`.
+Remember that the mapping here is one-to-many.
+See `Semantics` type and `source_to_def` module.
+
+Note in particular a curious recursive structure in `source_to_def`.
+We first resolve the parent _syntax_ node to the parent _hir_ element.
+Then we ask the _hir_ parent what _syntax_ children does it have.
+Then we look for our node in the set of children.
+
+This is the heart of many IDE features, like goto definition, which start with figuring out the hir node at the cursor.
+This is some kind of (yet unnamed) uber-IDE pattern, as it is present in Roslyn and Kotlin as well.
+
+### `crates/ide`
+
+The `ide` crate builds on top of `hir` semantic model to provide high-level IDE features like completion or goto definition.
+It is an **API Boundary**.
+If you want to use IDE parts of rust-analyzer via LSP, custom flatbuffers-based protocol or just as a library in your text editor, this is the right API.
+
+**Architecture Invariant:** `ide` crate's API is built out of POD types with public fields.
+The API uses editor's terminology, it talks about offsets and string labels rather than in terms of definitions or types.
+It is effectively the view in MVC and viewmodel in [MVVM](https://en.wikipedia.org/wiki/Model%E2%80%93view%E2%80%93viewmodel).
+All arguments and return types are conceptually serializable.
+In particular, syntax trees and hir types are generally absent from the API (but are used heavily in the implementation).
+Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at.
+
+`ide` is also the first crate which has the notion of change over time.
+`AnalysisHost` is a state to which you can transactionally `apply_change`.
+`Analysis` is an immutable snapshot of the state.
+
+Internally, `ide` is split across several crates. `ide_assists`, `ide_completion` and `ide_ssr` implement large isolated features.
+`ide_db` implements common IDE functionality (notably, reference search is implemented here).
+The `ide` contains a public API/façade, as well as implementation for a plethora of smaller features.
+
+**Architecture Invariant:** `ide` crate strives to provide a _perfect_ API.
+Although at the moment it has only one consumer, the LSP server, LSP *does not* influence its API design.
+Instead, we keep in mind a hypothetical _ideal_ client -- an IDE tailored specifically for rust, every nook and cranny of which is packed with Rust-specific goodies.
+
+### `crates/rust-analyzer`
+
+This crate defines the `rust-analyzer` binary, so it is the **entry point**.
+It implements the language server.
+
+**Architecture Invariant:** `rust-analyzer` is the only crate that knows about LSP and JSON serialization.
+If you want to expose a data structure `X` from ide to LSP, don't make it serializable.
+Instead, create a serializable counterpart in `rust-analyzer` crate and manually convert between the two.
+
+`GlobalState` is the state of the server.
+The `main_loop` defines the server event loop which accepts requests and sends responses.
+Requests that modify the state or might block user's typing are handled on the main thread.
+All other requests are processed in background.
+
+**Architecture Invariant:** the server is stateless, a-la HTTP.
+Sometimes state needs to be preserved between requests.
+For example, "what is the `edit` for the fifth completion item of the last completion edit?".
+For this, the second request should include enough info to re-create the context from scratch.
+This generally means including all the parameters of the original request.
+
+`reload` module contains the code that handles configuration and Cargo.toml changes.
+This is a tricky business.
+
+**Architecture Invariant:** `rust-analyzer` should be partially available even when the build is broken.
+Reloading process should not prevent IDE features from working.
+
+### `crates/toolchain`, `crates/project_model`, `crates/flycheck`
+
+These crates deal with invoking `cargo` to learn about project structure and get compiler errors for the "check on save" feature.
+
+They use `crates/path` heavily instead of `std::path`.
+A single `rust-analyzer` process can serve many projects, so it is important that server's current directory does not leak.
+
+### `crates/mbe`, `crates/tt`, `crates/proc_macro_api`, `crates/proc_macro_srv`
+
+These crates implement macros as token tree -> token tree transforms.
+They are independent from the rest of the code.
+
+`tt` crate defines `TokenTree`, a single token or a delimited sequence of token trees.
+`mbe` crate contains tools for transforming between syntax trees and token tree.
+And it also handles the actual parsing and expansion of declarative macro (a-la "Macros By Example" or mbe).
+
+For proc macros, the client-server model is used.
+We pass an argument `--proc-macro` to `rust-analyzer` binary to start a separate process (`proc_macro_srv`).
+And the client (`proc_macro_api`) provides an interface to talk to that server separately.
+
+Token trees are then passed from the client, and the server loads the corresponding dynamic library (which is built by `cargo`).
+And because the API for getting results from proc macros is always unstable in `rustc`,
+we maintain our own copy (and paste) of that part of code to allow us to build the whole thing in stable rust.
+
+ **Architecture Invariant:**
+Bad proc macros may panic or segfault accidentally, so we run them in a separate process and recover from fatal errors.
+They may also be non-deterministic, which conflicts with how `salsa` works, so special attention is required.
+
+### `crates/cfg`
+
+This crate is responsible for parsing, evaluation and general definition of `cfg` attributes.
+
+### `crates/vfs`, `crates/vfs-notify`
+
+These crates implement a virtual file system.
+They provide consistent snapshots of the underlying file system and insulate messy OS paths.
+
+**Architecture Invariant:** vfs doesn't assume a single unified file system.
+i.e., a single rust-analyzer process can act as a remote server for two different machines, where the same `/tmp/foo.rs` path points to different files.
+For this reason, all path APIs generally take some existing path as a "file system witness".
+
+### `crates/stdx`
+
+This crate contains various non-rust-analyzer specific utils, which could have been in std, as well
+as copies of unstable std items we would like to make use of already, like `std::str::split_once`.
+
+### `crates/profile`
+
+This crate contains utilities for CPU and memory profiling.
+
+
+## Cross-Cutting Concerns
+
+This section talks about the things which are everywhere and nowhere in particular.
+
+### Stability Guarantees
+
+One of the reasons rust-analyzer moves relatively fast is that we don't introduce new stability guarantees.
+Instead, as much as possible we leverage existing ones.
+
+Examples:
+
+* The `ide` API of rust-analyzer is explicitly unstable, but the LSP interface is stable, and here we just implement a stable API managed by someone else.
+* Rust language and Cargo are stable, and they are the primary inputs to rust-analyzer.
+* The `rowan` library is published to crates.io, but it is deliberately kept under `1.0` and always makes semver-incompatible upgrades
+
+Another important example is that rust-analyzer isn't run on CI, so, unlike `rustc` and `clippy`, it is actually ok for us to change runtime behavior.
+
+At some point we might consider opening up APIs or allowing crates.io libraries to include rust-analyzer specific annotations, but that's going to be a big commitment on our side.
+
+Exceptions:
+
+* `rust-project.json` is a de-facto stable format for non-cargo build systems.
+ It is probably ok enough, but was definitely stabilized implicitly.
+ Lesson for the future: when designing API which could become a stability boundary, don't wait for the first users until you stabilize it.
+ By the time you have first users, it is already de-facto stable.
+ And the users will first use the thing, and *then* inform you that now you have users.
+ The sad thing is that stuff should be stable before someone uses it for the first time, or it should contain explicit opt-in.
+* We ship some LSP extensions, and we try to keep those somewhat stable.
+ Here, we need to work with a finite set of editor maintainers, so not providing rock-solid guarantees works.
+
+### Code generation
+
+Some components in this repository are generated through automatic processes.
+Generated code is updated automatically on `cargo test`.
+Generated code is generally committed to the git repository.
+
+In particular, we generate:
+
+* API for working with syntax trees (`syntax::ast`, the [`ungrammar`](https://github.com/rust-analyzer/ungrammar) crate).
+* Various sections of the manual:
+
+ * features
+ * assists
+ * config
+
+* Documentation tests for assists
+
+See the `sourcegen` crate for details.
+
+**Architecture Invariant:** we avoid bootstrapping.
+For codegen we need to parse Rust code.
+Using rust-analyzer for that would work and would be fun, but it would also complicate the build process a lot.
+For that reason, we use syn and manual string parsing.
+
+### Cancellation
+
+Let's say that the IDE is in the process of computing syntax highlighting, when the user types `foo`.
+What should happen?
+`rust-analyzer`'s answer is that the highlighting process should be cancelled -- its results are now stale, and it also blocks modification of the inputs.
+
+The salsa database maintains a global revision counter.
+When applying a change, salsa bumps this counter and waits until all other threads using salsa finish.
+If a thread does salsa-based computation and notices that the counter is incremented, it panics with a special value (see `Canceled::throw`).
+That is, rust-analyzer requires unwinding.
+
+`ide` is the boundary where the panic is caught and transformed into a `Result<T, Cancelled>`.
+
+### Testing
+
+Rust Analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on.
+
+The outermost boundary is the `rust-analyzer` crate, which defines an LSP interface in terms of stdio.
+We do integration testing of this component, by feeding it with a stream of LSP requests and checking responses.
+These tests are known as "heavy", because they interact with Cargo and read real files from disk.
+For this reason, we try to avoid writing too many tests on this boundary: in a statically typed language, it's hard to make an error in the protocol itself if messages are themselves typed.
+Heavy tests are only run when `RUN_SLOW_TESTS` env var is set.
+
+The middle, and most important, boundary is `ide`.
+Unlike `rust-analyzer`, which exposes API, `ide` uses Rust API and is intended for use by various tools.
+A typical test creates an `AnalysisHost`, calls some `Analysis` functions and compares the results against expectation.
+
+The innermost and most elaborate boundary is `hir`.
+It has a much richer vocabulary of types than `ide`, but the basic testing setup is the same: we create a database, run some queries, assert result.
+
+For comparisons, we use the `expect` crate for snapshot testing.
+
+To test various analysis corner cases and avoid forgetting about old tests, we use so-called marks.
+See the `marks` module in the `test_utils` crate for more.
+
+**Architecture Invariant:** rust-analyzer tests do not use libcore or libstd.
+All required library code must be a part of the tests.
+This ensures fast test execution.
+
+**Architecture Invariant:** tests are data driven and do not test the API.
+Tests which directly call various API functions are a liability, because they make refactoring the API significantly more complicated.
+So most of the tests look like this:
+
+```rust
+#[track_caller]
+fn check(input: &str, expect: expect_test::Expect) {
+ // The single place that actually exercises a particular API
+}
+
+#[test]
+fn foo() {
+ check("foo", expect![["bar"]]);
+}
+
+#[test]
+fn spam() {
+ check("spam", expect![["eggs"]]);
+}
+// ...and a hundred more tests that don't care about the specific API at all.
+```
+
+To specify input data, we use a single string literal in a special format, which can describe a set of rust files.
+See the `Fixture` type and its module for fixture examples and documentation.
+
+**Architecture Invariant:** all code invariants are tested by `#[test]` tests.
+There are no additional checks in CI; formatting and tidy tests are run with `cargo test`.
+
+**Architecture Invariant:** tests do not depend on any kind of external resources, they are perfectly reproducible.
+
+
+### Performance Testing
+
+TBA, take a look at the `metrics` xtask and `#[test] fn benchmark_xxx()` functions.
+
+### Error Handling
+
+**Architecture Invariant:** core parts of rust-analyzer (`ide`/`hir`) don't interact with the outside world and thus can't fail.
+Only parts touching LSP are allowed to do IO.
+
+Internals of rust-analyzer need to deal with broken code, but this is not an error condition.
+rust-analyzer is robust: various analysis compute `(T, Vec<Error>)` rather than `Result<T, Error>`.
+
+rust-analyzer is a complex long-running process.
+It will always have bugs and panics.
+But a panic in an isolated feature should not bring down the whole process.
+Each LSP-request is protected by a `catch_unwind`.
+We use `always` and `never` macros instead of `assert` to gracefully recover from impossible conditions.
+
+### Observability
+
+rust-analyzer is a long-running process, so it is important to understand what's going on inside.
+We have several instruments for that.
+
+The event loop that runs rust-analyzer is very explicit.
+Rather than spawning futures or scheduling callbacks (open), the event loop accepts an `enum` of possible events (closed).
+It's easy to see all the things that trigger rust-analyzer processing, together with their performance.
+
+rust-analyzer includes a simple hierarchical profiler (`hprof`).
+It is enabled with `RA_PROFILE='*>50'` env var (log all (`*`) actions which take more than `50` ms) and produces output like:
+
+```
+85ms - handle_completion
+ 68ms - import_on_the_fly
+ 67ms - import_assets::search_for_relative_paths
+ 0ms - crate_def_map:wait (804 calls)
+ 0ms - find_path (16 calls)
+ 2ms - find_similar_imports (1 calls)
+ 0ms - generic_params_query (334 calls)
+ 59ms - trait_solve_query (186 calls)
+ 0ms - Semantics::analyze_impl (1 calls)
+ 1ms - render_resolution (8 calls)
+ 0ms - Semantics::analyze_impl (5 calls)
+```
+
+This is cheap enough to enable in production.
+
+
+Similarly, we save live object counting (`RA_COUNT=1`).
+It is not cheap enough to enable in prod, and this is a bug which should be fixed.
+
+### Configurability
+
+rust-analyzer strives to be as configurable as possible while offering reasonable defaults where no configuration exists yet.
+There will always be features that some people find more annoying than helpful, so giving the users the ability to tweak or disable these is a big part of offering a good user experience.
+Mind the code--architecture gap: at the moment, we are using fewer feature flags than we really should.
+
+### Serialization
+
+In Rust, it is easy (often too easy) to add serialization to any type by adding `#[derive(Serialize)]`.
+This easiness is misleading -- serializable types impose significant backwards compatibility constraints.
+If a type is serializable, then it is a part of some IPC boundary.
+You often don't control the other side of this boundary, so changing serializable types is hard.
+
+For this reason, the types in `ide`, `base_db` and below are not serializable by design.
+If such types need to cross an IPC boundary, then the client of rust-analyzer needs to provide custom, client-specific serialization format.
+This isolates backwards compatibility and migration concerns to a specific client.
+
+For example, `rust-project.json` is its own format -- it doesn't include `CrateGraph` as is.
+Instead, it creates a `CrateGraph` by calling appropriate constructing functions.
diff --git a/src/tools/rust-analyzer/docs/dev/debugging.md b/src/tools/rust-analyzer/docs/dev/debugging.md
new file mode 100644
index 000000000..48caec1d8
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/debugging.md
@@ -0,0 +1,99 @@
+# Debugging VSCode plugin and the language server
+
+## Prerequisites
+
+- Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb).
+- Open the root folder in VSCode. Here you can access the preconfigured debug setups.
+
+ <img height=150px src="https://user-images.githubusercontent.com/36276403/74611090-92ec5380-5101-11ea-8a41-598f51f3f3e3.png" alt="Debug options view">
+
+- Install all TypeScript dependencies
+ ```bash
+ cd editors/code
+ npm ci
+ ```
+
+## Common knowledge
+
+* All debug configurations open a new `[Extension Development Host]` VSCode instance
+where **only** the `rust-analyzer` extension being debugged is enabled.
+* To activate the extension you need to open any Rust project folder in `[Extension Development Host]`.
+
+
+## Debug TypeScript VSCode extension
+
+- `Run Installed Extension` - runs the extension with the globally installed `rust-analyzer` binary.
+- `Run Extension (Debug Build)` - runs extension with the locally built LSP server (`target/debug/rust-analyzer`).
+
+TypeScript debugging is configured to watch your source edits and recompile.
+To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</kbd> and run the following command in your `[Extension Development Host]`
+
+```
+> Developer: Reload Window
+```
+
+## Debug Rust LSP server
+
+- When attaching a debugger to an already running `rust-analyzer` server on Linux you might need to enable `ptrace` for unrelated processes by running:
+
+ ```
+ echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope
+ ```
+
+
+- By default, the LSP server is built without debug information. To enable it, you'll need to change `Cargo.toml`:
+ ```toml
+ [profile.dev]
+ debug = 2
+ ```
+
+- Select `Run Extension (Debug Build)` to run your locally built `target/debug/rust-analyzer`.
+
+- In the original VSCode window once again select the `Attach To Server` debug configuration.
+
+- A list of running processes should appear. Select the `rust-analyzer` from this repo.
+
+- Navigate to `crates/rust-analyzer/src/main_loop.rs` and add a breakpoint to the `on_request` function.
+
+- Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
+
+If you need to debug the server from the very beginning, including its initialization code, you can use the `--wait-dbg` command line argument or `RA_WAIT_DBG` environment variable. The server will spin at the beginning of the `try_main` function (see `crates\rust-analyzer\src\bin\main.rs`)
+```rust
+ let mut d = 4;
+ while d == 4 { // set a breakpoint here and change the value
+ d = 4;
+ }
+```
+
+However for this to work, you will need to enable debug_assertions in your build
+```rust
+RUSTFLAGS='--cfg debug_assertions' cargo build --release
+```
+
+## Demo
+
+- [Debugging TypeScript VScode extension](https://www.youtube.com/watch?v=T-hvpK6s4wM).
+- [Debugging Rust LSP server](https://www.youtube.com/watch?v=EaNb5rg4E0M).
+
+## Troubleshooting
+
+### Can't find the `rust-analyzer` process
+
+It could be a case of just jumping the gun.
+
+The `rust-analyzer` server is only started once the `onLanguage:rust` activation event fires.
+
+Make sure you open a rust file in the `[Extension Development Host]` and try again.
+
+### Can't connect to `rust-analyzer`
+
+Make sure you have run `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`.
+
+By default this should reset back to 1 every time you log in.
+
+### Breakpoints are never being hit
+
+Check your version of `lldb`. If it's version 6 and lower, use the `classic` adapter type.
+It's `lldb.adapterType` in settings file.
+
+If you're running `lldb` version 7, change the lldb adapter type to `bundled` or `native`.
diff --git a/src/tools/rust-analyzer/docs/dev/guide.md b/src/tools/rust-analyzer/docs/dev/guide.md
new file mode 100644
index 000000000..47ae3f3e6
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/guide.md
@@ -0,0 +1,573 @@
+# Guide to rust-analyzer
+
+## About the guide
+
+This guide describes the current state of rust-analyzer as of 2019-01-20 (git
+tag [guide-2019-01]). Its purpose is to document various problems and
+architectural solutions related to the problem of building IDE-first compiler
+for Rust. There is a video version of this guide as well:
+https://youtu.be/ANKBNiSWyfc.
+
+[guide-2019-01]: https://github.com/rust-lang/rust-analyzer/tree/guide-2019-01
+
+## The big picture
+
+On the highest possible level, rust-analyzer is a stateful component. A client may
+apply changes to the analyzer (new contents of `foo.rs` file is "fn main() {}")
+and it may ask semantic questions about the current state (what is the
+definition of the identifier with offset 92 in file `bar.rs`?). Two important
+properties hold:
+
+* Analyzer does not do any I/O. It starts in an empty state and all input data is
+ provided via `apply_change` API.
+
+* Only queries about the current state are supported. One can, of course,
+ simulate undo and redo by keeping a log of changes and inverse changes respectively.
+
+## IDE API
+
+To see the bigger picture of how the IDE features work, let's take a look at the [`AnalysisHost`] and
+[`Analysis`] pair of types. `AnalysisHost` has three methods:
+
+* `default()` for creating an empty analysis instance
+* `apply_change(&mut self)` to make changes (this is how you get from an empty
+ state to something interesting)
+* `analysis(&self)` to get an instance of `Analysis`
+
+`Analysis` has a ton of methods for IDEs, like `goto_definition`, or
+`completions`. Both inputs and outputs of `Analysis`' methods are formulated in
+terms of files and offsets, and **not** in terms of Rust concepts like structs,
+traits, etc. The "typed" API with Rust specific types is slightly lower in the
+stack, we'll talk about it later.
+
+[`AnalysisHost`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L265-L284
+[`Analysis`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L291-L478
+
+The reason for this separation of `Analysis` and `AnalysisHost` is that we want to apply
+changes "uniquely", but we might also want to fork an `Analysis` and send it to
+another thread for background processing. That is, there is only a single
+`AnalysisHost`, but there may be several (equivalent) `Analysis`.
+
+Note that all of the `Analysis` API return `Cancellable<T>`. This is required to
+be responsive in an IDE setting. Sometimes a long-running query is being computed
+and the user types something in the editor and asks for completion. In this
+case, we cancel the long-running computation (so it returns `Err(Cancelled)`),
+apply the change and execute request for completion. We never use stale data to
+answer requests. Under the cover, `AnalysisHost` "remembers" all outstanding
+`Analysis` instances. The `AnalysisHost::apply_change` method cancels all
+`Analysis`es, blocks until all of them are `Dropped` and then applies changes
+in-place. This may be familiar to Rustaceans who use read-write locks for interior
+mutability.
+
+Next, let's talk about what the inputs to the `Analysis` are, precisely.
+
+## Inputs
+
+Rust Analyzer never does any I/O itself, all inputs get passed explicitly via
+the `AnalysisHost::apply_change` method, which accepts a single argument, a
+`Change`. [`Change`] is a builder for a single change
+"transaction", so it suffices to study its methods to understand all of the
+input data.
+
+[`Change`]: https://github.com/rust-lang/rust-analyzer/blob/master/crates/base_db/src/change.rs#L14-L89
+
+The `(add|change|remove)_file` methods control the set of the input files, where
+each file has an integer id (`FileId`, picked by the client), text (`String`)
+and a filesystem path. Paths are tricky; they'll be explained below, in source roots
+section, together with the `add_root` method. The `add_library` method allows us to add a
+group of files which are assumed to rarely change. It's mostly an optimization
+and does not change the fundamental picture.
+
+The `set_crate_graph` method allows us to control how the input files are partitioned
+into compilation units -- crates. It also controls (in theory, not implemented
+yet) `cfg` flags. `CrateGraph` is a directed acyclic graph of crates. Each crate
+has a root `FileId`, a set of active `cfg` flags and a set of dependencies. Each
+dependency is a pair of a crate and a name. It is possible to have two crates
+with the same root `FileId` but different `cfg`-flags/dependencies. This model
+is lower than Cargo's model of packages: each Cargo package consists of several
+targets, each of which is a separate crate (or several crates, if you try
+different feature combinations).
+
+Procedural macros should become inputs as well, but currently they are not
+supported. Procedural macro will be a black box `Box<dyn Fn(TokenStream) -> TokenStream>`
+function, and will be inserted into the crate graph just like dependencies.
+
+Soon we'll talk how we build an LSP server on top of `Analysis`, but first,
+let's deal with that paths issue.
+
+## Source roots (a.k.a. "Filesystems are horrible")
+
+This is a non-essential section, feel free to skip.
+
+The previous section said that the filesystem path is an attribute of a file,
+but this is not the whole truth. Making it an absolute `PathBuf` will be bad for
+several reasons. First, filesystems are full of (platform-dependent) edge cases:
+
+* It's hard (requires a syscall) to decide if two paths are equivalent.
+* Some filesystems are case-insensitive (e.g. macOS by default).
+* Paths are not necessarily UTF-8.
+* Symlinks can form cycles.
+
+Second, this might hurt the reproducibility and hermeticity of builds. In theory,
+moving a project from `/foo/bar/my-project` to `/spam/eggs/my-project` should
+not change a bit in the output. However, if the absolute path is a part of the
+input, it is at least in theory observable, and *could* affect the output.
+
+Yet another problem is that we really *really* want to avoid doing I/O, but with
+Rust the set of "input" files is not necessarily known up-front. In theory, you
+can have `#[path="/dev/random"] mod foo;`.
+
+To solve (or explicitly refuse to solve) these problems rust-analyzer uses the
+concept of a "source root". Roughly speaking, source roots are the contents of a
+directory on a file systems, like `/home/matklad/projects/rustraytracer/**.rs`.
+
+More precisely, all files (`FileId`s) are partitioned into disjoint
+`SourceRoot`s. Each file has a relative UTF-8 path within the `SourceRoot`.
+`SourceRoot` has an identity (integer ID). Crucially, the root path of the
+source root itself is unknown to the analyzer: A client is supposed to maintain a
+mapping between `SourceRoot` IDs (which are assigned by the client) and actual
+`PathBuf`s. `SourceRoot`s give a sane tree model of the file system to the
+analyzer.
+
+Note that `mod`, `#[path]` and `include!()` can only reference files from the
+same source root. It is of course possible to explicitly add extra files to
+the source root, even `/dev/random`.
+
+## Language Server Protocol
+
+Now let's see how the `Analysis` API is exposed via the JSON RPC based language server protocol. The
+hard part here is managing changes (which can come either from the file system
+or from the editor) and concurrency (we want to spawn background jobs for things
+like syntax highlighting). We use the event loop pattern to manage the zoo, and
+the loop is the [`main_loop_inner`] function. The [`main_loop`] does a one-time
+initialization and tearing down of the resources.
+
+[`main_loop`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L51-L110
+[`main_loop_inner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L156-L258
+
+
+Let's walk through a typical analyzer session!
+
+First, we need to figure out what to analyze. To do this, we run `cargo
+metadata` to learn about Cargo packages for current workspace and dependencies,
+and we run `rustc --print sysroot` and scan the "sysroot" (the directory containing the current Rust toolchain's files) to learn about crates like
+`std`. Currently we load this configuration once at the start of the server, but
+it should be possible to dynamically reconfigure it later without restart.
+
+[main_loop.rs#L62-L70](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L62-L70)
+
+The [`ProjectModel`] we get after this step is very Cargo and sysroot specific,
+it needs to be lowered to get the input in the form of `Change`. This
+happens in [`ServerWorldState::new`] method. Specifically
+
+* Create a `SourceRoot` for each Cargo package and sysroot.
+* Schedule a filesystem scan of the roots.
+* Create an analyzer's `Crate` for each Cargo **target** and sysroot crate.
+* Setup dependencies between the crates.
+
+[`ProjectModel`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/project_model.rs#L16-L20
+[`ServerWorldState::new`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L38-L160
+
+The results of the scan (which may take a while) will be processed in the body
+of the main loop, just like any other change. Here's where we handle:
+
+* [File system changes](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L194)
+* [Changes from the editor](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L377)
+
+After a single loop's turn, we group the changes into one `Change` and
+[apply] it. This always happens on the main thread and blocks the loop.
+
+[apply]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L216
+
+To handle requests, like ["goto definition"], we create an instance of the
+`Analysis` and [`schedule`] the task (which consumes `Analysis`) on the
+threadpool. [The task] calls the corresponding `Analysis` method, while
+massaging the types into the LSP representation. Keep in mind that if we are
+executing "goto definition" on the threadpool and a new change comes in, the
+task will be canceled as soon as the main loop calls `apply_change` on the
+`AnalysisHost`.
+
+["goto definition"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L216
+[`schedule`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L426-L455
+[The task]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop/handlers.rs#L205-L223
+
+This concludes the overview of the analyzer's programming *interface*. Next, let's
+dig into the implementation!
+
+## Salsa
+
+The most straightforward way to implement an "apply change, get analysis, repeat"
+API would be to maintain the input state and to compute all possible analysis
+information from scratch after every change. This works, but scales poorly with
+the size of the project. To make this fast, we need to take advantage of the
+fact that most of the changes are small, and that analysis results are unlikely
+to change significantly between invocations.
+
+To do this we use [salsa]: a framework for incremental on-demand computation.
+You can skip the rest of the section if you are familiar with `rustc`'s red-green
+algorithm (which is used for incremental compilation).
+
+[salsa]: https://github.com/salsa-rs/salsa
+
+It's better to refer to salsa's docs to learn about it. Here's a small excerpt:
+
+The key idea of salsa is that you define your program as a set of queries. Every
+query is used like a function `K -> V` that maps from some key of type `K` to a value
+of type `V`. Queries come in two basic varieties:
+
+* **Inputs**: the base inputs to your system. You can change these whenever you
+ like.
+
+* **Functions**: pure functions (no side effects) that transform your inputs
+ into other values. The results of queries are memoized to avoid recomputing
+ them a lot. When you make changes to the inputs, we'll figure out (fairly
+ intelligently) when we can re-use these memoized values and when we have to
+ recompute them.
+
+For further discussion, it's important to understand one bit of "fairly
+intelligently". Suppose we have two functions, `f1` and `f2`, and one input,
+`i`. We call `f1(X)` which in turn calls `f2(Y)` which inspects `i(Z)`. `i(Z)`
+returns some value `V1`, `f2` uses that and returns `R1`, `f1` uses that and
+returns `O`. Now, let's change `i` at `Z` to `V2` from `V1` and try to compute
+`f1(X)` again. Because `f1(X)` (transitively) depends on `i(Z)`, we can't just
+reuse its value as is. However, if `f2(Y)` is *still* equal to `R1` (despite
+`i`'s change), we, in fact, *can* reuse `O` as result of `f1(X)`. And that's how
+salsa works: it recomputes results in *reverse* order, starting from inputs and
+progressing towards outputs, stopping as soon as it sees an intermediate value
+that hasn't changed. If this sounds confusing to you, don't worry: it is
+confusing. This illustration by @killercup might help:
+
+<img alt="step 1" src="https://user-images.githubusercontent.com/1711539/51460907-c5484780-1d6d-11e9-9cd2-d6f62bd746e0.png" width="50%">
+
+<img alt="step 2" src="https://user-images.githubusercontent.com/1711539/51460915-c9746500-1d6d-11e9-9a77-27d33a0c51b5.png" width="50%">
+
+<img alt="step 3" src="https://user-images.githubusercontent.com/1711539/51460920-cda08280-1d6d-11e9-8d96-a782aa57a4d4.png" width="50%">
+
+<img alt="step 4" src="https://user-images.githubusercontent.com/1711539/51460927-d1340980-1d6d-11e9-851e-13c149d5c406.png" width="50%">
+
+## Salsa Input Queries
+
+All analyzer information is stored in a salsa database. `Analysis` and
+`AnalysisHost` types are newtype wrappers for [`RootDatabase`] -- a salsa
+database.
+
+[`RootDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/db.rs#L88-L134
+
+Salsa input queries are defined in [`FilesDatabase`] (which is a part of
+`RootDatabase`). They closely mirror the familiar `Change` structure:
+indeed, what `apply_change` does is it sets the values of input queries.
+
+[`FilesDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/base_db/src/input.rs#L150-L174
+
+## From text to semantic model
+
+The bulk of the rust-analyzer is transforming input text into a semantic model of
+Rust code: a web of entities like modules, structs, functions and traits.
+
+An important fact to realize is that (unlike most other languages like C# or
+Java) there is not a one-to-one mapping between the source code and the semantic model. A
+single function definition in the source code might result in several semantic
+functions: for example, the same source file might get included as a module in
+several crates or a single crate might be present in the compilation DAG
+several times, with different sets of `cfg`s enabled. The IDE-specific task of
+mapping source code into a semantic model is inherently imprecise for
+this reason and gets handled by the [`source_binder`].
+
+[`source_binder`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/source_binder.rs
+
+The semantic interface is declared in the [`code_model_api`] module. Each entity is
+identified by an integer ID and has a bunch of methods which take a salsa database
+as an argument and returns other entities (which are also IDs). Internally, these
+methods invoke various queries on the database to build the model on demand.
+Here's [the list of queries].
+
+[`code_model_api`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/code_model_api.rs
+[the list of queries]: https://github.com/rust-lang/rust-analyzer/blob/7e84440e25e19529e4ff8a66e521d1b06349c6ec/crates/hir/src/db.rs#L20-L106
+
+The first step of building the model is parsing the source code.
+
+## Syntax trees
+
+An important property of the Rust language is that each file can be parsed in
+isolation. Unlike, say, `C++`, an `include` can't change the meaning of the
+syntax. For this reason, rust-analyzer can build a syntax tree for each "source
+file", which could then be reused by several semantic models if this file
+happens to be a part of several crates.
+
+The representation of syntax trees that rust-analyzer uses is similar to that of `Roslyn`
+and Swift's new [libsyntax]. Swift's docs give an excellent overview of the
+approach, so I skip this part here and instead outline the main characteristics
+of the syntax trees:
+
+* Syntax trees are fully lossless. Converting **any** text to a syntax tree and
+ back is a total identity function. All whitespace and comments are explicitly
+ represented in the tree.
+
+* Syntax nodes have generic `(next|previous)_sibling`, `parent`,
+ `(first|last)_child` functions. You can get from any one node to any other
+ node in the file using only these functions.
+
+* Syntax nodes know their range (start offset and length) in the file.
+
+* Syntax nodes share the ownership of their syntax tree: if you keep a reference
+ to a single function, the whole enclosing file is alive.
+
+* Syntax trees are immutable and the cost of replacing the subtree is
+ proportional to the depth of the subtree. Read Swift's docs to learn how
+ immutable + parent pointers + cheap modification is possible.
+
+* Syntax trees are built on a best-effort basis. All accessor methods return
+ `Option`s. The tree for `fn foo` will contain a function declaration with
+ `None` for parameter list and body.
+
+* Syntax trees do not know the file they are built from, they only know about
+ the text.
+
+The implementation is based on the generic [rowan] crate on top of which a
+[rust-specific] AST is generated.
+
+[libsyntax]: https://github.com/apple/swift/tree/5e2c815edfd758f9b1309ce07bfc01c4bc20ec23/lib/Syntax
+[rowan]: https://github.com/rust-analyzer/rowan/tree/100a36dc820eb393b74abe0d20ddf99077b61f88
+[rust-specific]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_syntax/src/ast/generated.rs
+
+The next step in constructing the semantic model is ...
+
+## Building a Module Tree
+
+The algorithm for building a tree of modules is to start with a crate root
+(remember, each `Crate` from a `CrateGraph` has a `FileId`), collect all `mod`
+declarations and recursively process child modules. This is handled by the
+[`module_tree_query`], with two slight variations.
+
+[`module_tree_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L116-L123
+
+First, rust-analyzer builds a module tree for all crates in a source root
+simultaneously. The main reason for this is historical (`module_tree` predates
+`CrateGraph`), but this approach also enables accounting for files which are not
+part of any crate. That is, if you create a file but do not include it as a
+submodule anywhere, you still get semantic completion, and you get a warning
+about a free-floating module (the actual warning is not implemented yet).
+
+The second difference is that `module_tree_query` does not *directly* depend on
+the "parse" query (which is confusingly called `source_file`). Why would calling
+the parse directly be bad? Suppose the user changes the file slightly, by adding
+an insignificant whitespace. Adding whitespace changes the parse tree (because
+it includes whitespace), and that means recomputing the whole module tree.
+
+We deal with this problem by introducing an intermediate [`submodules_query`].
+This query processes the syntax tree and extracts a set of declared submodule
+names. Now, changing the whitespace results in `submodules_query` being
+re-executed for a *single* module, but because the result of this query stays
+the same, we don't have to re-execute [`module_tree_query`]. In fact, we only
+need to re-execute it when we add/remove new files or when we change mod
+declarations.
+
+[`submodules_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L41
+
+We store the resulting modules in a `Vec`-based indexed arena. The indices in
+the arena become module IDs. And this brings us to the next topic:
+assigning IDs in the general case.
+
+## Location Interner pattern
+
+One way to assign IDs is how we've dealt with modules: Collect all items into a
+single array in some specific order and use the index in the array as an ID. The
+main drawback of this approach is that these IDs are not stable: Adding a new item can
+shift the IDs of all other items. This works for modules, because adding a module is
+a comparatively rare operation, but would be less convenient for, for example,
+functions.
+
+Another solution here is positional IDs: We can identify a function as "the
+function with name `foo` in a ModuleId(92) module". Such locations are stable:
+adding a new function to the module (unless it is also named `foo`) does not
+change the location. However, such "ID" types cease to be a `Copy`able integer and in
+general can become pretty large if we account for nesting (for example: "third parameter of
+the `foo` function of the `bar` `impl` in the `baz` module").
+
+[`LocationInterner`] allows us to combine the benefits of positional and numeric
+IDs. It is a bidirectional append-only map between locations and consecutive
+integers which can "intern" a location and return an integer ID back. The salsa
+database we use includes a couple of [interners]. How to "garbage collect"
+unused locations is an open question.
+
+[`LocationInterner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/base_db/src/loc2id.rs#L65-L71
+[interners]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/db.rs#L22-L23
+
+For example, we use `LocationInterner` to assign IDs to definitions of functions,
+structs, enums, etc. The location, [`DefLoc`] contains two bits of information:
+
+* the ID of the module which contains the definition,
+* the ID of the specific item in the module's source code.
+
+We "could" use a text offset for the location of a particular item, but that would play
+badly with salsa: offsets change after edits. So, as a rule of thumb, we avoid
+using offsets, text ranges or syntax trees as keys and values for queries. What
+we do instead is we store "index" of the item among all of the items of a file
+(so, a positional based ID, but localized to a single file).
+
+[`DefLoc`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L127-L139
+
+One thing we've glossed over for the time being is support for macros. We have
+only proof of concept handling of macros at the moment, but they are extremely
+interesting from an "assigning IDs" perspective.
+
+## Macros and recursive locations
+
+The tricky bit about macros is that they effectively create new source files.
+While we can use `FileId`s to refer to original files, we can't just assign them
+willy-nilly to the pseudo files of macro expansion. Instead, we use a special
+ID, [`HirFileId`] to refer to either a usual file or a macro-generated file:
+
+```rust
+enum HirFileId {
+ FileId(FileId),
+ Macro(MacroCallId),
+}
+```
+
+`MacroCallId` is an interned ID that specifies a particular macro invocation.
+Its `MacroCallLoc` contains:
+
+* `ModuleId` of the containing module
+* `HirFileId` of the containing file or pseudo file
+* an index of this particular macro invocation in this file (positional id
+ again).
+
+Note how `HirFileId` is defined in terms of `MacroCallLoc` which is defined in
+terms of `HirFileId`! This does not recur infinitely though: any chain of
+`HirFileId`s bottoms out in `HirFileId::FileId`, that is, some source file
+actually written by the user.
+
+[`HirFileId`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L18-L125
+
+Now that we understand how to identify a definition, in a source or in a
+macro-generated file, we can discuss name resolution a bit.
+
+## Name resolution
+
+Name resolution faces the same problem as the module tree: if we look at the
+syntax tree directly, we'll have to recompute name resolution after every
+modification. The solution to the problem is the same: We [lower] the source code of
+each module into a position-independent representation which does not change if
+we modify bodies of the items. After that we [loop] resolving all imports until
+we've reached a fixed point.
+
+[lower]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L113-L117
+[loop]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres.rs#L186-L196
+
+And, given all our preparation with IDs and a position-independent representation,
+it is satisfying to [test] that typing inside function body does not invalidate
+name resolution results.
+
+[test]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/tests.rs#L376
+
+An interesting fact about name resolution is that it "erases" all of the
+intermediate paths from the imports: in the end, we know which items are defined
+and which items are imported in each module, but, if the import was `use
+foo::bar::baz`, we deliberately forget what modules `foo` and `bar` resolve to.
+
+To serve "goto definition" requests on intermediate segments we need this info
+in the IDE, however. Luckily, we need it only for a tiny fraction of imports, so we just ask
+the module explicitly, "What does the path `foo::bar` resolve to?". This is a
+general pattern: we try to compute the minimal possible amount of information
+during analysis while allowing IDE to ask for additional specific bits.
+
+Name resolution is also a good place to introduce another salsa pattern used
+throughout the analyzer:
+
+## Source Map pattern
+
+Due to an obscure edge case in completion, IDE needs to know the syntax node of
+a use statement which imported the given completion candidate. We can't just
+store the syntax node as a part of name resolution: this will break
+incrementality, due to the fact that syntax changes after every file
+modification.
+
+We solve this problem during the lowering step of name resolution. The lowering
+query actually produces a *pair* of outputs: `LoweredModule` and [`SourceMap`].
+The `LoweredModule` module contains [imports], but in a position-independent form.
+The `SourceMap` contains a mapping from position-independent imports to
+(position-dependent) syntax nodes.
+
+The result of this basic lowering query changes after every modification. But
+there's an intermediate [projection query] which returns only the first
+position-independent part of the lowering. The result of this query is stable.
+Naturally, name resolution [uses] this stable projection query.
+
+[imports]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59
+[`SourceMap`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59
+[projection query]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L97-L103
+[uses]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/query_definitions.rs#L49
+
+## Type inference
+
+First of all, implementation of type inference in rust-analyzer was spearheaded
+by [@flodiebold]. [#327] was an awesome Christmas present, thank you, Florian!
+
+Type inference runs on per-function granularity and uses the patterns we've
+discussed previously.
+
+First, we [lower the AST] of a function body into a position-independent
+representation. In this representation, each expression is assigned a
+[positional ID]. Alongside the lowered expression, [a source map] is produced,
+which maps between expression ids and original syntax. This lowering step also
+deals with "incomplete" source trees by replacing missing expressions by an
+explicit `Missing` expression.
+
+Given the lowered body of the function, we can now run [type inference] and
+construct a mapping from `ExprId`s to types.
+
+[@flodiebold]: https://github.com/flodiebold
+[#327]: https://github.com/rust-lang/rust-analyzer/pull/327
+[lower the AST]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs
+[positional ID]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L13-L15
+[a source map]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L41-L44
+[type inference]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ty.rs#L1208-L1223
+
+## Tying it all together: completion
+
+To conclude the overview of the rust-analyzer, let's trace the request for
+(type-inference powered!) code completion!
+
+We start by [receiving a message] from the language client. We decode the
+message as a request for completion and [schedule it on the threadpool]. This is
+the place where we [catch] canceled errors if, immediately after completion, the
+client sends some modification.
+
+In [the handler], we deserialize LSP requests into rust-analyzer specific data
+types (by converting a file url into a numeric `FileId`), [ask analysis for
+completion] and serialize results into the LSP.
+
+The [completion implementation] is finally the place where we start doing the actual
+work. The first step is to collect the `CompletionContext` -- a struct which
+describes the cursor position in terms of Rust syntax and semantics. For
+example, `function_syntax: Option<&'a ast::FnDef>` stores a reference to
+the enclosing function *syntax*, while `function: Option<hir::Function>` is the
+`Def` for this function.
+
+To construct the context, we first do an ["IntelliJ Trick"]: we insert a dummy
+identifier at the cursor's position and parse this modified file, to get a
+reasonably looking syntax tree. Then we do a bunch of "classification" routines
+to figure out the context. For example, we [find an ancestor `fn` node] and we get a
+[semantic model] for it (using the lossy `source_binder` infrastructure).
+
+The second step is to run a [series of independent completion routines]. Let's
+take a closer look at [`complete_dot`], which completes fields and methods in
+`foo.bar|`. First we extract a semantic function and a syntactic receiver
+expression out of the `Context`. Then we run type-inference for this single
+function and map our syntactic expression to `ExprId`. Using the ID, we figure
+out the type of the receiver expression. Then we add all fields & methods from
+the type to completion.
+
+[receiving a message]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L203
+[schedule it on the threadpool]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L428
+[catch]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L436-L442
+[the handler]: https://salsa.zulipchat.com/#narrow/stream/181542-rfcs.2Fsalsa-query-group/topic/design.20next.20steps
+[ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L439-L444
+[completion implementation]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L46-L62
+[`CompletionContext`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L14-L37
+["IntelliJ Trick"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L72-L75
+[find an ancestor `fn` node]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L116-L120
+[semantic model]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L123
+[series of independent completion routines]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L52-L59
+[`complete_dot`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/complete_dot.rs#L6-L22
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
new file mode 100644
index 000000000..5040643d3
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
@@ -0,0 +1,761 @@
+<!---
+lsp_ext.rs hash: 2a188defec26cc7c
+
+If you need to change the above hash to make the test pass, please check if you
+need to adjust this doc as well and ping this issue:
+
+ https://github.com/rust-lang/rust-analyzer/issues/4604
+
+--->
+
+# LSP Extensions
+
+This document describes LSP extensions used by rust-analyzer.
+It's a best effort document, when in doubt, consult the source (and send a PR with clarification ;-) ).
+We aim to upstream all non Rust-specific extensions to the protocol, but this is not a top priority.
+All capabilities are enabled via the `experimental` field of `ClientCapabilities` or `ServerCapabilities`.
+Requests which we hope to upstream live under `experimental/` namespace.
+Requests, which are likely to always remain specific to `rust-analyzer` are under `rust-analyzer/` namespace.
+
+If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).
+
+## UTF-8 offsets
+
+rust-analyzer supports clangd's extension for opting into UTF-8 as the coordinate space for offsets (by default, LSP uses UTF-16 offsets).
+
+https://clangd.llvm.org/extensions.html#utf-8-offsets
+
+## Configuration in `initializationOptions`
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567
+
+The `initializationOptions` field of the `InitializeParams` of the initialization request should contain the `"rust-analyzer"` section of the configuration.
+
+`rust-analyzer` normally sends a `"workspace/configuration"` request with `{ "items": ["rust-analyzer"] }` payload.
+However, the server can't do this during initialization.
+At the same time some essential configuration parameters are needed early on, before servicing requests.
+For this reason, we ask that `initializationOptions` contains the configuration, as if the server did make a `"workspace/configuration"` request.
+
+If a language client does not know about `rust-analyzer`'s configuration options it can get sensible defaults by doing any of the following:
+ * Not sending `initializationOptions`
+ * Sending `"initializationOptions": null`
+ * Sending `"initializationOptions": {}`
+
+## Snippet `TextEdit`
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/724
+
+**Experimental Client Capability:** `{ "snippetTextEdit": boolean }`
+
+If this capability is set, `WorkspaceEdit`s returned from `codeAction` requests and `TextEdit`s returned from `textDocument/onTypeFormatting` requests might contain `SnippetTextEdit`s instead of usual `TextEdit`s:
+
+```typescript
+interface SnippetTextEdit extends TextEdit {
+ insertTextFormat?: InsertTextFormat;
+ annotationId?: ChangeAnnotationIdentifier;
+}
+```
+
+```typescript
+export interface TextDocumentEdit {
+ textDocument: OptionalVersionedTextDocumentIdentifier;
+ edits: (TextEdit | SnippetTextEdit)[];
+}
+```
+
+When applying such code action or text edit, the editor should insert snippet, with tab stops and placeholder.
+At the moment, rust-analyzer guarantees that only a single edit will have `InsertTextFormat.Snippet`.
+
+### Example
+
+"Add `derive`" code action transforms `struct S;` into `#[derive($0)] struct S;`
+
+### Unresolved Questions
+
+* Where exactly are `SnippetTextEdit`s allowed (only in code actions at the moment)?
+* Can snippets span multiple files (so far, no)?
+
+## `CodeAction` Groups
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/994
+
+**Experimental Client Capability:** `{ "codeActionGroup": boolean }`
+
+If this capability is set, `CodeAction`s returned from the server contain an additional field, `group`:
+
+```typescript
+interface CodeAction {
+ title: string;
+ group?: string;
+ ...
+}
+```
+
+All code-actions with the same `group` should be grouped under single (extendable) entry in lightbulb menu.
+The set of actions `[ { title: "foo" }, { group: "frobnicate", title: "bar" }, { group: "frobnicate", title: "baz" }]` should be rendered as
+
+```
+💡
+ +-------------+
+ | foo |
+ +-------------+-----+
+ | frobnicate >| bar |
+ +-------------+-----+
+ | baz |
+ +-----+
+```
+
+Alternatively, selecting `frobnicate` could present a user with an additional menu to choose between `bar` and `baz`.
+
+### Example
+
+```rust
+fn main() {
+ let x: Entry/*cursor here*/ = todo!();
+}
+```
+
+Invoking code action at this position will yield two code actions for importing `Entry` from either `collections::HashMap` or `collection::BTreeMap`, grouped under a single "import" group.
+
+### Unresolved Questions
+
+* Is a fixed two-level structure enough?
+* Should we devise a general way to encode custom interaction protocols for GUI refactorings?
+
+## Parent Module
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/1002
+
+**Experimental Server Capability:** `{ "parentModule": boolean }`
+
+This request is sent from client to server to handle "Goto Parent Module" editor action.
+
+**Method:** `experimental/parentModule`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:** `Location | Location[] | LocationLink[] | null`
+
+
+### Example
+
+```rust
+// src/main.rs
+mod foo;
+// src/foo.rs
+
+/* cursor here*/
+```
+
+`experimental/parentModule` returns a single `Link` to the `mod foo;` declaration.
+
+### Unresolved Question
+
+* An alternative would be to use a more general "gotoSuper" request, which would work for super methods, super classes and super modules.
+ This is the approach IntelliJ Rust is taking.
+ However, experience shows that super module (which generally has a feeling of navigation between files) should be separate.
+ If you want super module, but the cursor happens to be inside an overridden function, the behavior with single "gotoSuper" request is surprising.
+
+## Join Lines
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/992
+
+**Experimental Server Capability:** `{ "joinLines": boolean }`
+
+This request is sent from client to server to handle "Join Lines" editor action.
+
+**Method:** `experimental/joinLines`
+
+**Request:**
+
+```typescript
+interface JoinLinesParams {
+ textDocument: TextDocumentIdentifier,
+ /// Currently active selections/cursor offsets.
+ /// This is an array to support multiple cursors.
+ ranges: Range[],
+}
+```
+
+**Response:** `TextEdit[]`
+
+### Example
+
+```rust
+fn main() {
+ /*cursor here*/let x = {
+ 92
+ };
+}
+```
+
+`experimental/joinLines` yields (curly braces are automagically removed)
+
+```rust
+fn main() {
+ let x = 92;
+}
+```
+
+### Unresolved Question
+
+* What is the position of the cursor after `joinLines`?
+ Currently, this is left to editor's discretion, but it might be useful to specify on the server via snippets.
+ However, it then becomes unclear how it works with multi cursor.
+
+## On Enter
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/1001
+
+**Experimental Server Capability:** `{ "onEnter": boolean }`
+
+This request is sent from client to server to handle the <kbd>Enter</kbd> key press.
+
+**Method:** `experimental/onEnter`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:**
+
+```typescript
+SnippetTextEdit[]
+```
+
+### Example
+
+```rust
+fn main() {
+ // Some /*cursor here*/ docs
+ let x = 92;
+}
+```
+
+`experimental/onEnter` returns the following snippet
+
+```rust
+fn main() {
+ // Some
+ // $0 docs
+ let x = 92;
+}
+```
+
+The primary goal of `onEnter` is to handle automatic indentation when opening a new line.
+This is not yet implemented.
+The secondary goal is to handle fixing up syntax, like continuing doc strings and comments, and escaping `\n` in string literals.
+
+As proper cursor positioning is the raison d'être for `onEnter`, it uses `SnippetTextEdit`.
+
+### Unresolved Question
+
+* How to deal with synchronicity of the request?
+ One option is to require the client to block until the server returns the response.
+ Another option is to do a OT-style merging of edits from client and server.
+ A third option is to do a record-replay: client applies heuristic on enter immediately, then applies all user's keypresses.
+ When the server is ready with the response, the client rollbacks all the changes and applies the recorded actions on top of the correct response.
+* How to deal with multiple carets?
+* Should we extend this to arbitrary typed events and not just `onEnter`?
+
+## Structural Search Replace (SSR)
+
+**Experimental Server Capability:** `{ "ssr": boolean }`
+
+This request is sent from client to server to handle structural search replace -- automated syntax tree based transformation of the source.
+
+**Method:** `experimental/ssr`
+
+**Request:**
+
+```typescript
+interface SsrParams {
+ /// Search query.
+ /// The specific syntax is specified outside of the protocol.
+ query: string,
+ /// If true, only check the syntax of the query and don't compute the actual edit.
+ parseOnly: boolean,
+ /// The current text document. This and `position` will be used to determine in what scope
+ /// paths in `query` should be resolved.
+ textDocument: TextDocumentIdentifier;
+ /// Position where SSR was invoked.
+ position: Position;
+ /// Current selections. Search/replace will be restricted to these if non-empty.
+ selections: Range[];
+}
+```
+
+**Response:**
+
+```typescript
+WorkspaceEdit
+```
+
+### Example
+
+SSR with query `foo($a, $b) ==>> ($a).foo($b)` will transform, eg `foo(y + 5, z)` into `(y + 5).foo(z)`.
+
+### Unresolved Question
+
+* Probably needs search without replace mode
+* Needs a way to limit the scope to certain files.
+
+## Matching Brace
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/999
+
+**Experimental Server Capability:** `{ "matchingBrace": boolean }`
+
+This request is sent from client to server to handle "Matching Brace" editor action.
+
+**Method:** `experimental/matchingBrace`
+
+**Request:**
+
+```typescript
+interface MatchingBraceParams {
+ textDocument: TextDocumentIdentifier,
+ /// Position for each cursor
+ positions: Position[],
+}
+```
+
+**Response:**
+
+```typescript
+Position[]
+```
+
+### Example
+
+```rust
+fn main() {
+ let x: Vec<()>/*cursor here*/ = vec![]
+}
+```
+
+`experimental/matchingBrace` yields the position of `<`.
+In many cases, matching braces can be handled by the editor.
+However, some cases (like disambiguating between generics and comparison operations) need a real parser.
+Moreover, it would be cool if editors didn't need to implement even basic language parsing.
+
+### Unresolved Question
+
+* Should we return a nested brace structure, to allow paredit-like actions of jump *out* of the current brace pair?
+ This is how `SelectionRange` request works.
+* Alternatively, should we perhaps flag certain `SelectionRange`s as being brace pairs?
+
+## Runnables
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/944
+
+**Experimental Server Capability:** `{ "runnables": { "kinds": string[] } }`
+
+This request is sent from client to server to get the list of things that can be run (tests, binaries, `cargo check -p`).
+
+**Method:** `experimental/runnables`
+
+**Request:**
+
+```typescript
+interface RunnablesParams {
+ textDocument: TextDocumentIdentifier;
+ /// If null, compute runnables for the whole file.
+ position?: Position;
+}
+```
+
+**Response:** `Runnable[]`
+
+```typescript
+interface Runnable {
+ label: string;
+ /// If this Runnable is associated with a specific function/module, etc, the location of this item
+ location?: LocationLink;
+    /// Running things is necessarily technology specific, `kind` needs to be advertised via server capabilities,
+    /// the type of `args` is specific to `kind`. The actual running is handled by the client.
+ kind: string;
+ args: any;
+}
+```
+
+rust-analyzer supports only one `kind`, `"cargo"`. The `args` for `"cargo"` look like this:
+
+```typescript
+{
+ workspaceRoot?: string;
+ cargoArgs: string[];
+ cargoExtraArgs: string[];
+ executableArgs: string[];
+ expectTest?: boolean;
+ overrideCargo?: string;
+}
+```
+
+## Open External Documentation
+
+This request is sent from client to server to get a URL to documentation for the symbol under the cursor, if available.
+
+**Method** `experimental/externalDocs`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response** `string | null`
+
+
+## Analyzer Status
+
+**Method:** `rust-analyzer/analyzerStatus`
+
+**Request:**
+
+```typescript
+interface AnalyzerStatusParams {
+ /// If specified, show dependencies of the current file.
+ textDocument?: TextDocumentIdentifier;
+}
+```
+
+**Response:** `string`
+
+Returns internal status message, mostly for debugging purposes.
+
+## Reload Workspace
+
+**Method:** `rust-analyzer/reloadWorkspace`
+
+**Request:** `null`
+
+**Response:** `null`
+
+Reloads project information (that is, re-executes `cargo metadata`).
+
+## Server Status
+
+**Experimental Client Capability:** `{ "serverStatusNotification": boolean }`
+
+**Method:** `experimental/serverStatus`
+
+**Notification:**
+
+```typescript
+interface ServerStatusParams {
+ /// `ok` means that the server is completely functional.
+ ///
+ /// `warning` means that the server is partially functional.
+ /// It can answer correctly to most requests, but some results
+ /// might be wrong due to, for example, some missing dependencies.
+ ///
+ /// `error` means that the server is not functional. For example,
+ /// there's a fatal build configuration problem. The server might
+ /// still give correct answers to simple requests, but most results
+ /// will be incomplete or wrong.
+ health: "ok" | "warning" | "error",
+ /// Is there any pending background work which might change the status?
+ /// For example, are dependencies being downloaded?
+ quiescent: boolean,
+ /// Explanatory message to show on hover.
+ message?: string,
+}
+```
+
+This notification is sent from server to client.
+The client can use it to display *persistent* status to the user (in modline).
+It is similar to `showMessage`, but is intended for states rather than point-in-time events.
+
+Note that this functionality is intended primarily to inform the end user about the state of the server.
+In particular, it's valid for the client to completely ignore this extension.
+Clients are discouraged from but are allowed to use the `health` status to decide if it's worth sending a request to the server.
+
+## Syntax Tree
+
+**Method:** `rust-analyzer/syntaxTree`
+
+**Request:**
+
+```typescript
+interface SyntaxTreeParams {
+ textDocument: TextDocumentIdentifier,
+ range?: Range,
+}
+```
+
+**Response:** `string`
+
+Returns textual representation of a parse tree for the file/selected region.
+Primarily for debugging, but very useful for all people working on rust-analyzer itself.
+
+## View Hir
+
+**Method:** `rust-analyzer/viewHir`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:** `string`
+
+Returns a textual representation of the HIR of the function containing the cursor.
+For debugging or when working on rust-analyzer itself.
+
+## View File Text
+
+**Method:** `rust-analyzer/viewFileText`
+
+**Request:** `TextDocumentIdentifier`
+
+**Response:** `string`
+
+Returns the text of a file as seen by the server.
+This is for debugging file sync problems.
+
+## View ItemTree
+
+**Method:** `rust-analyzer/viewItemTree`
+
+**Request:**
+
+```typescript
+interface ViewItemTreeParams {
+ textDocument: TextDocumentIdentifier,
+}
+```
+
+**Response:** `string`
+
+Returns a textual representation of the `ItemTree` of the currently open file, for debugging.
+
+## View Crate Graph
+
+**Method:** `rust-analyzer/viewCrateGraph`
+
+**Request:**
+
+```typescript
+interface ViewCrateGraphParams {
+ full: boolean,
+}
+```
+
+**Response:** `string`
+
+Renders rust-analyzer's crate graph as an SVG image.
+
+If `full` is `true`, the graph includes non-workspace crates (crates.io dependencies as well as sysroot crates).
+
+## Shuffle Crate Graph
+
+**Method:** `rust-analyzer/shuffleCrateGraph`
+
+**Request:** `null`
+
+Shuffles the crate IDs in the crate graph, for debugging purposes.
+
+## Expand Macro
+
+**Method:** `rust-analyzer/expandMacro`
+
+**Request:**
+
+```typescript
+interface ExpandMacroParams {
+ textDocument: TextDocumentIdentifier,
+ position: Position,
+}
+```
+
+**Response:**
+
+```typescript
+interface ExpandedMacro {
+ name: string,
+ expansion: string,
+}
+```
+
+Expands macro call at a given position.
+
+## Hover Actions
+
+**Experimental Client Capability:** `{ "hoverActions": boolean }`
+
+If this capability is set, `Hover` request returned from the server might contain an additional field, `actions`:
+
+```typescript
+interface Hover {
+ ...
+ actions?: CommandLinkGroup[];
+}
+
+interface CommandLink extends Command {
+ /**
+ * A tooltip for the command, when represented in the UI.
+ */
+ tooltip?: string;
+}
+
+interface CommandLinkGroup {
+ title?: string;
+ commands: CommandLink[];
+}
+```
+
+Such actions on the client side are appended to a hover bottom as command links:
+```
+ +-----------------------------+
+ | Hover content |
+ | |
+ +-----------------------------+
+ | _Action1_ | _Action2_ | <- first group, no TITLE
+ +-----------------------------+
+ | TITLE _Action1_ | _Action2_ | <- second group
+ +-----------------------------+
+ ...
+```
+
+## Open Cargo.toml
+
+**Upstream Issue:** https://github.com/rust-lang/rust-analyzer/issues/6462
+
+**Experimental Server Capability:** `{ "openCargoToml": boolean }`
+
+This request is sent from client to server to open the current project's Cargo.toml
+
+**Method:** `experimental/openCargoToml`
+
+**Request:** `OpenCargoTomlParams`
+
+**Response:** `Location | null`
+
+
+### Example
+
+```rust
+// Cargo.toml
+[package]
+// src/main.rs
+
+/* cursor here*/
+```
+
+`experimental/openCargoToml` returns a single `Link` to the start of the `[package]` keyword.
+
+## Related tests
+
+This request is sent from client to server to get the list of tests for the specified position.
+
+**Method:** `rust-analyzer/relatedTests`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:** `TestInfo[]`
+
+```typescript
+interface TestInfo {
+ runnable: Runnable;
+}
+```
+
+## Hover Range
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/377
+
+**Experimental Server Capability:** { "hoverRange": boolean }
+
+This extension allows passing a `Range` as a `position` field of `HoverParams`.
+The primary use-case is to use the hover request to show the type of the expression currently selected.
+
+```typescript
+interface HoverParams extends WorkDoneProgressParams {
+ textDocument: TextDocumentIdentifier;
+ position: Range | Position;
+}
+```
+Whenever the client sends a `Range`, it is understood as the current selection and any hover included in the range will show the type of the expression if possible.
+
+### Example
+
+```rust
+fn main() {
+ let expression = $01 + 2 * 3$0;
+}
+```
+
+Triggering a hover inside the selection above will show a result of `i32`.
+
+## Move Item
+
+**Upstream Issue:** https://github.com/rust-lang/rust-analyzer/issues/6823
+
+This request is sent from client to server to move item under cursor or selection in some direction.
+
+**Method:** `experimental/moveItem`
+
+**Request:** `MoveItemParams`
+
+**Response:** `SnippetTextEdit[]`
+
+```typescript
+export interface MoveItemParams {
+ textDocument: TextDocumentIdentifier,
+ range: Range,
+ direction: Direction
+}
+
+export const enum Direction {
+ Up = "Up",
+ Down = "Down"
+}
+```
+
+## Workspace Symbols Filtering
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/941
+
+**Experimental Server Capability:** `{ "workspaceSymbolScopeKindFiltering": boolean }`
+
+Extends the existing `workspace/symbol` request with ability to filter symbols by broad scope and kind of symbol.
+If this capability is set, `workspace/symbol` parameter gains two new optional fields:
+
+
+```typescript
+interface WorkspaceSymbolParams {
+ /**
+ * Return only the symbols defined in the specified scope.
+ */
+ searchScope?: WorkspaceSymbolSearchScope;
+ /**
+ * Return only the symbols of specified kinds.
+ */
+ searchKind?: WorkspaceSymbolSearchKind;
+ ...
+}
+
+const enum WorkspaceSymbolSearchScope {
+ Workspace = "workspace",
+ WorkspaceAndDependencies = "workspaceAndDependencies"
+}
+
+const enum WorkspaceSymbolSearchKind {
+ OnlyTypes = "onlyTypes",
+ AllSymbols = "allSymbols"
+}
+```
+
+## Client Commands
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/642
+
+**Experimental Client Capability:** `{ "commands?": ClientCommandOptions }`
+
+Certain LSP types originating on the server, notably code lenses, embed commands.
+Commands can be serviced either by the server or by the client.
+However, the server doesn't know which commands are available on the client.
+
+This extension allows the client to communicate this info.
+
+
+```typescript
+export interface ClientCommandOptions {
+ /**
+ * The commands to be executed on the client
+ */
+ commands: string[];
+}
+```
diff --git a/src/tools/rust-analyzer/docs/dev/style.md b/src/tools/rust-analyzer/docs/dev/style.md
new file mode 100644
index 000000000..a80eebd63
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/style.md
@@ -0,0 +1,1172 @@
+Our approach to "clean code" is two-fold:
+
+* We generally don't block PRs on style changes.
+* At the same time, all code in rust-analyzer is constantly refactored.
+
+It is explicitly OK for a reviewer to flag only some nits in the PR, and then send a follow-up cleanup PR for things which are easier to explain by example, cc-ing the original author.
+Sending small cleanup PRs (like renaming a single local variable) is encouraged.
+
+When reviewing pull requests prefer extending this document to leaving
+non-reusable comments on the pull request itself.
+
+# General
+
+## Scale of Changes
+
+Everyone knows that it's better to send small & focused pull requests.
+The problem is, sometimes you *have* to, eg, rewrite the whole compiler, and that just doesn't fit into a set of isolated PRs.
+
+The main things to keep an eye on are the boundaries between various components.
+There are three kinds of changes:
+
+1. Internals of a single component are changed.
+ Specifically, you don't change any `pub` items.
+ A good example here would be an addition of a new assist.
+
+2. API of a component is expanded.
+ Specifically, you add a new `pub` function which wasn't there before.
+ A good example here would be expansion of assist API, for example, to implement lazy assists or assists groups.
+
+3. A new dependency between components is introduced.
+ Specifically, you add a `pub use` reexport from another crate or you add a new line to the `[dependencies]` section of `Cargo.toml`.
+ A good example here would be adding reference search capability to the assists crates.
+
+For the first group, the change is generally merged as long as:
+
+* it works for the happy case,
+* it has tests,
+* it doesn't panic for the unhappy case.
+
+For the second group, the change would be subjected to quite a bit of scrutiny and iteration.
+The new API needs to be right (or at least easy to change later).
+The actual implementation doesn't matter that much.
+It's very important to minimize the amount of changed lines of code for changes of the second kind.
+Often, you start doing a change of the first kind, only to realize that you need to elevate to a change of the second kind.
+In this case, we'll probably ask you to split API changes into a separate PR.
+
+Changes of the third group should be pretty rare, so we don't specify any specific process for them.
+That said, adding an innocent-looking `pub use` is a very simple way to break encapsulation, keep an eye on it!
+
+Note: if you enjoyed this abstract hand-waving about boundaries, you might appreciate
+https://www.tedinski.com/2018/02/06/system-boundaries.html
+
+## Crates.io Dependencies
+
+We try to be very conservative with usage of crates.io dependencies.
+Don't use small "helper" crates (exception: `itertools` and `either` are allowed).
+If there's some general reusable bit of code you need, consider adding it to the `stdx` crate.
+A useful exercise is to read Cargo.lock and see if some *transitive* dependencies do not make sense for rust-analyzer.
+
+**Rationale:** keep compile times low, create ecosystem pressure for faster compiles, reduce the number of things which might break.
+
+## Commit Style
+
+We don't have specific rules around git history hygiene.
+Maintaining clean git history is strongly encouraged, but not enforced.
+Use rebase workflow, it's OK to rewrite history during PR review process.
+After you are happy with the state of the code, please use [interactive rebase](https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History) to squash fixup commits.
+
+Avoid @mentioning people in commit messages and pull request descriptions (they are added to the commit message by bors).
+Such messages create a lot of duplicate notification traffic during rebases.
+
+If possible, write Pull Request titles and descriptions from the user's perspective:
+
+```
+# GOOD
+Make goto definition work inside macros
+
+# BAD
+Use original span for FileId
+```
+
+This makes it easier to prepare a changelog.
+
+If the change adds a new user-visible functionality, consider recording a GIF with [peek](https://github.com/phw/peek) and pasting it into the PR description.
+
+To make writing the release notes easier, you can mark a pull request as a feature, fix, internal change, or minor.
+Minor changes are excluded from the release notes, while the other types are distributed in their corresponding sections.
+There are two ways to mark this:
+
+* use a `feat: `, `feature: `, `fix: `, `internal: ` or `minor: ` prefix in the PR title
+* write `changelog [feature|fix|internal|skip] [description]` in a comment or in the PR description; the description is optional, and will replace the title if included.
+
+These comments don't have to be added by the PR author.
+Editing a comment or the PR description or title is also fine, as long as it happens before the release.
+
+**Rationale:** clean history is potentially useful, but rarely used.
+But many users read changelogs.
+Including a description and GIF suitable for the changelog means less work for the maintainers on the release day.
+
+## Clippy
+
+We don't enforce Clippy.
+A number of default lints have high false positive rate.
+Selectively patching false-positives with `allow(clippy)` is considered worse than not using Clippy at all.
+There's a `cargo lint` command which runs a subset of low-FPR lints.
+Careful tweaking of `lint` is welcome.
+Of course, applying Clippy suggestions is welcome as long as they indeed improve the code.
+
+**Rationale:** see [rust-lang/clippy#5537](https://github.com/rust-lang/rust-clippy/issues/5537).
+
+# Code
+
+## Minimal Tests
+
+Most tests in rust-analyzer start with a snippet of Rust code.
+These snippets should be minimal -- if you copy-paste a snippet of real code into the tests, make sure to remove everything which could be removed.
+
+It also makes sense to format snippets more compactly (for example, by placing enum definitions like `enum E { Foo, Bar }` on a single line),
+as long as they are still readable.
+
+When using multiline fixtures, use unindented raw string literals:
+
+```rust
+ #[test]
+ fn inline_field_shorthand() {
+ check_assist(
+ inline_local_variable,
+ r#"
+struct S { foo: i32}
+fn main() {
+ let $0foo = 92;
+ S { foo }
+}
+"#,
+ r#"
+struct S { foo: i32}
+fn main() {
+ S { foo: 92 }
+}
+"#,
+ );
+ }
+```
+
+**Rationale:**
+
+There are many benefits to this:
+
+* less to read or to scroll past
+* easier to understand what exactly is tested
+* less stuff printed during printf-debugging
+* less time to run the test
+
+Formatting ensures that you can use your editor's "number of selected characters" feature to correlate offsets with test's source code.
+
+## Marked Tests
+
+Use
+[`cov_mark::hit! / cov_mark::check!`](https://github.com/matklad/cov-mark)
+when testing specific conditions.
+Do not place several marks into a single test or condition.
+Do not reuse marks between several tests.
+
+**Rationale:** marks provide an easy way to find the canonical test for each bit of code.
+This makes it much easier to understand.
+More than one mark per test / code branch doesn't add significantly to understanding.
+
+## `#[should_panic]`
+
+Do not use `#[should_panic]` tests.
+Instead, explicitly check for `None`, `Err`, etc.
+
+**Rationale:** `#[should_panic]` is a tool for library authors to make sure that the API does not fail silently when misused.
+`rust-analyzer` is not a library, we don't need to test for API misuse, and we have to handle any user input without panics.
+Panic messages in the logs from the `#[should_panic]` tests are confusing.
+
+## `#[ignore]`
+
+Do not `#[ignore]` tests.
+If the test currently does not work, assert the wrong behavior and add a fixme explaining why it is wrong.
+
+**Rationale:** noticing when the behavior is fixed, making sure that even the wrong behavior is acceptable (ie, not a panic).
+
+## Function Preconditions
+
+Express function preconditions in types and force the caller to provide them (rather than checking in callee):
+
+```rust
+// GOOD
+fn frobnicate(walrus: Walrus) {
+ ...
+}
+
+// BAD
+fn frobnicate(walrus: Option<Walrus>) {
+ let walrus = match walrus {
+ Some(it) => it,
+ None => return,
+ };
+ ...
+}
+```
+
+**Rationale:** this makes control flow explicit at the call site.
+Call-site has more context, it often happens that the precondition falls out naturally or can be bubbled up higher in the stack.
+
+Avoid splitting precondition check and precondition use across functions:
+
+```rust
+// GOOD
+fn main() {
+ let s: &str = ...;
+ if let Some(contents) = string_literal_contents(s) {
+
+ }
+}
+
+fn string_literal_contents(s: &str) -> Option<&str> {
+ if s.starts_with('"') && s.ends_with('"') {
+ Some(&s[1..s.len() - 1])
+ } else {
+ None
+ }
+}
+
+// BAD
+fn main() {
+ let s: &str = ...;
+ if is_string_literal(s) {
+ let contents = &s[1..s.len() - 1];
+ }
+}
+
+fn is_string_literal(s: &str) -> bool {
+ s.starts_with('"') && s.ends_with('"')
+}
+```
+
+In the "BAD" version, the precondition that `1` is a valid char boundary is checked in `is_string_literal` and used in `main`.
+In the "GOOD" version, the precondition check and usage live in the same function, and the result is encoded in the types.
+
+**Rationale:** non-local code properties degrade under change.
+
+When checking a boolean precondition, prefer `if !invariant` to `if negated_invariant`:
+
+```rust
+// GOOD
+if !(idx < len) {
+ return None;
+}
+
+// BAD
+if idx >= len {
+ return None;
+}
+```
+
+**Rationale:** it's useful to see the invariant relied upon by the rest of the function clearly spelled out.
+
+## Control Flow
+
+As a special case of the previous rule, do not hide control flow inside functions, push it to the caller:
+
+```rust
+// GOOD
+if cond {
+ f()
+}
+
+// BAD
+fn f() {
+ if !cond {
+ return;
+ }
+ ...
+}
+```
+
+## Assertions
+
+Assert liberally.
+Prefer [`stdx::never!`](https://docs.rs/always-assert/0.1.2/always_assert/macro.never.html) to standard `assert!`.
+
+**Rationale:** See [cross cutting concern: error handling](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#error-handling).
+
+## Getters & Setters
+
+If a field can have any value without breaking invariants, make the field public.
+Conversely, if there is an invariant, document it, enforce it in the "constructor" function, make the field private, and provide a getter.
+Never provide setters.
+
+Getters should return borrowed data:
+
+```rust
+struct Person {
+ // Invariant: never empty
+ first_name: String,
+ middle_name: Option<String>
+}
+
+// GOOD
+impl Person {
+ fn first_name(&self) -> &str { self.first_name.as_str() }
+    fn middle_name(&self) -> Option<&str> { self.middle_name.as_deref() }
+}
+
+// BAD
+impl Person {
+ fn first_name(&self) -> String { self.first_name.clone() }
+ fn middle_name(&self) -> &Option<String> { &self.middle_name }
+}
+```
+
+**Rationale:** we don't provide public API, it's cheaper to refactor than to pay getters rent.
+Non-local code properties degrade under change, privacy makes invariant local.
+Borrowed owned types (`&String`) disclose irrelevant details about internal representation.
+Irrelevant (neither right nor wrong) things obscure correctness.
+
+## Useless Types
+
+More generally, always prefer types on the left
+
+```rust
+// GOOD BAD
+&[T] &Vec<T>
+&str &String
+Option<&T> &Option<T>
+&Path &PathBuf
+```
+
+**Rationale:** types on the left are strictly more general.
+Even when generality is not required, consistency is important.
+
+## Constructors
+
+Prefer `Default` to zero-argument `new` function.
+
+```rust
+// GOOD
+#[derive(Default)]
+struct Foo {
+ bar: Option<Bar>
+}
+
+// BAD
+struct Foo {
+ bar: Option<Bar>
+}
+
+impl Foo {
+ fn new() -> Foo {
+ Foo { bar: None }
+ }
+}
+```
+
+Prefer `Default` even if it has to be implemented manually.
+
+**Rationale:** less typing in the common case, uniformity.
+
+Use `Vec::new` rather than `vec![]`.
+
+**Rationale:** uniformity, strength reduction.
+
+Avoid using "dummy" states to implement a `Default`.
+If a type doesn't have a sensible default (an empty value), don't hide that fact.
+Let the caller explicitly decide what the right initial state is.
+
+## Functions Over Objects
+
+Avoid creating "doer" objects.
+That is, objects which are created only to execute a single action.
+
+```rust
+// GOOD
+do_thing(arg1, arg2);
+
+// BAD
+ThingDoer::new(arg1, arg2).do();
+```
+
+Note that this concerns only outward API.
+When implementing `do_thing`, it might be very useful to create a context object.
+
+```rust
+pub fn do_thing(arg1: Arg1, arg2: Arg2) -> Res {
+ let mut ctx = Ctx { arg1, arg2 };
+ ctx.run()
+}
+
+struct Ctx {
+ arg1: Arg1, arg2: Arg2
+}
+
+impl Ctx {
+ fn run(self) -> Res {
+ ...
+ }
+}
+```
+
+The difference is that `Ctx` is an impl detail here.
+
+Sometimes a middle ground is acceptable if this can save some busywork:
+
+```rust
+ThingDoer::do(arg1, arg2);
+
+pub struct ThingDoer {
+ arg1: Arg1, arg2: Arg2,
+}
+
+impl ThingDoer {
+ pub fn do(arg1: Arg1, arg2: Arg2) -> Res {
+ ThingDoer { arg1, arg2 }.run()
+ }
+ fn run(self) -> Res {
+ ...
+ }
+}
+```
+
+**Rationale:** not bothering the caller with irrelevant details, not mixing user API with implementor API.
+
+## Functions with many parameters
+
+Avoid creating functions with many optional or boolean parameters.
+Introduce a `Config` struct instead.
+
+```rust
+// GOOD
+pub struct AnnotationConfig {
+ pub binary_target: bool,
+ pub annotate_runnables: bool,
+ pub annotate_impls: bool,
+}
+
+pub fn annotations(
+ db: &RootDatabase,
+ file_id: FileId,
+ config: AnnotationConfig
+) -> Vec<Annotation> {
+ ...
+}
+
+// BAD
+pub fn annotations(
+ db: &RootDatabase,
+ file_id: FileId,
+ binary_target: bool,
+ annotate_runnables: bool,
+ annotate_impls: bool,
+) -> Vec<Annotation> {
+ ...
+}
+```
+
+**Rationale:** reducing churn.
+If the function has many parameters, they most likely change frequently.
+By packing them into a struct we protect all intermediary functions from changes.
+
+Do not implement `Default` for the `Config` struct, the caller has more context to determine better defaults.
+Do not store `Config` as a part of the `state`, pass it explicitly.
+This gives more flexibility for the caller.
+
+If there is variation not only in the input parameters, but in the return type as well, consider introducing a `Command` type.
+
+```rust
+// MAYBE GOOD
+pub struct Query {
+ pub name: String,
+ pub case_sensitive: bool,
+}
+
+impl Query {
+ pub fn all(self) -> Vec<Item> { ... }
+ pub fn first(self) -> Option<Item> { ... }
+}
+
+// MAYBE BAD
+fn query_all(name: String, case_sensitive: bool) -> Vec<Item> { ... }
+fn query_first(name: String, case_sensitive: bool) -> Option<Item> { ... }
+```
+
+## Prefer Separate Functions Over Parameters
+
+If a function has a `bool` or an `Option` parameter, and it is always called with `true`, `false`, `Some` and `None` literals, split the function in two.
+
+```rust
+// GOOD
+fn caller_a() {
+ foo()
+}
+
+fn caller_b() {
+ foo_with_bar(Bar::new())
+}
+
+fn foo() { ... }
+fn foo_with_bar(bar: Bar) { ... }
+
+// BAD
+fn caller_a() {
+ foo(None)
+}
+
+fn caller_b() {
+ foo(Some(Bar::new()))
+}
+
+fn foo(bar: Option<Bar>) { ... }
+```
+
+**Rationale:** more often than not, such functions display "`false sharing`" -- they have additional `if` branching inside for two different cases.
+Splitting the two different control flows into two functions simplifies each path, and removes cross-dependencies between the two paths.
+If there's common code between `foo` and `foo_with_bar`, extract *that* into a common helper.
+
+## Appropriate String Types
+
+When interfacing with OS APIs, use `OsString`, even if the original source of data is utf-8 encoded.
+**Rationale:** cleanly delineates the boundary when the data goes into the OS-land.
+
+Use `AbsPathBuf` and `AbsPath` over `std::Path`.
+**Rationale:** rust-analyzer is a long-lived process which handles several projects at the same time.
+It is important not to leak cwd by accident.
+
+# Premature Pessimization
+
+## Avoid Allocations
+
+Avoid writing code which is slower than it needs to be.
+Don't allocate a `Vec` where an iterator would do, don't allocate strings needlessly.
+
+```rust
+// GOOD
+use itertools::Itertools;
+
+let (first_word, second_word) = match text.split_ascii_whitespace().collect_tuple() {
+ Some(it) => it,
+ None => return,
+};
+
+// BAD
+let words = text.split_ascii_whitespace().collect::<Vec<_>>();
+if words.len() != 2 {
+ return
+}
+```
+
+**Rationale:** not allocating is almost always faster.
+
+## Push Allocations to the Call Site
+
+If allocation is inevitable, let the caller allocate the resource:
+
+```rust
+// GOOD
+fn frobnicate(s: String) {
+ ...
+}
+
+// BAD
+fn frobnicate(s: &str) {
+ let s = s.to_string();
+ ...
+}
+```
+
+**Rationale:** reveals the costs.
+It is also more efficient when the caller already owns the allocation.
+
+## Collection Types
+
+Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`.
+
+**Rationale:** they use a hasher that's significantly faster and using them consistently will reduce code size by some small amount.
+
+## Avoid Intermediate Collections
+
+When writing a recursive function to compute a set of things, use an accumulator parameter instead of returning a fresh collection.
+Accumulator goes first in the list of arguments.
+
+```rust
+// GOOD
+pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
+ let mut res = FxHashSet::default();
+ go(&mut res, node);
+ res
+}
+fn go(acc: &mut FxHashSet<Node>, node: Node) {
+ acc.insert(node);
+ for n in node.neighbors() {
+ go(acc, n);
+ }
+}
+
+// BAD
+pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
+ let mut res = FxHashSet::default();
+ res.insert(node);
+ for n in node.neighbors() {
+ res.extend(reachable_nodes(n));
+ }
+ res
+}
+```
+
+**Rationale:** re-use allocations, accumulator style is more concise for complex cases.
+
+## Avoid Monomorphization
+
+Avoid making a lot of code type parametric, *especially* on the boundaries between crates.
+
+```rust
+// GOOD
+fn frobnicate(f: impl FnMut()) {
+ frobnicate_impl(&mut f)
+}
+fn frobnicate_impl(f: &mut dyn FnMut()) {
+ // lots of code
+}
+
+// BAD
+fn frobnicate(f: impl FnMut()) {
+ // lots of code
+}
+```
+
+Avoid `AsRef` polymorphism, it pays back only for widely used libraries:
+
+```rust
+// GOOD
+fn frobnicate(f: &Path) {
+}
+
+// BAD
+fn frobnicate(f: impl AsRef<Path>) {
+}
+```
+
+**Rationale:** Rust uses monomorphization to compile generic code, meaning that for each instantiation of a generic function with concrete types, the function is compiled afresh, *per crate*.
+This allows for exceptionally good performance, but leads to increased compile times.
+Runtime performance obeys 80%/20% rule -- only a small fraction of code is hot.
+Compile time **does not** obey this rule -- all code has to be compiled.
+
+# Style
+
+## Order of Imports
+
+Separate import groups with blank lines.
+Use one `use` per crate.
+
+Module declarations come before the imports.
+Order them in "suggested reading order" for a person new to the code base.
+
+```rust
+mod x;
+mod y;
+
+// First std.
+use std::{ ... }
+
+// Second, external crates (both crates.io crates and other rust-analyzer crates).
+use crate_foo::{ ... }
+use crate_bar::{ ... }
+
+// Then current crate.
+use crate::{}
+
+// Finally, parent and child modules, but prefer `use crate::`.
+use super::{}
+
+// Re-exports are treated as item definitions rather than imports, so they go
+// after imports and modules. Use them sparingly.
+pub use crate::x::Z;
+```
+
+**Rationale:** consistency.
+Reading order is important for new contributors.
+Grouping by crate allows spotting unwanted dependencies easier.
+
+## Import Style
+
+Qualify items from `hir` and `ast`.
+
+```rust
+// GOOD
+use syntax::ast;
+
+fn frobnicate(func: hir::Function, strukt: ast::Struct) {}
+
+// BAD
+use hir::Function;
+use syntax::ast::Struct;
+
+fn frobnicate(func: Function, strukt: Struct) {}
+```
+
+**Rationale:** avoids name clashes, makes the layer clear at a glance.
+
+When implementing traits from `std::fmt` or `std::ops`, import the module:
+
+```rust
+// GOOD
+use std::fmt;
+
+impl fmt::Display for RenameError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { .. }
+}
+
+// BAD
+impl std::fmt::Display for RenameError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { .. }
+}
+
+// BAD
+use std::ops::Deref;
+
+impl Deref for Widget {
+ type Target = str;
+ fn deref(&self) -> &str { .. }
+}
+```
+
+**Rationale:** overall, less typing.
+Makes it clear that a trait is implemented, rather than used.
+
+Avoid local `use MyEnum::*` imports.
+**Rationale:** consistency.
+
+Prefer `use crate::foo::bar` to `use super::bar` or `use self::bar::baz`.
+**Rationale:** consistency, this is the style which works in all cases.
+
+By default, avoid re-exports.
+**Rationale:** for non-library code, re-exports introduce two ways to use something and allow for inconsistency.
+
+## Order of Items
+
+Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on.
+People read things from top to bottom, so place most important things first.
+
+Specifically, if all items except one are private, always put the non-private item on top.
+
+```rust
+// GOOD
+pub(crate) fn frobnicate() {
+ Helper::act()
+}
+
+#[derive(Default)]
+struct Helper { stuff: i32 }
+
+impl Helper {
+ fn act(&self) {
+
+ }
+}
+
+// BAD
+#[derive(Default)]
+struct Helper { stuff: i32 }
+
+pub(crate) fn frobnicate() {
+ Helper::act()
+}
+
+impl Helper {
+ fn act(&self) {
+
+ }
+}
+```
+
+If there's a mixture of private and public items, put public items first.
+
+Put `struct`s and `enum`s first, functions and impls last. Order type declarations in top-down manner.
+
+```rust
+// GOOD
+struct Parent {
+ children: Vec<Child>
+}
+
+struct Child;
+
+impl Parent {
+}
+
+impl Child {
+}
+
+// BAD
+struct Child;
+
+impl Child {
+}
+
+struct Parent {
+ children: Vec<Child>
+}
+
+impl Parent {
+}
+```
+
+**Rationale:** easier to get the sense of the API by visually scanning the file.
+If function bodies are folded in the editor, the source code should read as documentation for the public API.
+
+## Context Parameters
+
+Some parameters are threaded unchanged through many function calls.
+They determine the "context" of the operation.
+Pass such parameters first, not last.
+If there are several context parameters, consider packing them into a `struct Ctx` and passing it as `&self`.
+
+```rust
+// GOOD
+fn dfs(graph: &Graph, v: Vertex) -> usize {
+ let mut visited = FxHashSet::default();
+ return go(graph, &mut visited, v);
+
+ fn go(graph: &Graph, visited: &mut FxHashSet<Vertex>, v: usize) -> usize {
+ ...
+ }
+}
+
+// BAD
+fn dfs(v: Vertex, graph: &Graph) -> usize {
+ fn go(v: usize, graph: &Graph, visited: &mut FxHashSet<Vertex>) -> usize {
+ ...
+ }
+
+ let mut visited = FxHashSet::default();
+ go(v, graph, &mut visited)
+}
+```
+
+**Rationale:** consistency.
+Context-first works better when non-context parameter is a lambda.
+
+## Variable Naming
+
+Use boring and long names for local variables ([yay code completion](https://github.com/rust-lang/rust-analyzer/pull/4162#discussion_r417130973)).
+The default name is a lowercased name of the type: `global_state: GlobalState`.
+Avoid ad-hoc acronyms and contractions, but use the ones that exist consistently (`db`, `ctx`, `acc`).
+Prefer American spelling (color, behavior).
+
+Default names:
+
+* `res` -- "result of the function" local variable
+* `it` -- I don't really care about the name
+* `n_foos` -- number of foos (prefer this to `foo_count`)
+* `foo_idx` -- index of `foo`
+
+Many names in rust-analyzer conflict with keywords.
+We use mangled names instead of `r#ident` syntax:
+
+```
+crate -> krate
+enum -> enum_
+fn -> func
+impl -> imp
+macro -> mac
+mod -> module
+struct -> strukt
+trait -> trait_
+type -> ty
+```
+
+**Rationale:** consistency.
+
+## Early Returns
+
+Do use early returns
+
+```rust
+// GOOD
+fn foo() -> Option<Bar> {
+ if !condition() {
+ return None;
+ }
+
+ Some(...)
+}
+
+// BAD
+fn foo() -> Option<Bar> {
+ if condition() {
+ Some(...)
+ } else {
+ None
+ }
+}
+```
+
+**Rationale:** reduce cognitive stack usage.
+
+Use `return Err(err)` to throw an error:
+
+```rust
+// GOOD
+fn f() -> Result<(), ()> {
+ if condition {
+ return Err(());
+ }
+ Ok(())
+}
+
+// BAD
+fn f() -> Result<(), ()> {
+ if condition {
+ Err(())?;
+ }
+ Ok(())
+}
+```
+
+**Rationale:** `return` has type `!`, which allows the compiler to flag dead
+code (`Err(...)?` is of unconstrained generic type `T`).
+
+## Comparisons
+
+When doing multiple comparisons use `<`/`<=`, avoid `>`/`>=`.
+
+```rust
+// GOOD
+assert!(lo <= x && x <= hi);
+assert!(r1 < l2 || r2 < l1);
+assert!(x < y);
+assert!(0 < x);
+
+// BAD
+assert!(x >= lo && x <= hi);
+assert!(r1 < l2 || l1 > r2);
+assert!(y > x);
+assert!(x > 0);
+```
+
+**Rationale:** Less-than comparisons are more intuitive, they correspond spatially to the [real line](https://en.wikipedia.org/wiki/Real_line).
+
+## If-let
+
+Avoid `if let ... { } else { }` construct, use `match` instead.
+
+```rust
+// GOOD
+match ctx.expected_type.as_ref() {
+ Some(expected_type) => completion_ty == expected_type && !expected_type.is_unit(),
+ None => false,
+}
+
+// BAD
+if let Some(expected_type) = ctx.expected_type.as_ref() {
+ completion_ty == expected_type && !expected_type.is_unit()
+} else {
+ false
+}
+```
+
+**Rationale:** `match` is almost always more compact.
+The `else` branch can get a more precise pattern: `None` or `Err(_)` instead of `_`.
+
+## Match Ergonomics
+
+Don't use the `ref` keyword.
+
+**Rationale:** consistency & simplicity.
+`ref` was required before [match ergonomics](https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md).
+Today, it is redundant.
+Between `ref` and match ergonomics, the latter is more ergonomic in most cases, and is simpler (does not require a keyword).
+
+## Empty Match Arms
+
+Use `=> (),` when a match arm is intentionally empty:
+
+```rust
+// GOOD
+match result {
+ Ok(_) => (),
+ Err(err) => error!("{}", err),
+}
+
+// BAD
+match result {
+ Ok(_) => {}
+ Err(err) => error!("{}", err),
+}
+```
+
+**Rationale:** consistency.
+
+## Functional Combinators
+
+Use high order monadic combinators like `map`, `then` when they are a natural choice; don't bend the code to fit into some combinator.
+If writing a chain of combinators creates friction, replace them with control flow constructs: `for`, `if`, `match`.
+Mostly avoid `bool::then` and `Option::filter`.
+
+```rust
+// GOOD
+if !x.cond() {
+ return None;
+}
+Some(x)
+
+// BAD
+Some(x).filter(|it| it.cond())
+```
+
+This rule is more "soft" than others, and boils down mostly to taste.
+The guiding principle behind this rule is that code should be dense in computation, and sparse in the number of expressions per line.
+The second example contains *less* computation -- the `filter` function is an indirection for `if`, it doesn't do any useful work by itself.
+At the same time, it is more crowded -- it takes more time to visually scan it.
+
+**Rationale:** consistency, playing to language's strengths.
+Rust has first-class support for imperative control flow constructs like `for` and `if`, while functions are less first-class due to lack of universal function type, currying, and non-first-class effects (`?`, `.await`).
+
+## Turbofish
+
+Prefer type ascription over the turbofish.
+When ascribing types, avoid `_`
+
+```rust
+// GOOD
+let mutable: Vec<T> = old.into_iter().map(|it| builder.make_mut(it)).collect();
+
+// BAD
+let mutable: Vec<_> = old.into_iter().map(|it| builder.make_mut(it)).collect();
+
+// BAD
+let mutable = old.into_iter().map(|it| builder.make_mut(it)).collect::<Vec<_>>();
+```
+
+**Rationale:** consistency, readability.
+If compiler struggles to infer the type, the human would as well.
+Having the result type specified up-front helps with understanding what the chain of iterator methods is doing.
+
+## Helper Functions
+
+Avoid creating single-use helper functions:
+
+```rust
+// GOOD
+let buf = {
+ let mut buf = get_empty_buf(&mut arena);
+ buf.add_item(item);
+ buf
+};
+
+// BAD
+let buf = prepare_buf(&mut arena, item);
+
+...
+
+fn prepare_buf(arena: &mut Arena, item: Item) -> ItemBuf {
+ let mut res = get_empty_buf(&mut arena);
+ res.add_item(item);
+ res
+}
+```
+
+Exception: if you want to make use of `return` or `?`.
+
+**Rationale:** single-use functions change frequently, adding or removing parameters adds churn.
+A block serves just as well to delineate a bit of logic, but has access to all the context.
+Re-using originally single-purpose function often leads to bad coupling.
+
+## Local Helper Functions
+
+Put nested helper functions at the end of the enclosing functions
+(this requires using return statement).
+Don't nest more than one level deep.
+
+```rust
+// GOOD
+fn dfs(graph: &Graph, v: Vertex) -> usize {
+ let mut visited = FxHashSet::default();
+ return go(graph, &mut visited, v);
+
+ fn go(graph: &Graph, visited: &mut FxHashSet<Vertex>, v: usize) -> usize {
+ ...
+ }
+}
+
+// BAD
+fn dfs(graph: &Graph, v: Vertex) -> usize {
+ fn go(graph: &Graph, visited: &mut FxHashSet<Vertex>, v: usize) -> usize {
+ ...
+ }
+
+ let mut visited = FxHashSet::default();
+ go(graph, &mut visited, v)
+}
+```
+
+**Rationale:** consistency, improved top-down readability.
+
+## Helper Variables
+
+Introduce helper variables freely, especially for multiline conditions:
+
+```rust
+// GOOD
+let rustfmt_not_installed =
+ captured_stderr.contains("not installed") || captured_stderr.contains("not available");
+
+match output.status.code() {
+ Some(1) if !rustfmt_not_installed => Ok(None),
+ _ => Err(format_err!("rustfmt failed:\n{}", captured_stderr)),
+};
+
+// BAD
+match output.status.code() {
+ Some(1)
+ if !captured_stderr.contains("not installed")
+ && !captured_stderr.contains("not available") => Ok(None),
+ _ => Err(format_err!("rustfmt failed:\n{}", captured_stderr)),
+};
+```
+
+**Rationale:** Like blocks, single-use variables are a cognitively cheap abstraction, as they have access to all the context.
+Extra variables help during debugging, they make it easy to print/view important intermediate results.
+Giving a name to a condition inside an `if` expression often improves clarity and leads to nicely formatted code.
+
+## Token names
+
+Use `T![foo]` instead of `SyntaxKind::FOO_KW`.
+
+```rust
+// GOOD
+match p.current() {
+ T![true] | T![false] => true,
+ _ => false,
+}
+
+// BAD
+
+match p.current() {
+ SyntaxKind::TRUE_KW | SyntaxKind::FALSE_KW => true,
+ _ => false,
+}
+```
+
+**Rationale:** The macro uses the familiar Rust syntax, avoiding ambiguities like "is this a brace or bracket?".
+
+## Documentation
+
+Style inline code comments as proper sentences.
+Start with a capital letter, end with a dot.
+
+```rust
+// GOOD
+
+// Only simple single segment paths are allowed.
+MergeBehavior::Last => {
+ tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
+}
+
+// BAD
+
+// only simple single segment paths are allowed
+MergeBehavior::Last => {
+ tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
+}
+```
+
+**Rationale:** writing a sentence (or maybe even a paragraph) rather just "a comment" creates a more appropriate frame of mind.
+It tricks you into writing down more of the context you keep in your head while coding.
+
+For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines.
+If the line is too long, you want to split the sentence in two :-)
+
+**Rationale:** much easier to edit the text and read the diff, see [this link](https://asciidoctor.org/docs/asciidoc-recommended-practices/#one-sentence-per-line).
diff --git a/src/tools/rust-analyzer/docs/dev/syntax.md b/src/tools/rust-analyzer/docs/dev/syntax.md
new file mode 100644
index 000000000..30e137013
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/syntax.md
@@ -0,0 +1,534 @@
+# Syntax in rust-analyzer
+
+## About the guide
+
+This guide describes the current state of syntax trees and parsing in rust-analyzer as of 2020-01-09 ([link to commit](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6)).
+
+## Source Code
+
+The things described are implemented in three places
+
+* [rowan](https://github.com/rust-analyzer/rowan/tree/v0.9.0) -- a generic library for rowan syntax trees.
+* [ra_syntax](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API.
+ Nothing in rust-analyzer except this crate knows about `rowan`.
+* [parser](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/parser) crate parses input tokens into an `ra_syntax` tree.
+
+## Design Goals
+
+* Syntax trees are lossless, or full fidelity. All comments and whitespace get preserved.
+* Syntax trees are semantic-less. They describe *strictly* the structure of a sequence of characters, they don't have hygiene, name resolution or type information attached.
+* Syntax trees are simple value types. It is possible to create trees for a syntax without any external context.
+* Syntax trees have intuitive traversal API (parent, children, siblings, etc).
+* Parsing is lossless (even if the input is invalid, the tree produced by the parser represents it exactly).
+* Parsing is resilient (even if the input is invalid, parser tries to see as much syntax tree fragments in the input as it can).
+* Performance is important, it's OK to use `unsafe` if it means better memory/cpu usage.
+* Keep the parser and the syntax tree isolated from each other, such that they can vary independently.
+
+## Trees
+
+### Overview
+
+The syntax tree consists of three layers:
+
+* GreenNodes
+* SyntaxNodes (aka RedNode)
+* AST
+
+Of these, only GreenNodes store the actual data, the other two layers are (non-trivial) views into green tree.
+Red-green terminology comes from Roslyn ([link](https://ericlippert.com/2012/06/08/red-green-trees/)) and gives the name to the `rowan` library. Green and syntax nodes are defined in rowan, ast is defined in rust-analyzer.
+
+Syntax trees are a semi-transient data structure.
+In general, frontend does not keep syntax trees for all files in memory.
+Instead, it *lowers* syntax trees to more compact and rigid representation, which is not full-fidelity, but which can be mapped back to a syntax tree if so desired.
+
+
+### GreenNode
+
+GreenNode is a purely-functional tree with arbitrary arity. Conceptually, it is equivalent to the following run of the mill struct:
+
+```rust
+#[derive(PartialEq, Eq, Clone, Copy)]
+struct SyntaxKind(u16);
+
+#[derive(PartialEq, Eq, Clone)]
+struct Node {
+ kind: SyntaxKind,
+ text_len: usize,
+ children: Vec<Arc<Either<Node, Token>>>,
+}
+
+#[derive(PartialEq, Eq, Clone)]
+struct Token {
+ kind: SyntaxKind,
+ text: String,
+}
+```
+
+All the difference between the above sketch and the real implementation are strictly due to optimizations.
+
+Points of note:
+* The tree is untyped. Each node has a "type tag", `SyntaxKind`.
+* Interior and leaf nodes are distinguished on the type level.
+* Trivia and non-trivia tokens are not distinguished on the type level.
+* Each token carries its full text.
+* The original text can be recovered by concatenating the texts of all tokens in order.
+* Accessing a child of particular type (for example, parameter list of a function) generally involves linearly traversing the children, looking for a specific `kind`.
+* Modifying the tree is roughly `O(depth)`.
+ We don't make special efforts to guarantee that the depth is not linear, but, in practice, syntax trees are branchy and shallow.
+* If mandatory (grammar wise) node is missing from the input, it's just missing from the tree.
+* If an extra erroneous input is present, it is wrapped into a node with `ERROR` kind, and treated just like any other node.
+* Parser errors are not a part of syntax tree.
+
+An input like `fn f() { 90 + 2 }` might be parsed as
+
+```
+FN@0..17
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..4
+ IDENT@3..4 "f"
+ PARAM_LIST@4..6
+ L_PAREN@4..5 "("
+ R_PAREN@5..6 ")"
+ WHITESPACE@6..7 " "
+ BLOCK_EXPR@7..17
+ L_CURLY@7..8 "{"
+ WHITESPACE@8..9 " "
+ BIN_EXPR@9..15
+ LITERAL@9..11
+ INT_NUMBER@9..11 "90"
+ WHITESPACE@11..12 " "
+ PLUS@12..13 "+"
+ WHITESPACE@13..14 " "
+ LITERAL@14..15
+ INT_NUMBER@14..15 "2"
+ WHITESPACE@15..16 " "
+ R_CURLY@16..17 "}"
+```
+
+#### Optimizations
+
+(significant amount of implementation work here was done by [CAD97](https://github.com/cad97)).
+
+To reduce the amount of allocations, the GreenNode is a [DST](https://doc.rust-lang.org/reference/dynamically-sized-types.html), which uses a single allocation for header and children. Thus, it is only usable behind a pointer.
+
+```
+*-----------+------+----------+------------+--------+--------+-----+--------*
+| ref_count | kind | text_len | n_children | child1 | child2 | ... | childn |
+*-----------+------+----------+------------+--------+--------+-----+--------*
+```
+
+To more compactly store the children, we box *both* interior nodes and tokens, and represent
+`Either<Arc<Node>, Arc<Token>>` as a single pointer with a tag in the last bit.
+
+To avoid allocating EVERY SINGLE TOKEN on the heap, syntax trees use interning.
+Because the tree is fully immutable, it's valid to structurally share subtrees.
+For example, in `1 + 1`, there will be a *single* token for `1` with ref count 2; the same goes for the ` ` whitespace token.
+Interior nodes are shared as well (for example in `(1 + 1) * (1 + 1)`).
+
+Note that, the result of the interning is an `Arc<Node>`.
+That is, it's not an index into interning table, so you don't have to have the table around to do anything with the tree.
+Each tree is fully self-contained (although different trees might share parts).
+Currently, the interner is created per-file, but it will be easy to use a per-thread or per-some-context one.
+
+We use a `TextSize`, a newtyped `u32`, to store the length of the text.
+
+We currently use `SmolStr`, a small object optimized string to store text.
+This was mostly relevant *before* we implemented tree interning, to avoid allocating common keywords and identifiers. We should switch to storing text data alongside the interned tokens.
+
+#### Alternative designs
+
+##### Dealing with trivia
+
+In the above model, whitespace is not treated specially.
+Another alternative (used by swift and roslyn) is to explicitly divide the set of tokens into trivia and non-trivia tokens, and represent non-trivia tokens as
+
+```rust
+struct Token {
+ kind: NonTriviaTokenKind,
+ text: String,
+ leading_trivia: Vec<TriviaToken>,
+ trailing_trivia: Vec<TriviaToken>,
+}
+```
+
+The tree then contains only non-trivia tokens.
+
+Another approach (from Dart) is to, in addition to a syntax tree, link all the tokens into a bidirectional link list.
+That way, the tree again contains only non-trivia tokens.
+
+Explicit trivia nodes, like in `rowan`, are used by IntelliJ.
+
+##### Accessing Children
+
+As noted before, accessing a specific child in the node requires a linear traversal of the children (though we can skip tokens, because the tag is encoded in the pointer itself).
+It is possible to recover O(1) access with another representation.
+We explicitly store optional and missing (required by the grammar, but not present) nodes.
+That is, we use `Option<Node>` for children.
+We also remove trivia tokens from the tree.
+This way, each child kind generally occupies a fixed position in a parent, and we can use index access to fetch it.
+The cost is that we now need to allocate space for all not-present optional nodes.
+So, `fn foo() {}` will have slots for visibility, unsafeness, attributes, abi and return type.
+
+IntelliJ uses linear traversal.
+Roslyn and Swift do `O(1)` access.
+
+##### Mutable Trees
+
+IntelliJ uses mutable trees.
+Overall, it creates a lot of additional complexity.
+However, the API for *editing* syntax trees is nice.
+
+For example the assist to move generic bounds to where clause has this code:
+
+```kotlin
+ for typeBound in typeBounds {
+ typeBound.typeParamBounds?.delete()
+}
+```
+
+Modeling this with immutable trees is possible, but annoying.
+
+### Syntax Nodes
+
+A functional green tree is not super-convenient to use.
+The biggest problem is accessing parents (there are no parent pointers!).
+But there are also "identity" issues.
+Let's say you want to write a code which builds a list of expressions in a file: `fn collect_expressions(file: GreenNode) -> HashSet<GreenNode>`.
+For the input like
+
+```rust
+fn main() {
+ let x = 90i8;
+ let x = x + 2;
+ let x = 90i64;
+ let x = x + 2;
+}
+```
+
+both copies of the `x + 2` expression are represented by equal (and, with interning in mind, actually the same) green nodes.
+Green trees just can't differentiate between the two.
+
+`SyntaxNode` adds parent pointers and identity semantics to green nodes.
+They can be called cursors or [zippers](https://en.wikipedia.org/wiki/Zipper_(data_structure)) (fun fact: zipper is a derivative (as in ′) of a data structure).
+
+Conceptually, a `SyntaxNode` looks like this:
+
+```rust
+type SyntaxNode = Arc<SyntaxData>;
+
+struct SyntaxData {
+ offset: usize,
+ parent: Option<SyntaxNode>,
+ green: Arc<GreenNode>,
+}
+
+impl SyntaxNode {
+ fn new_root(root: Arc<GreenNode>) -> SyntaxNode {
+ Arc::new(SyntaxData {
+ offset: 0,
+ parent: None,
+ green: root,
+ })
+ }
+ fn parent(&self) -> Option<SyntaxNode> {
+ self.parent.clone()
+ }
+ fn children(&self) -> impl Iterator<Item = SyntaxNode> {
+ let mut offset = self.offset;
+ self.green.children().map(|green_child| {
+ let child_offset = offset;
+ offset += green_child.text_len;
+ Arc::new(SyntaxData {
+ offset: child_offset,
+ parent: Some(Arc::clone(self)),
+ green: Arc::clone(green_child),
+ })
+ })
+ }
+}
+
+impl PartialEq for SyntaxNode {
+ fn eq(&self, other: &SyntaxNode) -> bool {
+ self.offset == other.offset
+ && Arc::ptr_eq(&self.green, &other.green)
+ }
+}
+```
+
+Points of note:
+
+* SyntaxNode remembers its parent node (and, transitively, the path to the root of the tree)
+* SyntaxNode knows its *absolute* text offset in the whole file
+* Equality is based on identity. Comparing nodes from different trees does not make sense.
+
+#### Optimization
+
+The reality is different though :-)
+Traversal of trees is a common operation, and it makes sense to optimize it.
+In particular, the above code allocates and does atomic operations during a traversal.
+
+To get rid of atomics, `rowan` uses non thread-safe `Rc`.
+This is OK because tree traversals mostly (always, in case of rust-analyzer) run on a single thread. If you need to send a `SyntaxNode` to another thread, you can send a pair of **root** `GreenNode` (which is thread safe) and a `Range<usize>`.
+The other thread can restore the `SyntaxNode` by traversing from the root green node and looking for a node with specified range.
+You can also use a similar trick to store a `SyntaxNode`.
+That is, a data structure that holds a `(GreenNode, Range<usize>)` will be `Sync`.
+However, rust-analyzer goes even further.
+It treats trees as semi-transient and instead of storing a `GreenNode`, it generally stores just the id of the file from which the tree originated: `(FileId, Range<usize>)`.
+The `SyntaxNode` is then restored by reparsing the file and traversing it from the root.
+With this trick, rust-analyzer holds only a small amount of trees in memory at the same time, which reduces memory usage.
+
+Additionally, only the root `SyntaxNode` owns an `Arc` to the (root) `GreenNode`.
+All other `SyntaxNode`s point to corresponding `GreenNode`s with a raw pointer.
+They also point to the parent (and, consequently, to the root) with an owning `Rc`, so this is sound.
+In other words, one needs *one* arc bump when initiating a traversal.
+
+To get rid of allocations, `rowan` takes advantage of `SyntaxNode: !Sync` and uses a thread-local free list of `SyntaxNode`s.
+In a typical traversal, you only directly hold a few `SyntaxNode`s at a time (and their ancestors indirectly), so a free list proportional to the depth of the tree removes all allocations in a typical case.
+
+So, while traversal is not exactly incrementing a pointer, it's still pretty cheap: TLS + rc bump!
+
+Traversal also yields (cheap) owned nodes, which improves ergonomics quite a bit.
+
+#### Alternative Designs
+
+##### Memoized RedNodes
+
+C# and Swift follow the design where the red nodes are memoized, which would look roughly like this in Rust:
+
+```rust
+type SyntaxNode = Arc<SyntaxData>;
+
+struct SyntaxData {
+ offset: usize,
+ parent: Option<SyntaxNode>,
+ green: Arc<GreenNode>,
+ children: Vec<OnceCell<SyntaxNode>>,
+}
+```
+
+This allows using true pointer equality for comparison of identities of `SyntaxNodes`.
+rust-analyzer used to have this design as well, but we've since switched to cursors.
+The main problem with memoizing the red nodes is that it more than doubles the memory requirements for fully realized syntax trees.
+In contrast, cursors generally retain only a path to the root.
+C# combats increased memory usage by using weak references.
+
+### AST
+
+`GreenTree`s are untyped and homogeneous, because it makes accommodating error nodes, arbitrary whitespace and comments natural, and because it makes it possible to write generic tree traversals.
+However, when working with a specific node, like a function definition, one would want a strongly typed API.
+
+This is what is provided by the AST layer. AST nodes are transparent wrappers over untyped syntax nodes:
+
+```rust
+pub trait AstNode {
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxNode;
+}
+```
+
+Concrete nodes are generated (there are 117 of them), and look roughly like this:
+
+```rust
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FnDef {
+ syntax: SyntaxNode,
+}
+
+impl AstNode for FnDef {
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+        match syntax.kind() {
+ FN => Some(FnDef { syntax }),
+ _ => None,
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ &self.syntax
+ }
+}
+
+impl FnDef {
+ pub fn param_list(&self) -> Option<ParamList> {
+ self.syntax.children().find_map(ParamList::cast)
+ }
+ pub fn ret_type(&self) -> Option<RetType> {
+ self.syntax.children().find_map(RetType::cast)
+ }
+ pub fn body(&self) -> Option<BlockExpr> {
+ self.syntax.children().find_map(BlockExpr::cast)
+ }
+ // ...
+}
+```
+
+Variants like expressions, patterns or items are modeled with `enum`s, which also implement `AstNode`:
+
+```rust
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+ FnDef(FnDef),
+ TypeAliasDef(TypeAliasDef),
+ ConstDef(ConstDef),
+}
+
+impl AstNode for AssocItem {
+ ...
+}
+```
+
+Shared AST substructures are modeled via (object safe) traits:
+
+```rust
+trait HasVisibility: AstNode {
+ fn visibility(&self) -> Option<Visibility>;
+}
+
+impl HasVisibility for FnDef {
+ fn visibility(&self) -> Option<Visibility> {
+ self.syntax.children().find_map(Visibility::cast)
+ }
+}
+```
+
+Points of note:
+
+* Like `SyntaxNode`s, AST nodes are cheap to clone pointer-sized owned values.
+* All "fields" are optional, to accommodate incomplete and/or erroneous source code.
+* It's always possible to go from an ast node to an untyped `SyntaxNode`.
+* It's possible to go in the opposite direction with a checked cast.
+* `enum`s allow modeling of arbitrary intersecting subsets of AST types.
+* Most of rust-analyzer works with the ast layer, with notable exceptions of:
+ * macro expansion, which needs access to raw tokens and works with `SyntaxNode`s
+ * some IDE-specific features like syntax highlighting are more conveniently implemented over a homogeneous `SyntaxNode` tree
+
+#### Alternative Designs
+
+##### Semantic Full AST
+
+In IntelliJ the AST layer (dubbed **P**rogram **S**tructure **I**nterface) can have semantics attached, and is usually backed by either syntax tree, indices, or metadata from compiled libraries.
+The backend for PSI can change dynamically.
+
+### Syntax Tree Recap
+
+At its core, the syntax tree is a purely functional n-ary tree, which stores text at the leaf nodes and node "kinds" at all nodes.
+A cursor layer is added on top, which gives owned, cheap to clone nodes with identity semantics, parent links and absolute offsets.
+An AST layer is added on top, which reifies each node `Kind` as a separate Rust type with the corresponding API.
+
+## Parsing
+
+The (green) tree is constructed by a DFS "traversal" of the desired tree structure:
+
+```rust
+pub struct GreenNodeBuilder { ... }
+
+impl GreenNodeBuilder {
+ pub fn new() -> GreenNodeBuilder { ... }
+
+ pub fn token(&mut self, kind: SyntaxKind, text: &str) { ... }
+
+ pub fn start_node(&mut self, kind: SyntaxKind) { ... }
+ pub fn finish_node(&mut self) { ... }
+
+ pub fn finish(self) -> GreenNode { ... }
+}
+```
+
+The parser, ultimately, needs to invoke the `GreenNodeBuilder`.
+There are two principal sources of inputs for the parser:
+ * source text, which contains trivia tokens (whitespace and comments)
+ * token trees from macros, which lack trivia
+
+Additionally, input tokens do not correspond 1-to-1 with output tokens.
+For example, two consecutive `>` tokens might be glued, by the parser, into a single `>>`.
+
+For these reasons, the parser crate defines callback interfaces for both input tokens and output trees.
+The explicit glue layer then bridges various gaps.
+
+The parser interface looks like this:
+
+```rust
+pub struct Token {
+ pub kind: SyntaxKind,
+ pub is_joined_to_next: bool,
+}
+
+pub trait TokenSource {
+ fn current(&self) -> Token;
+ fn lookahead_nth(&self, n: usize) -> Token;
+ fn is_keyword(&self, kw: &str) -> bool;
+
+ fn bump(&mut self);
+}
+
+pub trait TreeSink {
+ fn token(&mut self, kind: SyntaxKind, n_tokens: u8);
+
+ fn start_node(&mut self, kind: SyntaxKind);
+ fn finish_node(&mut self);
+
+ fn error(&mut self, error: ParseError);
+}
+
+pub fn parse(
+ token_source: &mut dyn TokenSource,
+ tree_sink: &mut dyn TreeSink,
+) { ... }
+```
+
+Points of note:
+
+* The parser and the syntax tree are independent, they live in different crates neither of which depends on the other.
+* The parser doesn't know anything about textual contents of the tokens, with an isolated hack for checking contextual keywords.
+* For gluing tokens, the `TreeSink::token` might advance further than one atomic token ahead.
+
+### Reporting Syntax Errors
+
+Syntax errors are not stored directly in the tree.
+The primary motivation for this is that the syntax tree is not necessarily produced by the parser, it may also be assembled manually from pieces (which happens all the time in refactorings).
+Instead, parser reports errors to an error sink, which stores them in a `Vec`.
+If possible, errors are not reported during parsing and are postponed for a separate validation step.
+For example, parser accepts visibility modifiers on trait methods, but then a separate tree traversal flags all such visibilities as erroneous.
+
+### Macros
+
+The primary difficulty with macros is that individual tokens have identities, which need to be preserved in the syntax tree for hygiene purposes.
+This is handled by the `TreeSink` layer.
+Specifically, `TreeSink` constructs the tree in lockstep with draining the original token stream.
+In the process, it records which tokens of the tree correspond to which tokens of the input, by using text ranges to identify syntax tokens.
+The end result is that parsing an expanded code yields a syntax tree and a mapping of text-ranges of the tree to original tokens.
+
+To deal with precedence in cases like `$expr * 1`, we use special invisible parentheses, which are explicitly handled by the parser.
+
+### Whitespace & Comments
+
+Parser does not see whitespace nodes.
+Instead, they are attached to the tree in the `TreeSink` layer.
+
+For example, in
+
+```rust
+// non doc comment
+fn foo() {}
+```
+
+the comment will be (heuristically) made a child of the function node.
+
+### Incremental Reparse
+
+Green trees are cheap to modify, so incremental reparse works by patching a previous tree, without maintaining any additional state.
+The reparse is based on heuristic: we try to contain a change to a single `{}` block, and reparse only this block.
+To do this, we maintain the invariant that, even for invalid code, curly braces are always paired correctly.
+
+In practice, incremental reparsing doesn't actually matter much for IDE use-cases, parsing from scratch seems to be fast enough.
+
+### Parsing Algorithm
+
+We use a boring hand-crafted recursive descent + pratt combination, with a special effort of continuing the parsing if an error is detected.
+
+### Parser Recap
+
+Parser itself defines traits for token sequence input and syntax tree output.
+It doesn't care about where the tokens come from, and what the resulting syntax tree looks like.
diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc
new file mode 100644
index 000000000..b0f2f1614
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc
@@ -0,0 +1,620 @@
+[[rust-analyzer.assist.expressionFillDefault]]rust-analyzer.assist.expressionFillDefault (default: `"todo"`)::
++
+--
+Placeholder expression to use for missing expressions in assists.
+--
+[[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`)::
++
+--
+Warm up caches on project load.
+--
+[[rust-analyzer.cachePriming.numThreads]]rust-analyzer.cachePriming.numThreads (default: `0`)::
++
+--
+How many worker threads to handle priming caches. The default `0` means to pick automatically.
+--
+[[rust-analyzer.cargo.autoreload]]rust-analyzer.cargo.autoreload (default: `true`)::
++
+--
+Automatically refresh project info via `cargo metadata` on
+`Cargo.toml` or `.cargo/config.toml` changes.
+--
+[[rust-analyzer.cargo.buildScripts.enable]]rust-analyzer.cargo.buildScripts.enable (default: `true`)::
++
+--
+Run build scripts (`build.rs`) for more precise code analysis.
+--
+[[rust-analyzer.cargo.buildScripts.overrideCommand]]rust-analyzer.cargo.buildScripts.overrideCommand (default: `null`)::
++
+--
+Override the command rust-analyzer uses to run build scripts and
+build procedural macros. The command is required to output json
+and should therefore include `--message-format=json` or a similar
+option.
+
+By default, a cargo invocation will be constructed for the configured
+targets and features, with the following base command line:
+
+```bash
+cargo check --quiet --workspace --message-format=json --all-targets
+```
+.
+--
+[[rust-analyzer.cargo.buildScripts.useRustcWrapper]]rust-analyzer.cargo.buildScripts.useRustcWrapper (default: `true`)::
++
+--
+Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
+avoid checking unnecessary things.
+--
+[[rust-analyzer.cargo.features]]rust-analyzer.cargo.features (default: `[]`)::
++
+--
+List of features to activate.
+
+Set this to `"all"` to pass `--all-features` to cargo.
+--
+[[rust-analyzer.cargo.noDefaultFeatures]]rust-analyzer.cargo.noDefaultFeatures (default: `false`)::
++
+--
+Whether to pass `--no-default-features` to cargo.
+--
+[[rust-analyzer.cargo.noSysroot]]rust-analyzer.cargo.noSysroot (default: `false`)::
++
+--
+Internal config for debugging, disables loading of sysroot crates.
+--
+[[rust-analyzer.cargo.target]]rust-analyzer.cargo.target (default: `null`)::
++
+--
+Compilation target override (target triple).
+--
+[[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest (default: `["core"]`)::
++
+--
+Unsets `#[cfg(test)]` for the specified crates.
+--
+[[rust-analyzer.checkOnSave.allTargets]]rust-analyzer.checkOnSave.allTargets (default: `true`)::
++
+--
+Check all targets and tests (`--all-targets`).
+--
+[[rust-analyzer.checkOnSave.command]]rust-analyzer.checkOnSave.command (default: `"check"`)::
++
+--
+Cargo command to use for `cargo check`.
+--
+[[rust-analyzer.checkOnSave.enable]]rust-analyzer.checkOnSave.enable (default: `true`)::
++
+--
+Run specified `cargo check` command for diagnostics on save.
+--
+[[rust-analyzer.checkOnSave.extraArgs]]rust-analyzer.checkOnSave.extraArgs (default: `[]`)::
++
+--
+Extra arguments for `cargo check`.
+--
+[[rust-analyzer.checkOnSave.features]]rust-analyzer.checkOnSave.features (default: `null`)::
++
+--
+List of features to activate. Defaults to
+`#rust-analyzer.cargo.features#`.
+
+Set to `"all"` to pass `--all-features` to Cargo.
+--
+[[rust-analyzer.checkOnSave.noDefaultFeatures]]rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`)::
++
+--
+Whether to pass `--no-default-features` to Cargo. Defaults to
+`#rust-analyzer.cargo.noDefaultFeatures#`.
+--
+[[rust-analyzer.checkOnSave.overrideCommand]]rust-analyzer.checkOnSave.overrideCommand (default: `null`)::
++
+--
+Override the command rust-analyzer uses instead of `cargo check` for
+diagnostics on save. The command is required to output json and
+should therefore include `--message-format=json` or a similar option.
+
+If you're changing this because you're using some tool wrapping
+Cargo, you might also want to change
+`#rust-analyzer.cargo.buildScripts.overrideCommand#`.
+
+An example command would be:
+
+```bash
+cargo check --workspace --message-format=json --all-targets
+```
+.
+--
+[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `null`)::
++
+--
+Check for a specific target. Defaults to
+`#rust-analyzer.cargo.target#`.
+--
+[[rust-analyzer.completion.autoimport.enable]]rust-analyzer.completion.autoimport.enable (default: `true`)::
++
+--
+Toggles the additional completions that automatically add imports when completed.
+Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+--
+[[rust-analyzer.completion.autoself.enable]]rust-analyzer.completion.autoself.enable (default: `true`)::
++
+--
+Toggles the additional completions that automatically show method calls and field accesses
+with `self` prefixed to them when inside a method.
+--
+[[rust-analyzer.completion.callable.snippets]]rust-analyzer.completion.callable.snippets (default: `"fill_arguments"`)::
++
+--
+Whether to add parenthesis and argument snippets when completing function.
+--
+[[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`)::
++
+--
+Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+--
+[[rust-analyzer.completion.privateEditable.enable]]rust-analyzer.completion.privateEditable.enable (default: `false`)::
++
+--
+Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+--
+[[rust-analyzer.completion.snippets.custom]]rust-analyzer.completion.snippets.custom::
++
+--
+Default:
+----
+{
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ }
+----
+Custom completion snippets.
+
+--
+[[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`)::
++
+--
+List of rust-analyzer diagnostics to disable.
+--
+[[rust-analyzer.diagnostics.enable]]rust-analyzer.diagnostics.enable (default: `true`)::
++
+--
+Whether to show native rust-analyzer diagnostics.
+--
+[[rust-analyzer.diagnostics.experimental.enable]]rust-analyzer.diagnostics.experimental.enable (default: `false`)::
++
+--
+Whether to show experimental rust-analyzer diagnostics that might
+have more false positives than usual.
+--
+[[rust-analyzer.diagnostics.remapPrefix]]rust-analyzer.diagnostics.remapPrefix (default: `{}`)::
++
+--
+Map of prefixes to be substituted when parsing diagnostic file paths.
+This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
+--
+[[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`)::
++
+--
+List of warnings that should be displayed with hint severity.
+
+The warnings will be indicated by faded text or three dots in code
+and will not show up in the `Problems Panel`.
+--
+[[rust-analyzer.diagnostics.warningsAsInfo]]rust-analyzer.diagnostics.warningsAsInfo (default: `[]`)::
++
+--
+List of warnings that should be displayed with info severity.
+
+The warnings will be indicated by a blue squiggly underline in code
+and a blue icon in the `Problems Panel`.
+--
+[[rust-analyzer.files.excludeDirs]]rust-analyzer.files.excludeDirs (default: `[]`)::
++
+--
+These directories will be ignored by rust-analyzer. They are
+relative to the workspace root, and globs are not supported. You may
+also need to add the folders to Code's `files.watcherExclude`.
+--
+[[rust-analyzer.files.watcher]]rust-analyzer.files.watcher (default: `"client"`)::
++
+--
+Controls file watching implementation.
+--
+[[rust-analyzer.highlightRelated.breakPoints.enable]]rust-analyzer.highlightRelated.breakPoints.enable (default: `true`)::
++
+--
+Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+--
+[[rust-analyzer.highlightRelated.exitPoints.enable]]rust-analyzer.highlightRelated.exitPoints.enable (default: `true`)::
++
+--
+Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
+--
+[[rust-analyzer.highlightRelated.references.enable]]rust-analyzer.highlightRelated.references.enable (default: `true`)::
++
+--
+Enables highlighting of related references while the cursor is on any identifier.
+--
+[[rust-analyzer.highlightRelated.yieldPoints.enable]]rust-analyzer.highlightRelated.yieldPoints.enable (default: `true`)::
++
+--
+Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
+--
+[[rust-analyzer.hover.actions.debug.enable]]rust-analyzer.hover.actions.debug.enable (default: `true`)::
++
+--
+Whether to show `Debug` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.enable]]rust-analyzer.hover.actions.enable (default: `true`)::
++
+--
+Whether to show HoverActions in Rust files.
+--
+[[rust-analyzer.hover.actions.gotoTypeDef.enable]]rust-analyzer.hover.actions.gotoTypeDef.enable (default: `true`)::
++
+--
+Whether to show `Go to Type Definition` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.implementations.enable]]rust-analyzer.hover.actions.implementations.enable (default: `true`)::
++
+--
+Whether to show `Implementations` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.references.enable]]rust-analyzer.hover.actions.references.enable (default: `false`)::
++
+--
+Whether to show `References` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.run.enable]]rust-analyzer.hover.actions.run.enable (default: `true`)::
++
+--
+Whether to show `Run` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.documentation.enable]]rust-analyzer.hover.documentation.enable (default: `true`)::
++
+--
+Whether to show documentation on hover.
+--
+[[rust-analyzer.hover.links.enable]]rust-analyzer.hover.links.enable (default: `true`)::
++
+--
+Use markdown syntax for links in hover.
+--
+[[rust-analyzer.imports.granularity.enforce]]rust-analyzer.imports.granularity.enforce (default: `false`)::
++
+--
+Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+--
+[[rust-analyzer.imports.granularity.group]]rust-analyzer.imports.granularity.group (default: `"crate"`)::
++
+--
+How imports should be grouped into use statements.
+--
+[[rust-analyzer.imports.group.enable]]rust-analyzer.imports.group.enable (default: `true`)::
++
+--
+Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+--
+[[rust-analyzer.imports.merge.glob]]rust-analyzer.imports.merge.glob (default: `true`)::
++
+--
+Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+--
+[[rust-analyzer.imports.prefix]]rust-analyzer.imports.prefix (default: `"plain"`)::
++
+--
+The path structure for newly inserted paths to use.
+--
+[[rust-analyzer.inlayHints.bindingModeHints.enable]]rust-analyzer.inlayHints.bindingModeHints.enable (default: `false`)::
++
+--
+Whether to show inlay type hints for binding modes.
+--
+[[rust-analyzer.inlayHints.chainingHints.enable]]rust-analyzer.inlayHints.chainingHints.enable (default: `true`)::
++
+--
+Whether to show inlay type hints for method chains.
+--
+[[rust-analyzer.inlayHints.closingBraceHints.enable]]rust-analyzer.inlayHints.closingBraceHints.enable (default: `true`)::
++
+--
+Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+--
+[[rust-analyzer.inlayHints.closingBraceHints.minLines]]rust-analyzer.inlayHints.closingBraceHints.minLines (default: `25`)::
++
+--
+Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+to always show them).
+--
+[[rust-analyzer.inlayHints.closureReturnTypeHints.enable]]rust-analyzer.inlayHints.closureReturnTypeHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay type hints for return types of closures.
+--
+[[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay type hints for elided lifetimes in function signatures.
+--
+[[rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames]]rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames (default: `false`)::
++
+--
+Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+--
+[[rust-analyzer.inlayHints.maxLength]]rust-analyzer.inlayHints.maxLength (default: `25`)::
++
+--
+Maximum length for inlay hints. Set to null to have an unlimited length.
+--
+[[rust-analyzer.inlayHints.parameterHints.enable]]rust-analyzer.inlayHints.parameterHints.enable (default: `true`)::
++
+--
+Whether to show function parameter name inlay hints at the call
+site.
+--
+[[rust-analyzer.inlayHints.reborrowHints.enable]]rust-analyzer.inlayHints.reborrowHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay type hints for compiler inserted reborrows.
+--
+[[rust-analyzer.inlayHints.renderColons]]rust-analyzer.inlayHints.renderColons (default: `true`)::
++
+--
+Whether to render leading colons for type hints, and trailing colons for parameter hints.
+--
+[[rust-analyzer.inlayHints.typeHints.enable]]rust-analyzer.inlayHints.typeHints.enable (default: `true`)::
++
+--
+Whether to show inlay type hints for variables.
+--
+[[rust-analyzer.inlayHints.typeHints.hideClosureInitialization]]rust-analyzer.inlayHints.typeHints.hideClosureInitialization (default: `false`)::
++
+--
+Whether to hide inlay type hints for `let` statements that initialize to a closure.
+Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
+--
+[[rust-analyzer.inlayHints.typeHints.hideNamedConstructor]]rust-analyzer.inlayHints.typeHints.hideNamedConstructor (default: `false`)::
++
+--
+Whether to hide inlay type hints for constructors.
+--
+[[rust-analyzer.joinLines.joinAssignments]]rust-analyzer.joinLines.joinAssignments (default: `true`)::
++
+--
+Join lines merges consecutive declaration and initialization of an assignment.
+--
+[[rust-analyzer.joinLines.joinElseIf]]rust-analyzer.joinLines.joinElseIf (default: `true`)::
++
+--
+Join lines inserts else between consecutive ifs.
+--
+[[rust-analyzer.joinLines.removeTrailingComma]]rust-analyzer.joinLines.removeTrailingComma (default: `true`)::
++
+--
+Join lines removes trailing commas.
+--
+[[rust-analyzer.joinLines.unwrapTrivialBlock]]rust-analyzer.joinLines.unwrapTrivialBlock (default: `true`)::
++
+--
+Join lines unwraps trivial blocks.
+--
+[[rust-analyzer.lens.debug.enable]]rust-analyzer.lens.debug.enable (default: `true`)::
++
+--
+Whether to show `Debug` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.enable]]rust-analyzer.lens.enable (default: `true`)::
++
+--
+Whether to show CodeLens in Rust files.
+--
+[[rust-analyzer.lens.forceCustomCommands]]rust-analyzer.lens.forceCustomCommands (default: `true`)::
++
+--
+Internal config: use custom client-side commands even when the
+client doesn't set the corresponding capability.
+--
+[[rust-analyzer.lens.implementations.enable]]rust-analyzer.lens.implementations.enable (default: `true`)::
++
+--
+Whether to show `Implementations` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.adt.enable]]rust-analyzer.lens.references.adt.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Struct, Enum, and Union.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.enumVariant.enable]]rust-analyzer.lens.references.enumVariant.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Enum Variants.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.method.enable]]rust-analyzer.lens.references.method.enable (default: `false`)::
++
+--
+Whether to show `Method References` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.trait.enable]]rust-analyzer.lens.references.trait.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Trait.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.run.enable]]rust-analyzer.lens.run.enable (default: `true`)::
++
+--
+Whether to show `Run` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.linkedProjects]]rust-analyzer.linkedProjects (default: `[]`)::
++
+--
+Disable project auto-discovery in favor of explicitly specified set
+of projects.
+
+Elements must be paths pointing to `Cargo.toml`,
+`rust-project.json`, or JSON objects in `rust-project.json` format.
+--
+[[rust-analyzer.lru.capacity]]rust-analyzer.lru.capacity (default: `null`)::
++
+--
+Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+--
+[[rust-analyzer.notifications.cargoTomlNotFound]]rust-analyzer.notifications.cargoTomlNotFound (default: `true`)::
++
+--
+Whether to show `can't find Cargo.toml` error message.
+--
+[[rust-analyzer.procMacro.attributes.enable]]rust-analyzer.procMacro.attributes.enable (default: `true`)::
++
+--
+Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+--
+[[rust-analyzer.procMacro.enable]]rust-analyzer.procMacro.enable (default: `true`)::
++
+--
+Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+--
+[[rust-analyzer.procMacro.ignored]]rust-analyzer.procMacro.ignored (default: `{}`)::
++
+--
+These proc-macros will be ignored when trying to expand them.
+
+This config takes a map of crate names with the exported proc-macro names to ignore as values.
+--
+[[rust-analyzer.procMacro.server]]rust-analyzer.procMacro.server (default: `null`)::
++
+--
+Internal config, path to proc-macro server executable (typically,
+this is rust-analyzer itself, but we override this in tests).
+--
+[[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`)::
++
+--
+Command to be executed instead of 'cargo' for runnables.
+--
+[[rust-analyzer.runnables.extraArgs]]rust-analyzer.runnables.extraArgs (default: `[]`)::
++
+--
+Additional arguments to be passed to cargo for runnables such as
+tests or binaries. For example, it may be `--release`.
+--
+[[rust-analyzer.rustc.source]]rust-analyzer.rustc.source (default: `null`)::
++
+--
+Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
+projects, or "discover" to try to automatically find it if the `rustc-dev` component
+is installed.
+
+Any project which uses rust-analyzer with the rustcPrivate
+crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
+
+This option does not take effect until rust-analyzer is restarted.
+--
+[[rust-analyzer.rustfmt.extraArgs]]rust-analyzer.rustfmt.extraArgs (default: `[]`)::
++
+--
+Additional arguments to `rustfmt`.
+--
+[[rust-analyzer.rustfmt.overrideCommand]]rust-analyzer.rustfmt.overrideCommand (default: `null`)::
++
+--
+Advanced option, fully override the command rust-analyzer uses for
+formatting.
+--
+[[rust-analyzer.rustfmt.rangeFormatting.enable]]rust-analyzer.rustfmt.rangeFormatting.enable (default: `false`)::
++
+--
+Enables the use of rustfmt's unstable range formatting command for the
+`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
+available on a nightly build.
+--
+[[rust-analyzer.semanticHighlighting.strings.enable]]rust-analyzer.semanticHighlighting.strings.enable (default: `true`)::
++
+--
+Use semantic tokens for strings.
+
+In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+By disabling semantic tokens for strings, other grammars can be used to highlight
+their contents.
+--
+[[rust-analyzer.signatureInfo.detail]]rust-analyzer.signatureInfo.detail (default: `"full"`)::
++
+--
+Show full signature of the callable. Only shows parameters if disabled.
+--
+[[rust-analyzer.signatureInfo.documentation.enable]]rust-analyzer.signatureInfo.documentation.enable (default: `true`)::
++
+--
+Show documentation.
+--
+[[rust-analyzer.typing.autoClosingAngleBrackets.enable]]rust-analyzer.typing.autoClosingAngleBrackets.enable (default: `false`)::
++
+--
+Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
+--
+[[rust-analyzer.workspace.symbol.search.kind]]rust-analyzer.workspace.symbol.search.kind (default: `"only_types"`)::
++
+--
+Workspace symbol search kind.
+--
+[[rust-analyzer.workspace.symbol.search.limit]]rust-analyzer.workspace.symbol.search.limit (default: `128`)::
++
+--
+Limits the number of items returned from a workspace symbol search (Defaults to 128).
+Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
+Other clients require all results upfront and might require a higher limit.
+--
+[[rust-analyzer.workspace.symbol.search.scope]]rust-analyzer.workspace.symbol.search.scope (default: `"workspace"`)::
++
+--
+Workspace symbol search scope.
+--
diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc
new file mode 100644
index 000000000..999a6437a
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/user/manual.adoc
@@ -0,0 +1,863 @@
+= User Manual
+:toc: preamble
+:sectanchors:
+:page-layout: post
+:icons: font
+:source-highlighter: rouge
+:experimental:
+
+////
+IMPORTANT: the master copy of this document lives in the https://github.com/rust-lang/rust-analyzer repository
+////
+
+At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
+This manual focuses on a specific usage of the library -- running it as part of a server that implements the
+https://microsoft.github.io/language-server-protocol/[Language Server Protocol] (LSP).
+The LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
+
+[TIP]
+====
+[.lead]
+To improve this document, send a pull request: +
+https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/manual.adoc[https://github.com/rust-analyzer/.../manual.adoc]
+
+The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo test -p xtask` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
+====
+
+If you have questions about using rust-analyzer, please ask them in the https://users.rust-lang.org/c/ide/14["`IDEs and Editors`"] topic of Rust users forum.
+
+== Installation
+
+In theory, one should be able to just install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> and have it automatically work with any editor.
+We are not there yet, so some editor specific setup is required.
+
+Additionally, rust-analyzer needs the sources of the standard library.
+If the source code is not present, rust-analyzer will attempt to install it automatically.
+
+To add the sources manually, run the following command:
+
+```bash
+$ rustup component add rust-src
+```
+
+=== Toolchain
+
+Only the latest stable standard library source is officially supported for use with rust-analyzer.
+If you are using an older toolchain or have an override set, rust-analyzer may fail to understand the Rust source.
+You will either need to update your toolchain or use an older version of rust-analyzer that is compatible with your toolchain.
+
+If you are using an override in your project, you can still force rust-analyzer to use the stable toolchain via the environment variable `RUSTUP_TOOLCHAIN`.
+For example, with VS Code or coc-rust-analyzer:
+
+[source,json]
+----
+{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } }
+----
+
+=== VS Code
+
+This is the best supported editor at the moment.
+The rust-analyzer plugin for VS Code is maintained
+https://github.com/rust-lang/rust-analyzer/tree/master/editors/code[in tree].
+
+You can install the latest release of the plugin from
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace].
+
+Note that the plugin may cause conflicts with the
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[official Rust plugin].
+It is recommended to disable the Rust plugin when using the rust-analyzer extension.
+
+By default, the plugin will prompt you to download the matching version of the server as well:
+
+image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[]
+
+[NOTE]
+====
+To disable this notification put the following to `settings.json`
+
+[source,json]
+----
+{ "rust-analyzer.updates.askBeforeDownload": false }
+----
+====
+
+The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under:
+
+* Linux: `~/.vscode/extensions`
+* Linux (Remote, such as WSL): `~/.vscode-server/extensions`
+* macOS: `~/.vscode/extensions`
+* Windows: `%USERPROFILE%\.vscode\extensions`
+
+As an exception, on NixOS, the extension makes a copy of the server and stores it under `~/.config/Code/User/globalStorage/rust-lang.rust-analyzer`.
+
+Note that we only support the two most recent versions of VS Code.
+
+==== Updates
+
+The extension will be updated automatically as new versions become available.
+It will ask your permission to download the matching language server version binary if needed.
+
+===== Nightly
+
+We ship nightly releases for VS Code.
+To help us out by testing the newest code, you can enable pre-release versions in the Code extension page.
+
+==== Manual installation
+
+Alternatively, download a VSIX corresponding to your platform from the
+https://github.com/rust-lang/rust-analyzer/releases[releases] page.
+
+Install the extension with the `Extensions: Install from VSIX` command within VS Code, or from the command line via:
+[source]
+----
+$ code --install-extension /path/to/rust-analyzer.vsix
+----
+
+If you are running an unsupported platform, you can install `rust-analyzer-no-server.vsix` and compile or obtain a server binary.
+Copy the server anywhere, then add the path to your settings.json, for example:
+[source,json]
+----
+{ "rust-analyzer.server.path": "~/.local/bin/rust-analyzer-linux" }
+----
+
+==== Building From Source
+
+Both the server and the Code plugin can be installed from source:
+
+[source]
+----
+$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+$ cargo xtask install
+----
+
+You'll need Cargo, nodejs (matching a supported version of VS Code) and npm for this.
+
+Note that installing via `xtask install` does not work for VS Code Remote, instead you'll need to install the `.vsix` manually.
+
+If you're not using Code, you can compile and install only the LSP server:
+
+[source]
+----
+$ cargo xtask install --server
+----
+
+=== rust-analyzer Language Server Binary
+
+Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
+You can download pre-built binaries from the https://github.com/rust-lang/rust-analyzer/releases[releases] page.
+You will need to uncompress and rename the binary for your platform, e.g. from `rust-analyzer-aarch64-apple-darwin.gz` on Mac OS to `rust-analyzer`, make it executable, then move it into a directory in your `$PATH`.
+
+On Linux to install the `rust-analyzer` binary into `~/.local/bin`, these commands should work:
+
+[source,bash]
+----
+$ mkdir -p ~/.local/bin
+$ curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
+$ chmod +x ~/.local/bin/rust-analyzer
+----
+
+Make sure that `~/.local/bin` is listed in the `$PATH` variable and use the appropriate URL if you're not on a `x86-64` system.
+
+You don't have to use `~/.local/bin`, any other path like `~/.cargo/bin` or `/usr/local/bin` will work just as well.
+
+Alternatively, you can install it from source using the command below.
+You'll need the latest stable version of the Rust toolchain.
+
+[source,bash]
+----
+$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+$ cargo xtask install --server
+----
+
+If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-lang/rust-analyzer/issues/1811[this issue].
+On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
+
+==== `rustup`
+
+`rust-analyzer` is available in `rustup`, but only in the nightly toolchain:
+
+[source,bash]
+----
+$ rustup +nightly component add rust-analyzer-preview
+----
+
+However, in contrast to `component add clippy` or `component add rustfmt`, this does not actually place a `rust-analyzer` binary in `~/.cargo/bin`, see https://github.com/rust-lang/rustup/issues/2411[this issue].
+
+==== Arch Linux
+
+The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):
+
+- https://www.archlinux.org/packages/community/x86_64/rust-analyzer/[`rust-analyzer`] (built from latest tagged source)
+- https://aur.archlinux.org/packages/rust-analyzer-git[`rust-analyzer-git`] (latest Git version)
+
+Install it with pacman, for example:
+
+[source,bash]
+----
+$ pacman -S rust-analyzer
+----
+
+==== Gentoo Linux
+
+`rust-analyzer` is available in the GURU repository:
+
+- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer`] builds from source
+- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer-bin?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer-bin`] installs an official binary release
+
+If not already enabled, GURU must be enabled (e.g. using `app-eselect/eselect-repository`) and sync'd before running `emerge`:
+
+[source,bash]
+----
+$ eselect repository enable guru && emaint sync -r guru
+$ emerge rust-analyzer-bin
+----
+
+==== macOS
+
+The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew].
+
+[source,bash]
+----
+$ brew install rust-analyzer
+----
+
+=== Emacs
+
+Note this excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm].
+
+Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+
+Emacs support is maintained as part of the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP] package in https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[lsp-rust.el].
+
+1. Install the most recent version of `emacs-lsp` package by following the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP instructions].
+2. Set `lsp-rust-server` to `'rust-analyzer`.
+3. Run `lsp` in a Rust buffer.
+4. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
+
+=== Vim/NeoVim
+
+Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+Not needed if the extension can install/update it on its own, coc-rust-analyzer is one example.
+
+There are several LSP client implementations for vim or neovim:
+
+==== coc-rust-analyzer
+
+1. Install coc.nvim by following the instructions at
+ https://github.com/neoclide/coc.nvim[coc.nvim]
+ (Node.js required)
+2. Run `:CocInstall coc-rust-analyzer` to install
+ https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
+ this extension implements _most_ of the features supported in the VSCode extension:
+ * automatically install and upgrade stable/nightly releases
+ * same configurations as VSCode extension, `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc.
+ * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
+ * inlay hints for variables and method chaining, _Neovim Only_
+
+Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line` are unlikely to be useful.
+
+==== LanguageClient-neovim
+
+1. Install LanguageClient-neovim by following the instructions
+ https://github.com/autozimu/LanguageClient-neovim[here]
+ * The GitHub project wiki has extra tips on configuration
+
+2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists):
++
+[source,vim]
+----
+let g:LanguageClient_serverCommands = {
+\ 'rust': ['rust-analyzer'],
+\ }
+----
+
+==== YouCompleteMe
+
+Install YouCompleteMe by following the instructions
+ https://github.com/ycm-core/YouCompleteMe#installation[here].
+
+rust-analyzer is the default in ycm, it should work out of the box.
+
+==== ALE
+
+To use the LSP server in https://github.com/dense-analysis/ale[ale]:
+
+[source,vim]
+----
+let g:ale_linters = {'rust': ['analyzer']}
+----
+
+==== nvim-lsp
+
+NeoVim 0.5 has built-in language server support.
+For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig].
+Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`.
+
+You can also pass LSP settings to the server:
+
+[source,vim]
+----
+lua << EOF
+local nvim_lsp = require'lspconfig'
+
+local on_attach = function(client)
+ require'completion'.on_attach(client)
+end
+
+nvim_lsp.rust_analyzer.setup({
+ on_attach=on_attach,
+ settings = {
+ ["rust-analyzer"] = {
+ imports = {
+ granularity = {
+ group = "module",
+ },
+ prefix = "self",
+ },
+ cargo = {
+ buildScripts = {
+ enable = true,
+ },
+ },
+ procMacro = {
+ enable = true
+ },
+ }
+ }
+})
+EOF
+----
+
+See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
+
+Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for neovim.
+
+==== vim-lsp
+
+vim-lsp is installed by following https://github.com/prabirshrestha/vim-lsp[the plugin instructions].
+It can be as simple as adding this line to your `.vimrc`:
+
+[source,vim]
+----
+Plug 'prabirshrestha/vim-lsp'
+----
+
+Next you need to register the `rust-analyzer` binary.
+If it is available in `$PATH`, you may want to add this to your `.vimrc`:
+
+[source,vim]
+----
+if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ })
+endif
+----
+
+There is no dedicated UI for the server configuration, so you would need to send any options as a value of the `initialization_options` field, as described in the <<_configuration,Configuration>> section.
+Here is an example of how to enable the proc-macro support:
+
+[source,vim]
+----
+if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ 'initialization_options': {
+ \ 'cargo': {
+ \ 'buildScripts': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ 'procMacro': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ })
+endif
+----
+
+=== Sublime Text
+
+==== Sublime Text 4:
+* Follow the instructions in link:https://github.com/sublimelsp/LSP-rust-analyzer[LSP-rust-analyzer].
+
+NOTE: Install link:https://packagecontrol.io/packages/LSP-file-watcher-chokidar[LSP-file-watcher-chokidar] to enable file watching (`workspace/didChangeWatchedFiles`).
+
+==== Sublime Text 3:
+* Install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+* Install the link:https://packagecontrol.io/packages/LSP[LSP package].
+* From the command palette, run `LSP: Enable Language Server Globally` and select `rust-analyzer`.
+
+If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the status bar, and after waiting a bit, functionalities like tooltips on hovering over variables should become available.
+
+If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary.
+
+=== GNOME Builder
+
+GNOME Builder 3.37.1 and newer has native `rust-analyzer` support.
+If the LSP binary is not available, GNOME Builder can install it when opening a Rust file.
+
+
+=== Eclipse IDE
+
+Support for Rust development in the Eclipse IDE is provided by link:https://github.com/eclipse/corrosion[Eclipse Corrosion].
+If available in PATH or in some standard location, `rust-analyzer` is detected and powers editing of Rust files without further configuration.
+If `rust-analyzer` is not detected, Corrosion will prompt you for configuration of your Rust toolchain and language server with a link to the __Window > Preferences > Rust__ preference page; from here a button allows to download and configure `rust-analyzer`, but you can also reference another installation.
+You'll need to close and reopen all .rs and Cargo files, or to restart the IDE, for this change to take effect.
+
+=== Kate Text Editor
+
+Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
+It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
+
+Earlier versions allow you to use rust-analyzer through a simple settings change.
+In the LSP Client settings of Kate, copy the content of the third tab "default parameters" to the second tab "server configuration".
+Then in the configuration replace:
+[source,json]
+----
+ "rust": {
+ "command": ["rls"],
+ "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
+ "url": "https://github.com/rust-lang/rls",
+ "highlightingModeRegex": "^Rust$"
+ },
+----
+With
+[source,json]
+----
+ "rust": {
+ "command": ["rust-analyzer"],
+ "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
+ "url": "https://github.com/rust-lang/rust-analyzer",
+ "highlightingModeRegex": "^Rust$"
+ },
+----
+Then click on apply, and restart the LSP server for your rust project.
+
+=== juCi++
+
+https://gitlab.com/cppit/jucipp[juCi++] has built-in support for the language server protocol, and since version 1.7.0 offers installation of both Rust and rust-analyzer when opening a Rust file.
+
+=== Kakoune
+
+https://kakoune.org/[Kakoune] supports LSP with the help of https://github.com/kak-lsp/kak-lsp[`kak-lsp`].
+Follow the https://github.com/kak-lsp/kak-lsp#installation[instructions] to install `kak-lsp`.
+To configure `kak-lsp`, refer to the https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp[configuration section] which is basically about copying the https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml[configuration file] in the right place (latest versions should use `rust-analyzer` by default).
+
+Finally, you need to configure Kakoune to talk to `kak-lsp` (see https://github.com/kak-lsp/kak-lsp#usage[Usage section]).
+A basic configuration will only get you LSP but you can also activate inlay diagnostics and auto-formatting on save.
+The following might help you get all of this.
+
+[source,txt]
+----
+eval %sh{kak-lsp --kakoune -s $kak_session} # Not needed if you load it with plug.kak.
+hook global WinSetOption filetype=rust %{
+ # Enable LSP
+ lsp-enable-window
+
+ # Auto-formatting on save
+ hook window BufWritePre .* lsp-formatting-sync
+
+ # Configure inlay hints (only on save)
+ hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints
+ hook -once -always window WinSetOption filetype=.* %{
+ remove-hooks window rust-inlay-hints
+ }
+}
+----
+
+=== Helix
+
+https://docs.helix-editor.com/[Helix] supports LSP by default.
+However, it won't install `rust-analyzer` automatically.
+You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+
+== Troubleshooting
+
+Start with looking at the rust-analyzer version.
+Try **Rust Analyzer: Show RA Version** in VS Code (using **Command Palette** feature typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the command line.
+If the date is more than a week ago, it's better to update rust-analyzer version.
+
+The next thing to check would be panic messages in rust-analyzer's log.
+Log messages are printed to stderr, in VS Code you can see them in the `Output > Rust Analyzer Language Server` tab of the panel.
+To see more logs, set the `RA_LOG=info` environment variable, this can be done either by setting the environment variable manually or by using `rust-analyzer.server.extraEnv`, note that both of these approaches require the server to be restarted.
+
+To fully capture LSP messages between the editor and the server, set `"rust-analyzer.trace.server": "verbose"` config and check
+`Output > Rust Analyzer Language Server Trace`.
+
+The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
+To debug that, first note the `rust-analyzer` section in the status bar.
+If it has an error icon and is red, that's the problem (hover will have somewhat helpful error message).
+**Rust Analyzer: Status** prints dependency information for the current file.
+Finally, `RA_LOG=project_model=debug` enables verbose logs during project loading.
+
+If rust-analyzer outright crashes, try running `rust-analyzer analysis-stats /path/to/project/directory/` on the command line.
+This command type checks the whole project in batch mode bypassing LSP machinery.
+
+When filing issues, it is useful (but not necessary) to try to minimize examples.
+An ideal bug reproduction looks like this:
+
+```bash
+$ git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
+$ rust-analyzer --version
+rust-analyzer dd12184e4 2021-05-08 dev
+$ rust-analyzer analysis-stats .
+💀 💀 💀
+```
+
+It is especially useful when the `repo` doesn't use external crates or the standard library.
+
+If you want to go as far as to modify the source code to debug the problem, be sure to take a look at the
+https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev[dev docs]!
+
+== Configuration
+
+**Source:** https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs[config.rs]
+
+The <<_installation,Installation>> section contains details on configuration for some of the editors.
+In general `rust-analyzer` is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files.
+
+Some clients, such as <<vs-code,VS Code>> or <<coc-rust-analyzer,COC plugin in Vim>> provide `rust-analyzer` specific configuration UIs. Others may require you to know a bit more about the interaction with `rust-analyzer`.
+
+For the latter category, it might help to know that the initial configuration is specified as a value of the `initializationOptions` field of the https://microsoft.github.io/language-server-protocol/specifications/specification-current/#initialize[`InitializeParams` message, in the LSP protocol].
+The spec says that the field type is `any?`, but `rust-analyzer` is looking for a JSON object that is constructed using settings from the list below.
+Name of the setting, ignoring the `rust-analyzer.` prefix, is used as a path, and value of the setting becomes the JSON property value.
+
+For example, a very common configuration is to enable proc-macro support; this can be achieved by sending this JSON:
+
+[source,json]
+----
+{
+ "cargo": {
+ "buildScripts": {
+ "enable": true,
+ },
+ },
+ "procMacro": {
+ "enable": true,
+ }
+}
+----
+
+Please consult your editor's documentation to learn more about how to configure https://microsoft.github.io/language-server-protocol/[LSP servers].
+
+To verify which configuration is actually used by `rust-analyzer`, set `RA_LOG` environment variable to `rust_analyzer=info` and look for config-related messages.
+Logs should show both the JSON that `rust-analyzer` sees as well as the updated config.
+
+This is the list of config options `rust-analyzer` supports:
+
+include::./generated_config.adoc[]
+
+== Non-Cargo Based Projects
+
+rust-analyzer does not require Cargo.
+However, if you use some other build system, you'll have to describe the structure of your project for rust-analyzer in the `rust-project.json` format:
+
+[source,TypeScript]
+----
+interface JsonProject {
+ /// Path to the directory with *source code* of
+ /// sysroot crates.
+ ///
+ /// It should point to the directory where std,
+ /// core, and friends can be found:
+ ///
+ /// https://github.com/rust-lang/rust/tree/master/library.
+ ///
+ /// If provided, rust-analyzer automatically adds
+ /// dependencies on sysroot crates. Conversely,
+ /// if you omit this path, you can specify sysroot
+ /// dependencies yourself and, for example, have
+ /// several different "sysroots" in one graph of
+ /// crates.
+ sysroot_src?: string;
+ /// The set of crates comprising the current
+ /// project. Must include all transitive
+ /// dependencies as well as sysroot crate (libstd,
+ /// libcore and such).
+ crates: Crate[];
+}
+
+interface Crate {
+ /// Optional crate name used for display purposes,
+ /// without affecting semantics. See the `deps`
+ /// key for semantically-significant crate names.
+ display_name?: string;
+ /// Path to the root module of the crate.
+ root_module: string;
+ /// Edition of the crate.
+ edition: "2015" | "2018" | "2021";
+ /// Dependencies
+ deps: Dep[];
+ /// Should this crate be treated as a member of
+ /// current "workspace".
+ ///
+ /// By default, inferred from the `root_module`
+ /// (members are the crates which reside inside
+ /// the directory opened in the editor).
+ ///
+ /// Set this to `false` for things like standard
+ /// library and 3rd party crates to enable
+ /// performance optimizations (rust-analyzer
+ /// assumes that non-member crates don't change).
+ is_workspace_member?: boolean;
+ /// Optionally specify the (super)set of `.rs`
+ /// files comprising this crate.
+ ///
+ /// By default, rust-analyzer assumes that only
+ /// files under `root_module.parent` can belong
+ /// to a crate. `include_dirs` are included
+ /// recursively, unless a subdirectory is in
+ /// `exclude_dirs`.
+ ///
+ /// Different crates can share the same `source`.
+ ///
+ /// If two crates share an `.rs` file in common,
+ /// they *must* have the same `source`.
+ /// rust-analyzer assumes that files from one
+ /// source can't refer to files in another source.
+ source?: {
+ include_dirs: string[],
+ exclude_dirs: string[],
+ },
+ /// The set of cfgs activated for a given crate, like
+ /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
+ cfg: string[];
+ /// Target triple for this Crate.
+ ///
+ /// Used when running `rustc --print cfg`
+ /// to get target-specific cfgs.
+ target?: string;
+ /// Environment variables, used for
+ /// the `env!` macro
+ env: { [key: string]: string; },
+
+ /// Whether the crate is a proc-macro crate.
+ is_proc_macro: boolean;
+ /// For proc-macro crates, path to compiled
+ /// proc-macro (.so file).
+ proc_macro_dylib_path?: string;
+}
+
+interface Dep {
+ /// Index of a crate in the `crates` array.
+ crate: number,
+ /// Name as should appear in the (implicit)
+ /// `extern crate name` declaration.
+ name: string,
+}
+----
+
+This format is provisional and subject to change.
+Specifically, the `roots` setup will be different eventually.
+
+There are three ways to feed `rust-project.json` to rust-analyzer:
+
+* Place `rust-project.json` file at the root of the project, and rust-analyzer will discover it.
+* Specify `"rust-analyzer.linkedProjects": [ "path/to/rust-project.json" ]` in the settings (and make sure that your LSP client sends settings as a part of initialize request).
+* Specify `"rust-analyzer.linkedProjects": [ { "roots": [...], "crates": [...] }]` inline.
+
+Relative paths are interpreted relative to `rust-project.json` file location or (for inline JSON) relative to `rootUri`.
+
+See https://github.com/rust-analyzer/rust-project.json-example for a small example.
+
+You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading.
+
+Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client. To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `checkOnSave.overrideCommand` configuration. As an example, the following configuration explicitly sets `cargo check` as the `checkOnSave` command.
+
+[source,json]
+----
+{ "rust-analyzer.checkOnSave.overrideCommand": ["cargo", "check", "--message-format=json"] }
+----
+
+The `checkOnSave.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume. The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format. See the <<Configuration>> section for more information.
+
+== Security
+
+At the moment, rust-analyzer assumes that all code is trusted.
+Here is a **non-exhaustive** list of ways to make rust-analyzer execute arbitrary code:
+
+* proc macros and build scripts are executed by default
+* `.cargo/config` can override `rustc` with an arbitrary executable
+* `rust-toolchain.toml` can override `rustc` with an arbitrary executable
+* VS Code plugin reads configuration from project directory, and that can be used to override paths to various executables, like `rustfmt` or `rust-analyzer` itself.
+* rust-analyzer's syntax trees library uses a lot of `unsafe` and hasn't been properly audited for memory safety.
+
+== Privacy
+
+The LSP server performs no network access in itself, but runs `cargo metadata` which will update or download the crate registry and the source code of the project dependencies.
+If enabled (the default), build scripts and procedural macros can do anything.
+
+The Code extension does not access the network.
+
+Any other editor plugins are not under the control of the `rust-analyzer` developers. For any privacy concerns, you should check with their respective developers.
+
+For `rust-analyzer` developers, `cargo xtask release` uses the GitHub API to put together the release notes.
+
+== Features
+
+include::./generated_features.adoc[]
+
+== Assists (Code Actions)
+
+Assists, or code actions, are small local refactorings, available in a particular context.
+They are usually triggered by a shortcut or by clicking a light bulb icon in the editor.
+Cursor position or selection is signified by `┃` character.
+
+include::./generated_assists.adoc[]
+
+== Diagnostics
+
+While most errors and warnings provided by rust-analyzer come from the `cargo check` integration, there's a growing number of diagnostics implemented using rust-analyzer's own analysis.
+Some of these diagnostics don't respect `\#[allow]` or `\#[deny]` attributes yet, but can be turned off using the `rust-analyzer.diagnostics.enable`, `rust-analyzer.diagnostics.experimental.enable` or `rust-analyzer.diagnostics.disabled` settings.
+
+include::./generated_diagnostic.adoc[]
+
+== Editor Features
+=== VS Code
+
+==== Color configurations
+
+It is possible to change the foreground/background color and font family/size of inlay hints.
+Just add this to your `settings.json`:
+
+[source,jsonc]
+----
+{
+ "editor.inlayHints.fontFamily": "Courier New",
+ "editor.inlayHints.fontSize": 11,
+
+ "workbench.colorCustomizations": {
+ // Name of the theme you are currently using
+ "[Default Dark+]": {
+ "editorInlayHint.foreground": "#868686f0",
+ "editorInlayHint.background": "#3d3d3d48",
+
+ // Overrides for specific kinds of inlay hints
+ "editorInlayHint.typeForeground": "#fdb6fdf0",
+ "editorInlayHint.parameterForeground": "#fdb6fdf0",
+ }
+ }
+}
+----
+
+==== Semantic style customizations
+
+You can customize the look of different semantic elements in the source code.
+For example, mutable bindings are underlined by default and you can override this behavior by adding the following section to your `settings.json`:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "*.mutable": {
+ "fontStyle": "", // underline is the default
+ },
+ }
+ },
+}
+----
+
+Most themes don't support styling unsafe operations differently yet. You can fix this by adding overrides for the rules `operator.unsafe`, `function.unsafe`, and `method.unsafe`:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600",
+ "function.unsafe": "#ff6600",
+ "method.unsafe": "#ff6600"
+ }
+ },
+}
+----
+
+In addition to the top-level rules you can specify overrides for specific themes. For example, if you wanted to use a darker text color on a specific light theme, you might write:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600"
+ },
+ "[Ayu Light]": {
+ "rules": {
+ "operator.unsafe": "#572300"
+ }
+ }
+ },
+}
+----
+
+Make sure you include the brackets around the theme name. For example, use `"[Ayu Light]"` to customize the theme Ayu Light.
+
+==== Special `when` clause context for keybindings.
+You may use `inRustProject` context to configure keybindings for rust projects only.
+For example:
+
+[source,json]
+----
+{
+ "key": "ctrl+alt+d",
+ "command": "rust-analyzer.openDocs",
+ "when": "inRustProject"
+}
+----
+More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here].
+
+==== Setting runnable environment variables
+You can use "rust-analyzer.runnableEnv" setting to define runnable environment-specific substitution variables.
+The simplest way for all runnables in a bunch:
+```jsonc
+"rust-analyzer.runnableEnv": {
+ "RUN_SLOW_TESTS": "1"
+}
+```
+
+Or it is possible to specify vars more granularly:
+```jsonc
+"rust-analyzer.runnableEnv": [
+ {
+ // "mask": null, // null mask means that this rule will be applied for all runnables
+ env: {
+ "APP_ID": "1",
+ "APP_DATA": "asdf"
+ }
+ },
+ {
+ "mask": "test_name",
+ "env": {
+ "APP_ID": "2", // overwrites only APP_ID
+ }
+ }
+]
+```
+
+You can use any valid regular expression as a mask.
+Also note that a full runnable name is something like *run bin_or_example_name*, *test some::mod::test_name* or *test-mod some::mod*, so it is possible to distinguish binaries, single tests, and test modules with these masks: `"^run"`, `"^test "` (the trailing space matters!), and `"^test-mod"` respectively.
+
+==== Compiler feedback from external commands
+
+Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
+
+To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `rust-analyzer.checkOnSave.enable: false` in preferences.
+
+For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watch`] instead, you might add the following to `.vscode/tasks.json`:
+
+```json
+{
+ "label": "Watch",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo watch",
+ "problemMatcher": "$rustc-watch",
+ "isBackground": true
+}
+```
diff --git a/src/tools/rust-analyzer/lib/README.md b/src/tools/rust-analyzer/lib/README.md
new file mode 100644
index 000000000..6b2eeac2c
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/README.md
@@ -0,0 +1,2 @@
+Crates in this directory are published to crates.io and obey semver.
+They *could* live in a separate repo, but we want to experiment with a monorepo setup.
diff --git a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
new file mode 100644
index 000000000..ec5ba8ba0
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "la-arena"
+version = "0.3.0"
+description = "Simple index-based arena without deletion."
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/la-arena"
+documentation = "https://docs.rs/la-arena"
+categories = ["data-structures", "memory-management", "rust-patterns"]
+edition = "2021"
+rust-version = "1.56"
diff --git a/src/tools/rust-analyzer/lib/la-arena/src/lib.rs b/src/tools/rust-analyzer/lib/la-arena/src/lib.rs
new file mode 100644
index 000000000..dadee43b1
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/la-arena/src/lib.rs
@@ -0,0 +1,366 @@
+//! Yet another index-based arena.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(missing_docs)]
+
+use std::{
+ fmt,
+ hash::{Hash, Hasher},
+ iter::FromIterator,
+ marker::PhantomData,
+ ops::{Index, IndexMut, Range, RangeInclusive},
+};
+
+mod map;
+pub use map::ArenaMap;
+
+/// The raw index of a value in an arena.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct RawIdx(u32);
+
+impl From<RawIdx> for u32 {
+ fn from(raw: RawIdx) -> u32 {
+ raw.0
+ }
+}
+
+impl From<u32> for RawIdx {
+ fn from(idx: u32) -> RawIdx {
+ RawIdx(idx)
+ }
+}
+
+impl fmt::Debug for RawIdx {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl fmt::Display for RawIdx {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// The index of a value allocated in an arena that holds `T`s.
+pub struct Idx<T> {
+ raw: RawIdx,
+ _ty: PhantomData<fn() -> T>,
+}
+
+impl<T> Clone for Idx<T> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+impl<T> Copy for Idx<T> {}
+
+impl<T> PartialEq for Idx<T> {
+ fn eq(&self, other: &Idx<T>) -> bool {
+ self.raw == other.raw
+ }
+}
+impl<T> Eq for Idx<T> {}
+
+impl<T> Hash for Idx<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.raw.hash(state);
+ }
+}
+
+impl<T> fmt::Debug for Idx<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut type_name = std::any::type_name::<T>();
+ if let Some(idx) = type_name.rfind(':') {
+ type_name = &type_name[idx + 1..];
+ }
+ write!(f, "Idx::<{}>({})", type_name, self.raw)
+ }
+}
+
+impl<T> Idx<T> {
+ /// Creates a new index from a [`RawIdx`].
+ pub fn from_raw(raw: RawIdx) -> Self {
+ Idx { raw, _ty: PhantomData }
+ }
+
+ /// Converts this index into the underlying [`RawIdx`].
+ pub fn into_raw(self) -> RawIdx {
+ self.raw
+ }
+}
+
+/// A range of densely allocated arena values.
+pub struct IdxRange<T> {
+ range: Range<u32>,
+ _p: PhantomData<T>,
+}
+
+impl<T> IdxRange<T> {
+ /// Creates a new index range
+ /// inclusive of the start value and exclusive of the end value.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let a = arena.alloc("a");
+ /// let b = arena.alloc("b");
+ /// let c = arena.alloc("c");
+ /// let d = arena.alloc("d");
+ ///
+ /// let range = la_arena::IdxRange::new(b..d);
+ /// assert_eq!(&arena[range], &["b", "c"]);
+ /// ```
+ pub fn new(range: Range<Idx<T>>) -> Self {
+ Self { range: range.start.into_raw().into()..range.end.into_raw().into(), _p: PhantomData }
+ }
+
+ /// Creates a new index range
+ /// inclusive of the start value and end value.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let foo = arena.alloc("foo");
+ /// let bar = arena.alloc("bar");
+ /// let baz = arena.alloc("baz");
+ ///
+ /// let range = la_arena::IdxRange::new_inclusive(foo..=baz);
+ /// assert_eq!(&arena[range], &["foo", "bar", "baz"]);
+ ///
+ /// let range = la_arena::IdxRange::new_inclusive(foo..=foo);
+ /// assert_eq!(&arena[range], &["foo"]);
+ /// ```
+ pub fn new_inclusive(range: RangeInclusive<Idx<T>>) -> Self {
+ Self {
+ range: u32::from(range.start().into_raw())..u32::from(range.end().into_raw()) + 1,
+ _p: PhantomData,
+ }
+ }
+
+ /// Returns whether the index range is empty.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let one = arena.alloc(1);
+ /// let two = arena.alloc(2);
+ ///
+ /// assert!(la_arena::IdxRange::new(one..one).is_empty());
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.range.is_empty()
+ }
+}
+
+impl<T> Iterator for IdxRange<T> {
+ type Item = Idx<T>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.range.next().map(|raw| Idx::from_raw(raw.into()))
+ }
+}
+
+impl<T> DoubleEndedIterator for IdxRange<T> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.range.next_back().map(|raw| Idx::from_raw(raw.into()))
+ }
+}
+
+impl<T> fmt::Debug for IdxRange<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple(&format!("IdxRange::<{}>", std::any::type_name::<T>()))
+ .field(&self.range)
+ .finish()
+ }
+}
+
+impl<T> Clone for IdxRange<T> {
+ fn clone(&self) -> Self {
+ Self { range: self.range.clone(), _p: PhantomData }
+ }
+}
+
+impl<T> PartialEq for IdxRange<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.range == other.range
+ }
+}
+
+impl<T> Eq for IdxRange<T> {}
+
+/// Yet another index-based arena.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct Arena<T> {
+ data: Vec<T>,
+}
+
+impl<T: fmt::Debug> fmt::Debug for Arena<T> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt.debug_struct("Arena").field("len", &self.len()).field("data", &self.data).finish()
+ }
+}
+
+impl<T> Arena<T> {
+ /// Creates a new empty arena.
+ ///
+ /// ```
+ /// let arena: la_arena::Arena<i32> = la_arena::Arena::new();
+ /// assert!(arena.is_empty());
+ /// ```
+ pub const fn new() -> Arena<T> {
+ Arena { data: Vec::new() }
+ }
+
+ /// Empties the arena, removing all contained values.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ ///
+ /// arena.alloc(1);
+ /// arena.alloc(2);
+ /// arena.alloc(3);
+ /// assert_eq!(arena.len(), 3);
+ ///
+ /// arena.clear();
+ /// assert!(arena.is_empty());
+ /// ```
+ pub fn clear(&mut self) {
+ self.data.clear();
+ }
+
+ /// Returns the length of the arena.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// assert_eq!(arena.len(), 0);
+ ///
+ /// arena.alloc("foo");
+ /// assert_eq!(arena.len(), 1);
+ ///
+ /// arena.alloc("bar");
+ /// assert_eq!(arena.len(), 2);
+ ///
+ /// arena.alloc("baz");
+ /// assert_eq!(arena.len(), 3);
+ /// ```
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ /// Returns whether the arena contains no elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// assert!(arena.is_empty());
+ ///
+ /// arena.alloc(0.5);
+ /// assert!(!arena.is_empty());
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.data.is_empty()
+ }
+
+ /// Allocates a new value on the arena, returning the value’s index.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx = arena.alloc(50);
+ ///
+ /// assert_eq!(arena[idx], 50);
+ /// ```
+ pub fn alloc(&mut self, value: T) -> Idx<T> {
+ let idx = self.next_idx();
+ self.data.push(value);
+ idx
+ }
+
+ /// Returns an iterator over the arena’s elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx1 = arena.alloc(20);
+ /// let idx2 = arena.alloc(40);
+ /// let idx3 = arena.alloc(60);
+ ///
+ /// let mut iterator = arena.iter();
+ /// assert_eq!(iterator.next(), Some((idx1, &20)));
+ /// assert_eq!(iterator.next(), Some((idx2, &40)));
+ /// assert_eq!(iterator.next(), Some((idx3, &60)));
+ /// ```
+ pub fn iter(
+ &self,
+ ) -> impl Iterator<Item = (Idx<T>, &T)> + ExactSizeIterator + DoubleEndedIterator {
+ self.data.iter().enumerate().map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value))
+ }
+
+ /// Returns an iterator over the arena’s mutable elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx1 = arena.alloc(20);
+ ///
+ /// assert_eq!(arena[idx1], 20);
+ ///
+ /// let mut iterator = arena.iter_mut();
+ /// *iterator.next().unwrap().1 = 10;
+ /// drop(iterator);
+ ///
+ /// assert_eq!(arena[idx1], 10);
+ /// ```
+ pub fn iter_mut(
+ &mut self,
+ ) -> impl Iterator<Item = (Idx<T>, &mut T)> + ExactSizeIterator + DoubleEndedIterator {
+ self.data
+ .iter_mut()
+ .enumerate()
+ .map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value))
+ }
+
+ /// Reallocates the arena to make it take up as little space as possible.
+ pub fn shrink_to_fit(&mut self) {
+ self.data.shrink_to_fit();
+ }
+
+ /// Returns the index of the next value allocated on the arena.
+ ///
+ /// This method should remain private to make creating invalid `Idx`s harder.
+ fn next_idx(&self) -> Idx<T> {
+ Idx::from_raw(RawIdx(self.data.len() as u32))
+ }
+}
+
+impl<T> Default for Arena<T> {
+ fn default() -> Arena<T> {
+ Arena { data: Vec::new() }
+ }
+}
+
+impl<T> Index<Idx<T>> for Arena<T> {
+ type Output = T;
+ fn index(&self, idx: Idx<T>) -> &T {
+ let idx = idx.into_raw().0 as usize;
+ &self.data[idx]
+ }
+}
+
+impl<T> IndexMut<Idx<T>> for Arena<T> {
+ fn index_mut(&mut self, idx: Idx<T>) -> &mut T {
+ let idx = idx.into_raw().0 as usize;
+ &mut self.data[idx]
+ }
+}
+
+impl<T> Index<IdxRange<T>> for Arena<T> {
+ type Output = [T];
+ fn index(&self, range: IdxRange<T>) -> &[T] {
+ let start = range.range.start as usize;
+ let end = range.range.end as usize;
+ &self.data[start..end]
+ }
+}
+
+impl<T> FromIterator<T> for Arena<T> {
+ fn from_iter<I>(iter: I) -> Self
+ where
+ I: IntoIterator<Item = T>,
+ {
+ Arena { data: Vec::from_iter(iter) }
+ }
+}
diff --git a/src/tools/rust-analyzer/lib/la-arena/src/map.rs b/src/tools/rust-analyzer/lib/la-arena/src/map.rs
new file mode 100644
index 000000000..d27f086d3
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/la-arena/src/map.rs
@@ -0,0 +1,75 @@
+use std::marker::PhantomData;
+
+use crate::Idx;
+
+/// A map from arena indexes to some other type.
+/// Space requirement is O(highest index).
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct ArenaMap<IDX, V> {
+ v: Vec<Option<V>>,
+ _ty: PhantomData<IDX>,
+}
+
+impl<T, V> ArenaMap<Idx<T>, V> {
+ /// Inserts a value associated with a given arena index into the map.
+ pub fn insert(&mut self, idx: Idx<T>, t: V) {
+ let idx = Self::to_idx(idx);
+
+ self.v.resize_with((idx + 1).max(self.v.len()), || None);
+ self.v[idx] = Some(t);
+ }
+
+ /// Returns a reference to the value associated with the provided index
+ /// if it is present.
+ pub fn get(&self, idx: Idx<T>) -> Option<&V> {
+ self.v.get(Self::to_idx(idx)).and_then(|it| it.as_ref())
+ }
+
+ /// Returns a mutable reference to the value associated with the provided index
+ /// if it is present.
+ pub fn get_mut(&mut self, idx: Idx<T>) -> Option<&mut V> {
+ self.v.get_mut(Self::to_idx(idx)).and_then(|it| it.as_mut())
+ }
+
+ /// Returns an iterator over the values in the map.
+ pub fn values(&self) -> impl Iterator<Item = &V> {
+ self.v.iter().filter_map(|o| o.as_ref())
+ }
+
+ /// Returns an iterator over mutable references to the values in the map.
+ pub fn values_mut(&mut self) -> impl Iterator<Item = &mut V> {
+ self.v.iter_mut().filter_map(|o| o.as_mut())
+ }
+
+ /// Returns an iterator over the arena indexes and values in the map.
+ pub fn iter(&self) -> impl Iterator<Item = (Idx<T>, &V)> {
+ self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?)))
+ }
+
+ fn to_idx(idx: Idx<T>) -> usize {
+ u32::from(idx.into_raw()) as usize
+ }
+
+ fn from_idx(idx: usize) -> Idx<T> {
+ Idx::from_raw((idx as u32).into())
+ }
+}
+
+impl<T, V> std::ops::Index<Idx<V>> for ArenaMap<Idx<V>, T> {
+ type Output = T;
+ fn index(&self, idx: Idx<V>) -> &T {
+ self.v[Self::to_idx(idx)].as_ref().unwrap()
+ }
+}
+
+impl<T, V> std::ops::IndexMut<Idx<V>> for ArenaMap<Idx<V>, T> {
+ fn index_mut(&mut self, idx: Idx<V>) -> &mut T {
+ self.v[Self::to_idx(idx)].as_mut().unwrap()
+ }
+}
+
+impl<T, V> Default for ArenaMap<Idx<V>, T> {
+ fn default() -> Self {
+ ArenaMap { v: Vec::new(), _ty: PhantomData }
+ }
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
new file mode 100644
index 000000000..204d120d0
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "lsp-server"
+version = "0.6.0"
+description = "Generic LSP server scaffold."
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
+edition = "2021"
+
+[dependencies]
+log = "0.4.17"
+serde_json = "1.0.81"
+serde = { version = "1.0.137", features = ["derive"] }
+crossbeam-channel = "0.5.5"
+
+[dev-dependencies]
+lsp-types = "0.93.0"
diff --git a/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs b/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs
new file mode 100644
index 000000000..ca7ad0b53
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs
@@ -0,0 +1,121 @@
+//! A minimal example LSP server that can only respond to the `gotoDefinition` request. To use
+//! this example, execute it and then send an `initialize` request.
+//!
+//! ```no_run
+//! Content-Length: 85
+//!
+//! {"jsonrpc": "2.0", "method": "initialize", "id": 1, "params": {"capabilities": {}}}
+//! ```
+//!
+//! This will respond with a server response. Then send it an `initialized` notification which will
+//! have no response.
+//!
+//! ```no_run
+//! Content-Length: 59
+//!
+//! {"jsonrpc": "2.0", "method": "initialized", "params": {}}
+//! ```
+//!
+//! Once these two are sent, then we enter the main loop of the server. The only request this
+//! example can handle is `gotoDefinition`:
+//!
+//! ```no_run
+//! Content-Length: 159
+//!
+//! {"jsonrpc": "2.0", "method": "textDocument/definition", "id": 2, "params": {"textDocument": {"uri": "file://temp"}, "position": {"line": 1, "character": 1}}}
+//! ```
+//!
+//! To finish up without errors, send a shutdown request:
+//!
+//! ```no_run
+//! Content-Length: 67
+//!
+//! {"jsonrpc": "2.0", "method": "shutdown", "id": 3, "params": null}
+//! ```
+//!
+//! The server will exit the main loop and finally we send a `shutdown` notification to stop
+//! the server.
+//!
+//! ```
+//! Content-Length: 54
+//!
+//! {"jsonrpc": "2.0", "method": "exit", "params": null}
+//! ```
+use std::error::Error;
+
+use lsp_types::OneOf;
+use lsp_types::{
+ request::GotoDefinition, GotoDefinitionResponse, InitializeParams, ServerCapabilities,
+};
+
+use lsp_server::{Connection, ExtractError, Message, Request, RequestId, Response};
+
+fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
+ // Note that we must have our logging only write out to stderr.
+ eprintln!("starting generic LSP server");
+
+ // Create the transport. Includes the stdio (stdin and stdout) versions but this could
+ // also be implemented to use sockets or HTTP.
+ let (connection, io_threads) = Connection::stdio();
+
+    // Run the server and wait for the two threads to end (typically triggered by the LSP Exit event).
+ let server_capabilities = serde_json::to_value(&ServerCapabilities {
+ definition_provider: Some(OneOf::Left(true)),
+ ..Default::default()
+ })
+ .unwrap();
+ let initialization_params = connection.initialize(server_capabilities)?;
+ main_loop(connection, initialization_params)?;
+ io_threads.join()?;
+
+ // Shut down gracefully.
+ eprintln!("shutting down server");
+ Ok(())
+}
+
+fn main_loop(
+ connection: Connection,
+ params: serde_json::Value,
+) -> Result<(), Box<dyn Error + Sync + Send>> {
+ let _params: InitializeParams = serde_json::from_value(params).unwrap();
+ eprintln!("starting example main loop");
+ for msg in &connection.receiver {
+ eprintln!("got msg: {:?}", msg);
+ match msg {
+ Message::Request(req) => {
+ if connection.handle_shutdown(&req)? {
+ return Ok(());
+ }
+ eprintln!("got request: {:?}", req);
+ match cast::<GotoDefinition>(req) {
+ Ok((id, params)) => {
+ eprintln!("got gotoDefinition request #{}: {:?}", id, params);
+ let result = Some(GotoDefinitionResponse::Array(Vec::new()));
+ let result = serde_json::to_value(&result).unwrap();
+ let resp = Response { id, result: Some(result), error: None };
+ connection.sender.send(Message::Response(resp))?;
+ continue;
+ }
+ Err(err @ ExtractError::JsonError { .. }) => panic!("{:?}", err),
+ Err(ExtractError::MethodMismatch(req)) => req,
+ };
+ // ...
+ }
+ Message::Response(resp) => {
+ eprintln!("got response: {:?}", resp);
+ }
+ Message::Notification(not) => {
+ eprintln!("got notification: {:?}", not);
+ }
+ }
+ }
+ Ok(())
+}
+
+fn cast<R>(req: Request) -> Result<(RequestId, R::Params), ExtractError<Request>>
+where
+ R: lsp_types::request::Request,
+ R::Params: serde::de::DeserializeOwned,
+{
+ req.extract(R::METHOD)
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/error.rs b/src/tools/rust-analyzer/lib/lsp-server/src/error.rs
new file mode 100644
index 000000000..4c934d9ec
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/error.rs
@@ -0,0 +1,50 @@
+use std::fmt;
+
+use crate::{Notification, Request};
+
+/// Error raised when the LSP handshake or message flow is violated
+/// (e.g. the first message is not `initialize`, or a channel closed
+/// unexpectedly). Carries a human-readable description.
+#[derive(Debug, Clone)]
+pub struct ProtocolError(pub(crate) String);
+
+impl std::error::Error for ProtocolError {}
+
+impl fmt::Display for ProtocolError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Display is just the inner message string.
+        fmt::Display::fmt(&self.0, f)
+    }
+}
+
+/// Error returned when extracting typed params from a message fails.
+///
+/// `T` is the message type (`Request` or `Notification`) so the caller can
+/// recover the original message on a method mismatch and try another type.
+#[derive(Debug)]
+pub enum ExtractError<T> {
+    /// The extracted message was of a different method than expected.
+    MethodMismatch(T),
+    /// Failed to deserialize the message.
+    JsonError { method: String, error: serde_json::Error },
+}
+
+// `Error`/`Display` are implemented per concrete message type (rather than
+// generically) so the wording can say "request" vs "notification".
+impl std::error::Error for ExtractError<Request> {}
+impl fmt::Display for ExtractError<Request> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            ExtractError::MethodMismatch(req) => {
+                write!(f, "Method mismatch for request '{}'", req.method)
+            }
+            ExtractError::JsonError { method, error } => {
+                write!(f, "Invalid request\nMethod: {method}\n error: {error}",)
+            }
+        }
+    }
+}
+
+impl std::error::Error for ExtractError<Notification> {}
+impl fmt::Display for ExtractError<Notification> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            ExtractError::MethodMismatch(req) => {
+                write!(f, "Method mismatch for notification '{}'", req.method)
+            }
+            ExtractError::JsonError { method, error } => {
+                write!(f, "Invalid notification\nMethod: {method}\n error: {error}")
+            }
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs b/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
new file mode 100644
index 000000000..d567077d4
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
@@ -0,0 +1,232 @@
+//! A language server scaffold, exposing a synchronous crossbeam-channel based API.
+//! This crate handles protocol handshaking and parsing messages, while you
+//! control the message dispatch loop yourself.
+//!
+//! Run with `RUST_LOG=lsp_server=debug` to see all the messages.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod msg;
+mod stdio;
+mod error;
+mod socket;
+mod req_queue;
+
+use std::{
+ io,
+ net::{TcpListener, TcpStream, ToSocketAddrs},
+};
+
+use crossbeam_channel::{Receiver, Sender};
+
+pub use crate::{
+ error::{ExtractError, ProtocolError},
+ msg::{ErrorCode, Message, Notification, Request, RequestId, Response, ResponseError},
+ req_queue::{Incoming, Outgoing, ReqQueue},
+ stdio::IoThreads,
+};
+
+/// Connection is just a pair of channels of LSP messages.
+pub struct Connection {
+    /// Outgoing messages (this side -> peer).
+    pub sender: Sender<Message>,
+    /// Incoming messages (peer -> this side).
+    pub receiver: Receiver<Message>,
+}
+
+impl Connection {
+    /// Create connection over standard in/standard out.
+    ///
+    /// Use this to create a real language server.
+    pub fn stdio() -> (Connection, IoThreads) {
+        let (sender, receiver, io_threads) = stdio::stdio_transport();
+        (Connection { sender, receiver }, io_threads)
+    }
+
+    /// Open a connection over tcp.
+    /// This call blocks until a connection is established.
+    ///
+    /// Use this to create a real language server.
+    pub fn connect<A: ToSocketAddrs>(addr: A) -> io::Result<(Connection, IoThreads)> {
+        let stream = TcpStream::connect(addr)?;
+        let (sender, receiver, io_threads) = socket::socket_transport(stream);
+        Ok((Connection { sender, receiver }, io_threads))
+    }
+
+    /// Listen for a connection over tcp.
+    /// This call blocks until a connection is established.
+    ///
+    /// Use this to create a real language server.
+    pub fn listen<A: ToSocketAddrs>(addr: A) -> io::Result<(Connection, IoThreads)> {
+        let listener = TcpListener::bind(addr)?;
+        // Accept exactly one client; the listener is dropped afterwards.
+        let (stream, _) = listener.accept()?;
+        let (sender, receiver, io_threads) = socket::socket_transport(stream);
+        Ok((Connection { sender, receiver }, io_threads))
+    }
+
+    /// Creates a pair of connected connections.
+    ///
+    /// Use this for testing.
+    pub fn memory() -> (Connection, Connection) {
+        // Cross-wire two unbounded channels so that what one side sends,
+        // the other side receives.
+        let (s1, r1) = crossbeam_channel::unbounded();
+        let (s2, r2) = crossbeam_channel::unbounded();
+        (Connection { sender: s1, receiver: r2 }, Connection { sender: s2, receiver: r1 })
+    }
+
+    /// Starts the initialization process by waiting for an initialize
+    /// request from the client. Use this for more advanced customization than
+    /// `initialize` can provide.
+    ///
+    /// Returns the request id and serialized `InitializeParams` from the client.
+    ///
+    /// # Example
+    ///
+    /// ```no_run
+    /// use std::error::Error;
+    /// use lsp_types::{ClientCapabilities, InitializeParams, ServerCapabilities};
+    ///
+    /// use lsp_server::{Connection, Message, Request, RequestId, Response};
+    ///
+    /// fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
+    ///     // Create the transport. Includes the stdio (stdin and stdout) versions but this could
+    ///     // also be implemented to use sockets or HTTP.
+    ///     let (connection, io_threads) = Connection::stdio();
+    ///
+    ///     // Run the server
+    ///     let (id, params) = connection.initialize_start()?;
+    ///
+    ///     let init_params: InitializeParams = serde_json::from_value(params).unwrap();
+    ///     let client_capabilities: ClientCapabilities = init_params.capabilities;
+    ///     let server_capabilities = ServerCapabilities::default();
+    ///
+    ///     let initialize_data = serde_json::json!({
+    ///         "capabilities": server_capabilities,
+    ///         "serverInfo": {
+    ///             "name": "lsp-server-test",
+    ///             "version": "0.1"
+    ///         }
+    ///     });
+    ///
+    ///     connection.initialize_finish(id, initialize_data)?;
+    ///
+    ///     // ... Run main loop ...
+    ///
+    ///     Ok(())
+    /// }
+    /// ```
+    pub fn initialize_start(&self) -> Result<(RequestId, serde_json::Value), ProtocolError> {
+        loop {
+            match self.receiver.recv() {
+                // Handshake starts once the client's `initialize` request
+                // arrives; hand its id and raw params back to the caller.
+                Ok(Message::Request(req)) if req.is_initialize() => {
+                    return Ok((req.id, req.params))
+                }
+                // Respond to non-initialize requests with ServerNotInitialized
+                Ok(Message::Request(req)) => {
+                    let resp = Response::new_err(
+                        req.id.clone(),
+                        ErrorCode::ServerNotInitialized as i32,
+                        format!("expected initialize request, got {:?}", req),
+                    );
+                    self.sender.send(resp.into()).unwrap();
+                }
+                // Any non-request message before `initialize` is a protocol error.
+                Ok(msg) => {
+                    return Err(ProtocolError(format!(
+                        "expected initialize request, got {:?}",
+                        msg
+                    )))
+                }
+                // recv() fails only when the channel is disconnected, i.e. the
+                // I/O side went away before initialization completed.
+                Err(e) => {
+                    return Err(ProtocolError(format!(
+                        "expected initialize request, got error: {}",
+                        e
+                    )))
+                }
+            };
+        }
+    }
+
+    /// Finishes the initialization process by sending an `InitializeResult` to the client
+    pub fn initialize_finish(
+        &self,
+        initialize_id: RequestId,
+        initialize_result: serde_json::Value,
+    ) -> Result<(), ProtocolError> {
+        let resp = Response::new_ok(initialize_id, initialize_result);
+        self.sender.send(resp.into()).unwrap();
+        // The handshake is complete once the client acknowledges with the
+        // `initialized` notification.
+        match &self.receiver.recv() {
+            Ok(Message::Notification(n)) if n.is_initialized() => (),
+            Ok(msg) => {
+                return Err(ProtocolError(format!(
+                    "expected Message::Notification, got: {:?}",
+                    msg,
+                )))
+            }
+            Err(e) => {
+                return Err(ProtocolError(format!(
+                    "expected initialized notification, got error: {}",
+                    e,
+                )))
+            }
+        }
+        Ok(())
+    }
+
+    /// Initialize the connection. Sends the server capabilities
+    /// to the client and returns the serialized client capabilities
+    /// on success. If more fine-grained initialization is required use
+    /// `initialize_start`/`initialize_finish`.
+    ///
+    /// # Example
+    ///
+    /// ```no_run
+    /// use std::error::Error;
+    /// use lsp_types::ServerCapabilities;
+    ///
+    /// use lsp_server::{Connection, Message, Request, RequestId, Response};
+    ///
+    /// fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
+    ///     // Create the transport. Includes the stdio (stdin and stdout) versions but this could
+    ///     // also be implemented to use sockets or HTTP.
+    ///     let (connection, io_threads) = Connection::stdio();
+    ///
+    ///     // Run the server
+    ///     let server_capabilities = serde_json::to_value(&ServerCapabilities::default()).unwrap();
+    ///     let initialization_params = connection.initialize(server_capabilities)?;
+    ///
+    ///     // ... Run main loop ...
+    ///
+    ///     Ok(())
+    /// }
+    /// ```
+    pub fn initialize(
+        &self,
+        server_capabilities: serde_json::Value,
+    ) -> Result<serde_json::Value, ProtocolError> {
+        let (id, params) = self.initialize_start()?;
+
+        let initialize_data = serde_json::json!({
+            "capabilities": server_capabilities,
+        });
+
+        self.initialize_finish(id, initialize_data)?;
+
+        Ok(params)
+    }
+
+    /// If `req` is `Shutdown`, respond to it and return `true`, otherwise return `false`
+    pub fn handle_shutdown(&self, req: &Request) -> Result<bool, ProtocolError> {
+        if !req.is_shutdown() {
+            return Ok(false);
+        }
+        let resp = Response::new_ok(req.id.clone(), ());
+        // Best-effort send: if the writer is already gone we still proceed to
+        // wait for `exit` below.
+        let _ = self.sender.send(resp.into());
+        // Per the protocol, the client follows `shutdown` with an `exit`
+        // notification; don't wait for it forever.
+        match &self.receiver.recv_timeout(std::time::Duration::from_secs(30)) {
+            Ok(Message::Notification(n)) if n.is_exit() => (),
+            Ok(msg) => {
+                return Err(ProtocolError(format!("unexpected message during shutdown: {:?}", msg)))
+            }
+            Err(e) => {
+                return Err(ProtocolError(format!("unexpected error during shutdown: {}", e)))
+            }
+        }
+        Ok(true)
+    }
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
new file mode 100644
index 000000000..97e5bd35c
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
@@ -0,0 +1,343 @@
+use std::{
+ fmt,
+ io::{self, BufRead, Write},
+};
+
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+
+use crate::error::ExtractError;
+
+/// A single JSON-RPC message, in either direction.
+///
+/// `untagged`: serde tries the variants in declaration order, picking the
+/// first whose fields are present in the JSON object.
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(untagged)]
+pub enum Message {
+    Request(Request),
+    Response(Response),
+    Notification(Notification),
+}
+
+impl From<Request> for Message {
+    fn from(request: Request) -> Message {
+        Message::Request(request)
+    }
+}
+
+impl From<Response> for Message {
+    fn from(response: Response) -> Message {
+        Message::Response(response)
+    }
+}
+
+impl From<Notification> for Message {
+    fn from(notification: Notification) -> Message {
+        Message::Notification(notification)
+    }
+}
+
+/// A JSON-RPC request id; the wire format allows either a number or a
+/// string. `transparent` serializes it as the bare inner value.
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[serde(transparent)]
+pub struct RequestId(IdRepr);
+
+// Kept private so callers construct ids only via the `From` impls below.
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[serde(untagged)]
+enum IdRepr {
+    I32(i32),
+    String(String),
+}
+
+impl From<i32> for RequestId {
+    fn from(id: i32) -> RequestId {
+        RequestId(IdRepr::I32(id))
+    }
+}
+
+impl From<String> for RequestId {
+    fn from(id: String) -> RequestId {
+        RequestId(IdRepr::String(id))
+    }
+}
+
+impl fmt::Display for RequestId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match &self.0 {
+            IdRepr::I32(it) => fmt::Display::fmt(it, f),
+            // Use debug here, to make it clear that `92` and `"92"` are
+            // different, and to reduce WTF factor if the sever uses `" "` as an
+            // ID.
+            IdRepr::String(it) => fmt::Debug::fmt(it, f),
+        }
+    }
+}
+
+/// A JSON-RPC request: has an id the peer must answer with a `Response`.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct Request {
+    pub id: RequestId,
+    pub method: String,
+    // Absent `params` deserializes to `Value::Null`, and `Null` params are
+    // omitted when serializing (see the round-trip tests below).
+    #[serde(default = "serde_json::Value::default")]
+    #[serde(skip_serializing_if = "serde_json::Value::is_null")]
+    pub params: serde_json::Value,
+}
+
+/// A JSON-RPC response; exactly one of `result`/`error` is expected to be set.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct Response {
+    // JSON RPC allows this to be null if it was impossible
+    // to decode the request's id. Ignore this special case
+    // and just die horribly.
+    pub id: RequestId,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub result: Option<serde_json::Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub error: Option<ResponseError>,
+}
+
+/// The error payload of a failed JSON-RPC response.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct ResponseError {
+    /// One of the `ErrorCode` values (stored as a plain integer on the wire).
+    pub code: i32,
+    pub message: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub data: Option<serde_json::Value>,
+}
+
+/// Well-known error codes from the JSON-RPC and LSP specifications.
+/// Cast to `i32` when building a `ResponseError`.
+#[derive(Clone, Copy, Debug)]
+#[allow(unused)]
+pub enum ErrorCode {
+    // Defined by JSON RPC:
+    ParseError = -32700,
+    InvalidRequest = -32600,
+    MethodNotFound = -32601,
+    InvalidParams = -32602,
+    InternalError = -32603,
+    ServerErrorStart = -32099,
+    ServerErrorEnd = -32000,
+
+    /// Error code indicating that a server received a notification or
+    /// request before the server has received the `initialize` request.
+    ServerNotInitialized = -32002,
+    UnknownErrorCode = -32001,
+
+    // Defined by the protocol:
+    /// The client has canceled a request and a server has detected
+    /// the cancel.
+    RequestCanceled = -32800,
+
+    /// The server detected that the content of a document got
+    /// modified outside normal conditions. A server should
+    /// NOT send this error code if it detects a content change
+    /// in it unprocessed messages. The result even computed
+    /// on an older state might still be useful for the client.
+    ///
+    /// If a client decides that a result is not of any use anymore
+    /// the client should cancel the request.
+    ContentModified = -32801,
+
+    /// The server cancelled the request. This error code should
+    /// only be used for requests that explicitly support being
+    /// server cancellable.
+    ///
+    /// @since 3.17.0
+    ServerCancelled = -32802,
+}
+
+/// A JSON-RPC notification: like a request but with no id and no reply.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct Notification {
+    pub method: String,
+    // Same null/absent handling as `Request::params`.
+    #[serde(default = "serde_json::Value::default")]
+    #[serde(skip_serializing_if = "serde_json::Value::is_null")]
+    pub params: serde_json::Value,
+}
+
+impl Message {
+    /// Reads one framed LSP message; returns `Ok(None)` on clean EOF.
+    pub fn read(r: &mut impl BufRead) -> io::Result<Option<Message>> {
+        Message::_read(r)
+    }
+    // Non-generic inner body; the generic wrapper above avoids
+    // monomorphizing this logic per reader type.
+    fn _read(r: &mut dyn BufRead) -> io::Result<Option<Message>> {
+        let text = match read_msg_text(r)? {
+            None => return Ok(None),
+            Some(text) => text,
+        };
+        let msg = serde_json::from_str(&text)?;
+        Ok(Some(msg))
+    }
+    /// Serializes and writes this message with LSP framing, flushing `w`.
+    pub fn write(self, w: &mut impl Write) -> io::Result<()> {
+        self._write(w)
+    }
+    fn _write(self, w: &mut dyn Write) -> io::Result<()> {
+        // Wrapper that injects the mandatory `"jsonrpc": "2.0"` field,
+        // flattening the message's own fields alongside it.
+        #[derive(Serialize)]
+        struct JsonRpc {
+            jsonrpc: &'static str,
+            #[serde(flatten)]
+            msg: Message,
+        }
+        let text = serde_json::to_string(&JsonRpc { jsonrpc: "2.0", msg: self })?;
+        write_msg_text(w, &text)
+    }
+}
+
+impl Response {
+    /// Builds a success response; panics if `result` fails to serialize.
+    pub fn new_ok<R: Serialize>(id: RequestId, result: R) -> Response {
+        Response { id, result: Some(serde_json::to_value(result).unwrap()), error: None }
+    }
+    /// Builds an error response with the given JSON-RPC error `code`.
+    pub fn new_err(id: RequestId, code: i32, message: String) -> Response {
+        let error = ResponseError { code, message, data: None };
+        Response { id, result: None, error: Some(error) }
+    }
+}
+
+impl Request {
+    /// Builds a request; panics if `params` fails to serialize.
+    pub fn new<P: Serialize>(id: RequestId, method: String, params: P) -> Request {
+        Request { id, method, params: serde_json::to_value(params).unwrap() }
+    }
+    /// If this request's method is `method`, deserializes its params as `P`
+    /// and returns them with the id; otherwise hands the request back via
+    /// `ExtractError::MethodMismatch` so another extraction can be tried.
+    pub fn extract<P: DeserializeOwned>(
+        self,
+        method: &str,
+    ) -> Result<(RequestId, P), ExtractError<Request>> {
+        if self.method != method {
+            return Err(ExtractError::MethodMismatch(self));
+        }
+        match serde_json::from_value(self.params) {
+            Ok(params) => Ok((self.id, params)),
+            Err(error) => Err(ExtractError::JsonError { method: self.method, error }),
+        }
+    }
+
+    // Helpers for the lifecycle methods recognized by `Connection`.
+    pub(crate) fn is_shutdown(&self) -> bool {
+        self.method == "shutdown"
+    }
+    pub(crate) fn is_initialize(&self) -> bool {
+        self.method == "initialize"
+    }
+}
+
+impl Notification {
+    /// Builds a notification; panics if `params` fails to serialize.
+    pub fn new(method: String, params: impl Serialize) -> Notification {
+        Notification { method, params: serde_json::to_value(params).unwrap() }
+    }
+    /// If this notification's method is `method`, deserializes its params as
+    /// `P`; otherwise hands the notification back via `MethodMismatch`.
+    pub fn extract<P: DeserializeOwned>(
+        self,
+        method: &str,
+    ) -> Result<P, ExtractError<Notification>> {
+        if self.method != method {
+            return Err(ExtractError::MethodMismatch(self));
+        }
+        match serde_json::from_value(self.params) {
+            Ok(params) => Ok(params),
+            Err(error) => Err(ExtractError::JsonError { method: self.method, error }),
+        }
+    }
+    // Lifecycle helpers used by the transports and `Connection`.
+    pub(crate) fn is_exit(&self) -> bool {
+        self.method == "exit"
+    }
+    pub(crate) fn is_initialized(&self) -> bool {
+        self.method == "initialized"
+    }
+}
+
+/// Reads one LSP-framed message body: `\r\n`-terminated header lines, a
+/// blank line, then exactly `Content-Length` bytes of UTF-8 payload.
+///
+/// Returns `Ok(None)` on clean EOF (no bytes before the first header), and
+/// an `InvalidData` error for malformed headers or a missing Content-Length.
+fn read_msg_text(inp: &mut dyn BufRead) -> io::Result<Option<String>> {
+    fn invalid_data(error: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
+        io::Error::new(io::ErrorKind::InvalidData, error)
+    }
+    macro_rules! invalid_data {
+        ($($tt:tt)*) => (invalid_data(format!($($tt)*)))
+    }
+
+    let mut size = None;
+    let mut buf = String::new();
+    loop {
+        buf.clear();
+        // read_line returning 0 means EOF.
+        if inp.read_line(&mut buf)? == 0 {
+            return Ok(None);
+        }
+        if !buf.ends_with("\r\n") {
+            return Err(invalid_data!("malformed header: {:?}", buf));
+        }
+        // Strip the trailing "\r\n"; an empty line ends the header section.
+        let buf = &buf[..buf.len() - 2];
+        if buf.is_empty() {
+            break;
+        }
+        let mut parts = buf.splitn(2, ": ");
+        let header_name = parts.next().unwrap();
+        let header_value =
+            parts.next().ok_or_else(|| invalid_data!("malformed header: {:?}", buf))?;
+        // Headers other than Content-Length (e.g. Content-Type) are ignored.
+        if header_name == "Content-Length" {
+            size = Some(header_value.parse::<usize>().map_err(invalid_data)?);
+        }
+    }
+    let size: usize = size.ok_or_else(|| invalid_data!("no Content-Length"))?;
+    // Reuse the header buffer's allocation for the body bytes.
+    let mut buf = buf.into_bytes();
+    buf.resize(size, 0);
+    inp.read_exact(&mut buf)?;
+    let buf = String::from_utf8(buf).map_err(invalid_data)?;
+    log::debug!("< {}", buf);
+    Ok(Some(buf))
+}
+
+/// Writes `msg` with LSP framing (`Content-Length` header, blank line,
+/// payload) and flushes so the peer sees the message immediately.
+fn write_msg_text(out: &mut dyn Write, msg: &str) -> io::Result<()> {
+    log::debug!("> {}", msg);
+    write!(out, "Content-Length: {}\r\n\r\n", msg.len())?;
+    out.write_all(msg.as_bytes())?;
+    out.flush()?;
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{Message, Notification, Request, RequestId};
+
+    // The deserialization tests check that an explicit `"params": null` and
+    // an entirely absent `params` field parse identically (both rely on the
+    // serde default on the `params` field).
+    #[test]
+    fn shutdown_with_explicit_null() {
+        let text = "{\"jsonrpc\": \"2.0\",\"id\": 3,\"method\": \"shutdown\", \"params\": null }";
+        let msg: Message = serde_json::from_str(text).unwrap();
+
+        assert!(
+            matches!(msg, Message::Request(req) if req.id == 3.into() && req.method == "shutdown")
+        );
+    }
+
+    #[test]
+    fn shutdown_with_no_params() {
+        let text = "{\"jsonrpc\": \"2.0\",\"id\": 3,\"method\": \"shutdown\"}";
+        let msg: Message = serde_json::from_str(text).unwrap();
+
+        assert!(
+            matches!(msg, Message::Request(req) if req.id == 3.into() && req.method == "shutdown")
+        );
+    }
+
+    #[test]
+    fn notification_with_explicit_null() {
+        let text = "{\"jsonrpc\": \"2.0\",\"method\": \"exit\", \"params\": null }";
+        let msg: Message = serde_json::from_str(text).unwrap();
+
+        assert!(matches!(msg, Message::Notification(not) if not.method == "exit"));
+    }
+
+    #[test]
+    fn notification_with_no_params() {
+        let text = "{\"jsonrpc\": \"2.0\",\"method\": \"exit\"}";
+        let msg: Message = serde_json::from_str(text).unwrap();
+
+        assert!(matches!(msg, Message::Notification(not) if not.method == "exit"));
+    }
+
+    // The serialization tests check the mirror property: `Null` params are
+    // dropped from the output via `skip_serializing_if`.
+    #[test]
+    fn serialize_request_with_null_params() {
+        let msg = Message::Request(Request {
+            id: RequestId::from(3),
+            method: "shutdown".into(),
+            params: serde_json::Value::Null,
+        });
+        let serialized = serde_json::to_string(&msg).unwrap();
+
+        assert_eq!("{\"id\":3,\"method\":\"shutdown\"}", serialized);
+    }
+
+    #[test]
+    fn serialize_notification_with_null_params() {
+        let msg = Message::Notification(Notification {
+            method: "exit".into(),
+            params: serde_json::Value::Null,
+        });
+        let serialized = serde_json::to_string(&msg).unwrap();
+
+        assert_eq!("{\"method\":\"exit\"}", serialized);
+    }
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs b/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs
new file mode 100644
index 000000000..1f3d44715
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs
@@ -0,0 +1,62 @@
+use std::collections::HashMap;
+
+use serde::Serialize;
+
+use crate::{ErrorCode, Request, RequestId, Response, ResponseError};
+
+/// Manages the set of pending requests, both incoming and outgoing.
+///
+/// `I` and `O` are caller-chosen payloads stored per pending incoming and
+/// outgoing request respectively (e.g. handler state or bookkeeping data).
+#[derive(Debug)]
+pub struct ReqQueue<I, O> {
+    pub incoming: Incoming<I>,
+    pub outgoing: Outgoing<O>,
+}
+
+// Manual impl rather than derive so `I`/`O` need not be `Default`.
+impl<I, O> Default for ReqQueue<I, O> {
+    fn default() -> ReqQueue<I, O> {
+        ReqQueue {
+            incoming: Incoming { pending: HashMap::default() },
+            outgoing: Outgoing { next_id: 0, pending: HashMap::default() },
+        }
+    }
+}
+
+/// Requests received from the peer that have not been answered yet.
+#[derive(Debug)]
+pub struct Incoming<I> {
+    pending: HashMap<RequestId, I>,
+}
+
+/// Requests we sent to the peer that have not been answered yet.
+#[derive(Debug)]
+pub struct Outgoing<O> {
+    /// Monotonically increasing id allocator for outgoing requests.
+    next_id: i32,
+    pending: HashMap<RequestId, O>,
+}
+
+impl<I> Incoming<I> {
+    /// Records a newly received request as pending.
+    pub fn register(&mut self, id: RequestId, data: I) {
+        self.pending.insert(id, data);
+    }
+    /// Cancels a pending request, producing the `RequestCanceled` response
+    /// to send; returns `None` if the request was already completed.
+    pub fn cancel(&mut self, id: RequestId) -> Option<Response> {
+        let _data = self.complete(id.clone())?;
+        let error = ResponseError {
+            code: ErrorCode::RequestCanceled as i32,
+            message: "canceled by client".to_string(),
+            data: None,
+        };
+        Some(Response { id, result: None, error: Some(error) })
+    }
+    /// Marks a request as answered, returning its stored payload.
+    pub fn complete(&mut self, id: RequestId) -> Option<I> {
+        self.pending.remove(&id)
+    }
+}
+
+impl<O> Outgoing<O> {
+    /// Allocates a fresh id, records `data` as pending, and builds the
+    /// `Request` to send to the peer.
+    pub fn register<P: Serialize>(&mut self, method: String, params: P, data: O) -> Request {
+        let id = RequestId::from(self.next_id);
+        self.pending.insert(id.clone(), data);
+        self.next_id += 1;
+        Request::new(id, method, params)
+    }
+    /// Marks an outgoing request as answered, returning its stored payload.
+    pub fn complete(&mut self, id: RequestId) -> Option<O> {
+        self.pending.remove(&id)
+    }
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs b/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs
new file mode 100644
index 000000000..4a59c4c0f
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs
@@ -0,0 +1,46 @@
+use std::{
+ io::{self, BufReader},
+ net::TcpStream,
+ thread,
+};
+
+use crossbeam_channel::{bounded, Receiver, Sender};
+
+use crate::{
+ stdio::{make_io_threads, IoThreads},
+ Message,
+};
+
+/// Wraps a connected `TcpStream` in the channel-based transport: one thread
+/// reads framed messages off the socket, another writes them back.
+pub(crate) fn socket_transport(
+    stream: TcpStream,
+) -> (Sender<Message>, Receiver<Message>, IoThreads) {
+    // Each I/O thread gets its own clone of the stream handle.
+    let (reader_receiver, reader) = make_reader(stream.try_clone().unwrap());
+    let (writer_sender, writer) = make_write(stream.try_clone().unwrap());
+    let io_threads = make_io_threads(reader, writer);
+    (writer_sender, reader_receiver, io_threads)
+}
+
+/// Spawns the socket reader thread: forwards each incoming message to a
+/// rendezvous (capacity-0) channel, stopping after the `exit` notification.
+fn make_reader(stream: TcpStream) -> (Receiver<Message>, thread::JoinHandle<io::Result<()>>) {
+    let (reader_sender, reader_receiver) = bounded::<Message>(0);
+    let reader = thread::spawn(move || {
+        let mut buf_read = BufReader::new(stream);
+        while let Some(msg) = Message::read(&mut buf_read).unwrap() {
+            // Check before sending: `send` moves `msg` away.
+            let is_exit = matches!(&msg, Message::Notification(n) if n.is_exit());
+            reader_sender.send(msg).unwrap();
+            if is_exit {
+                break;
+            }
+        }
+        Ok(())
+    });
+    (reader_receiver, reader)
+}
+
+/// Spawns the socket writer thread: drains the rendezvous channel and writes
+/// each message to the stream; ends when all senders are dropped.
+fn make_write(mut stream: TcpStream) -> (Sender<Message>, thread::JoinHandle<io::Result<()>>) {
+    let (writer_sender, writer_receiver) = bounded::<Message>(0);
+    let writer = thread::spawn(move || {
+        writer_receiver.into_iter().try_for_each(|it| it.write(&mut stream)).unwrap();
+        Ok(())
+    });
+    (writer_sender, writer)
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs b/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
new file mode 100644
index 000000000..cdee6432d
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
@@ -0,0 +1,71 @@
+use std::{
+ io::{self, stdin, stdout},
+ thread,
+};
+
+use crossbeam_channel::{bounded, Receiver, Sender};
+
+use crate::Message;
+
+/// Creates an LSP connection via stdio.
+///
+/// Spawns one thread that writes outgoing messages to (locked) stdout and
+/// one that reads incoming messages from (locked) stdin, bridged through
+/// rendezvous (capacity-0) channels.
+pub(crate) fn stdio_transport() -> (Sender<Message>, Receiver<Message>, IoThreads) {
+    let (writer_sender, writer_receiver) = bounded::<Message>(0);
+    let writer = thread::spawn(move || {
+        let stdout = stdout();
+        let mut stdout = stdout.lock();
+        // `?` propagates the first write error into the thread's result.
+        writer_receiver.into_iter().try_for_each(|it| it.write(&mut stdout))?;
+        Ok(())
+    });
+    let (reader_sender, reader_receiver) = bounded::<Message>(0);
+    let reader = thread::spawn(move || {
+        let stdin = stdin();
+        let mut stdin = stdin.lock();
+        while let Some(msg) = Message::read(&mut stdin)? {
+            // Check before sending: `send` moves `msg` away.
+            let is_exit = match &msg {
+                Message::Notification(n) => n.is_exit(),
+                _ => false,
+            };
+
+            reader_sender.send(msg).unwrap();
+
+            // Stop reading after `exit` so the thread can be joined.
+            if is_exit {
+                break;
+            }
+        }
+        Ok(())
+    });
+    let threads = IoThreads { reader, writer };
+    (writer_sender, reader_receiver, threads)
+}
+
+// Creates an IoThreads
+// (lives here rather than in `socket.rs` because the fields are private).
+pub(crate) fn make_io_threads(
+    reader: thread::JoinHandle<io::Result<()>>,
+    writer: thread::JoinHandle<io::Result<()>>,
+) -> IoThreads {
+    IoThreads { reader, writer }
+}
+
+/// Join handles for the transport's reader and writer threads; call
+/// [`IoThreads::join`] after the main loop exits to surface I/O errors.
+pub struct IoThreads {
+    reader: thread::JoinHandle<io::Result<()>>,
+    writer: thread::JoinHandle<io::Result<()>>,
+}
+
+impl IoThreads {
+    /// Waits for both I/O threads to finish, returning the first I/O error.
+    /// If a thread panicked, the panic is re-raised on this thread.
+    pub fn join(self) -> io::Result<()> {
+        match self.reader.join() {
+            Ok(r) => r?,
+            Err(err) => {
+                println!("reader panicked!");
+                std::panic::panic_any(err)
+            }
+        }
+        match self.writer.join() {
+            Ok(r) => r,
+            Err(err) => {
+                println!("writer panicked!");
+                std::panic::panic_any(err);
+            }
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/rustfmt.toml b/src/tools/rust-analyzer/rustfmt.toml
new file mode 100644
index 000000000..71007de81
--- /dev/null
+++ b/src/tools/rust-analyzer/rustfmt.toml
@@ -0,0 +1,2 @@
+reorder_modules = false
+use_small_heuristics = "Max"
diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml
new file mode 100644
index 000000000..fa0824ac5
--- /dev/null
+++ b/src/tools/rust-analyzer/triagebot.toml
@@ -0,0 +1 @@
+[assign]
diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml
new file mode 100644
index 000000000..95d44e9b9
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "xtask"
+version = "0.1.0"
+publish = false
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[dependencies]
+anyhow = "1.0.57"
+flate2 = "1.0.24"
+write-json = "0.1.2"
+xshell = "0.2.2"
+xflags = "0.2.4"
+# Avoid adding more dependencies to this crate
diff --git a/src/tools/rust-analyzer/xtask/src/dist.rs b/src/tools/rust-analyzer/xtask/src/dist.rs
new file mode 100644
index 000000000..686aec4ae
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/dist.rs
@@ -0,0 +1,170 @@
+use std::{
+ env,
+ fs::File,
+ io,
+ path::{Path, PathBuf},
+};
+
+use flate2::{write::GzEncoder, Compression};
+use xshell::{cmd, Shell};
+
+use crate::{date_iso, flags, project_root};
+
+// Client (VS Code extension) version prefixes; the patch component comes
+// from `--client-patch-version`.
+const VERSION_STABLE: &str = "0.3";
+const VERSION_NIGHTLY: &str = "0.4";
+const VERSION_DEV: &str = "0.5"; // keep this one in sync with `package.json`
+
+impl flags::Dist {
+    /// Builds release artifacts into `dist/`: the gzipped server binary and,
+    /// when a client patch version is given, the VS Code extension as well.
+    pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+        // CI sets GITHUB_REF; the release branch produces "stable" artifacts.
+        let stable = sh.var("GITHUB_REF").unwrap_or_default().as_str() == "refs/heads/release";
+
+        let project_root = project_root();
+        let target = Target::get(&project_root);
+        let dist = project_root.join("dist");
+        // Start from a clean dist directory.
+        sh.remove_path(&dist)?;
+        sh.create_dir(&dist)?;
+
+        if let Some(patch_version) = self.client_patch_version {
+            let version = if stable {
+                format!("{}.{}", VERSION_STABLE, patch_version)
+            } else {
+                // A hack to make VS Code prefer nightly over stable.
+                format!("{}.{}", VERSION_NIGHTLY, patch_version)
+            };
+            dist_server(sh, &format!("{version}-standalone"), &target)?;
+            let release_tag = if stable { date_iso(sh)? } else { "nightly".to_string() };
+            dist_client(sh, &version, &release_tag, &target)?;
+        } else {
+            // No client version requested: build only the server artifact.
+            dist_server(sh, "0.0.0-standalone", &target)?;
+        }
+        Ok(())
+    }
+}
+
+/// Prepares the VS Code extension for packaging: bundles the server binary
+/// (and debug symbols, when present) into `editors/code/server` and patches
+/// `package.json` with the release version/tag.
+fn dist_client(
+    sh: &Shell,
+    version: &str,
+    release_tag: &str,
+    target: &Target,
+) -> anyhow::Result<()> {
+    let bundle_path = Path::new("editors").join("code").join("server");
+    sh.create_dir(&bundle_path)?;
+    sh.copy_file(&target.server_path, &bundle_path)?;
+    if let Some(symbols_path) = &target.symbols_path {
+        sh.copy_file(symbols_path, &bundle_path)?;
+    }
+
+    // Work inside the extension directory until `_d` is dropped.
+    let _d = sh.push_dir("./editors/code");
+
+    // Rewrite the dev placeholders in package.json; each `replace` asserts
+    // the pattern exists, so drift in package.json fails loudly here.
+    let mut patch = Patch::new(sh, "./package.json")?;
+    patch
+        .replace(
+            &format!(r#""version": "{}.0-dev""#, VERSION_DEV),
+            &format!(r#""version": "{}""#, version),
+        )
+        .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{}""#, release_tag))
+        .replace(r#""$generated-start": {},"#, "")
+        .replace(",\n    \"$generated-end\": {}", "")
+        .replace(r#""enabledApiProposals": [],"#, r#""#);
+    patch.commit(sh)?;
+
+    Ok(())
+}
+
+/// Builds the release server binary for `target` (with `CFG_RELEASE` set to
+/// `release` and thin LTO) and gzips it into `dist/`.
+fn dist_server(sh: &Shell, release: &str, target: &Target) -> anyhow::Result<()> {
+    let _e = sh.push_env("CFG_RELEASE", release);
+    let _e = sh.push_env("CARGO_PROFILE_RELEASE_LTO", "thin");
+
+    // Uncomment to enable debug info for releases. Note that:
+    //   * debug info is split on windows and macs, so it does nothing for those platforms,
+    //   * on Linux, this blows up the binary size from 8MB to 43MB, which is unreasonable.
+    // let _e = sh.push_env("CARGO_PROFILE_RELEASE_DEBUG", "1");
+
+    // Linux release builds are compiled with clang.
+    if target.name.contains("-linux-") {
+        env::set_var("CC", "clang");
+    }
+
+    let target_name = &target.name;
+    cmd!(sh, "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --target {target_name} --release").run()?;
+
+    let dst = Path::new("dist").join(&target.artifact_name);
+    gzip(&target.server_path, &dst.with_extension("gz"))?;
+
+    Ok(())
+}
+
+/// Gzip-compresses `src_path` into `dest_path` at maximum compression.
+fn gzip(src_path: &Path, dest_path: &Path) -> anyhow::Result<()> {
+    let mut encoder = GzEncoder::new(File::create(dest_path)?, Compression::best());
+    let mut input = io::BufReader::new(File::open(src_path)?);
+    io::copy(&mut input, &mut encoder)?;
+    // finish() flushes the trailer; dropping without it would truncate output.
+    encoder.finish()?;
+    Ok(())
+}
+
+/// Describes the build target and the paths of its release artifacts.
+struct Target {
+    /// Target triple, e.g. `x86_64-unknown-linux-gnu`.
+    name: String,
+    /// Path of the built server binary under `target/<name>/release`.
+    server_path: PathBuf,
+    /// Path of the PDB debug symbols (Windows targets only).
+    symbols_path: Option<PathBuf>,
+    /// File name used for the artifact placed into `dist/`.
+    artifact_name: String,
+}
+
+impl Target {
+    /// Determines the target triple from `RA_TARGET`, falling back to a
+    /// default x86_64 triple for the host OS.
+    fn get(project_root: &Path) -> Self {
+        let name = match env::var("RA_TARGET") {
+            Ok(target) => target,
+            _ => {
+                if cfg!(target_os = "linux") {
+                    "x86_64-unknown-linux-gnu".to_string()
+                } else if cfg!(target_os = "windows") {
+                    "x86_64-pc-windows-msvc".to_string()
+                } else if cfg!(target_os = "macos") {
+                    "x86_64-apple-darwin".to_string()
+                } else {
+                    panic!("Unsupported OS, maybe try setting RA_TARGET")
+                }
+            }
+        };
+        let out_path = project_root.join("target").join(&name).join("release");
+        // Only Windows builds produce a separate symbols file (.pdb).
+        let (exe_suffix, symbols_path) = if name.contains("-windows-") {
+            (".exe".into(), Some(out_path.join("rust_analyzer.pdb")))
+        } else {
+            (String::new(), None)
+        };
+        let server_path = out_path.join(format!("rust-analyzer{}", exe_suffix));
+        let artifact_name = format!("rust-analyzer-{}{}", name, exe_suffix);
+        Self { name, server_path, symbols_path, artifact_name }
+    }
+}
+
+/// In-memory text patch over a file: accumulate `replace` calls, then
+/// `commit` to write the result back.
+struct Patch {
+    path: PathBuf,
+    /// The file's contents as read; kept for a (currently disabled)
+    /// restore-on-drop — see the `Drop` impl below.
+    original_contents: String,
+    /// The working copy that `replace` mutates.
+    contents: String,
+}
+
+impl Patch {
+    /// Reads `path` and starts a patch over its contents.
+    fn new(sh: &Shell, path: impl Into<PathBuf>) -> anyhow::Result<Patch> {
+        let path = path.into();
+        let contents = sh.read_file(&path)?;
+        Ok(Patch { path, original_contents: contents.clone(), contents })
+    }
+
+    /// Replaces all occurrences of `from` with `to`; asserts that `from`
+    /// actually occurs so stale patterns fail loudly.
+    fn replace(&mut self, from: &str, to: &str) -> &mut Patch {
+        assert!(self.contents.contains(from));
+        self.contents = self.contents.replace(from, to);
+        self
+    }
+
+    /// Writes the patched contents back to the file.
+    fn commit(&self, sh: &Shell) -> anyhow::Result<()> {
+        sh.write_file(&self.path, &self.contents)?;
+        Ok(())
+    }
+}
+
+impl Drop for Patch {
+    fn drop(&mut self) {
+        // FIXME: find a way to bring this back
+        // (the intent was to restore the original file on drop; the field
+        // read below only silences the dead-code warning meanwhile).
+        let _ = &self.original_contents;
+        // write_file(&self.path, &self.original_contents).unwrap();
+    }
+}
diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs
new file mode 100644
index 000000000..993c64cce
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/flags.rs
@@ -0,0 +1,148 @@
+#![allow(unreachable_pub)]
+
+use crate::install::{ClientOpt, Malloc, ServerOpt};
+
+// Declarative CLI definition. The macro checks this spec against the
+// hand-maintained "generated" section below (src "./src/flags.rs");
+// run `env UPDATE_XFLAGS=1 cargo build` after editing to regenerate it.
+// (Doc comments inside the macro become the `--help` text.)
+xflags::xflags! {
+    src "./src/flags.rs"
+
+    /// Run custom build command.
+    cmd xtask {
+        default cmd help {
+            /// Print help information.
+            optional -h, --help
+        }
+
+        /// Install rust-analyzer server or editor plugin.
+        cmd install {
+            /// Install only VS Code plugin.
+            optional --client
+            /// One of 'code', 'code-exploration', 'code-insiders', 'codium', or 'code-oss'.
+            optional --code-bin name: String
+
+            /// Install only the language server.
+            optional --server
+            /// Use mimalloc allocator for server
+            optional --mimalloc
+            /// Use jemalloc allocator for server
+            optional --jemalloc
+        }
+
+        cmd fuzz-tests {}
+
+        cmd release {
+            optional --dry-run
+        }
+        cmd promote {
+            optional --dry-run
+        }
+        cmd dist {
+            optional --client-patch-version version: String
+        }
+        cmd metrics {
+            optional --dry-run
+        }
+        /// Builds a benchmark version of rust-analyzer and puts it into `./target`.
+        cmd bb
+            required suffix: String
+        {}
+    }
+}
+
+// generated start
+// The following code is generated by `xflags` macro.
+// Run `env UPDATE_XFLAGS=1 cargo build` to regenerate.
+// NOTE: do not edit this section by hand; it is overwritten on regeneration.
+#[derive(Debug)]
+pub struct Xtask {
+    pub subcommand: XtaskCmd,
+}
+
+#[derive(Debug)]
+pub enum XtaskCmd {
+    Help(Help),
+    Install(Install),
+    FuzzTests(FuzzTests),
+    Release(Release),
+    Promote(Promote),
+    Dist(Dist),
+    Metrics(Metrics),
+    Bb(Bb),
+}
+
+#[derive(Debug)]
+pub struct Help {
+    pub help: bool,
+}
+
+#[derive(Debug)]
+pub struct Install {
+    pub client: bool,
+    pub code_bin: Option<String>,
+    pub server: bool,
+    pub mimalloc: bool,
+    pub jemalloc: bool,
+}
+
+#[derive(Debug)]
+pub struct FuzzTests;
+
+#[derive(Debug)]
+pub struct Release {
+    pub dry_run: bool,
+}
+
+#[derive(Debug)]
+pub struct Promote {
+    pub dry_run: bool,
+}
+
+#[derive(Debug)]
+pub struct Dist {
+    pub client_patch_version: Option<String>,
+}
+
+#[derive(Debug)]
+pub struct Metrics {
+    pub dry_run: bool,
+}
+
+#[derive(Debug)]
+pub struct Bb {
+    pub suffix: String,
+}
+
+impl Xtask {
+    pub const HELP: &'static str = Self::HELP_;
+
+    #[allow(dead_code)]
+    pub fn from_env() -> xflags::Result<Self> {
+        Self::from_env_()
+    }
+
+    #[allow(dead_code)]
+    pub fn from_vec(args: Vec<std::ffi::OsString>) -> xflags::Result<Self> {
+        Self::from_vec_(args)
+    }
+}
+// generated end
+
+impl Install {
+    /// Returns the server install options, or `None` when the user asked
+    /// for the client only (`--client` without `--server`).
+    pub(crate) fn server(&self) -> Option<ServerOpt> {
+        if self.client && !self.server {
+            return None;
+        }
+        // Allocator selection: --mimalloc wins over --jemalloc; default
+        // is the system allocator.
+        let malloc = if self.mimalloc {
+            Malloc::Mimalloc
+        } else if self.jemalloc {
+            Malloc::Jemalloc
+        } else {
+            Malloc::System
+        };
+        Some(ServerOpt { malloc })
+    }
+    /// Returns the client install options, or `None` when the user asked
+    /// for the server only (`--server` without `--client`).
+    pub(crate) fn client(&self) -> Option<ClientOpt> {
+        if !self.client && self.server {
+            return None;
+        }
+        Some(ClientOpt { code_bin: self.code_bin.clone() })
+    }
+}
diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs
new file mode 100644
index 000000000..ae978d551
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/install.rs
@@ -0,0 +1,142 @@
+//! Installs rust-analyzer language server and/or editor plugin.
+
+use std::{env, path::PathBuf, str};
+
+use anyhow::{bail, format_err, Context, Result};
+use xshell::{cmd, Shell};
+
+use crate::flags;
+
+impl flags::Install {
+    /// Entry point for `cargo xtask install`: installs the language
+    /// server and/or the VS Code client, as selected by the flags.
+    pub(crate) fn run(self, sh: &Shell) -> Result<()> {
+        // On macOS the `code` binary often isn't on PATH; patch PATH first.
+        if cfg!(target_os = "macos") {
+            fix_path_for_mac(sh).context("Fix path for mac")?;
+        }
+        if let Some(server) = self.server() {
+            install_server(sh, server).context("install server")?;
+        }
+        if let Some(client) = self.client() {
+            install_client(sh, client).context("install client")?;
+        }
+        Ok(())
+    }
+}
+
+/// Options for installing the VS Code extension.
+#[derive(Clone)]
+pub(crate) struct ClientOpt {
+    // Explicit editor binary name; when `None`, the known VS Code
+    // variants in `VS_CODES` are probed in order.
+    pub(crate) code_bin: Option<String>,
+}
+
+/// Known VS Code binary names, probed in this order when no
+/// `--code-bin` is given.
+const VS_CODES: &[&str] = &["code", "code-exploration", "code-insiders", "codium", "code-oss"];
+
+/// Options for installing the language server.
+pub(crate) struct ServerOpt {
+    pub(crate) malloc: Malloc,
+}
+
+/// Which memory allocator the server is built with.
+pub(crate) enum Malloc {
+    System,
+    Mimalloc,
+    Jemalloc,
+}
+
+/// On macOS, VS Code's CLI shim lives inside the .app bundle and is
+/// usually not on PATH. Probe the standard system-wide and per-user
+/// install locations and append any that exist to the shell's PATH.
+fn fix_path_for_mac(sh: &Shell) -> Result<()> {
+    let mut vscode_path: Vec<PathBuf> = {
+        const COMMON_APP_PATH: &str =
+            r"/Applications/Visual Studio Code.app/Contents/Resources/app/bin";
+        // Empty prefix => system-wide /Applications install.
+        const ROOT_DIR: &str = "";
+        let home_dir = sh.var("HOME").map_err(|err| {
+            format_err!("Failed getting HOME from environment with error: {}.", err)
+        })?;
+
+        // Keep only the candidate locations that actually exist.
+        [ROOT_DIR, &home_dir]
+            .into_iter()
+            .map(|dir| dir.to_string() + COMMON_APP_PATH)
+            .map(PathBuf::from)
+            .filter(|path| path.exists())
+            .collect()
+    };
+
+    if !vscode_path.is_empty() {
+        let vars = sh.var_os("PATH").context("Could not get PATH variable from env.")?;
+
+        // Append (not prepend) the VS Code bin dirs to the existing PATH.
+        let mut paths = env::split_paths(&vars).collect::<Vec<_>>();
+        paths.append(&mut vscode_path);
+        let new_paths = env::join_paths(paths).context("build env PATH")?;
+        sh.set_var("PATH", &new_paths);
+    }
+
+    Ok(())
+}
+
+/// Build the VS Code extension with npm, then install the resulting
+/// `rust-analyzer.vsix` into the first working editor binary and verify
+/// the extension shows up in its extension list.
+fn install_client(sh: &Shell, client_opt: ClientOpt) -> Result<()> {
+    let _dir = sh.push_dir("./editors/code");
+
+    // Package extension.
+    // On Windows, npm must be invoked through cmd.exe (it is a .cmd shim).
+    if cfg!(unix) {
+        cmd!(sh, "npm --version").run().context("`npm` is required to build the VS Code plugin")?;
+        cmd!(sh, "npm ci").run()?;
+
+        cmd!(sh, "npm run package --scripts-prepend-node-path").run()?;
+    } else {
+        cmd!(sh, "cmd.exe /c npm --version")
+            .run()
+            .context("`npm` is required to build the VS Code plugin")?;
+        cmd!(sh, "cmd.exe /c npm ci").run()?;
+
+        cmd!(sh, "cmd.exe /c npm run package").run()?;
+    };
+
+    // Find the appropriate VS Code binary.
+    // `lifetime_extender` keeps the single user-provided name alive so a
+    // `&[&str]` slice can be formed over it, mirroring the VS_CODES case.
+    let lifetime_extender;
+    let candidates: &[&str] = match client_opt.code_bin.as_deref() {
+        Some(it) => {
+            lifetime_extender = [it];
+            &lifetime_extender[..]
+        }
+        None => VS_CODES,
+    };
+    // First candidate that responds to `--version` wins.
+    let code = candidates
+        .iter()
+        .copied()
+        .find(|&bin| {
+            if cfg!(unix) {
+                cmd!(sh, "{bin} --version").read().is_ok()
+            } else {
+                cmd!(sh, "cmd.exe /c {bin}.cmd --version").read().is_ok()
+            }
+        })
+        .ok_or_else(|| {
+            format_err!("Can't execute `{} --version`. Perhaps it is not in $PATH?", candidates[0])
+        })?;
+
+    // Install & verify.
+    let installed_extensions = if cfg!(unix) {
+        cmd!(sh, "{code} --install-extension rust-analyzer.vsix --force").run()?;
+        cmd!(sh, "{code} --list-extensions").read()?
+    } else {
+        cmd!(sh, "cmd.exe /c {code}.cmd --install-extension rust-analyzer.vsix --force").run()?;
+        cmd!(sh, "cmd.exe /c {code}.cmd --list-extensions").read()?
+    };
+
+    if !installed_extensions.contains("rust-analyzer") {
+        bail!(
+            "Could not install the Visual Studio Code extension. \
+            Please make sure you have at least NodeJS 12.x together with the latest version of VS Code installed and try again. \
+            Note that installing via xtask install does not work for VS Code Remote, instead you’ll need to install the .vsix manually."
+        );
+    }
+
+    Ok(())
+}
+
+/// Install the rust-analyzer server binary via `cargo install`, enabling
+/// the allocator feature selected by `opts` plus `force-always-assert`.
+fn install_server(sh: &Shell, opts: ServerOpt) -> Result<()> {
+    // Extra `--features` args for the chosen allocator (none for System).
+    let features = match opts.malloc {
+        Malloc::System => &[][..],
+        Malloc::Mimalloc => &["--features", "mimalloc"],
+        Malloc::Jemalloc => &["--features", "jemalloc"],
+    };
+
+    let cmd = cmd!(sh, "cargo install --path crates/rust-analyzer --locked --force --features force-always-assert {features...}");
+    cmd.run()?;
+    Ok(())
+}
diff --git a/src/tools/rust-analyzer/xtask/src/main.rs b/src/tools/rust-analyzer/xtask/src/main.rs
new file mode 100644
index 000000000..335ac324a
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/main.rs
@@ -0,0 +1,91 @@
+//! See <https://github.com/matklad/cargo-xtask/>.
+//!
+//! This binary defines various auxiliary build commands, which are not
+//! expressible with just `cargo`. Notably, it provides tests via `cargo test -p xtask`
+//! for code generation and `cargo xtask install` for installation of
+//! rust-analyzer server and client.
+//!
+//! This binary is integrated into the `cargo` command line by using an alias in
+//! `.cargo/config`.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod flags;
+
+mod install;
+mod release;
+mod dist;
+mod metrics;
+
+use anyhow::bail;
+use std::{
+ env,
+ path::{Path, PathBuf},
+};
+use xshell::{cmd, Shell};
+
+/// Parse the xtask CLI and dispatch to the requested subcommand, with
+/// the shell rooted at the workspace root.
+fn main() -> anyhow::Result<()> {
+    let sh = &Shell::new()?;
+    sh.change_dir(project_root());
+
+    let flags = flags::Xtask::from_env()?;
+    match flags.subcommand {
+        flags::XtaskCmd::Help(_) => {
+            println!("{}", flags::Xtask::HELP);
+            Ok(())
+        }
+        flags::XtaskCmd::Install(cmd) => cmd.run(sh),
+        flags::XtaskCmd::FuzzTests(_) => run_fuzzer(sh),
+        flags::XtaskCmd::Release(cmd) => cmd.run(sh),
+        flags::XtaskCmd::Promote(cmd) => cmd.run(sh),
+        flags::XtaskCmd::Dist(cmd) => cmd.run(sh),
+        flags::XtaskCmd::Metrics(cmd) => cmd.run(sh),
+        // `bb <suffix>`: build a jemalloc release binary and stash it as
+        // target/rust-analyzer-<suffix> for benchmarking comparisons.
+        flags::XtaskCmd::Bb(cmd) => {
+            {
+                let _d = sh.push_dir("./crates/rust-analyzer");
+                cmd!(sh, "cargo build --release --features jemalloc").run()?;
+            }
+            sh.copy_file(
+                "./target/release/rust-analyzer",
+                format!("./target/rust-analyzer-{}", cmd.suffix),
+            )?;
+            Ok(())
+        }
+    }
+}
+
+/// Workspace root: the parent of the xtask crate's manifest directory
+/// (`.ancestors().nth(1)` steps up one level from CARGO_MANIFEST_DIR).
+/// The runtime env var takes precedence over the compile-time value.
+fn project_root() -> PathBuf {
+    Path::new(
+        &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
+    )
+    .ancestors()
+    .nth(1)
+    .unwrap()
+    .to_path_buf()
+}
+
+/// Run the parser fuzz target under nightly cargo-fuzz, installing
+/// cargo-fuzz on demand.
+fn run_fuzzer(sh: &Shell) -> anyhow::Result<()> {
+    let _d = sh.push_dir("./crates/syntax");
+    let _e = sh.push_env("RUSTUP_TOOLCHAIN", "nightly");
+    // `cargo fuzz --help` failing is taken to mean cargo-fuzz is absent.
+    if cmd!(sh, "cargo fuzz --help").read().is_err() {
+        cmd!(sh, "cargo install cargo-fuzz").run()?;
+    };
+
+    // Expecting nightly rustc
+    let out = cmd!(sh, "rustc --version").read()?;
+    if !out.contains("nightly") {
+        bail!("fuzz tests require nightly rustc")
+    }
+
+    cmd!(sh, "cargo fuzz run parser").run()?;
+    Ok(())
+}
+
+/// Today's UTC date as `YYYY-MM-DD`, obtained by shelling out to `date`.
+fn date_iso(sh: &Shell) -> anyhow::Result<String> {
+    let res = cmd!(sh, "date -u +%Y-%m-%d").read()?;
+    Ok(res)
+}
+
+/// Heuristic check for a date-style release tag: same length as
+/// `2020-02-24` and starting with a digit (vs. e.g. `nightly`).
+fn is_release_tag(tag: &str) -> bool {
+    tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit())
+}
diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs
new file mode 100644
index 000000000..ebeb87346
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/metrics.rs
@@ -0,0 +1,200 @@
+use std::{
+ collections::BTreeMap,
+ env, fs,
+ io::Write as _,
+ path::Path,
+ time::{Instant, SystemTime, UNIX_EPOCH},
+};
+
+use anyhow::{bail, format_err};
+use xshell::{cmd, Shell};
+
+use crate::flags;
+
+type Unit = String;
+
+impl flags::Metrics {
+    /// Entry point for `cargo xtask metrics`: builds rust-analyzer,
+    /// measures analysis-stats over itself and several rustc-perf
+    /// benchmarks, and (unless `--dry-run`) appends the results to the
+    /// rust-analyzer/metrics repo and pushes them.
+    pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+        let mut metrics = Metrics::new(sh)?;
+        // Force a clean release build so the build-time measurement is real.
+        if !self.dry_run {
+            sh.remove_path("./target/release")?;
+        }
+        // Fetch the rustc-perf benchmark suite once, then pin it to the
+        // revision recorded in `metrics.perf_revision`.
+        if !Path::new("./target/rustc-perf").exists() {
+            sh.create_dir("./target/rustc-perf")?;
+            cmd!(sh, "git clone https://github.com/rust-lang/rustc-perf.git ./target/rustc-perf")
+                .run()?;
+        }
+        {
+            let _d = sh.push_dir("./target/rustc-perf");
+            let revision = &metrics.perf_revision;
+            cmd!(sh, "git reset --hard {revision}").run()?;
+        }
+
+        let _env = sh.push_env("RA_METRICS", "1");
+
+        {
+            // https://github.com/rust-lang/rust-analyzer/issues/9997
+            let _d = sh.push_dir("target/rustc-perf/collector/benchmarks/webrender");
+            cmd!(sh, "cargo update -p url --precise 1.6.1").run()?;
+        }
+        metrics.measure_build(sh)?;
+        metrics.measure_analysis_stats_self(sh)?;
+        metrics.measure_analysis_stats(sh, "ripgrep")?;
+        metrics.measure_analysis_stats(sh, "webrender")?;
+        metrics.measure_analysis_stats(sh, "diesel/diesel")?;
+
+        // Publish: clone the metrics repo (authenticated via
+        // METRICS_TOKEN), append one JSON line, commit, and push.
+        if !self.dry_run {
+            let _d = sh.push_dir("target");
+            let metrics_token = env::var("METRICS_TOKEN").unwrap();
+            cmd!(
+                sh,
+                "git clone --depth 1 https://{metrics_token}@github.com/rust-analyzer/metrics.git"
+            )
+            .run()?;
+
+            {
+                let mut file =
+                    fs::File::options().append(true).open("target/metrics/metrics.json")?;
+                writeln!(file, "{}", metrics.json())?;
+            }
+
+            let _d = sh.push_dir("metrics");
+            cmd!(sh, "git add .").run()?;
+            cmd!(sh, "git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈")
+                .run()?;
+            cmd!(sh, "git push origin master").run()?;
+        }
+        eprintln!("{metrics:#?}");
+        Ok(())
+    }
+}
+
+impl Metrics {
+    /// Time a from-scratch release build of the rust-analyzer binary
+    /// (dependencies pre-fetched so network time is excluded) and record
+    /// it as the "build" metric in milliseconds.
+    fn measure_build(&mut self, sh: &Shell) -> anyhow::Result<()> {
+        eprintln!("\nMeasuring build");
+        cmd!(sh, "cargo fetch").run()?;
+
+        let time = Instant::now();
+        cmd!(sh, "cargo build --release --package rust-analyzer --bin rust-analyzer").run()?;
+        let time = time.elapsed();
+        self.report("build", time.as_millis() as u64, "ms".into());
+        Ok(())
+    }
+    /// Run analysis-stats on the rust-analyzer workspace itself.
+    fn measure_analysis_stats_self(&mut self, sh: &Shell) -> anyhow::Result<()> {
+        self.measure_analysis_stats_path(sh, "self", ".")
+    }
+    /// Run analysis-stats on a rustc-perf benchmark by name.
+    fn measure_analysis_stats(&mut self, sh: &Shell, bench: &str) -> anyhow::Result<()> {
+        self.measure_analysis_stats_path(
+            sh,
+            bench,
+            &format!("./target/rustc-perf/collector/benchmarks/{}", bench),
+        )
+    }
+    /// Run the just-built binary's `analysis-stats` on `path` and record
+    /// every METRIC line it prints under `analysis-stats/<name>/...`.
+    fn measure_analysis_stats_path(
+        &mut self,
+        sh: &Shell,
+        name: &str,
+        path: &str,
+    ) -> anyhow::Result<()> {
+        eprintln!("\nMeasuring analysis-stats/{name}");
+        let output =
+            cmd!(sh, "./target/release/rust-analyzer -q analysis-stats --memory-usage {path}")
+                .read()?;
+        for (metric, value, unit) in parse_metrics(&output) {
+            self.report(&format!("analysis-stats/{name}/{metric}"), value, unit.into());
+        }
+        Ok(())
+    }
+}
+
+/// Extract `(name, value, unit)` triples from lines of the form
+/// `METRIC:<name>:<value>:<unit>`; all other lines are ignored.
+/// Panics if a METRIC line's value field is not a valid u64.
+fn parse_metrics(output: &str) -> Vec<(&str, u64, &str)> {
+    output
+        .lines()
+        .filter_map(|it| {
+            let entry = it.split(':').collect::<Vec<_>>();
+            match entry.as_slice() {
+                ["METRIC", name, value, unit] => Some((*name, value.parse().unwrap(), *unit)),
+                _ => None,
+            }
+        })
+        .collect()
+}
+
+/// One metrics-collection run: where/when it ran, which revisions were
+/// measured, and the recorded name -> (value, unit) measurements.
+#[derive(Debug)]
+struct Metrics {
+    host: Host,
+    timestamp: SystemTime,
+    // rust-analyzer commit being measured.
+    revision: String,
+    // Pinned rustc-perf benchmark-suite commit.
+    perf_revision: String,
+    // BTreeMap keeps the JSON output deterministically ordered.
+    metrics: BTreeMap<String, (u64, Unit)>,
+}
+
+/// Machine description read from /etc/os-release and /proc (Linux only).
+#[derive(Debug)]
+struct Host {
+    os: String,
+    cpu: String,
+    mem: String,
+}
+
+impl Metrics {
+    /// Snapshot host info, the current time, and the HEAD revision, with
+    /// an empty measurement table and a hard-coded rustc-perf pin.
+    fn new(sh: &Shell) -> anyhow::Result<Metrics> {
+        let host = Host::new(sh)?;
+        let timestamp = SystemTime::now();
+        let revision = cmd!(sh, "git rev-parse HEAD").read()?;
+        let perf_revision = "c52ee623e231e7690a93be88d943016968c1036b".into();
+        Ok(Metrics { host, timestamp, revision, perf_revision, metrics: BTreeMap::new() })
+    }
+
+    /// Record a single measurement; a repeated name overwrites the old value.
+    fn report(&mut self, name: &str, value: u64, unit: Unit) {
+        self.metrics.insert(name.into(), (value, unit));
+    }
+
+    /// Serialize the whole run as a single JSON object string.
+    fn json(&self) -> String {
+        let mut buf = String::new();
+        self.to_json(write_json::object(&mut buf));
+        buf
+    }
+
+    // Write this run into `obj`; each metric becomes a `[value, unit]`
+    // array keyed by metric name, timestamp is seconds since the epoch.
+    fn to_json(&self, mut obj: write_json::Object<'_>) {
+        self.host.to_json(obj.object("host"));
+        let timestamp = self.timestamp.duration_since(UNIX_EPOCH).unwrap();
+        obj.number("timestamp", timestamp.as_secs() as f64);
+        obj.string("revision", &self.revision);
+        obj.string("perf_revision", &self.perf_revision);
+        let mut metrics = obj.object("metrics");
+        for (k, (value, unit)) in &self.metrics {
+            metrics.array(k).number(*value as f64).string(unit);
+        }
+    }
+}
+
+impl Host {
+    /// Read OS name, CPU model, and total memory from the standard Linux
+    /// pseudo-files. Bails on any other OS.
+    fn new(sh: &Shell) -> anyhow::Result<Host> {
+        if cfg!(not(target_os = "linux")) {
+            bail!("can only collect metrics on Linux ");
+        }
+
+        let os = read_field(sh, "/etc/os-release", "PRETTY_NAME=")?.trim_matches('"').to_string();
+
+        let cpu = read_field(sh, "/proc/cpuinfo", "model name")?
+            .trim_start_matches(':')
+            .trim()
+            .to_string();
+
+        let mem = read_field(sh, "/proc/meminfo", "MemTotal:")?;
+
+        return Ok(Host { os, cpu, mem });
+
+        // Local helper: first line of `path` starting with `field`, with
+        // the prefix stripped and the remainder trimmed.
+        fn read_field(sh: &Shell, path: &str, field: &str) -> anyhow::Result<String> {
+            let text = sh.read_file(path)?;
+
+            text.lines()
+                .find_map(|it| it.strip_prefix(field))
+                .map(|it| it.trim().to_string())
+                .ok_or_else(|| format_err!("can't parse {}", path))
+        }
+    }
+    // Emit the host fields into a JSON object.
+    fn to_json(&self, mut obj: write_json::Object<'_>) {
+        obj.string("os", &self.os).string("cpu", &self.cpu).string("mem", &self.mem);
+    }
+}
diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs
new file mode 100644
index 000000000..17ada5156
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/release.rs
@@ -0,0 +1,96 @@
+mod changelog;
+
+use xshell::{cmd, Shell};
+
+use crate::{date_iso, flags, is_release_tag, project_root};
+
+impl flags::Release {
+    /// Entry point for `cargo xtask release`: fast-forwards the `release`
+    /// branch to the `nightly` tag, regenerates the docs, syncs them to
+    /// the rust-analyzer.github.io checkout, and writes this week's
+    /// changelog post.
+    pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+        if !self.dry_run {
+            cmd!(sh, "git switch release").run()?;
+            cmd!(sh, "git fetch upstream --tags --force").run()?;
+            cmd!(sh, "git reset --hard tags/nightly").run()?;
+            // The `release` branch sometimes has a couple of cherry-picked
+            // commits for patch releases. If that's the case, just overwrite
+            // it. As we are setting `release` branch to an up-to-date `nightly`
+            // tag, this shouldn't be problematic in general.
+            //
+            // Note that, as we tag releases, we don't worry about "losing"
+            // commits -- they'll be kept alive by the tag. More generally, we
+            // don't care about historic releases all that much, it's fine even
+            // to delete old tags.
+            cmd!(sh, "git push --force").run()?;
+        }
+
+        // Generates bits of manual.adoc.
+        cmd!(sh, "cargo test -p ide-assists -p ide-diagnostics -p rust-analyzer -- sourcegen_")
+            .run()?;
+
+        // The website repo is expected as a sibling checkout of this one.
+        let website_root = project_root().join("../rust-analyzer.github.io");
+        {
+            let _dir = sh.push_dir(&website_root);
+            cmd!(sh, "git switch src").run()?;
+            cmd!(sh, "git pull").run()?;
+        }
+        let changelog_dir = website_root.join("./thisweek/_posts");
+
+        let today = date_iso(sh)?;
+        let commit = cmd!(sh, "git rev-parse HEAD").read()?;
+        // Next changelog number: max of the numbers parsed from existing
+        // post file names (date-prefixed stems), plus one; 0 if none parse.
+        let changelog_n = sh
+            .read_dir(changelog_dir.as_path())?
+            .into_iter()
+            .filter_map(|p| p.file_stem().map(|s| s.to_string_lossy().to_string()))
+            .filter_map(|s| s.splitn(5, '-').last().map(|n| n.replace('-', ".")))
+            .filter_map(|s| s.parse::<f32>().ok())
+            .map(|n| 1 + n.floor() as usize)
+            .max()
+            .unwrap_or_default();
+
+        // Copy the generated manual pages into the website checkout.
+        for adoc in [
+            "manual.adoc",
+            "generated_assists.adoc",
+            "generated_config.adoc",
+            "generated_diagnostic.adoc",
+            "generated_features.adoc",
+        ] {
+            let src = project_root().join("./docs/user/").join(adoc);
+            let dst = website_root.join(adoc);
+
+            let contents = sh.read_file(src)?;
+            sh.write_file(dst, contents)?;
+        }
+
+        // The changelog covers everything since the last date-style tag.
+        let tags = cmd!(sh, "git tag --list").read()?;
+        let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap();
+
+        let contents = changelog::get_changelog(sh, changelog_n, &commit, prev_tag, &today)?;
+        let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n));
+        sh.write_file(&path, &contents)?;
+
+        Ok(())
+    }
+}
+
+impl flags::Promote {
+    /// Entry point for `cargo xtask promote`: in the sibling
+    /// `../rust-rust-analyzer` checkout, create a dated branch off
+    /// upstream master, subtree-pull rust-analyzer into it, and (unless
+    /// `--dry-run`) push the branch and open a PR page.
+    pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+        let _dir = sh.push_dir("../rust-rust-analyzer");
+        cmd!(sh, "git switch master").run()?;
+        cmd!(sh, "git fetch upstream").run()?;
+        cmd!(sh, "git reset --hard upstream/master").run()?;
+
+        let date = date_iso(sh)?;
+        let branch = format!("rust-analyzer-{date}");
+        cmd!(sh, "git switch -c {branch}").run()?;
+        cmd!(sh, "git subtree pull -P src/tools/rust-analyzer rust-analyzer master").run()?;
+
+        if !self.dry_run {
+            cmd!(sh, "git push -u origin {branch}").run()?;
+            // Open the new-PR page pre-filled with `r? @ghost` in the body.
+            cmd!(
+                sh,
+                "xdg-open https://github.com/matklad/rust/pull/new/{branch}?body=r%3F%20%40ghost"
+            )
+            .run()?;
+        }
+        Ok(())
+    }
+}
diff --git a/src/tools/rust-analyzer/xtask/src/release/changelog.rs b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
new file mode 100644
index 000000000..2647f7794
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
@@ -0,0 +1,171 @@
+use std::fmt::Write;
+use std::{env, iter};
+
+use anyhow::bail;
+use xshell::{cmd, Shell};
+
+/// Build the weekly changelog (AsciiDoc) from merge commits since
+/// `prev_tag`. For each merged PR, the GitHub API is queried with curl
+/// and the JSON picked apart with jq (deliberately no HTTP/JSON crates,
+/// to keep xtask build times low). PRs are categorized by an explicit
+/// `changelog ...` comment if present, else by the PR title prefix.
+/// Requires a `GITHUB_TOKEN` env var.
+pub(crate) fn get_changelog(
+    sh: &Shell,
+    changelog_n: usize,
+    commit: &str,
+    prev_tag: &str,
+    today: &str,
+) -> anyhow::Result<String> {
+    let token = match env::var("GITHUB_TOKEN") {
+        Ok(token) => token,
+        Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."),
+    };
+
+    // Oldest-first so the changelog lists PRs in merge order.
+    let git_log = cmd!(sh, "git log {prev_tag}..HEAD --reverse").read()?;
+    let mut features = String::new();
+    let mut fixes = String::new();
+    let mut internal = String::new();
+    let mut others = String::new();
+    for line in git_log.lines() {
+        let line = line.trim_start();
+        if let Some(pr_num) = parse_pr_number(&line) {
+            let accept = "Accept: application/vnd.github.v3+json";
+            let authorization = format!("Authorization: token {}", token);
+            let pr_url = "https://api.github.com/repos/rust-lang/rust-analyzer/issues";
+
+            // we don't use an HTTPS client or JSON parser to keep the build times low
+            let pr = pr_num.to_string();
+            let pr_json =
+                cmd!(sh, "curl -s -H {accept} -H {authorization} {pr_url}/{pr}").read()?;
+            // jq output is a quoted JSON string; the [1..len-1] slices below
+            // strip the surrounding quotes before unescaping.
+            let pr_title = cmd!(sh, "jq .title").stdin(&pr_json).read()?;
+            let pr_title = unescape(&pr_title[1..pr_title.len() - 1]);
+            let pr_comment = cmd!(sh, "jq .body").stdin(pr_json).read()?;
+
+            let comments_json =
+                cmd!(sh, "curl -s -H {accept} -H {authorization} {pr_url}/{pr}/comments").read()?;
+            let pr_comments = cmd!(sh, "jq .[].body").stdin(comments_json).read()?;
+
+            // Prefer the LAST `changelog ...` directive found in the PR body
+            // or comments (scanned in reverse); fall back to the title.
+            let l = iter::once(pr_comment.as_str())
+                .chain(pr_comments.lines())
+                .rev()
+                .find_map(|it| {
+                    let it = unescape(&it[1..it.len() - 1]);
+                    it.lines().find_map(parse_changelog_line)
+                })
+                .into_iter()
+                .next()
+                .unwrap_or_else(|| parse_title_line(&pr_title));
+            let s = match l.kind {
+                PrKind::Feature => &mut features,
+                PrKind::Fix => &mut fixes,
+                PrKind::Internal => &mut internal,
+                PrKind::Other => &mut others,
+                PrKind::Skip => continue,
+            };
+            writeln!(s, "* pr:{}[] {}", pr_num, l.message.as_deref().unwrap_or(&pr_title)).unwrap();
+        }
+    }
+
+    // Assemble the final AsciiDoc post from the per-category sections.
+    let contents = format!(
+        "\
+= Changelog #{}
+:sectanchors:
+:page-layout: post
+
+Commit: commit:{}[] +
+Release: release:{}[]
+
+== New Features
+
+{}
+
+== Fixes
+
+{}
+
+== Internal Improvements
+
+{}
+
+== Others
+
+{}
+",
+        changelog_n, commit, today, features, fixes, internal, others
+    );
+    Ok(contents)
+}
+
+/// Changelog section a PR belongs to; `Skip` excludes it entirely.
+#[derive(Clone, Copy)]
+enum PrKind {
+    Feature,
+    Fix,
+    Internal,
+    Other,
+    Skip,
+}
+
+/// Classification of one PR: its section and an optional override for
+/// the changelog entry text (falls back to the PR title when `None`).
+struct PrInfo {
+    message: Option<String>,
+    kind: PrKind,
+}
+
+/// Undo jq's JSON string escaping: drop `\"` entirely, turn `\n` into a
+/// real newline, and drop `\r`.
+fn unescape(s: &str) -> String {
+    s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
+}
+
+/// Extract the PR number from a merge-commit subject line, recognizing
+/// both bors (`Merge #123`) and homu (`Auto merge of #123 - ...`) styles.
+/// Returns `None` for anything else.
+fn parse_pr_number(s: &str) -> Option<u32> {
+    const BORS_PREFIX: &str = "Merge #";
+    const HOMU_PREFIX: &str = "Auto merge of #";
+    if s.starts_with(BORS_PREFIX) {
+        let s = &s[BORS_PREFIX.len()..];
+        s.parse().ok()
+    } else if s.starts_with(HOMU_PREFIX) {
+        // Homu subjects continue after the number; parse up to the space.
+        let s = &s[HOMU_PREFIX.len()..];
+        if let Some(space) = s.find(' ') {
+            s[..space].parse().ok()
+        } else {
+            None
+        }
+    } else {
+        None
+    }
+}
+
+/// Parse an explicit changelog directive of the form
+/// `changelog <kind>[:] [message]`. Returns `None` if the line is not a
+/// directive. An unknown kind is kept as `Other`, with the unknown word
+/// folded back into the message.
+fn parse_changelog_line(s: &str) -> Option<PrInfo> {
+    let parts = s.splitn(3, ' ').collect::<Vec<_>>();
+    if parts.len() < 2 || parts[0] != "changelog" {
+        return None;
+    }
+    let message = parts.get(2).map(|it| it.to_string());
+    let kind = match parts[1].trim_end_matches(':') {
+        "feature" => PrKind::Feature,
+        "fix" => PrKind::Fix,
+        "internal" => PrKind::Internal,
+        "skip" => PrKind::Skip,
+        _ => {
+            let kind = PrKind::Other;
+            let message = format!("{} {}", parts[1], message.unwrap_or_default());
+            return Some(PrInfo { kind, message: Some(message) });
+        }
+    };
+    let res = PrInfo { message, kind };
+    Some(res)
+}
+
+/// Classify a PR by its title prefix (case-insensitive match, but the
+/// message keeps the original casing). Titles with no recognized prefix
+/// land in `Other`; `minor:` titles are skipped.
+fn parse_title_line(s: &str) -> PrInfo {
+    let lower = s.to_ascii_lowercase();
+    const PREFIXES: [(&str, PrKind); 5] = [
+        ("feat: ", PrKind::Feature),
+        ("feature: ", PrKind::Feature),
+        ("fix: ", PrKind::Fix),
+        ("internal: ", PrKind::Internal),
+        ("minor: ", PrKind::Skip),
+    ];
+
+    for &(prefix, kind) in &PREFIXES {
+        if lower.starts_with(prefix) {
+            let message = match &kind {
+                PrKind::Skip => None,
+                // Strip the prefix from the original (non-lowered) title.
+                _ => Some(s[prefix.len()..].to_string()),
+            };
+            return PrInfo { message, kind };
+        }
+    }
+    PrInfo { kind: PrKind::Other, message: Some(s.to_string()) }
+}